| code (string, lengths 5 to 1.03M) | repo_name (string, lengths 5 to 90) | path (string, lengths 4 to 158) | license (15 classes) | size (int64, 5 to 1.03M) | n_ast_errors (int64, 0 to 53.9k) | ast_max_depth (int64, 2 to 4.17k) | n_whitespaces (int64, 0 to 365k) | n_ast_nodes (int64, 3 to 317k) | n_ast_terminals (int64, 1 to 171k) | n_ast_nonterminals (int64, 1 to 146k) | loc (int64, -1 to 37.3k) | cycloplexity (int64, -1 to 1.31k) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.DFAReporting.Placements.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves a list of placements, possibly filtered. This method supports
-- paging.
--
-- /See:/ <https://developers.google.com/doubleclick-advertisers/ Campaign Manager 360 API Reference> for @dfareporting.placements.list@.
module Network.Google.Resource.DFAReporting.Placements.List
(
-- * REST Resource
PlacementsListResource
-- * Creating a Request
, placementsList
, PlacementsList
-- * Request Lenses
, pPlacementStrategyIds
, pXgafv
, pContentCategoryIds
, pMaxEndDate
, pUploadProtocol
, pAccessToken
, pCampaignIds
, pPricingTypes
, pSearchString
, pSizeIds
, pUploadType
, pIds
, pProFileId
, pGroupIds
, pDirectorySiteIds
, pSortOrder
, pPaymentSource
, pSiteIds
, pPageToken
, pSortField
, pCompatibilities
, pMaxStartDate
, pAdvertiserIds
, pMinStartDate
, pArchived
, pMaxResults
, pMinEndDate
, pCallback
) where
import Network.Google.DFAReporting.Types
import Network.Google.Prelude
-- | A resource alias for @dfareporting.placements.list@ method which the
-- 'PlacementsList' request conforms to.
type PlacementsListResource =
"dfareporting" :>
"v3.5" :>
"userprofiles" :>
Capture "profileId" (Textual Int64) :>
"placements" :>
QueryParams "placementStrategyIds" (Textual Int64) :>
QueryParam "$.xgafv" Xgafv :>
QueryParams "contentCategoryIds" (Textual Int64) :>
QueryParam "maxEndDate" Text :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParams "campaignIds" (Textual Int64) :>
QueryParams "pricingTypes"
PlacementsListPricingTypes
:>
QueryParam "searchString" Text :>
QueryParams "sizeIds" (Textual Int64) :>
QueryParam "uploadType" Text :>
QueryParams "ids" (Textual Int64) :>
QueryParams "groupIds" (Textual Int64) :>
QueryParams "directorySiteIds"
(Textual Int64)
:>
QueryParam "sortOrder"
PlacementsListSortOrder
:>
QueryParam "paymentSource"
PlacementsListPaymentSource
:>
QueryParams "siteIds"
(Textual Int64)
:>
QueryParam "pageToken" Text :>
QueryParam "sortField"
PlacementsListSortField
:>
QueryParams
"compatibilities"
PlacementsListCompatibilities
:>
QueryParam "maxStartDate"
Text
:>
QueryParams
"advertiserIds"
(Textual Int64)
:>
QueryParam
"minStartDate"
Text
:>
QueryParam
"archived"
Bool
:>
QueryParam
"maxResults"
(Textual Int32)
:>
QueryParam
"minEndDate"
Text
:>
QueryParam
"callback"
Text
:>
QueryParam
"alt"
AltJSON
:>
Get
'[JSON]
PlacementsListResponse
-- | Retrieves a list of placements, possibly filtered. This method supports
-- paging.
--
-- /See:/ 'placementsList' smart constructor.
data PlacementsList =
PlacementsList'
{ _pPlacementStrategyIds :: !(Maybe [Textual Int64])
, _pXgafv :: !(Maybe Xgafv)
, _pContentCategoryIds :: !(Maybe [Textual Int64])
, _pMaxEndDate :: !(Maybe Text)
, _pUploadProtocol :: !(Maybe Text)
, _pAccessToken :: !(Maybe Text)
, _pCampaignIds :: !(Maybe [Textual Int64])
, _pPricingTypes :: !(Maybe [PlacementsListPricingTypes])
, _pSearchString :: !(Maybe Text)
, _pSizeIds :: !(Maybe [Textual Int64])
, _pUploadType :: !(Maybe Text)
, _pIds :: !(Maybe [Textual Int64])
, _pProFileId :: !(Textual Int64)
, _pGroupIds :: !(Maybe [Textual Int64])
, _pDirectorySiteIds :: !(Maybe [Textual Int64])
, _pSortOrder :: !PlacementsListSortOrder
, _pPaymentSource :: !(Maybe PlacementsListPaymentSource)
, _pSiteIds :: !(Maybe [Textual Int64])
, _pPageToken :: !(Maybe Text)
, _pSortField :: !PlacementsListSortField
, _pCompatibilities :: !(Maybe [PlacementsListCompatibilities])
, _pMaxStartDate :: !(Maybe Text)
, _pAdvertiserIds :: !(Maybe [Textual Int64])
, _pMinStartDate :: !(Maybe Text)
, _pArchived :: !(Maybe Bool)
, _pMaxResults :: !(Textual Int32)
, _pMinEndDate :: !(Maybe Text)
, _pCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'PlacementsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pPlacementStrategyIds'
--
-- * 'pXgafv'
--
-- * 'pContentCategoryIds'
--
-- * 'pMaxEndDate'
--
-- * 'pUploadProtocol'
--
-- * 'pAccessToken'
--
-- * 'pCampaignIds'
--
-- * 'pPricingTypes'
--
-- * 'pSearchString'
--
-- * 'pSizeIds'
--
-- * 'pUploadType'
--
-- * 'pIds'
--
-- * 'pProFileId'
--
-- * 'pGroupIds'
--
-- * 'pDirectorySiteIds'
--
-- * 'pSortOrder'
--
-- * 'pPaymentSource'
--
-- * 'pSiteIds'
--
-- * 'pPageToken'
--
-- * 'pSortField'
--
-- * 'pCompatibilities'
--
-- * 'pMaxStartDate'
--
-- * 'pAdvertiserIds'
--
-- * 'pMinStartDate'
--
-- * 'pArchived'
--
-- * 'pMaxResults'
--
-- * 'pMinEndDate'
--
-- * 'pCallback'
placementsList
:: Int64 -- ^ 'pProFileId'
-> PlacementsList
placementsList pPProFileId_ =
PlacementsList'
{ _pPlacementStrategyIds = Nothing
, _pXgafv = Nothing
, _pContentCategoryIds = Nothing
, _pMaxEndDate = Nothing
, _pUploadProtocol = Nothing
, _pAccessToken = Nothing
, _pCampaignIds = Nothing
, _pPricingTypes = Nothing
, _pSearchString = Nothing
, _pSizeIds = Nothing
, _pUploadType = Nothing
, _pIds = Nothing
, _pProFileId = _Coerce # pPProFileId_
, _pGroupIds = Nothing
, _pDirectorySiteIds = Nothing
, _pSortOrder = Ascending
, _pPaymentSource = Nothing
, _pSiteIds = Nothing
, _pPageToken = Nothing
, _pSortField = PLSFID
, _pCompatibilities = Nothing
, _pMaxStartDate = Nothing
, _pAdvertiserIds = Nothing
, _pMinStartDate = Nothing
, _pArchived = Nothing
, _pMaxResults = 1000
, _pMinEndDate = Nothing
, _pCallback = Nothing
}
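-- A minimal usage sketch (not part of the generated module): build a request
-- with the smart constructor and override a few fields through the lenses
-- defined below. The profile ID and field values are hypothetical, and the
-- (&) and (.~) operators are assumed to be in scope via Network.Google.Prelude
-- (or Control.Lens).
examplePlacementsList :: PlacementsList
examplePlacementsList =
  placementsList 12345
    & pMaxResults .~ 50
    & pArchived .~ Just False
    & pSearchString .~ Just "placement 2015"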
-- | Select only placements that are associated with these placement
-- strategies.
pPlacementStrategyIds :: Lens' PlacementsList [Int64]
pPlacementStrategyIds
= lens _pPlacementStrategyIds
(\ s a -> s{_pPlacementStrategyIds = a})
. _Default
. _Coerce
-- | V1 error format.
pXgafv :: Lens' PlacementsList (Maybe Xgafv)
pXgafv = lens _pXgafv (\ s a -> s{_pXgafv = a})
-- | Select only placements that are associated with these content
-- categories.
pContentCategoryIds :: Lens' PlacementsList [Int64]
pContentCategoryIds
= lens _pContentCategoryIds
(\ s a -> s{_pContentCategoryIds = a})
. _Default
. _Coerce
-- | Select only placements or placement groups whose end date is on or
-- before the specified maxEndDate. The date should be formatted as
-- \"yyyy-MM-dd\".
pMaxEndDate :: Lens' PlacementsList (Maybe Text)
pMaxEndDate
= lens _pMaxEndDate (\ s a -> s{_pMaxEndDate = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
pUploadProtocol :: Lens' PlacementsList (Maybe Text)
pUploadProtocol
= lens _pUploadProtocol
(\ s a -> s{_pUploadProtocol = a})
-- | OAuth access token.
pAccessToken :: Lens' PlacementsList (Maybe Text)
pAccessToken
= lens _pAccessToken (\ s a -> s{_pAccessToken = a})
-- | Select only placements that belong to these campaigns.
pCampaignIds :: Lens' PlacementsList [Int64]
pCampaignIds
= lens _pCampaignIds (\ s a -> s{_pCampaignIds = a})
. _Default
. _Coerce
-- | Select only placements with these pricing types.
pPricingTypes :: Lens' PlacementsList [PlacementsListPricingTypes]
pPricingTypes
= lens _pPricingTypes
(\ s a -> s{_pPricingTypes = a})
. _Default
. _Coerce
-- | Allows searching for placements by name or ID. Wildcards (*) are
-- allowed. For example, \"placement*2015\" will return placements with
-- names like \"placement June 2015\", \"placement May 2015\", or simply
-- \"placements 2015\". Most of the searches also add wildcards implicitly
-- at the start and the end of the search string. For example, a search
-- string of \"placement\" will match placements with name \"my
-- placement\", \"placement 2015\", or simply \"placement\" .
pSearchString :: Lens' PlacementsList (Maybe Text)
pSearchString
= lens _pSearchString
(\ s a -> s{_pSearchString = a})
-- | Select only placements that are associated with these sizes.
pSizeIds :: Lens' PlacementsList [Int64]
pSizeIds
= lens _pSizeIds (\ s a -> s{_pSizeIds = a}) .
_Default
. _Coerce
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
pUploadType :: Lens' PlacementsList (Maybe Text)
pUploadType
= lens _pUploadType (\ s a -> s{_pUploadType = a})
-- | Select only placements with these IDs.
pIds :: Lens' PlacementsList [Int64]
pIds
= lens _pIds (\ s a -> s{_pIds = a}) . _Default .
_Coerce
-- | User profile ID associated with this request.
pProFileId :: Lens' PlacementsList Int64
pProFileId
= lens _pProFileId (\ s a -> s{_pProFileId = a}) .
_Coerce
-- | Select only placements that belong to these placement groups.
pGroupIds :: Lens' PlacementsList [Int64]
pGroupIds
= lens _pGroupIds (\ s a -> s{_pGroupIds = a}) .
_Default
. _Coerce
-- | Select only placements that are associated with these directory sites.
pDirectorySiteIds :: Lens' PlacementsList [Int64]
pDirectorySiteIds
= lens _pDirectorySiteIds
(\ s a -> s{_pDirectorySiteIds = a})
. _Default
. _Coerce
-- | Order of sorted results.
pSortOrder :: Lens' PlacementsList PlacementsListSortOrder
pSortOrder
= lens _pSortOrder (\ s a -> s{_pSortOrder = a})
-- | Select only placements with this payment source.
pPaymentSource :: Lens' PlacementsList (Maybe PlacementsListPaymentSource)
pPaymentSource
= lens _pPaymentSource
(\ s a -> s{_pPaymentSource = a})
-- | Select only placements that are associated with these sites.
pSiteIds :: Lens' PlacementsList [Int64]
pSiteIds
= lens _pSiteIds (\ s a -> s{_pSiteIds = a}) .
_Default
. _Coerce
-- | Value of the nextPageToken from the previous result page.
pPageToken :: Lens' PlacementsList (Maybe Text)
pPageToken
= lens _pPageToken (\ s a -> s{_pPageToken = a})
-- | Field by which to sort the list.
pSortField :: Lens' PlacementsList PlacementsListSortField
pSortField
= lens _pSortField (\ s a -> s{_pSortField = a})
-- | Select only placements that are associated with these compatibilities.
-- DISPLAY and DISPLAY_INTERSTITIAL refer to rendering either on desktop or
-- on mobile devices for regular or interstitial ads respectively. APP and
-- APP_INTERSTITIAL are for rendering in mobile apps. IN_STREAM_VIDEO
-- refers to rendering in in-stream video ads developed with the VAST
-- standard.
pCompatibilities :: Lens' PlacementsList [PlacementsListCompatibilities]
pCompatibilities
= lens _pCompatibilities
(\ s a -> s{_pCompatibilities = a})
. _Default
. _Coerce
-- | Select only placements or placement groups whose start date is on or
-- before the specified maxStartDate. The date should be formatted as
-- \"yyyy-MM-dd\".
pMaxStartDate :: Lens' PlacementsList (Maybe Text)
pMaxStartDate
= lens _pMaxStartDate
(\ s a -> s{_pMaxStartDate = a})
-- | Select only placements that belong to these advertisers.
pAdvertiserIds :: Lens' PlacementsList [Int64]
pAdvertiserIds
= lens _pAdvertiserIds
(\ s a -> s{_pAdvertiserIds = a})
. _Default
. _Coerce
-- | Select only placements or placement groups whose start date is on or
-- after the specified minStartDate. The date should be formatted as
-- \"yyyy-MM-dd\".
pMinStartDate :: Lens' PlacementsList (Maybe Text)
pMinStartDate
= lens _pMinStartDate
(\ s a -> s{_pMinStartDate = a})
-- | Select only archived placements. Don\'t set this field to select both
-- archived and non-archived placements.
pArchived :: Lens' PlacementsList (Maybe Bool)
pArchived
= lens _pArchived (\ s a -> s{_pArchived = a})
-- | Maximum number of results to return.
pMaxResults :: Lens' PlacementsList Int32
pMaxResults
= lens _pMaxResults (\ s a -> s{_pMaxResults = a}) .
_Coerce
-- | Select only placements or placement groups whose end date is on or after
-- the specified minEndDate. The date should be formatted as
-- \"yyyy-MM-dd\".
pMinEndDate :: Lens' PlacementsList (Maybe Text)
pMinEndDate
= lens _pMinEndDate (\ s a -> s{_pMinEndDate = a})
-- | JSONP
pCallback :: Lens' PlacementsList (Maybe Text)
pCallback
= lens _pCallback (\ s a -> s{_pCallback = a})
instance GoogleRequest PlacementsList where
type Rs PlacementsList = PlacementsListResponse
type Scopes PlacementsList =
'["https://www.googleapis.com/auth/dfatrafficking"]
requestClient PlacementsList'{..}
= go _pProFileId (_pPlacementStrategyIds ^. _Default)
_pXgafv
(_pContentCategoryIds ^. _Default)
_pMaxEndDate
_pUploadProtocol
_pAccessToken
(_pCampaignIds ^. _Default)
(_pPricingTypes ^. _Default)
_pSearchString
(_pSizeIds ^. _Default)
_pUploadType
(_pIds ^. _Default)
(_pGroupIds ^. _Default)
(_pDirectorySiteIds ^. _Default)
(Just _pSortOrder)
_pPaymentSource
(_pSiteIds ^. _Default)
_pPageToken
(Just _pSortField)
(_pCompatibilities ^. _Default)
_pMaxStartDate
(_pAdvertiserIds ^. _Default)
_pMinStartDate
_pArchived
(Just _pMaxResults)
_pMinEndDate
_pCallback
(Just AltJSON)
dFAReportingService
where go
= buildClient (Proxy :: Proxy PlacementsListResource)
mempty
| | brendanhay/gogol | gogol-dfareporting/gen/Network/Google/Resource/DFAReporting/Placements/List.hs | mpl-2.0 | 18,010 | 0 | 40 | 6,689 | 2,754 | 1,569 | 1,185 | 396 | 1 |
-- GSoC 2015 - Haskell bindings for OpenCog.
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE UndecidableInstances #-}
-- | This module defines some utility syntactic sugar for embedded atom notation.
module OpenCog.AtomSpace.Sugar (
stv
, ctv
, itv
, ftv
, ptv
, noTv
, atomList
, (|>)
, (\>)
) where
import OpenCog.AtomSpace.Inheritance (type (<~))
import OpenCog.AtomSpace.Types (TruthVal(..),Gen(..),Atom(..))
import Data.Typeable (Typeable)
-- | TruthVal syntactic sugar.
noTv :: Maybe TruthVal
noTv = Nothing
stv :: Double -> Double -> Maybe TruthVal
stv a b = Just $ SimpleTV a b
ctv :: Double -> Double -> Double -> Maybe TruthVal
ctv a b c = Just $ CountTV a b c
itv :: Double -> Double -> Double -> Double -> Double -> Maybe TruthVal
itv a b c d e = Just $ IndefTV a b c d e
ftv :: Double -> Double -> Maybe TruthVal
ftv a b = Just $ FuzzyTV a b
ptv :: Double -> Double -> Double -> Maybe TruthVal
ptv a b c = Just $ ProbTV a b c
-- | 'atomList' is simple sugar notation for listing atoms, using operators '|>'
-- and '\>'. For example, if you want to define a list of atoms:
--
-- > l :: [AtomGen]
-- > l = atomList
-- >       |> ConceptNode "concept1" noTv
-- >       |> PredicateNode "predicate2" noTv
-- >       \> ConceptNode "lastconcept" noTv
atomList :: (Typeable c) => ([Gen c] -> [Gen c])
atomList = id
infixl 5 |>
infixr 4 \>
-- | The '|>' and '\>' operators are provided for easier notation of lists of
-- 'Gen' elements when working with atoms of arbitrary arity (e.g. 'ListLink').
(|>) :: (Typeable c,b <~ c) => ([Gen c] -> a) -> Atom b -> ([Gen c] -> a)
f |> at = \l -> f $ (Gen at) : l
(\>) :: (Typeable c,b <~ c) => ([Gen c] -> a) -> Atom b -> a
f \> at = f [Gen at]
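-- For reference (an explanatory note, not in the original module): 'atomList'
-- is just 'id', and each '|>' or '\>' application conses a 'Gen'-wrapped atom
-- onto the list being built, so for atoms a, b and c:
--
--   atomList |> a |> b \> c  ==  [Gen a, Gen b, Gen c]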
| | printedheart/atomspace | opencog/haskell/OpenCog/AtomSpace/Sugar.hs | agpl-3.0 | 1,858 | 0 | 11 | 443 | 546 | 303 | 243 | -1 | -1 |
{-# LANGUAGE GADTs, EmptyDataDecls, TypeOperators #-}
{-# LANGUAGE MultiParamTypeClasses, FunctionalDependencies, FlexibleInstances #-}
{-# LANGUAGE UndecidableInstances #-}
-- UndecidableInstances is needed because some instance declarations below do
-- not satisfy the coverage condition.
-----------------------------------------------------------------------------
data Prop
data S
data E
infixl 9 :@
data Expr t where
BVar :: Int -> Expr t
FVar :: String -> Expr t
(:@) :: Expr (a->b) -> Expr a -> Expr b
Lam :: Expr b -> Expr (a->b)
Int :: Expr a -> Expr (S->a)
Ext :: Expr (S->a) -> Expr a
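-- A small sketch (not in the original file): a hand-built term of type
-- Expr Prop, applying a hypothetical one-place predicate to an entity
-- constant. The annotation on the first FVar fixes its otherwise polymorphic
-- type.
exampleProp :: Expr Prop
exampleProp = (FVar "walk" :: Expr (E -> Prop)) :@ FVar "john"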
-----------------------------------------------------------------------------
infixl 1 :/
data Sen
data IV
data CN
data (:/) a b
type T = Sen :/ IV
type TV = IV :/ T
type IAV = IV :/ IV
data P c where
F :: CatToType b u => P (a :/ b) -> P b -> P a
class CatToType c t | c -> t
instance CatToType Sen Prop
instance CatToType IV (E->Prop)
instance CatToType CN (E->Prop)
instance (CatToType a t, CatToType b u) => CatToType (a :/ b) ((S->u)->t)
translate :: CatToType c t => P c -> Expr t
translate (F fun arg) = translate fun :@ Int (translate arg)
-----------------------------------------------------------------------------
{-
class Assoc a b | a -> b
instance Assoc Int Int
f :: Assoc Int b => Int -> b
f x = x
-}
| | msakai/ptq | misc/Test3.hs | lgpl-2.1 | 1,207 | 0 | 9 | 248 | 400 | 215 | 185 | -1 | -1 |
{-# LANGUAGE GeneralizedNewtypeDeriving, DeriveGeneric, DerivingVia #-}
module ProjectM36.MerkleHash where
import Data.ByteString (ByteString)
import GHC.Generics
import Control.DeepSeq (NFData)
newtype MerkleHash = MerkleHash { _unMerkleHash :: ByteString }
deriving (Show, Eq, Generic, Monoid, Semigroup, NFData)
| | agentm/project-m36 | src/lib/ProjectM36/MerkleHash.hs | unlicense | 351 | 0 | 6 | 69 | 72 | 43 | 29 | 7 | 0 |
module Main where
import Data.Char
import System.Environment
lowers :: String -> Int
lowers xs = length [x | x <- xs, isLower x]
count :: Char -> String -> Int
count x xs = length [x' | x' <- xs, x==x']
positions :: Eq a => a -> [a] -> [Int]
positions x xs = [ i | (x',i) <- zip xs [0..n], x == x' ]
  where
    n = length xs - 1
let2int :: Char -> Int
let2int c = ord c - ord 'a'
int2let :: Int -> Char
int2let n = chr (ord 'a' + n)
shift :: Int -> Char -> Char
shift n c | isLower c = int2let ((let2int c + n) `mod` 26)
| otherwise = c
encode :: Int -> String -> String
encode n xs = [shift n x | x <- xs]
table :: [Float]
table = [8.2, 1.5, 2.8, 4.3, 12.7, 2.2, 2.0, 6.1, 7.0, 0.2, 0.8, 4.0, 2.4,
6.7, 7.5, 1.9, 0.1, 6.0, 6.3, 9.1, 2.8, 1.0, 2.4, 0.2, 2.0, 0.1]
percent :: Int -> Int -> Float
percent n m = (fromIntegral n / fromIntegral m) * 100
freqs :: String -> [Float]
freqs xs = [percent (count x xs) n | x <- ['a'..'z']]
where n = lowers xs
chisqr :: [Float] -> [Float] -> Float
chisqr os es = sum [ (o - e)^2 / e | (o,e) <- zip os es ]
rotate :: Int -> [a] -> [a]
rotate n xs = drop n xs ++ take n xs
crack :: String -> String
crack xs = encode (-factor) xs
where
factor = head (positions (minimum chitab) chitab)
chitab = [ chisqr (rotate n table') table | n <- [0..25] ]
table' = freqs xs
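-- Worked example (a classic textbook case, shown here as a sketch): with a
-- shift factor of 3,
--
--   encode 3 "haskell is fun"  ==  "kdvnhoo lv ixq"
--   crack "kdvnhoo lv ixq"     ==  "haskell is fun"
--
-- crack recovers the shift by minimising the chi-square distance between
-- rotations of the observed letter-frequency table and the expected English
-- frequencies in 'table'.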
main :: IO ()
main = do
  xs <- getContents
  (n:_) <- getArgs
  let m = read n
      ys | m == 0 = crack xs
         | m /= 0 = encode m xs
  putStr ys
| | Some-T/Portfolio | HASKELL/Labs/H7/H7.1.hsproj/H7_1.hs | unlicense | 1,434 | 0 | 13 | 376 | 790 | 415 | 375 | 42 | 1 |
module Exercise5 where
isPrime :: Int -> Bool
isPrime x = length [ y | y <- [1..x], x `mod` y == 0] == 2  -- exactly two divisors: 1 and x
primes :: Int -> [Int]
primes x = take x [y | y <- [1..], isPrime y]
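-- Examples (illustrative): isPrime 7 == True, isPrime 8 == False;
-- primes n takes the first n numbers from [1..] that satisfy isPrime.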
| | tonilopezmr/Learning-Haskell | Exercises/2/Exercise_5.hs | apache-2.0 | 180 | 0 | 10 | 46 | 104 | 56 | 48 | 5 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-
Copyright 2017 The CodeWorld Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
module CommentUtil where
import Control.Monad
import Data.Aeson
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as BC
import qualified Data.ByteString.Lazy as LB
import Data.List
import Data.Maybe (fromJust)
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import System.Directory
import System.FilePath
import DataUtil
import Model
newtype CommentId = CommentId { unCommentId :: Text } deriving Eq
commentHashRootDir :: BuildMode -> FilePath
commentHashRootDir (BuildMode m) = "data" </> m </> "commentHash"
commentRootDir :: BuildMode -> Text -> FilePath -> ProjectId -> FilePath
commentRootDir mode userId' path projectId =
userProjectDir mode userId' </> path </> projectFile projectId <.> "comments"
sharedCommentsDir :: BuildMode -> Text -> FilePath
sharedCommentsDir mode userId' = userProjectDir mode userId' </> "commentables"
commentHashLink :: CommentId -> FilePath
commentHashLink (CommentId c) = let s = T.unpack c in take 3 s </> s
commentProjectLink :: ProjectId -> FilePath
commentProjectLink projectId = take (length file - 3) file
where file = projectFile projectId
nameToCommentHash :: FilePath -> CommentId
nameToCommentHash = CommentId . hashToId "C" . BC.pack
ensureCommentHashDir :: BuildMode -> CommentId -> IO ()
ensureCommentHashDir mode (CommentId c) = createDirectoryIfMissing True dir
where dir = commentHashRootDir mode </> take 3 (T.unpack c)
ensureSharedCommentsDir :: BuildMode -> Text -> IO ()
ensureSharedCommentsDir mode userId' = createDirectoryIfMissing True dir
where dir = sharedCommentsDir mode userId'
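-- On-disk layout, summarised from the helpers above (descriptive note): the
-- metadata for a comment thread lives under
-- data/<buildmode>/commentHash/<first three chars of hash>/<hash>, the users
-- with access to that thread are listed in a sibling "<hash>.users" file, and
-- projects shared for commenting get entries under the owner's "commentables"
-- directory.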
cleanCommentHashPath :: BuildMode -> Text -> FilePath -> IO ()
cleanCommentHashPath mode userId' commentFolder = do
dirBool <- doesDirectoryExist commentFolder
case dirBool of
True -> do
let commentHash = nameToCommentHash commentFolder
commentHashPath = commentHashRootDir mode </> commentHashLink commentHash
removeFileIfExists commentHashPath
Just (currentUsers :: [UserDump]) <- decodeStrict <$>
B.readFile (commentHashPath <.> "users")
forM_ currentUsers $ \u -> do
case uuserId u == userId' of
True -> return ()
False -> do
removeFileIfExists . T.unpack $ upath u
removeFileIfExists $ (T.unpack . upath $ u) <.> "info"
cleanBaseDirectory . T.unpack $ upath u
removeFileIfExists $ commentHashPath <.> "users"
cleanBaseDirectory commentHashPath
False -> return ()
correctOwnerPathInComments :: BuildMode -> Text -> FilePath -> FilePath -> IO ()
correctOwnerPathInComments mode userId' userPath commentFolder = do
let commentHash = nameToCommentHash commentFolder
commentHashPath = commentHashRootDir mode </> commentHashLink commentHash
Just (currentUsers :: [UserDump]) <- decodeStrict <$>
B.readFile (commentHashPath <.> "users")
let newUsr usr = usr { upath = T.pack userPath }
newUsers = map (\usr -> if uuserId usr /= userId' then usr
else newUsr usr) currentUsers
B.writeFile (commentHashPath <.> "users") $
LB.toStrict . encode $ newUsers
removeOwnerPathInComments :: BuildMode -> Text -> FilePath -> IO ()
removeOwnerPathInComments mode userId' commentFolder = do
let commentHash = nameToCommentHash commentFolder
commentHashPath = commentHashRootDir mode </> commentHashLink commentHash
Just (currentUsers :: [UserDump]) <- decodeStrict <$>
B.readFile (commentHashPath <.> "users")
B.writeFile (commentHashPath <.> "users") $
LB.toStrict . encode $ filter (\x -> uuserId x /= userId') currentUsers
updateSharedCommentPath :: BuildMode -> FilePath -> FilePath -> IO ()
updateSharedCommentPath mode oldCommentFolder commentFolder = do
let oldCommentHash = nameToCommentHash oldCommentFolder
oldCommentHashPath = commentHashRootDir mode </> commentHashLink oldCommentHash
commentHash = nameToCommentHash commentFolder
commentHashPath = commentHashRootDir mode </> commentHashLink commentHash
createDirectoryIfMissing False $ takeDirectory commentHashPath
mapM_ (\x -> renameFile (oldCommentHashPath <.> x) $ commentHashPath <.> x)
["", "users"]
cleanBaseDirectory oldCommentHashPath
B.writeFile commentHashPath $ BC.pack commentFolder
Just (currentUsers :: [UserDump]) <- decodeStrict <$>
B.readFile (commentHashPath <.> "users")
forM_ currentUsers $ \u -> do
case utype u of
"owner" -> return ()
_ -> B.writeFile (T.unpack . upath $ u) $ BC.pack commentHashPath
createNewVersionIfReq :: Text -> Int -> FilePath -> IO (Either String ())
createNewVersionIfReq latestSource versionNo' commentFolder = do
currentVersions :: [Int] <- reverse . sort . map read <$>
listDirectory (commentFolder <.> "versions")
let currentVersion = currentVersions !! 0
currentSource <- T.decodeUtf8 <$>
B.readFile (commentFolder <.> "versions" </> show currentVersion)
case (currentSource == latestSource, currentVersion > versionNo') of
(_, True) -> return $ Left "Cannot Edit A Previous Version."
(True, _) -> return $ Right ()
(False, _) -> do
currentLines :: [Int] <- delete 0 . fmap read <$> listDirectory commentFolder
commentVersionLists :: [[[CommentDesc]]] <- mapM (\x -> versions . fromJust . decodeStrict <$>
B.readFile (commentFolder </> show x)) currentLines
let hasComments = foldr (\l acc ->
if (l !! currentVersion /= []) then True else acc
) False commentVersionLists
case hasComments of
True -> do
B.writeFile (commentFolder <.> "versions" </> show (currentVersion + 1)) $
T.encodeUtf8 latestSource
ensureVersionLines (currentVersion + 1) commentFolder
return $ Right ()
False -> do
B.writeFile (commentFolder <.> "versions" </> show currentVersion) $
T.encodeUtf8 latestSource
ensureVersionLines currentVersion commentFolder
return $ Right ()
updateUserVersionLS :: Text -> FilePath -> IO ()
updateUserVersionLS userIdent' commentFolder = do
currentLines :: [Int] <- delete 0 . fmap read <$> listDirectory commentFolder
currentVersions :: [Int] <- fmap read <$> (listDirectory $ commentFolder <.> "versions")
commentVersionLists :: [[[CommentDesc]]] <- mapM (\x -> versions . fromJust . decodeStrict <$>
B.readFile (commentFolder </> show x)) currentLines
let versionLS = map (\v -> VersionLS v . LineStatuses $ foldr (\l acc ->
LineStatus (currentLines !! (fromJust $
l `elemIndex` commentVersionLists))
(if (l !! v /= []) then "unread" else "read") : acc
) [] commentVersionLists
) currentVersions
B.writeFile (commentFolder <.> "users" </> T.unpack userIdent') $
LB.toStrict . encode $ VersionLS_ versionLS
ensureVersionLines :: Int -> FilePath -> IO ()
ensureVersionLines versionNo' commentFolder = do
totalLines <- (length . lines . BC.unpack) <$>
(B.readFile $ commentFolder <.> "versions" </> show versionNo')
currentLines :: [Int] <- delete 0 . fmap read <$> listDirectory commentFolder
mapM_ (\x -> do
fileBool <- doesFileExist $ commentFolder </> show x
case fileBool of
True -> do
Just (currentLC :: LineComment) <- decodeStrict <$>
B.readFile (commentFolder </> show x)
let currLength = length . versions $ currentLC
let newLC = LineComment x (versions currentLC ++
replicate (versionNo' - currLength + 1) [])
B.writeFile (commentFolder </> show x) $ LB.toStrict . encode $ newLC
False -> do
let newLC = LineComment x (replicate (versionNo' + 1) [])
B.writeFile (commentFolder </> show x) $ LB.toStrict . encode $ newLC
)[1..totalLines `max` length currentLines]
currentUsers <- map T.pack <$> listDirectory (commentFolder <.> "users")
forM_ currentUsers $ \u -> do
Just (versionLS :: [VersionLS]) <- fmap getVersionLS <$> decodeStrict <$>
B.readFile (commentFolder <.> "users" </> T.unpack u)
let newVersionLS = versionLS ++ if (length versionLS == versionNo' + 1) then []
else [VersionLS versionNo' (LineStatuses [])]
B.writeFile (commentFolder <.> "users" </> T.unpack u) $
LB.toStrict . encode . VersionLS_ $ newVersionLS
addNewUser :: Text -> Text -> FilePath -> FilePath -> FilePath -> IO (Either String ())
addNewUser userId' userIdent' name userPath commentHashPath = do
let identAllowed = foldl (\acc l -> if l `elem` (T.unpack userIdent')
then False else acc) True ['/', '.', '+']
fileBool <- doesFileExist commentHashPath
-- make user id unique instead of user identifier only
case (identAllowed, fileBool) of
(_, False) -> return $ Left "File Does Not Exists"
(False, _) -> return $ Left "User Identifier Has Unallowed Char(/+.)"
(True, True) -> do
Just (currentUsers :: [UserDump]) <- decodeStrict <$>
B.readFile (commentHashPath <.> "users")
let currentIdents = map uuserIdent currentUsers
currentIds = map uuserId currentUsers
case (userId' `elem` currentIds, userIdent' `elem` currentIdents) of
(False, False) -> do
createDirectoryIfMissing False $ takeDirectory userPath
B.writeFile userPath $ BC.pack commentHashPath
B.writeFile (userPath <.> "info") $ BC.pack name
B.writeFile (commentHashPath <.> "users") $
LB.toStrict . encode $ UserDump
userId' userIdent' (T.pack userPath) "not_owner" : currentUsers
commentFolder <- BC.unpack <$> B.readFile commentHashPath
updateUserVersionLS userIdent' commentFolder
return $ Right ()
(False, True) -> return $ Left "User Identifier Already Exists"
(True, _) -> return $ Left "You already have access to comment in this file"
addNewOwner :: BuildMode -> UserDump -> FilePath -> IO (Either String ())
addNewOwner mode userDump commentFolder = do
let commentHash = nameToCommentHash commentFolder
commentHashPath = commentHashRootDir mode </> commentHashLink commentHash
Just (currentUsers :: [UserDump]) <- decodeStrict <$>
B.readFile (commentHashPath <.> "users")
let currentIdents = map uuserIdent currentUsers
currentIds = map uuserId currentUsers
case (uuserId userDump `elem` currentIds, uuserIdent userDump `elem` currentIdents) of
(True, _) -> return $ Left "User already exists for commenting maybe with a different identifier."
(False, True) -> return $ Left "User Identifier Already Exists"
(False, False) -> do
B.writeFile (commentHashPath <.> "users") $
LB.toStrict . encode $ userDump : currentUsers
updateUserVersionLS (uuserIdent userDump) commentFolder
return $ Right ()
addCommentFunc :: BuildMode -> UserDump -> Project -> FilePath -> IO ()
addCommentFunc mode userDump project commentFolder = do
let commentHash = nameToCommentHash commentFolder
commentHashPath = commentHashRootDir mode </> commentHashLink commentHash
createDirectoryIfMissing False commentFolder
ensureCommentHashDir mode commentHash
B.writeFile commentHashPath $ BC.pack commentFolder
B.writeFile (commentHashPath <.> "users") $
LB.toStrict . encode $ userDump : []
createDirectoryIfMissing False $ commentFolder <.> "users"
createDirectoryIfMissing False $ commentFolder <.> "versions"
B.writeFile (commentFolder <.> "versions" </> "0") $ T.encodeUtf8 . projectSource $ project
ensureVersionLines 0 commentFolder
updateUserVersionLS (uuserIdent userDump) commentFolder
listUnreadComments :: Text -> FilePath -> Int -> IO [Int]
listUnreadComments userIdent' commentFolder versionNo' = do
Just (versionLS :: VersionLS_) <- decodeStrict <$>
B.readFile (commentFolder <.> "users" </> T.unpack userIdent')
let currentLineList = listStatuses . versionStatus $ (getVersionLS versionLS) !! versionNo'
unreadLineList = foldr (\l acc ->
if ((T.unpack . lstatus $ l) == "unread") then (llineNo l) : acc
else acc)
[] currentLineList
return unreadLineList
getLineComment :: FilePath -> Int -> Int -> IO [CommentDesc]
getLineComment commentFolder lineNo' versionNo' = do
Just (lc :: LineComment) <- decodeStrict <$> B.readFile (commentFolder </> show lineNo')
return $ (versions lc) !! versionNo'
markReadComments :: Text -> FilePath -> Int -> Int -> IO ()
markReadComments userIdent' commentFolder lineNo' versionNo' = do
Just (versionLS :: VersionLS_) <- decodeStrict <$>
B.readFile (commentFolder <.> "users" </> T.unpack userIdent')
let currentLineList = listStatuses . versionStatus $ (getVersionLS versionLS) !! versionNo'
newLineList = VersionLS versionNo' . LineStatuses . map (\x ->
if llineNo x == lineNo' then LineStatus lineNo' "read"
else x) $ currentLineList
spnll = splitAt versionNo' (getVersionLS versionLS)
B.writeFile (commentFolder <.> "users" </> T.unpack userIdent') $
LB.toStrict . encode . VersionLS_ $ fst spnll ++ (newLineList : (tail $ snd spnll))
addCommentToFile :: FilePath -> Int -> Int -> CommentDesc -> IO ()
addCommentToFile commentFolder lineNo' versionNo' comment' = do
Just (lc :: LineComment) <- decodeStrict <$> B.readFile (commentFolder </> show lineNo')
let newComments = ((versions lc) !! versionNo') ++ [comment']
spvn = splitAt versionNo' (versions lc)
B.writeFile (commentFolder </> show lineNo') $ LB.toStrict . encode $ LineComment lineNo' $
fst spvn ++ (newComments : (tail $ snd spvn))
markUnreadComments :: Text -> FilePath -> Int -> Int -> IO ()
markUnreadComments userIdent' commentFolder lineNo' versionNo' = do
currentUsers <- delete (T.unpack userIdent') <$> listDirectory (commentFolder <.> "users")
forM_ currentUsers $ \u -> do
Just (versionLS :: VersionLS_) <- decodeStrict <$>
B.readFile (commentFolder <.> "users" </> u)
let currentLineList = listStatuses . versionStatus $
(getVersionLS versionLS) !! versionNo'
newLineList = VersionLS versionNo' . LineStatuses . map (\x ->
if llineNo x == lineNo' then LineStatus lineNo' "unread"
else x) $ currentLineList
spnll = splitAt versionNo' (getVersionLS versionLS)
B.writeFile (commentFolder <.> "users" </> T.unpack userIdent') $
LB.toStrict . encode . VersionLS_ $ fst spnll ++ (newLineList : (tail $ snd spnll))
addReplyToComment :: FilePath -> Int -> Int -> CommentDesc -> ReplyDesc -> IO ()
addReplyToComment commentFolder lineNo' versionNo' cd rd = do
Just (lc :: LineComment) <- decodeStrict <$> B.readFile (commentFolder </> show lineNo')
let Just ind = elemIndex cd ((versions lc) !! versionNo')
newcd = CommentDesc (cuserIdent cd) (cdateTime cd) (cstatus cd)
(comment cd) (replies cd ++ [rd])
splc = splitAt versionNo' $ versions lc
spvn = splitAt ind ((versions lc) !! versionNo')
newvn = fst spvn ++ (newcd : (tail $ snd spvn))
newlc = LineComment lineNo' $ fst splc ++ (newvn : (tail $ snd splc))
B.writeFile (commentFolder </> show lineNo') $ LB.toStrict . encode $ newlc
deleteCommentFromFile :: FilePath -> Int -> Int -> CommentDesc -> IO ()
deleteCommentFromFile commentFolder lineNo' versionNo' cd = do
Just (lc :: LineComment) <- decodeStrict <$> B.readFile (commentFolder </> show lineNo')
let Just ind = elemIndex cd ((versions lc) !! versionNo')
newcd = CommentDesc "none" (cdateTime cd) "deleted" "none" (replies cd)
splc = splitAt versionNo' $ versions lc
spvn = splitAt ind ((versions lc) !! versionNo')
newvn = fst spvn ++ (if (length $ replies cd) /= 0
then newcd : (tail $ snd spvn)
else tail $ snd spvn)
newlc = LineComment lineNo' $ fst splc ++ (newvn : (tail $ snd splc))
B.writeFile (commentFolder </> show lineNo') $ LB.toStrict . encode $ newlc
deleteReplyFromComment :: FilePath -> Int -> Int -> CommentDesc -> ReplyDesc -> IO ()
deleteReplyFromComment commentFolder lineNo' versionNo' cd rd = do
Just (lc :: LineComment) <- decodeStrict <$> B.readFile (commentFolder </> show lineNo')
let Just cdInd = elemIndex cd ((versions lc) !! versionNo')
Just rdInd = elemIndex rd (replies cd)
spvn = splitAt cdInd $ (versions lc) !! versionNo'
splc = splitAt versionNo' $ versions lc
spcd = splitAt rdInd $ replies cd
newcd = CommentDesc (cuserIdent cd) (cdateTime cd) (cstatus cd) (comment cd) $
(fst spcd) ++ (tail $ snd spcd)
newvn = fst spvn ++ (if (length $ replies newcd) /= 0
then newcd : (tail $ snd spvn)
else if cstatus newcd == "deleted"
then (tail $ snd spvn)
else newcd : (tail $ snd spvn))
newlc = LineComment lineNo' $ fst splc ++ (newvn : (tail $ snd splc))
B.writeFile (commentFolder </> show lineNo') $ LB.toStrict . encode $ newlc
| | parvmor/codeworld | codeworld-server/src/CommentUtil.hs | apache-2.0 | 19,059 | 0 | 26 | 5,125 | 5,564 | 2,742 | 2,822 | 305 | 6 |
{-# LANGUAGE MultiParamTypeClasses, FlexibleInstances, IncoherentInstances #-}
{-# LANGUAGE OverloadedStrings, Rank2Types #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Properties.Common
(
Small(..)
, qc
, Props
, Eq'(..)
, SameAs(..)
, (=?=)
, (=??=)
, (=*=)
, (=?*=)
, (=??*=)
, (=**=)
, (=*==)
, notEmpty
, prepends
, kf
) where
import Control.Applicative ((<$>))
import qualified Data.ByteString.Char8 as B
import Data.CritBit.Map.Lazy (CritBitKey, byteCount)
import Data.Monoid (Monoid, mappend)
import Data.String (IsString, fromString)
import qualified Data.Text as T
import qualified Data.Vector.Generic as G
import qualified Data.Vector.Unboxed as U
import Data.Word
import Test.Framework (Test)
import Test.QuickCheck (Arbitrary(..), Args(..), quickCheckWith, stdArgs)
import Test.QuickCheck.Gen (Gen, resize, sized)
import Test.QuickCheck.Property (Property, Testable, (===), (.&&.), (.||.))
instance IsString (U.Vector Word8) where
fromString = fromStringV
instance IsString (U.Vector Word16) where
fromString = fromStringV
instance IsString (U.Vector Word32) where
fromString = fromStringV
instance IsString (U.Vector Word64) where
fromString = fromStringV
instance IsString (U.Vector Word) where
fromString = fromStringV
instance IsString (U.Vector Char) where
fromString = G.fromList
fromStringV :: (G.Vector v a, Integral a) => String -> v a
fromStringV = G.fromList . map (fromIntegral . fromEnum)
instance Arbitrary B.ByteString where
arbitrary = B.pack <$> arbitrary
shrink = map B.pack . shrink . B.unpack
instance Arbitrary T.Text where
arbitrary = T.pack <$> arbitrary
shrink = map T.pack . shrink . T.unpack
instance Arbitrary (U.Vector Word8) where
arbitrary = arbitraryV
shrink = shrinkV
instance Arbitrary (U.Vector Word16) where
arbitrary = arbitraryV
shrink = shrinkV
instance Arbitrary (U.Vector Word32) where
arbitrary = arbitraryV
shrink = shrinkV
instance Arbitrary (U.Vector Word64) where
arbitrary = arbitraryV
shrink = shrinkV
instance Arbitrary (U.Vector Word) where
arbitrary = arbitraryV
shrink = shrinkV
instance Arbitrary (U.Vector Char) where
arbitrary = arbitraryV
shrink = shrinkV
arbitraryV :: (G.Vector v a, Arbitrary a) => Gen (v a)
arbitraryV = G.fromList <$> arbitrary
shrinkV :: (G.Vector v a, Arbitrary a) => v a -> [v a]
shrinkV = map G.fromList . shrink . G.toList
newtype Small a = Small { fromSmall :: a }
deriving (Eq, Ord, Show)
instance (Show a, Arbitrary a) => Arbitrary (Small a) where
arbitrary = Small <$> (sized $ \n -> resize (smallish n) arbitrary)
where
smallish = round . (sqrt :: Double -> Double) . fromIntegral . abs
shrink = map Small . shrink . fromSmall
type Props k = (Arbitrary k, CritBitKey k, Ord k, IsString k, Monoid k, Show k) => k -> [Test]
infix 4 =^=, =?=, =??=
-- | Compares heterogeneous values
class (Show f, Show g) => Eq' f g where
(=^=) :: f -> g -> Property
instance (Show t, Eq t) => Eq' t t where
(=^=) = (===)
instance (Eq' a1 b1, Eq' a2 b2, Eq' a3 b3) => Eq' (a1, a2, a3) (b1, b2, b3)
where (a1, a2, a3) =^= (b1, b2, b3) = a1 =^= b1 .&&. a2 =^= b2 .&&. a3 =^= b3
-- | Compares functions taking one scalar
(=?=) :: Eq' a b => (t -> a) -> (t -> b) -> k -> t -> Property
f =?= g = const $ \t -> f t =^= g t
-- | Compares functions taking two scalars
(=??=) :: Eq' a b => (t -> s -> a) -> (t -> s -> b) -> k -> t -> s -> Property
f =??= g = const $ \t s -> f t s =^= g t s
infix 4 =*=, =?*=, =*==
-- | Types 'f' and 'g' have the same behavior and a common representation 'r'.
data SameAs f g r = SameAs {
toF :: r -> f
, fromF :: f -> r
, toG :: r -> g
, fromG :: g -> r
}
-- | Compares two functions taking one container
(=*=) :: (Eq' a b) => (f -> a) -> (g -> b)
-> SameAs f g r -> r -> Property
(f =*= g) sa i = f (toF sa i) =^= g (toG sa i)
-- | Compares two functions taking one scalar and one container
(=?*=) :: (Eq' a b) => (t -> f -> a) -> (t -> g -> b)
-> SameAs f g r -> r -> t -> Property
(f =?*= g) sa i t = (f t =*= g t) sa i
-- | Compares functions taking two scalars and one container
(=??*=) :: (Eq' a b) => (t -> s -> f -> a) -> (t -> s -> g -> b)
-> SameAs f g r -> r -> t -> s -> Property
(f =??*= g) sa i t s = (f t s =*= g t s) sa i
-- | Compares two functions taking two containers
(=**=) :: (Eq' a b) => (f -> f -> a) -> (g -> g -> b)
-> SameAs f g r -> r -> r -> Property
(f =**= g) sa i = (f (toF sa i) =*= g (toG sa i)) sa
-- | Compares two functions taking one container with preprocessing
(=*==) :: (Eq' f g) => (z -> f) -> (z -> g) -> (p -> z)
-> SameAs f g r -> p -> Property
(f =*== g) p _ i = f i' =^= g i'
where i' = p i
-- | Input list is non-empty
notEmpty :: (SameAs c1 c2 [i] -> [i] -> Property)
-> SameAs c1 c2 [i] -> [i] -> Property
notEmpty f t items = null items .||. f t items
prepends :: (IsString k, Monoid k) => k -> k
prepends = mappend "test"
-- | Keys mapping function
kf :: (CritBitKey k, IsString k, Monoid k) => k -> k
kf k = fromString (show (byteCount k)) `mappend` k
-- Handy function for fiddling with things from ghci.
qc :: Testable prop => Int -> prop -> IO ()
qc n = quickCheckWith stdArgs { maxSuccess = n }
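-- For example, from ghci (a sketch; the property and size are arbitrary):
--
--   ghci> qc 500 $ \xs -> reverse (reverse xs) == (xs :: [Int])
--   +++ OK, passed 500 tests.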
| | bos/critbit | tests/Properties/Common.hs | bsd-2-clause | 5,361 | 0 | 15 | 1,287 | 2,168 | 1,194 | 974 | -1 | -1 |
-- |
--
-- Copyright:
-- This file is part of the package vimeta. It is subject to the
-- license terms in the LICENSE file found in the top-level
-- directory of this distribution and at:
--
-- https://github.com/pjones/vimeta
--
-- No part of this package, including this file, may be copied,
-- modified, propagated, or distributed except according to the terms
-- contained in the LICENSE file.
--
-- License: BSD-2-Clause
module Vimeta.UI.Common.Util
( parens,
dayAsYear,
dayRange,
)
where
import Data.Time (Day, defaultTimeLocale, formatTime)
-- | Wrap some text with parenthesis.
parens :: Text -> Text
parens t = " (" <> t <> ")"
-- | Format a 'Maybe Day' as a year ('Text').
dayAsYear :: Maybe Day -> Text
dayAsYear Nothing = "----"
dayAsYear (Just d) = toText (formatTime defaultTimeLocale "%Y" d)
-- | Given a start 'Day' and an end 'Day', produce a string
-- representing a range.
dayRange :: Maybe Day -> Maybe Day -> Text
dayRange d1 d2 = dayAsYear d1 <> " - " <> dayAsYear d2
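-- For example (a sketch, assuming 'fromGregorian' from Data.Time.Calendar is
-- in scope):
--
--   dayRange (Just (fromGregorian 2019 5 1)) Nothing  ==  "2019 - ----"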
| | pjones/vimeta | src/Vimeta/UI/Common/Util.hs | bsd-2-clause | 1,023 | 0 | 7 | 207 | 169 | 98 | 71 | 12 | 1 |
module Compiler.CodeGeneration.StatementCompilation where
import Control.Monad
import Compiler.Syntax
import Compiler.SymbolTable
import Compiler.CodeGeneration.CompilationState
import Compiler.CodeGeneration.ExpressionEvaluation
import Compiler.CodeGeneration.InstructionSet
import Compiler.CodeGeneration.RuntimeEnvironment
import Debug.Trace
class Compilable a where
compile :: a -> Compiler ()
instance Compilable Program where
compile (Program syms) =
withBlock $ do program_prelude
mapM compile syms
program_finale
instance Compilable Symbol where
compile (FuncSymbol _ f) = compile f
compile _ = return ()
instance Compilable Function where
compile f@(Function t _ params block) =
withFunction f params $ do compile block
instance Compilable Statement where
compile (ExpressionStatement (AssignmentExpr (VariableRef v) rhs)) =
do t1 <- evaluate rhs
assign v t1
freeRegister t1
compile (ExpressionStatement (AssignmentExpr (ArrayRef name indexExpr) rhs)) =
do t1 <- evaluate indexExpr
r <- claimRegister
addr <- currentAddress
emit $ LDA r (SymbolRef addr name) -- Get array base
emit $ ADD r r t1
freeRegister t1
t2 <- evaluate rhs
emit $ ST t2 (Memory 0 r)
freeRegister t2
freeRegister r
compile (ExpressionStatement e) = do evaluate e
return ()
compile (CompoundStatement locals stmts) =
withBlock $ do allocate_locals (map deposition locals)
sequence_ $ map compile stmts
compile (IterationStatement cond body) =
withBlock $ do label "condition"
if_false cond "after"
compile body
jump_to "condition"
label "after"
compile (SelectionStatement cond then_clause else_clause) =
withBlock $ do if_false cond "else"
compile then_clause
jump_to "after"
label "else"
compile else_clause
label "after"
compile (ValueReturnStatement expr) =
do t1 <- evaluate expr
return_register t1
compile ReturnStatement = do return_void
compile NullStatement = return ()
-- Strip off positioning information when assembling
instance (Compilable a, Show a) => Compilable (Positioned a) where
compile (Positioned _ a) = compile a
compile (AnyPosition a) = compile a
if_false (ArithmeticExpr op lhs rhs) label =
do t1 <- evaluate lhs
t2 <- evaluate rhs
emit $ OUT t1
emit $ OUT t2
t3 <- claimRegister -- get a place to keep the result
emit $ SUB t3 t1 t2 -- do the comparison
freeRegister t1
freeRegister t2
a <- currentAddress
emit $ (toInst op) t3 (LabelRef a label)
freeRegister t3
where toInst Less = JGE
toInst LessOrEqual = JGT
toInst Greater = JLE
toInst GreaterOrEqual = JLT
toInst Equal = JNE
toInst NotEqual = JEQ
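-- Note on if_false (descriptive comment): the emitted jump uses the negated
-- comparison (e.g. Less compiles to JGE on lhs - rhs), so control falls
-- through into the following block when the condition holds and jumps to the
-- label when it does not.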
| | michaelmelanson/cminus-compiler | Compiler/CodeGeneration/StatementCompilation.hs | bsd-2-clause | 3,632 | 0 | 12 | 1,481 | 887 | 408 | 479 | 82 | 6 |
module Fusion where
import Criterion.Main
import qualified Data.Vector as V
testV' :: Int -> V.Vector Int
testV' n =
V.map (+n) $ V.map (+n) $
V.map (+n) $ V.map (+n)
(V.fromList [1..10000])
testV :: Int -> V.Vector Int
testV n =
V.map ( (+n) . (+n)
. (+n) . (+n) ) (V.fromList [1..10000])
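-- Note: with optimisations enabled (-O or -O2), vector's stream fusion is
-- expected to rewrite the four consecutive V.map passes in testV' into a
-- single traversal, so the two benchmarks should perform about the same;
-- without optimisations testV' pays for the intermediate vectors.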
main :: IO ()
main = defaultMain
[ bench "vector map prefused" $
whnf testV 9998
, bench "vector map will be fused" $
whnf testV' 9998
]
| | dmvianna/strict | src/fusion.hs | bsd-3-clause | 501 | 0 | 10 | 155 | 223 | 121 | 102 | 18 | 1 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
module LendingClub.Invest
( InvestResponse (..)
, ErrorMessage (..)
, InvestConfirmation (..)
, ExecutionStatus (..)
) where
import Control.Applicative (pure, (<|>), (<$>), (<*>))
import Data.Aeson
import Data.Text (Text)
import GHC.Generics
import LendingClub.Money
data InvestResponse
= InvestResponse
{ orderInstructId :: Int
, orderConfirmations :: [InvestConfirmation]
}
| InvestError
{ errors :: [ErrorMessage] }
deriving (Show, Eq)
instance FromJSON InvestResponse where
parseJSON (Object v) =
InvestResponse
<$> v .: "orderInstructId"
<*> v .: "orderConfirmations"
<|> InvestError <$> v .: "errors"
parseJSON _ = pure (InvestError [])
data ErrorMessage = ErrorMessage
{ field :: Text
, code :: Text
, message :: Text
} deriving (Generic, Show, Eq)
instance FromJSON ErrorMessage where
data InvestConfirmation = InvestConfirmation
{ loanId :: Int
, requestedAmount :: Money
, investedAmount :: Money
, executionStatus :: [ExecutionStatus]
} deriving (Generic, Show, Eq)
instance FromJSON InvestConfirmation where
data ExecutionStatus
= OrderFulfilled
| LoanAmountExceeded
| NotAnInfundingLoan
| RequestedAmountLow
| RequestedAmountRounded
| AugmentedByMerge
| ElimByMerge
| InsufficientCash
| NotAnInvestor
| NotAValidInvestment
| NoteAddedToPortfolio
| NotAValidPortfolio
| ErrorAddingNoteToPortfolio
| SystemBusy
| UnknownError
deriving (Show, Eq)
instance FromJSON ExecutionStatus where
parseJSON (String "ORDER_FULFILLED") = pure OrderFulfilled
parseJSON (String "LOAN_AMNT_EXCEEDED") = pure LoanAmountExceeded
parseJSON (String "NOT_AN_INFUNDING_LOAN") = pure NotAnInfundingLoan
parseJSON (String "REQUESTED_AMNT_LOW") = pure RequestedAmountLow
parseJSON (String "REQUESTED_AMNT_ROUNDED") = pure RequestedAmountRounded
parseJSON (String "AUGMENTED_BY_MERGE") = pure AugmentedByMerge
parseJSON (String "ELIM_BY_MERGE") = pure ElimByMerge
parseJSON (String "INSUFFICIENT_CASH") = pure InsufficientCash
parseJSON (String "NOT_AN_INVESTOR") = pure NotAnInvestor
parseJSON (String "NOT_A_VALID_INVESTMENT") = pure NotAValidInvestment
parseJSON (String "NOTE_ADDED_TO_PORTFOLIO") = pure NoteAddedToPortfolio
parseJSON (String "NOT_A_VALID_PORTFOLIO") = pure NotAValidPortfolio
parseJSON (String "ERROR_ADDING_NOTE_TO_PORTFOLIO") = pure ErrorAddingNoteToPortfolio
parseJSON (String "SYSTEM_BUSY") = pure SystemBusy
parseJSON _ = pure UnknownError
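-- A decoding sketch (hypothetical input; 'decode' comes from Data.Aeson and
-- the literal relies on OverloadedStrings):
--
--   (decode "{\"errors\":[]}" :: Maybe InvestResponse)
--     == Just (InvestError { errors = [] })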
| | WraithM/lendingclub | src/LendingClub/Invest.hs | bsd-3-clause | 2,792 | 0 | 12 | 647 | 632 | 347 | 285 | 72 | 0 |
{-
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[RnSource]{Main pass of renamer}
-}
{-# LANGUAGE CPP, ScopedTypeVariables #-}
module RnSource (
rnSrcDecls, addTcgDUs, findSplice
) where
#include "HsVersions.h"
import {-# SOURCE #-} RnExpr( rnLExpr )
import {-# SOURCE #-} RnSplice ( rnSpliceDecl )
import HsSyn
import RdrName
import RnTypes
import RnBinds
import RnEnv
import RnNames
import RnHsDoc ( rnHsDoc, rnMbLHsDoc )
import TcAnnotations ( annCtxt )
import TcRnMonad
import ForeignCall ( CCallTarget(..) )
import Module
import HscTypes ( Warnings(..), plusWarns )
import Class ( FunDep )
import PrelNames ( isUnboundName )
import Name
import NameSet
import NameEnv
import Avail
import Outputable
import Bag
import BasicTypes ( RuleName )
import FastString
import SrcLoc
import DynFlags
import HscTypes ( HscEnv, hsc_dflags )
import ListSetOps ( findDupsEq, removeDups )
import Digraph ( SCC, flattenSCC, stronglyConnCompFromEdgedVertices )
import Util ( mapSnd )
import Control.Monad
import Data.List( partition, sortBy )
import Maybes( orElse, mapMaybe )
#if __GLASGOW_HASKELL__ < 709
import Data.Traversable (traverse)
#endif
{-
@rnSourceDecl@ `renames' declarations.
It simultaneously performs dependency analysis and precedence parsing.
It also does the following error checks:
\begin{enumerate}
\item
Checks that tyvars are used properly. This includes checking
for undefined tyvars, and tyvars in contexts that are ambiguous.
(Some of this checking has now been moved to module @TcMonoType@,
since we don't have functional dependency information at this point.)
\item
Checks that all variable occurrences are defined.
\item
Checks the @(..)@ etc constraints in the export list.
\end{enumerate}
-}
-- Brings the binders of the group into scope in the appropriate places;
-- does NOT assume that anything is in scope already
rnSrcDecls :: Maybe FreeVars -> HsGroup RdrName -> RnM (TcGblEnv, HsGroup Name)
-- Rename a top-level HsGroup; used for normal source files *and* hs-boot files
rnSrcDecls extra_deps group@(HsGroup { hs_valds = val_decls,
hs_splcds = splice_decls,
hs_tyclds = tycl_decls,
hs_instds = inst_decls,
hs_derivds = deriv_decls,
hs_fixds = fix_decls,
hs_warnds = warn_decls,
hs_annds = ann_decls,
hs_fords = foreign_decls,
hs_defds = default_decls,
hs_ruleds = rule_decls,
hs_vects = vect_decls,
hs_docs = docs })
= do {
-- (A) Process the fixity declarations, creating a mapping from
-- FastStrings to FixItems.
-- Also checks for duplicates.
local_fix_env <- makeMiniFixityEnv fix_decls ;
-- (B) Bring top level binders (and their fixities) into scope,
-- *except* for the value bindings, which get done in step (D)
-- with collectHsIdBinders. However *do* include
--
-- * Class ops, data constructors, and record fields,
-- because they do not have value declarations.
-- Also step (C) depends on datacons and record fields
--
-- * For hs-boot files, include the value signatures
-- Again, they have no value declarations
--
(tc_envs, tc_bndrs) <- getLocalNonValBinders local_fix_env group ;
setEnvs tc_envs $ do {
failIfErrsM ; -- No point in continuing if (say) we have duplicate declarations
-- (C) Extract the mapping from data constructors to field names and
-- extend the record field env.
-- This depends on the data constructors and field names being in
-- scope from (B) above
inNewEnv (extendRecordFieldEnv tycl_decls inst_decls) $ \ _ -> do {
-- (D1) Bring pattern synonyms into scope.
-- Need to do this before (D2) because rnTopBindsLHS
-- looks up those pattern synonyms (Trac #9889)
pat_syn_bndrs <- mapM newTopSrcBinder (hsPatSynBinders val_decls) ;
tc_envs <- extendGlobalRdrEnvRn (map Avail pat_syn_bndrs) local_fix_env ;
setEnvs tc_envs $ do {
-- (D2) Rename the left-hand sides of the value bindings.
-- This depends on everything from (B) being in scope,
-- and on (C) for resolving record wild cards.
-- It uses the fixity env from (A) to bind fixities for view patterns.
new_lhs <- rnTopBindsLHS local_fix_env val_decls ;
-- Bind the LHSes (and their fixities) in the global rdr environment
let { id_bndrs = collectHsIdBinders new_lhs } ; -- Excludes pattern-synonym binders
-- They are already in scope
traceRn (text "rnSrcDecls" <+> ppr id_bndrs) ;
tc_envs <- extendGlobalRdrEnvRn (map Avail id_bndrs) local_fix_env ;
setEnvs tc_envs $ do {
-- Now everything is in scope, as the remaining renaming assumes.
-- (E) Rename type and class decls
-- (note that value LHSes need to be in scope for default methods)
--
-- You might think that we could build proper def/use information
-- for type and class declarations, but they can be involved
-- in mutual recursion across modules, and we only do the SCC
-- analysis for them in the type checker.
-- So we content ourselves with gathering uses only; that
-- means we'll only report a declaration as unused if it isn't
-- mentioned at all. Ah well.
traceRn (text "Start rnTyClDecls") ;
(rn_tycl_decls, src_fvs1) <- rnTyClDecls extra_deps tycl_decls ;
-- (F) Rename Value declarations right-hand sides
traceRn (text "Start rnmono") ;
let { val_bndr_set = mkNameSet id_bndrs `unionNameSet` mkNameSet pat_syn_bndrs } ;
(rn_val_decls, bind_dus) <- rnTopBindsRHS val_bndr_set new_lhs ;
traceRn (text "finish rnmono" <+> ppr rn_val_decls) ;
-- (G) Rename Fixity and deprecations
-- Rename fixity declarations and error if we try to
-- fix something from another module (duplicates were checked in (A))
let { all_bndrs = tc_bndrs `unionNameSet` val_bndr_set } ;
rn_fix_decls <- rnSrcFixityDecls all_bndrs fix_decls ;
-- Rename deprec decls;
-- check for duplicates and ensure that deprecated things are defined locally
-- at the moment, we don't keep these around past renaming
rn_warns <- rnSrcWarnDecls all_bndrs warn_decls ;
-- (H) Rename Everything else
(rn_inst_decls, src_fvs2) <- rnList rnSrcInstDecl inst_decls ;
(rn_rule_decls, src_fvs3) <- setXOptM Opt_ScopedTypeVariables $
rnList rnHsRuleDecls rule_decls ;
-- Inside RULES, scoped type variables are on
(rn_vect_decls, src_fvs4) <- rnList rnHsVectDecl vect_decls ;
(rn_foreign_decls, src_fvs5) <- rnList rnHsForeignDecl foreign_decls ;
(rn_ann_decls, src_fvs6) <- rnList rnAnnDecl ann_decls ;
(rn_default_decls, src_fvs7) <- rnList rnDefaultDecl default_decls ;
(rn_deriv_decls, src_fvs8) <- rnList rnSrcDerivDecl deriv_decls ;
(rn_splice_decls, src_fvs9) <- rnList rnSpliceDecl splice_decls ;
-- Haddock docs; no free vars
rn_docs <- mapM (wrapLocM rnDocDecl) docs ;
last_tcg_env <- getGblEnv ;
-- (I) Compute the results and return
let {rn_group = HsGroup { hs_valds = rn_val_decls,
hs_splcds = rn_splice_decls,
hs_tyclds = rn_tycl_decls,
hs_instds = rn_inst_decls,
hs_derivds = rn_deriv_decls,
hs_fixds = rn_fix_decls,
hs_warnds = [], -- warns are returned in the tcg_env
-- (see below) not in the HsGroup
hs_fords = rn_foreign_decls,
hs_annds = rn_ann_decls,
hs_defds = rn_default_decls,
hs_ruleds = rn_rule_decls,
hs_vects = rn_vect_decls,
hs_docs = rn_docs } ;
tcf_bndrs = hsTyClForeignBinders rn_tycl_decls rn_inst_decls rn_foreign_decls ;
other_def = (Just (mkNameSet tcf_bndrs), emptyNameSet) ;
other_fvs = plusFVs [src_fvs1, src_fvs2, src_fvs3, src_fvs4,
src_fvs5, src_fvs6, src_fvs7, src_fvs8,
src_fvs9] ;
-- It is tiresome to gather the binders from type and class decls
src_dus = [other_def] `plusDU` bind_dus `plusDU` usesOnly other_fvs ;
-- Instance decls may have occurrences of things bound in bind_dus
-- so we must put other_fvs last
final_tcg_env = let tcg_env' = (last_tcg_env `addTcgDUs` src_dus)
in -- we return the deprecs in the env, not in the HsGroup above
tcg_env' { tcg_warns = tcg_warns tcg_env' `plusWarns` rn_warns };
} ;
traceRn (text "finish rnSrc" <+> ppr rn_group) ;
traceRn (text "finish Dus" <+> ppr src_dus ) ;
return (final_tcg_env, rn_group)
}}}}}
-- some utils because we do this a bunch above
-- compute and install the new env
inNewEnv :: TcM TcGblEnv -> (TcGblEnv -> TcM a) -> TcM a
inNewEnv env cont = do e <- env
setGblEnv e $ cont e
addTcgDUs :: TcGblEnv -> DefUses -> TcGblEnv
-- This function could be defined lower down in the module hierarchy,
-- but there doesn't seem anywhere very logical to put it.
addTcgDUs tcg_env dus = tcg_env { tcg_dus = tcg_dus tcg_env `plusDU` dus }
rnList :: (a -> RnM (b, FreeVars)) -> [Located a] -> RnM ([Located b], FreeVars)
rnList f xs = mapFvRn (wrapLocFstM f) xs
{-
*********************************************************
* *
HsDoc stuff
* *
*********************************************************
-}
rnDocDecl :: DocDecl -> RnM DocDecl
rnDocDecl (DocCommentNext doc) = do
rn_doc <- rnHsDoc doc
return (DocCommentNext rn_doc)
rnDocDecl (DocCommentPrev doc) = do
rn_doc <- rnHsDoc doc
return (DocCommentPrev rn_doc)
rnDocDecl (DocCommentNamed str doc) = do
rn_doc <- rnHsDoc doc
return (DocCommentNamed str rn_doc)
rnDocDecl (DocGroup lev doc) = do
rn_doc <- rnHsDoc doc
return (DocGroup lev rn_doc)
{-
*********************************************************
* *
Source-code fixity declarations
* *
*********************************************************
-}
rnSrcFixityDecls :: NameSet -> [LFixitySig RdrName] -> RnM [LFixitySig Name]
-- Rename the fixity decls, so we can put
-- the renamed decls in the renamed syntax tree
-- Errors if the thing being fixed is not defined locally.
--
-- The returned FixitySigs are not actually used for anything,
-- except perhaps the GHCi API
rnSrcFixityDecls bndr_set fix_decls
= do fix_decls <- mapM rn_decl fix_decls
return (concat fix_decls)
where
sig_ctxt = TopSigCtxt bndr_set
rn_decl :: LFixitySig RdrName -> RnM [LFixitySig Name]
-- GHC extension: look up both the tycon and data con
-- for con-like things; hence returning a list
-- If neither are in scope, report an error; otherwise
-- return a fixity sig for each (slightly odd)
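    -- For example (an illustrative case, not from the original source):
    --     infixr 5 :+:
    -- in a module that defines both a data constructor (:+:) and a
    -- type constructor (:+:) yields one fixity signature for each.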
rn_decl (L loc (FixitySig fnames fixity))
= do names <- mapM lookup_one fnames
return [ L loc (FixitySig name fixity)
| name <- names ]
lookup_one :: Located RdrName -> RnM [Located Name]
lookup_one (L name_loc rdr_name)
= setSrcSpan name_loc $
-- this lookup will fail if the definition isn't local
do names <- lookupLocalTcNames sig_ctxt what rdr_name
return [ L name_loc name | name <- names ]
what = ptext (sLit "fixity signature")
{-
*********************************************************
* *
Source-code deprecations declarations
* *
*********************************************************
Check that the deprecated names are defined locally, and that there are
no duplicate deprecations.
Only imported deprecations, dealt with in RnIfaces, are gathered together.
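For example (an illustrative case, not from the original source):
    {-# DEPRECATED foo "Use bar instead" #-}
    foo :: Int -> Int
Here 'foo' must be defined in this module, and a second WARNING or
DEPRECATED pragma for 'foo' is reported as a duplicate.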
-}
-- checks that the deprecations are defined locally, and that there are no duplicates
rnSrcWarnDecls :: NameSet -> [LWarnDecls RdrName] -> RnM Warnings
rnSrcWarnDecls _ []
= return NoWarnings
rnSrcWarnDecls bndr_set decls'
= do { -- check for duplicates
; mapM_ (\ dups -> let (L loc rdr:lrdr':_) = dups
in addErrAt loc (dupWarnDecl lrdr' rdr))
warn_rdr_dups
; pairs_s <- mapM (addLocM rn_deprec) decls
; return (WarnSome ((concat pairs_s))) }
where
decls = concatMap (\(L _ d) -> wd_warnings d) decls'
sig_ctxt = TopSigCtxt bndr_set
rn_deprec (Warning rdr_names txt)
-- ensures that the names are defined locally
= do { names <- concatMapM (lookupLocalTcNames sig_ctxt what . unLoc)
rdr_names
; return [(nameOccName name, txt) | name <- names] }
what = ptext (sLit "deprecation")
warn_rdr_dups = findDupRdrNames $ concatMap (\(L _ (Warning ns _)) -> ns)
decls
findDupRdrNames :: [Located RdrName] -> [[Located RdrName]]
findDupRdrNames = findDupsEq (\ x -> \ y -> rdrNameOcc (unLoc x) == rdrNameOcc (unLoc y))
-- look for duplicates among the OccNames;
-- we check that the names are defined above
-- Invariant: the lists returned by findDupsEq always have at least two elements
dupWarnDecl :: Located RdrName -> RdrName -> SDoc
-- Located RdrName -> DeprecDecl RdrName -> SDoc
dupWarnDecl (L loc _) rdr_name
= vcat [ptext (sLit "Multiple warning declarations for") <+> quotes (ppr rdr_name),
ptext (sLit "also at ") <+> ppr loc]
{-
*********************************************************
* *
\subsection{Annotation declarations}
* *
*********************************************************
-}
rnAnnDecl :: AnnDecl RdrName -> RnM (AnnDecl Name, FreeVars)
rnAnnDecl ann@(HsAnnotation s provenance expr)
= addErrCtxt (annCtxt ann) $
do { (provenance', provenance_fvs) <- rnAnnProvenance provenance
; (expr', expr_fvs) <- setStage (Splice False) $
rnLExpr expr
; return (HsAnnotation s provenance' expr',
provenance_fvs `plusFV` expr_fvs) }
rnAnnProvenance :: AnnProvenance RdrName -> RnM (AnnProvenance Name, FreeVars)
rnAnnProvenance provenance = do
provenance' <- traverse lookupTopBndrRn provenance
return (provenance', maybe emptyFVs unitFV (annProvenanceName_maybe provenance'))
{-
*********************************************************
* *
\subsection{Default declarations}
* *
*********************************************************
-}
rnDefaultDecl :: DefaultDecl RdrName -> RnM (DefaultDecl Name, FreeVars)
rnDefaultDecl (DefaultDecl tys)
= do { (tys', fvs) <- rnLHsTypes doc_str tys
; return (DefaultDecl tys', fvs) }
where
doc_str = DefaultDeclCtx
{-
*********************************************************
* *
\subsection{Foreign declarations}
* *
*********************************************************
-}
rnHsForeignDecl :: ForeignDecl RdrName -> RnM (ForeignDecl Name, FreeVars)
rnHsForeignDecl (ForeignImport name ty _ spec)
= do { topEnv :: HscEnv <- getTopEnv
; name' <- lookupLocatedTopBndrRn name
; (ty', fvs) <- rnLHsType (ForeignDeclCtx name) ty
-- Mark any PackageTarget style imports as coming from the current package
; let packageKey = thisPackage $ hsc_dflags topEnv
spec' = patchForeignImport packageKey spec
; return (ForeignImport name' ty' noForeignImportCoercionYet spec', fvs) }
rnHsForeignDecl (ForeignExport name ty _ spec)
= do { name' <- lookupLocatedOccRn name
; (ty', fvs) <- rnLHsType (ForeignDeclCtx name) ty
; return (ForeignExport name' ty' noForeignExportCoercionYet spec, fvs `addOneFV` unLoc name') }
-- NB: a foreign export is an *occurrence site* for name, so
-- we add it to the free-variable list. It might, for example,
-- be imported from another module
-- | For Windows DLLs we need to know what packages imported symbols are from
-- to generate correct calls. Imported symbols are tagged with the current
-- package, so if they get inlined across a package boundary we'll still
-- know where they're from.
--
patchForeignImport :: PackageKey -> ForeignImport -> ForeignImport
patchForeignImport packageKey (CImport cconv safety fs spec src)
= CImport cconv safety fs (patchCImportSpec packageKey spec) src
patchCImportSpec :: PackageKey -> CImportSpec -> CImportSpec
patchCImportSpec packageKey spec
= case spec of
CFunction callTarget -> CFunction $ patchCCallTarget packageKey callTarget
_ -> spec
patchCCallTarget :: PackageKey -> CCallTarget -> CCallTarget
patchCCallTarget packageKey callTarget =
case callTarget of
StaticTarget src label Nothing isFun
-> StaticTarget src label (Just packageKey) isFun
_ -> callTarget
{-
*********************************************************
* *
\subsection{Instance declarations}
* *
*********************************************************
-}
rnSrcInstDecl :: InstDecl RdrName -> RnM (InstDecl Name, FreeVars)
rnSrcInstDecl (TyFamInstD { tfid_inst = tfi })
= do { (tfi', fvs) <- rnTyFamInstDecl Nothing tfi
; return (TyFamInstD { tfid_inst = tfi' }, fvs) }
rnSrcInstDecl (DataFamInstD { dfid_inst = dfi })
= do { (dfi', fvs) <- rnDataFamInstDecl Nothing dfi
; return (DataFamInstD { dfid_inst = dfi' }, fvs) }
rnSrcInstDecl (ClsInstD { cid_inst = cid })
= do { (cid', fvs) <- rnClsInstDecl cid
; return (ClsInstD { cid_inst = cid' }, fvs) }
rnClsInstDecl :: ClsInstDecl RdrName -> RnM (ClsInstDecl Name, FreeVars)
rnClsInstDecl (ClsInstDecl { cid_poly_ty = inst_ty, cid_binds = mbinds
, cid_sigs = uprags, cid_tyfam_insts = ats
, cid_overlap_mode = oflag
, cid_datafam_insts = adts })
-- Used for both source and interface file decls
= do { (inst_ty', inst_fvs) <- rnLHsInstType (text "In an instance declaration") inst_ty
; case splitLHsInstDeclTy_maybe inst_ty' of {
Nothing -> return (ClsInstDecl { cid_poly_ty = inst_ty', cid_binds = emptyLHsBinds
, cid_sigs = [], cid_tyfam_insts = []
, cid_overlap_mode = oflag
, cid_datafam_insts = [] }
, inst_fvs) ;
Just (inst_tyvars, _, L _ cls,_) ->
do { let (spec_inst_prags, other_sigs) = partition isSpecInstLSig uprags
ktv_names = hsLKiTyVarNames inst_tyvars
-- Rename the associated types, and type signatures
-- Both need to have the instance type variables in scope
; traceRn (text "rnSrcInstDecl" <+> ppr inst_ty' $$ ppr inst_tyvars $$ ppr ktv_names)
; ((ats', adts', other_sigs'), more_fvs)
<- extendTyVarEnvFVRn ktv_names $
do { (ats', at_fvs) <- rnATInstDecls rnTyFamInstDecl cls inst_tyvars ats
; (adts', adt_fvs) <- rnATInstDecls rnDataFamInstDecl cls inst_tyvars adts
; (other_sigs', sig_fvs) <- renameSigs (InstDeclCtxt cls) other_sigs
; return ( (ats', adts', other_sigs')
, at_fvs `plusFV` adt_fvs `plusFV` sig_fvs) }
-- Rename the bindings
-- The typechecker (not the renamer) checks that all
-- the bindings are for the right class
-- (Slightly strangely) when scoped type variables are on, the
-- forall-d tyvars scope over the method bindings too
; (mbinds', meth_fvs) <- extendTyVarEnvForMethodBinds ktv_names $
rnMethodBinds cls (mkSigTvFn other_sigs')
mbinds
            -- Rename the SPECIALISE instance pragmas
-- Annoyingly the type variables are not in scope here,
-- so that instance Eq a => Eq (T a) where
-- {-# SPECIALISE instance Eq a => Eq (T [a]) #-}
-- works OK. That's why we did the partition game above
--
; (spec_inst_prags', spec_inst_fvs)
<- renameSigs (InstDeclCtxt cls) spec_inst_prags
; let uprags' = spec_inst_prags' ++ other_sigs'
all_fvs = meth_fvs `plusFV` more_fvs
`plusFV` spec_inst_fvs
`plusFV` inst_fvs
; return (ClsInstDecl { cid_poly_ty = inst_ty', cid_binds = mbinds'
, cid_sigs = uprags', cid_tyfam_insts = ats'
, cid_overlap_mode = oflag
, cid_datafam_insts = adts' },
all_fvs) } } }
-- We return the renamed associated data type declarations so
-- that they can be entered into the list of type declarations
-- for the binding group, but we also keep a copy in the instance.
-- The latter is needed for well-formedness checks in the type
-- checker (eg, to ensure that all ATs of the instance actually
-- receive a declaration).
-- NB: Even the copies in the instance declaration carry copies of
-- the instance context after renaming. This is a bit
-- strange, but should not matter (and it would be more work
-- to remove the context).
rnFamInstDecl :: HsDocContext
-> Maybe (Name, [Name])
-> Located RdrName
-> [LHsType RdrName]
-> rhs
-> (HsDocContext -> rhs -> RnM (rhs', FreeVars))
-> RnM (Located Name, HsWithBndrs Name [LHsType Name], rhs',
FreeVars)
rnFamInstDecl doc mb_cls tycon pats payload rnPayload
= do { tycon' <- lookupFamInstName (fmap fst mb_cls) tycon
; let loc = case pats of
[] -> pprPanic "rnFamInstDecl" (ppr tycon)
(L loc _ : []) -> loc
(L loc _ : ps) -> combineSrcSpans loc (getLoc (last ps))
(kv_rdr_names, tv_rdr_names) = extractHsTysRdrTyVars pats
; rdr_env <- getLocalRdrEnv
; kv_names <- mapM (newTyVarNameRn mb_cls rdr_env loc) kv_rdr_names
; tv_names <- mapM (newTyVarNameRn mb_cls rdr_env loc) tv_rdr_names
-- All the free vars of the family patterns
-- with a sensible binding location
; ((pats', payload'), fvs)
<- bindLocalNamesFV kv_names $
bindLocalNamesFV tv_names $
do { (pats', pat_fvs) <- rnLHsTypes doc pats
; (payload', rhs_fvs) <- rnPayload doc payload
-- See Note [Renaming associated types]
; let lhs_names = mkNameSet kv_names `unionNameSet` mkNameSet tv_names
bad_tvs = case mb_cls of
Nothing -> []
Just (_,cls_tkvs) -> filter is_bad cls_tkvs
is_bad cls_tkv = cls_tkv `elemNameSet` rhs_fvs
&& not (cls_tkv `elemNameSet` lhs_names)
; unless (null bad_tvs) (badAssocRhs bad_tvs)
; return ((pats', payload'), rhs_fvs `plusFV` pat_fvs) }
; let all_fvs = fvs `addOneFV` unLoc tycon'
; return (tycon',
HsWB { hswb_cts = pats', hswb_kvs = kv_names,
hswb_tvs = tv_names, hswb_wcs = [] },
payload',
all_fvs) }
-- type instance => use, hence addOneFV
rnTyFamInstDecl :: Maybe (Name, [Name])
-> TyFamInstDecl RdrName
-> RnM (TyFamInstDecl Name, FreeVars)
rnTyFamInstDecl mb_cls (TyFamInstDecl { tfid_eqn = L loc eqn })
= do { (eqn', fvs) <- rnTyFamInstEqn mb_cls eqn
; return (TyFamInstDecl { tfid_eqn = L loc eqn'
, tfid_fvs = fvs }, fvs) }
rnTyFamInstEqn :: Maybe (Name, [Name])
-> TyFamInstEqn RdrName
-> RnM (TyFamInstEqn Name, FreeVars)
rnTyFamInstEqn mb_cls (TyFamEqn { tfe_tycon = tycon
, tfe_pats = HsWB { hswb_cts = pats }
, tfe_rhs = rhs })
= do { (tycon', pats', rhs', fvs) <-
rnFamInstDecl (TySynCtx tycon) mb_cls tycon pats rhs rnTySyn
; return (TyFamEqn { tfe_tycon = tycon'
, tfe_pats = pats'
, tfe_rhs = rhs' }, fvs) }
rnTyFamDefltEqn :: Name
-> TyFamDefltEqn RdrName
-> RnM (TyFamDefltEqn Name, FreeVars)
rnTyFamDefltEqn cls (TyFamEqn { tfe_tycon = tycon
, tfe_pats = tyvars
, tfe_rhs = rhs })
= bindHsTyVars ctx (Just cls) [] tyvars $ \ tyvars' ->
do { tycon' <- lookupFamInstName (Just cls) tycon
; (rhs', fvs) <- rnLHsType ctx rhs
; return (TyFamEqn { tfe_tycon = tycon'
, tfe_pats = tyvars'
, tfe_rhs = rhs' }, fvs) }
where
ctx = TyFamilyCtx tycon
rnDataFamInstDecl :: Maybe (Name, [Name])
-> DataFamInstDecl RdrName
-> RnM (DataFamInstDecl Name, FreeVars)
rnDataFamInstDecl mb_cls (DataFamInstDecl { dfid_tycon = tycon
, dfid_pats = HsWB { hswb_cts = pats }
, dfid_defn = defn })
= do { (tycon', pats', defn', fvs) <-
rnFamInstDecl (TyDataCtx tycon) mb_cls tycon pats defn rnDataDefn
; return (DataFamInstDecl { dfid_tycon = tycon'
, dfid_pats = pats'
, dfid_defn = defn'
, dfid_fvs = fvs }, fvs) }
-- Renaming of the associated types in instances.
-- Rename associated type family decl in class
rnATDecls :: Name -- Class
-> [LFamilyDecl RdrName]
-> RnM ([LFamilyDecl Name], FreeVars)
rnATDecls cls at_decls
= rnList (rnFamDecl (Just cls)) at_decls
rnATInstDecls :: (Maybe (Name, [Name]) -> -- The function that renames
decl RdrName -> -- an instance. rnTyFamInstDecl
RnM (decl Name, FreeVars)) -- or rnDataFamInstDecl
-> Name -- Class
-> LHsTyVarBndrs Name
-> [Located (decl RdrName)]
-> RnM ([Located (decl Name)], FreeVars)
-- Used for data and type family defaults in a class decl
-- and the family instance declarations in an instance
--
-- NB: We allow duplicate associated-type decls;
-- See Note [Associated type instances] in TcInstDcls
rnATInstDecls rnFun cls hs_tvs at_insts
= rnList (rnFun (Just (cls, tv_ns))) at_insts
where
tv_ns = hsLKiTyVarNames hs_tvs
-- See Note [Renaming associated types]
{-
Note [Renaming associated types]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Check that the RHS of the decl mentions only type variables
bound on the LHS. For example, this is not ok
class C a b where
type F a x :: *
instance C (p,q) r where
type F (p,q) x = (x, r) -- BAD: mentions 'r'
c.f. Trac #5515
The same thing applies to kind variables, of course (Trac #7938, #9574):
class Funct f where
type Codomain f :: *
instance Funct ('KProxy :: KProxy o) where
type Codomain 'KProxy = NatTr (Proxy :: o -> *)
Here 'o' is mentioned on the RHS of the Codomain function, but
not on the LHS.
All this applies only for *instance* declarations. In *class*
declarations there is no RHS to worry about, and the class variables
can all be in scope (Trac #5862):
class Category (x :: k -> k -> *) where
type Ob x :: k -> Constraint
id :: Ob x a => x a a
(.) :: (Ob x a, Ob x b, Ob x c) => x b c -> x a b -> x a c
Here 'k' is in scope in the kind signature, just like 'x'.
-}
extendTyVarEnvForMethodBinds :: [Name]
-> RnM (LHsBinds Name, FreeVars)
-> RnM (LHsBinds Name, FreeVars)
-- For the method bindings in class and instance decls, we extend
-- the type variable environment iff -XScopedTypeVariables
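-- For example (an illustrative sketch; 'Wrap' is a hypothetical newtype):
--     instance Eq a => Eq (Wrap a) where
--       Wrap x == Wrap y = x == (y :: a)
-- The 'a' in the expression signature on 'y' refers to the instance's
-- type variable only when -XScopedTypeVariables is on.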
extendTyVarEnvForMethodBinds ktv_names thing_inside
= do { scoped_tvs <- xoptM Opt_ScopedTypeVariables
; if scoped_tvs then
extendTyVarEnvFVRn ktv_names thing_inside
else
thing_inside }
{-
*********************************************************
* *
\subsection{Stand-alone deriving declarations}
* *
*********************************************************
-}
rnSrcDerivDecl :: DerivDecl RdrName -> RnM (DerivDecl Name, FreeVars)
rnSrcDerivDecl (DerivDecl ty overlap)
= do { standalone_deriv_ok <- xoptM Opt_StandaloneDeriving
; unless standalone_deriv_ok (addErr standaloneDerivErr)
; (ty', fvs) <- rnLHsInstType (text "In a deriving declaration") ty
; return (DerivDecl ty' overlap, fvs) }
standaloneDerivErr :: SDoc
standaloneDerivErr
= hang (ptext (sLit "Illegal standalone deriving declaration"))
2 (ptext (sLit "Use StandaloneDeriving to enable this extension"))
{-
*********************************************************
* *
\subsection{Rules}
* *
*********************************************************
-}
rnHsRuleDecls :: RuleDecls RdrName -> RnM (RuleDecls Name, FreeVars)
rnHsRuleDecls (HsRules src rules)
= do { (rn_rules,fvs) <- rnList rnHsRuleDecl rules
; return (HsRules src rn_rules,fvs) }
rnHsRuleDecl :: RuleDecl RdrName -> RnM (RuleDecl Name, FreeVars)
rnHsRuleDecl (HsRule rule_name act vars lhs _fv_lhs rhs _fv_rhs)
= do { let rdr_names_w_loc = map get_var vars
; checkDupRdrNames rdr_names_w_loc
; checkShadowedRdrNames rdr_names_w_loc
; names <- newLocalBndrsRn rdr_names_w_loc
; bindHsRuleVars (snd $ unLoc rule_name) vars names $ \ vars' ->
do { (lhs', fv_lhs') <- rnLExpr lhs
; (rhs', fv_rhs') <- rnLExpr rhs
; checkValidRule (snd $ unLoc rule_name) names lhs' fv_lhs'
; return (HsRule rule_name act vars' lhs' fv_lhs' rhs' fv_rhs',
fv_lhs' `plusFV` fv_rhs') } }
where
get_var (L _ (RuleBndrSig v _)) = v
get_var (L _ (RuleBndr v)) = v
bindHsRuleVars :: RuleName -> [LRuleBndr RdrName] -> [Name]
-> ([LRuleBndr Name] -> RnM (a, FreeVars))
-> RnM (a, FreeVars)
bindHsRuleVars rule_name vars names thing_inside
= go vars names $ \ vars' ->
bindLocalNamesFV names (thing_inside vars')
where
doc = RuleCtx rule_name
go (L l (RuleBndr (L loc _)) : vars) (n : ns) thing_inside
= go vars ns $ \ vars' ->
thing_inside (L l (RuleBndr (L loc n)) : vars')
go (L l (RuleBndrSig (L loc _) bsig) : vars) (n : ns) thing_inside
= rnHsBndrSig doc bsig $ \ bsig' ->
go vars ns $ \ vars' ->
thing_inside (L l (RuleBndrSig (L loc n) bsig') : vars')
go [] [] thing_inside = thing_inside []
go vars names _ = pprPanic "bindRuleVars" (ppr vars $$ ppr names)
{-
Note [Rule LHS validity checking]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Check the shape of a transformation rule LHS. Currently we only allow
LHSs of the form @(f e1 .. en)@, where @f@ is not one of the
@forall@'d variables.
We used to restrict the form of the 'ei' to prevent you writing rules
with LHSs with a complicated desugaring (and hence unlikely to match);
(e.g. a case expression is not allowed: too elaborate.)
But there are legitimate non-trivial args ei, like sections and
lambdas. So it seems simpler not to check at all, and that is why
check_e is commented out.
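For example (illustrative, not from the original source):
    {-# RULES "map/map" forall f g xs. map f (map g xs) = map (f . g) xs #-}
is accepted: the head of the LHS is 'map', which is not forall'd.  But
    {-# RULES "bad" forall f x. f x = x #-}
is rejected, because the head of its LHS is the forall'd variable 'f'.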
-}
checkValidRule :: FastString -> [Name] -> LHsExpr Name -> NameSet -> RnM ()
checkValidRule rule_name ids lhs' fv_lhs'
= do { -- Check for the form of the LHS
case (validRuleLhs ids lhs') of
Nothing -> return ()
Just bad -> failWithTc (badRuleLhsErr rule_name lhs' bad)
-- Check that LHS vars are all bound
; let bad_vars = [var | var <- ids, not (var `elemNameSet` fv_lhs')]
; mapM_ (addErr . badRuleVar rule_name) bad_vars }
validRuleLhs :: [Name] -> LHsExpr Name -> Maybe (HsExpr Name)
-- Nothing => OK
-- Just e => Not ok, and e is the offending expression
validRuleLhs foralls lhs
= checkl lhs
where
checkl (L _ e) = check e
check (OpApp e1 op _ e2) = checkl op `mplus` checkl_e e1 `mplus` checkl_e e2
check (HsApp e1 e2) = checkl e1 `mplus` checkl_e e2
check (HsVar v) | v `notElem` foralls = Nothing
check other = Just other -- Failure
-- Check an argument
checkl_e (L _ _e) = Nothing -- Was (check_e e); see Note [Rule LHS validity checking]
{- Commented out; see Note [Rule LHS validity checking] above
check_e (HsVar v) = Nothing
check_e (HsPar e) = checkl_e e
check_e (HsLit e) = Nothing
check_e (HsOverLit e) = Nothing
check_e (OpApp e1 op _ e2) = checkl_e e1 `mplus` checkl_e op `mplus` checkl_e e2
check_e (HsApp e1 e2) = checkl_e e1 `mplus` checkl_e e2
check_e (NegApp e _) = checkl_e e
check_e (ExplicitList _ es) = checkl_es es
check_e other = Just other -- Fails
checkl_es es = foldr (mplus . checkl_e) Nothing es
-}
badRuleVar :: FastString -> Name -> SDoc
badRuleVar name var
= sep [ptext (sLit "Rule") <+> doubleQuotes (ftext name) <> colon,
ptext (sLit "Forall'd variable") <+> quotes (ppr var) <+>
ptext (sLit "does not appear on left hand side")]
badRuleLhsErr :: FastString -> LHsExpr Name -> HsExpr Name -> SDoc
badRuleLhsErr name lhs bad_e
= sep [ptext (sLit "Rule") <+> ftext name <> colon,
nest 4 (vcat [ptext (sLit "Illegal expression:") <+> ppr bad_e,
ptext (sLit "in left-hand side:") <+> ppr lhs])]
$$
ptext (sLit "LHS must be of form (f e1 .. en) where f is not forall'd")
{-
*********************************************************
* *
\subsection{Vectorisation declarations}
* *
*********************************************************
-}
rnHsVectDecl :: VectDecl RdrName -> RnM (VectDecl Name, FreeVars)
-- FIXME: For the moment, the right-hand side is restricted to be a variable as we cannot properly
-- typecheck a complex right-hand side without invoking 'vectType' from the vectoriser.
rnHsVectDecl (HsVect s var rhs@(L _ (HsVar _)))
= do { var' <- lookupLocatedOccRn var
; (rhs', fv_rhs) <- rnLExpr rhs
; return (HsVect s var' rhs', fv_rhs `addOneFV` unLoc var')
}
rnHsVectDecl (HsVect _ _var _rhs)
= failWith $ vcat
[ ptext (sLit "IMPLEMENTATION RESTRICTION: right-hand side of a VECTORISE pragma")
, ptext (sLit "must be an identifier")
]
rnHsVectDecl (HsNoVect s var)
= do { var' <- lookupLocatedTopBndrRn var -- only applies to local (not imported) names
; return (HsNoVect s var', unitFV (unLoc var'))
}
rnHsVectDecl (HsVectTypeIn s isScalar tycon Nothing)
= do { tycon' <- lookupLocatedOccRn tycon
; return (HsVectTypeIn s isScalar tycon' Nothing, unitFV (unLoc tycon'))
}
rnHsVectDecl (HsVectTypeIn s isScalar tycon (Just rhs_tycon))
= do { tycon' <- lookupLocatedOccRn tycon
; rhs_tycon' <- lookupLocatedOccRn rhs_tycon
; return ( HsVectTypeIn s isScalar tycon' (Just rhs_tycon')
, mkFVs [unLoc tycon', unLoc rhs_tycon'])
}
rnHsVectDecl (HsVectTypeOut _ _ _)
= panic "RnSource.rnHsVectDecl: Unexpected 'HsVectTypeOut'"
rnHsVectDecl (HsVectClassIn s cls)
= do { cls' <- lookupLocatedOccRn cls
; return (HsVectClassIn s cls', unitFV (unLoc cls'))
}
rnHsVectDecl (HsVectClassOut _)
= panic "RnSource.rnHsVectDecl: Unexpected 'HsVectClassOut'"
rnHsVectDecl (HsVectInstIn instTy)
= do { (instTy', fvs) <- rnLHsInstType (text "In a VECTORISE pragma") instTy
; return (HsVectInstIn instTy', fvs)
}
rnHsVectDecl (HsVectInstOut _)
= panic "RnSource.rnHsVectDecl: Unexpected 'HsVectInstOut'"
{-
*********************************************************
* *
\subsection{Type, class and iface sig declarations}
* *
*********************************************************
@rnTyDecl@ uses the `global name function' to create a new type
declaration in which local names have been replaced by their original
names, reporting any unknown names.
Renaming type variables is a pain. Because they now contain uniques,
it is necessary to pass in an association list which maps a parsed
tyvar to its @Name@ representation.
In some cases (type signatures of values),
it is even necessary to go over the type first
in order to get the set of tyvars used by it, make an assoc list,
and then go over it again to rename the tyvars!
However, we can also do some scoping checks at the same time.
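For example (an illustrative signature, not from the original source), in
    f :: a -> [a] -> [a]
both occurrences of 'a' must map to the same Name, so we first collect
the tyvars {a}, invent Names for them, and only then rename the type
with that association in scope.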
Note [Extra dependencies from .hs-boot files]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider the following case:
A.hs-boot
module A where
data A1
B.hs
module B where
import {-# SOURCE #-} A
type DisguisedA1 = A1
data B1 = B1 DisguisedA1
A.hs
module A where
import B
data A2 = A2 A1
data A1 = A1 B1
Here A1 is really recursive (via B1), but we won't see that easily when
doing dependency analysis while compiling A.hs
To handle this problem, we add a dependency
- from every local declaration
- to everything that comes from this module's .hs-boot file.
In this case, we'll add edges
- from A2 to A1 (but that edge is there already)
- from A1 to A1 (which is new)
Well, not quite *every* declaration. Imagine module A
above had another datatype declaration:
data A3 = A3 Int
Even though A3 has a dependency (on Int), all its dependencies are from things
that live on other packages. Since we don't have mutual dependencies across
packages, it is safe not to add the dependencies on the .hs-boot stuff to A2.
Hence the nameIsHomePackageImport test in rnTyClDecls below.
See also Note [Grouping of type and class declarations] in TcTyClsDecls.
-}
rnTyClDecls :: Maybe FreeVars -> [TyClGroup RdrName]
-> RnM ([TyClGroup Name], FreeVars)
-- Rename the declarations and do dependency analysis on them
rnTyClDecls extra_deps tycl_ds
= do { ds_w_fvs <- mapM (wrapLocFstM rnTyClDecl) (tyClGroupConcat tycl_ds)
; let decl_names = mkNameSet (map (tcdName . unLoc . fst) ds_w_fvs)
; role_annot_env <- rnRoleAnnots decl_names (concatMap group_roles tycl_ds)
; this_mod <- getModule
; let add_boot_deps :: FreeVars -> FreeVars
-- See Note [Extra dependencies from .hs-boot files]
add_boot_deps fvs
| Just extra <- extra_deps
, has_local_imports fvs = fvs `plusFV` extra
| otherwise = fvs
has_local_imports fvs
= foldNameSet ((||) . nameIsHomePackageImport this_mod)
False fvs
ds_w_fvs' = mapSnd add_boot_deps ds_w_fvs
sccs :: [SCC (LTyClDecl Name)]
sccs = depAnalTyClDecls ds_w_fvs'
all_fvs = foldr (plusFV . snd) emptyFVs ds_w_fvs'
raw_groups = map flattenSCC sccs
-- See Note [Role annotations in the renamer]
(groups, orphan_roles)
= foldr (\group (groups_acc, orphans_acc) ->
let names = map (tcdName . unLoc) group
roles = mapMaybe (lookupNameEnv orphans_acc) names
orphans' = delListFromNameEnv orphans_acc names
-- there doesn't seem to be an interface to
-- do the above more efficiently
in ( TyClGroup { group_tyclds = group
, group_roles = roles } : groups_acc
, orphans' )
)
([], role_annot_env)
raw_groups
; mapM_ orphanRoleAnnotErr (nameEnvElts orphan_roles)
; traceRn (text "rnTycl" <+> (ppr ds_w_fvs $$ ppr sccs))
; return (groups, all_fvs) }
rnTyClDecl :: TyClDecl RdrName
-> RnM (TyClDecl Name, FreeVars)
-- All flavours of type family declarations ("type family", "newtype family",
-- and "data family"), both top level and (for an associated type)
-- in a class decl
rnTyClDecl (FamDecl { tcdFam = decl })
= do { (decl', fvs) <- rnFamDecl Nothing decl
; return (FamDecl decl', fvs) }
rnTyClDecl (SynDecl { tcdLName = tycon, tcdTyVars = tyvars, tcdRhs = rhs })
= do { tycon' <- lookupLocatedTopBndrRn tycon
; let kvs = fst (extractHsTyRdrTyVars rhs)
doc = TySynCtx tycon
; traceRn (text "rntycl-ty" <+> ppr tycon <+> ppr kvs)
; ((tyvars', rhs'), fvs) <- bindHsTyVars doc Nothing kvs tyvars $
\ tyvars' ->
do { (rhs', fvs) <- rnTySyn doc rhs
; return ((tyvars', rhs'), fvs) }
; return (SynDecl { tcdLName = tycon', tcdTyVars = tyvars'
, tcdRhs = rhs', tcdFVs = fvs }, fvs) }
-- "data", "newtype" declarations
-- both top level and (for an associated type) in an instance decl
rnTyClDecl (DataDecl { tcdLName = tycon, tcdTyVars = tyvars, tcdDataDefn = defn })
= do { tycon' <- lookupLocatedTopBndrRn tycon
; let kvs = extractDataDefnKindVars defn
doc = TyDataCtx tycon
; traceRn (text "rntycl-data" <+> ppr tycon <+> ppr kvs)
; ((tyvars', defn'), fvs) <- bindHsTyVars doc Nothing kvs tyvars $ \ tyvars' ->
do { (defn', fvs) <- rnDataDefn doc defn
; return ((tyvars', defn'), fvs) }
; return (DataDecl { tcdLName = tycon', tcdTyVars = tyvars'
, tcdDataDefn = defn', tcdFVs = fvs }, fvs) }
rnTyClDecl (ClassDecl {tcdCtxt = context, tcdLName = lcls,
tcdTyVars = tyvars, tcdFDs = fds, tcdSigs = sigs,
tcdMeths = mbinds, tcdATs = ats, tcdATDefs = at_defs,
tcdDocs = docs})
= do { lcls' <- lookupLocatedTopBndrRn lcls
; let cls' = unLoc lcls'
kvs = [] -- No scoped kind vars except those in
-- kind signatures on the tyvars
-- Tyvars scope over superclass context and method signatures
; ((tyvars', context', fds', ats', sigs'), stuff_fvs)
<- bindHsTyVars cls_doc Nothing kvs tyvars $ \ tyvars' -> do
-- Checks for distinct tyvars
{ (context', cxt_fvs) <- rnContext cls_doc context
; fds' <- rnFds fds
-- The fundeps have no free variables
; (ats', fv_ats) <- rnATDecls cls' ats
; (sigs', sig_fvs) <- renameSigs (ClsDeclCtxt cls') sigs
; let fvs = cxt_fvs `plusFV`
sig_fvs `plusFV`
fv_ats
; return ((tyvars', context', fds', ats', sigs'), fvs) }
; (at_defs', fv_at_defs) <- rnList (rnTyFamDefltEqn cls') at_defs
-- No need to check for duplicate associated type decls
-- since that is done by RnNames.extendGlobalRdrEnvRn
-- Check the signatures
-- First process the class op sigs (op_sigs), then the fixity sigs (non_op_sigs).
; let sig_rdr_names_w_locs = [op | L _ (TypeSig ops _ _) <- sigs, op <- ops]
; checkDupRdrNames sig_rdr_names_w_locs
-- Typechecker is responsible for checking that we only
-- give default-method bindings for things in this class.
-- The renamer *could* check this for class decls, but can't
-- for instance decls.
-- The newLocals call is tiresome: given a generic class decl
-- class C a where
-- op :: a -> a
-- op {| x+y |} (Inl a) = ...
-- op {| x+y |} (Inr b) = ...
-- op {| a*b |} (a*b) = ...
-- we want to name both "x" tyvars with the same unique, so that they are
-- easy to group together in the typechecker.
; (mbinds', meth_fvs)
<- extendTyVarEnvForMethodBinds (hsLKiTyVarNames tyvars') $
-- No need to check for duplicate method signatures
-- since that is done by RnNames.extendGlobalRdrEnvRn
-- and the methods are already in scope
rnMethodBinds cls' (mkSigTvFn sigs') mbinds
-- Haddock docs
; docs' <- mapM (wrapLocM rnDocDecl) docs
; let all_fvs = meth_fvs `plusFV` stuff_fvs `plusFV` fv_at_defs
; return (ClassDecl { tcdCtxt = context', tcdLName = lcls',
tcdTyVars = tyvars', tcdFDs = fds', tcdSigs = sigs',
tcdMeths = mbinds', tcdATs = ats', tcdATDefs = at_defs',
tcdDocs = docs', tcdFVs = all_fvs },
all_fvs ) }
where
cls_doc = ClassDeclCtx lcls
-- "type" and "type instance" declarations
rnTySyn :: HsDocContext -> LHsType RdrName -> RnM (LHsType Name, FreeVars)
rnTySyn doc rhs = rnLHsType doc rhs
-- | Renames role annotations, returning them as the values in a NameEnv
-- and checks for duplicate role annotations.
-- It is quite convenient to do both of these in the same place.
-- See also Note [Role annotations in the renamer]
rnRoleAnnots :: NameSet -- ^ of the decls in this group
-> [LRoleAnnotDecl RdrName]
-> RnM (NameEnv (LRoleAnnotDecl Name))
rnRoleAnnots decl_names role_annots
= do { -- check for duplicates *before* renaming, to avoid lumping
-- together all the unboundNames
let (no_dups, dup_annots) = removeDups role_annots_cmp role_annots
role_annots_cmp (L _ annot1) (L _ annot2)
= roleAnnotDeclName annot1 `compare` roleAnnotDeclName annot2
; mapM_ dupRoleAnnotErr dup_annots
; role_annots' <- mapM (wrapLocM rn_role_annot1) no_dups
-- some of the role annots will be unbound; we don't wish
-- to include these
; return $ mkNameEnv [ (name, ra)
| ra <- role_annots'
, let name = roleAnnotDeclName (unLoc ra)
, not (isUnboundName name) ] }
where
rn_role_annot1 (RoleAnnotDecl tycon roles)
= do { -- the name is an *occurrence*, but look it up only in the
-- decls defined in this group (see #10263)
tycon' <- lookupSigCtxtOccRn (RoleAnnotCtxt decl_names)
(text "role annotation")
tycon
; return $ RoleAnnotDecl tycon' roles }
dupRoleAnnotErr :: [LRoleAnnotDecl RdrName] -> RnM ()
dupRoleAnnotErr [] = panic "dupRoleAnnotErr"
dupRoleAnnotErr list
= addErrAt loc $
hang (text "Duplicate role annotations for" <+>
quotes (ppr $ roleAnnotDeclName first_decl) <> colon)
2 (vcat $ map pp_role_annot sorted_list)
where
sorted_list = sortBy cmp_annot list
(L loc first_decl : _) = sorted_list
pp_role_annot (L loc decl) = hang (ppr decl)
4 (text "-- written at" <+> ppr loc)
cmp_annot (L loc1 _) (L loc2 _) = loc1 `compare` loc2
orphanRoleAnnotErr :: LRoleAnnotDecl Name -> RnM ()
orphanRoleAnnotErr (L loc decl)
= addErrAt loc $
hang (text "Role annotation for a type previously declared:")
2 (ppr decl) $$
parens (text "The role annotation must be given where" <+>
quotes (ppr $ roleAnnotDeclName decl) <+>
text "is declared.")
rnDataDefn :: HsDocContext -> HsDataDefn RdrName -> RnM (HsDataDefn Name, FreeVars)
rnDataDefn doc (HsDataDefn { dd_ND = new_or_data, dd_cType = cType
, dd_ctxt = context, dd_cons = condecls
, dd_kindSig = sig, dd_derivs = derivs })
= do { checkTc (h98_style || null (unLoc context))
(badGadtStupidTheta doc)
; (sig', sig_fvs) <- rnLHsMaybeKind doc sig
; (context', fvs1) <- rnContext doc context
; (derivs', fvs3) <- rn_derivs derivs
-- For the constructor declarations, drop the LocalRdrEnv
-- in the GADT case, where the type variables in the declaration
-- do not scope over the constructor signatures
-- data T a where { T1 :: forall b. b-> b }
; let { zap_lcl_env | h98_style = \ thing -> thing
| otherwise = setLocalRdrEnv emptyLocalRdrEnv }
; (condecls', con_fvs) <- zap_lcl_env $ rnConDecls condecls
-- No need to check for duplicate constructor decls
-- since that is done by RnNames.extendGlobalRdrEnvRn
; let all_fvs = fvs1 `plusFV` fvs3 `plusFV`
con_fvs `plusFV` sig_fvs
; return ( HsDataDefn { dd_ND = new_or_data, dd_cType = cType
, dd_ctxt = context', dd_kindSig = sig'
, dd_cons = condecls'
, dd_derivs = derivs' }
, all_fvs )
}
where
h98_style = case condecls of -- Note [Stupid theta]
L _ (ConDecl { con_res = ResTyGADT {} }) : _ -> False
_ -> True
rn_derivs Nothing = return (Nothing, emptyFVs)
rn_derivs (Just (L ld ds)) = do { (ds', fvs) <- rnLHsTypes doc ds
; return (Just (L ld ds'), fvs) }
badGadtStupidTheta :: HsDocContext -> SDoc
badGadtStupidTheta _
= vcat [ptext (sLit "No context is allowed on a GADT-style data declaration"),
ptext (sLit "(You can put a context on each contructor, though.)")]
rnFamDecl :: Maybe Name
-- Just cls => this FamilyDecl is nested
-- inside an *class decl* for cls
-- used for associated types
-> FamilyDecl RdrName
-> RnM (FamilyDecl Name, FreeVars)
rnFamDecl mb_cls (FamilyDecl { fdLName = tycon, fdTyVars = tyvars
, fdInfo = info, fdKindSig = kind })
= do { ((tycon', tyvars', kind'), fv1) <-
bindHsTyVars fmly_doc mb_cls kvs tyvars $ \tyvars' ->
do { tycon' <- lookupLocatedTopBndrRn tycon
; (kind', fv_kind) <- rnLHsMaybeKind fmly_doc kind
; return ((tycon', tyvars', kind'), fv_kind) }
; (info', fv2) <- rn_info info
; return (FamilyDecl { fdLName = tycon', fdTyVars = tyvars'
, fdInfo = info', fdKindSig = kind' }
, fv1 `plusFV` fv2) }
where
fmly_doc = TyFamilyCtx tycon
kvs = extractRdrKindSigVars kind
rn_info (ClosedTypeFamily (Just eqns))
= do { (eqns', fvs) <- rnList (rnTyFamInstEqn Nothing) eqns
-- no class context,
; return (ClosedTypeFamily (Just eqns'), fvs) }
rn_info (ClosedTypeFamily Nothing)
= return (ClosedTypeFamily Nothing, emptyFVs)
rn_info OpenTypeFamily = return (OpenTypeFamily, emptyFVs)
rn_info DataFamily = return (DataFamily, emptyFVs)
{-
Note [Stupid theta]
~~~~~~~~~~~~~~~~~~~
Trac #3850 complains about a regression wrt 6.10 for
data Show a => T a
There is no reason not to allow the stupid theta if there are no data
constructors. It's still stupid, but does no harm, and I don't want
to cause programs to break unnecessarily (notably HList). So if there
are no data constructors we allow h98_style = True
-}
depAnalTyClDecls :: [(LTyClDecl Name, FreeVars)] -> [SCC (LTyClDecl Name)]
-- See Note [Dependency analysis of type and class decls]
depAnalTyClDecls ds_w_fvs
= stronglyConnCompFromEdgedVertices edges
where
edges = [ (d, tcdName (unLoc d), map get_parent (nameSetElems fvs))
| (d, fvs) <- ds_w_fvs ]
-- We also need to consider data constructor names since
-- they may appear in types because of promotion.
get_parent n = lookupNameEnv assoc_env n `orElse` n
assoc_env :: NameEnv Name -- Maps a data constructor back
-- to its parent type constructor
assoc_env = mkNameEnv $ concat assoc_env_list
assoc_env_list = do
(L _ d, _) <- ds_w_fvs
case d of
ClassDecl { tcdLName = L _ cls_name
, tcdATs = ats }
-> do L _ (FamilyDecl { fdLName = L _ fam_name }) <- ats
return [(fam_name, cls_name)]
DataDecl { tcdLName = L _ data_name
, tcdDataDefn = HsDataDefn { dd_cons = cons } }
-> do L _ dc <- cons
return $ zip (map unLoc $ con_names dc) (repeat data_name)
_ -> []
{-
Note [Dependency analysis of type and class decls]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We need to do dependency analysis on type and class declarations
else we get bad error messages. Consider
data T f a = MkT f a
data S f a = MkS f (T f a)
This has a kind error, but the error message is better if you
check T first, (fixing its kind) and *then* S. If you do kind
inference together, you might get an error reported in S, which
is jolly confusing. See Trac #4875
Note [Role annotations in the renamer]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We must ensure that a type's role annotation is put in the same group as the
proper type declaration. This is because role annotations are needed during
type-checking when creating the type's TyCon. So, rnRoleAnnots builds a
NameEnv (LRoleAnnotDecl Name) that maps a name to a role annotation for that
type, if any. Then, this map can be used to add the role annotations to the
groups after dependency analysis.
This process checks for duplicate role annotations, where we must be careful
to do the check *before* renaming to avoid calling all unbound names duplicates
of one another.
The renaming process, as usual, might identify and report errors for unbound
names. We exclude the annotations for unbound names in the annotation
environment to avoid spurious errors for orphaned annotations.
We then (in rnTyClDecls) do a check for orphan role annotations (role
annotations without an accompanying type decl). The check works by folding
over raw_groups (of type [[TyClDecl Name]]), selecting out the relevant
role declarations for each group, as well as diminishing the annotation
environment. After the fold is complete, anything left over in the name
environment must be an orphan, and errors are generated.
An earlier version of this algorithm short-cut the orphan check by renaming
only with names declared in this module. But, this check is insufficient in
the case of staged module compilation (Template Haskell, GHCi).
See #8485. With the new lookup process (which includes types declared in other
modules), we get better error messages, too.
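For example (an illustrative sketch, not from the original source):
    type role T nominal
    data T a = MkT
The annotation must end up in the same TyClGroup as the declaration of
T, because the type checker consults it when building T's TyCon.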
*********************************************************
* *
\subsection{Support code for type/data declarations}
* *
*********************************************************
-}
---------------
badAssocRhs :: [Name] -> RnM ()
badAssocRhs ns
= addErr (hang (ptext (sLit "The RHS of an associated type declaration mentions")
<+> pprWithCommas (quotes . ppr) ns)
2 (ptext (sLit "All such variables must be bound on the LHS")))
-----------------
rnConDecls :: [LConDecl RdrName] -> RnM ([LConDecl Name], FreeVars)
rnConDecls = mapFvRn (wrapLocFstM rnConDecl)
rnConDecl :: ConDecl RdrName -> RnM (ConDecl Name, FreeVars)
rnConDecl decl@(ConDecl { con_names = names, con_qvars = tvs
, con_cxt = lcxt@(L loc cxt), con_details = details
, con_res = res_ty, con_doc = mb_doc
, con_old_rec = old_rec, con_explicit = expl })
= do { mapM_ (addLocM checkConName) names
; when old_rec (addWarn (deprecRecSyntax decl))
; new_names <- mapM lookupLocatedTopBndrRn names
-- For H98 syntax, the tvs are the existential ones
-- For GADT syntax, the tvs are all the quantified tyvars
-- Hence the 'filter' in the ResTyH98 case only
; rdr_env <- getLocalRdrEnv
; let arg_tys = hsConDeclArgTys details
(free_kvs, free_tvs) = case res_ty of
ResTyH98 -> filterInScope rdr_env (get_rdr_tvs arg_tys)
ResTyGADT _ ty -> get_rdr_tvs (ty : arg_tys)
-- With an Explicit forall, check for unused binders
-- With Implicit, find the mentioned ones, and use them as binders
-- With Qualified, do the same as with Implicit, but give a warning
-- See Note [Context quantification]
; new_tvs <- case expl of
Implicit -> return (mkHsQTvs (userHsTyVarBndrs loc free_tvs))
Qualified -> do { warnContextQuantification (docOfHsDocContext doc)
(userHsTyVarBndrs loc free_tvs)
; return (mkHsQTvs (userHsTyVarBndrs loc free_tvs)) }
Explicit -> do { warnUnusedForAlls (docOfHsDocContext doc) tvs free_tvs
; return tvs }
; mb_doc' <- rnMbLHsDoc mb_doc
; bindHsTyVars doc Nothing free_kvs new_tvs $ \new_tyvars -> do
{ (new_context, fvs1) <- rnContext doc lcxt
; (new_details, fvs2) <- rnConDeclDetails doc details
; (new_details', new_res_ty, fvs3)
<- rnConResult doc (map unLoc new_names) new_details res_ty
; return (decl { con_names = new_names, con_qvars = new_tyvars
, con_cxt = new_context, con_details = new_details'
, con_res = new_res_ty, con_doc = mb_doc' },
fvs1 `plusFV` fvs2 `plusFV` fvs3) }}
where
doc = ConDeclCtx names
get_rdr_tvs tys = extractHsTysRdrTyVars (cxt ++ tys)
rnConResult :: HsDocContext -> [Name]
-> HsConDetails (LHsType Name) (Located [LConDeclField Name])
-> ResType (LHsType RdrName)
-> RnM (HsConDetails (LHsType Name) (Located [LConDeclField Name]),
ResType (LHsType Name), FreeVars)
rnConResult _ _ details ResTyH98 = return (details, ResTyH98, emptyFVs)
rnConResult doc _con details (ResTyGADT ls ty)
= do { (ty', fvs) <- rnLHsType doc ty
; let (arg_tys, res_ty) = splitHsFunType ty'
-- We can finally split it up,
-- now the renamer has dealt with fixities
-- See Note [Sorting out the result type] in RdrHsSyn
; case details of
InfixCon {} -> pprPanic "rnConResult" (ppr ty)
-- See Note [Sorting out the result type] in RdrHsSyn
RecCon {} -> do { unless (null arg_tys)
(addErr (badRecResTy (docOfHsDocContext doc)))
; return (details, ResTyGADT ls res_ty, fvs) }
PrefixCon {} -> return (PrefixCon arg_tys, ResTyGADT ls res_ty, fvs)}
rnConDeclDetails
:: HsDocContext
-> HsConDetails (LHsType RdrName) (Located [LConDeclField RdrName])
-> RnM (HsConDetails (LHsType Name) (Located [LConDeclField Name]), FreeVars)
rnConDeclDetails doc (PrefixCon tys)
= do { (new_tys, fvs) <- rnLHsTypes doc tys
; return (PrefixCon new_tys, fvs) }
rnConDeclDetails doc (InfixCon ty1 ty2)
= do { (new_ty1, fvs1) <- rnLHsType doc ty1
; (new_ty2, fvs2) <- rnLHsType doc ty2
; return (InfixCon new_ty1 new_ty2, fvs1 `plusFV` fvs2) }
rnConDeclDetails doc (RecCon (L l fields))
= do { (new_fields, fvs) <- rnConDeclFields doc fields
-- No need to check for duplicate fields
-- since that is done by RnNames.extendGlobalRdrEnvRn
; return (RecCon (L l new_fields), fvs) }
-------------------------------------------------
deprecRecSyntax :: ConDecl RdrName -> SDoc
deprecRecSyntax decl
= vcat [ ptext (sLit "Declaration of") <+> quotes (ppr (con_names decl))
<+> ptext (sLit "uses deprecated syntax")
, ptext (sLit "Instead, use the form")
, nest 2 (ppr decl) ] -- Pretty printer uses new form
badRecResTy :: SDoc -> SDoc
badRecResTy doc = ptext (sLit "Malformed constructor signature") $$ doc
{-
*********************************************************
* *
\subsection{Support code for type/data declarations}
* *
*********************************************************
Get the mapping from constructors to fields for this module.
It's convenient to do this after the data type decls have been renamed
-}
extendRecordFieldEnv :: [TyClGroup RdrName] -> [LInstDecl RdrName] -> TcM TcGblEnv
extendRecordFieldEnv tycl_decls inst_decls
= do { tcg_env <- getGblEnv
; field_env' <- foldrM get_con (tcg_field_env tcg_env) all_data_cons
; return (tcg_env { tcg_field_env = field_env' }) }
where
    -- we want to look up:
-- (a) a datatype constructor
-- (b) a record field
-- knowing that they're from this module.
-- lookupLocatedTopBndrRn does this, because it does a lookupGreLocalRn_maybe,
-- which keeps only the local ones.
lookup x = do { x' <- lookupLocatedTopBndrRn x
; return $ unLoc x'}
all_data_cons :: [ConDecl RdrName]
all_data_cons = [con | HsDataDefn { dd_cons = cons } <- all_ty_defs
, L _ con <- cons ]
all_ty_defs = [ defn | L _ (DataDecl { tcdDataDefn = defn })
<- tyClGroupConcat tycl_decls ]
++ map dfid_defn (instDeclDataFamInsts inst_decls)
-- Do not forget associated types!
get_con (ConDecl { con_names = cons, con_details = RecCon flds })
(RecFields env fld_set)
= do { cons' <- mapM lookup cons
; flds' <- mapM lookup (concatMap (cd_fld_names . unLoc)
(unLoc flds))
; let env' = foldl (\e c -> extendNameEnv e c flds') env cons'
fld_set' = extendNameSetList fld_set flds'
; return $ (RecFields env' fld_set') }
get_con _ env = return env
{-
*********************************************************
* *
\subsection{Support code to rename types}
* *
*********************************************************
-}
rnFds :: [Located (FunDep (Located RdrName))]
-> RnM [Located (FunDep (Located Name))]
rnFds fds
= mapM (wrapLocM rn_fds) fds
where
rn_fds (tys1, tys2)
= do { tys1' <- rnHsTyVars tys1
; tys2' <- rnHsTyVars tys2
; return (tys1', tys2') }
rnHsTyVars :: [Located RdrName] -> RnM [Located Name]
rnHsTyVars tvs = mapM rnHsTyVar tvs
rnHsTyVar :: Located RdrName -> RnM (Located Name)
rnHsTyVar (L l tyvar) = do
tyvar' <- lookupOccRn tyvar
return (L l tyvar')
{-
*********************************************************
* *
findSplice
* *
*********************************************************
This code marches down the declarations, looking for the first
Template Haskell splice. As it does so it
a) groups the declarations into a HsGroup
b) runs any top-level quasi-quotes
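For example (an illustrative sketch; 'mkDecls' is a hypothetical splice):
    f x = x
    $(mkDecls)
    g y = y
Everything before the splice (here just 'f') is grouped into the first
HsGroup; the splice together with the remaining declarations ('g') is
returned so the caller can run the splice and carry on afterwards.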
-}
findSplice :: [LHsDecl RdrName] -> RnM (HsGroup RdrName, Maybe (SpliceDecl RdrName, [LHsDecl RdrName]))
findSplice ds = addl emptyRdrGroup ds
addl :: HsGroup RdrName -> [LHsDecl RdrName]
-> RnM (HsGroup RdrName, Maybe (SpliceDecl RdrName, [LHsDecl RdrName]))
-- This stuff reverses the declarations (again) but it doesn't matter
addl gp [] = return (gp, Nothing)
addl gp (L l d : ds) = add gp l d ds
add :: HsGroup RdrName -> SrcSpan -> HsDecl RdrName -> [LHsDecl RdrName]
-> RnM (HsGroup RdrName, Maybe (SpliceDecl RdrName, [LHsDecl RdrName]))
add gp loc (SpliceD splice@(SpliceDecl _ flag)) ds
= do { -- We've found a top-level splice. If it is an *implicit* one
-- (i.e. a naked top level expression)
case flag of
ExplicitSplice -> return ()
ImplicitSplice -> do { th_on <- xoptM Opt_TemplateHaskell
; unless th_on $ setSrcSpan loc $
failWith badImplicitSplice }
; return (gp, Just (splice, ds)) }
where
badImplicitSplice = ptext (sLit "Parse error: naked expression at top level")
$$ ptext (sLit "Perhaps you intended to use TemplateHaskell")
-- Class declarations: pull out the fixity signatures to the top
add gp@(HsGroup {hs_tyclds = ts, hs_fixds = fs}) l (TyClD d) ds
| isClassDecl d
= let fsigs = [ L l f | L l (FixSig f) <- tcdSigs d ] in
addl (gp { hs_tyclds = add_tycld (L l d) ts, hs_fixds = fsigs ++ fs}) ds
| otherwise
= addl (gp { hs_tyclds = add_tycld (L l d) ts }) ds
-- Signatures: fixity sigs go a different place than all others
add gp@(HsGroup {hs_fixds = ts}) l (SigD (FixSig f)) ds
= addl (gp {hs_fixds = L l f : ts}) ds
add gp@(HsGroup {hs_valds = ts}) l (SigD d) ds
= addl (gp {hs_valds = add_sig (L l d) ts}) ds
-- Value declarations: use add_bind
add gp@(HsGroup {hs_valds = ts}) l (ValD d) ds
= addl (gp { hs_valds = add_bind (L l d) ts }) ds
-- Role annotations: added to the TyClGroup
add gp@(HsGroup {hs_tyclds = ts}) l (RoleAnnotD d) ds
= addl (gp { hs_tyclds = add_role_annot (L l d) ts }) ds
-- The rest are routine
add gp@(HsGroup {hs_instds = ts}) l (InstD d) ds
= addl (gp { hs_instds = L l d : ts }) ds
add gp@(HsGroup {hs_derivds = ts}) l (DerivD d) ds
= addl (gp { hs_derivds = L l d : ts }) ds
add gp@(HsGroup {hs_defds = ts}) l (DefD d) ds
= addl (gp { hs_defds = L l d : ts }) ds
add gp@(HsGroup {hs_fords = ts}) l (ForD d) ds
= addl (gp { hs_fords = L l d : ts }) ds
add gp@(HsGroup {hs_warnds = ts}) l (WarningD d) ds
= addl (gp { hs_warnds = L l d : ts }) ds
add gp@(HsGroup {hs_annds = ts}) l (AnnD d) ds
= addl (gp { hs_annds = L l d : ts }) ds
add gp@(HsGroup {hs_ruleds = ts}) l (RuleD d) ds
= addl (gp { hs_ruleds = L l d : ts }) ds
add gp@(HsGroup {hs_vects = ts}) l (VectD d) ds
= addl (gp { hs_vects = L l d : ts }) ds
add gp l (DocD d) ds
= addl (gp { hs_docs = (L l d) : (hs_docs gp) }) ds
add_tycld :: LTyClDecl a -> [TyClGroup a] -> [TyClGroup a]
add_tycld d [] = [TyClGroup { group_tyclds = [d], group_roles = [] }]
add_tycld d (ds@(TyClGroup { group_tyclds = tyclds }):dss)
= ds { group_tyclds = d : tyclds } : dss
add_role_annot :: LRoleAnnotDecl a -> [TyClGroup a] -> [TyClGroup a]
add_role_annot d [] = [TyClGroup { group_tyclds = [], group_roles = [d] }]
add_role_annot d (tycls@(TyClGroup { group_roles = roles }) : rest)
= tycls { group_roles = d : roles } : rest
add_bind :: LHsBind a -> HsValBinds a -> HsValBinds a
add_bind b (ValBindsIn bs sigs) = ValBindsIn (bs `snocBag` b) sigs
add_bind _ (ValBindsOut {}) = panic "RdrHsSyn:add_bind"
add_sig :: LSig a -> HsValBinds a -> HsValBinds a
add_sig s (ValBindsIn bs sigs) = ValBindsIn bs (s:sigs)
add_sig _ (ValBindsOut {}) = panic "RdrHsSyn:add_sig"
|
urbanslug/ghc
|
compiler/rename/RnSource.hs
|
bsd-3-clause
| 69,789
| 5
| 28
| 21,713
| 14,219
| 7,547
| 6,672
| 848
| 4
|
module Data.Stream (Stream (..)) where
infixr 5 :|
data Stream a = a :| Stream a
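-- An illustrative (hypothetical) use: the infinite stream of naturals
--   nats :: Stream Integer
--   nats = go 0 where go n = n :| go (n + 1)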
|
sonyandy/wart
|
src/Data/Stream.hs
|
bsd-3-clause
| 83
| 0
| 7
| 18
| 34
| 21
| 13
| 3
| 0
|
module GameSession
( createSession
, joinSession
, leaveSession
, maxPlayers ) where
import Data.Time.Clock (UTCTime)
import DB
import Types
maxPlayers :: Int
maxPlayers = 6
removeItem :: Eq a => a -> [a] -> [a]
removeItem _ [] = []
removeItem r (x:xs) | r == x = removeItem r xs
| otherwise = x : removeItem r xs
leaveSession :: User -> Session -> Session
leaveSession u s =
case userRole u of
Just r ->
Session (sessionOpen s + 1)
(removeItem u $ sessionUsers s)
(r : sessionRoles s)
(sessionTime s)
Nothing -> error "No Role"
joinSession :: User -> Session -> UTCTime -> Session
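-- The UTCTime argument is supplied by partial application: the Session
-- value built below is still awaiting its final (time) field.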
joinSession u s =
case userRole u of
Just r ->
Session (sessionOpen s - 1)
(u : sessionUsers s)
(removeItem r $ sessionRoles s)
Nothing -> error "No Role"
createSession :: [Role] -> UTCTime -> Session
createSession rs = Session maxPlayers [] rs
|
yohad/OWTeamQueue
|
src/GameSession.hs
|
bsd-3-clause
| 1,041
| 0
| 11
| 363
| 359
| 181
| 178
| 34
| 2
|
{-# LANGUAGE TupleSections #-}
module NanoUtils.Set
(
randCoprimeFactors
, randPartition
, randFixedPartition
, randPicks
, randPick
) where
import Control.Monad.Random
import Control.Monad (liftM)
import Data.Int
import Data.List (partition)
import NanoUtils.List (sortOn,removeAt)
import qualified Data.Set as S
import qualified Data.Map as M
import Data.HashTable (hashString)
hashSet :: Show a => S.Set a -> Int32
hashSet = hashString.concat.map show.S.toList
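-- | Randomly distribute the elements of the set into at most @n@ bins
-- (bins that receive no elements are omitted from the result).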
randPartition :: (RandomGen g,Ord a) => Int -> S.Set a -> Rand g [S.Set a]
randPartition n s = do
binMap <- mapM (\x -> getRandomR (1,n) >>= \r -> return (r,[x])).S.toList $ s
return.map S.fromList.M.elems.M.fromListWith (++) $ binMap
-- | Partition the first @len@ elements of the list into randomly chosen
-- chunks of size @n@ (the final chunk may be smaller).
randFixedPartition :: RandomGen g => Int -> Int -> [a] -> Rand g [[a]]
randFixedPartition 0 _ _ = return []
randFixedPartition len n xs = do
(xs',p) <- randPicks len n' xs
liftM (p:) $ randFixedPartition (len-n') n xs'
where n' = if n > len then len else n
randPicks :: RandomGen g => Int -> Int -> [a] -> Rand g ([a],[a])
randPicks _ 0 xs = return (xs,[])
randPicks len num xs = do
(r,xs') <- randPick len xs
liftM (fmap (r:)) $ randPicks (len-1) (num-1) xs'
randPick :: RandomGen g => Int -> [a] -> Rand g (a,[a])
randPick len xs = do
num <- getRandomR (0,len-1)
return.removeAt num $ xs
randAnnotate :: Monad m => m r -> [a] -> m [(r, a)]
randAnnotate m = mapM (\x -> m >>= \r -> return (r, x))
randCoprimeFactors :: (RandomGen g,Ord a) =>
S.Set a
-> S.Set a
-> Rand g (Maybe (S.Set a,S.Set a))
randCoprimeFactors iden d = do
let diff = S.difference iden d
case S.size diff of
1 -> return Nothing
_ -> do
parts <- randPartition 2 diff
return (Just $ addDiv $ balance parts)
where balance ps
| length ps == 1 = moveOne S.empty (head ps)
| otherwise = (head ps,(head.tail) ps)
moveOne s1 s2 = let (a,s2') = S.deleteFindMin s2
in (S.insert a s1,s2')
addDiv (a,b) = (S.union d a,S.union d b)
|
nanonaren/NanoUtils
|
NanoUtils/Set.hs
|
bsd-3-clause
| 2,059
| 4
| 17
| 538
| 953
| 495
| 458
| 55
| 2
|
{-# LANGUAGE BangPatterns #-}
module Pipes.Illumina where
import Data.Bits
import Data.Word
import Data.Int
import Foreign.Ptr
import Foreign.Storable
import Foreign.ForeignPtr.Unsafe
import Foreign.ForeignPtr.Safe
import Foreign.Marshal.Alloc
import Data.ByteString.Internal
import Control.Applicative
import Control.Monad
import System.IO
import qualified Data.ByteString as B
import Data.ByteString.Unsafe
import Pipes
import Pipes.Bgzf
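-- A note on the BCL byte layout assumed by the decoder below (inferred
-- from the code, not stated in the original source): a 0 byte means "no
-- call", emitted as base 'N' (78) with quality '#' (35); otherwise the
-- low two bits select the base (0=A, 1=C, 2=G, 3=T) and the remaining
-- bits, offset by 33, give the Phred+33 quality character.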
bclBgzfProducer :: MonadIO m => [Handle] -> Producer (ByteString, ByteString) m ()
bclBgzfProducer [] = return ()
bclBgzfProducer hdls = start where
start = do
mblocks <- getBlocks
case mblocks of
Nothing -> return ()
Just bs -> do
b2fPipe $ map (B.drop 4) bs
go
go = do
mblocks <- getBlocks
case mblocks of
Nothing -> return ()
Just bs -> do
b2fPipe bs
go
getBlocks = do
mbchunks <- forM hdls $ \h -> do
header <- liftIO $ B.hGet h 18
case parseHeader header of
28 -> return Nothing
blocklen -> do
chunk <- liftIO $ B.hGet h (blocklen - 18)
return $ Just chunk
return $ map inflateBlock <$> sequence mbchunks
cycs = length hdls
b2fPipe chunks = do
let l = B.length $ head chunks
forM_ [0..l-1] $ \i -> do
r <- liftIO $ do
seqfp <- mallocByteString cycs
qualfp <- mallocByteString cycs
let seqp :: Ptr Word8
seqp = unsafeForeignPtrToPtr seqfp
qualp = unsafeForeignPtrToPtr qualfp
forM_ (zip chunks [0..]) $ \(c,j) -> do
case unsafeIndex c i of
0 -> do
pokeElemOff seqp j 78
pokeElemOff qualp j 35
w -> do
pokeElemOff seqp j $ case w .&. 3 of
0 -> 65
1 -> 67
2 -> 71
3 -> 84
pokeElemOff qualp j $ 33 + w `shiftR` 2
touchForeignPtr seqfp
touchForeignPtr qualfp
let !sq = PS seqfp 0 cycs
!qual = PS qualfp 0 cycs
return $! (sq, qual)
yield r
filterProducer :: Handle -> Producer Bool IO ()
filterProducer hdl = go where
numclusters :: IO Int32
numclusters = do
buf <- mallocBytes 12
12 <- hGetBufSome hdl buf 12
cnt <- peek (buf `plusPtr` 8)
free buf
return cnt
go = do
clusts <- liftIO $ fromIntegral <$> numclusters
buf <- liftIO $ mallocBytes 1
replicateM_ clusts $ do
1 <- liftIO $ hGetBufSome hdl buf 1
        w <- liftIO (peek buf :: IO Word8)
        -- Bool has no Storable instance; read the raw byte instead and
        -- treat any nonzero value as True (the usual C-bool convention).
        yield (w /= 0)
liftIO $ free buf
locsProducer :: Handle -> Producer (Float, Float) IO ()
locsProducer hdl = go where
numclusters :: IO Int32
numclusters = do
buf <- mallocBytes 12
12 <- hGetBufSome hdl buf 12
cnt <- peek (buf `plusPtr` 8)
free buf
return cnt
go = do
clusts <- liftIO $ fromIntegral <$> numclusters
buf <- liftIO $ mallocBytes 8
replicateM_ clusts $ do
8 <- liftIO $ hGetBuf hdl buf 8
x <- liftIO $ peek buf
y <- liftIO $ peekElemOff buf 1
yield (x, y)
liftIO $ free buf
bciProducer :: Handle -> Producer (Int, Int) IO ()
bciProducer hdl = go where
go = do
buf <- liftIO $ (mallocBytes 8 :: IO (Ptr Int32))
let go' = do
r <- liftIO $ hGetBuf hdl buf 8
case r of
8 -> do
a <- liftIO $ peek buf
b <- liftIO $ peekElemOff buf 1
yield (fromIntegral a, fromIntegral b)
go'
0 -> liftIO $ free buf
go'
|
rcallahan/pipes-illumina
|
Pipes/Illumina.hs
|
bsd-3-clause
| 4,138
| 4
| 23
| 1,790
| 1,239
| 602
| 637
| 121
| 8
|
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE OverloadedStrings #-}
module Duckling.AmountOfMoney.EN.AU.Corpus
( allExamples
, negativeExamples
) where
import Data.String
import Data.Text (Text)
import Prelude
import Duckling.AmountOfMoney.Types
import Duckling.Testing.Types
allExamples :: [Example]
allExamples = concat
[ examples (simple AUD 1000)
[ "a grand"
, "1 grand"
]
, examples (simple AUD 10000)
[ "10 grand"
, "two hundred thousand nickels"
]
, examples (simple AUD 1)
[ "four quarters"
, "ten dimes"
, "twenty nickels"
]
, examples (simple AUD 0.1)
[ "dime"
, "a dime"
, "two nickels"
]
, examples (simple AUD 0.25)
[ "quarter"
, "a quarter"
, "five nickels"
]
, examples (simple AUD 0.05)
[ "nickel"
, "a nickel"
]
]
negativeExamples :: [Text]
negativeExamples =
[ "grand"
]
|
facebookincubator/duckling
|
Duckling/AmountOfMoney/EN/AU/Corpus.hs
|
bsd-3-clause
| 1,264
| 0
| 9
| 456
| 226
| 134
| 92
| 35
| 1
|
{-# LANGUAGE OverloadedStrings #-}
module IrcScanner.KeywordRulesParser(parseKwFile,saveKwFile) where
import Text.ParserCombinators.ReadP
import IrcScanner.Types
import Text.Read(readMaybe)
import qualified Data.Text as T
import Test.Hspec
import Control.Monad.Trans.Either
import Control.Monad.State as S
import Control.Concurrent.MVar
import System.Directory(renameFile)
import Data.Text.IO as T(writeFile)
--import Control.Monad(sequence)
-- ex file
-- Nomiccoin:RegexMatcher:/\bnomiccoin\b|\ba\.?n\.?o\.?n\b/
-- Cool:RegexMatcher:/\bcool\b/i
-- Open/Closed Phase:RegexMatcher:/\b(open|closed?)\s+phase\b/i
--accepts any character following a backslash
backSlashChar :: ReadP String
backSlashChar =
do
_ <- char '\\'
c <- satisfy $ const True --read any character
return $ "\\"++[c]
--allows for the backslashing of a char:
-- if the match is "\<char>", then "<char>" is returned
-- if "\<other char>", then "\<other char>" is returned
-- if <char>, fails
-- if <other char>, "<other char>" is returned
backSlashableSep :: Char -> ReadP String
backSlashableSep m = (choice [string ("\\"++ [m]) >> return [m],
backSlashChar,
(satisfy (\c-> c /= m))>>= return . (: [])
])
field :: ReadP T.Text
field =
do
f <- many1 $ backSlashableSep ':'
choice [char ':' >> return (), eof]
return $ T.pack (foldr (++) [] f)
matchType :: ReadP MatcherType
matchType =
do
s <- field
case (readMaybe $ T.unpack s) of
Nothing -> pfail
Just x -> return x
matcher :: MatcherType -> ReadP Matcher
matcher mt =
do
pat <- field
case mkMatcher mt pat of
Left _ -> pfail
Right x -> return x
--either reads its value or returns an error message specified by errorFunc
maybeRead :: ReadP x -> (T.Text -> T.Text) -> EitherT T.Text (State T.Text) x
maybeRead readp errorFunc =
do
s <- S.get
v <- return $ readP_to_S readp (T.unpack s)
case v of
(x,y) : _ -> put (T.pack y) >> return x
_ -> left $ errorFunc s
kwLine :: EitherT T.Text (State T.Text) Index
kwLine =
do
dn <- maybeRead field $ const "Display name must have at least one char"
mt <- maybeRead matchType (T.append "Can't interpret matchtype: ")
m <- maybeRead (matcher mt) (T.append "Can't parse pattern: ")
maybeRead eof $ (T.append "Extra characters at end of line: ")
return $ Index dn m
-- kwFile :: [T.Text] -> [Either T.Text Index]
-- kwFile lns = fmap (evalState $ runEitherT kwLine) lns
parseKwFile :: [T.Text] -> Either [T.Text] [Index]
parseKwFile lns = listEitherToEitherLists (fmap (evalState $ runEitherT kwLine) lns ) ""
saveKwFile :: T.Text -> IConfig -> IO ()
saveKwFile contents ic =
let
fileName = (_crulesFile ic)
bakFileName = fileName ++ ".bak"
lock = (_ckwFileLock ic)
in
withMVar lock $ const $ do
renameFile fileName bakFileName
T.writeFile fileName contents
--If there are any Lefts, returns a list of the Lefts; rows that are Right in that
-- case contribute the placeholder in their position.
--Otherwise returns a list of the Rights.
listEitherToEitherLists :: [Either a b] -> a -> Either [a] [b]
listEitherToEitherLists lns placeholder = case doit lns (Right []) of
Left x -> Left (reverse x)
Right y -> Right (reverse y)
where
doit (Right x : xs) (Right ys) = doit xs (Right (x : ys))
doit (Left y : ys) (Right xs) = doit ys (Left $ y : (fmap (const placeholder) xs))
doit (Right _ : xs) (Left ys) = doit xs (Left $ placeholder : ys)
doit (Left y : xs) (Left ys) = doit xs $ Left (y : ys)
doit [] r = r
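-- Illustrative sketch (not part of the original source), tracing the fold above
-- by hand with a placeholder of "x":
--   listEitherToEitherLists [Right 1, Right 2, Left "e"] "x"  ==  Left ["x","x","e"]
--   listEitherToEitherLists [Right 1, Right 2 :: Either String Int] "x"  ==  Right [1,2]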
_test :: IO ()
_test =
hspec $ do
describe "kwLine" $ do
it "the happy path" $ do
runState (runEitherT kwLine ) "Nomiccoin:RegexMatcher:/\\bnomiccoin\\b|\\ba\\.?n\\.?o\\.?n\\b/"
`shouldSatisfy`
(\(v,_) ->
case v of
(Right x) -> (show x) == "Index {_idisplayName = \"Nomiccoin\", _imatcher = Regex \"\\\\bnomiccoin\\\\b|\\\\ba\\\\.?n\\\\.?o\\\\.?n\\\\b\"}"
_ -> False
)
describe "kwFile" $ do
it "the happy path" $ do
(show $ parseKwFile ["Nomiccoin:RegexMatcher:/\\bnomiccoin\\b|\\ba\\.?n\\.?o\\.?n\\b/","Cool:RegexMatcher:/\\bcool\\b/i"])
`shouldBe`
"Right [Index {_idisplayName = \"Cool\", _imatcher = Regex \"\\\\bcool\\\\b\"},Index {_idisplayName = \"Nomiccoin\", _imatcher = Regex \"\\\\bnomiccoin\\\\b|\\\\ba\\\\.?n\\\\.?o\\\\.?n\\\\b\"}]"
|
redfish64/IrcScanner
|
src/IrcScanner/KeywordRulesParser.hs
|
bsd-3-clause
| 4,524
| 0
| 22
| 1,057
| 1,307
| 664
| 643
| 94
| 6
|
-- | This package provides an OpenGL abstraction that targets either
-- WebGL (with GHCJS) or native OpenGL bindings (with GHC). Only the
-- context setup code differs; the same shader & rendering code can be
-- used in both cases.
--
-- The library is deliberately low-level, staying as close to the
-- original APIs as possible while hiding their differences. If you
-- need access to features not available on both targets, see
-- 'Graphics.BothGL.Internal'
module Graphics.BothGL (
module Graphics.BothGL.GL
, module Graphics.BothGL.Types
) where
import Graphics.BothGL.Types
import Graphics.BothGL.GL
|
bergey/bothgl
|
src/Graphics/BothGL.hs
|
bsd-3-clause
| 617
| 0
| 5
| 102
| 43
| 32
| 11
| 5
| 0
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
-- | 'Lucid.HtmlT' inspired monad for creating 'ReactElement's
module Glazier.React.ReactElement
( ReactElement -- constructor is not exported
, mkBranchElement
, mkLeafElement
, rawTextElement
, mkCombinedElements
) where
import Glazier.React.ReactElement.Internal
import qualified JavaScript.Array as JA
import JS.Data
-- | Create a react element (with children) from a HashMap of properties
mkBranchElement :: JSVal -> [(JSString, JSVal)] -> [ReactElement] -> IO ReactElement
mkBranchElement n props xs = do
o <- object_fromEntries (f <$> props)
js_mkBranchElement n o (JA.fromList $ toJS <$> xs)
where
f (a, b) = (toJS a, b)
-- | Create a react element (with no children) from a HashMap of properties
mkLeafElement :: JSVal -> [(JSString, JSVal)] -> IO ReactElement
mkLeafElement n props = do
o <- object_fromEntries (f <$> props)
js_mkLeafElement n o
where
f (a, b) = (toJS a, b)
-- | Not an IO action because JSString is immutable
rawTextElement :: JSString -> ReactElement
rawTextElement = js_rawTextElement
-- | 'Glazier.React.ReactDOM.renderDOM' only allows a single top most element.
-- Provides a handy function to wrap a list of ReactElements inside a 'div' if required.
-- If there is only one element in the list, then nothing is changed.
mkCombinedElements :: [ReactElement] -> IO ReactElement
mkCombinedElements xs = js_mkCombinedElements (JA.fromList $ toJS <$> xs)
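-- Illustrative sketch (not part of the original module): combining several
-- already-built elements into a single renderable root. The argument names are
-- hypothetical; any '[ReactElement]' works.
_combineForRender :: ReactElement -> ReactElement -> ReactElement -> IO ReactElement
_combineForRender header body footer = mkCombinedElements [header, body, footer]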
#ifdef __GHCJS__
-- | This is an IO action because even if the same args are used,
-- a different ReactElement may be created, because JSVal
-- and JSArray are mutable.
foreign import javascript unsafe
"$r = hgr$React().createElement($1, $2, $3);"
js_mkBranchElement :: JSVal -> JSObject -> JA.JSArray -> IO ReactElement
foreign import javascript unsafe
"$r = hgr$React().createElement($1, $2);"
js_mkLeafElement :: JSVal -> JSObject -> IO ReactElement
foreign import javascript unsafe
"$r = $1;"
js_rawTextElement :: JSString -> ReactElement
-- | Wrap a list of ReactElements inside a 'div'
foreign import javascript unsafe
"$r = hgr$mkCombinedElements($1);"
js_mkCombinedElements :: JA.JSArray -> IO ReactElement
#else
-- | This is an IO action because even if the same args are used,
-- a different ReactElement may be created, because JSVal
-- and JSArray are mutable.
js_mkBranchElement :: JSVal -> JSObject -> JA.JSArray -> IO ReactElement
js_mkBranchElement _ _ _ = pure (ReactElement nullRef)
js_mkLeafElement :: JSVal -> JSObject -> IO ReactElement
js_mkLeafElement _ _ = pure (ReactElement nullRef)
js_rawTextElement :: JSString -> ReactElement
js_rawTextElement _ = ReactElement nullRef
js_mkCombinedElements :: JA.JSArray -> IO ReactElement
js_mkCombinedElements _ = pure (ReactElement nullRef)
#endif
|
louispan/glazier-react
|
src/Glazier/React/ReactElement.hs
|
bsd-3-clause
| 2,928
| 13
| 12
| 511
| 410
| 229
| 181
| 35
| 1
|
{-# LANGUAGE DeriveDataTypeable #-}
module Main where
import Calc.Calc
import Calc.Compiler
import System.Console.CmdArgs
fib :: Num a => [a]
fib = 0 : 1 : zipWith (+) fib (tail fib)
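-- Illustrative check (not in the original file): the first few values of the
-- lazily defined sequence above.
_fibExample :: [Integer]
_fibExample = take 8 fib   -- == [0,1,1,2,3,5,8,13]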
fibProg :: Int -> Prog
fibProg n = Compute (fib !! n)
data Fib = Fib
{ n :: Int
} deriving (Data, Typeable, Show)
defaultArgs
= cmdArgsMode $ Fib
{ n = def &= name "n" &= help "The number in the sequence to compute"
} &= summary "A fibonacci calculator using a Haskell eDSL"
main = do
(Fib n) <- cmdArgsRun defaultArgs
x <- run (fibProg n)
putStrLn $ "fib " ++ show n ++ " = " ++ show x
|
jeannekamikaze/calc
|
Calc/Main.hs
|
bsd-3-clause
| 613
| 0
| 11
| 156
| 221
| 115
| 106
| 20
| 1
|
module Definition where
import Prelude hiding (Applicative, Eq, Functor, Show, fmap, pure, show, (*>), (/=), (<$), (<*>),
(==))
class Show a where
-- Minimum complete definition: show
show :: a -> String
class Eq a where
-- Minimum complete definition: (==) or (/=)
(==) :: a -> a -> Bool
lhs == rhs = not $ lhs /= rhs
(/=) :: a -> a -> Bool
lhs /= rhs = not $ lhs == rhs
-- N.B., often with type classes such as these they are defined with /laws/ that you are expected to
-- obey when writing instances. I have not listed these laws here; if you're interested you may look
-- up the documentation for them.
class Functor f where
-- Minimum complete definition: fmap
-- 'fmap' takes a function and a functor, and applies the function to the element inside the
-- functor
fmap :: (a -> b) -> f a -> f b
-- Infix binary operator - equivalent to 'fmap'
-- I often call it "dollar wings"
(<$>) :: (a -> b) -> f a -> f b
(<$>) = fmap
-- Similar to dollar wings, but rather than taking a function it just throws away the value
-- inside the functor and sets it to 'a'
(<$) :: a -> f b -> f a
(<$) = fmap . const
class Functor f => Applicative f where
-- Minimum complete definition: pure & <*>
-- Lifts a value into the applicative (container)
pure :: a -> f a
-- Apply the function inside the first applicative to the value inside the second
(<*>) :: f (a -> b) -> f a -> f b
-- Sequences two applicatives, i.e., perform the action of the first, then the second and return
-- the result from the second
(*>) :: f a -> f b -> f b
fA *> fB = (id <$ fA) <*> fB
-- Sequences two applicatives, but return the result from the first
(<*) :: f a -> f b -> f a
fA <* fB = fmap const fA <*> fB
class Applicative m => Monad m where
-- Minimum complete definition: >>=
return :: a -> m a
return = pure
-- Called "bind"
(>>=) :: m a -> (a -> m b) -> m b
-- Called "sequence"
(>>) :: m a -> m b -> m b
(>>) = (*>)
class Applicative f => Alternative f where
-- Minimum complete definition: empty & <|>
-- The identity of '<|>'
empty :: f a
-- An associative binary operation
(<|>) :: f a -> f a -> f a
-- One or more.
some :: f a -> f [a]
some v = some_v
where
many_v = some_v <|> pure []
some_v = (fmap (:) v) <*> many_v
-- Zero or more.
many :: f a -> f [a]
many v = many_v
where
many_v = some_v <|> pure []
some_v = (fmap (:) v) <*> many_v
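-- Illustrative sketch (not part of the original file): with a hypothetical
-- Maybe instance of these classes (none are defined in this module),
--   many Nothing   would evaluate to Just []   (zero occurrences)
--   some Nothing   would evaluate to Nothing   (at least one occurrence required)
-- while both 'some (Just x)' and 'many (Just x)' diverge, since a Maybe "parser"
-- never consumes input and the mutual recursion above never terminates.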
|
hjwylde/haskell-type-classes-workshop
|
src/Definition.hs
|
bsd-3-clause
| 2,628
| 0
| 11
| 804
| 695
| 382
| 313
| 40
| 0
|
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
-- | Stability: Experimental
module Mahjong.Player where
import Mahjong.Class
import Mahjong.Tile
-- | Tiles can only be stolen to make a Meld, and once that Meld is formed using
-- a stolen tile it cannot be changed for the duration of the hand. Since
-- visualization is something to consider, we indicate which tile was stolen
-- by its position relative to the player that stole it. For instance, if the
-- player that steals is North and the player that is stolen from is South,
-- then in a Run the stolen tile will be the second one in the Meld.
-- Functionally this serves no purpose other than a rule of thumb for visual
-- clients.
data Steal a
= Steal PlayerSeat (Meld a)
deriving (Eq, Show)
instance Functor Steal where
fmap fn (Steal s a) = Steal s (fmap fn a)
instance Tile a => Tile (Steal a) where
honor (Steal _ a) = honor a
terminal (Steal _ a) = terminal a
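-- Illustrative sketch (not part of the original module, names partly assumed):
-- a run completed with a tile stolen from the player seated South could be
-- recorded as
--   Steal (Seat South) someRunMeld
-- where 'South' is assumed to be a 'Wind' constructor and 'someRunMeld' a
-- hypothetical 'Meld a' value from Mahjong.Tile.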
data Player a
= Player
{ score :: Integer
, hand :: PlayerHand a
, stolenMelds :: [Steal a]
, discardPile :: [a]
, tenpaiState :: Tenpai }
deriving (Show)
defaultPlayer :: Player a
defaultPlayer = Player 25000 mempty [] [] NotInTenpai
newtype PlayerHand a
= PlayerHand [a]
deriving (Monoid, Show)
addToHand :: PlayerHand a -> a -> PlayerHand a
addToHand (PlayerHand as) a
= PlayerHand (a : as)
newtype PlayerSeat
= Seat Wind
deriving (Eq, Ord, Enum, Bounded, Show, Cycle)
-- | Player property sum type that allows us to not have to calculate whether
-- they are in tenpai every round. Some special conditions may have to be
-- checked each turn, but a pattern matching case will suffice.
data Tenpai
= NotInTenpai
-- ^ Normal state for the hand to be in.
| InTenpai
-- ^ One tile away from winning and no special conditions apply.
| InRiichi
-- ^ Same as InTenpai, but the player has decided to bet points for an extra
-- yaku.
| InFuriten
-- ^ The player is in a state of tenpai, but cannot win on anything other
-- than a self draw due to the player discarding a tile that would currently
-- let them win. This rule can also be found under the name of 'Sacred
-- Discard'.
deriving (Eq, Show)
|
TakSuyu/mahsjong
|
src/Mahjong/Player.hs
|
mit
| 2,234
| 0
| 10
| 504
| 381
| 215
| 166
| 37
| 1
|
module Main where
import Game.Minecraft.Map
import System.Environment
import System.Exit
showHelp :: IO ()
showHelp = do
progName <- getProgName
die $ "Usage: " ++ progName ++ " INPUT_REGION_DIR OUTPUT.png [Y_SLICING_HEIHGT]"
buildAndSave :: FilePath -> FilePath -> Int -> IO ()
buildAndSave inDir outDir h = do
print "loading resources..."
rs <- loadRegionsP h inDir
buildAndWritePng rs outDir True
print "done"
-- main
main :: IO ()
main = do
argv <- getArgs
case length argv of
2 -> buildAndSave (head argv) (last argv) 255
3 -> buildAndSave (head argv) (argv !! 1) (read $ last argv)
_ -> showHelp
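-- Illustrative invocations (program and path names hypothetical, not part of
-- the original file):
--   hsMCMap ./world/region out.png      -- two args: slice height defaults to 255
--   hsMCMap ./world/region out.png 64   -- three args: slice at y = 64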
|
DFLY-Entertainment/hsMCMap
|
exc-src/Main.hs
|
gpl-3.0
| 727
| 0
| 13
| 224
| 225
| 110
| 115
| 21
| 3
|
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE RelaxedPolyRec #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE LambdaCase #-}
module OpenCog.Lojban.Syntax where
import Prelude hiding (id,(.),(<*>),(<$>),(*>),(<*))
import qualified Data.Map as M
import qualified Data.List.Split as S
import Data.List (nub,partition)
import Data.Foldable (find)
import Data.Maybe (fromJust,listToMaybe)
import Data.List (isInfixOf,isPrefixOf,(\\))
import Data.Hashable
import System.Random
import Control.Category
import Control.Arrow hiding (left,right)
import Control.Applicative hiding (many,some,optional)
import Control.Monad.RWS
import Control.Monad.Trans.Class
import Iso
import Syntax hiding (SynIso,Syntax,text)
import Lojban
import Lojban.Syntax.Util
import OpenCog.AtomSpace (Atom(..),TruthVal(..),noTv,stv,atomFold,nodeName,atomElem,atomMap)
import OpenCog.Lojban.Util
import OpenCog.Lojban.Syntax.Types
import OpenCog.Lojban.Syntax.Util
import OpenCog.Lojban.Syntax.AtomUtil
import Debug.Trace
mytrace a = traceShow a a
mytrace2 s a = trace (s ++(' ':show a)) a
--ReadMe:
--It is recommended to first take a look at Syntax.Types
--http://wiki.opencog.org/w/Claims_and_contexts#An_Example_of_Moderately_Complex_Semantic_Embedding
--TODO Parser
-- FIX: pa + ki'o
-- da + quantifier
-- ZAhO explicit representation
-- vo'a
-- make statements explicit
-- ge'e cai on logical connectives and else?
-- Does SEI have implicit args?
-- What if you have SEI and UI
-- argslots
-- Synonyms for tenses and gismu
-- ki
-- references
-- ConceptAnd/OrLink NotLink instead of boolean and/or
--TODO Printer
-- da,de,di differentiation
--List of Flags
--NA_FLAG keeps track of the current NA state
--FEhE switches between temporal/spatial interval_propertys
lojban = finalCheck <<< text
finalCheck :: SynIso a a
finalCheck = Iso f g where
f a = do
text <- gets getText
if text == ""
then pure a
else lift $ Left $ "Incomplete parse: " ++ text
g = pure
{-
text = handle
<<< many (selmaho "NAI")
&&& many (cmene <+> indicators)
&&& optional (joik_jek)
&&& text_1
text_1 :: Syntax Atom
text_1 = handle
<<< optional (many (sepSelmaho "I")
-- &&> optional joik_jek
-- &&& optional (optional stag
-- <&& sepSelmaho "BO"
-- )
-- )
<+>
many (sepSelmaho "NIhO")
)
&&& optional paragraphs
-}
text :: Syntax Atom
text = (text_1 <+> (filterDummy . handleFREEs . addfst dummy . frees))
where filterDummy = mkIso f id where
f (LL ls) = cLL (filter (\a -> not $ atomAny (== dummy) a) ls)
dummy = cCN "dummy" noTv
text_1 :: Syntax Atom
text_1 = paragraphs
manySep :: Syntax () -> Syntax ()
manySep iso = ((manySep iso <+> id) . iso <+> insert ())
paragraphs :: Syntax Atom
paragraphs = listl . cons . addfst (cAN "paragraphs") . cons
<<< ((handleFREEs . commute ||| id) . ifJustA
<<< optional (sepSelmaho "NIhO" &&> frees)
&&& paragraph
)
&&& many (handleFREEs . commute <<< sepSelmaho "NIhO"
&&> frees
&&& paragraph)
paragraph :: Syntax Atom
paragraph = listl . cons . addfst (cAN "paragraph") . cons
<<< (handleFREEs . commute
<<< optSelmaho "I"
&&> frees
&&& (statement <+> fragment)
)
&&& many (handleFREEs . commute <<< sepSelmaho "I"
&&> frees
&&& (statement <+> fragment)
)
statement :: Syntax Atom
statement = handleMa <<< statement'
where handleMa :: SynIso Atom Atom
handleMa = Iso f g where
f a = do
let x = atomFold (\r a -> r || isMa a) False a
list = "$var" : ((('$':).show) <$> [0..10])
isMa (Node "VariableNode" x noTv) = x `notElem` list
isMa _ = False
inter = cCN "Interrogative" noTv
interinst <- apply instanceOf inter
let na = cInhL noTv interinst (cSSScL noTv [a])
pure (x ? na $ a)
g (InhL _ _ (SSScL [a])) = pure a
g a = pure a
statement' :: Syntax Atom
statement' = listl . cons . addfst (cAN "statement") . cons
<<< (statement_1 <+> (prenex &&> statement)) &&& gsAtoms
statement_1 :: Syntax Atom
statement_1 = isoFoldl handleCon2
<<< statement_2 &&& many (sepSelmaho "I"
&&>
(just.joik_jek &&& insert Nothing)
&&&
                                           statement_2 --FIXME officially optional
)
statement_2 :: Syntax Atom
statement_2 = (handleCon2 ||| id) . ifJustB
<<< statement_3 &&& optional (sepSelmaho "I"
&&> (just.joik_jek &&& optional stag)
&&& sepSelmaho "BO"
                                    &&> statement_2 --FIXME officially optional
)
statement_3 :: Syntax Atom
statement_3 = sentence
-- <+> optional tag &&& sepSelmaho "TUhE" &&> text_1 <&&& optSelmaho "TUhU"
--FIXME
fragment :: Syntax Atom
fragment = listl . cons . addfst (cAN "fragment") . gsAtoms
-- <<< ek
-- <+> gihek
<<< (toState 1 . tolist1 . quantifier)
-- <+> selmaho "NA"
<+> (termsToState <<< terms <&& optSelmaho "VAU")
<+> prenex
<+> (toState 1 . tolist1 . listl <<< relative_clauses)
<+> (termsToState <<< linkargs)
<+> (termsToState <<< links)
where termsToState = toState 1 . tolist1 . listl . mapIso (rmsndAny Nothing)
--FIXME
prenex :: Syntax ()
prenex = toState 1 . tolist1 . listl . tolist2 . addfst (cAN "ZOhU")
. listl . mapIso (rmsndAny Nothing)
<<< terms <&& sepSelmaho "ZOhU"
sentence :: Syntax Atom
sentence = withCleanState sentence'
sentence' :: Syntax Atom
sentence' = handleCTX . handleBTCT
<<< ((terms <&& optSelmaho "CU") <+> insert []) &&& bridi_tail
where handleCTX = Iso f g where
f a = do
atoms <- gets sAtoms
now <- gets sNow
case now of
CN "NOCTX" -> pure (cSL ((cAN "RelativePhrase"):a:atoms))
_ -> do
ctxs <- filter (/= now) <$> gets sCtx
setAtoms []
setCtx [now]
ctx <- if ctxs /= []
then pure $ cSL ctxs
else do
name <- randName ""
pure $ cSL [cCN name noTv]
pure $ cCtxL noTv ctx (cSL (a:atoms))
g (CtxL (SL ctxs) (SL (a:atoms))) = do
setCtx ctxs
setAtoms atoms
pure a
g (SL (_:a:atoms)) = do --Check that now is "NOCTX"???
setAtoms atoms
pure a
subsentence :: Syntax Atom
subsentence = sentence
<+> (prenex &&> subsentence)
handleBTCT :: SynIso ([Sumti],BTCT) Atom
handleBTCT = Iso f g where
f (sumti1,CTLeaf (selb,sumti2)) = apply handleBRIDI (selb,sumti1++sumti2)
f (sumti1,CTNode con (st1,st2)) = do
a1 <- apply handleBTCT (sumti1,st1)
a2 <- apply handleBTCT (sumti1,st2)
apply handleCon (con,(a1,a2))
g = error "Not implemented g handleBTCT"
-- Isoier version:
-- (handleBRIDI . manageLeaf. inverse ctLeaf
-- <+> handleCon . second (handleBTCT *** handleBTCT) . manageNode . inverse ctNode)
-- manageLeaf = Iso f g where
-- f (s1,(s,s2)) = (s,s1++s2)
-- g (s1,s) = ([],(s,s1)) --Leafe it and split later
handleBRIDI :: SynIso BT Atom
handleBRIDI = handleNA
-- frames
. handleSelbriSumtis
-- (Selbri,sumti)
handleNA :: SynIso Atom Atom
handleNA = rmFlagIso "NA_FLAG" . iso . second (getFlagValueIso "NA_FLAG") . unit
<+> id
where iso = Iso f g
f (a,n) = apply _eval (cGPN n lowTv,[a])
g (EvalL _ (GPN n) a) = pure (a,n)
handleSelbriSumtis :: SynIso (Selbri,[Sumti]) Atom
handleSelbriSumtis = merge
. (_frames *** handleModalSumti)
. reorder
. second (splitSumti
. (mapIso handleJAI . ifFlag "JAI" <+> id)
. handleTAG)
where splitSumti :: SynIso [(Atom,Tag)] ([(Atom,Tag)],[Atom])
splitSumti = mkIso f g where
f s = f' ([],[]) s
g (ls,rs) = ls ++ map (\a -> (a,"ModalSumti")) rs
f' (ls,rs) [] = (ls,rs)
f' (ls,rs) ((a,"ModalSumti"):xs) = f' (ls,a:rs) xs
f' (ls,rs) (a:xs) = f' (a:ls,rs) xs
reorder = mkIso f g where
f ((tv,s),(ls,rs)) = (((tv,s),ls),(s,rs))
g (((tv,s),ls),(_,rs)) = ((tv,s),(ls,rs))
merge :: SynIso (Atom,Maybe Atom) Atom
merge = Iso f g where
f (a1,Just a2) = apply andl [a1,a2]
f (a1,Nothing) = pure a1
g (AL [a1@(AL _),a2@(AL _)]) = pure (a1,Just a2)
g a1 = pure (a1,Nothing)
handleJAI :: SynIso (Atom,Tag) (Atom,Tag)
handleJAI = Iso f g where
f (a,"fai") = pure (a,"1")
f (a,"1") = do
mjai <- gets sJAI
case mjai of
Just jai -> do
(na,Just t) <- apply modalSumti (jai,a)
pure (na,t)
Nothing -> lift $ Left "No JAI in state."
f a = pure a
g a = pure a --Loosing information
bridi_tail :: Syntax BTCT
bridi_tail = (extendBTCT ||| id) . ifJustB
<<< bridi_tail_1
&&& optional (((just.right.gihek &&& optional stag)
&&& sepSelmaho "KE" &&> bridi_tail <&& optSelmaho "KEhE"
)
&&& tail_terms
)
bridi_tail_1 :: Syntax BTCT
bridi_tail_1 = isoFoldl extendBTCT
<<< bridi_tail_2
&&& many (((just.right.gihek &&& insert Nothing)
&&& bridi_tail_2
)
&&& tail_terms
)
bridi_tail_2 :: Syntax BTCT
bridi_tail_2 = (extendBTCT ||| id) . ifJustB
<<< ctLeaf . bridi_tail_3
&&& optional (((just.right.gihek &&& optional stag)
&&& sepSelmaho "BO"
&&> bridi_tail_2
)
&&& tail_terms
)
extendBTCT :: SynIso (BTCT,((Con,BTCT),[Sumti])) BTCT
extendBTCT = appendSumtiToBTCT . first manage . associate
where manage :: SynIso (BTCT,(Con,BTCT)) BTCT
manage = mkIso f g where
f (l1,(con,l2)) = CTNode con (l1,l2)
g (CTNode con (l1,l2)) = (l1,(con,l2))
appendSumtiToBTCT :: SynIso (BTCT,[Sumti]) BTCT
appendSumtiToBTCT = mkIso f g where
f (btct,sumti) = fmap (aSTBTCT sumti) btct
g (btct) = let sumti = reverse $ foldl (ff) [] btct
nbtct = fmap (tCTBTSa sumti) btct
in (nbtct,sumti)
aSTBTCT sumti2 (selbri,sumti1) = (selbri,sumti1++sumti2)
tCTBTSa sumti2 (selbri,sumti1) = (selbri,sumti1 \\ sumti2)
ff ls (selbri,sumti) = fun (reverse sumti) ls
fun a [] = a
fun (a:as) (b:bs) | a == b = a : (fun as bs)
| otherwise = []
bridi_tail_3 :: Syntax (Selbri,[Sumti])
bridi_tail_3 = (second handleKO . selbri) &&& tail_terms
-- <+> gek_sentence
where handleKO :: SynIso Atom Atom
handleKO = (sndToState 1 . second (tolist1 . impl) . reorder . ifFlag "ko") <+> id
where reorder = mkIso f g where
f selbri = (selbri,[selbri,cPN "Imperative" lowTv])
g (selbri,_) = selbri
gek_sentence :: Syntax Atom
gek_sentence = handleCon . handleGIK
<<< gek &&& subsentence
&&& gik
&&& subsentence
-- &&& tail_terms
-- <+> optional tag &&& sepSelmaho "KE"
-- &&> gek_sentence
-- &&& optSelmaho "KEhE"
-- <+> selmaho "NA" &&& gek_sentence
-------------------------------------------------------------------------------
--Sumti
-------------------------------------------------------------------------------
tail_terms :: Syntax [Sumti]
tail_terms = termsM <&& optSelmaho "VAU"
terms :: Syntax [Sumti]
terms = some term
termsM :: Syntax [Sumti]
termsM = many term
--FIXME Implement TermSets
--terms_1 :: Syntax [Sumti]
--terms_1 = terms_2 &&& many (sepSelmaho "PEhE" &&> joik_jek &&& terms_2)
--terms_2 :: Syntax [Sumti]
--terms_2 = cons <<< term &&& many (sepSelmaho "CEhE" &&& term)
term :: Syntax Sumti
term = (sumti &&& insert Nothing)
<+> (modalSumti <<< tag &&& sumti <&& optSelmaho "KU")
<+> (commute <<< _FA &&& sumti <&& optSelmaho "KU")
-- <+> termset
-- <+> selmaho "NA" <&& sepSelmaho "KU"
<+> placeholder --Not normal Lojban
where _FA = just . faToPlace . selmaho "FA"
faToPlace :: SynIso String String
faToPlace = mkSynonymIso [("fa","1")
,("fe","2")
,("fi","3")
,("fo","4")
,("fu","5")
,("fi'a","?")]
--For parsing placeholders like "x1" as a sumti
--The 1 will be used as the place tag
placeholder :: Syntax Sumti
placeholder = first instanceOf . handle
<<< letter &&& digit <&& sepSpace
where handle = mkIso f g
f (c,d) = (cCN [c,d] noTv,Just [d])
g (CN [c,_],Just [d]) = (c,d)
modalSumti :: SynIso (JJCTTS,Atom) Sumti
modalSumti = addsnd (Just "ModalSumti")
. sndToState 1
. (pid &&& tolist1)
. handleJJCTTS
. second tolist1
sumti :: Syntax Atom
sumti = handleFREEs2 <<< sumti' &&& frees
sumti' :: Syntax Atom
sumti' = (isoFoldl handleKEhA ||| id) . ifJustB
<<< sumti_1 &&& optional (sepSelmaho "VUhO" &&> relative_clauses)
sumti_1 :: Syntax Atom
sumti_1 = (handleCon2 ||| id) . ifJustB
<<< sumti_2 &&& optional ((just . joik_ek &&& optional stag)
<&& sepSelmaho "KE"
&&& sumti
<&& optSelmaho "KEhE"
)
sumti_2 :: Syntax Atom
sumti_2 = isoFoldl handleCon2
<<< sumti_3 &&& many ((just . joik_ek &&& insert Nothing) &&& sumti_3)
sumti_3 :: Syntax Atom
sumti_3 = (handleCon2 ||| id) . ifJustB
<<< sumti_4 &&& optional ((just . joik_ek &&& optional stag)
<&& sepSelmaho "BO"
&&& sumti_3
)
sumti_4 :: Syntax Atom
sumti_4 = sumti_5
<+> (handleCon . handleGIK <<< gek &&& sumti &&& gik &&& sumti_4)
--FIXME missing optional "KU" after selbri
sumti_5 :: Syntax Atom
sumti_5 = ptp (quantifier &&& lookahead selbri) (isoAppend " lo ") sumti_5Q
<+> sumti_5Q
-- Quantifiers
sumti_5Q :: Syntax Atom
sumti_5Q = (handleSubSet . second setWithSize ||| maybeinof)
. manage
<<< (optional quantifier &&& sumti_5R)
where manage :: SynIso (Maybe Atom,Atom) (Either (Atom,(Atom,Atom)) Atom)
manage = Iso f g where
f (Just pa,sumti) = do
atoms <- gets sAtoms
case findSetType sumti atoms of
Just t -> pure $ Left (sumti,(pa,t ))
Nothing -> pure $ Left (sumti,(pa,sumti))
f (Nothing,sumti) = pure $ Right sumti
g (Left (sumti,(pa,t))) = pure (Just pa,sumti)
g (Right sumti) = pure (Nothing,sumti)
handleSubSet = sndToState 1 . second (tolist1 . subsetL) . reorder2
maybeinof = Iso f g where
f a = do
atoms <- gets sAtoms
case getDefinitions [a] atoms of
[] -> apply instanceOf a
_ -> pure a --We have a lep which is already an instance
g a = error "Check for lep somehow"
reorder2 = mkIso f g where
f (t,s) = (s,(s,t))
g (s,(_,t)) = (t,s)
-- Relative clauses
sumti_5R :: Syntax Atom
sumti_5R = (isoFoldl handleKEhA ||| id) . ifJustB
<<< sumti_6 &&& optional relative_clauses
handleKEhA :: SynIso (Atom,Atom) Atom
handleKEhA = Iso f g where
f (c,a) = do
pushAtom $ atomMap (switch c) a
pure c
g c = error "Not Implemented"
switch c (Node "ConceptNode" "ke'a" _) = c
switch _ a = a
sumti_6 :: Syntax Atom
sumti_6 = kohaP
<+> le
<+> laP
<+> liP
<+> zoP
<+> luP
relative_clauses :: Syntax [Atom]
relative_clauses = cons <<< relative_clause &&& many (sepSelmaho "ZIhE" &&> relative_clause)
relative_clause :: Syntax Atom
relative_clause = (goi <&& optSelmaho "GEhU") <+> (noi <&& optSelmaho "KUhO")
where goi :: Syntax Atom
goi = ptp (selmaho "GOI") goiToNoi noi where
goiToNoi = mkSynonymIso [("pe " ,"poi ke'a srana ")
,("po " ,"poi ke'a se steci srana ")
,("po'e ","poi jinzi ke se steci srana ")
,("po'u ","poi ke'a du ")
,("ne " ,"noi ke'a srana ")
,("no'u ","noi ke'a du ")
,("goi " ,"poi ke'a du ")]
noi :: Syntax Atom
noi = sepSelmaho "NOI" &&> ((hasKEhA <<< relSentence)
<+>
ptp bridi_tail addKEhA relSentence)
where hasKEhA = Iso f f
f a = if atomAny (\case { Link _ _ _ -> False
; (Node _ n _) -> "ke'a" `isInfixOf` n
}) a
then pure a
else lift $ Left "No ke'a in bridi."
addKEhA = mkIso f g where
f = (++) "ke'a "
g = drop 4
relSentence = withNoCTX subsentence
withNoCTX syn = Iso f g where
f a = do
now <- gets sNow
setNow $ cCN "NOCTX" noTv
res <- apply syn a
setNow now
pure res
g a = do
now <- gets sNow
setNow $ cCN "NOCTX" noTv
res <- unapply syn a
setNow now
pure res
le :: Syntax Atom
le = (handleFREEs2 ||| id) . ifJustB
<<< setFlagValueIso "LE_FLAG" . (selmaho "LA" <+> selmaho "LE")
&&> sumti_tail
&&& optional (sepSelmaho "KU" &&> frees)
sumti_tail :: Syntax Atom
sumti_tail = sumti_tail_1
<+> ((handleKEhA . commute ||| id) . ifJustA
<<< optional (reparse relative_clause . insert "pe ")
&&& sumti_tail_1)
<+> (isoFoldl handleKEhA . commute <<< relative_clauses
&&& sumti_tail_1)
sumti_tail_1 :: Syntax Atom
sumti_tail_1 =
(
(setWithSize ||| id) . ifJustA
<<<
optional quantifier
&&&
((isoFoldl handleKEhA ||| id) . ifJustB
<<< (handle <<< selbri &&& varNode)
&&& optional relative_clauses)
)
-- <+>
-- (setWithSize <<< quantifier &&& sumti)
where varNode = insert [(Node "VariableNode" "$var" noTv,Nothing)]
handle = choice leHandlers . first (getFlagValueIso "LE_FLAG")
. commute . unit . _ssl . handleBRIDI
leHandlers = [genInstance "IntensionalInheritanceLink" . rmfst "le"
,genInstance "SubsetLink" . rmfst "lo"
,massOf "IntensionalInheritanceLink" . rmfst "lei"
,massOf "SubsetLink" . rmfst "loi"
,setOf "IntensionalInheritanceLink" . rmfst "le'i"
,setOf "SubsetLink" . rmfst "lo'i"
,genInstance "IntensionalInheritanceLink" . rmfst "le'e"
,genInstance "SubsetLink" . rmfst "lo'e"
]
-- A Mass according to Lojban is a thing that has
-- all properties of its parts
massOf :: String -> SynIso Atom Atom
massOf itype = instanceOf . _ssl . _frames . addStuff
. (implicationOf *** genInstance itype) . addfstAny pp
where pp = Node "PredicateNode" "gunma" noTv
var = Node "VariableNode" "$var" noTv
addStuff = mkIso f g where
f (p,a) = ((noTv,p),[(var,"1"),(a,"2")])
g ((_,p),[_,(a,_)]) = (p,a)
setOf :: String -> SynIso Atom Atom
setOf itype = sndToState 1 . second (tolist1 . setTypeL . tolist2)
. makeSet . genInstance itype
where makeSet = Iso f g
where f a = do
name <- randName (show a)
let set = cCN name noTv
pure (set,(set,a))
g (_,(_,a)) = pure a
laP :: Syntax Atom
laP = handleName . wordNode <<< sepSelmaho "LA"
&&> anyWord
<&& optSelmaho "KU"
where handleName :: SynIso Atom Atom
handleName = Iso f g where
f a = do
p <- apply instanceOf (cPN "cmene" lowTv)
name <- randName (nodeName a ++ "___" ++ nodeName a)
let c = cCN name lowTv
at = (a,"1")
ct = (c,"2")
pt = (highTv,p)
l <- apply _frames (pt,[at,ct])
pushAtom l
pure c
g _ = do
(EvalL _ _ (LL [a,_])) <- popAtom
pure a
liP :: Syntax Atom
liP = sepSelmaho "LI" &&> (xo <+> number) <&& optSelmaho "LOhO"
xo :: Syntax Atom
xo = varnode <<< word "xo"
quantifier :: Syntax Atom
quantifier = number <&& optSelmaho "BOI"
--FIXME <+> sepSelmaho "VEI" &&& mex &&& optSelmaho "VEhO"
number :: Syntax Atom
number = ( numberNode ||| concept )
. (showReadIso . paToNum |^| isoIntercalate " ")
<<< some (selmaho "PA")
where paToNum :: SynIso [String] Int
paToNum = isoFoldl (digitsToNum . second paToDigit) . addfst 0
digitsToNum :: SynIso (Int,Int) Int
digitsToNum = Iso f g where
f (i,j) = pure (i*10+j)
g 0 = lift $ Left "Last Digit"
g n = pure (n `div` 10,n `mod` 10)
paToDigit :: SynIso String Int
paToDigit = mkSynonymIso [("no",0),("pa",1)
,("re",2),("ci",3)
,("vo",4),("mu",5)
,("xa",6),("ze",7)
,("bi",8),("so",9)]
zoP :: Syntax Atom
zoP = instanceOf . wordNode <<< mytext "zo" &&> anyWord
--Koha phrase for any kind of pronoun
kohaP :: Syntax Atom
kohaP = da <+> ma <+> ko <+> keha <+> koha
where koha = concept . selmaho "KOhA"
ma = varnode . word "ma"
da = concept . oneOfS word ["da","de","di"]
ko = setFlagIso "ko" . concept . word "ko"
keha = concept . word "ke'a"
luP' :: Syntax Atom
luP' = sepSelmaho "LU" &&> lojban <&& optSelmaho "LIhU"
luP :: Syntax Atom
luP = instanceOf . luP'
setWithSize :: SynIso (Atom,Atom) Atom
setWithSize = sndToState 2 . second (tolist2 . (sizeL *** setTypeL)) . makeSet
where makeSet = Iso f g
f (a,b) = do
name <- randName $ show a ++ show b
let set = cCN name noTv
pure (set,([set,a],[set,b]))
g (_,([_,a],[_,b])) = pure (a,b)
ctLeaf :: SynIso a (ConnectorTree c a)
ctLeaf = Iso f g where
f a = pure $ CTLeaf a
g (CTLeaf a) = pure $ a
g _ = lift $ Left "Not a CTLeaf."
tagPat :: Syntax (Tagged Selbri) -> Syntax JJCTTS
tagPat syn = isoFoldl toTree <<< ctLeaf . syn
&&& many (joik_jek &&& syn)
where toTree :: SynIso (JJCTTS,(JOIK_JEK,Tagged Selbri)) JJCTTS
toTree = Iso f g
f (a,(c,b)) = pure $ CTNode c (a,CTLeaf b)
g (CTNode c (a,CTLeaf b)) = pure (a,(c,b))
g _ = lift $ Left "toTree.g: Doesn't match expected pattern."
tag :: Syntax JJCTTS
tag = tagPat tense_modal
stag :: Syntax JJCTTS
stag = tagPat simple_tense_modal
tense_modal :: Syntax (Tagged Selbri)
tense_modal = simple_tense_modal
<+> addsnd Nothing . (sepSelmaho "FIhO" &&> selbri <&& optSelmaho "FEhU")
simple_tense_modal :: Syntax (Tagged Selbri)
simple_tense_modal = _bai
<+> _space_time
<+> _CAhA
--FIXME: Can we really just use tanru_unit_2 or do we need something specific
_bai :: Syntax (Tagged Selbri)
_bai = addsnd Nothing . withFlag "WITH_BAI" tanru_unit_2
_space_time :: Syntax (Tagged Selbri)
_space_time = (addsnd (Just "space_time") . addfstAny noTv . space_time)
_CAhA :: Syntax (Tagged Selbri)
_CAhA = addsndAny Nothing . addfstAny noTv
. implicationOf . predicate . selmaho "CAhA"
--Fails when the Syntax succeeds and the other way around:
--either the syn succeeds, then we fail with the zeroArrow,
--or the right . insert () succeeds because syn failed, then we do nothing
notsyn :: Syntax a -> Syntax ()
notsyn x = (zeroArrow ||| id) . ((left <<< lookahead x) <+> (right . insert ()))
-------------------------------------------------------------------------------
--Selbri
-------------------------------------------------------------------------------
handleTanru :: SynIso (Selbri,Selbri) Selbri
handleTanru = sndToState 1 . second (sel_iimpl) . reorder
where reorder = mkIso f g where
f (g,t) = (t,(t,g))
g(t,(_,g)) = (g,t)
sel_iimpl = Iso f g where
--FIXME do something sensible with the tvs
f ((tv1,s1),(tv2,s2)) = apply (tolist1 . _iimpl) (s1,s2)
g a = do
(s1,s2) <- unapply (tolist1 . _iimpl) a
pure ((noTv,s1),(noTv,s2))
selbri :: Syntax Selbri
selbri = second filterState . (handle ||| id) . ifJustA
<<< optional tag &&& selbri_1
where handle = commute . first handleJJCTTS_Selbri . associate . second commute
selbri_1 :: Syntax Selbri
selbri_1 = selbri_2 <+> ((handleNAFlag . selmaho "NA") &&> selbri)
where handleNAFlag = Iso f g where
f na = do
flags <- gets sFlags
if "NA_FLAG" `M.member` flags
then case M.lookup "NA_FLAG" flags of
Just "na" -> if na == "na"
then setFlagValue "NA_FLAG" "ja'a"
else pure ()
Just "ja'a" -> if na == "na"
then setFlagValue "NA_FLAG" "na"
else pure ()
else setFlagValue "NA_FLAG" na
g () = getFlagValue "NA_FLAG"
selbri_2 :: Syntax Selbri
selbri_2 = (handleTanru . commute ||| id) . ifJustB
<<< selbri_3 &&& optional (sepSelmaho "CO" &&> selbri_2)
selbri_3 :: Syntax Selbri
selbri_3 = isoFoldl handleTanru . (inverse cons) <<< some selbri_4
selbri_4 :: Syntax Selbri
selbri_4 = ( handleSelbri4 ||| id) . ifJustB
<<< selbri_5
&&& optional (some (((just . joik_jek &&& insert Nothing)
&&& selbriToEval . selbri_5)
<+> ((just . left . joik &&& optional stag)
&&& sepSelmaho "KE"
&&> selbriToEval . selbri_3
<&& optSelmaho "KEhE")))
where reorder = mkIso f g
f (a,(c,b)) = (c,(a,b))
g (c,(a,b)) = (a,(c,b))
handleSelbri4 = addfst selbriDefaultTV
. isoFoldl (rmfst selbriDefaultTV
. manageSelbriCon
. handleCon
. reorder)
. first selbriToEval
selbri_5 :: Syntax Selbri
selbri_5 = (handleSelbri5 ||| id) . ifJustB
<<< selbri_6
&&& optional ((
(just . joik_jek)
&&& optional stag
<&& sepSelmaho "BO"
)
&&& selbriToEval . selbri_5)
where handleSelbri5 = manageSelbriCon . handleCon2 .< selbriToEval
selbri_6 :: Syntax Selbri
selbri_6 = tanruBO <+> tanruGUHEK
tanruBO :: Syntax Selbri
tanruBO = (handleTanru ||| id) . ifJustB
<<< tanru_unit &&& optional (sepSelmaho "BO" &&> selbri_6)
tanruGUHEK :: Syntax Selbri
tanruGUHEK = manageSelbriCon . handleCon . handleGIK
<<< guhek &&& (selbriToEval . selbri) &&& gik &&& (selbriToEval . selbri_6)
tanru_unit :: Syntax Selbri
tanru_unit = isoFoldl handleCEI <<< tanru_unit_1
&&& many (sepSelmaho "CEI" &&> tanru_unit_1)
where handleCEI = Iso f g
f ((tv1,a1),(tv2,a2)) = do
pushAtom $ cImpL noTv a2 a1
pure (tv1,a1)
g (tv1,a1) = lift $ Left "FIXME: not unpacking of tanru_unit yet"
tanru_unit_1 :: Syntax Selbri
tanru_unit_1 = (handleLinkArgs ||| id) . ifJustB
<<< tanru_unit_2 &&& optional linkargs
where handleLinkArgs :: SynIso (Selbri,[Sumti]) Selbri
handleLinkArgs = iunit . commute
. (toState 1 . tolist1 . _frames . second handleTAG
&&& rmsndAny [])
linkargs :: Syntax [Sumti]
linkargs = (handleBEhOFREEs ||| id) . ifJustB
<<< sepSelmaho "BE"
&&> (cons <<<
(handleBEFREEs <<< frees &&& tag2 . term)
&&& (links <+> insert [])
)
&&& optional (sepSelmaho "BEhO" &&> frees)
where tag2 = second (mkIso f g)
f Nothing = Just "2"
f b = b
g (Just "2") = Nothing
g b = b
handleBEFREEs = first (handleFREEs2 . commute) . associate
handleBEhOFREEs = isoZip . first (inverse setl) . commute
. second handleFREEs2
. inverse associate
. first (commute . first setl . inverse isoZip)
links :: Syntax [Sumti]
links = some links'
where links' :: Syntax Sumti
links' = first (handleFREEs2 . commute) . associate
<<< sepSelmaho "BEI" &&> frees
&&& term
--Also block selbri_4 when selbri_5+6 ???
tanru_unit_2 :: Syntax Selbri
tanru_unit_2 = addfst selbriDefaultTV
. ((brivla
<+> nuP
<+> moiP
<+> gohaP
<+> meP) . ifNotFlag "WITH_BAI"
<+> bai . ifFlag "WITH_BAI")
<+> tanruSE
<+> tanruKE
<+> tanruNAhE
bai :: Syntax Atom
bai = (ptp (selmaho "BAI") iso brivla) . ifFlag "WITH_BAI"
where iso = Iso f g where
f a = do
btf <- asks wBai
apply btf a
g b = do
btf <- asks wBai
unapply btf b
selbriDefaultTV = stv 0.75 0.9
selbriToEval :: SynIso Selbri Atom
selbriToEval = mkIso f g where
f (tv,p) = cEvalL tv (cVN "$arg_place") (cLL [p,cVN "$arg"])
g (EvalL tv _ (LL [p,_])) = (tv,p)
manageSelbriCon :: SynIso Atom Selbri
manageSelbriCon = Iso f g where
f a = do
name <- randName (show a)
let s = (selbriDefaultTV,cPN name noTv)
eval <- apply selbriToEval s
pushAtom $ cImpL noTv eval a
pure s
g s = do
atoms <- gets sAtoms
eval <- apply selbriToEval s
let ml = find (ff eval) atoms
case ml of
Nothing -> lift $ Left "No ImpL for guhek."
Just l@(ImpL _ _ a) -> rmAtom l >> pure a
ff eval (ImpL _ eval2 _) = eval == eval2
ff _ _ = False
--Nahe with pred other than main??? influence impl link???
tanruNAhE :: Syntax Selbri
tanruNAhE = (handleNAhE <<< _NAhE &&& tanru_unit_2)
where handleNAhE = mkIso f g
f (tv,(_,s)) = (tv,s)
g (tv,s) = (tv,(noTv,s))
tanruJAI :: Syntax Selbri
tanruJAI = (sepSelmaho "JAI"
&&> setFlagIso "JAI" . jaiFlag . tag
&&> tanru_unit_2)
where jaiFlag :: SynIso JJCTTS ()
jaiFlag = Iso f g where
f t = setJai t
g () = do
mjai <- gets sJAI
case mjai of
Just jai -> rmJai >> pure jai
Nothing -> lift $ Left "No JAI in state."
_NAhE :: Syntax TruthVal
_NAhE = naheToTV <<< (selmaho "NAhE" <+> insert "")
where naheToTV = mkSynonymIso [("je'a",stv 1 0.9)
--,("" ,stv 0.75 0.9)
,("no'e",stv 0.5 0.9)
,("na'e",stv 0.25 0.9)
,("to'e",stv 0 0.9)]
brivla :: Syntax Atom
brivla = handleFREEs2 <<< brivla' &&& frees
brivla' :: Syntax Atom
brivla' = implicationOf . predicate . gismu
meP :: Syntax Atom
meP = implicationOf . predicate . showReadIso
<<< sepSelmaho "ME" &&> sumti
_MO :: Syntax Atom
_MO = varnode . word "mo"
_GOhA :: Syntax Atom
_GOhA = implicationOf . predicate <<< selmaho "GOhA"
gohaP :: Syntax Atom
gohaP = _MO <+> _GOhA
tanruKE :: Syntax Selbri
tanruKE = sepSelmaho "KE" &&> selbri_3 <&& optSelmaho "KEhE"
--FIXME: Should use DefineLink instead of EquivalenceLink, but that doesn't accept a
--PredicateNode, only a DefinedPredicateNode, which messes up pattern matching in the rest
-- of the code
tanruSE :: Syntax Selbri
tanruSE = handle <<< selmaho "SE" &&& tanru_unit_2
where handle = Iso f g where
f (se,(tv,t@(PN name))) = let dpred = cPN (name ++ "_"++ se) noTv
dsch = cDSN se
defl = cEquivL noTv
dpred
(cEXOL noTv
[dsch
,t
])
in do
pushAtom defl
pure (tv,dpred)
--FIXME: all popAtom's should be findAtom's
g (tv,dpred) = do
(EquivL _ (EXOL [dsch,t])) <- popAtom
let (DSN se) = dsch
pure (se,(tv,t))
nuP :: Syntax Atom
nuP = maybeImpl . isoFoldl handleCon2 . manage
<<< (selmaho "NU" &&& many ((just.joik_jek &&& insert Nothing)
&&& selmaho "NU"))
&&& subsentence
<&& optSelmaho "KEI"
where manage = Iso f g
f ((s,ls),a) = do
a1 <- apply handleNU (s,a)
as <- apply (mapIso (manage2 a)) ls
pure (a1,as)
g (a1,as) = do
(s,a) <- unapply handleNU a1
ls <- unapply (mapIso (manage2 a)) as
pure ((s,ls),a)
manage2 :: Atom -> SynIso (Con,String) (Con,Atom)
manage2 a = Iso f g where
f (con,s) = do
newa <- apply handleNU (s,a)
pure (con,newa)
g (con,newa) = do
(s,_) <- unapply handleNU (newa)
pure (con,s)
handleNU = withEmptyState $ choice nuHandlers
maybeImpl :: SynIso Atom Atom
maybeImpl = Iso f g where
f l@(Link _ _ _) = do
name <- randName (show l)
let pred = cPN name noTv
pushAtom $ cImpL noTv pred l
pure pred
f a@(Node _ _ _) = pure a
g pred = do
atoms <- gets sAtoms
case find (ff pred) atoms of
Just i@(ImpL _ _ l) -> rmAtom i >> pure l
Nothing -> pure pred
ff predP (ImpL _ predD _) = predP == predD
ff _ _ = False
nuHandlers :: [SynIso (String,Atom) Atom]
nuHandlers = [handleNU "du'u" (mkNuEventLabel "du'u") . rmfst "du'u",
handleNU "su'u" (mkNuEventLabel "su'u") . rmfst "su'u",
handleNU "nu" (mkNuEvent ["fasnu"]) . rmfst "nu",
handleNU "mu'e" (mkNuEvent ["fasnu", "mokca"]) . rmfst "mu'e",
handleNU "zu'o" (mkNuEvent ["zumfau"]) . rmfst "zu'o",
handleNU "za'i" (mkNuEvent ["tcini"]) . rmfst "za'i",
handleNU "ka" (mkNuEvent ["ckaji"]) . rmfst "ka",
handleNU "ni" (mkNuEvent ["klani"]) . rmfst "ni",
handleNU "si'o" (mkNuEvent ["sidbo"]) . rmfst "si'o",
handleNU "li'i" (mkNuEventLabel "is_experience") . rmfst "li'i",
handleNU "pu'u" (mkNuEventLabel "is_point_event") . rmfst "pu'u",
handleNU "jei" (mkNuEventLabel "is_truth_value") . rmfst "jei"]
where
-- Functions that specify the type of the abstraction
-- Note: atomNub may leave AndLink with only one atom
mkNuEventLabel :: String -> String -> SynIso Atom [Atom]
mkNuEventLabel eventName _ = tolist2 . (mkEval . atomNub . mkEvent &&& id)
where mkEval = _eval . addfst (cPN eventName highTv)
. tolist2 . addfstAny (cCN "$2" highTv)
mkNuEvent :: [String] -> String -> SynIso Atom [Atom]
mkNuEvent (nuType:nts) name = isoConcat . tolist2 . addfstAny nuImps
. tolist2 . (wrapNuVars &&& id)
where
nuPred = cPN (nuType ++ "_" ++ name) highTv
nuImps = (cImpL highTv nuPred (cPN nuType highTv))
:(case nts of
[] -> []
[nts] -> let nuSec = (cPN (nts ++ "_" ++ name) highTv)
in [(cIImpL highTv nuPred nuSec)
,(cImpL highTv nuSec (cPN nts highTv))])
wrapNuVars = andl . tolist2 . (mkEval "1" *** mkEval "2")
. addfstAny (cCN "$2" highTv) . atomNub . mkEvent
mkEval num = _evalTv . addfstAny highTv
. addfstAny (cPN ("sumti" ++ num) highTv)
. tolist2
. addfstAny nuPred
--Turns a Sentence into a conjunction of predicates
mkEvent = atomIsoMap (mkIso f id) where
f (CtxL _ (SL (evals:_))) = evals
f (EvalL _ (PN _) (LL [vn@(VN _), _])) = vn -- can be PN:CN, PN:WN, anything else?
f a = a
--As for "pu'u", "pruce" and "farvi" don't seem quite right
handleNU :: String -> (String -> SynIso Atom [Atom]) -> SynIso Atom Atom
handleNU abstractor nuTypeMarker = Iso f g where
f atom = do
rname <- randName $ (show atom)
let name = rname ++ "___" ++ abstractor
pred = cPN name highTv
link <- apply (mkLink pred name nuTypeMarker) atom
pushAtom link
pure pred
g (pred@(PN name)) = do
state <- gets sAtoms
atom <- case find (atomElem pred) state of -- remove "is_event" atoms
Just l -> unapply (mkLink pred name nuTypeMarker) l
Nothing -> lift $ Left $ (show pred) ++ " can't be found in state."
      pure atom --should only be one. Check? Instantiate VNs???
mkLink :: Atom -> String -> (String -> SynIso Atom [Atom]) -> SynIso Atom Atom
mkLink pred name nuTypeMarker = mkLink' . addfstAny pred
. second (nuTypeMarker name)
. mkNuState . getPredVars
-- Extract predicateNodes from the atom and state
getPredVars :: SynIso Atom (([Atom], [Atom]), Atom)
getPredVars = mkIso f g where
f atom =
let predicateNodes =
nub $ atomFold (\ns a -> case a of (EvalL _ _ (LL (pn@(PN _):_))) -> pn:ns
(ImpL _ pn@(PN _) (PN _)) -> pn:ns
(InhL _ pn@(PN _) (PN _)) -> pn:ns
a -> ns) [] atom
predicateVars = map (cVN.("$"++).show) [3..(length predicateNodes) + 2]
in ((predicateVars,predicateNodes), atom)
g (_, atom) = atom --FIXME, can't assume first atom is the atom
mkNuState :: SynIso (([Atom], [Atom]), Atom) ([Atom], Atom)
mkNuState = second replacePredicatesIso
. inverse associate
. first (rmsndAny [] . pid &&& isoZip . commute)
replacePredicatesIso :: SynIso ([(Atom,Atom)],Atom) Atom
replacePredicatesIso = mkIso f g where
f (nodeVarMap,a) = atomMap (mapf nodeVarMap) a
g a = ([],a) -- i.e., don't instantiate vars for now
mapf nvm pn@(PN _) =
case lookup pn nvm of
Just vn -> vn
Nothing -> pn
mapf _ a = a
-- (pred, (typedPredicateVars, eventAtom:state')
mkLink' :: SynIso (Atom, ([Atom], [Atom])) Atom
mkLink' = _equivl
. first (_evalTv . addfst highTv . addsnd [cVN "$1"])
. second
(_meml . addfst (cVN "$1") . ssl . tolist2 . addfst (cVN "$2")
. _exl . first (varll . mapIso (_typedvarl . addsnd (cTN "PredicateNode")))
. second andl)
_MOI :: Syntax String
_MOI = selmaho "MOI"
moiP :: Syntax Atom
moiP = implicationOf . predicate . handleMOI
<<< (number &&& _MOI)
where handleMOI = mkIso f g
f (a,s) = nodeName a ++ '-':s
g name = let nn = takeWhile (/= '-') name
s = drop 1 $ dropWhile (/= '-') name
in if isNumeric nn
then (Node "NumberNode" nn noTv,s)
else (Node "ConceptNode" nn noTv,s)
-------------------------------------------------------------------------------
--bacru
-------------------------------------------------------------------------------
handleModalSumti :: SynIso (Atom,[Atom]) (Maybe Atom)
handleModalSumti = handle . mapIso handleModalSumti' . isoDistribute
where handle = Iso f g
f [] = pure Nothing
f as = Just <$> apply andl as
g Nothing = pure []
g (Just al) = unapply andl al
handleModalSumti' :: SynIso (Atom,Atom) Atom
handleModalSumti' = mkIso f g where
f (pred,a) = atomMap (fun pred) a
g atom = (cVN "ignore when printing",cVN "ignore when priting2")
fun pred1 (EvalL _ _ (LL [pred2@(PN _),_])) = cImpL noTv pred1 pred2
fun pred1 (EvalL _ pred2 _) = cImpL noTv pred1 pred2 --Time/Space
fun _ a = a
--For merging sumti before and after the selbri into a single list
mergeSumti :: (a ~ aa) => SynIso ([a],(s,[aa])) (s,[a])
mergeSumti = Iso f g where
f ([],(_,[])) = lift $ Left "No Sumti to merge."
f (a1,(s,a2)) = pure (s,a1++a2)
g (s,a) = case a of
[] -> lift $ Left "No Sumti to reverse merge."
(x:xs) -> pure ([x],(s,xs))
addti :: SynIso String String
addti = mkIso f g
where f s = s ++ "ti "
          g s = s --TODO: Maybe remove ti again
------------------------------------
--Free
-----------------------------------
data Free = FNull | FUI [UI]
deriving (Show,Eq)
fNull :: SynIso () Free
fNull = Iso f g where
f () = pure FNull
g FNull = pure ()
g a = lift $ Left (show a ++ "is not a FNull")
fUI :: SynIso [UI] Free
fUI = Iso f g where
f ui = pure (FUI ui)
g (FUI ui) = pure ui
g a = lift $ Left (show a ++ "is not a FUI")
handleFREEs2 :: SynIso (Atom,[Free]) Atom
handleFREEs2 = isoFoldl handleFREE
handleFREEs :: SynIso (Atom,[Free]) Atom
handleFREEs = listl . cons . addfst (cAN "frees")
. cons . second gsAtoms . unit
. isoFoldl handleFREE
handleFREE :: SynIso (Atom,Free) Atom
handleFREE = (iunit . second (inverse fNull))
<+> (handleUIs . second (inverse fUI))
frees :: Syntax [Free]
frees = many free
free :: Syntax Free
free = atomToFNull . sei
-- <+> soi
-- <+> vocative &&& optional relative_clauses
-- &&& selbri
-- &&& optional relative_clauses
-- <&& optSelmaho "DOhU"
-- <+> vocative &&& optional relative_clauses
-- &&& some cmene
-- &&& optional relative_clauses
-- <&& optSelmaho "DOhU"
<+> atomToFNull . voc1
<+> fUI . indicators
where atomToFNull = fNull . toState 1 . tolist1
voc1 :: Syntax Atom
voc1 = (listl . cons . addfst (cAN "vocative1")
. (mapIso handleVOC1 . isoDistribute . commute ||| id) . ifJustB
<<< vocatives &&& optional sumti <&& optSelmaho "DOhU"
)
where handleVOC1 = _eval . second tolist1 . commute
--Vice Versa
--soi :: Syntax Atom
--soi = sepSelmaho "soi" &&> sumti &&& optional sumti <&& optSelmaho "SEhU"
{-
free :: SyntaxState s => Syntax s [ADT]
free = adtSyntax "free" <<<
<+> (number <+> lerfu_string) &+& adtSelmaho "MAI"
<+> adtSelmaho "TO" &+& text
&+& listoptional (adtSelmaho "TOI")
<+> adtSelmaho "XI" &+& listoptional (concatSome free)
&+& (number <+> lerfu_string)
&+& listoptional (adtSelmaho "BOI")
<+> adtSelmaho "XI" &+& listoptional (concatSome free)
&+& adtSelmaho "VEI"
&+& listoptional (concatSome free)
&+& mex
&+& listoptional (adtSelmaho "VEhO")
-}
------------------------------------
--Second Order Statements
-----------------------------------
type SEI = Atom
--SEIs are second order Statements that can appear almost anywhere
sei :: Syntax Atom
sei = handleBRIDI . commute
<<< sepSelmaho "SEI" &&> ((terms <&& optSelmaho "CU") <+> insert [])
&&& selbri
<&& optSelmaho "SEhU"
vocatives :: Syntax [Atom]
vocatives = mapIso (implicationOf . predicate) . merge
<<< oooob (some (handle <<< selmaho "COI" &&& optional (selmaho "NAI"))) []
(tolist1 . selmaho "DOI") []
where handle :: SynIso (String,Maybe String) String
handle = mkIso f g
f (c,Just n) = c ++ n
f (c,Nothing) = c
g s = let (ms,mn) = splitAt (length s - 3) s
in case mn of
"nai" -> (ms,Just mn)
_ -> (s,Nothing)
merge = mkIso f g where
f (a,b) = a ++ b
g _ = error $ "Not implemented vocative merge g."
--Attitude
type UI = (Atom,TruthVal)
indicators :: Syntax [UI]
indicators = some indicator
-- sepSelmaho "FUhE" &&> some indicator
indicator :: Syntax UI
indicator = uiP
-- <+> adtSelmaho "Y" FIXME??? Not relevant for text
-- <+> adtSelmaho "DAhO" FIXME resets various things to default
-- <+> adtSelmaho "FUhO"
uiP :: Syntax (Atom,TruthVal)
uiP = handle <<< oooob (_UI &&& naiP) (gehe,1) caiP 0.5
where handle = second handleNAICAI . inverse associate
handleNAICAI = mkIso f g where
f (n,c) = stv ((n*c)/2+0.5) 0.9
g (SimpleTV s _) = let v = (s-0.5)*2
in if v >= 0
then (1,v)
else (-1,-v)
gehe = cCN "ge'e" noTv
_UI :: Syntax Atom
_UI = concept <<< (xu <+> selmaho "UI")
where xu = setFlagIso "xu" <<< word "xu"
naiP :: Syntax Double
naiP = handleNAI <<< (selmaho "NAI" <+> insert "")
where handleNAI = mkSynonymIso [("nai" , -1)
,("" , 1)
,("ja'ai", 1)
]
caiP :: Syntax Double
caiP = handleCAI <<< (selmaho "CAI" <+> (insert "" . ifFlag "HaveUI"))
where handleCAI :: SynIso String Double
handleCAI = mkSynonymIso [("cai" ,0.99)
,("sai" ,0.75)
,("" ,0.5 )
,("ru'e" ,0.25)
,("cu'i" ,0.01)
]
handleUIs :: SynIso (Atom,[UI]) Atom
handleUIs = isoFoldl (handleUI . commute)
handleUI :: SynIso ((Atom,TruthVal),Atom) Atom
handleUI = (handleXU ||| handleUI') . switchOnFlag "xu"
where handleXU = rmFlagIso "xu" . addXUIso . rmfstAny (xu,tv)
xu = Node "ConceptNode" "xu" noTv
tv = stv 0.75 0.9
handleUI' :: SynIso ((Atom,TruthVal),Atom) Atom
handleUI' = sndToState 1
. second (tolist1 . _frames . first selbri)
. manage
where manage = mkIso f g where
f ((ui,tv),a) = (a,((tv,ui),[(getPred a,"1")]))
g (a,((tv,ui),_)) = ((ui,tv),a)
getPred (LL [_,CtxL _ (SL ((EvalL _ _ (LL [pred,_])):_)) ]) = pred
getPred a = a
selbri = second instanceOf
handleSEI :: SynIso (Atom,Atom) Atom
handleSEI = fstToState 1 . first tolist1
-------------------------------------------------------------------------------
--Space Time Utils
-------------------------------------------------------------------------------
oooob :: (Eq a,Show a,Eq b,Show b) => Syntax a -> a -> Syntax b -> b -> Syntax (a,b)
oooob syna defa synb defb =
rmFlagIso "SynA" <<< (setFlagIso "SynA" . syna <+> insert defa)
&&& (synb <+> insert defb . ifFlag "SynA")
oooobm :: (Eq a,Show a,Eq b,Show b) => Syntax a -> Syntax b -> Syntax (Maybe a,Maybe b)
oooobm syna synb =
rmFlagIso "SynA" <<< (setFlagIso "SynA" . just . syna <+> insert Nothing)
&&& (just . synb <+> insert Nothing . ifFlag "SynA")
--FIXME This is wrong, use the slow version below???
mergeMaybe :: (Eq a,Show a) => SynIso (a,a) a
-> Syntax (Maybe a,Maybe a)
-> Syntax a
mergeMaybe iso syn = handle . syn
where handle = Iso f g
f (Just a,Just b) = apply iso (a,b)
f (Just a,Nothing) = pure a
f (Nothing,Just a) = pure a
f (Nothing,Nothing)= lift $ Left "Need at least one to mergeMaybe"
g _ = error $ "mergeMaybe g using wrong but fast implementeation"
--FIXME super slow, try to get syn out of the alternatives
--mergeMaybe iso syn = (iso . (inverse just *** inverse just) . syn)
-- <+> (inverse just . rmsnd Nothing . syn)
-- <+> (inverse just . rmfst Nothing . syn)
mergePredicates :: SynIso (Atom,Atom) Atom
mergePredicates = Iso f g where
f (p1,p2) = do
name <- randName (show p1 ++ show p2)
let pred = cPN name noTv
pushAtom $ cImpL noTv pred (cAL noTv [p1,p2])
pure pred
g pred = error $ "Not Implemented g mergePredicates"
mergePredicatesSeq :: SynIso (Atom,Atom) Atom
mergePredicatesSeq = Iso f g where
f (p1,p2) = do
let p1name = nodeName p1
p2name = nodeName p2
p1pred = drop 20 p1name
p2pred = drop 20 p2name
name <- randName (p1name ++ p2name)
name2 <- randName (p1pred ++ p2pred)
let pred = cPN (name ++ "___" ++ name2) noTv
pushAtom $ cEquivL noTv (cEvalL noTv pred (cLL [cVN "$1",cVN "$3"]))
(cAL noTv [ cEvalL noTv p1 (cLL [cVN "$1",cVN "$2"])
, cEvalL noTv p2 (cLL [cVN "$2",cVN "$3"])
])
pure pred
g pred = error $ "Not Implemented g mergePredicatesSeq"
imply :: String -> SynIso Atom Atom
imply string = Iso f g where
f a = pushAtom (cImpL noTv a (cPN string noTv)) >> pure a
g a = popAtom >> pure a
selmahoPred :: String -> Syntax Atom
selmahoPred s = implicationOf . imply s . predicate . selmaho s
-------------------------------------------------------------------------------
--SpaceTime
-------------------------------------------------------------------------------
space_time :: Syntax Atom
space_time = mergeMaybe mergePredicates ((just . space &&& optional time)
<+>
(optional space &&& just . time))
time :: Syntax Atom
time = mergeMaybe mergePredicatesSeq (oooobm time_offset time_interval)
time_offset :: Syntax Atom
time_offset = general_offset "PU" "ZI"
time_interval :: Syntax Atom
time_interval = mergeMaybe mergePredicates (oooobm time_interval' interval_property)
time_interval' :: Syntax Atom
time_interval' = handle_interval <<< selmahoPred "ZEhA" &&& optional (selmahoPred "PU")
where handle_interval = Iso f g where
f (zeha,pu) = do
let zehaname = nodeName zeha
puname = maybe "" nodeName pu
name <- randName (zehaname ++('_':puname))
let pred = cPN (name ++ "interval") noTv
pushAtom $ cImpL noTv pred zeha
case pu of
Just pu -> pushAtom $ cImpL noTv pred pu
Nothing -> pushAtom $ cImpL noTv pred (cPN "PU" noTv)
pure pred
g _ = error "Not implemented g time_interval'"
space :: Syntax Atom
space = mergeMaybe mergePredicatesSeq (oooobm space_offset space_interval)
space_offset :: Syntax Atom
space_offset = general_offset "FAhA" "VA"
general_offset :: String -> String -> Syntax Atom
general_offset dir mag = isoFoldl mergePredicates . inverse cons . mapIso (handle_offset dir mag)
<<< some (oooobm (selmahoPred dir) (selmahoPred mag))
<+> (insert [(Nothing,Nothing)] . ifFlag "WithDefaultTenses")
handle_offset :: String -> String -> SynIso (Maybe Atom,Maybe Atom) Atom
handle_offset dirC magC = Iso f g
where f (mdir,mmag) = do
let dirname = maybe "" nodeName mdir
magname = maybe "" nodeName mmag
name <- randName (dirname ++('_':magname))
let pred = cPN (name ++ "___offset") noTv
case mdir of
Just dir -> pushAtom $ cImpL noTv pred dir
_ -> pushAtom $ cImpL noTv pred (cPN dirC noTv)
case mmag of
Just mag -> pushAtom $ cImpL noTv pred mag
_ -> pushAtom $ cImpL noTv pred (cPN magC noTv)
pure pred
g pred = error $ "Not Implemented g handle_offset"
space_interval :: Syntax Atom
space_interval = mergeMaybe mergePredicates (oooobm space_interval' space_int_prop)
space_interval' :: Syntax Atom
space_interval' = handle_interval
<<< mergeMaybe mergePredicates (oooobm (selmahoPred "VEhA")
(selmahoPred "VIhA"))
&&& optional (selmahoPred "FAhA")
where handle_interval = Iso f g where
f (pred,mfaha) = do
case mfaha of
Just faha -> pushAtom $ cImpL noTv pred faha
Nothing -> pushAtom $ cImpL noTv pred (cPN "FAhA" noTv)
pure pred
g _ = error "Not Implemented"
space_int_prop :: Syntax Atom
space_int_prop = (setFlagIso "FEhE" . sepSelmaho "FEhE") &&> interval_property
interval_property :: Syntax Atom
interval_property = handle <<< handleROI . (number &&& selmahoPred "ROI")
<+> selmahoPred "TAhE"
<+> selmahoPred "ZAhO"
where handle = Iso f g where
f pred = do
flags <- gets sFlags
if "FEhE" `elem` flags
then pushAtom $ cImpL noTv pred (cPN "Spatial" noTv)
else pushAtom $ cImpL noTv pred (cPN "Temporal" noTv)
pushAtom $ cImpL noTv
(cEvalL noTv pred
(cLL [cVN "$1",cVN "$2"]))
(cSubL noTv (cVN "$1") (cVN "$2"))
pure pred
g _ = error "Reverse Interval Property Not implemented."
handleROI = Iso f g where
f (pa,roi) = do
pushAtom $ cImpL noTv
(cEvalL noTv roi
(cLL [cVN "$1",cVN "$2"]))
(Link "SetSizeLink" [cVN "$1",pa] noTv)
pure roi
g roi = error "Handle Roi g not implemented"
-------------------------------------------------------------------------------
--Connective Utils
-------------------------------------------------------------------------------
optBool :: String -> Syntax Bool
optBool s = insert True . mytext s <+> insert False
ekPat :: Syntax String -> Syntax EK
ekPat syn = optBool "na"
&&& optBool "se"
&&& syn
&&& optBool "nai"
_BO :: Syntax JJCTTS
_BO = stag <&& sepSelmaho "BO"
--type EK = (Bool,(Bool,(String,Bool)))
ek :: Syntax EK
ek = ekPat (selmaho "A")
jek :: Syntax EK
jek = ekPat (_JAtoA . selmaho "JA")
gihek :: Syntax EK
gihek = ekPat (_GIhAtoA . selmaho "GIhA")
--data JOIK = JOI (Bool,(String,Bool))
-- | INT (Bool,(String,Bool))
-- | INTGAhO (String,((Bool,(String,Bool)),String))
-- deriving (Show,Eq)
gek :: Syntax (Bool,(String,Bool))
gek = optBool "se" &&& (_GAtoA . selmaho "GA") &&& optBool "nai"
-- <+> joik &&& selmaho "GI"
-- <+> stag &&& gik
guhek :: Syntax (Bool,(String,Bool))
guhek = optBool "se" &&& (_GUhAtoA . selmaho "GUhA") &&& optBool "nai"
gik :: Syntax (Bool)
gik = sepSelmaho"GI" &&> optBool "nai"
-- ( gek / guhek ) a gik a
handleGIK :: SynIso ((Bool,(String,Bool)),(a,(Bool,a))) (Con,(a,a))
handleGIK = Iso f g where
f ((bse,(s,bna)),(bridi1,(bnai,bridi2))) =
pure ((Just $ Right(bna,(bse,(s,bnai))),Nothing),(bridi1,bridi2))
g ((Just (Right (bna,(bse,(s,bnai)))),Nothing),(bridi1,bridi2)) =
pure ((bse,(s,bna)),(bridi1,(bnai,bridi2)))
joik_JOI :: SynIso (Bool,(String,Bool)) JOIK
joik_JOI = Iso f g where
f a = pure $ JOI a
g (JOI a) = pure $ a
g _ = lift $ Left "Not a JOI."
joik_INT :: SynIso (Bool,(String,Bool)) JOIK
joik_INT = Iso f g where
f a = pure $ INT a
g (INT a) = pure $ a
g _ = lift $ Left "Not a INT."
joik_INTGAhO :: SynIso (String,((Bool,(String,Bool)),String)) JOIK
joik_INTGAhO = Iso f g where
f a = pure $ INTGAhO a
g (INTGAhO a) = pure $ a
g _ = lift $ Left "Not a INTGAhO."
joik :: Syntax JOIK
joik = joik_JOI . (optBool "se" &&& selmaho "JOI" &&& optBool "nai")
<+> joik_INT . interval
<+> joik_INTGAhO . (selmaho "GAhO" &&& interval &&& selmaho "GAhO")
where interval = optBool "se"
&&& selmaho "BIhI"
&&& optBool "nai"
joik_jek :: Syntax JOIK_JEK
joik_jek = left . joik <+> right . jek
joik_ek :: Syntax JOIK_EK
joik_ek = left . joik <+> right . ek
handleJJCTTS :: SynIso (JJCTTS,[Atom]) Atom
handleJJCTTS = Iso f g where
f (CTLeaf (pred,Nothing),as) = do
apply (_frames . second (handleTAG . toSumti)) (pred,as)
f (CTLeaf (pred,Just "space_time"),as) = do
apply handleSpaceTime (pred,as)
f (CTNode joik_jek (x1,x2),as) = do
a1 <- f (x1,as)
a2 <- f (x2,as)
case joik_jek of
Right jek -> apply conLink (jek,(a1,a2))
Left joik -> apply handleJOIK (joik,(a1,a2))
g _ = error $ "handleJJCTTS g: not implemented."
toSumti :: SynIso [Atom] [Sumti]
toSumti = mkIso f g where
f = map (\x -> (x,Nothing))
g = map fst
handleSpaceTime :: SynIso (Selbri,[Atom]) Atom
handleSpaceTime = Iso f g where
f ((tv,st),as) = do
case as of
[s] -> do
nctx <- (\x -> cCN x noTv) <$> randName (show s)
addCtx nctx
pure $ cEvalL tv st (cLL [nctx,s])
_ -> pure $ cEvalL tv st (cLL as)
g _ = error $ "Not Implemented g handleSpaceTime"
handleJJCTTS_Selbri :: SynIso (JJCTTS,Atom) Atom
handleJJCTTS_Selbri = Iso f g where
f (CTLeaf ((_tv,pred),Nothing),selb) = do
pushAtom $ cImpL noTv selb pred
pure selb
f (CTLeaf ((_tv,pred),Just "space_time"),selb) = do
atom <- apply handleSpaceTime pred
pushAtom atom
pure selb
f (CTNode joik_jek (x1,x2),s) = do
f (x1,s)
a1 <- popAtom
f (x2,s)
a2 <- popAtom
case joik_jek of
Right jek -> apply conLink (jek,(a1,a2)) >> pure s
Left joik -> apply handleJOIK (joik,(a1,a2)) >> pure s
g _ = error $ "handleJJCTTS g: not implemented."
toSumti :: SynIso [Atom] [Sumti]
toSumti = mkIso f g where
f = map (\x -> (x,Nothing))
g = map fst
handleSpaceTime :: SynIso Atom Atom
handleSpaceTime = Iso f g where
f pred = do
ctx <- gets (head.sCtx)
nctx <- (\x -> cCN x noTv) <$> randName (show ctx)
setPrimaryCtx nctx
pure (cEvalL noTv
pred
(cLL [nctx,ctx])
)
g _ = error "Not Implemented g handleSpaceTime_selbri"
--handleCon connects the Atoms with two possible connectives
--Since both connectives are wrapped in a Maybe, we first lift the Atoms into the Maybe
--Then we fmap the isos for each connective type over the Maybes
--Finally we merge the results together or pick the one that is present
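--Schematically (shapes read off the definitions below, for orientation only):
--  reorder : (Con,(Atom,Atom)) -> (Maybe (joik_ek,(Atom,Atom)), Maybe (jjctts,(Atom,Atom)))
--  handlers: handleJOIK/conLink mapped over the first Maybe, handleJJCTTS over the second
--  merge   : (Just a,Just b) -> AndLink [a,b]; otherwise keep whichever Atom is present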
handleCon2 :: SynIso (Atom,(Con,Atom)) Atom
handleCon2 = handleCon . reorder
where reorder = mkIso f g
f (a1,(con,a2)) = (con,(a1,a2))
g (con,(a1,a2)) = (a1,(con,a2))
handleCon :: SynIso (Con,(Atom,Atom)) Atom
handleCon = merge
. (mapIso handle_joik_ek *** mapIso handle_jjctts)
. reorder
where handle_joik_ek = (handleJOIK ||| conLink) . expandEither
handle_jjctts = handleJJCTTS .> tolist2
reorder = mkIso f g where
f ((s,ts),as) = (eM (s,as),eM (ts,as))
g (Just (s,as) ,Just (ts,_)) = ((Just s,Just ts),as)
g (Nothing ,Just (ts,as)) = ((Nothing,Just ts),as)
g (Just (s,as) ,Nothing) = ((Just s,Nothing),as)
eM (Just a,b) = Just (a,b) --expand Maybe
eM (Nothing,b) = Nothing
toSumti = mkIso f g where
f = map (\x -> (x,Nothing))
g = map fst
merge = mkIso f g where
f (Just a,Just b) = Link "AndLink" [a,b] highTv
f (Nothing,Just b) = b
f (Just a,Nothing) = a
f (Nothing,Nothing) = error "not allowed to happen."
g l@EvalL{} = (Nothing,Just l)
g (AL [a,b@EvalL{}]) = (Just a,Just b)
g l = (Just l,Nothing)
expandEither = mkIso f g where
f (Left a,c) = Left (a,c)
f (Right b,c) = Right (b,c)
g (Left (a,c) ) = (Left a,c)
g (Right (b,c)) = (Right b,c)
handleJOIK :: SynIso (JOIK,(Atom,Atom)) Atom
handleJOIK = Iso f g where
f (JOI (b1,(s,b2)),(a1,a2)) = do
(s,(na1,na2)) <- apply handleEKMods ((b1,(False,(s,b2))),(a1,a2))
pred <- apply implicationOf (cPN s noTv)
name <- randName (show s)
let new = case a1 of
PN _ -> cPN name noTv
_ -> cCN name noTv
selbri = (selbriDefaultTV,pred)
atom <- apply _frames (selbri,[(new,"1"),(na1,"2"),(na2,"3")])
pushAtom atom
pure new
f (INT (b1,(s,b2)),(a1,a2)) = lift $ Left "handleJOIK not implemented"
f (INTGAhO (s1,((b1,(s2,b2)),s3)),(a1,a2)) = lift $ Left "handleJOIK not implemented"
g _ = error "handleJOIK g not implemented"
|
misgeatgit/opencog
|
opencog/nlp/lojban/HaskellLib/src/OpenCog/Lojban/Syntax.hs
|
agpl-3.0
| 64,858
| 1
| 26
| 23,716
| 20,442
| 10,669
| 9,773
| -1
| -1
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-missing-fields #-}
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-----------------------------------------------------------------
-- Autogenerated by Thrift
-- --
-- DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
-- @generated
-----------------------------------------------------------------
module My.Namespacing.Extend.Test.ExtendTestService where
import Prelude ( Bool(..), Enum, Float, IO, Double, String, Maybe(..),
Eq, Show, Ord,
concat, error, fromIntegral, fromEnum, length, map,
maybe, not, null, otherwise, return, show, toEnum,
enumFromTo, Bounded, minBound, maxBound, seq,
(.), (&&), (||), (==), (++), ($), (-), (>>=), (>>))
import qualified Control.Applicative as Applicative (ZipList(..))
import Control.Applicative ( (<*>) )
import qualified Control.DeepSeq as DeepSeq
import qualified Control.Exception as Exception
import qualified Control.Monad as Monad ( liftM, ap, when )
import qualified Data.ByteString.Lazy as BS
import Data.Functor ( (<$>) )
import qualified Data.Hashable as Hashable
import qualified Data.Int as Int
import qualified Data.Maybe as Maybe (catMaybes)
import qualified Data.Text.Lazy.Encoding as Encoding ( decodeUtf8, encodeUtf8 )
import qualified Data.Text.Lazy as LT
import qualified Data.Typeable as Typeable ( Typeable )
import qualified Data.HashMap.Strict as Map
import qualified Data.HashSet as Set
import qualified Data.Vector as Vector
import qualified Test.QuickCheck.Arbitrary as Arbitrary ( Arbitrary(..) )
import qualified Test.QuickCheck as QuickCheck ( elements )
import qualified Thrift
import qualified Thrift.Types as Types
import qualified Thrift.Serializable as Serializable
import qualified Thrift.Arbitraries as Arbitraries
import qualified My.Namespacing.Test.Hsmodule_Types as Hsmodule_Types
import qualified My.Namespacing.Test.HsTestService
import qualified My.Namespacing.Extend.Test.Extend_Types
import qualified My.Namespacing.Extend.Test.ExtendTestService_Iface as Iface
-- HELPER FUNCTIONS AND STRUCTURES --
data Check_args = Check_args
{ check_args_struct1 :: Hsmodule_Types.HsFoo
} deriving (Show,Eq,Typeable.Typeable)
instance Serializable.ThriftSerializable Check_args where
encode = encode_Check_args
decode = decode_Check_args
instance Hashable.Hashable Check_args where
hashWithSalt salt record = salt `Hashable.hashWithSalt` check_args_struct1 record
instance DeepSeq.NFData Check_args where
rnf _record0 =
DeepSeq.rnf (check_args_struct1 _record0) `seq`
()
instance Arbitrary.Arbitrary Check_args where
arbitrary = Monad.liftM Check_args (Arbitrary.arbitrary)
shrink obj | obj == default_Check_args = []
| otherwise = Maybe.catMaybes
[ if obj == default_Check_args{check_args_struct1 = check_args_struct1 obj} then Nothing else Just $ default_Check_args{check_args_struct1 = check_args_struct1 obj}
]
from_Check_args :: Check_args -> Types.ThriftVal
from_Check_args record = Types.TStruct $ Map.fromList $ Maybe.catMaybes
[ (\_v3 -> Just (1, ("struct1",Hsmodule_Types.from_HsFoo _v3))) $ check_args_struct1 record
]
write_Check_args :: (Thrift.Protocol p, Thrift.Transport t) => p t -> Check_args -> IO ()
write_Check_args oprot record = Thrift.writeVal oprot $ from_Check_args record
encode_Check_args :: (Thrift.Protocol p, Thrift.Transport t) => p t -> Check_args -> BS.ByteString
encode_Check_args oprot record = Thrift.serializeVal oprot $ from_Check_args record
to_Check_args :: Types.ThriftVal -> Check_args
to_Check_args (Types.TStruct fields) = Check_args{
check_args_struct1 = maybe (check_args_struct1 default_Check_args) (\(_,_val5) -> (case _val5 of {Types.TStruct _val6 -> (Hsmodule_Types.to_HsFoo (Types.TStruct _val6)); _ -> error "wrong type"})) (Map.lookup (1) fields)
}
to_Check_args _ = error "not a struct"
read_Check_args :: (Thrift.Transport t, Thrift.Protocol p) => p t -> IO Check_args
read_Check_args iprot = to_Check_args <$> Thrift.readVal iprot (Types.T_STRUCT typemap_Check_args)
decode_Check_args :: (Thrift.Protocol p, Thrift.Transport t) => p t -> BS.ByteString -> Check_args
decode_Check_args iprot bs = to_Check_args $ Thrift.deserializeVal iprot (Types.T_STRUCT typemap_Check_args) bs
typemap_Check_args :: Types.TypeMap
typemap_Check_args = Map.fromList [("struct1",(1,(Types.T_STRUCT Hsmodule_Types.typemap_HsFoo)))]
default_Check_args :: Check_args
default_Check_args = Check_args{
check_args_struct1 = Hsmodule_Types.default_HsFoo}
data Check_result = Check_result
{ check_result_success :: Bool
} deriving (Show,Eq,Typeable.Typeable)
instance Serializable.ThriftSerializable Check_result where
encode = encode_Check_result
decode = decode_Check_result
instance Hashable.Hashable Check_result where
hashWithSalt salt record = salt `Hashable.hashWithSalt` check_result_success record
instance DeepSeq.NFData Check_result where
rnf _record7 =
DeepSeq.rnf (check_result_success _record7) `seq`
()
instance Arbitrary.Arbitrary Check_result where
arbitrary = Monad.liftM Check_result (Arbitrary.arbitrary)
shrink obj | obj == default_Check_result = []
| otherwise = Maybe.catMaybes
[ if obj == default_Check_result{check_result_success = check_result_success obj} then Nothing else Just $ default_Check_result{check_result_success = check_result_success obj}
]
from_Check_result :: Check_result -> Types.ThriftVal
from_Check_result record = Types.TStruct $ Map.fromList $ Maybe.catMaybes
[ (\_v10 -> Just (0, ("success",Types.TBool _v10))) $ check_result_success record
]
write_Check_result :: (Thrift.Protocol p, Thrift.Transport t) => p t -> Check_result -> IO ()
write_Check_result oprot record = Thrift.writeVal oprot $ from_Check_result record
encode_Check_result :: (Thrift.Protocol p, Thrift.Transport t) => p t -> Check_result -> BS.ByteString
encode_Check_result oprot record = Thrift.serializeVal oprot $ from_Check_result record
to_Check_result :: Types.ThriftVal -> Check_result
to_Check_result (Types.TStruct fields) = Check_result{
check_result_success = maybe (check_result_success default_Check_result) (\(_,_val12) -> (case _val12 of {Types.TBool _val13 -> _val13; _ -> error "wrong type"})) (Map.lookup (0) fields)
}
to_Check_result _ = error "not a struct"
read_Check_result :: (Thrift.Transport t, Thrift.Protocol p) => p t -> IO Check_result
read_Check_result iprot = to_Check_result <$> Thrift.readVal iprot (Types.T_STRUCT typemap_Check_result)
decode_Check_result :: (Thrift.Protocol p, Thrift.Transport t) => p t -> BS.ByteString -> Check_result
decode_Check_result iprot bs = to_Check_result $ Thrift.deserializeVal iprot (Types.T_STRUCT typemap_Check_result) bs
typemap_Check_result :: Types.TypeMap
typemap_Check_result = Map.fromList [("success",(0,Types.T_BOOL))]
default_Check_result :: Check_result
default_Check_result = Check_result{
check_result_success = False}
process_check (seqid, iprot, oprot, handler) = do
args <- ExtendTestService.read_Check_args iprot
(Exception.catch
(do
val <- Iface.check handler (check_args_struct1 args)
let res = default_Check_result{check_result_success = val}
Thrift.writeMessage oprot ("check", Types.M_REPLY, seqid) $
write_Check_result oprot res
Thrift.tFlush (Thrift.getTransport oprot))
((\_ -> do
Thrift.writeMessage oprot ("check", Types.M_EXCEPTION, seqid) $
Thrift.writeAppExn oprot (Thrift.AppExn Thrift.AE_UNKNOWN "")
Thrift.tFlush (Thrift.getTransport oprot)) :: Exception.SomeException -> IO ()))
proc_ handler (iprot,oprot) (name,typ,seqid) = case name of
"check" -> process_check (seqid,iprot,oprot,handler)
_ -> My.Namespacing.Test.HsTestService.proc_ handler (iprot,oprot) (name,typ,seqid)
process handler (iprot, oprot) =
Thrift.readMessage iprot (proc_ handler (iprot,oprot)) >> return True
|
Orvid/fbthrift
|
thrift/compiler/test/fixtures/namespace/gen-hs/My/Namespacing/Extend/Test/ExtendTestService.hs
|
apache-2.0
| 8,193
| 0
| 18
| 1,191
| 2,242
| 1,261
| 981
| 132
| 2
|
{-# LANGUAGE ScopedTypeVariables, RecursiveDo #-}
import Data.Char
import System.Environment
import Control.Applicative
import Text.Earley
data Expr
= Expr :+: Expr
| Expr :*: Expr
| Var String
| Lit Int
deriving (Show)
grammar :: forall r. Grammar r (Prod r String Char Expr)
grammar = mdo
whitespace <- rule $ many $ satisfy isSpace
let tok :: Prod r String Char a -> Prod r String Char a
tok p = whitespace *> p
sym x = tok $ token x <?> [x]
ident = tok $ (:) <$> satisfy isAlpha <*> many (satisfy isAlphaNum) <?> "identifier"
num = tok $ some (satisfy isDigit) <?> "number"
expr0 <- rule
$ (Lit . read) <$> num
<|> Var <$> ident
<|> sym '(' *> expr2 <* sym ')'
expr1 <- rule
$ (:*:) <$> expr1 <* sym '*' <*> expr0
<|> expr0
expr2 <- rule
$ (:+:) <$> expr2 <* sym '+' <*> expr1
<|> expr1
return $ expr2 <* whitespace
main :: IO ()
main = do
x:_ <- getArgs
print $ fullParses (parser grammar) x
|
sboosali/Earley
|
examples/Expr2.hs
|
bsd-3-clause
| 997
| 0
| 16
| 275
| 396
| 199
| 197
| 34
| 1
|
-- |
-- Module: Data.Aeson
-- Copyright: (c) 2011-2015 Bryan O'Sullivan
-- (c) 2011 MailRank, Inc.
-- License: Apache
-- Maintainer: Bryan O'Sullivan <bos@serpentine.com>
-- Stability: experimental
-- Portability: portable
--
-- Types and functions for working efficiently with JSON data.
--
-- (A note on naming: in Greek mythology, Aeson was the father of Jason.)
module Data.Aeson
(
-- * How to use this library
-- $use
-- ** Writing instances by hand
-- $manual
-- ** Working with the AST
-- $ast
-- ** Decoding to a Haskell value
-- $haskell
-- ** Decoding a mixed-type object
-- $mixed
-- * Encoding and decoding
-- $encoding_and_decoding
decode
, decode'
, eitherDecode
, eitherDecode'
, encode
-- ** Variants for strict bytestrings
, decodeStrict
, decodeStrict'
, eitherDecodeStrict
, eitherDecodeStrict'
-- * Core JSON types
, Value(..)
, Encoding
, fromEncoding
, Array
, Object
-- * Convenience types
, DotNetTime(..)
-- * Type conversion
, FromJSON(..)
, Result(..)
, fromJSON
, ToJSON(..)
, KeyValue(..)
-- ** Generic JSON classes
, GFromJSON(..)
, GToJSON(..)
, genericToJSON
, genericParseJSON
-- * Inspecting @'Value's@
, withObject
, withText
, withArray
, withNumber
, withScientific
, withBool
-- * Constructors and accessors
, Series
, series
, foldable
, (.:)
, (.:?)
, (.!=)
, object
-- * Parsing
, json
, json'
) where
import Data.Aeson.Encode (encode)
import Data.Aeson.Parser.Internal (decodeWith, decodeStrictWith,
eitherDecodeWith, eitherDecodeStrictWith,
jsonEOF, json, jsonEOF', json')
import Data.Aeson.Types
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as L
-- | Efficiently deserialize a JSON value from a lazy 'L.ByteString'.
-- If this fails due to incomplete or invalid input, 'Nothing' is
-- returned.
--
-- The input must consist solely of a JSON document, with no trailing
-- data except for whitespace.
--
-- This function parses immediately, but defers conversion. See
-- 'json' for details.
decode :: (FromJSON a) => L.ByteString -> Maybe a
decode = decodeWith jsonEOF fromJSON
{-# INLINE decode #-}
-- | Efficiently deserialize a JSON value from a strict 'B.ByteString'.
-- If this fails due to incomplete or invalid input, 'Nothing' is
-- returned.
--
-- The input must consist solely of a JSON document, with no trailing
-- data except for whitespace.
--
-- This function parses immediately, but defers conversion. See
-- 'json' for details.
decodeStrict :: (FromJSON a) => B.ByteString -> Maybe a
decodeStrict = decodeStrictWith jsonEOF fromJSON
{-# INLINE decodeStrict #-}
-- | Efficiently deserialize a JSON value from a lazy 'L.ByteString'.
-- If this fails due to incomplete or invalid input, 'Nothing' is
-- returned.
--
-- The input must consist solely of a JSON document, with no trailing
-- data except for whitespace. This restriction is necessary to ensure
-- that if data is being lazily read from a file handle, the file
-- handle will be closed in a timely fashion once the document has
-- been parsed.
--
-- This function parses and performs conversion immediately. See
-- 'json'' for details.
decode' :: (FromJSON a) => L.ByteString -> Maybe a
decode' = decodeWith jsonEOF' fromJSON
{-# INLINE decode' #-}
-- | Efficiently deserialize a JSON value from a lazy 'L.ByteString'.
-- If this fails due to incomplete or invalid input, 'Nothing' is
-- returned.
--
-- The input must consist solely of a JSON document, with no trailing
-- data except for whitespace.
--
-- This function parses and performs conversion immediately. See
-- 'json'' for details.
decodeStrict' :: (FromJSON a) => B.ByteString -> Maybe a
decodeStrict' = decodeStrictWith jsonEOF' fromJSON
{-# INLINE decodeStrict' #-}
eitherFormatError :: Either (JSONPath, String) a -> Either String a
eitherFormatError = either (Left . uncurry formatError) Right
{-# INLINE eitherFormatError #-}
-- | Like 'decode' but returns an error message when decoding fails.
eitherDecode :: (FromJSON a) => L.ByteString -> Either String a
eitherDecode = eitherFormatError . eitherDecodeWith jsonEOF fromJSON
{-# INLINE eitherDecode #-}
-- | Like 'decodeStrict' but returns an error message when decoding fails.
eitherDecodeStrict :: (FromJSON a) => B.ByteString -> Either String a
eitherDecodeStrict = eitherFormatError . eitherDecodeStrictWith jsonEOF fromJSON
{-# INLINE eitherDecodeStrict #-}
-- | Like 'decode'' but returns an error message when decoding fails.
eitherDecode' :: (FromJSON a) => L.ByteString -> Either String a
eitherDecode' = eitherFormatError . eitherDecodeWith jsonEOF' fromJSON
{-# INLINE eitherDecode' #-}
-- | Like 'decodeStrict'' but returns an error message when decoding fails.
eitherDecodeStrict' :: (FromJSON a) => B.ByteString -> Either String a
eitherDecodeStrict' = eitherFormatError . eitherDecodeStrictWith jsonEOF' fromJSON
{-# INLINE eitherDecodeStrict' #-}
-- $use
--
-- This section contains basic information on the different ways to
-- work with data using this library. These range from simple but
-- inflexible, to complex but flexible.
--
-- The most common way to use the library is to define a data type,
-- corresponding to some JSON data you want to work with, and then
-- write either a 'FromJSON' instance, a 'ToJSON' instance, or both
-- for that type.
--
-- For example, given this JSON data:
--
-- > { "name": "Joe", "age": 12 }
--
-- we create a matching data type:
--
-- > {-# LANGUAGE DeriveGeneric #-}
-- >
-- > import GHC.Generics
-- >
-- > data Person = Person {
-- > name :: Text
-- > , age :: Int
-- > } deriving (Generic, Show)
--
-- The @LANGUAGE@ pragma and 'Generic' instance let us write empty
-- 'FromJSON' and 'ToJSON' instances for which the compiler will
-- generate sensible default implementations.
--
-- > instance ToJSON Person
-- > instance FromJSON Person
--
-- We can now encode a value like so:
--
-- > >>> encode (Person {name = "Joe", age = 12})
-- > "{\"name\":\"Joe\",\"age\":12}"
-- $manual
--
-- When necessary, we can write 'ToJSON' and 'FromJSON' instances by
-- hand. This is valuable when the JSON-on-the-wire and Haskell data
-- are different or otherwise need some more carefully managed
-- translation. Let's revisit our JSON data:
--
-- > { "name": "Joe", "age": 12 }
--
-- We once again create a matching data type, without bothering to add
-- a 'Generic' instance this time:
--
-- > data Person = Person {
-- > name :: Text
-- > , age :: Int
-- > } deriving Show
--
-- To decode data, we need to define a 'FromJSON' instance:
--
-- > {-# LANGUAGE OverloadedStrings #-}
-- >
-- > instance FromJSON Person where
-- > parseJSON (Object v) = Person <$>
-- > v .: "name" <*>
-- > v .: "age"
-- > -- A non-Object value is of the wrong type, so fail.
-- > parseJSON _ = empty
--
-- We can now parse the JSON data like so:
--
-- > >>> decode "{\"name\":\"Joe\",\"age\":12}" :: Maybe Person
-- > Just (Person {name = "Joe", age = 12})
--
-- To encode data, we need to define a 'ToJSON' instance:
--
-- > instance ToJSON Person where
-- > -- this generates a Value
-- > toJSON (Person name age) =
-- > object ["name" .= name, "age" .= age]
-- >
-- > -- this encodes directly to a ByteString Builder
-- > toEncoding (Person name age) =
-- > series $ "name" .= name <> "age" .= age
--
-- We can now encode a value like so:
--
-- > >>> encode (Person {name = "Joe", age = 12})
-- > "{\"name\":\"Joe\",\"age\":12}"
--
-- There are predefined 'FromJSON' and 'ToJSON' instances for many
-- types. Here's an example using lists and 'Int's:
--
-- > >>> decode "[1,2,3]" :: Maybe [Int]
-- > Just [1,2,3]
--
-- And here's an example using the 'Data.Map.Map' type to get a map of
-- 'Int's.
--
-- > >>> decode "{\"foo\":1,\"bar\":2}" :: Maybe (Map String Int)
-- > Just (fromList [("bar",2),("foo",1)])
-- While the notes below focus on decoding, you can apply almost the
-- same techniques to /encoding/ data. (The main difference is that
-- encoding always succeeds, but decoding has to handle the
-- possibility of failure, where an input doesn't match our
-- expectations.)
--
-- See the documentation of 'FromJSON' and 'ToJSON' for some examples
-- of how you can automatically derive instances in many common
-- circumstances.
-- $ast
--
-- Sometimes you want to work with JSON data directly, without first
-- converting it to a custom data type. This can be useful if you want
-- to e.g. convert JSON data to YAML data, without knowing what the
-- contents of the original JSON data was. The 'Value' type, which is
-- an instance of 'FromJSON', is used to represent an arbitrary JSON
-- AST (abstract syntax tree). Example usage:
--
-- > >>> decode "{\"foo\": 123}" :: Maybe Value
-- > Just (Object (fromList [("foo",Number 123)]))
--
-- > >>> decode "{\"foo\": [\"abc\",\"def\"]}" :: Maybe Value
-- > Just (Object (fromList [("foo",Array (fromList [String "abc",String "def"]))]))
--
-- Once you have a 'Value' you can write functions to traverse it and
-- make arbitrary transformations.
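--
-- For instance, a minimal sketch of such a traversal (a hypothetical helper;
-- it assumes this aeson version's 'Object' is a "Data.HashMap.Strict" map and
-- 'Array' a "Data.Vector" vector):
--
-- > import qualified Data.HashMap.Strict as HM
-- > import qualified Data.Text as T
-- > import qualified Data.Vector as V
-- >
-- > upperStrings :: Value -> Value
-- > upperStrings (String t) = String (T.toUpper t)
-- > upperStrings (Object o) = Object (HM.map upperStrings o)
-- > upperStrings (Array a)  = Array (V.map upperStrings a)
-- > upperStrings v          = v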
-- $haskell
--
-- We can decode to any instance of 'FromJSON':
--
-- > λ> decode "[1,2,3]" :: Maybe [Int]
-- > Just [1,2,3]
--
-- Alternatively, there are instances for standard data types, so you
-- can use them directly. For example, use the 'Data.Map.Map' type to
-- get a map of 'Int's.
--
-- > λ> import Data.Map
-- > λ> decode "{\"foo\":1,\"bar\":2}" :: Maybe (Map String Int)
-- > Just (fromList [("bar",2),("foo",1)])
-- $mixed
--
-- The above approach with maps of course will not work for mixed-type
-- objects that don't follow a strict schema, but there are a couple
-- of approaches available for these.
--
-- The 'Object' type contains JSON objects:
--
-- > λ> decode "{\"name\":\"Dave\",\"age\":2}" :: Maybe Object
-- > Just (fromList [("name",String "Dave"),("age",Number 2)])
--
-- You can extract values from it with a parser using 'parse',
-- 'parseEither' or, in this example, 'parseMaybe':
--
-- > λ> do result <- decode "{\"name\":\"Dave\",\"age\":2}"
-- > flip parseMaybe result $ \obj -> do
-- > age <- obj .: "age"
-- > name <- obj .: "name"
-- > return (name ++ ": " ++ show (age*2))
-- >
-- > Just "Dave: 4"
--
-- Considering that any type that implements 'FromJSON' can be used
-- here, this is quite a powerful way to parse JSON. See the
-- documentation in 'FromJSON' for how to implement this class for
-- your own data types.
--
-- The downside is that you have to write the parser yourself; the
-- upside is that you have complete control over the way the JSON is
-- parsed.
-- $encoding_and_decoding
--
-- Decoding is a two-step process.
--
-- * When decoding a value, the bytes are first converted to a 'Value',
-- then the 'FromJSON' class is used to convert that 'Value' to the
-- desired type.
--
-- There are two ways to encode a value.
--
-- * Convert to a 'Value' using 'toJSON', then possibly further
-- encode. This was the only method available in aeson 0.9 and
-- earlier.
--
-- * Directly encode (to what will become a 'L.ByteString') using
-- 'toEncoding'. This is much more efficient (about 3x faster, and
-- less memory intensive besides), but is only available in aeson
-- 0.10 and newer.
--
-- For convenience, the 'encode' and 'decode' functions combine both
-- steps.
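--
-- For illustration, the stepwise versions can be written by hand from the
-- pieces exported above (hypothetical helpers, not the library's actual
-- definitions of 'encode' and 'decode'):
--
-- > encodeViaValue :: ToJSON a => a -> L.ByteString
-- > encodeViaValue = encode . toJSON        -- explicit detour through 'Value'
-- >
-- > decodeViaValue :: FromJSON a => L.ByteString -> Maybe a
-- > decodeViaValue bs = do
-- >   v <- decode bs                        -- bytes to 'Value'
-- >   case fromJSON v of                    -- 'Value' to the target type
-- >     Success a -> Just a
-- >     Error _   -> Nothing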
|
plaprade/aeson
|
Data/Aeson.hs
|
bsd-3-clause
| 11,699
| 0
| 8
| 2,460
| 835
| 602
| 233
| 75
| 1
|
module Main where
import Data.Maybe
import Control.Monad
import qualified Data.ByteString.Lazy.Char8 as C
import Network.Socket hiding (send, recv)
import Network.Socket.ByteString.Lazy
import Data.Int
import Data.Binary.Get
import Data.Binary.Put
import Debug.Trace
import Data.NineP
connector :: IO Socket
connector = withSocketsDo $
do
ainfo <- getAddrInfo Nothing (Just "127.0.0.1") (Just "6872")
let a = head ainfo
sock <- socket AF_INET Stream defaultProtocol
putStrLn "Trying to connect"
connect sock (addrAddress (traceShow a a))
putStrLn "connected!"
let version = Msg TTversion (-1) $ Tversion 1024 "9P2000"
putStrLn $ "About to send: " ++ show version
send sock $ runPut (put version)
putStrLn "Getting response"
msg <- recv sock 50
let response = runGet get msg ::Msg
putStrLn $ show response
return sock
main :: IO ()
main = void connector
|
l29ah/9ph
|
test9p.hs
|
bsd-3-clause
| 1,676
| 0
| 14
| 552
| 498
| 235
| 263
| 44
| 1
|
{-# OPTIONS_GHC -fno-warn-redundant-constraints #-}
{-# LANGUAGE TypeFamilies #-}
module T4200 where
class C a where
type In a :: *
op :: In a -> Int
-- Should be ok; no -XUndecidableInstances required
instance (In c ~ Int) => C [c] where
type In [c] = In c
op x = 3
|
green-haskell/ghc
|
testsuite/tests/indexed-types/should_compile/T4200.hs
|
bsd-3-clause
| 279
| 0
| 8
| 64
| 83
| 46
| 37
| 9
| 0
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.Route53.GetHostedZoneCount
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | To retrieve a count of all your hosted zones, send a 'GET' request to the '2013-04-01/hostedzonecount' resource.
--
-- <http://docs.aws.amazon.com/Route53/latest/APIReference/API_GetHostedZoneCount.html>
module Network.AWS.Route53.GetHostedZoneCount
(
-- * Request
GetHostedZoneCount
-- ** Request constructor
, getHostedZoneCount
-- * Response
, GetHostedZoneCountResponse
-- ** Response constructor
, getHostedZoneCountResponse
-- ** Response lenses
, ghzcrHostedZoneCount
) where
import Network.AWS.Prelude
import Network.AWS.Request.RestXML
import Network.AWS.Route53.Types
import qualified GHC.Exts
data GetHostedZoneCount = GetHostedZoneCount
deriving (Eq, Ord, Read, Show, Generic)
-- | 'GetHostedZoneCount' constructor.
getHostedZoneCount :: GetHostedZoneCount
getHostedZoneCount = GetHostedZoneCount
newtype GetHostedZoneCountResponse = GetHostedZoneCountResponse
{ _ghzcrHostedZoneCount :: Integer
} deriving (Eq, Ord, Read, Show, Enum, Num, Integral, Real)
-- | 'GetHostedZoneCountResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'ghzcrHostedZoneCount' @::@ 'Integer'
--
getHostedZoneCountResponse :: Integer -- ^ 'ghzcrHostedZoneCount'
-> GetHostedZoneCountResponse
getHostedZoneCountResponse p1 = GetHostedZoneCountResponse
{ _ghzcrHostedZoneCount = p1
}
-- | The number of hosted zones associated with the current AWS account.
ghzcrHostedZoneCount :: Lens' GetHostedZoneCountResponse Integer
ghzcrHostedZoneCount =
lens _ghzcrHostedZoneCount (\s a -> s { _ghzcrHostedZoneCount = a })
instance ToPath GetHostedZoneCount where
toPath = const "/2013-04-01/hostedzonecount"
instance ToQuery GetHostedZoneCount where
toQuery = const mempty
instance ToHeaders GetHostedZoneCount
instance ToXMLRoot GetHostedZoneCount where
toXMLRoot = const (namespaced ns "GetHostedZoneCount" [])
instance ToXML GetHostedZoneCount
instance AWSRequest GetHostedZoneCount where
type Sv GetHostedZoneCount = Route53
type Rs GetHostedZoneCount = GetHostedZoneCountResponse
request = get
response = xmlResponse
instance FromXML GetHostedZoneCountResponse where
parseXML x = GetHostedZoneCountResponse
<$> x .@ "HostedZoneCount"
|
kim/amazonka
|
amazonka-route53/gen/Network/AWS/Route53/GetHostedZoneCount.hs
|
mpl-2.0
| 3,361
| 0
| 9
| 678
| 374
| 225
| 149
| 51
| 1
|
{-# LANGUAGE CPP
, DeriveDataTypeable
, FlexibleInstances
, MultiParamTypeClasses
, TypeFamilies
, Rank2Types
, BangPatterns
#-}
-- |
-- Module : Data.Vector
-- Copyright : (c) Roman Leshchinskiy 2008-2010
-- License : BSD-style
--
-- Maintainer : Roman Leshchinskiy <rl@cse.unsw.edu.au>
-- Stability : experimental
-- Portability : non-portable
--
-- A library for boxed vectors (that is, polymorphic arrays capable of
-- holding any Haskell value). The vectors come in two flavours:
--
-- * mutable
--
-- * immutable
--
-- and support a rich interface of both list-like operations, and bulk
-- array operations.
--
-- For unboxed arrays, use "Data.Vector.Unboxed"
--
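-- A small usage sketch (qualified import assumed; only functions exported
-- below are used):
--
-- > import qualified Data.Vector as V
-- >
-- > sumOfSquares :: Int
-- > sumOfSquares = V.sum (V.generate 10 (\i -> i * i))
--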
module Data.Vector (
-- * Boxed vectors
Vector, MVector,
-- * Accessors
-- ** Length information
length, null,
-- ** Indexing
(!), (!?), head, last,
unsafeIndex, unsafeHead, unsafeLast,
-- ** Monadic indexing
indexM, headM, lastM,
unsafeIndexM, unsafeHeadM, unsafeLastM,
-- ** Extracting subvectors (slicing)
slice, init, tail, take, drop, splitAt,
unsafeSlice, unsafeInit, unsafeTail, unsafeTake, unsafeDrop,
-- * Construction
-- ** Initialisation
empty, singleton, replicate, generate, iterateN,
-- ** Monadic initialisation
replicateM, generateM, create,
-- ** Unfolding
unfoldr, unfoldrN,
constructN, constructrN,
-- ** Enumeration
enumFromN, enumFromStepN, enumFromTo, enumFromThenTo,
-- ** Concatenation
cons, snoc, (++), concat,
-- ** Restricting memory usage
force,
-- * Modifying vectors
-- ** Bulk updates
(//), update, update_,
unsafeUpd, unsafeUpdate, unsafeUpdate_,
-- ** Accumulations
accum, accumulate, accumulate_,
unsafeAccum, unsafeAccumulate, unsafeAccumulate_,
-- ** Permutations
reverse, backpermute, unsafeBackpermute,
-- ** Safe destructive updates
modify,
-- * Elementwise operations
-- ** Indexing
indexed,
-- ** Mapping
map, imap, concatMap,
-- ** Monadic mapping
mapM, imapM, mapM_, imapM_, forM, forM_,
-- ** Zipping
zipWith, zipWith3, zipWith4, zipWith5, zipWith6,
izipWith, izipWith3, izipWith4, izipWith5, izipWith6,
zip, zip3, zip4, zip5, zip6,
-- ** Monadic zipping
zipWithM, izipWithM, zipWithM_, izipWithM_,
-- ** Unzipping
unzip, unzip3, unzip4, unzip5, unzip6,
-- * Working with predicates
-- ** Filtering
filter, ifilter, filterM,
takeWhile, dropWhile,
-- ** Partitioning
partition, unstablePartition, span, break,
-- ** Searching
elem, notElem, find, findIndex, findIndices, elemIndex, elemIndices,
-- * Folding
foldl, foldl1, foldl', foldl1', foldr, foldr1, foldr', foldr1',
ifoldl, ifoldl', ifoldr, ifoldr',
-- ** Specialised folds
all, any, and, or,
sum, product,
maximum, maximumBy, minimum, minimumBy,
minIndex, minIndexBy, maxIndex, maxIndexBy,
-- ** Monadic folds
foldM, ifoldM, foldM', ifoldM',
fold1M, fold1M',foldM_, ifoldM_,
foldM'_, ifoldM'_, fold1M_, fold1M'_,
-- ** Monadic sequencing
sequence, sequence_,
-- * Prefix sums (scans)
prescanl, prescanl',
postscanl, postscanl',
scanl, scanl', scanl1, scanl1',
prescanr, prescanr',
postscanr, postscanr',
scanr, scanr', scanr1, scanr1',
-- * Conversions
-- ** Lists
toList, fromList, fromListN,
-- ** Other vector types
G.convert,
-- ** Mutable vectors
freeze, thaw, copy, unsafeFreeze, unsafeThaw, unsafeCopy
) where
import qualified Data.Vector.Generic as G
import Data.Vector.Mutable ( MVector(..) )
import Data.Primitive.Array
import qualified Data.Vector.Fusion.Bundle as Bundle
import Control.DeepSeq ( NFData, rnf )
import Control.Monad ( MonadPlus(..), liftM, ap )
import Control.Monad.ST ( ST )
import Control.Monad.Primitive
import Prelude hiding ( length, null,
replicate, (++), concat,
head, last,
init, tail, take, drop, splitAt, reverse,
map, concatMap,
zipWith, zipWith3, zip, zip3, unzip, unzip3,
filter, takeWhile, dropWhile, span, break,
elem, notElem,
foldl, foldl1, foldr, foldr1,
all, any, and, or, sum, product, minimum, maximum,
scanl, scanl1, scanr, scanr1,
enumFromTo, enumFromThenTo,
mapM, mapM_, sequence, sequence_ )
import Data.Typeable ( Typeable )
import Data.Data ( Data(..) )
import Text.Read ( Read(..), readListPrecDefault )
import Data.Monoid ( Monoid(..) )
import qualified Control.Applicative as Applicative
import qualified Data.Foldable as Foldable
import qualified Data.Traversable as Traversable
#if __GLASGOW_HASKELL__ >= 708
import qualified GHC.Exts as Exts (IsList(..))
#endif
-- | Boxed vectors, supporting efficient slicing.
data Vector a = Vector {-# UNPACK #-} !Int
{-# UNPACK #-} !Int
{-# UNPACK #-} !(Array a)
deriving ( Typeable )
instance NFData a => NFData (Vector a) where
rnf (Vector i n arr) = rnfAll i
where
        rnfAll ix | ix < i+n  = rnf (indexArray arr ix) `seq` rnfAll (ix+1)
                  | otherwise = ()
instance Show a => Show (Vector a) where
showsPrec = G.showsPrec
instance Read a => Read (Vector a) where
readPrec = G.readPrec
readListPrec = readListPrecDefault
#if __GLASGOW_HASKELL__ >= 708
instance Exts.IsList (Vector a) where
type Item (Vector a) = a
fromList = fromList
fromListN = fromListN
toList = toList
#endif
instance Data a => Data (Vector a) where
gfoldl = G.gfoldl
toConstr _ = error "toConstr"
gunfold _ _ = error "gunfold"
dataTypeOf _ = G.mkType "Data.Vector.Vector"
dataCast1 = G.dataCast
type instance G.Mutable Vector = MVector
instance G.Vector Vector a where
{-# INLINE basicUnsafeFreeze #-}
basicUnsafeFreeze (MVector i n marr)
= Vector i n `liftM` unsafeFreezeArray marr
{-# INLINE basicUnsafeThaw #-}
basicUnsafeThaw (Vector i n arr)
= MVector i n `liftM` unsafeThawArray arr
{-# INLINE basicLength #-}
basicLength (Vector _ n _) = n
{-# INLINE basicUnsafeSlice #-}
basicUnsafeSlice j n (Vector i _ arr) = Vector (i+j) n arr
{-# INLINE basicUnsafeIndexM #-}
basicUnsafeIndexM (Vector i _ arr) j = indexArrayM arr (i+j)
{-# INLINE basicUnsafeCopy #-}
basicUnsafeCopy (MVector i n dst) (Vector j _ src)
= copyArray dst i src j n
-- See http://trac.haskell.org/vector/ticket/12
instance Eq a => Eq (Vector a) where
{-# INLINE (==) #-}
xs == ys = Bundle.eq (G.stream xs) (G.stream ys)
{-# INLINE (/=) #-}
xs /= ys = not (Bundle.eq (G.stream xs) (G.stream ys))
-- See http://trac.haskell.org/vector/ticket/12
instance Ord a => Ord (Vector a) where
{-# INLINE compare #-}
compare xs ys = Bundle.cmp (G.stream xs) (G.stream ys)
{-# INLINE (<) #-}
xs < ys = Bundle.cmp (G.stream xs) (G.stream ys) == LT
{-# INLINE (<=) #-}
xs <= ys = Bundle.cmp (G.stream xs) (G.stream ys) /= GT
{-# INLINE (>) #-}
xs > ys = Bundle.cmp (G.stream xs) (G.stream ys) == GT
{-# INLINE (>=) #-}
xs >= ys = Bundle.cmp (G.stream xs) (G.stream ys) /= LT
instance Monoid (Vector a) where
{-# INLINE mempty #-}
mempty = empty
{-# INLINE mappend #-}
mappend = (++)
{-# INLINE mconcat #-}
mconcat = concat
instance Functor Vector where
{-# INLINE fmap #-}
fmap = map
instance Monad Vector where
{-# INLINE return #-}
return = singleton
{-# INLINE (>>=) #-}
(>>=) = flip concatMap
{-# INLINE fail #-}
fail _ = empty
instance MonadPlus Vector where
{-# INLINE mzero #-}
mzero = empty
{-# INLINE mplus #-}
mplus = (++)
instance Applicative.Applicative Vector where
{-# INLINE pure #-}
pure = singleton
{-# INLINE (<*>) #-}
(<*>) = ap
instance Applicative.Alternative Vector where
{-# INLINE empty #-}
empty = empty
{-# INLINE (<|>) #-}
(<|>) = (++)
instance Foldable.Foldable Vector where
{-# INLINE foldr #-}
foldr = foldr
{-# INLINE foldl #-}
foldl = foldl
{-# INLINE foldr1 #-}
foldr1 = foldr1
{-# INLINE foldl1 #-}
foldl1 = foldl1
instance Traversable.Traversable Vector where
{-# INLINE traverse #-}
traverse f xs = fromList Applicative.<$> Traversable.traverse f (toList xs)
{-# INLINE mapM #-}
mapM = mapM
{-# INLINE sequence #-}
sequence = sequence
-- Length information
-- ------------------
-- | /O(1)/ Yield the length of the vector.
length :: Vector a -> Int
{-# INLINE length #-}
length = G.length
-- | /O(1)/ Test whether a vector is empty
null :: Vector a -> Bool
{-# INLINE null #-}
null = G.null
-- Indexing
-- --------
-- | O(1) Indexing
(!) :: Vector a -> Int -> a
{-# INLINE (!) #-}
(!) = (G.!)
-- | O(1) Safe indexing
(!?) :: Vector a -> Int -> Maybe a
{-# INLINE (!?) #-}
(!?) = (G.!?)
-- | /O(1)/ First element
head :: Vector a -> a
{-# INLINE head #-}
head = G.head
-- | /O(1)/ Last element
last :: Vector a -> a
{-# INLINE last #-}
last = G.last
-- | /O(1)/ Unsafe indexing without bounds checking
unsafeIndex :: Vector a -> Int -> a
{-# INLINE unsafeIndex #-}
unsafeIndex = G.unsafeIndex
-- | /O(1)/ First element without checking if the vector is empty
unsafeHead :: Vector a -> a
{-# INLINE unsafeHead #-}
unsafeHead = G.unsafeHead
-- | /O(1)/ Last element without checking if the vector is empty
unsafeLast :: Vector a -> a
{-# INLINE unsafeLast #-}
unsafeLast = G.unsafeLast
-- Monadic indexing
-- ----------------
-- | /O(1)/ Indexing in a monad.
--
-- The monad allows operations to be strict in the vector when necessary.
-- Suppose vector copying is implemented like this:
--
-- > copy mv v = ... write mv i (v ! i) ...
--
-- For lazy vectors, @v ! i@ would not be evaluated which means that @mv@
-- would unnecessarily retain a reference to @v@ in each element written.
--
-- With 'indexM', copying can be implemented like this instead:
--
-- > copy mv v = ... do
-- > x <- indexM v i
-- > write mv i x
--
-- Here, no references to @v@ are retained because indexing (but /not/ the
-- elements) is evaluated eagerly.
--
indexM :: Monad m => Vector a -> Int -> m a
{-# INLINE indexM #-}
indexM = G.indexM
-- | /O(1)/ First element of a vector in a monad. See 'indexM' for an
-- explanation of why this is useful.
headM :: Monad m => Vector a -> m a
{-# INLINE headM #-}
headM = G.headM
-- | /O(1)/ Last element of a vector in a monad. See 'indexM' for an
-- explanation of why this is useful.
lastM :: Monad m => Vector a -> m a
{-# INLINE lastM #-}
lastM = G.lastM
-- | /O(1)/ Indexing in a monad without bounds checks. See 'indexM' for an
-- explanation of why this is useful.
unsafeIndexM :: Monad m => Vector a -> Int -> m a
{-# INLINE unsafeIndexM #-}
unsafeIndexM = G.unsafeIndexM
-- | /O(1)/ First element in a monad without checking for empty vectors.
-- See 'indexM' for an explanation of why this is useful.
unsafeHeadM :: Monad m => Vector a -> m a
{-# INLINE unsafeHeadM #-}
unsafeHeadM = G.unsafeHeadM
-- | /O(1)/ Last element in a monad without checking for empty vectors.
-- See 'indexM' for an explanation of why this is useful.
unsafeLastM :: Monad m => Vector a -> m a
{-# INLINE unsafeLastM #-}
unsafeLastM = G.unsafeLastM
-- Extracting subvectors (slicing)
-- -------------------------------
-- | /O(1)/ Yield a slice of the vector without copying it. The vector must
-- contain at least @i+n@ elements.
slice :: Int -- ^ @i@ starting index
-> Int -- ^ @n@ length
-> Vector a
-> Vector a
{-# INLINE slice #-}
slice = G.slice
-- | /O(1)/ Yield all but the last element without copying. The vector may not
-- be empty.
init :: Vector a -> Vector a
{-# INLINE init #-}
init = G.init
-- | /O(1)/ Yield all but the first element without copying. The vector may not
-- be empty.
tail :: Vector a -> Vector a
{-# INLINE tail #-}
tail = G.tail
-- | /O(1)/ Yield the first @n@ elements without copying. The vector may
-- contain less than @n@ elements in which case it is returned unchanged.
take :: Int -> Vector a -> Vector a
{-# INLINE take #-}
take = G.take
-- | /O(1)/ Yield all but the first @n@ elements without copying. The vector may
-- contain less than @n@ elements in which case an empty vector is returned.
drop :: Int -> Vector a -> Vector a
{-# INLINE drop #-}
drop = G.drop
-- | /O(1)/ Yield the first @n@ elements paired with the remainder without copying.
--
-- Note that @'splitAt' n v@ is equivalent to @('take' n v, 'drop' n v)@
-- but slightly more efficient.
{-# INLINE splitAt #-}
splitAt :: Int -> Vector a -> (Vector a, Vector a)
splitAt = G.splitAt
-- | /O(1)/ Yield a slice of the vector without copying. The vector must
-- contain at least @i+n@ elements but this is not checked.
unsafeSlice :: Int -- ^ @i@ starting index
-> Int -- ^ @n@ length
-> Vector a
-> Vector a
{-# INLINE unsafeSlice #-}
unsafeSlice = G.unsafeSlice
-- | /O(1)/ Yield all but the last element without copying. The vector may not
-- be empty but this is not checked.
unsafeInit :: Vector a -> Vector a
{-# INLINE unsafeInit #-}
unsafeInit = G.unsafeInit
-- | /O(1)/ Yield all but the first element without copying. The vector may not
-- be empty but this is not checked.
unsafeTail :: Vector a -> Vector a
{-# INLINE unsafeTail #-}
unsafeTail = G.unsafeTail
-- | /O(1)/ Yield the first @n@ elements without copying. The vector must
-- contain at least @n@ elements but this is not checked.
unsafeTake :: Int -> Vector a -> Vector a
{-# INLINE unsafeTake #-}
unsafeTake = G.unsafeTake
-- | /O(1)/ Yield all but the first @n@ elements without copying. The vector
-- must contain at least @n@ elements but this is not checked.
unsafeDrop :: Int -> Vector a -> Vector a
{-# INLINE unsafeDrop #-}
unsafeDrop = G.unsafeDrop
-- Initialisation
-- --------------
-- | /O(1)/ Empty vector
empty :: Vector a
{-# INLINE empty #-}
empty = G.empty
-- | /O(1)/ Vector with exactly one element
singleton :: a -> Vector a
{-# INLINE singleton #-}
singleton = G.singleton
-- | /O(n)/ Vector of the given length with the same value in each position
replicate :: Int -> a -> Vector a
{-# INLINE replicate #-}
replicate = G.replicate
-- | /O(n)/ Construct a vector of the given length by applying the function to
-- each index
generate :: Int -> (Int -> a) -> Vector a
{-# INLINE generate #-}
generate = G.generate
-- | /O(n)/ Apply function n times to value. Zeroth element is original value.
iterateN :: Int -> (a -> a) -> a -> Vector a
{-# INLINE iterateN #-}
iterateN = G.iterateN
-- Unfolding
-- ---------
-- | /O(n)/ Construct a vector by repeatedly applying the generator function
-- to a seed. The generator function yields 'Just' the next element and the
-- new seed or 'Nothing' if there are no more elements.
--
-- > unfoldr (\n -> if n == 0 then Nothing else Just (n,n-1)) 10
-- > = <10,9,8,7,6,5,4,3,2,1>
unfoldr :: (b -> Maybe (a, b)) -> b -> Vector a
{-# INLINE unfoldr #-}
unfoldr = G.unfoldr
-- | /O(n)/ Construct a vector with at most @n@ elements by repeatedly applying
-- the generator function to a seed. The generator function yields 'Just' the
-- next element and the new seed or 'Nothing' if there are no more elements.
--
-- > unfoldrN 3 (\n -> Just (n,n-1)) 10 = <10,9,8>
unfoldrN :: Int -> (b -> Maybe (a, b)) -> b -> Vector a
{-# INLINE unfoldrN #-}
unfoldrN = G.unfoldrN
-- | /O(n)/ Construct a vector with @n@ elements by repeatedly applying the
-- generator function to the already constructed part of the vector.
--
-- > constructN 3 f = let a = f <> ; b = f <a> ; c = f <a,b> in <a,b,c>
--
constructN :: Int -> (Vector a -> a) -> Vector a
{-# INLINE constructN #-}
constructN = G.constructN
-- | /O(n)/ Construct a vector with @n@ elements from right to left by
-- repeatedly applying the generator function to the already constructed part
-- of the vector.
--
-- > constructrN 3 f = let a = f <> ; b = f <a> ; c = f <b,a> in <c,b,a>
--
constructrN :: Int -> (Vector a -> a) -> Vector a
{-# INLINE constructrN #-}
constructrN = G.constructrN
-- Enumeration
-- -----------
-- | /O(n)/ Yield a vector of the given length containing the values @x@, @x+1@
-- etc. This operation is usually more efficient than 'enumFromTo'.
--
-- > enumFromN 5 3 = <5,6,7>
enumFromN :: Num a => a -> Int -> Vector a
{-# INLINE enumFromN #-}
enumFromN = G.enumFromN
-- | /O(n)/ Yield a vector of the given length containing the values @x@, @x+y@,
-- @x+y+y@ etc. This operation is usually more efficient than 'enumFromThenTo'.
--
-- > enumFromStepN 1 0.1 5 = <1,1.1,1.2,1.3,1.4>
enumFromStepN :: Num a => a -> a -> Int -> Vector a
{-# INLINE enumFromStepN #-}
enumFromStepN = G.enumFromStepN
-- | /O(n)/ Enumerate values from @x@ to @y@.
--
-- /WARNING:/ This operation can be very inefficient. If at all possible, use
-- 'enumFromN' instead.
enumFromTo :: Enum a => a -> a -> Vector a
{-# INLINE enumFromTo #-}
enumFromTo = G.enumFromTo
-- | /O(n)/ Enumerate values from @x@ to @y@ with a specific step @z@.
--
-- /WARNING:/ This operation can be very inefficient. If at all possible, use
-- 'enumFromStepN' instead.
enumFromThenTo :: Enum a => a -> a -> a -> Vector a
{-# INLINE enumFromThenTo #-}
enumFromThenTo = G.enumFromThenTo
-- Concatenation
-- -------------
-- | /O(n)/ Prepend an element
cons :: a -> Vector a -> Vector a
{-# INLINE cons #-}
cons = G.cons
-- | /O(n)/ Append an element
snoc :: Vector a -> a -> Vector a
{-# INLINE snoc #-}
snoc = G.snoc
infixr 5 ++
-- | /O(m+n)/ Concatenate two vectors
(++) :: Vector a -> Vector a -> Vector a
{-# INLINE (++) #-}
(++) = (G.++)
-- | /O(n)/ Concatenate all vectors in the list
concat :: [Vector a] -> Vector a
{-# INLINE concat #-}
concat = G.concat
-- Monadic initialisation
-- ----------------------
-- | /O(n)/ Execute the monadic action the given number of times and store the
-- results in a vector.
replicateM :: Monad m => Int -> m a -> m (Vector a)
{-# INLINE replicateM #-}
replicateM = G.replicateM
-- | /O(n)/ Construct a vector of the given length by applying the monadic
-- action to each index
generateM :: Monad m => Int -> (Int -> m a) -> m (Vector a)
{-# INLINE generateM #-}
generateM = G.generateM
-- | Execute the monadic action and freeze the resulting vector.
--
-- @
-- create (do { v \<- new 2; write v 0 \'a\'; write v 1 \'b\'; return v }) = \<'a','b'\>
-- @
create :: (forall s. ST s (MVector s a)) -> Vector a
{-# INLINE create #-}
-- NOTE: eta-expanded due to http://hackage.haskell.org/trac/ghc/ticket/4120
create p = G.create p
-- Restricting memory usage
-- ------------------------
-- | /O(n)/ Yield the argument but force it not to retain any extra memory,
-- possibly by copying it.
--
-- This is especially useful when dealing with slices. For example:
--
-- > force (slice 0 2 <huge vector>)
--
-- Here, the slice retains a reference to the huge vector. Forcing it creates
-- a copy of just the elements that belong to the slice and allows the huge
-- vector to be garbage collected.
force :: Vector a -> Vector a
{-# INLINE force #-}
force = G.force
-- Bulk updates
-- ------------
-- | /O(m+n)/ For each pair @(i,a)@ from the list, replace the vector
-- element at position @i@ by @a@.
--
-- > <5,9,2,7> // [(2,1),(0,3),(2,8)] = <3,9,8,7>
--
(//) :: Vector a -- ^ initial vector (of length @m@)
-> [(Int, a)] -- ^ list of index/value pairs (of length @n@)
-> Vector a
{-# INLINE (//) #-}
(//) = (G.//)
-- | /O(m+n)/ For each pair @(i,a)@ from the vector of index/value pairs,
-- replace the vector element at position @i@ by @a@.
--
-- > update <5,9,2,7> <(2,1),(0,3),(2,8)> = <3,9,8,7>
--
update :: Vector a -- ^ initial vector (of length @m@)
-> Vector (Int, a) -- ^ vector of index/value pairs (of length @n@)
-> Vector a
{-# INLINE update #-}
update = G.update
-- | /O(m+min(n1,n2))/ For each index @i@ from the index vector and the
-- corresponding value @a@ from the value vector, replace the element of the
-- initial vector at position @i@ by @a@.
--
-- > update_ <5,9,2,7> <2,0,2> <1,3,8> = <3,9,8,7>
--
-- The function 'update' provides the same functionality and is usually more
-- convenient.
--
-- @
-- update_ xs is ys = 'update' xs ('zip' is ys)
-- @
update_ :: Vector a -- ^ initial vector (of length @m@)
-> Vector Int -- ^ index vector (of length @n1@)
-> Vector a -- ^ value vector (of length @n2@)
-> Vector a
{-# INLINE update_ #-}
update_ = G.update_
-- | Same as ('//') but without bounds checking.
unsafeUpd :: Vector a -> [(Int, a)] -> Vector a
{-# INLINE unsafeUpd #-}
unsafeUpd = G.unsafeUpd
-- | Same as 'update' but without bounds checking.
unsafeUpdate :: Vector a -> Vector (Int, a) -> Vector a
{-# INLINE unsafeUpdate #-}
unsafeUpdate = G.unsafeUpdate
-- | Same as 'update_' but without bounds checking.
unsafeUpdate_ :: Vector a -> Vector Int -> Vector a -> Vector a
{-# INLINE unsafeUpdate_ #-}
unsafeUpdate_ = G.unsafeUpdate_
-- Accumulations
-- -------------
-- | /O(m+n)/ For each pair @(i,b)@ from the list, replace the vector element
-- @a@ at position @i@ by @f a b@.
--
-- > accum (+) <5,9,2> [(2,4),(1,6),(0,3),(1,7)] = <5+3, 9+6+7, 2+4>
accum :: (a -> b -> a) -- ^ accumulating function @f@
-> Vector a -- ^ initial vector (of length @m@)
-> [(Int,b)] -- ^ list of index/value pairs (of length @n@)
-> Vector a
{-# INLINE accum #-}
accum = G.accum
-- | /O(m+n)/ For each pair @(i,b)@ from the vector of pairs, replace the vector
-- element @a@ at position @i@ by @f a b@.
--
-- > accumulate (+) <5,9,2> <(2,4),(1,6),(0,3),(1,7)> = <5+3, 9+6+7, 2+4>
accumulate :: (a -> b -> a) -- ^ accumulating function @f@
-> Vector a -- ^ initial vector (of length @m@)
-> Vector (Int,b) -- ^ vector of index/value pairs (of length @n@)
-> Vector a
{-# INLINE accumulate #-}
accumulate = G.accumulate
-- | /O(m+min(n1,n2))/ For each index @i@ from the index vector and the
-- corresponding value @b@ from the value vector,
-- replace the element of the initial vector at
-- position @i@ by @f a b@.
--
-- > accumulate_ (+) <5,9,2> <2,1,0,1> <4,6,3,7> = <5+3, 9+6+7, 2+4>
--
-- The function 'accumulate' provides the same functionality and is usually more
-- convenient.
--
-- @
-- accumulate_ f as is bs = 'accumulate' f as ('zip' is bs)
-- @
accumulate_ :: (a -> b -> a) -- ^ accumulating function @f@
-> Vector a -- ^ initial vector (of length @m@)
-> Vector Int -- ^ index vector (of length @n1@)
-> Vector b -- ^ value vector (of length @n2@)
-> Vector a
{-# INLINE accumulate_ #-}
accumulate_ = G.accumulate_
-- | Same as 'accum' but without bounds checking.
unsafeAccum :: (a -> b -> a) -> Vector a -> [(Int,b)] -> Vector a
{-# INLINE unsafeAccum #-}
unsafeAccum = G.unsafeAccum
-- | Same as 'accumulate' but without bounds checking.
unsafeAccumulate :: (a -> b -> a) -> Vector a -> Vector (Int,b) -> Vector a
{-# INLINE unsafeAccumulate #-}
unsafeAccumulate = G.unsafeAccumulate
-- | Same as 'accumulate_' but without bounds checking.
unsafeAccumulate_
:: (a -> b -> a) -> Vector a -> Vector Int -> Vector b -> Vector a
{-# INLINE unsafeAccumulate_ #-}
unsafeAccumulate_ = G.unsafeAccumulate_
-- Permutations
-- ------------
-- | /O(n)/ Reverse a vector
reverse :: Vector a -> Vector a
{-# INLINE reverse #-}
reverse = G.reverse
-- | /O(n)/ Yield the vector obtained by replacing each element @i@ of the
-- index vector by @xs'!'i@. This is equivalent to @'map' (xs'!') is@ but is
-- often much more efficient.
--
-- > backpermute <a,b,c,d> <0,3,2,3,1,0> = <a,d,c,d,b,a>
backpermute :: Vector a -> Vector Int -> Vector a
{-# INLINE backpermute #-}
backpermute = G.backpermute
-- | Same as 'backpermute' but without bounds checking.
unsafeBackpermute :: Vector a -> Vector Int -> Vector a
{-# INLINE unsafeBackpermute #-}
unsafeBackpermute = G.unsafeBackpermute
-- Safe destructive updates
-- ------------------------
-- | Apply a destructive operation to a vector. The operation will be
-- performed in place if it is safe to do so and will modify a copy of the
-- vector otherwise.
--
-- @
-- modify (\\v -> write v 0 \'x\') ('replicate' 3 \'a\') = \<\'x\',\'a\',\'a\'\>
-- @
modify :: (forall s. MVector s a -> ST s ()) -> Vector a -> Vector a
{-# INLINE modify #-}
modify p = G.modify p
-- Indexing
-- --------
-- | /O(n)/ Pair each element in a vector with its index
indexed :: Vector a -> Vector (Int,a)
{-# INLINE indexed #-}
indexed = G.indexed
-- Mapping
-- -------
-- | /O(n)/ Map a function over a vector
map :: (a -> b) -> Vector a -> Vector b
{-# INLINE map #-}
map = G.map
-- | /O(n)/ Apply a function to every element of a vector and its index
imap :: (Int -> a -> b) -> Vector a -> Vector b
{-# INLINE imap #-}
imap = G.imap
-- | Map a function over a vector and concatenate the results.
concatMap :: (a -> Vector b) -> Vector a -> Vector b
{-# INLINE concatMap #-}
concatMap = G.concatMap
-- Monadic mapping
-- ---------------
-- | /O(n)/ Apply the monadic action to all elements of the vector, yielding a
-- vector of results
mapM :: Monad m => (a -> m b) -> Vector a -> m (Vector b)
{-# INLINE mapM #-}
mapM = G.mapM
-- | /O(n)/ Apply the monadic action to every element of a vector and its
-- index, yielding a vector of results
imapM :: Monad m => (Int -> a -> m b) -> Vector a -> m (Vector b)
{-# INLINE imapM #-}
imapM = G.imapM
-- | /O(n)/ Apply the monadic action to all elements of a vector and ignore the
-- results
mapM_ :: Monad m => (a -> m b) -> Vector a -> m ()
{-# INLINE mapM_ #-}
mapM_ = G.mapM_
-- | /O(n)/ Apply the monadic action to every element of a vector and its
-- index, ignoring the results
imapM_ :: Monad m => (Int -> a -> m b) -> Vector a -> m ()
{-# INLINE imapM_ #-}
imapM_ = G.imapM_
-- | /O(n)/ Apply the monadic action to all elements of the vector, yielding a
-- vector of results. Equivalent to @flip 'mapM'@.
forM :: Monad m => Vector a -> (a -> m b) -> m (Vector b)
{-# INLINE forM #-}
forM = G.forM
-- | /O(n)/ Apply the monadic action to all elements of a vector and ignore the
-- results. Equivalent to @flip 'mapM_'@.
forM_ :: Monad m => Vector a -> (a -> m b) -> m ()
{-# INLINE forM_ #-}
forM_ = G.forM_
-- Zipping
-- -------
-- | /O(min(m,n))/ Zip two vectors with the given function.
zipWith :: (a -> b -> c) -> Vector a -> Vector b -> Vector c
{-# INLINE zipWith #-}
zipWith = G.zipWith
-- | Zip three vectors with the given function.
zipWith3 :: (a -> b -> c -> d) -> Vector a -> Vector b -> Vector c -> Vector d
{-# INLINE zipWith3 #-}
zipWith3 = G.zipWith3
zipWith4 :: (a -> b -> c -> d -> e)
-> Vector a -> Vector b -> Vector c -> Vector d -> Vector e
{-# INLINE zipWith4 #-}
zipWith4 = G.zipWith4
zipWith5 :: (a -> b -> c -> d -> e -> f)
-> Vector a -> Vector b -> Vector c -> Vector d -> Vector e
-> Vector f
{-# INLINE zipWith5 #-}
zipWith5 = G.zipWith5
zipWith6 :: (a -> b -> c -> d -> e -> f -> g)
-> Vector a -> Vector b -> Vector c -> Vector d -> Vector e
-> Vector f -> Vector g
{-# INLINE zipWith6 #-}
zipWith6 = G.zipWith6
-- | /O(min(m,n))/ Zip two vectors with a function that also takes the
-- elements' indices.
izipWith :: (Int -> a -> b -> c) -> Vector a -> Vector b -> Vector c
{-# INLINE izipWith #-}
izipWith = G.izipWith
-- | Zip three vectors and their indices with the given function.
izipWith3 :: (Int -> a -> b -> c -> d)
-> Vector a -> Vector b -> Vector c -> Vector d
{-# INLINE izipWith3 #-}
izipWith3 = G.izipWith3
izipWith4 :: (Int -> a -> b -> c -> d -> e)
-> Vector a -> Vector b -> Vector c -> Vector d -> Vector e
{-# INLINE izipWith4 #-}
izipWith4 = G.izipWith4
izipWith5 :: (Int -> a -> b -> c -> d -> e -> f)
-> Vector a -> Vector b -> Vector c -> Vector d -> Vector e
-> Vector f
{-# INLINE izipWith5 #-}
izipWith5 = G.izipWith5
izipWith6 :: (Int -> a -> b -> c -> d -> e -> f -> g)
-> Vector a -> Vector b -> Vector c -> Vector d -> Vector e
-> Vector f -> Vector g
{-# INLINE izipWith6 #-}
izipWith6 = G.izipWith6
-- | Elementwise pairing of array elements.
zip :: Vector a -> Vector b -> Vector (a, b)
{-# INLINE zip #-}
zip = G.zip
-- | Zip together three vectors into a vector of triples.
zip3 :: Vector a -> Vector b -> Vector c -> Vector (a, b, c)
{-# INLINE zip3 #-}
zip3 = G.zip3
zip4 :: Vector a -> Vector b -> Vector c -> Vector d
-> Vector (a, b, c, d)
{-# INLINE zip4 #-}
zip4 = G.zip4
zip5 :: Vector a -> Vector b -> Vector c -> Vector d -> Vector e
-> Vector (a, b, c, d, e)
{-# INLINE zip5 #-}
zip5 = G.zip5
zip6 :: Vector a -> Vector b -> Vector c -> Vector d -> Vector e -> Vector f
-> Vector (a, b, c, d, e, f)
{-# INLINE zip6 #-}
zip6 = G.zip6
-- Unzipping
-- ---------
-- | /O(min(m,n))/ Unzip a vector of pairs.
unzip :: Vector (a, b) -> (Vector a, Vector b)
{-# INLINE unzip #-}
unzip = G.unzip
unzip3 :: Vector (a, b, c) -> (Vector a, Vector b, Vector c)
{-# INLINE unzip3 #-}
unzip3 = G.unzip3
unzip4 :: Vector (a, b, c, d) -> (Vector a, Vector b, Vector c, Vector d)
{-# INLINE unzip4 #-}
unzip4 = G.unzip4
unzip5 :: Vector (a, b, c, d, e)
-> (Vector a, Vector b, Vector c, Vector d, Vector e)
{-# INLINE unzip5 #-}
unzip5 = G.unzip5
unzip6 :: Vector (a, b, c, d, e, f)
-> (Vector a, Vector b, Vector c, Vector d, Vector e, Vector f)
{-# INLINE unzip6 #-}
unzip6 = G.unzip6
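-- A zip/unzip round-trip sketch (illustrative; assumes a qualified import as
-- @V@ and the list-like 'Show' instance):
--
-- >>> V.zip (V.fromList [1, 2, 3]) (V.fromList "abc")
-- [(1,'a'),(2,'b'),(3,'c')]
-- >>> V.unzip (V.fromList [(1, 'a'), (2, 'b')])
-- ([1,2],"ab")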
-- Monadic zipping
-- ---------------
-- | /O(min(m,n))/ Zip the two vectors with the monadic action and yield a
-- vector of results
zipWithM :: Monad m => (a -> b -> m c) -> Vector a -> Vector b -> m (Vector c)
{-# INLINE zipWithM #-}
zipWithM = G.zipWithM
-- | /O(min(m,n))/ Zip the two vectors with a monadic action that also takes
-- the element index and yield a vector of results
izipWithM :: Monad m => (Int -> a -> b -> m c) -> Vector a -> Vector b -> m (Vector c)
{-# INLINE izipWithM #-}
izipWithM = G.izipWithM
-- | /O(min(m,n))/ Zip the two vectors with the monadic action and ignore the
-- results
zipWithM_ :: Monad m => (a -> b -> m c) -> Vector a -> Vector b -> m ()
{-# INLINE zipWithM_ #-}
zipWithM_ = G.zipWithM_
-- | /O(min(m,n))/ Zip the two vectors with a monadic action that also takes
-- the element index and ignore the results
izipWithM_ :: Monad m => (Int -> a -> b -> m c) -> Vector a -> Vector b -> m ()
{-# INLINE izipWithM_ #-}
izipWithM_ = G.izipWithM_
-- Filtering
-- ---------
-- | /O(n)/ Drop elements that do not satisfy the predicate
filter :: (a -> Bool) -> Vector a -> Vector a
{-# INLINE filter #-}
filter = G.filter
-- | /O(n)/ Drop elements that do not satisfy the predicate, which is applied
-- to the values and their indices
ifilter :: (Int -> a -> Bool) -> Vector a -> Vector a
{-# INLINE ifilter #-}
ifilter = G.ifilter
-- | /O(n)/ Drop elements that do not satisfy the monadic predicate
filterM :: Monad m => (a -> m Bool) -> Vector a -> m (Vector a)
{-# INLINE filterM #-}
filterM = G.filterM
-- | /O(n)/ Yield the longest prefix of elements satisfying the predicate
-- without copying.
takeWhile :: (a -> Bool) -> Vector a -> Vector a
{-# INLINE takeWhile #-}
takeWhile = G.takeWhile
-- | /O(n)/ Drop the longest prefix of elements that satisfy the predicate
-- without copying.
dropWhile :: (a -> Bool) -> Vector a -> Vector a
{-# INLINE dropWhile #-}
dropWhile = G.dropWhile
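-- A small filtering sketch (illustrative; assumes a qualified import as @V@):
--
-- >>> V.filter odd (V.fromList [1, 2, 3, 4, 5])
-- [1,3,5]
-- >>> V.takeWhile (< 3) (V.fromList [1, 2, 3, 4, 1])
-- [1,2]
-- >>> V.dropWhile (< 3) (V.fromList [1, 2, 3, 4, 1])
-- [3,4,1]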
-- Partitioning
-- -------------
-- | /O(n)/ Split the vector in two parts, the first one containing those
-- elements that satisfy the predicate and the second one those that don't. The
-- relative order of the elements is preserved at the cost of a sometimes
-- reduced performance compared to 'unstablePartition'.
partition :: (a -> Bool) -> Vector a -> (Vector a, Vector a)
{-# INLINE partition #-}
partition = G.partition
-- | /O(n)/ Split the vector in two parts, the first one containing those
-- elements that satisfy the predicate and the second one those that don't.
-- The order of the elements is not preserved but the operation is often
-- faster than 'partition'.
unstablePartition :: (a -> Bool) -> Vector a -> (Vector a, Vector a)
{-# INLINE unstablePartition #-}
unstablePartition = G.unstablePartition
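-- An illustrative example (assumes a qualified import as @V@). 'partition'
-- keeps the relative order of the elements; 'unstablePartition' returns the
-- same elements in each half, but in an unspecified order:
--
-- >>> V.partition even (V.fromList [1, 2, 3, 4, 5, 6])
-- ([2,4,6],[1,3,5])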
-- | /O(n)/ Split the vector into the longest prefix of elements that satisfy
-- the predicate and the rest without copying.
span :: (a -> Bool) -> Vector a -> (Vector a, Vector a)
{-# INLINE span #-}
span = G.span
-- | /O(n)/ Split the vector into the longest prefix of elements that do not
-- satisfy the predicate and the rest without copying.
break :: (a -> Bool) -> Vector a -> (Vector a, Vector a)
{-# INLINE break #-}
break = G.break
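-- A span/break sketch (illustrative; assumes a qualified import as @V@):
--
-- >>> V.span (< 3) (V.fromList [1, 2, 3, 4, 1])
-- ([1,2],[3,4,1])
-- >>> V.break (> 3) (V.fromList [1, 2, 3, 4, 1])
-- ([1,2,3],[4,1])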
-- Searching
-- ---------
infix 4 `elem`
-- | /O(n)/ Check if the vector contains an element
elem :: Eq a => a -> Vector a -> Bool
{-# INLINE elem #-}
elem = G.elem
infix 4 `notElem`
-- | /O(n)/ Check if the vector does not contain an element (inverse of 'elem')
notElem :: Eq a => a -> Vector a -> Bool
{-# INLINE notElem #-}
notElem = G.notElem
-- | /O(n)/ Yield 'Just' the first element matching the predicate or 'Nothing'
-- if no such element exists.
find :: (a -> Bool) -> Vector a -> Maybe a
{-# INLINE find #-}
find = G.find
-- | /O(n)/ Yield 'Just' the index of the first element matching the predicate
-- or 'Nothing' if no such element exists.
findIndex :: (a -> Bool) -> Vector a -> Maybe Int
{-# INLINE findIndex #-}
findIndex = G.findIndex
-- | /O(n)/ Yield the indices of elements satisfying the predicate in ascending
-- order.
findIndices :: (a -> Bool) -> Vector a -> Vector Int
{-# INLINE findIndices #-}
findIndices = G.findIndices
-- | /O(n)/ Yield 'Just' the index of the first occurrence of the given element or
-- 'Nothing' if the vector does not contain the element. This is a specialised
-- version of 'findIndex'.
elemIndex :: Eq a => a -> Vector a -> Maybe Int
{-# INLINE elemIndex #-}
elemIndex = G.elemIndex
-- | /O(n)/ Yield the indices of all occurrences of the given element in
-- ascending order. This is a specialised version of 'findIndices'.
elemIndices :: Eq a => a -> Vector a -> Vector Int
{-# INLINE elemIndices #-}
elemIndices = G.elemIndices
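-- A small searching sketch (illustrative; assumes a qualified import as @V@):
--
-- >>> V.elem 3 (V.fromList [1, 2, 3])
-- True
-- >>> V.findIndex even (V.fromList [1, 3, 4, 5, 6])
-- Just 2
-- >>> V.elemIndices 1 (V.fromList [1, 2, 1, 3, 1])
-- [0,2,4]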
-- Folding
-- -------
-- | /O(n)/ Left fold
foldl :: (a -> b -> a) -> a -> Vector b -> a
{-# INLINE foldl #-}
foldl = G.foldl
-- | /O(n)/ Left fold on non-empty vectors
foldl1 :: (a -> a -> a) -> Vector a -> a
{-# INLINE foldl1 #-}
foldl1 = G.foldl1
-- | /O(n)/ Left fold with strict accumulator
foldl' :: (a -> b -> a) -> a -> Vector b -> a
{-# INLINE foldl' #-}
foldl' = G.foldl'
-- | /O(n)/ Left fold on non-empty vectors with strict accumulator
foldl1' :: (a -> a -> a) -> Vector a -> a
{-# INLINE foldl1' #-}
foldl1' = G.foldl1'
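-- An illustrative example (assumes a qualified import as @V@). The primed
-- variants force the accumulator at each step, which is usually what you want
-- for numeric folds:
--
-- >>> V.foldl' (+) 0 (V.fromList [1 .. 100])
-- 5050
-- >>> V.foldl1' max (V.fromList [3, 1, 4, 1, 5])
-- 5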
-- | /O(n)/ Right fold
foldr :: (a -> b -> b) -> b -> Vector a -> b
{-# INLINE foldr #-}
foldr = G.foldr
-- | /O(n)/ Right fold on non-empty vectors
foldr1 :: (a -> a -> a) -> Vector a -> a
{-# INLINE foldr1 #-}
foldr1 = G.foldr1
-- | /O(n)/ Right fold with a strict accumulator
foldr' :: (a -> b -> b) -> b -> Vector a -> b
{-# INLINE foldr' #-}
foldr' = G.foldr'
-- | /O(n)/ Right fold on non-empty vectors with strict accumulator
foldr1' :: (a -> a -> a) -> Vector a -> a
{-# INLINE foldr1' #-}
foldr1' = G.foldr1'
-- | /O(n)/ Left fold (function applied to each element and its index)
ifoldl :: (a -> Int -> b -> a) -> a -> Vector b -> a
{-# INLINE ifoldl #-}
ifoldl = G.ifoldl
-- | /O(n)/ Left fold with strict accumulator (function applied to each element
-- and its index)
ifoldl' :: (a -> Int -> b -> a) -> a -> Vector b -> a
{-# INLINE ifoldl' #-}
ifoldl' = G.ifoldl'
-- | /O(n)/ Right fold (function applied to each element and its index)
ifoldr :: (Int -> a -> b -> b) -> b -> Vector a -> b
{-# INLINE ifoldr #-}
ifoldr = G.ifoldr
-- | /O(n)/ Right fold with strict accumulator (function applied to each
-- element and its index)
ifoldr' :: (Int -> a -> b -> b) -> b -> Vector a -> b
{-# INLINE ifoldr' #-}
ifoldr' = G.ifoldr'
-- Specialised folds
-- -----------------
-- | /O(n)/ Check if all elements satisfy the predicate.
all :: (a -> Bool) -> Vector a -> Bool
{-# INLINE all #-}
all = G.all
-- | /O(n)/ Check if any element satisfies the predicate.
any :: (a -> Bool) -> Vector a -> Bool
{-# INLINE any #-}
any = G.any
-- | /O(n)/ Check if all elements are 'True'
and :: Vector Bool -> Bool
{-# INLINE and #-}
and = G.and
-- | /O(n)/ Check if any element is 'True'
or :: Vector Bool -> Bool
{-# INLINE or #-}
or = G.or
-- | /O(n)/ Compute the sum of the elements
sum :: Num a => Vector a -> a
{-# INLINE sum #-}
sum = G.sum
-- | /O(n)/ Compute the product of the elements
product :: Num a => Vector a -> a
{-# INLINE product #-}
product = G.product
-- | /O(n)/ Yield the maximum element of the vector. The vector may not be
-- empty.
maximum :: Ord a => Vector a -> a
{-# INLINE maximum #-}
maximum = G.maximum
-- | /O(n)/ Yield the maximum element of the vector according to the given
-- comparison function. The vector may not be empty.
maximumBy :: (a -> a -> Ordering) -> Vector a -> a
{-# INLINE maximumBy #-}
maximumBy = G.maximumBy
-- | /O(n)/ Yield the minimum element of the vector. The vector may not be
-- empty.
minimum :: Ord a => Vector a -> a
{-# INLINE minimum #-}
minimum = G.minimum
-- | /O(n)/ Yield the minimum element of the vector according to the given
-- comparison function. The vector may not be empty.
minimumBy :: (a -> a -> Ordering) -> Vector a -> a
{-# INLINE minimumBy #-}
minimumBy = G.minimumBy
-- | /O(n)/ Yield the index of the maximum element of the vector. The vector
-- may not be empty.
maxIndex :: Ord a => Vector a -> Int
{-# INLINE maxIndex #-}
maxIndex = G.maxIndex
-- | /O(n)/ Yield the index of the maximum element of the vector according to
-- the given comparison function. The vector may not be empty.
maxIndexBy :: (a -> a -> Ordering) -> Vector a -> Int
{-# INLINE maxIndexBy #-}
maxIndexBy = G.maxIndexBy
-- | /O(n)/ Yield the index of the minimum element of the vector. The vector
-- may not be empty.
minIndex :: Ord a => Vector a -> Int
{-# INLINE minIndex #-}
minIndex = G.minIndex
-- | /O(n)/ Yield the index of the minimum element of the vector according to
-- the given comparison function. The vector may not be empty.
minIndexBy :: (a -> a -> Ordering) -> Vector a -> Int
{-# INLINE minIndexBy #-}
minIndexBy = G.minIndexBy
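-- A sketch of the extremum helpers (illustrative; assumes a qualified import
-- as @V@ and non-empty input vectors):
--
-- >>> V.maximum (V.fromList [3, 1, 4, 1, 5])
-- 5
-- >>> V.maxIndex (V.fromList [3, 1, 4, 1, 5])
-- 4
-- >>> V.minIndex (V.fromList [3, 1, 4, 1, 5])
-- 1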
-- Monadic folds
-- -------------
-- | /O(n)/ Monadic fold
foldM :: Monad m => (a -> b -> m a) -> a -> Vector b -> m a
{-# INLINE foldM #-}
foldM = G.foldM
-- | /O(n)/ Monadic fold (action applied to each element and its index)
ifoldM :: Monad m => (a -> Int -> b -> m a) -> a -> Vector b -> m a
{-# INLINE ifoldM #-}
ifoldM = G.ifoldM
-- | /O(n)/ Monadic fold over non-empty vectors
fold1M :: Monad m => (a -> a -> m a) -> Vector a -> m a
{-# INLINE fold1M #-}
fold1M = G.fold1M
-- | /O(n)/ Monadic fold with strict accumulator
foldM' :: Monad m => (a -> b -> m a) -> a -> Vector b -> m a
{-# INLINE foldM' #-}
foldM' = G.foldM'
-- | /O(n)/ Monadic fold with strict accumulator (action applied to each
-- element and its index)
ifoldM' :: Monad m => (a -> Int -> b -> m a) -> a -> Vector b -> m a
{-# INLINE ifoldM' #-}
ifoldM' = G.ifoldM'
-- | /O(n)/ Monadic fold over non-empty vectors with strict accumulator
fold1M' :: Monad m => (a -> a -> m a) -> Vector a -> m a
{-# INLINE fold1M' #-}
fold1M' = G.fold1M'
-- | /O(n)/ Monadic fold that discards the result
foldM_ :: Monad m => (a -> b -> m a) -> a -> Vector b -> m ()
{-# INLINE foldM_ #-}
foldM_ = G.foldM_
-- | /O(n)/ Monadic fold that discards the result (action applied to each
-- element and its index)
ifoldM_ :: Monad m => (a -> Int -> b -> m a) -> a -> Vector b -> m ()
{-# INLINE ifoldM_ #-}
ifoldM_ = G.ifoldM_
-- | /O(n)/ Monadic fold over non-empty vectors that discards the result
fold1M_ :: Monad m => (a -> a -> m a) -> Vector a -> m ()
{-# INLINE fold1M_ #-}
fold1M_ = G.fold1M_
-- | /O(n)/ Monadic fold with strict accumulator that discards the result
foldM'_ :: Monad m => (a -> b -> m a) -> a -> Vector b -> m ()
{-# INLINE foldM'_ #-}
foldM'_ = G.foldM'_
-- | /O(n)/ Monadic fold with strict accumulator that discards the result
-- (action applied to each element and its index)
ifoldM'_ :: Monad m => (a -> Int -> b -> m a) -> a -> Vector b -> m ()
{-# INLINE ifoldM'_ #-}
ifoldM'_ = G.ifoldM'_
-- | /O(n)/ Monadic fold over non-empty vectors with strict accumulator
-- that discards the result
fold1M'_ :: Monad m => (a -> a -> m a) -> Vector a -> m ()
{-# INLINE fold1M'_ #-}
fold1M'_ = G.fold1M'_
-- Monadic sequencing
-- ------------------
-- | Evaluate each action and collect the results
sequence :: Monad m => Vector (m a) -> m (Vector a)
{-# INLINE sequence #-}
sequence = G.sequence
-- | Evaluate each action and discard the results
sequence_ :: Monad m => Vector (m a) -> m ()
{-# INLINE sequence_ #-}
sequence_ = G.sequence_
-- Prefix sums (scans)
-- -------------------
-- | /O(n)/ Prescan
--
-- @
-- prescanl f z = 'init' . 'scanl' f z
-- @
--
-- Example: @prescanl (+) 0 \<1,2,3,4\> = \<0,1,3,6\>@
--
prescanl :: (a -> b -> a) -> a -> Vector b -> Vector a
{-# INLINE prescanl #-}
prescanl = G.prescanl
-- | /O(n)/ Prescan with strict accumulator
prescanl' :: (a -> b -> a) -> a -> Vector b -> Vector a
{-# INLINE prescanl' #-}
prescanl' = G.prescanl'
-- | /O(n)/ Scan
--
-- @
-- postscanl f z = 'tail' . 'scanl' f z
-- @
--
-- Example: @postscanl (+) 0 \<1,2,3,4\> = \<1,3,6,10\>@
--
postscanl :: (a -> b -> a) -> a -> Vector b -> Vector a
{-# INLINE postscanl #-}
postscanl = G.postscanl
-- | /O(n)/ Scan with strict accumulator
postscanl' :: (a -> b -> a) -> a -> Vector b -> Vector a
{-# INLINE postscanl' #-}
postscanl' = G.postscanl'
-- | /O(n)/ Haskell-style scan
--
-- > scanl f z <x1,...,xn> = <y1,...,y(n+1)>
-- > where y1 = z
-- > yi = f y(i-1) x(i-1)
--
-- Example: @scanl (+) 0 \<1,2,3,4\> = \<0,1,3,6,10\>@
--
scanl :: (a -> b -> a) -> a -> Vector b -> Vector a
{-# INLINE scanl #-}
scanl = G.scanl
-- | /O(n)/ Haskell-style scan with strict accumulator
scanl' :: (a -> b -> a) -> a -> Vector b -> Vector a
{-# INLINE scanl' #-}
scanl' = G.scanl'
-- | /O(n)/ Scan over a non-empty vector
--
-- > scanl1 f <x1,...,xn> = <y1,...,yn>
-- > where y1 = x1
-- > yi = f y(i-1) xi
--
scanl1 :: (a -> a -> a) -> Vector a -> Vector a
{-# INLINE scanl1 #-}
scanl1 = G.scanl1
-- | /O(n)/ Scan over a non-empty vector with a strict accumulator
scanl1' :: (a -> a -> a) -> Vector a -> Vector a
{-# INLINE scanl1' #-}
scanl1' = G.scanl1'
-- | /O(n)/ Right-to-left prescan
--
-- @
-- prescanr f z = 'reverse' . 'prescanl' (flip f) z . 'reverse'
-- @
--
prescanr :: (a -> b -> b) -> b -> Vector a -> Vector b
{-# INLINE prescanr #-}
prescanr = G.prescanr
-- | /O(n)/ Right-to-left prescan with strict accumulator
prescanr' :: (a -> b -> b) -> b -> Vector a -> Vector b
{-# INLINE prescanr' #-}
prescanr' = G.prescanr'
-- | /O(n)/ Right-to-left scan
postscanr :: (a -> b -> b) -> b -> Vector a -> Vector b
{-# INLINE postscanr #-}
postscanr = G.postscanr
-- | /O(n)/ Right-to-left scan with strict accumulator
postscanr' :: (a -> b -> b) -> b -> Vector a -> Vector b
{-# INLINE postscanr' #-}
postscanr' = G.postscanr'
-- | /O(n)/ Right-to-left Haskell-style scan
scanr :: (a -> b -> b) -> b -> Vector a -> Vector b
{-# INLINE scanr #-}
scanr = G.scanr
-- | /O(n)/ Right-to-left Haskell-style scan with strict accumulator
scanr' :: (a -> b -> b) -> b -> Vector a -> Vector b
{-# INLINE scanr' #-}
scanr' = G.scanr'
-- | /O(n)/ Right-to-left scan over a non-empty vector
scanr1 :: (a -> a -> a) -> Vector a -> Vector a
{-# INLINE scanr1 #-}
scanr1 = G.scanr1
-- | /O(n)/ Right-to-left scan over a non-empty vector with a strict
-- accumulator
scanr1' :: (a -> a -> a) -> Vector a -> Vector a
{-# INLINE scanr1' #-}
scanr1' = G.scanr1'
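-- A small scan sketch complementing the formulas above (illustrative; assumes
-- a qualified import as @V@):
--
-- >>> V.scanl1 (+) (V.fromList [1, 2, 3, 4])
-- [1,3,6,10]
-- >>> V.scanr1 (+) (V.fromList [1, 2, 3, 4])
-- [10,9,7,4]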
-- Conversions - Lists
-- -------------------
-- | /O(n)/ Convert a vector to a list
toList :: Vector a -> [a]
{-# INLINE toList #-}
toList = G.toList
-- | /O(n)/ Convert a list to a vector
fromList :: [a] -> Vector a
{-# INLINE fromList #-}
fromList = G.fromList
-- | /O(n)/ Convert the first @n@ elements of a list to a vector
--
-- @
-- fromListN n xs = 'fromList' ('take' n xs)
-- @
fromListN :: Int -> [a] -> Vector a
{-# INLINE fromListN #-}
fromListN = G.fromListN
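-- A conversion sketch (illustrative; assumes a qualified import as @V@).
-- 'fromListN' only forces the first @n@ list elements, so it is safe to use
-- on infinite lists:
--
-- >>> V.fromListN 3 [1 ..]
-- [1,2,3]
-- >>> V.toList (V.fromList "abc")
-- "abc"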
-- Conversions - Mutable vectors
-- -----------------------------
-- | /O(1)/ Unsafely convert a mutable vector to an immutable one without
-- copying. The mutable vector may not be used after this operation.
unsafeFreeze :: PrimMonad m => MVector (PrimState m) a -> m (Vector a)
{-# INLINE unsafeFreeze #-}
unsafeFreeze = G.unsafeFreeze
-- | /O(1)/ Unsafely convert an immutable vector to a mutable one without
-- copying. The immutable vector may not be used after this operation.
unsafeThaw :: PrimMonad m => Vector a -> m (MVector (PrimState m) a)
{-# INLINE unsafeThaw #-}
unsafeThaw = G.unsafeThaw
-- | /O(n)/ Yield a mutable copy of the immutable vector.
thaw :: PrimMonad m => Vector a -> m (MVector (PrimState m) a)
{-# INLINE thaw #-}
thaw = G.thaw
-- | /O(n)/ Yield an immutable copy of the mutable vector.
freeze :: PrimMonad m => MVector (PrimState m) a -> m (Vector a)
{-# INLINE freeze #-}
freeze = G.freeze
-- | /O(n)/ Copy an immutable vector into a mutable one. The two vectors must
-- have the same length. This is not checked.
unsafeCopy :: PrimMonad m => MVector (PrimState m) a -> Vector a -> m ()
{-# INLINE unsafeCopy #-}
unsafeCopy = G.unsafeCopy
-- | /O(n)/ Copy an immutable vector into a mutable one. The two vectors must
-- have the same length.
copy :: PrimMonad m => MVector (PrimState m) a -> Vector a -> m ()
{-# INLINE copy #-}
copy = G.copy
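-- A thaw/modify/freeze round trip (illustrative; assumes this module is
-- imported qualified as @V@ and "Data.Vector.Mutable" as @M@):
--
-- >>> mv <- V.thaw (V.fromList [1, 2, 3 :: Int])
-- >>> M.write mv 0 10
-- >>> V.freeze mv
-- [10,2,3]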
|
seckcoder/vector
|
Data/Vector.hs
|
bsd-3-clause
| 45,855
| 0
| 13
| 9,973
| 9,687
| 5,429
| 4,258
| -1
| -1
|
-----------------------------------------------------------------------------
-- Standard Library: IO operations, beyond those included in the prelude
--
-- Suitable for use with Hugs 98
-----------------------------------------------------------------------------
module Hugs.IO (
Handle, -- instances: Eq, Show.
HandlePosn, -- instances: Eq, Show.
IOMode(ReadMode,WriteMode,AppendMode,ReadWriteMode),
BufferMode(NoBuffering,LineBuffering,BlockBuffering),
SeekMode(AbsoluteSeek,RelativeSeek,SeekFromEnd),
stdin, stdout, stderr, -- :: Handle
openFile, -- :: FilePath -> IOMode -> IO Handle
hClose, -- :: Handle -> IO ()
hFileSize, -- :: Handle -> IO Integer
hIsEOF, -- :: Handle -> IO Bool
isEOF, -- :: IO Bool
hSetBuffering, -- :: Handle -> BufferMode -> IO ()
hGetBuffering, -- :: Handle -> IO BufferMode
hFlush, -- :: Handle -> IO ()
hGetPosn, -- :: Handle -> IO HandlePosn
hSetPosn, -- :: HandlePosn -> IO ()
hSeek, -- :: Handle -> SeekMode -> Integer -> IO ()
hTell, -- :: Handle -> IO Integer
hLookAhead, -- :: Handle -> IO Char
hWaitForInput, -- :: Handle -> Int -> IO Bool
hGetChar, -- :: Handle -> IO Char
hGetLine, -- :: Handle -> IO String
hGetContents, -- :: Handle -> IO String
hPutChar, -- :: Handle -> Char -> IO ()
hPutStr, -- :: Handle -> String -> IO ()
hIsOpen, -- :: Handle -> IO Bool
hIsClosed, -- :: Handle -> IO Bool
hIsReadable, -- :: Handle -> IO Bool
hIsWritable, -- :: Handle -> IO Bool
hIsSeekable, -- :: Handle -> IO Bool
-- Non-standard extensions
handleToFd, -- :: Handle -> IO Int
openFd -- :: Int -> Bool -> IOMode -> Bool -> IO Handle
) where
import Hugs.Prelude ( Handle, IOMode(..), stdin, stdout, stderr )
import Hugs.Prelude ( openFile, hClose, hPutChar, hPutStr )
import Hugs.Prelude ( hGetContents, hGetChar, hGetLine )
import Hugs.Prelude ( Ix(..) )
import System.IO.Error
-- data Handle
data BufferMode = NoBuffering | LineBuffering
| BlockBuffering (Maybe Int)
deriving (Eq, Ord, Read, Show)
data SeekMode = AbsoluteSeek | RelativeSeek | SeekFromEnd
deriving (Eq, Ord, Ix, Bounded, Enum, Read, Show)
primitive hFileSize :: Handle -> IO Integer
primitive hIsEOF :: Handle -> IO Bool
isEOF :: IO Bool
isEOF = hIsEOF stdin
hSetBuffering :: Handle -> BufferMode -> IO ()
hSetBuffering h bMode =
case bMode of
NoBuffering -> hSetBuff h 0 0
LineBuffering -> hSetBuff h 1 0
BlockBuffering (Just x) -> hSetBuff h 2 x
BlockBuffering _ -> hSetBuff h 2 0
primitive hSetBuff :: Handle -> Int -> Int -> IO ()
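-- A usage sketch (illustrative only): switching stdout to unbuffered output so
-- that a prompt written with hPutStr appears before the program blocks waiting
-- for input.
--
--   prompt :: IO String
--   prompt = do
--     hSetBuffering stdout NoBuffering
--     hPutStr stdout "name> "
--     hGetLine stdin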
hGetBuffering :: Handle -> IO BufferMode
hGetBuffering h = do
(k, sz) <- hGetBuff h
case k of
1 -> return NoBuffering
2 -> return LineBuffering
3 -> return (BlockBuffering (Just sz))
-- fatal - should never happen.
_ -> error "IO.hGetBuffering: unknown buffering mode"
primitive hGetBuff :: Handle -> IO (Int,Int)
primitive hFlush :: Handle -> IO ()
data HandlePosn = HandlePosn Handle Int deriving Eq
instance Show HandlePosn where
showsPrec p (HandlePosn h pos) =
showsPrec p h . showString " at position " . shows pos
hGetPosn :: Handle -> IO HandlePosn
hGetPosn h = do
p <- hGetPosnPrim h
return (HandlePosn h p)
hTell :: Handle -> IO Integer
hTell h = do
p <- hGetPosnPrim h
return (toInteger p)
primitive hGetPosnPrim :: Handle -> IO Int
hSetPosn :: HandlePosn -> IO ()
hSetPosn (HandlePosn h p) = hSetPosnPrim h p
primitive hSetPosnPrim :: Handle -> Int -> IO ()
hSeek :: Handle -> SeekMode -> Integer -> IO ()
hSeek h sMode int
| int > fromIntegral (maxBound :: Int) ||
int < fromIntegral (minBound :: Int) =
ioError (userError ("IO.hSeek: seek offset out of supported range"))
| otherwise =
hSeekPrim h (fromEnum sMode) ((fromIntegral int)::Int)
primitive hSeekPrim :: Handle -> Int -> Int -> IO ()
primitive hWaitForInput :: Handle -> Int -> IO Bool
primitive hLookAhead :: Handle -> IO Char
primitive hIsOpen,
hIsClosed,
hIsReadable,
hIsWritable,
hIsSeekable :: Handle -> IO Bool
-----------------------------------------------------------------------------
-- Extract the file descriptor from a Handle, closing the Handle
primitive handleToFd :: Handle -> IO Int
--
-- Creating a handle from a file descriptor/socket.
--
primitive openFd :: Int -- file descriptor
-> Bool -- True => it's a socket.
-> IOMode -- what mode to open the handle in.
-> Bool -- binary?
-> IO Handle
|
kaoskorobase/mescaline
|
resources/hugs/packages/hugsbase/Hugs/IO.hs
|
gpl-3.0
| 4,994
| 69
| 15
| 1,459
| 1,113
| 616
| 497
| -1
| -1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilyDependencies #-}
{-# LANGUAGE TypeInType #-}
module Main where
import Data.Kind
import Data.Proxy
import Language.Haskell.TH hiding (Type)
-- Anonymous tyvar binder example
newtype Foo1 = Foo1 (Proxy '[False, True, False])
-- Required (dependent) tyvar binder example
type family Wurble k (a :: k) :: k
newtype Foo2 a = Foo2 (Proxy (Wurble (Maybe a) Nothing))
-- Non-injective type family example
type family Foo3Fam1 (a :: Type) :: Type where
Foo3Fam1 a = a
type family Foo3Fam2 (a :: Foo3Fam1 b) :: b
newtype Foo3 = Foo3 (Proxy (Foo3Fam2 Int))
-- Injective type family example
type family Foo4Fam1 (a :: Type) = (r :: Type) | r -> a where
Foo4Fam1 a = a
type family Foo4Fam2 (a :: Foo4Fam1 b) :: b
newtype Foo4 = Foo4 (Proxy (Foo4Fam2 Int))
$(return [])
main :: IO ()
main = do
putStrLn $(reify ''Foo1 >>= stringE . pprint)
putStrLn $(reify ''Foo2 >>= stringE . pprint)
putStrLn $(reify ''Foo3 >>= stringE . pprint)
putStrLn $(reify ''Foo4 >>= stringE . pprint)
|
ezyang/ghc
|
testsuite/tests/th/T14060.hs
|
bsd-3-clause
| 1,109
| 0
| 13
| 199
| 359
| 203
| 156
| -1
| -1
|
{-# LANGUAGE StandaloneKindSignatures #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -ddump-splices #-}
module T17164 where
import Data.Kind
$([d| type T :: forall k -> k -> Type
type family T :: forall k -> k -> Type
|])
|
sdiehl/ghc
|
testsuite/tests/saks/should_compile/T17164.hs
|
bsd-3-clause
| 335
| 0
| 6
| 63
| 26
| 19
| 7
| -1
| -1
|
-- GSoC 2015, 2018 - Haskell bindings for OpenCog.
{-# LANGUAGE ForeignFunctionInterface #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE DataKinds #-}
-- | This Module defines the main functions to interact with the AtomSpace
-- creating/removing/modifying atoms.
module OpenCog.AtomSpace.Api (
insert
, insertAndGetHandle
, remove
, get
, debug
, getByHandle
, getWithHandle
, execute
, evaluate
, exportFunction
, exportPredicate
) where
import Foreign (Ptr)
import Foreign.C.Types (CULong(..),CInt(..),CDouble(..))
import Foreign.C.String (CString,withCString,peekCString)
import Foreign.Marshal.Array (withArray,allocaArray,peekArray)
import Foreign.Marshal.Utils (toBool)
import Foreign.Marshal.Alloc
import Foreign.Storable (peek)
import Data.Functor ((<$>))
import Data.Typeable (Typeable)
import Data.Maybe (fromJust)
import Control.Monad.Trans.Reader (ReaderT,runReaderT,ask)
import Control.Monad.IO.Class (liftIO)
import OpenCog.AtomSpace.Env (AtomSpaceObj(..),AtomSpaceRef(..),(<:),
AtomSpace(..),getAtomSpace,refToObj)
import OpenCog.AtomSpace.Internal (toTVRaw,fromTVRaw,Handle,HandleSeq,TruthValueP
,TVRaw(..),tvMAX_PARAMS)
import OpenCog.AtomSpace.Types (Atom(..),AtomType(..),AtomName(..)
,TruthVal(..))
import OpenCog.AtomSpace.CUtils
sUCCESS :: CInt
sUCCESS = 0
--------------------------------------------------------------------------------
foreign import ccall "AtomSpace_debug"
c_atomspace_debug :: AtomSpaceRef -> IO ()
-- | 'debug' prints the state of the AtomSpace on stderr.
-- (only for debugging purposes)
debug :: AtomSpace ()
debug = do
asRef <- getAtomSpace
liftIO $ c_atomspace_debug asRef
--------------------------------------------------------------------------------
foreign import ccall "AtomSpace_addNode"
c_atomspace_addnode :: AtomSpaceRef
-> CString
-> CString
-> Handle
-> IO CInt
insertNode :: AtomType -> AtomName -> AtomSpace (Maybe Handle)
insertNode aType aName = do
asRef <- getAtomSpace
hptr <- liftIO $ callocBytes 8
liftIO $ withCString aType $
\atype -> withCString aName $
\aname -> do
res <- c_atomspace_addnode asRef atype aname hptr
if res == sUCCESS
then return $ Just hptr
else return Nothing
foreign import ccall "AtomSpace_addLink"
c_atomspace_addlink :: AtomSpaceRef
-> CString
-> HandleSeq
-> CInt
-> Handle
-> IO CInt
insertLink :: AtomType -> [Atom] -> AtomSpace (Maybe Handle)
insertLink aType aOutgoing = do
mlist <- mapM insertAndGetHandle aOutgoing
case mapM id mlist of
Nothing -> return Nothing
Just list -> do
asRef <- getAtomSpace
hptr <- liftIO $ callocBytes 8
liftIO $ withCString aType $
\atype -> withArray list $
\lptr -> do
res <- c_atomspace_addlink asRef atype lptr (fromIntegral $ length list) hptr
if res == sUCCESS
then return $ Just hptr
else return Nothing
insertAndGetHandle :: Atom -> AtomSpace (Maybe Handle)
insertAndGetHandle i = case i of
Node aType aName tv -> do
h <- insertNode aType aName
case h of -- set truth value after inserting.
Just hand -> setTruthValue hand tv
_ -> return False
return h
Link aType aOutgoing tv -> do
h <- insertLink aType aOutgoing
case h of -- set truth value after inserting.
Just hand -> setTruthValue hand tv
_ -> return False
return h
-- | 'insert' creates a new atom on the atomspace or updates the existing one.
insert :: Atom -> AtomSpace ()
insert i = do
mh <- insertAndGetHandle i
case mh of
Just h -> liftIO $ free h
Nothing -> return ()
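-- A usage sketch (illustrative; the atom type and name strings are made-up
-- values, and 'someTv' is a placeholder for whatever 'TruthVal' the caller
-- wants to attach):
--
--   addCatAtom :: TruthVal -> AtomSpace ()
--   addCatAtom someTv = insert (Node "ConceptNode" "cat" someTv)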
--------------------------------------------------------------------------------
foreign import ccall "AtomSpace_removeAtom"
c_atomspace_remove :: AtomSpaceRef
-> Handle
-> IO CInt
-- | 'remove' deletes an atom from the atomspace.
-- Returns True on success, or False if it couldn't locate the specified atom.
remove :: Atom -> AtomSpace Bool
remove i = do
asRef <- getAtomSpace
m <- getWithHandle i
case m of
Just (_,handle) -> do
res <- liftIO $ c_atomspace_remove asRef handle
liftIO $ free handle
return (res == sUCCESS)
_ -> return False
--------------------------------------------------------------------------------
foreign import ccall "AtomSpace_getNode"
c_atomspace_getnode :: AtomSpaceRef
-> CString
-> CString
-> Handle
-> IO CInt
getNodeHandle :: AtomType -> AtomName -> AtomSpace (Maybe Handle)
getNodeHandle aType aName = do
asRef <- getAtomSpace
hptr <- liftIO $ callocBytes 8
liftIO $ withCString aType $
\atype -> withCString aName $
\aname -> do
res <- c_atomspace_getnode asRef atype aname hptr
let found = res == sUCCESS
return $ if found
then Just hptr
else Nothing
getNode :: AtomType -> AtomName -> AtomSpace (Maybe (TruthVal,Handle))
getNode aType aName = do
m <- getNodeHandle aType aName
case m of
Nothing -> return Nothing
Just h -> do
res <- liftIO $ getTruthValue h
return $ case res of
Just tv -> Just (tv,h)
Nothing -> Nothing
foreign import ccall "AtomSpace_getLink"
c_atomspace_getlink :: AtomSpaceRef
-> CString
-> HandleSeq
-> CInt
-> Handle
-> IO CInt
getLinkHandle :: AtomType -> [Handle] -> AtomSpace (Maybe Handle)
getLinkHandle aType aOutgoing = do
asRef <- getAtomSpace
hptr <- liftIO $ callocBytes 8
liftIO $ withCString aType $
\atype -> withArray aOutgoing $
\lptr -> do
res <- c_atomspace_getlink asRef atype lptr
(fromIntegral $ length aOutgoing) hptr
let found = res == sUCCESS
return $ if found
then Just hptr
else Nothing
getLink :: AtomType -> [Handle] -> AtomSpace (Maybe (TruthVal,Handle))
getLink aType aOutgoing = do
m <- getLinkHandle aType aOutgoing
case m of
Nothing -> return Nothing
Just h -> do
res <- liftIO $ getTruthValue h
return $ case res of
Just tv -> Just (tv,h)
Nothing -> Nothing
getWithHandle :: Atom -> AtomSpace (Maybe (Atom,Handle))
getWithHandle i = do
let onLink :: AtomType
-> [Atom]
-> AtomSpace (Maybe (TruthVal,Handle,[Atom]))
onLink aType aOutgoing = do
ml <- sequence <$> mapM getWithHandle aOutgoing
case ml of -- ml :: Maybe [(AtomRaw,Handle)]
Nothing -> return Nothing
Just l -> do
res <- getLink aType $ map snd l
case res of
Just (tv,h) -> return $ Just (tv,h,map fst l)
_ -> return Nothing
in
case i of
Node aType aName _ -> do
m <- getNode aType aName
return $ case m of
Just (tv,h) -> Just $ (Node aType aName tv,h)
_ -> Nothing
Link aType aOutgoing _ -> do
m <- onLink aType aOutgoing
return $ case m of
Just (tv,h,newOutgoing) -> Just $ (Link aType newOutgoing tv, h)
_ -> Nothing
-- | 'get' looks for an atom in the atomspace and returns it.
-- (With updated mutable information)
get :: Atom -> AtomSpace (Maybe Atom)
get i = do
m <- getWithHandle i
case m of
Just (araw,handle) -> do liftIO $ free handle
return $ Just araw
_ -> return $ Nothing
--------------------------------------------------------------------------------
foreign import ccall "Exec_execute"
c_exec_execute :: AtomSpaceRef
-> Handle
-> Handle
-> IO CInt
execute :: Atom -> AtomSpace (Maybe Atom)
execute atom = do
m <- getWithHandle atom
case m of
Just (_,handle) -> do
asRef <- getAtomSpace
hptr <- liftIO $ callocBytes 8
res <- liftIO $ c_exec_execute asRef handle hptr
if res == sUCCESS
then do
resAtom <- getByHandle hptr
liftIO $ (free handle >> free hptr)
return resAtom
else return Nothing
_ -> return Nothing
foreign import ccall "Exec_evaluate"
c_exec_evaluate :: AtomSpaceRef
-> Handle
-> Ptr CString
-> Ptr CDouble
-> IO CInt
evaluate :: Atom -> AtomSpace (Maybe TruthVal)
evaluate atom = do
m <- getWithHandle atom
case m of
Just (_,handle) -> do
asRef <- getAtomSpace
res <- liftIO $ getTVfromC $ c_exec_evaluate asRef handle
liftIO $ free handle
return $ res
_ -> return Nothing
--------------------------------------------------------------------------------
foreign import ccall "AtomSpace_getAtomByHandle"
c_atomspace_getAtomByHandle :: AtomSpaceRef
-> Handle
-> Ptr CInt
-> Ptr CString
-> Ptr CString
-> HandleSeq
-> Ptr CInt
-> IO CInt
getByHandle :: Handle -> AtomSpace (Maybe Atom)
getByHandle h = do
asRef <- getAtomSpace
resTv <- liftIO $ getTruthValue h
case resTv of
Nothing -> return Nothing
Just tv -> do
res <- liftIO $ alloca $
\aptr -> alloca $
\tptr -> alloca $
\nptr -> alloca $
\hptr -> alloca $
\iptr -> do
res <- c_atomspace_getAtomByHandle asRef h aptr tptr nptr hptr iptr
if res /= sUCCESS
then return Nothing
else do
isNode <- toBool <$> peek aptr
ctptr <- peek tptr
atype <- peekCString ctptr
free ctptr
if isNode
then do
cnptr <- peek nptr
aname <- peekCString cnptr
free cnptr
return $ Just $ Right (atype,aname)
else do
outLen <- fromIntegral <$> peek iptr
outList <- peekArray outLen hptr
return $ Just $ Left (atype,outList)
case res of
Nothing -> return Nothing
Just (Right (atype,aname)) -> return $ Just $ Node atype aname tv
Just (Left (atype,outList)) -> do
mout <- mapM getByHandle outList
return $ case mapM id mout of
Just out -> Just $ Link atype out tv
Nothing -> Nothing
--------------------------------------------------------------------------------
foreign import ccall "TruthValue_getFromAtom"
c_truthvalue_getFromAtom :: Handle
-> Ptr CString
-> Ptr CDouble
-> IO CInt
-- Internal function to get an atom's truth value.
getTruthValue :: Handle -> IO (Maybe TruthVal)
getTruthValue handle = do
liftIO $ getTVfromC (c_truthvalue_getFromAtom handle)
foreign import ccall "TruthValue_setOnAtom"
c_truthvalue_setOnAtom :: Handle
-> CString
-> Ptr CDouble
-> IO CInt
-- Internal function to set an atom's truth value.
setTruthValue :: Handle -> TruthVal -> AtomSpace Bool
setTruthValue handle tv = do
let (TVRaw tvtype list) = toTVRaw tv
liftIO $ withArray (map realToFrac list) $
\lptr -> withCString tvtype $
\tptr -> do
res <- c_truthvalue_setOnAtom handle tptr lptr
return $ res == sUCCESS
foreign import ccall "PTruthValuePtr_fromRaw"
c_ptruthvalueptr_fromraw :: CString
-> Ptr CDouble
-> IO TruthValueP
-- Internal function for creating TruthValuePtr* to be returned by GroundedPredicate function
convertToTruthValueP :: TruthVal -> IO TruthValueP
convertToTruthValueP tv = do
let (TVRaw tvtype list) = toTVRaw tv
withArray (map realToFrac list) $
\lptr -> withCString tvtype $
\tptr -> do
res <- c_ptruthvalueptr_fromraw tptr lptr
return res
-- Helper function for creating a function that can be called from C
exportFunction :: (Atom -> AtomSpace Atom) -> Ptr AtomSpaceRef -> Handle -> IO (Handle)
exportFunction f asRef id = do
as <- refToObj asRef
(Just atom) <- as <: getByHandle id
let (AtomSpace op) = f atom
resAtom <- runReaderT op (AtomSpaceRef asRef)
(Just resID) <- as <: insertAndGetHandle resAtom
return resID
-- Helper function for creating predicates that can be called from C
exportPredicate :: (Atom -> TruthVal) -> Ptr AtomSpaceRef -> Handle -> IO (TruthValueP)
exportPredicate p asRef id = do
as <- refToObj asRef
(Just atom) <- as <: getByHandle id
convertToTruthValueP $ p atom
|
ngeiswei/atomspace
|
opencog/haskell/OpenCog/AtomSpace/Api.hs
|
agpl-3.0
| 14,150
| 9
| 45
| 5,121
| 3,605
| 1,759
| 1,846
| 336
| 7
|
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="fa-IR">
<title>Replacer | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
thc202/zap-extensions
|
addOns/replacer/src/main/javahelp/org/zaproxy/zap/extension/replacer/resources/help_fa_IR/helpset_fa_IR.hs
|
apache-2.0
| 969
| 78
| 66
| 158
| 411
| 208
| 203
| -1
| -1
|
{-# OPTIONS -Wall -Werror #-}
module Main where
import Data.Time
main :: IO ()
main = do
now <- getCurrentTime
putStrLn (show (utctDay now) ++ "," ++ show (utctDayTime now))
putStrLn (show (utcToZonedTime utc now :: ZonedTime))
myzone <- getCurrentTimeZone
putStrLn (show (utcToZonedTime myzone now :: ZonedTime))
|
bergmark/time
|
test/Test/CurrentTime.hs
|
bsd-3-clause
| 322
| 0
| 13
| 55
| 120
| 59
| 61
| 10
| 1
|
module DrawLex where
import Monad(join)
import List(groupBy)
import Maybe(mapMaybe,fromMaybe)
import Fudgets
import FudDraw(ulineD')
import GIFAltFile
import OpTypes(eqBy)
import PfePlumbing(Label(..),lblPos,Icons,CertsStatus,addRefPos,refPos,assertionDefLbl)
import TokenTags as C
import HsLexerPass1(nextPos1,Pos(..))
import HsTokens as T
import HLexTagModuleNames
import RefsTypes(merge{-,T(..)-})
import PFE_Certs(CertName,certAttrsPath)
import CertServers(parseAttrs)
import HsName(ModuleName(..))
import TypedIds(NameSpace(..))
-- only the position is significant when locating a Label in a drawing
rootLabel = posLabel rootPos
where rootPos = Pos (-1) (-1) (-1) -- not a valid file position
posLabel p = Lbl ((TheRest,(p,"")),Nothing)
origLabel orig = Lbl ((TheRest,(refPos orig,"")),Just orig)
fakeLex = labelD rootLabel . vboxlD' 0 . map g . map (expand 1) . take 2500 . lines
drawLex dir icons m colorGctx rs cs na =
if quickfix
then
labelD rootLabel . vboxlD' 0 . map g .
lines . concatMap (snd.snd)
else
labelD rootLabel . vboxlD' 0 .
map (hboxD' 0 . map tokenD . autoAnnot {-. groupSpace-}) .
take 2500 . -- Show at most 2500 lines !!
groupBy sameLine . concatMap split . merge (map addRefPos rs) .
convModuleNames
where
-- split tokens that span several lines:
split ((t,(Pos n y x,s)),r) =
[Lbl ((t,(Pos n y' x,l)),r)|(y',l)<-zip [y..] (lines (expand x s))]
sameLine = eqBy (line.lblPos)
tokenD lbl@(Lbl((t,(p,s)),r)) = markD . labelD lbl . colorD t . g $ s
where markD =
case r of
Just (_,_,defs) | length defs/=1 -> ulineD' "red"
_ -> id
colorD t =
case t of
NestedComment ->
case isCertAnnot s of
Just cert -> const (drawCertIcon dir icons m cs cert)
_ -> fgD C.Comment
Commentstart -> fgD C.Comment
T.Comment -> fgD C.Comment
LiterateComment -> fgD C.Comment
Reservedid -> fgD Reserved
Reservedop -> fgD Reserved
Special -> fgD Reserved
Specialid -> fgD Reserved
Conid -> con r
Qconid -> con r
Varsym -> fgD VarOp
Qvarsym -> fgD VarOp
Consym -> fgD ConOp
Qconsym -> fgD ConOp
IntLit -> fgD Lit
FloatLit -> fgD Lit
StringLit -> fgD Lit
CharLit -> fgD Lit
_ -> id
fgD = hardAttribD . colorGctx
con = maybe id (fgD . rcolor)
rcolor ((_,sp),_,_) = if sp==ValueNames then Con else TCon
autoAnnot ts = ts++autoannots
where
autoannots = map (nestedComment dummyPos.certAnnot) certs
certs = concatMap (fromMaybe [] . flip lookup na) as
as = mapMaybe assertionDefLbl ts
dummyPos = lblPos (last ts)
certAnnot cert = "{-#cert:"++cert++"#-}"
nestedComment p s = Lbl ((NestedComment,(p,s)),Nothing)
{-
groupSpace [] = []
groupSpace (lbl@(Lbl((t,(p,s)),r)):ts) =
if isWhite lbl
then case span isWhite ts of
(ws,ts') -> Lbl((t,(p,s++concatMap str ws)),r):groupSpace ts'
else lbl:groupSpace ts
where
str (Lbl((_,(_,s)),_)) = s
isWhite (Lbl((Whitespace,(p,s)),r)) = all isSpace s
isWhite _ = False
-}
drawCertIcon :: FilePath -> Icons -> ModuleName -> CertsStatus -> CertName ->
Drawing lbl Gfx
drawCertIcon dir (sad,icons) m cstatus cert =
g (fileGfxAlt certIcon (certAttrsPath m cert dir) sad)
where
certIcon s =
case (`lookup` icons) =<< lookup "type" (parseAttrs s) of
Just cicons -> Right (cstatusIcon cicons (join (lookup cert cstatus)))
_ -> Left "bad cert/unknown cert type"
certIcon (sad,icons) (cert,(Just attrs,cstatus)) =
case (`lookup` icons) =<< lookup "type" attrs of
Just icons -> cstatusIcon icons cstatus
_ -> sad
certIcon (sad,_) _ = sad
cstatusIcon (valid,invalid,unknown) cstatus =
case cstatus of
Just (isvalid,_) -> if isvalid then valid else invalid
_ -> unknown
-- isCertAnnot :: Monad m => String -> m CertName
isCertAnnot s =
do '{':'-':'#':'c':'e':'r':'t':':':r <- return s
'}':'-':'#':f <- return (reverse r)
return (reverse f)
{- Why use "case" when you can use "do"? :-)
isCertAnnot s =
case s of
'{':'-':'#':'c':'e':'r':'t':':':r ->
case reverse r of
'}':'-':'#':f -> Just (reverse f)
_ -> Nothing
_ -> Nothing
-}
expand x "" = ""
expand x (c:s) =
case c of
'\t' -> replicate (x'-x) ' '++expand x' s
_ -> c:expand x' s
where Pos _ _ x' = nextPos1 (Pos 0 1 x) c
quickfix = argFlag "quickfix" False
|
forste/haReFork
|
tools/pfe/Browser/DrawLex.hs
|
bsd-3-clause
| 4,554
| 21
| 22
| 1,158
| 1,585
| 832
| 753
| 102
| 23
|
module CoreToLog where
import Data.Set
-- ISSUE: can we please allow things like `empty` to also
-- appear in type and alias specifications, not just in
-- measures as in `goo` below?
{-@ type IsEmp a = {v:[a] | Data.Set.elems v = Data.Set.empty } @-}
{-@ foo :: IsEmp Int @-}
foo :: [Int]
foo = []
{-@ measure goo @-}
goo :: (Ord a) => [a] -> Set a
goo [] = empty
goo (x:xs) = (singleton x) `union` (goo xs)
|
mightymoose/liquidhaskell
|
tests/pos/coretologic.hs
|
bsd-3-clause
| 428
| 0
| 7
| 101
| 97
| 57
| 40
| 7
| 1
|
module D1 where
--Any type/data constructor name declared in this module can be renamed.
--Any type variable can be renamed.
--Rename type Constructor 'BTree' to 'MyBTree'
data BTree a = Empty | T a (BTree a) (BTree a)
deriving Show
buildtree :: Ord a => [a] -> BTree a
buildtree [] = Empty
buildtree (x:xs) = insert x (buildtree xs)
insert :: Ord a => a -> BTree a -> BTree a
insert val v2 = let f (new@(T val Empty Empty)) = new in f v2
newPat_1 = Empty
f :: String -> String
f newPat_2@((x : xs)) = newPat_2
main :: BTree Int
main = buildtree [3,1,2]
|
kmate/HaRe
|
old/testing/unfoldAsPatterns/D1.hs
|
bsd-3-clause
| 579
| 0
| 13
| 134
| 226
| 120
| 106
| 13
| 1
|
import Control.Concurrent
import Control.Exception
-- version of conc015 using mask in place of the old deprecated
-- block/unblock.
-- test blocking & unblocking of async exceptions.
-- the first exception "foo" should be caught by the "caught1" handler,
-- since async exceptions are blocked outside this handler.
-- the second exception "bar" should be caught by the outer "caught2" handler,
-- (i.e. this tests that async exceptions are properly unblocked after
-- being blocked).
main = do
main_thread <- myThreadId
print =<< getMaskingState
m <- newEmptyMVar
m2 <- newEmptyMVar
forkIO (do takeMVar m
throwTo main_thread (ErrorCall "foo")
throwTo main_thread (ErrorCall "bar")
putMVar m2 ()
)
( do
mask $ \restore -> do
putMVar m ()
print =<< getMaskingState
sum [1..100000] `seq` -- give 'foo' a chance to be raised
(restore (myDelay 500000)
`Control.Exception.catch`
\e -> putStrLn ("caught1: " ++ show (e::SomeException)))
threadDelay 10000
takeMVar m2
)
`Control.Exception.catch`
\e -> do print =<< getMaskingState
putStrLn ("caught2: " ++ show (e::SomeException))
-- compensate for the fact that threadDelay is non-interruptible
-- on Windows with the threaded RTS in 6.6.
myDelay usec = do
m <- newEmptyMVar
forkIO $ do threadDelay usec; putMVar m ()
takeMVar m
-- myDelay = threadDelay
|
seereason/ghcjs
|
test/ghc/concurrent/conc015a.hs
|
mit
| 1,432
| 10
| 24
| 332
| 307
| 156
| 151
| 28
| 1
|
module Bug2 ( x ) where
import B
x :: A
x = A
|
DavidAlphaFox/ghc
|
utils/haddock/html-test/src/Bug2.hs
|
bsd-3-clause
| 46
| 0
| 4
| 14
| 22
| 14
| 8
| 4
| 1
|
-- Trac #2529
-- The example below successfully performed the {{{show}}}, but {{{reads}}}
-- returns an empty list. It fails in both GHCi and GHC. It succeeds if you
-- replace the infix symbol with a name.
module Main where
data A = (:<>:) { x :: Int, y :: Int } deriving (Read, Show)
t :: A
t = 1 :<>: 2
s :: String
s = show t
r :: [(A,String)]
r = reads s
main :: IO ()
main = do putStrLn s
putStrLn (show r)
|
siddhanathan/ghc
|
testsuite/tests/deriving/should_run/T2529.hs
|
bsd-3-clause
| 429
| 14
| 7
| 105
| 124
| 71
| 53
| 11
| 1
|
module Futhark.CodeGen.ImpGen.Multicore.Base
( extractAllocations,
compileThreadResult,
Locks (..),
HostEnv (..),
AtomicBinOp,
MulticoreGen,
decideScheduling,
decideScheduling',
groupResultArrays,
renameSegBinOp,
freeParams,
renameHistOpLambda,
atomicUpdateLocking,
AtomicUpdate (..),
Locking (..),
getSpace,
getIterationDomain,
getReturnParams,
segOpString,
)
where
import Control.Monad
import Data.Bifunctor
import qualified Data.Map as M
import Data.Maybe
import qualified Futhark.CodeGen.ImpCode.Multicore as Imp
import Futhark.CodeGen.ImpGen
import Futhark.Error
import Futhark.IR.MCMem
import Futhark.Transform.Rename
import Prelude hiding (quot, rem)
-- | Is there an atomic t'BinOp' corresponding to this t'BinOp'?
type AtomicBinOp =
BinOp ->
Maybe (VName -> VName -> Imp.Count Imp.Elements (Imp.TExp Int32) -> Imp.Exp -> Imp.AtomicOp)
-- | Information about the locks available for accumulators.
data Locks = Locks
{ locksArray :: VName,
locksCount :: Int
}
data HostEnv = HostEnv
{ hostAtomics :: AtomicBinOp,
hostLocks :: M.Map VName Locks
}
type MulticoreGen = ImpM MCMem HostEnv Imp.Multicore
segOpString :: SegOp () MCMem -> MulticoreGen String
segOpString SegMap {} = return "segmap"
segOpString SegRed {} = return "segred"
segOpString SegScan {} = return "segscan"
segOpString SegHist {} = return "seghist"
arrParam :: VName -> MulticoreGen Imp.Param
arrParam arr = do
name_entry <- lookupVar arr
case name_entry of
ArrayVar _ (ArrayEntry (MemLoc mem _ _) _) ->
return $ Imp.MemParam mem DefaultSpace
_ -> error $ "arrParam: could not handle array " ++ show arr
toParam :: VName -> TypeBase shape u -> MulticoreGen [Imp.Param]
toParam name (Prim pt) = return [Imp.ScalarParam name pt]
toParam name (Mem space) = return [Imp.MemParam name space]
toParam name Array {} = pure <$> arrParam name
toParam _name Acc {} = pure [] -- FIXME? Are we sure this works?
getSpace :: SegOp () MCMem -> SegSpace
getSpace (SegHist _ space _ _ _) = space
getSpace (SegRed _ space _ _ _) = space
getSpace (SegScan _ space _ _ _) = space
getSpace (SegMap _ space _ _) = space
getIterationDomain :: SegOp () MCMem -> SegSpace -> MulticoreGen (Imp.TExp Int64)
getIterationDomain SegMap {} space = do
let ns = map snd $ unSegSpace space
ns_64 = map toInt64Exp ns
return $ product ns_64
getIterationDomain _ space = do
let ns = map snd $ unSegSpace space
ns_64 = map toInt64Exp ns
case unSegSpace space of
[_] -> return $ product ns_64
-- A segmented SegOp is over the segments
-- so we drop the last dimension, which is
-- executed sequentially
_ -> return $ product $ init ns_64
-- When the SegRed's return value is a scalar
-- we perform a call by value-result in the segop function
getReturnParams :: Pat MCMem -> SegOp () MCMem -> MulticoreGen [Imp.Param]
getReturnParams pat SegRed {} =
-- It's a good idea to make sure any prim values are initialised, as
-- we will load them (redundantly) in the task code, and
-- uninitialised values are UB.
fmap concat . forM (patElems pat) $ \pe -> do
case patElemType pe of
Prim pt -> patElemName pe <~~ ValueExp (blankPrimValue pt)
_ -> pure ()
toParam (patElemName pe) (patElemType pe)
getReturnParams _ _ = return mempty
renameSegBinOp :: [SegBinOp MCMem] -> MulticoreGen [SegBinOp MCMem]
renameSegBinOp segbinops =
forM segbinops $ \(SegBinOp comm lam ne shape) -> do
lam' <- renameLambda lam
return $ SegBinOp comm lam' ne shape
compileThreadResult ::
SegSpace ->
PatElem MCMem ->
KernelResult ->
MulticoreGen ()
compileThreadResult space pe (Returns _ _ what) = do
let is = map (Imp.le64 . fst) $ unSegSpace space
copyDWIMFix (patElemName pe) is what []
compileThreadResult _ _ ConcatReturns {} =
compilerBugS "compileThreadResult: ConcatReturn unhandled."
compileThreadResult _ _ WriteReturns {} =
compilerBugS "compileThreadResult: WriteReturns unhandled."
compileThreadResult _ _ TileReturns {} =
compilerBugS "compileThreadResult: TileReturns unhandled."
compileThreadResult _ _ RegTileReturns {} =
compilerBugS "compileThreadResult: RegTileReturns unhandled."
freeVariables :: Imp.Code -> [VName] -> [VName]
freeVariables code names =
namesToList $ freeIn code `namesSubtract` namesFromList names
freeParams :: Imp.Code -> [VName] -> MulticoreGen [Imp.Param]
freeParams code names = do
let freeVars = freeVariables code names
ts <- mapM lookupType freeVars
concat <$> zipWithM toParam freeVars ts
-- | Arrays for storing group results shared between threads
groupResultArrays ::
String ->
SubExp ->
[SegBinOp MCMem] ->
MulticoreGen [[VName]]
groupResultArrays s num_threads reds =
forM reds $ \(SegBinOp _ lam _ shape) ->
forM (lambdaReturnType lam) $ \t -> do
let full_shape = Shape [num_threads] <> shape <> arrayShape t
sAllocArray s (elemType t) full_shape DefaultSpace
isLoadBalanced :: Imp.Code -> Bool
isLoadBalanced (a Imp.:>>: b) = isLoadBalanced a && isLoadBalanced b
isLoadBalanced (Imp.For _ _ a) = isLoadBalanced a
isLoadBalanced (Imp.If _ a b) = isLoadBalanced a && isLoadBalanced b
isLoadBalanced (Imp.Comment _ a) = isLoadBalanced a
isLoadBalanced Imp.While {} = False
isLoadBalanced (Imp.Op (Imp.ParLoop _ _ _ code _ _ _)) = isLoadBalanced code
isLoadBalanced _ = True
segBinOpComm' :: [SegBinOp rep] -> Commutativity
segBinOpComm' = mconcat . map segBinOpComm
decideScheduling' :: SegOp () rep -> Imp.Code -> Imp.Scheduling
decideScheduling' SegHist {} _ = Imp.Static
decideScheduling' SegScan {} _ = Imp.Static
decideScheduling' (SegRed _ _ reds _ _) code =
case segBinOpComm' reds of
Commutative -> decideScheduling code
Noncommutative -> Imp.Static
decideScheduling' SegMap {} code = decideScheduling code
decideScheduling :: Imp.Code -> Imp.Scheduling
decideScheduling code =
if isLoadBalanced code
then Imp.Static
else Imp.Dynamic
-- | Try to extract invariant allocations. If we assume that the
-- given 'Imp.Code' is the body of a 'SegOp', then it is always safe
-- to move the immediate allocations to the prebody.
extractAllocations :: Imp.Code -> (Imp.Code, Imp.Code)
extractAllocations segop_code = f segop_code
where
declared = Imp.declaredIn segop_code
f (Imp.DeclareMem name space) =
-- Hoisting declarations out is always safe.
(Imp.DeclareMem name space, mempty)
f (Imp.Allocate name size space)
| not $ freeIn size `namesIntersect` declared =
(Imp.Allocate name size space, mempty)
f (x Imp.:>>: y) = f x <> f y
f (Imp.While cond body) =
(mempty, Imp.While cond body)
f (Imp.For i bound body) =
(mempty, Imp.For i bound body)
f (Imp.Comment s code) =
second (Imp.Comment s) (f code)
f Imp.Free {} =
mempty
f (Imp.If cond tcode fcode) =
let (ta, tcode') = f tcode
(fa, fcode') = f fcode
in (ta <> fa, Imp.If cond tcode' fcode')
f (Imp.Op (Imp.ParLoop s i prebody body postbody free info)) =
let (body_allocs, body') = extractAllocations body
(free_allocs, here_allocs) = f body_allocs
free' =
filter
( not
. (`nameIn` Imp.declaredIn body_allocs)
. Imp.paramName
)
free
in ( free_allocs,
here_allocs
<> Imp.Op (Imp.ParLoop s i prebody body' postbody free' info)
)
f code =
(mempty, code)
-------------------------------
------- SegHist helpers -------
-------------------------------
renameHistOpLambda :: [HistOp MCMem] -> MulticoreGen [HistOp MCMem]
renameHistOpLambda hist_ops =
forM hist_ops $ \(HistOp w rf dest neutral shape lam) -> do
lam' <- renameLambda lam
return $ HistOp w rf dest neutral shape lam'
-- | Locking strategy used for an atomic update.
data Locking = Locking
{ -- | Array containing the lock.
lockingArray :: VName,
-- | Value for us to consider the lock free.
lockingIsUnlocked :: Imp.TExp Int32,
-- | What to write when we lock it.
lockingToLock :: Imp.TExp Int32,
-- | What to write when we unlock it.
lockingToUnlock :: Imp.TExp Int32,
-- | A transformation from the logical lock index to the
-- physical position in the array. This can also be used
-- to make the lock array smaller.
lockingMapping :: [Imp.TExp Int64] -> [Imp.TExp Int64]
}
-- | A function for generating code for an atomic update. Assumes
-- that the bucket is in-bounds.
type DoAtomicUpdate rep r =
[VName] -> [Imp.TExp Int64] -> MulticoreGen ()
-- | The mechanism that will be used for performing the atomic update.
-- Approximates how efficient it will be. Ordered from most to least
-- efficient.
data AtomicUpdate rep r
= AtomicPrim (DoAtomicUpdate rep r)
| -- | Can be done by efficient swaps.
AtomicCAS (DoAtomicUpdate rep r)
| -- | Requires explicit locking.
AtomicLocking (Locking -> DoAtomicUpdate rep r)
atomicUpdateLocking ::
AtomicBinOp ->
Lambda MCMem ->
AtomicUpdate MCMem ()
atomicUpdateLocking atomicBinOp lam
| Just ops_and_ts <- lamIsBinOp lam,
all (\(_, t, _, _) -> supportedPrims $ primBitSize t) ops_and_ts =
primOrCas ops_and_ts $ \arrs bucket ->
-- If the operator is a vectorised binary operator on 32-bit values,
-- we can use a particularly efficient implementation. If the
-- operator has an atomic implementation we use that, otherwise it
-- is still a binary operator which can be implemented by atomic
-- compare-and-swap if 32 bits.
forM_ (zip arrs ops_and_ts) $ \(a, (op, t, x, y)) -> do
-- Common variables.
old <- dPrim "old" t
(arr', _a_space, bucket_offset) <- fullyIndexArray a bucket
case opHasAtomicSupport (tvVar old) arr' (sExt32 <$> bucket_offset) op of
Just f -> sOp $ f $ Imp.var y t
Nothing ->
atomicUpdateCAS t a (tvVar old) bucket x $
x <~~ Imp.BinOpExp op (Imp.var x t) (Imp.var y t)
where
opHasAtomicSupport old arr' bucket' bop = do
let atomic f = Imp.Atomic . f old arr' bucket'
atomic <$> atomicBinOp bop
primOrCas ops
| all isPrim ops = AtomicPrim
| otherwise = AtomicCAS
isPrim (op, _, _, _) = isJust $ atomicBinOp op
atomicUpdateLocking _ op
| [Prim t] <- lambdaReturnType op,
[xp, _] <- lambdaParams op,
supportedPrims (primBitSize t) = AtomicCAS $ \[arr] bucket -> do
old <- dPrim "old" t
atomicUpdateCAS t arr (tvVar old) bucket (paramName xp) $
compileBody' [xp] $ lambdaBody op
atomicUpdateLocking _ op = AtomicLocking $ \locking arrs bucket -> do
old <- dPrim "old" int32
continue <- dPrimVol "continue" int32 (0 :: Imp.TExp Int32)
-- Correctly index into locks.
(locks', _locks_space, locks_offset) <-
fullyIndexArray (lockingArray locking) $ lockingMapping locking bucket
-- Critical section
let try_acquire_lock = do
old <-- (0 :: Imp.TExp Int32)
sOp $
Imp.Atomic $
Imp.AtomicCmpXchg
int32
(tvVar old)
locks'
(sExt32 <$> locks_offset)
(tvVar continue)
(untyped (lockingToLock locking))
lock_acquired = tvExp continue
-- Even the releasing is done with an atomic rather than a
-- simple write, for memory coherency reasons.
release_lock = do
old <-- lockingToLock locking
sOp $
Imp.Atomic $
Imp.AtomicCmpXchg
int32
(tvVar old)
locks'
(sExt32 <$> locks_offset)
(tvVar continue)
(untyped (lockingToUnlock locking))
-- Preparing parameters. It is assumed that the caller has already
-- filled the arr_params. We copy the current value to the
-- accumulator parameters.
let (acc_params, _arr_params) = splitAt (length arrs) $ lambdaParams op
bind_acc_params =
everythingVolatile $
sComment "bind lhs" $
forM_ (zip acc_params arrs) $ \(acc_p, arr) ->
copyDWIMFix (paramName acc_p) [] (Var arr) bucket
let op_body =
sComment "execute operation" $
compileBody' acc_params $ lambdaBody op
do_hist =
everythingVolatile $
sComment "update global result" $
zipWithM_ (writeArray bucket) arrs $ map (Var . paramName) acc_params
-- While-loop: Try to insert your value
sWhile (tvExp continue .==. 0) $ do
try_acquire_lock
sUnless (lock_acquired .==. 0) $ do
dLParams acc_params
bind_acc_params
op_body
do_hist
release_lock
where
writeArray bucket arr val = copyDWIMFix arr bucket val []
atomicUpdateCAS ::
PrimType ->
VName ->
VName ->
[Imp.TExp Int64] ->
VName ->
MulticoreGen () ->
MulticoreGen ()
atomicUpdateCAS t arr old bucket x do_op = do
run_loop <- dPrimV "run_loop" (0 :: Imp.TExp Int32)
(arr', _a_space, bucket_offset) <- fullyIndexArray arr bucket
bytes <- toIntegral $ primBitSize t
let (toBits, fromBits) =
case t of
FloatType Float16 ->
( \v -> Imp.FunExp "to_bits16" [v] int16,
\v -> Imp.FunExp "from_bits16" [v] t
)
FloatType Float32 ->
( \v -> Imp.FunExp "to_bits32" [v] int32,
\v -> Imp.FunExp "from_bits32" [v] t
)
FloatType Float64 ->
( \v -> Imp.FunExp "to_bits64" [v] int64,
\v -> Imp.FunExp "from_bits64" [v] t
)
_ -> (id, id)
int
| primBitSize t == 16 = int16
| primBitSize t == 32 = int32
| otherwise = int64
everythingVolatile $ copyDWIMFix old [] (Var arr) bucket
old_bits_v <- tvVar <$> dPrim "old_bits" int
old_bits_v <~~ toBits (Imp.var old t)
let old_bits = Imp.var old_bits_v int
-- While-loop: Try to insert your value
sWhile (tvExp run_loop .==. 0) $ do
x <~~ Imp.var old t
do_op -- Writes result into x
sOp . Imp.Atomic $
Imp.AtomicCmpXchg
bytes
old_bits_v
arr'
(sExt32 <$> bucket_offset)
(tvVar run_loop)
(toBits (Imp.var x t))
old <~~ fromBits old_bits
supportedPrims :: Int -> Bool
supportedPrims 8 = True
supportedPrims 16 = True
supportedPrims 32 = True
supportedPrims 64 = True
supportedPrims _ = False
-- Byte lengths supported by the GCC (and Clang) compilers
toIntegral :: Int -> MulticoreGen PrimType
toIntegral 8 = return int8
toIntegral 16 = return int16
toIntegral 32 = return int32
toIntegral 64 = return int64
toIntegral b = error $ "number of bytes is not supported for CAS - " ++ pretty b
|
HIPERFIT/futhark
|
src/Futhark/CodeGen/ImpGen/Multicore/Base.hs
|
isc
| 14,776
| 0
| 19
| 3,659
| 4,325
| 2,178
| 2,147
| 331
| 10
|
{-# htermination intersect :: Eq a => [(Maybe a)] -> [(Maybe a)] -> [(Maybe a)] #-}
import List
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/full_haskell/List_intersect_10.hs
|
mit
| 96
| 0
| 3
| 18
| 5
| 3
| 2
| 1
| 0
|
module Oden.Infer.Subsumption (
SubsumptionError(..),
typeSubsumedBy,
subsumedBy,
collectSubstitutions
) where
import Oden.Core.Expr (typeOf)
import Oden.Core.Typed as Typed
import Oden.Substitution
import Oden.Metadata
import Oden.SourceInfo
import Oden.Type.Kind
import Oden.Type.Polymorphic
import Control.Monad
import Control.Monad.Except
import Control.Monad.State
import qualified Data.Map as Map
data SubsumptionError = SubsumptionError SourceInfo Type Type
deriving (Show, Eq)
type Subsume a = StateT Subst (Except SubsumptionError) a
-- | Collects the substitutions in the 'Subsume' state for matching types and
-- throws 'SubsumptionError' on mismatches.
collectSubstitutions :: Type -> Type -> Subsume ()
collectSubstitutions t1 (TNamed _ _ t2) = collectSubstitutions t1 t2
collectSubstitutions (TNamed _ _ t1) t2 = collectSubstitutions t1 t2
collectSubstitutions (TCon _ n1) (TCon _ n2)
| n1 == n2 = return ()
collectSubstitutions t (TVar (Metadata si) tv) = do
(Subst s) <- get
case Map.lookup tv s of
Just t' | t == t' -> return ()
| otherwise -> throwError (SubsumptionError si t t')
Nothing -> modify (insert tv t)
collectSubstitutions (TFn _ a1 r1) (TFn _ a2 r2) = do
collectSubstitutions a1 a2
collectSubstitutions r1 r2
collectSubstitutions (TNoArgFn _ r1) (TNoArgFn _ r2) = collectSubstitutions r1 r2
collectSubstitutions (TForeignFn _ _ a1 r1) (TForeignFn _ _ a2 r2) =
mapM_ (uncurry collectSubstitutions) (zip a1 a2 ++ zip r1 r2)
collectSubstitutions (TSlice _ t1) (TSlice _ t2) = collectSubstitutions t1 t2
collectSubstitutions (TTuple _ f1 s1 r1) (TTuple _ f2 s2 r2) = do
collectSubstitutions f1 f2
collectSubstitutions s1 s2
zipWithM_ collectSubstitutions r1 r2
collectSubstitutions (TRecord _ r1) (TRecord _ r2) =
collectSubstitutions r1 r2
collectSubstitutions REmpty{} REmpty{} = return ()
collectSubstitutions r1 r2 | kindOf r1 == Row && kindOf r2 == Row = do
let f1 = Map.fromList (rowToList r1)
f2 = Map.fromList (rowToList r2)
onlyIn1 = f1 `Map.difference` f2
onlyIn2 = f2 `Map.difference` f1
unless (Map.null onlyIn1) $
throwError (SubsumptionError (getSourceInfo r2) r1 r2)
unless (Map.null onlyIn2) $
throwError (SubsumptionError (getSourceInfo r2) r1 r2)
unless (getLeafRow r1 == getLeafRow r2) $
throwError (SubsumptionError (getSourceInfo r2) r1 r2)
sequence_ (Map.elems (Map.intersectionWith collectSubstitutions f1 f2))
collectSubstitutions t1 t2 = throwError (SubsumptionError (getSourceInfo t2) t1 t2)
-- | Test if a specific type is subsumed by a more general type. If so, return
-- the corresponding substitution.
typeSubsumedBy :: Type -> Type -> Either SubsumptionError Subst
typeSubsumedBy specific general =
snd <$> runExcept (runStateT (collectSubstitutions specific general) (Subst Map.empty))
-- | Test if a type scheme is subsumed by an expression with a more general
-- type. If so, return the expression specialized to the less general type (all
-- subexpression types being substituted as well).
subsumedBy :: Scheme -> Typed.TypedExpr -> Either SubsumptionError Typed.CanonicalExpr
subsumedBy s@(Forall _ _ _ st) expr = do
subst <- typeSubsumedBy st (typeOf expr)
return (s, apply subst expr)
|
oden-lang/oden
|
src/Oden/Infer/Subsumption.hs
|
mit
| 3,413
| 0
| 13
| 715
| 1,062
| 529
| 533
| 64
| 2
|
-- ------------------------------------------------------ --
-- Copyright © 2014 AlephCloud Systems, Inc.
-- ------------------------------------------------------ --
{-# LANGUAGE UnicodeSyntax #-}
{-# LANGUAGE OverloadedStrings #-}
module Main
( main
) where
import Network.Wai.Middleware.Cors
import Web.Scotty
main ∷ IO ()
main = scotty 8080 $ do
middleware simpleCors
matchAny "/" $ text "Success"
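-- A quick manual check once the server is running (assuming curl is
-- available; exact CORS headers depend on the simpleCors defaults):
--
--   $ curl -i http://localhost:8080/
--   ...
--   Success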
|
alephcloud/wai-cors
|
test/server.hs
|
mit
| 419
| 0
| 9
| 62
| 67
| 37
| 30
| 10
| 1
|
{-# LANGUAGE QuasiQuotes, RecordWildCards #-}
-- | (you should read the source for documentation: just think of this module as a config file)
module Commands.Frontends.Dragon13.Shim.Commands where
import Commands.Frontends.Dragon13.Shim.Types
import Commands.Frontends.Natlink.Types
import Text.InterpolatedString.Perl6
import GHC.Exts (IsString)
import Prelude
renderGrammarProperties :: (IsString t, Monoid t) => GrammarProperties -> t
renderGrammarProperties GrammarProperties{..} = [qc|Properties(status={_status}, exclusivity={_exclusivity}, shouldEavesdrop={_shouldEavesdrop}, shouldHypothesize={_shouldHypothesize})|]
where
_status = case _grammarStatus of
Enabled -> "True" -- TODO
Disabled -> "False"
_exclusivity = case _grammarExclusivity of
Exclusive -> "1"
Inclusive -> "0"
_shouldEavesdrop = case _grammarShouldEavesdrop of
YesEavesdrop -> "1"
NoEavesdrop -> "0"
_shouldHypothesize = case _grammarShouldHypothesize of
YesHypothesize -> "1"
NoHypothesize -> "0"
{-
{'status': {_status},
'exclusivity': {_exclusivity},
'shouldEavesdrop': {_shouldEavesdrop},
'shouldHypothesize': {_shouldHypothesize}
}
-}
{- |
Given valid input, the output will be a syntactically valid Python (2.6)
module that depends only on the standard library and @natlink@.
>>> let Right{} = newPythonFile (getShim (ShimR "'''rules'''" "{'list':''}" "export" "localhost" "8666"))
The '__export__' must be exported by '__rules__'.
The Haskell server runs at @('__serverHost__', '__serverPort__')@ on the host.
some specializations:
@
getShim :: ShimR String -> String
getShim :: ShimR Text -> Text
getShim :: ShimR Doc -> Doc
@
= Implementation
inside the 'qc', "what you see is what you get", besides:
* escaping backslashes (e.g. @r'\\\\'@ renders as r'\\')
* interpolating between braces (e.g. @{...}@ is not a dict). The quasiquote must use @dict(a=1)@ rather than @{'a':1}@ to avoid conflicting with the quasiquoter's interpolation syntax, or escape the first curly brace (e.g. @\{...}@).
hello are some words detect Unicode thing speak detect Unicode
-}
getShim :: (IsString t, Monoid t) => ShimR t -> t
getShim ShimR{..} = [qc|
#-*- coding: utf-8 -*-
# _commands.py
# natlink13 library
from natlinkmain import (setCheckForGrammarChanges)
from natlinkutils import (GrammarBase)
import natlink # a DLL
# python standard library
import time
import json
import urllib2
import traceback
from collections import (namedtuple)
################################################################################
# TYPES
Properties = namedtuple('Properties', ['status', 'exclusivity', 'shouldEavesdrop', 'shouldHypothesize'])
################################################################################
# UTILITIES
# current time in milliseconds
def now():
return int(time.clock() * 1000)
# http://stackoverflow.com/questions/1685221/accurately-measure-time-python-function-takes
def timeit(message, callback, *args, **kwargs):
before = time.clock()
result = callback(*args,**kwargs)
after = time.clock()
print message, ': ', (after - before) * 1000, 'ms'
return result
'''
json.dumps("cafe'") (i.e. with acute accent) causes
```UnicodeDecodeError: 'utf8' codec can't decode byte 0xe9 in position 3: unexpected end of data```
>>> 'caf\xe9'.decode('cp1252').encode('utf-8')
u'caf\xe9'
'''
def isUnicode(data): # TODO
try:
for word in data:
word.decode('cp1252').encode('utf8')
return True
except UnicodeDecodeError as e:
print e
print traceback.format_exc()
return False
def toUnicode(data): # TODO
try:
return [word.decode('cp1252').encode('utf8') for word in data]
except UnicodeDecodeError as e:
print e
print traceback.format_exc()
return False
def first_result(resultsObject):
return next(get_results(resultsObject), None)
# "exceptions aren't exceptional" lmfao
def get_results(resultsObject):
'''iterators are more idiomatic'''
try:
for number in xrange(10):
yield resultsObject.getWords(number)
except:
return
################################################################################
# INTERPOLATIONS from "H"askell
H_RULES = {__rules__}
H_LISTS = {__lists__}
H_EXPORT = {__export__}
H_SERVER_HOST = {__serverHost__}
H_SERVER_PORT = {__serverPort__}
H_PROPERTIES = {__properties__}
# e.g. for debugging
# H_RULES = '''<test> exported = \{test};'''
# H_LISTS = \{'test', ['upcase region']}
# H_EXPORT = 'test'
# H_SERVER_HOST = "192.168.56.1"
# H_SERVER_PORT = '8666'
# H_PROPERTIES = \{'status': True , 'exclusivity': 0, 'shouldEavesdrop': 1, 'shouldHypothesize': 1}
server_address = "http://%s:%s" % (H_SERVER_HOST, H_SERVER_PORT)
# HTTP versus HTTPS
microphone_rule = '''<microphone> exported = mike on | mike off | mike dead ;'''
microphone_export = "microphone"
################################################################################
# THE GRAMMAR
class NarcissisticGrammar(GrammarBase):
''' 'Narcissistic' because:
* load(.., allResults=1) means: every recognition triggers gotResultsObject
* load(.., hypothesis=1) means: every hypothesis, before the recognition, triggers gotHypothesis
* activate(.., exclusive=1) means: deactivate every other non-exclusive rule
(when both flags are set on load, NarcissisticGrammar.gotResultsObject is called on
every recognition of every exclusive rule, including this class's rules
of course, though I only expect this class to be active).
'''
gramSpec = microphone_rule + H_RULES
def initialize(self):
self.set_rules(self.gramSpec, [microphone_export, H_EXPORT])
self.set_lists(H_LISTS)
self.doOnlyGotResultsObject = True # aborts all processing after calling gotResultsObject
# def configure(self, allResults=True , hypothesis=True , doOnlyGotResultsObject=True):
# self.load(self.gramSpec, allResults=int(allResults), hypothesis=int(hypothesis))
# self.doOnlyGotResultsObject = doOnlyGotResultsObject
# TODO must it reload the grammar?
# TODO should include export for safety?
def set_rules(self, rules, exports):
self.gramSpec = rules
self.load(rules, allResults=H_PROPERTIES.shouldEavesdrop, hypothesis=H_PROPERTIES.shouldHypothesize)
self.set_exports(exports)
# activateSet is idempotent, unlike activate
def set_exports(self, exports):
self.activateSet(exports, exclusive=H_PROPERTIES.exclusivity )
# TODO must it reload the grammar?
def set_lists(self, lists):
for (lhs, rhs) in lists.items():
self.setList(lhs, rhs)
# called when speech is detected, before recognition begins.
def gotBegin(self, moduleInfo):
# handleDGNContextResponse(timeit("/context", urlopen, ("%s/context" % server_address), timeout=0.1))
# TODO parameterize "context" API
print
print
print "- - - - gotBegin - - - -"
# moduleInfo is just the current window in Windows
def gotHypothesis(self, words):
print
print "---------- gotHypothesis -------------"
print words
# recognitionType = self | reject | other
def gotResultsObject(self, recognitionType, resultsObject):
print "---------- gotResultsObject ----------"
print "recognitionType =", recognitionType
if not recognitionType: return
words = next(get_results(resultsObject), [])
data = toUnicode(words) # munge_recognition(words)
url = "%s/recognition/" % (server_address,) # TODO parameterize "recognition" API
# print 'resultsObject =',resultsObject
print 'words =', words
print 'url =', url
# # NOTE this correctly inserts characters into the virtual machine playString
# natlink.playString (' '.join(words))
try:
if should_request(self,data):
print 'data =', json.dumps(data)
request = urllib2.Request(url, json.dumps(data), \{"Content-Type": "application/json"})
response = urllib2.urlopen(request)
handleResponse(self, response)
pass
except Exception as e:
print
print "---------- error ------------------"
print "sending the request and/or handling the response threw:"
print e
print traceback.format_exc()
# don't print until the request is sent and the response is handled
try:
print
print "status =", response.getcode()
print "body =", response
except NameError:
print
except Exception as e:
print
print "---------- error ------------------"
print e
print traceback.format_exc()
# for debugging only, shows whether specific rules (rather than the generic dgndictation) are matching the recognition
# not called when (self.doOnlyGotResultsObject=True)
def gotResults(self, words, fullResults):
print
print "---------- gotResultsObject ----------"
print "fullResults =", fullResults
################################################################################
# API
# TODO handleDGNUpdate(grammar, response)
def handleDGNUpdate(grammar, response):
pass
def should_request(grammar,data):
b = data and not handle_microphone(grammar,data) and isUnicode(data)
print "should_request=", b
return b
# returns true if it matched the recognition (and executed the magic action).
# in which case, don't send a request to the server to execute any non-magic actions.
# "mike off" deactivates all grammars besides the microphone grammar, "putting the microphone to sleep".
def handle_microphone(grammar,data):
raw = " ".join(data)
if raw == "mike on":
# grammar.setMicState("on")
grammar.activateSet([microphone_export, H_EXPORT], exclusive=1)
return True
elif raw == "mike off":
# grammar.setMicState("sleeping")
grammar.activateSet([microphone_export],exclusive=1)
return True
elif raw == "mike dead":
# the natlink.setMicState("off") # can't even be manually turned back on via the GUI
return True
else:
return False
'''
'''
def handleResponse(grammar, response) :
pass
################################################################################
# BOILERPLATE
# mutable global
GRAMMAR = None
def load():
global GRAMMAR
# automatically reload on file change (not only when microphone toggles on)
setCheckForGrammarChanges(1)
GRAMMAR = NarcissisticGrammar()
GRAMMAR.initialize()
def unload():
global GRAMMAR
if GRAMMAR:
GRAMMAR.unload()
GRAMMAR = None
load()
################################################################################
|]
|
sboosali/commands
|
commands-frontend-DragonNaturallySpeaking/sources/Commands/Frontends/Dragon13/Shim/Commands.hs
|
mit
| 11,093
| 0
| 9
| 2,384
| 221
| 129
| 92
| 23
| 5
|
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module Parse ( JSONCell (..)
, JSONPiece (..)
, JSON (..)
, readJSON
) where
--------------------------------------------------------------------------------
------------------------------------ Header ------------------------------------
--------------------------------------------------------------------------------
-- Imports
import Control.Applicative ((<*>))
import Control.Monad (mzero)
import qualified Data.ByteString as BS (readFile)
import Data.Aeson
--------------------------------------------------------------------------------
------------------------------------ Types -------------------------------------
--------------------------------------------------------------------------------
-- | A cell in JSON
data JSONCell = JSONCell { jsonX :: Int
, jsonY :: Int
} deriving (Eq, Show, Read)
-- | A piece in JSON
data JSONPiece = JSONPiece { jsonMembers :: [JSONCell]
, jsonPivot :: JSONCell
} deriving (Eq, Show, Read)
-- | All the information retrieved from a JSON input file
data JSON = JSON { jsonId :: Int
, jsonPieces :: [JSONPiece]
, jsonWidth :: Int
, jsonHeight :: Int
, jsonFilled :: [JSONCell]
, jsonSourceLength :: Int
, jsonSourceSeeds :: [Int]
} deriving (Eq, Show, Read)
--------------------------------------------------------------------------------
---------------------------------- Functions -----------------------------------
--------------------------------------------------------------------------------
-- | Read JSON from a file and strictly decode it into a 'JSON'
readJSON :: FilePath -> IO (Maybe JSON)
readJSON path = decodeStrict' <$> BS.readFile path
--------------------------------------------------------------------------------
---------------------------------- Instances -----------------------------------
--------------------------------------------------------------------------------
-- | Parse a 'JSON'
instance FromJSON JSON where
parseJSON (Object v) = JSON <$> v .: "id"
<*> v .: "units"
<*> v .: "width"
<*> v .: "height"
<*> v .: "filled"
<*> v .: "sourceLength"
<*> v .: "sourceSeeds"
parseJSON _ = mzero
-- | Parse a 'JSONCell'
instance FromJSON JSONCell where
parseJSON (Object v) = JSONCell <$> v .: "x"
<*> v .: "y"
parseJSON _ = mzero
-- | Parse a 'JSONPiece'
instance FromJSON JSONPiece where
parseJSON (Object v) = JSONPiece <$> v .: "members"
<*> v .: "pivot"
parseJSON _ = mzero
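-- A small driver sketch for 'readJSON' (the file name here is hypothetical):
--
--   main :: IO ()
--   main = do
--     mjson <- readJSON "input.json"
--     case mjson of
--       Nothing -> putStrLn "failed to parse JSON input"
--       Just j -> print (jsonId j, jsonWidth j, jsonHeight j, length (jsonPieces j))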
|
sebmathguy/icfp-2015
|
library/Parse.hs
|
mit
| 3,063
| 0
| 19
| 938
| 454
| 265
| 189
| 42
| 1
|
-- | This module defines how the SoH editor and controls are rendered.
module View (renderControls, renderEditor) where
import Control.Lens.Extras (is)
import Import
import qualified JavaScript.Ace as Ace
import JavaScript.IFrame
import JavaScript.TermJs
import Model (runQuery, runSnippetCode, switchTab, closeControls, clearTypeInfo)
import View.Build
import View.Console
import View.PosMap (handleChange, selectionToSpan)
import View.TypeInfo
renderControls
:: UComponent TermJs
-> UComponent IFrame
-> State
-> React ()
renderControls termjs iframe state = do
let status = state ^. stateStatus
case status of
InitialStatus -> return ()
_ -> do
class_ "soh-visible"
-- Set the position of the controls.
div_ $ do
class_ "controls-bar"
renderTab state ConsoleTab "" "Console"
renderTab state WebTab "" "Web"
renderTab state DocsTab "" "Docs"
renderTab state BuildTab (buildStatusClass status) $ do
text (buildStatusText status)
renderCloseButton
renderTabContent state ConsoleTab $ consoleTab termjs
renderTabContent state DocsTab $ buildIFrame iframe stateDocs (Just noDocsUrl)
renderTabContent state WebTab $ buildIFrame iframe stateWeb Nothing
renderTabContent state BuildTab $ buildTab status
--------------------------------------------------------------------------------
-- Editor
renderEditor
:: UComponent Ace.Editor
-> UComponent TermJs
-> UComponent IFrame
-> SnippetId
-> JSString
-> Bool
-> State
-> React ()
renderEditor ace termjs iframe sid initialValue inlineControls state = do
let isCurrent = currentSnippet state == Just sid
class_ "soh-container"
div_ $ do
class_ $ addWhen isCurrent "soh-current"
$ addWhen (not inlineControls) "soh-remote-controls"
$ "soh-snippet"
buildUnmanaged ace (ixSnippet sid . snippetEditor) $ \stateVar q -> do
editor <- Ace.makeEditor q
Ace.setMaxLinesInfty editor
Ace.setValue editor initialValue
debounce 100 (handleSelectionChange stateVar sid) >>=
Ace.onSelectionChange editor
Ace.onChange editor (handleChange stateVar sid)
Ace.addCommand editor "run" "Ctrl-Enter" "Command-Enter" $ runSnippetCode stateVar sid
return editor
renderRunButton sid isCurrent (state ^. stateStatus)
forM_ (join (state ^? ixSnippet sid . snippetTypeInfo)) $ \(typs, x, y, _) ->
-- TODO: remove this ugly hack! We sometimes get lots of type
-- infos for the same span due to TH.
when (length typs < 4) $ typePopup typs x y
when (isCurrent && inlineControls) $ div_ $ do
id_ "soh-controls"
class_ "soh-inline-controls"
div_ $ renderControls termjs iframe state
handleSelectionChange :: TVar State -> SnippetId -> IO ()
handleSelectionChange stateVar sid = do
state <- readTVarIO stateVar
selection <- Ace.getSelection =<< getEditor state sid
-- Only show types for selections that contain multiple chars.
if Ace.anchor selection == Ace.lead selection
then clearTypeInfo stateVar sid
-- Compute the source span of the query at the time of compilation.
else case selectionToSpan state sid selection of
Nothing -> do
clearTypeInfo stateVar sid
-- FIXME: UI for this.
putStrLn "No span for this query"
Just ss -> runQuery stateVar sid (QueryInfo ss)
renderRunButton :: SnippetId -> Bool -> Status -> React ()
renderRunButton sid isCurrent s = div_ $ do
let building = is _BuildRequested s || is _Building s
working = building && isCurrent
class_ $ addWhen working "building"
$ "run glyphicon"
title_ $ if working then "Compiling code..." else "Compile and run code (Ctrl-Enter / Command-Enter)"
onClick $ \_ state -> runSnippetCode state sid
--------------------------------------------------------------------------------
-- Tabs
renderTab :: State -> Tab -> Text -> React () -> React ()
renderTab state tab extraClasses f = div_ $ do
class_ $
addWhen (state ^. stateTab == tab) "tab-focused"
("tab " <> tabClass tab <> " " <> extraClasses)
onClick (\_ -> flip switchTab tab)
f
renderTabContent :: State -> Tab -> React () -> React ()
renderTabContent state tab f = div_ $ do
class_ $
addWhen (state ^. stateTab == tab) "tab-content-focused"
("tab-content " <> tabClass tab <> "-content")
f
tabClass :: Tab -> Text
tabClass BuildTab = "build-tab"
tabClass ConsoleTab = "console-tab"
tabClass DocsTab = "docs-tab"
tabClass WebTab = "web-tab"
renderCloseButton :: React ()
renderCloseButton = div_ $ do
class_ "soh-close-btn"
onClick $ \_ -> closeControls
|
fpco/schoolofhaskell
|
soh-client/src/View.hs
|
mit
| 4,755
| 0
| 20
| 1,074
| 1,281
| 612
| 669
| -1
| -1
|
--------------------------------------------------------------------------------
-- (c) Tsitsimpis Ilias, 2011-2012
--
-- Utilities for error reporting
--
--------------------------------------------------------------------------------
{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
module ErrUtils (
Message, mkLocMessage,
msgSpan, msgContext, msgSeverity, msgExtraInfo,
showMsg,
MsgCode(..),
Severity(..),
ErrMsg, WarnMsg,
ErrorMessages, WarningMessages,
Messages, errorsFound, warnsFound,
emptyMessages, unionMessages,
mkErrMsg, mkWarnMsg,
sortMessages,
addError, addWarning
) where
import Bag
import SrcLoc
import Util
import DynFlags
-- -------------------------------------------------------------------
-- Basic error codes
-- Don't forget to add the error message to the Show instance
data MsgCode
= ParseError String
| TypeError String
| ScopeError String
| UnreachError -- unreachable code
| RedefError String -- function/variable redefinition
| NoRetError String -- missing return statement
| OverflowError String -- type overflow error (ie ints > 32 bits)
| ArrSizeError String -- array definition errors
| UnusedIdError String -- variable/function defined but not used
| UnusedRsError String -- unused result
| ProtoError String -- prototype declaration errors
| UnknownError
data Severity
= SevInfo
| SevOutput
| SevWarning
| SevError
| SevFatal
-- -------------------------------------------------------------------
-- Collecting up messages for later ordering and printing
data Message = Msg {
msgSeverity :: Severity,
msgSpan :: SrcSpan,
msgContext :: MsgCode,
msgExtraInfo :: String
}
mkLocMessage :: Severity -> SrcSpan -> MsgCode -> String -> Message
mkLocMessage msgSev msgSpan msgContext msgExtraInfo =
Msg msgSev msgSpan msgContext msgExtraInfo
-- An error message
type ErrMsg = Message
mkErrMsg :: SrcSpan -> MsgCode -> String -> ErrMsg
mkErrMsg = mkLocMessage SevError
-- A warning message
type WarnMsg = Message
mkWarnMsg :: SrcSpan -> MsgCode -> String -> WarnMsg
mkWarnMsg = mkLocMessage SevWarning
type Messages = (Bag WarnMsg, Bag ErrMsg)
type WarningMessages = Bag WarnMsg
type ErrorMessages = Bag ErrMsg
emptyMessages :: Messages
emptyMessages = (emptyBag, emptyBag)
errorsFound :: Messages -> Bool
errorsFound (_warns, errs) = not (isEmptyBag errs)
warnsFound :: Messages -> Bool
warnsFound (warns, _errs) = not (isEmptyBag warns)
unionMessages :: Messages -> Messages -> Messages
unionMessages (w1, e1) (w2, e2) =
(w1 `unionBags` w2, e1 `unionBags` e2)
-- ---------------------------
-- Convert a Message to a String
showMsg :: DynFlags -> Message -> String
showMsg dflags Msg{msgSeverity=sev,msgSpan=mspan,msgContext=code,msgExtraInfo=extra} =
let extra' = if null extra then "" else "\n\t" ++ extra
loc = if dopt Opt_ErrorSpans dflags
then showSrcSpan mspan
else showSrcLoc (srcSpanStart mspan)
in loc ++ ": " ++ show sev ++ ": " ++ show code ++ extra'
-- -------------------------------------------------------------------
-- Sort a list of messages by descending SrcSpan order
sortMessages :: [Message] -> [Message]
sortMessages = sortLe (\Msg{msgSpan=s1} Msg{msgSpan=s2} -> s1<=s2)
-- -------------------------------------------------------------------
-- Add new messages to the bag
addError :: ErrMsg -> Messages -> Messages
addError err (warns, errs) =
let errs' = errs `snocBag` err
in
errs' `seq` (warns, errs')
addWarning :: WarnMsg -> Messages -> Messages
addWarning warn (warns, errs) =
let warns' = warns `snocBag` warn
in
warns' `seq` (warns', errs)
-- -------------------------------------------------------------------
-- Instance declarations
instance Show Severity where
show SevInfo = "Info"
show SevOutput = ""
show SevWarning = "Warning"
show SevError = "Error"
show SevFatal = "Fatal Error"
instance Show MsgCode where
show (ParseError "") = "Parse error at end of file"
show (ParseError buf) = "Parse error on input `" ++ buf ++ "'"
show (ScopeError buf) = "Not in scope `" ++ buf ++ "'"
show (TypeError expr) = "Type mismatch at `" ++ expr ++ "'"
show UnreachError = "Unreachable code"
show (RedefError buf) = "Conflicting definitions for `" ++ buf ++ "'"
show (NoRetError buf) = "Control reaches end of non-proc function `" ++ buf ++ "'"
show (OverflowError buf) = "Overflow in implicit constant conversion at `" ++ buf ++ "'"
show (ArrSizeError buf) = "Array definition error at `" ++ buf ++ "'"
show (UnusedIdError buf) = "Defined but not used: `" ++ buf ++ "'"
show (UnusedRsError buf) = "Unused return value of function `" ++ buf ++ "'"
show (ProtoError buf) = "Declaration for `" ++ buf ++ "' doesn't match prototype"
show UnknownError = "Unknown Error :@"
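{- A small usage sketch built only from this module's API ('sp' stands in for
   whatever SrcSpan the parser provides; it is not defined here):

     let msgs   = emptyMessages
         msgs'  = addWarning (mkWarnMsg sp (UnusedIdError "x") "") msgs
         msgs'' = addError   (mkErrMsg  sp (ParseError "")      "") msgs'
     in (warnsFound msgs'', errorsFound msgs'')   -- (True, True)
-}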
|
iliastsi/gac
|
src/basicTypes/ErrUtils.hs
|
mit
| 5,013
| 0
| 12
| 1,039
| 1,121
| 634
| 487
| 100
| 3
|
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE DeriveGeneric #-}
-----------------------------------------------------------------------------
-- |
-- Module : Network.RPC
-- Copyright : (c) Phil Hargett 2014
-- License : MIT (see LICENSE file)
--
-- Maintainer : phil@haphazardhouse.net
-- Stability : experimental
-- Portability : non-portable (requires STM)
--
-- An implementation of synchronous remote procedure calls
-- (<http://en.wikipedia.org/wiki/Remote_procedure_call RPC>) on top of
-- 'Network.Endpoints.Endpoint's.
--
-- Applications exporting services for use by other applications via
-- RPC call 'handle' to start listening for incoming RPC requests
-- for a specific 'Method'. If multiple functions or 'Method's are exported,
-- then separate calls to 'handle' are necessary, one for each exported 'Method'.
-- Each call to 'handle' produces a 'HandleSite' which may be used to terminate
-- future handling of RPCs for that specific method by calling 'hangup' on the
-- returned 'HandleSite'.
--
-- Applications wishing to make RPCs to other applications or services do so
-- by first constructing a 'CallSite', and then 'call'ing specific methods
-- on the target handler through that 'CallSite'.
--
-- Both single and multiple target RPCs are available, as are variants that
-- either wait indefinitely or at most for a defined timeout.
--
-----------------------------------------------------------------------------
module Network.RPC (
Method,
newCallSite,
CallSite,
call,
callWithTimeout,
gcall,
gcallWithTimeout,
anyCall,
methodSelector,
hear,
hearTimeout,
hearAll,
hearAllTimeout,
Reply,
HandleSite(..),
handle,
handleAll,
hangup,
Request(..),
RequestId,
mkRequestId,
Response(..)
) where
-- local imports
import Network.Endpoints
-- external imports
import Control.Concurrent
import Control.Concurrent.Async
import Control.Concurrent.STM
import Control.Monad
import qualified Data.Map as M
import qualified Data.Set as S
import Data.Serialize
import Data.Word
import GHC.Generics hiding (from)
import System.Random
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
{-|
An identifier for what method to invoke on the receiving 'Endpoint'. If 'hear' has been invoked on the 'Endpoint'
with a matching identifier, then calls will be delivered to that invocation of 'hear'.
-}
type Method = String
data RPCMessageType = Req | Rsp deriving (Eq,Show,Enum,Generic)
instance Serialize RPCMessageType
{-|
A unique identifier for a 'Request'
-}
newtype RequestId = RequestId (Word32, Word32, Word32, Word32) deriving (Generic,Eq,Show)
instance Serialize RequestId
{-|
Create a new identifier for 'Request's
-}
mkRequestId :: IO RequestId
mkRequestId = do
w1 <- randomIO
w2 <- randomIO
w3 <- randomIO
w4 <- randomIO
return $ RequestId (w1, w2, w3, w4)
{-|
Encapsulates the initiating side of a 'call': every invocation of 'call' produces a 'Request' that is sent
to the destination 'Endpoint', where the 'hear'ing side will generate a 'Response' after completing the request.
-}
data Request = Request {
requestId :: RequestId,
requestCaller :: Name,
requestMethod :: Method,
requestArgs :: Message
} deriving (Eq,Show)
instance Serialize Request where
put req = do
put Req
put $ requestId req
put $ requestCaller req
put $ requestMethod req
put $ requestArgs req
get = do
Req <- get
rid <- get
caller <- get
method <- get
args <- get
return $ Request rid caller method args
{-|
Encapsulates the completion side of a 'call': every invocation of 'call' produces a 'Request' that is sent
to the destination 'Endpoint', where the 'hear'ing side will generate a 'Response' after completing the request.
-}
data Response = Response {
responseId :: RequestId,
responseFrom :: Name,
responseValue :: Message
} deriving (Eq,Show)
instance Serialize Response where
put rsp = do
put Rsp
put $ responseId rsp
put $ responseFrom rsp
put $ responseValue rsp
get = do
Rsp <- get
rid <- get
from <- get
val <- get
return $ Response rid from val
{-|
A call site is a location for making RPCs: it includes an endpoint
and a name by which recipients can return the call
-}
data CallSite = CallSite Endpoint Name
{-|
Create a new 'CallSite' using the indicated 'Endpoint' for sending
RPCs and using the specified 'Name' for receiving responses.
-}
newCallSite :: Endpoint -> Name -> CallSite
newCallSite = CallSite
{-|
Call a method with the provided arguments on the recipient with the given name.
The caller will wait until a matching response is received.
-}
call :: CallSite -> Name -> Method -> Message -> IO Message
call (CallSite endpoint from) name method args = do
rid <- mkRequestId
let req = Request {requestId = rid,requestCaller = from,requestMethod = method, requestArgs = args}
sendMessage endpoint name $ encode req
selectMessage endpoint $ \msg -> do
case decode msg of
Left _ -> Nothing
Right (Response respId _ value) -> do
if respId == rid
then Just value
else Nothing
{-|
Call a method with the provided arguments on the recipient with the given name.
A request will be made through the 'CallSite''s 'Endpoint', and then
the caller will wait until a matching response is received. If a response
is received within the provided timeout (measured in microseconds), then
return the value wrapped in 'Just'; otherwise, if the timeout expires
before the call returns, then return 'Nothing'.
-}
callWithTimeout :: CallSite -> Name -> Method -> Int-> Message -> IO (Maybe Message)
callWithTimeout site name method delay args = do
resultOrTimeout <- race callIt (threadDelay delay)
case resultOrTimeout of
Left value -> return $ Just value
Right _ -> return Nothing
where
callIt = call site name method args
{-|
Group call or RPC: call a method with the provided arguments on all the recipients with the given names.
A request will be made through the 'CallSite''s 'Endpoint', and then
the caller will wait until all matching responses are received.
-}
gcall :: CallSite -> [Name] -> Method -> Message -> IO (M.Map Name Message)
gcall (CallSite endpoint from) names method args = do
rid <- mkRequestId
let req = Request {requestId = rid,requestCaller = from,requestMethod = method, requestArgs = args}
sendAll req
recvAll req M.empty
where
sendAll req = do
forM_ names $ \name -> sendMessage endpoint name $ encode req
recv req = selectMessage endpoint $ \msg -> do
case decode msg of
Left _ -> Nothing
Right (Response rid name value) -> do
if (rid == (requestId req)) && (elem name names)
then Just (name,value)
else Nothing
recvAll req results = do
(replier,result) <- recv req
let newResults = M.insert replier result results
replied = S.fromList $ M.keys newResults
expected = S.fromList names
if S.null (S.difference expected replied)
-- we have everything
then return newResults
-- still waiting on some results, so keep receiving
else recvAll req newResults
{-|
Group call or RPC but with a timeout: call a method with the provided arguments on all the
recipients with the given names. A request will be made through the 'CallSite''s 'Endpoint',
and then the caller will wait until all matching responses are received or the timeout occurs.
The returned 'M.Map' has a key for every 'Name' that was a target of the call, and the value
of that key will be @Nothing@ if no response was received before the timeout, or @Just value@
if a response was received.
-}
gcallWithTimeout :: CallSite -> [Name] -> Method -> Int -> Message -> IO (M.Map Name (Maybe Message))
gcallWithTimeout (CallSite endpoint from) names method delay args = do
rid <- mkRequestId
let req = Request {requestId = rid,requestCaller = from,requestMethod = method, requestArgs = args}
sendAll req
allResults <- atomically $ newTVar M.empty
responses <- race (recvAll req allResults) (threadDelay delay)
case responses of
Left results -> return $ complete results
Right _ -> do
partialResults <- atomically $ readTVar allResults
return $ complete partialResults
where
sendAll req = do
forM_ names $ \name -> sendMessage endpoint name $ encode req
recv req = selectMessage endpoint $ \msg -> do
case decode msg of
Left _ -> Nothing
Right (Response rid name value) -> do
if (rid == (requestId req)) && (elem name names)
then Just (name,value)
else Nothing
recvAll :: Request -> TVar (M.Map Name Message) -> IO (M.Map Name Message)
recvAll req allResults = do
(replier,result) <- recv req
newResults <- atomically $ do
modifyTVar allResults $ \results -> M.insert replier result results
readTVar allResults
let replied = S.fromList $ M.keys newResults
expected = S.fromList names
if S.null (S.difference expected replied)
-- we have everything
then return newResults
-- still waiting on some results, so keep receiving
else recvAll req allResults
-- Make sure the final results have an entry for every name,
-- but put Nothing for those handlers that did not return a result in time
complete :: M.Map Name b -> M.Map Name (Maybe b)
complete partial = foldl (\final name -> M.insert name (M.lookup name partial) final) M.empty names
{-|
Invoke the same method on multiple 'Name's, and wait indefinitely until
the first response from any 'Name', returning the value and the 'Name'
which responded.
-}
anyCall :: CallSite -> [Name] -> Method -> Message -> IO (Message,Name)
anyCall (CallSite endpoint from) names method args = do
rid <- mkRequestId
let req = Request {requestId = rid,requestCaller = from,requestMethod = method, requestArgs = args}
sendAll req
recvAny req
where
sendAll req = do
forM_ names $ \name -> sendMessage endpoint name $ encode req
recvAny req = selectMessage endpoint $ \msg -> do
case decode msg of
Left _ -> Nothing
Right (Response rid name value) -> do
if (rid == (requestId req)) && (elem name names)
then Just (value,name)
else Nothing
{-|
A 'Reply' is a one-shot function for sending a response to an incoming request.
-}
type Reply b = b -> IO ()
{-|
A simple function that, given a 'Method', returns a filter suitable for
use with 'selectMessage'. The typical use case will involve partial
application: @methodSelector method@ passed as an argument to 'selectMessage'.
-}
methodSelector :: Method -> Message -> Maybe (Name,RequestId,Message)
methodSelector method msg = do
case decode msg of
Left _ -> Nothing
Right (Request rid caller rmethod args) -> do
if rmethod == method
then Just (caller,rid,args)
else Nothing
anySelector :: Message -> Maybe (Name,RequestId,Method,Message)
anySelector msg =
case decode msg of
Left _ -> Nothing
Right (Request rid caller method args) -> Just (caller,rid,method,args)
{-|
Wait for a single incoming request to invoke the indicated 'Method' on the specified
'Endpoint'. Return both the method arguments and a 'Reply' function useful for sending
the reply. A good pattern for using 'hear' will pattern match the result to a tuple of
the form @(args,reply)@, then use the args as needed to compute a result, and then
finally send the result back to the client by simply passing the result to reply: @reply result@.
The invoker of 'hear' must supply the 'Name' they have bound to the 'Endpoint', as this
helps the original requestor of the RPC differentiate responses when the RPC was a group
call.
-}
hear :: Endpoint -> Name -> Method -> IO (Message,Reply Message)
hear endpoint name method = do
(caller,rid,args) <- selectMessage endpoint $ methodSelector method
return (args, reply caller rid)
where
reply caller rid result = do
sendMessage endpoint caller $ encode $ Response rid name result
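-- Following the (args, reply) pattern described above, a one-shot handler
-- might look like this ('process' is a hypothetical pure function):
--
--   serveOnce :: Endpoint -> Name -> Method -> (Message -> Message) -> IO ()
--   serveOnce endpoint name method process = do
--     (args, reply) <- hear endpoint name method
--     reply (process args)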
{-|
Same as 'hear', except return 'Nothing' if no request received within the specified
timeout (measured in microseconds), or return a 'Just' instance containing both the
method arguments and a 'Reply' function useful for sending the reply.
-}
hearTimeout :: Endpoint -> Name -> Method -> Int -> IO (Maybe (Message,Reply Message))
hearTimeout endpoint name method timeout = do
req <- selectMessageTimeout endpoint timeout $ methodSelector method
case req of
Just (caller,rid,args) -> return $ Just (args, reply caller rid)
Nothing -> return Nothing
where
reply caller rid result = do
sendMessage endpoint caller $ encode $ Response rid name result
{-|
A variant of 'hear', except it listens for any incoming RPC request on the specified 'Endpoint'.
-}
hearAll :: Endpoint -> Name -> IO (Method,Message,Reply Message)
hearAll endpoint name = do
(caller,rid,method,args) <- selectMessage endpoint selectorForAll
return (method,args,reply caller rid)
where
reply caller rid result =
sendMessage endpoint caller $ encode $ Response rid name result
selectorForAll msg =
case decode msg of
Left _ -> Nothing
Right (Request rid caller method args) -> Just (caller,rid,method,args)
{-|
A variant of 'hearTimeout', except it listens for any incoming RPC request on the specified 'Endpoint'
-}
hearAllTimeout :: Endpoint -> Name -> Int -> IO (Maybe (Method,Message,Reply Message))
hearAllTimeout endpoint name timeout = do
req <- selectMessageTimeout endpoint timeout anySelector
case req of
Just (caller,rid,method,args) -> return $ Just (method,args, reply caller rid)
Nothing -> return Nothing
where
reply caller rid result = do
sendMessage endpoint caller $ encode $ Response rid name result
{-|
A 'HandleSite' is just a reference to the actual handler of a specific method.
It exists mostly so that 'hangup' can be invoked on the handler once it is no longer needed.
-}
data HandleSite = HandleSite Name (Async ())
{-|
Handle all RPCs to invoke the indicated 'Method' on the specified 'Endpoint',
until 'hangup' is called on the returned 'HandleSite'.
-}
handle :: Endpoint -> Name -> Method -> (Message -> IO Message) -> IO HandleSite
handle endpoint name method fn = do
task <- async handleCall
return $ HandleSite name task
where
handleCall = do
(args,reply) <- hear endpoint name method
result <- fn args
reply result
handleCall
{-|
Handle all RPCs on the specified 'Endpoint' until 'hangup' is called on the returned 'HandleSite'
-}
handleAll :: Endpoint -> Name -> (Method -> Message -> IO Message) -> IO HandleSite
handleAll endpoint name fn = do
task <- async handleCall
return $ HandleSite name task
where
handleCall = do
(method,args,reply) <- hearAll endpoint name
result <- fn method args
reply result
handleCall
{-|
Stop handling incoming RPCs for the indicated 'HandleSite'.
-}
hangup :: HandleSite -> IO ()
hangup (HandleSite _ task) = do
cancel task
return ()
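{- A minimal end-to-end sketch of exporting and calling a method using only
   this module (constructing the 'Endpoint's and binding the 'Name's is done
   via Network.Endpoints and is assumed here; 'someArgs' is a placeholder
   'Message'):

     echoServer :: Endpoint -> Name -> IO HandleSite
     echoServer endpoint name = handle endpoint name "echo" return

     echoClient :: Endpoint -> Name -> Name -> Message -> IO Message
     echoClient endpoint from to someArgs =
       call (newCallSite endpoint from) to "echo" someArgs

   When finished, call 'hangup' on the returned 'HandleSite' to stop serving.
-}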
|
hargettp/courier
|
src/Network/RPC.hs
|
mit
| 16,205
| 0
| 21
| 4,135
| 3,314
| 1,689
| 1,625
| 248
| 5
|
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE InstanceSigs #-}
module Lib
( module Control.Lens
, module Control.Applicative
, module Numeric.Lens
, module Data.Char
, module Data.Data.Lens
, someFunc
)
where
import Control.Lens
import Control.Applicative
import Data.Char
import Data.Data.Lens
import Numeric.Lens
import qualified Data.Map as M
import qualified Data.Set as S
import qualified Data.Text as T
someFunc :: IO ()
someFunc = putStrLn "someFunc"
|
shouya/thinking-dumps
|
optics-exercises/src/Lib.hs
|
mit
| 673
| 0
| 6
| 103
| 116
| 78
| 38
| 25
| 1
|
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ViewPatterns #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module LambdaCmsOrg.Page.Foundation where
import Yesod
import Data.Text (Text)
import Network.Wai (requestMethod)
import Control.Arrow ((&&&))
import LambdaCms.Core
import LambdaCmsOrg.Page.Message (PageMessage, defaultMessage, englishMessage)
import qualified LambdaCmsOrg.Page.Message as Msg
import LambdaCmsOrg.Page.Models
import LambdaCmsOrg.Page.PageType
data PageAdmin = PageAdmin
mkYesodSubData "PageAdmin" $(parseRoutesFile "config/routes")
instance LambdaCmsOrgPage master => RenderMessage master PageMessage where
renderMessage = renderPageMessage
type PageHandler a = forall master. LambdaCmsOrgPage master => HandlerT PageAdmin (HandlerT master IO) a
type PageForm x = forall master. LambdaCmsOrgPage master => Html -> MForm (HandlerT master IO) (FormResult x, WidgetT master IO ())
class LambdaCmsAdmin master => LambdaCmsOrgPage master where
pageR :: Route PageAdmin -> Route master
renderPageMessage :: master
-> [Text]
-> PageMessage
-> Text
renderPageMessage m (lang:langs) = do
case (lang `elem` (renderLanguages m), lang) of
(True, "en") -> englishMessage
_ -> renderPageMessage m langs
renderPageMessage _ _ = defaultMessage
class ToHuman a where
toHuman :: a -> Text
instance ToHuman PageType where
toHuman Homepage = "homepage"
toHuman Documentation = "documentation"
toHuman Community = "community"
toHuman License = "license"
defaultPageAdminMenu :: LambdaCmsOrgPage master => (Route PageAdmin -> Route master) -> [AdminMenuItem master]
defaultPageAdminMenu tp = [ MenuItem (SomeMessage Msg.MenuPage) (tp PageAdminIndexR) "file" ]
instance LambdaCmsOrgPage master => LambdaCmsLoggable master Page where
logMessage y "POST" = translatePageLogs y Msg.LogCreatedPage
logMessage y "PATCH" = translatePageLogs y Msg.LogUpdatedPage
logMessage y "DELETE" = translatePageLogs y Msg.LogDeletedPage
logMessage _ _ = const []
translatePageLogs :: forall b master.
( LambdaCmsOrgPage master
, RenderMessage master b
) => master -> (Text -> b) -> Page -> [(Text, Text)]
translatePageLogs y msg e = map (id &&& messageFor) $ renderLanguages y
where messageFor lang = renderMessage y [lang] . msg . toHuman $ pageType e
logPage :: LambdaCmsOrgPage master => Page -> HandlerT master IO [(Text, Text)]
logPage page = do
y <- getYesod
method <- waiRequest >>= return . requestMethod
return $ logMessage y method page
|
lambdacms/lambdacms.org
|
lambdacmsorg-page/LambdaCmsOrg/Page/Foundation.hs
|
mit
| 3,074
| 0
| 13
| 794
| 754
| 397
| 357
| 61
| 1
|
module Scale where
import Note
data Scale a = Scale a a a a a a a deriving (Eq, Ord, Show)
type IntervalScale = Scale Int
type NoteScale = Scale Note
toList :: Scale a -> [a]
toList (Scale i ii iii iv v vi vii) = [i, ii, iii, iv, v, vi, vii]
applyScale :: IntervalScale -> Note -> [Note]
applyScale scale note = scanl (flip changePitch ) note (toList scale)
scaleFromNotes :: Note -> Note -> Note -> Note -> Note -> Note -> Note -> IntervalScale
scaleFromNotes a b c d e f g = Scale (distance a b) (distance b c) (distance c d)
(distance d e) (distance e f) (distance f g) (distance g a)
major = scaleFromNotes (Note C Natural)
(Note D Natural)
(Note E Natural)
(Note F Natural)
(Note G Natural)
(Note A Natural)
(Note B Natural)
minor = scaleFromNotes (Note A Natural)
(Note B Natural)
(Note C Natural)
(Note D Natural)
(Note E Natural)
(Note F Natural)
(Note G Natural)
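-- For example, assuming 'changePitch' raises a note by the given interval
-- (a sketch relying only on the definitions above):
--
--   cMajorScale :: [Note]
--   cMajorScale = applyScale major (Note C Natural)
--   -- eight notes: the C major scale degrees, ending on C again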
|
damianfral/soundchorden
|
src/Scale.hs
|
mit
| 1,209
| 0
| 11
| 505
| 445
| 231
| 214
| 26
| 1
|
module Main where
import System.Environment (getProgName)
import System.IO (hPutStrLn, stderr)
import Client (getServerStatus, serverCommand, stopServer)
import CommandArgs
import Daemonize (daemonize)
import Server (startServer, createListenSocket)
import Types (Command(..))
defaultSocketFilename :: FilePath
defaultSocketFilename = ".hdevtools.sock"
getSocketFilename :: Maybe FilePath -> FilePath
getSocketFilename Nothing = defaultSocketFilename
getSocketFilename (Just f) = f
main :: IO ()
main = do
args <- loadHDevTools
let sock = getSocketFilename (socket args)
case args of
Admin {} -> doAdmin sock args
Check {} -> doCheck sock args
ModuleFile {} -> doModuleFile sock args
Info {} -> doInfo sock args
Type {} -> doType sock args
FindSymbol {} -> doFindSymbol sock args
doAdmin :: FilePath -> HDevTools -> IO ()
doAdmin sock args
| start_server args =
if noDaemon args then startServer sock Nothing
else do
s <- createListenSocket sock
daemonize True $ startServer sock (Just s)
| status args = getServerStatus sock
| stop_server args = stopServer sock
| otherwise = do
progName <- getProgName
hPutStrLn stderr "You must provide a command. See:"
hPutStrLn stderr $ progName ++ " --help"
doModuleFile :: FilePath -> HDevTools -> IO ()
doModuleFile sock args =
serverCommand sock (CmdModuleFile (module_ args)) (ghcOpts args)
doFileCommand :: String -> (HDevTools -> Command) -> FilePath -> HDevTools -> IO ()
doFileCommand cmdName cmd sock args
| null (file args) = do
progName <- getProgName
hPutStrLn stderr "You must provide a haskell source file. See:"
hPutStrLn stderr $ progName ++ " " ++ cmdName ++ " --help"
| otherwise = serverCommand sock (cmd args) (ghcOpts args)
doCheck :: FilePath -> HDevTools -> IO ()
doCheck = doFileCommand "check" $
\args -> CmdCheck (file args)
doInfo :: FilePath -> HDevTools -> IO ()
doInfo = doFileCommand "info" $
\args -> CmdInfo (file args) (identifier args)
doType :: FilePath -> HDevTools -> IO ()
doType = doFileCommand "type" $
\args -> CmdType (file args) (line args, col args)
doFindSymbol :: FilePath -> HDevTools -> IO ()
doFindSymbol sock args =
serverCommand sock (CmdFindSymbol (symbol args) (files args)) (ghcOpts args)
|
dan-t/hdevtools
|
src/Main.hs
|
mit
| 2,399
| 0
| 12
| 547
| 803
| 393
| 410
| 59
| 6
|
module Graphics.CG.Draw.Triangle where
import Graphics.CG.Draw.Lines
import Graphics.CG.Primitives.Triangle
import Graphics.Gloss
drawTriangle :: Triangle -> Picture
drawTriangle (a, b, c) = drawClosedLines [a, b, c]
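-- A usage sketch with Gloss (coordinates are arbitrary; a Triangle is assumed
-- to be a triple of Gloss points, as the pattern match above suggests):
--
--   main :: IO ()
--   main = display (InWindow "triangle" (400, 400) (10, 10)) white
--                  (drawTriangle ((0, 0), (100, 0), (0, 100)))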
|
jagajaga/CG-Haskell
|
Graphics/CG/Draw/Triangle.hs
|
mit
| 249
| 0
| 6
| 55
| 65
| 41
| 24
| 6
| 1
|
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE DeriveGeneric #-}
module PredictionsByStop where
import System.Exit (exitFailure, exitSuccess)
import System.IO (stderr, hPutStrLn)
import qualified Data.ByteString.Lazy.Char8 as BSL
import System.Environment (getArgs)
import Control.Monad (forM_, mzero, join)
import Control.Applicative
import Data.Aeson.AutoType.Alternative
import Data.Aeson(decode, Value(..), FromJSON(..), ToJSON(..),
(.:), (.:?), (.=), object)
import Data.Monoid
import Data.Text (Text)
import GHC.Generics
-- | Workaround for https://github.com/bos/aeson/issues/287.
o .:?? val = fmap join (o .:? val)
data AlertHeadersElt = AlertHeadersElt {
alertHeadersEltAlertId :: Double,
alertHeadersEltEffectName :: Text,
alertHeadersEltHeaderText :: Text
} deriving (Show,Eq,Generic)
instance FromJSON AlertHeadersElt where
parseJSON (Object v) = AlertHeadersElt <$> v .: "alert_id" <*> v .: "effect_name" <*> v .: "header_text"
parseJSON _ = mzero
instance ToJSON AlertHeadersElt where
toJSON (AlertHeadersElt {..}) = object ["alert_id" .= alertHeadersEltAlertId, "effect_name" .= alertHeadersEltEffectName, "header_text" .= alertHeadersEltHeaderText]
data Vehicle = Vehicle {
vehicleVehicleLon :: Text,
vehicleVehicleSpeed :: Text,
vehicleVehicleLat :: Text,
vehicleVehicleId :: Text,
vehicleVehicleBearing :: Text,
vehicleVehicleLabel :: Text,
vehicleVehicleTimestamp :: Text
} deriving (Show,Eq,Generic)
instance FromJSON Vehicle where
parseJSON (Object v) = Vehicle <$> v .: "vehicle_lon" <*> v .: "vehicle_speed" <*> v .: "vehicle_lat" <*> v .: "vehicle_id" <*> v .: "vehicle_bearing" <*> v .: "vehicle_label" <*> v .: "vehicle_timestamp"
parseJSON _ = mzero
instance ToJSON Vehicle where
toJSON (Vehicle {..}) = object ["vehicle_lon" .= vehicleVehicleLon, "vehicle_speed" .= vehicleVehicleSpeed, "vehicle_lat" .= vehicleVehicleLat, "vehicle_id" .= vehicleVehicleId, "vehicle_bearing" .= vehicleVehicleBearing, "vehicle_label" .= vehicleVehicleLabel, "vehicle_timestamp" .= vehicleVehicleTimestamp]
data TripElt = TripElt {
tripEltPreDt :: Text,
tripEltPreAway :: Text,
tripEltSchDepDt :: Text,
tripEltTripName :: Text,
tripEltTripId :: Text,
tripEltVehicle :: (Maybe (Vehicle:|:[(Maybe Value)])),
tripEltTripHeadsign :: Text,
tripEltSchArrDt :: Text
} deriving (Show,Eq,Generic)
instance FromJSON TripElt where
parseJSON (Object v) = TripElt <$> v .: "pre_dt" <*> v .: "pre_away" <*> v .: "sch_dep_dt" <*> v .: "trip_name" <*> v .: "trip_id" <*> v .:?? "vehicle" <*> v .: "trip_headsign" <*> v .: "sch_arr_dt"
parseJSON _ = mzero
instance ToJSON TripElt where
toJSON (TripElt {..}) = object ["pre_dt" .= tripEltPreDt, "pre_away" .= tripEltPreAway, "sch_dep_dt" .= tripEltSchDepDt, "trip_name" .= tripEltTripName, "trip_id" .= tripEltTripId, "vehicle" .= tripEltVehicle, "trip_headsign" .= tripEltTripHeadsign, "sch_arr_dt" .= tripEltSchArrDt]
data DirectionElt = DirectionElt {
directionEltTrip :: [TripElt],
directionEltDirectionId :: Text,
directionEltDirectionName :: Text
} deriving (Show,Eq,Generic)
instance FromJSON DirectionElt where
parseJSON (Object v) = DirectionElt <$> v .: "trip" <*> v .: "direction_id" <*> v .: "direction_name"
parseJSON _ = mzero
instance ToJSON DirectionElt where
toJSON (DirectionElt {..}) = object ["trip" .= directionEltTrip, "direction_id" .= directionEltDirectionId, "direction_name" .= directionEltDirectionName]
data RouteElt = RouteElt {
routeEltDirection :: [DirectionElt],
routeEltRouteId :: Text,
routeEltRouteName :: Text
} deriving (Show,Eq,Generic)
instance FromJSON RouteElt where
parseJSON (Object v) = RouteElt <$> v .: "direction" <*> v .: "route_id" <*> v .: "route_name"
parseJSON _ = mzero
instance ToJSON RouteElt where
toJSON (RouteElt {..}) = object ["direction" .= routeEltDirection, "route_id" .= routeEltRouteId, "route_name" .= routeEltRouteName]
data ModeElt = ModeElt {
modeEltRouteType :: Text,
modeEltRoute :: [RouteElt],
modeEltModeName :: Text
} deriving (Show,Eq,Generic)
instance FromJSON ModeElt where
parseJSON (Object v) = ModeElt <$> v .: "route_type" <*> v .: "route" <*> v .: "mode_name"
parseJSON _ = mzero
instance ToJSON ModeElt where
toJSON (ModeElt {..}) = object ["route_type" .= modeEltRouteType, "route" .= modeEltRoute, "mode_name" .= modeEltModeName]
data TopLevel = TopLevel {
topLevelAlertHeaders :: [AlertHeadersElt],
topLevelMode :: [ModeElt],
topLevelStopId :: Text,
topLevelStopName :: Text
} deriving (Show,Eq,Generic)
instance FromJSON TopLevel where
parseJSON (Object v) = TopLevel <$> v .: "alert_headers" <*> v .: "mode" <*> v .: "stop_id" <*> v .: "stop_name"
parseJSON _ = mzero
instance ToJSON TopLevel where
toJSON (TopLevel {..}) = object ["alert_headers" .= topLevelAlertHeaders, "mode" .= topLevelMode, "stop_id" .= topLevelStopId, "stop_name" .= topLevelStopName]
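-- A decoding sketch using only this module's imports (the file name is
-- hypothetical; the JSON would normally come from a predictions-by-stop API
-- response):
--
--   main :: IO ()
--   main = do
--     raw <- BSL.readFile "predictionsbystop.json"
--     case (decode raw :: Maybe TopLevel) of
--       Nothing -> hPutStrLn stderr "decode failed" >> exitFailure
--       Just tl -> print (topLevelStopName tl) >> exitSuccess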
|
thesietch/homOS
|
homOS-backend/src/PredictionsByStop.hs
|
mit
| 5,471
| 0
| 21
| 1,151
| 1,439
| 804
| 635
| 100
| 1
|
module Quark.QuoteUtils where
import Quark.Type
import qualified Data.Sequence as Seq
import qualified Data.Set as Set
import Data.Foldable (toList)
--- Quote Utilities ---
-- checks if a QItem is a QFunc
isFunc :: QItem -> Bool
isFunc (QFunc _) = True
isFunc _ = False
-- returns the AtomSet of a QProg (non recursive)
getAtoms :: QProg -> AtomSet
getAtoms = Set.fromList . map (\(QFunc a) -> a) . filter isFunc . toList
-- convert a QFunc to a QVar
funcToVar :: QItem -> QItem
funcToVar (QFunc x) = QVar x
funcToVar x = x
-- recursively replaces QFuncs with QVars
funcToVarRec :: AtomSet -> QItem -> QItem
funcToVarRec vars (QFunc x) = if Set.member x vars then QVar x else QFunc x
funcToVarRec vars (QQuote pattern body) = QQuote pattern' body'
where vars' = Set.union vars $ getAtoms pattern
pattern' = fmap funcToVar pattern
body' = fmap (funcToVarRec vars') body
funcToVarRec _ x = x
-- same as funcToVarRec, but with default `vars` AtomSet
addVars :: QItem -> QItem
addVars = funcToVarRec Set.empty
-- returns a list of all QFuncs referenced in a QQuote
getFuncs :: QItem -> AtomSet
getFuncs (QQuote _ body) = Set.unions . toList $ fmap getFuncs body
getFuncs (QFunc a) = Set.singleton a
getFuncs _ = Set.empty
-- adds a prefix to a QVar
prefixVar :: FuncName -> QItem -> QItem
prefixVar prefix (QVar x) = QVar (prefix ++ "." ++ x)
prefixVar _ x = x
-- recursively prefixes QVars
prefixVarRec :: FuncName -> QItem -> QItem
prefixVarRec prefix (QVar x) = QVar (prefix ++ "." ++ x)
prefixVarRec prefix (QQuote pattern body) = QQuote pattern' body'
where pattern' = fmap (prefixVar prefix) pattern
body' = fmap (prefixVarRec prefix) body
prefixVarRec _ x = x
|
henrystanley/Quark
|
Quark/QuoteUtils.hs
|
cc0-1.0
| 1,703
| 0
| 12
| 330
| 538
| 281
| 257
| 35
| 2
|
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE InstanceSigs #-}
module Course.Extend where
import Course.Core
import Course.Id
import Course.List
import Course.Optional
import Course.Functor
-- | All instances of the `Extend` type-class must satisfy one law. This law
-- is not checked by the compiler. This law is given as:
--
-- * The law of associativity
-- `∀f g. (f <<=) . (g <<=) ≅ (<<=) (f . (g <<=))`
class Functor f => Extend f where
-- Pronounced, extend.
(<<=) ::
(f a -> b)
-> f a
-> f b
infixr 1 <<=
-- | Implement the @Extend@ instance for @Id@.
--
-- >>> id <<= Id 7
-- Id (Id 7)
instance Extend Id where
(<<=) ::
(Id a -> b)
-> Id a
-> Id b
(<<=) =
error "todo: Course.Extend (<<=)#instance Id"
-- | Implement the @Extend@ instance for @List@.
--
-- >>> length <<= ('a' :. 'b' :. 'c' :. Nil)
-- [3,2,1]
--
-- >>> id <<= (1 :. 2 :. 3 :. 4 :. Nil)
-- [[1,2,3,4],[2,3,4],[3,4],[4]]
--
-- >>> reverse <<= ((1 :. 2 :. 3 :. Nil) :. (4 :. 5 :. 6 :. Nil) :. Nil)
-- [[[4,5,6],[1,2,3]],[[4,5,6]]]
instance Extend List where
(<<=) ::
(List a -> b)
-> List a
-> List b
(<<=) =
error "todo: Course.Extend (<<=)#instance List"
-- | Implement the @Extend@ instance for @Optional@.
--
-- >>> id <<= (Full 7)
-- Full (Full 7)
--
-- >>> id <<= Empty
-- Empty
instance Extend Optional where
(<<=) ::
(Optional a -> b)
-> Optional a
-> Optional b
(<<=) =
error "todo: Course.Extend (<<=)#instance Optional"
-- | Duplicate the functor using extension.
--
-- >>> cojoin (Id 7)
-- Id (Id 7)
--
-- >>> cojoin (1 :. 2 :. 3 :. 4 :. Nil)
-- [[1,2,3,4],[2,3,4],[3,4],[4]]
--
-- >>> cojoin (Full 7)
-- Full (Full 7)
--
-- >>> cojoin Empty
-- Empty
cojoin ::
Extend f =>
f a
-> f (f a)
cojoin =
error "todo: Course.Extend#cojoin"
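-- The todos above are intentional course exercises. For reference only, one
-- List definition consistent with the doctests (a sketch, not necessarily the
-- intended solution):
--
--   f <<= Nil = Nil
--   f <<= xs@(_ :. t) = f xs :. (f <<= t)
--
-- with cojoin then definable as (id <<=).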
|
harrisi/on-being-better
|
list-expansion/Haskell/course/src/Course/Extend.hs
|
cc0-1.0
| 1,853
| 0
| 10
| 433
| 309
| 183
| 126
| 42
| 1
|
module HeelGenerators.SandalsToe(sandalToeDebugToFile, sandalToeStlToFile ) where
import TriCad.MathPolar(
slopeAdjustedForVerticalAngle,
createTopFaces,
createBottomFacesSimplified,
radiusAdjustedForZslope,
xyQuadrantAngle,
createCornerPoint,
Slope(..),
Radius(..),
Angle(..),
flatXSlope,
flatYSlope,
)
import TriCad.Points(Point(..))
import TriCad.CornerPoints(CornerPoints(..), (++>), (+++), (++++), Faces(..))
import TriCad.StlCornerPoints((+++^))
import TriCad.StlBase (StlShape(..), newStlShape, stlShapeToText)
import TriCad.CornerPointsFaceExtraction ( extractTopFace, extractBottomFrontLine, extractFrontTopLine, extractBackTopLine, extractBottomFace, extractBackBottomLine, extractFrontFace )
import TriCad.CornerPointsFaceConversions(lowerFaceFromUpperFace, backBottomLineFromBottomFrontLine, backTopLineFromFrontTopLine, frontTopLineFromBackTopLine, upperFaceFromLowerFace, bottomFrontLineFromBackBottomLine)
import TriCad.CornerPointsDebug((+++^?), CubeName(..), CubeDebug(..), CubeDebugs(..))
import TriCad.StlFileWriter(writeStlToFile, writeStlDebugToFile)
sandalToeDebugToFile = writeStlDebugToFile riserCubesDebug
sandalToeStlToFile = writeStlToFile sandalToeStlFile
sandalToeStlFile = newStlShape "SandalToe" $ shoeTopTriangles ++ shoeMiddleTriangles -- riserTriangles -- treadTriangles --
angles = [0,10..360]
{------------------------------------ the layer that meets the shoe ----------------------------------
It is made up of 2 layers, so that the bottom layer does not have a slope. This will allow for
a keyway that is not sloped.
-}
shoeRadius =
[
Radius 56,--0
Radius 57,--1
Radius 57.5,--2
Radius 57.2,--3
Radius 55,--4
Radius 51,--5
Radius 48,--6
Radius 46,--7
Radius 45,--8
Radius 45.2,--9
Radius 46,--10
Radius 48,--11
Radius 51,--12
Radius 55,--13
Radius 62,--14
Radius 66,--15
Radius 60.5,--16
Radius 57.8,--17
Radius 57,--18
Radius 58,--17
Radius 61,--16
Radius 61,--15
Radius 54,--14
Radius 48,--13
Radius 43,--12
Radius 40.5,--11
Radius 39,--10
Radius 38,--9
Radius 37.5,--8
Radius 37.5,--7
Radius 38.5,--6
Radius 40,--5
Radius 42,--4
Radius 45,--3
Radius 50,--2
Radius 54,--1
Radius 56--0
]
shoeTopOrigin = (Point{x_axis=0, y_axis=(0), z_axis=70})
shoeMiddleOrigin = (Point{x_axis=0, y_axis=(0), z_axis=50})
shoeBtmOrigin = (Point{x_axis=0, y_axis=(0), z_axis=40})
shoeTopTriangles =
[FacesBackFrontTop | x <- [1,2..36]]
+++^
shoeTopCubes
shoeTopCubesDebug =
[CubeName "shoeTopCubes" | x <- [1..]]
+++^?
shoeTopCubes
shoeTopCubes =
shoeMiddleCubes
++++
shoeTopFaces
shoeTopFacesDebug =
[CubeName "shoeTopFaces" | x <- [1..]]
+++^?
shoeTopFaces
--only need top faces as it will get added to the tread top faces
shoeTopFaces =
--front line
map (extractFrontTopLine) (createTopFaces shoeTopOrigin shoeRadius angles flatXSlope (PosYSlope 10))
++++
--back line. Note that treadInnerRadius is used so that the keyway is kept consistent.
map (backTopLineFromFrontTopLine . extractFrontTopLine) (createTopFaces shoeTopOrigin treadInnerRadius angles flatXSlope (PosYSlope 10))
shoeMiddleTriangles =
[FacesBackBottomFront | x <- [1,2..36]]
+++^
shoeMiddleCubes
shoeMiddleCubesDebug =
[CubeName "shoeMiddleCubes" | x <- [1..]]
+++^?
shoeMiddleCubes
shoeMiddleCubes =
shoeMiddleFaces
++++
shoeBtmFaces
shoeMiddleFaces =
--front line
map (extractFrontTopLine) (createTopFaces shoeMiddleOrigin shoeRadius angles flatXSlope (PosYSlope 0))
++++
--back line. Note that treadInnerRadius is used so that the keyway is kept consistent.
map (backTopLineFromFrontTopLine . extractFrontTopLine) (createTopFaces shoeMiddleOrigin treadInnerRadius angles flatXSlope (PosYSlope 0))
shoeBtmFacesDebug =
[CubeName "shoeBtmFaces" | x <- [1..]]
+++^?
shoeBtmFaces
shoeBtmFaces =
--front line
map (extractBottomFrontLine)
(createBottomFacesSimplified shoeBtmOrigin shoeRadius (map (Angle) angles) flatXSlope (PosYSlope 0))
++++
--back line. Note that treadInnerRadius is used so that the keyway is kept consistent.
map (backBottomLineFromBottomFrontLine . extractBottomFrontLine)
(createBottomFacesSimplified shoeBtmOrigin treadInnerRadius (map (Angle) angles) flatXSlope (PosYSlope 0))
{------------------------------------ the riser layer -----------------------------------------------
This riser has the tread radius on the bottom and the shoe radius on the top, so it adapts between the two while providing extra height.
It has the treadInnerRadius key, as all layers have to keep it consistent.
It must not have a sloped top, so that the keyway is straight.
-}
riserOrigin = (Point{x_axis=0, y_axis=(0), z_axis=40})
riserTriangles = [FacesBackBottomFrontTop | x <- [1,2..36]]
+++^
riserCubes
riserCubesDebug =
[CubeName "riserCubes" | x <- [1..]]
+++^?
riserCubes
riserCubes =
treadCubes
++++
riserTopFaces
riserTopFacesDebug =
[CubeName "riserTopFaces" | x <- [1..]]
+++^?
riserTopFaces
riserTopFaces =
--front line
map (extractFrontTopLine) (createTopFaces riserOrigin shoeRadius angles flatXSlope (PosYSlope 0))
++++
--back line
map (backTopLineFromFrontTopLine . extractFrontTopLine) (createTopFaces riserOrigin treadInnerRadius angles flatXSlope (PosYSlope 0))
{------------------------------------ the layer that meets the tread -----------------------------------}
--"Half" because the tread is symmetrical: only one side is entered here, and the mirrored side is added later
--to get the full radius.
treadHalfRadius =
[
Radius 56,--0
Radius 57,--1
Radius 57,--2
Radius 57,--3
Radius 55,--4
Radius 53,--5
Radius 51,--6
Radius 50,--7
Radius 50,--8
Radius 50,--9
Radius 52,--10
Radius 55,--11
Radius 58,--12
Radius 63,--13
Radius 70,--14
Radius 79,--15
Radius 81,--16
Radius 77--17
]
--The center value is at 180 degrees.
--The tread is symmetrical, so the half list can be mirrored.
treadRadius = concat [treadHalfRadius, [Radius 76], reverse treadHalfRadius]
treadInnerRadius = map (\(Radius x) -> (Radius (x * 0.5))) treadRadius
topTreadOrigin = (Point{x_axis=0, y_axis=(0), z_axis=20})
btmTreadOrigin = (Point{x_axis=0, y_axis=0, z_axis=0})
treadTriangles = [FacesBackBottomFrontTop | x <- [1,2..36]]
+++^
treadCubes
treadCubesDebug =
[CubeName "treadCubes" | x <- [1..]]
+++^?
treadCubes
treadCubes =
treadTopFaces
++++
treadBtmFaces
treadTopFacesDebug =
[CubeName "treadTopFaces" | x <- [1..]]
+++^?
treadTopFaces
treadTopFaces =
--front line
map (extractFrontTopLine) (createTopFaces topTreadOrigin treadRadius angles flatXSlope (PosYSlope 0))
++++
--back line
map (backTopLineFromFrontTopLine . extractFrontTopLine) (createTopFaces topTreadOrigin treadInnerRadius angles flatXSlope (PosYSlope 0))
treadBtmFacesDebug =
[CubeName "treadBtmFaces" | x <- [1..]]
+++^?
treadBtmFaces
treadBtmFaces =
--front line
map (extractBottomFrontLine)
(createBottomFacesSimplified btmTreadOrigin treadRadius (map (Angle) angles) flatXSlope flatYSlope)
++++
--back line
map (backBottomLineFromBottomFrontLine . extractBottomFrontLine)
(createBottomFacesSimplified btmTreadOrigin treadInnerRadius (map (Angle) angles) flatXSlope flatYSlope)
|
heathweiss/Tricad
|
src/Examples/ShoeLift/SandalsToe.hs
|
gpl-2.0
| 7,407
| 0
| 11
| 1,247
| 1,783
| 1,027
| 756
| 186
| 1
|
{---------------------------------------------------------------------}
{- Copyright 2015 Nathan Bloomfield -}
{- -}
{- This file is part of Feivel. -}
{- -}
{- Feivel is free software: you can redistribute it and/or modify -}
{- it under the terms of the GNU General Public License version 3, -}
{- as published by the Free Software Foundation. -}
{- -}
{- Feivel is distributed in the hope that it will be useful, but -}
{- WITHOUT ANY WARRANTY; without even the implied warranty of -}
{- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -}
{- GNU General Public License for more details. -}
{- -}
{- You should have received a copy of the GNU General Public License -}
{- along with Feivel. If not, see <http://www.gnu.org/licenses/>. -}
{---------------------------------------------------------------------}
module Carl.Orders where
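-- | Compare two lists of pairs lexicographically: entries are compared
-- position by position, first on the first component and then on the second;
-- if one list runs out first, the shorter list compares as smaller.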
lexicographic2 :: (Ord a, Ord b) => [(a,b)] -> [(a,b)] -> Ordering
lexicographic2 [] [] = EQ
lexicographic2 [] _ = LT
lexicographic2 _ [] = GT
lexicographic2 ((a1,b1):ps1) ((a2,b2):ps2)
| a1 < a2 = LT
| a1 > a2 = GT
| a1 == a2 && b1 < b2 = LT
| a1 == a2 && b1 > b2 = GT
| otherwise = lexicographic2 ps1 ps2
|
nbloomf/carl
|
src/Carl/Orders.hs
|
gpl-3.0
| 1,572
| 0
| 10
| 613
| 222
| 124
| 98
| 11
| 1
|
module QFeldspar.Prelude.MiniFeldspar
(Dp,Syn(..),toExpF,frmExpF,Type,Num,EqE,OrdE,Fractional,conF,
Word32,Float,Bool,pattern TrueE,pattern FalseE,(?),while,fst,snd,
Ary,mkArr,lnArr,ixArr,Vec(..),share,Complex,pattern (:+.),
Opt,some,none,option,
(*),(+),(-),(==.),(<.),save,
realPartE,imagPartE,divE,(/),(.&..),(.|..),xorE,shfRgtE,shfLftE,
complementE,i2fE,cisE,ilog2E,sqrtE,hashTableE,
trmEql,trmEqlF) where
import QFeldspar.MyPrelude (Word32,Float,Complex,Ary,Bool(..),Num(..)
,Monad(..),fst,snd,Fractional(..),impossible,(.))
import qualified QFeldspar.MyPrelude as MP
import QFeldspar.Expression.MiniFeldspar
import qualified QFeldspar.Expression.Utils.Equality.MiniFeldspar as MFS
import qualified QFeldspar.Type.GADT as TG
import QFeldspar.Type.GADT(Type)
import QFeldspar.Environment.Typed (Env(Emp,Ext))
import QFeldspar.Prelude.Environment
import QFeldspar.Prelude.HaskellEnvironment hiding (cis,ilog2,i2f)
import qualified QFeldspar.Variable.Typed as VT
import QFeldspar.Magic
-- import QFeldspar.Expression.Utils.MiniFeldspar (shared)
prm0 :: Match a '[] t => VT.Var Prelude a -> Dp t
prm0 v = Prm v Emp
prm1 :: (TG.Type t1 , Match a '[t1] t) =>
VT.Var Prelude a -> Dp t1 -> Dp t
prm1 v e = Prm v (Ext e Emp)
prm2 :: (TG.Type t1 , TG.Type t2 , Match a '[t1, t2] t) =>
VT.Var Prelude a -> Dp t1 -> Dp t2 -> Dp t
prm2 v e1 e2 = Prm v (Ext e1 (Ext e2 Emp))
trmEql :: TG.Type a => Dp a -> Dp a -> MP.Bool
trmEql = MFS.eql
trmEqlF :: (TG.Type a , TG.Type b) =>
(Dp a -> Dp b) -> (Dp a -> Dp b) -> MP.Bool
trmEqlF = MFS.eqlF
----------
type Dp t = Exp Prelude t
conF :: Float -> Dp Float
conF = ConF
class Type (InT a) => Syn a where
type InT a :: *
toExp :: a -> Dp (InT a)
frmExp :: Dp (InT a) -> a
instance Type a => Syn (Dp a) where
type InT (Dp a) = a
toExp x = x
frmExp x = x
toExpF :: (Syn a , Syn b) => (a -> b) -> Dp (InT a) -> Dp (InT b)
toExpF f = toExp . f . frmExp
frmExpF :: (Syn a , Syn b) => (Dp (InT a) -> Dp (InT b)) -> a -> b
frmExpF f = frmExp . f . toExp
pattern TrueE = ConB MP.True
pattern FalseE = ConB MP.False
(?) :: Syn a => Dp Bool -> (a , a) -> a
c ? (t , e) = frmExp (Cnd c (toExp t) (toExp e))
while :: Syn a => (a -> Dp Bool) -> (a -> a) -> a -> a
while c b i = frmExp (Whl (toExpF c) (toExpF b) (toExp i))
instance (Syn a , Syn b) => Syn (a , b) where
type InT (a , b) = (InT a , InT b)
toExp (x , y) = Tpl (toExp x) (toExp y)
frmExp ee = let e = ee in
(frmExp (Fst e) , frmExp (Snd e))
mkArr :: Type t => Dp Word32 -> (Dp Word32 -> Dp t) -> Dp (Ary t)
mkArr = Ary
lnArr :: Type t => Dp (Ary t) -> Dp Word32
lnArr = Len
ixArr :: Dp (Ary t) -> Dp Word32 -> Dp t
ixArr = Ind
data Vec t = Vec (Dp Word32) (Dp Word32 -> t)
instance Syn a => Syn (Vec a) where
type InT (Vec a) = Ary (InT a)
toExp (Vec l f) = Ary l (\ i -> toExp (f i))
frmExp aa = let a = aa in
Vec (Len a) (\ i -> frmExp (Ind a i))
pattern x :+. y = Cmx x y
save :: Dp a -> Dp a
save = Mem
class Syn a => Undef a where
undef :: a
instance Undef (Dp Bool) where
undef = FalseE
instance Undef (Dp Word32) where
undef = 0
instance Undef (Dp Float) where
undef = 0
instance (Undef a, Undef b) => Undef (a,b) where
undef = (undef, undef)
data Opt_R a = Opt_R { def :: Dp Bool, val :: a }
instance Syn a => Syn (Opt_R a) where
type InT (Opt_R a) = (Bool, InT a)
toExp (Opt_R b x) = Tpl b (toExp x)
frmExp pp = let p = pp in
Opt_R (Fst p) (frmExp (Snd p))
some_R :: a -> Opt_R a
some_R x = Opt_R TrueE x
none_R :: Undef a => Opt_R a
none_R = Opt_R FalseE undef
option_R :: Syn b => b -> (a -> b) -> Opt_R a -> b
option_R d f o = def o ? (f (val o), d)
newtype Opt a = O { unO :: forall b . Undef b =>
((a -> Opt_R b) -> Opt_R b) }
instance MP.Functor Opt where
  -- Defined via the Monad instance below so it does not rely on MP.<$>,
  -- which would be circular if that operator is itself fmap.
  fmap f m = m >>= (return . f)
instance MP.Applicative Opt where
pure = return
m <*> n = do m' <- m
n' <- n
return (m' n')
instance Monad Opt where
return x = O (\g -> g x)
m >>= k = O (\g -> unO m (\x -> unO (k x) g))
instance Undef a => Syn (Opt a) where
type InT (Opt a) = (Bool, InT a)
frmExp = lift . frmExp
toExp = toExp . lower
lift :: Opt_R a -> Opt a
lift o = O (\g -> Opt_R (def o ? (def (g (val o)), FalseE))
(def o ? (val (g (val o)), undef)))
lower :: Undef a => Opt a -> Opt_R a
lower m = unO m some_R
some :: a -> Opt a
some a = lift (some_R a)
none :: Undef a => Opt a
none = lift none_R
option :: (Undef a, Undef b) => b -> (a -> b) -> Opt a -> b
option d f o = option_R d f (lower o)
instance Num (Dp Float) where
(+) = Add
(-) = Sub
(*) = Mul
fromInteger x = ConF (MP.fromInteger x)
abs = impossible
signum = impossible
instance Num (Dp Word32) where
(+) = Add
(-) = Sub
(*) = Mul
fromInteger x = ConI (MP.fromInteger x)
abs = impossible
signum = impossible
instance Num (Dp (Complex Float)) where
(+) = Add
(-) = Sub
(*) = Mul
fromInteger x = Cmx (ConF (MP.fromInteger x)) 0.0
abs = impossible
signum = impossible
infix 4 ==.
class EqE t where
(==.) :: Dp t -> Dp t -> Dp Bool
instance EqE Bool where
(==.) = Eql
instance EqE Word32 where
(==.) = Eql
instance EqE Float where
(==.) = Eql
infix 4 <.
class OrdE t where
(<.) :: Dp t -> Dp t -> Dp Bool
instance OrdE Bool where
(<.) = Ltd
instance OrdE Word32 where
(<.) = Ltd
instance OrdE Float where
(<.) = Ltd
share :: (Type (InT tl) , Syn tl , Syn tb) =>
tl -> (tl -> tb) -> tb
share e f = frmExp (LeT (toExp e) (toExp . f . frmExp))
realPartE :: Dp (Complex Float) -> Dp Float
realPartE = prm1 realPartVar
imagPartE :: Dp (Complex Float) -> Dp Float
imagPartE = prm1 imagPartVar
divE :: Dp Word32 -> Dp Word32 -> Dp Word32
divE = prm2 divWrdVar
instance Fractional (Dp Float) where
(/) = prm2 divFltVar
fromRational r = ConF (fromRational r)
infixl 7 .&..
(.&..) :: Dp Word32 -> Dp Word32 -> Dp Word32
(.&..) = prm2 andWrdVar
infixl 7 .|..
(.|..) :: Dp Word32 -> Dp Word32 -> Dp Word32
(.|..) = prm2 orWrdVar
xorE :: Dp Word32 -> Dp Word32 -> Dp Word32
xorE = prm2 xorWrdVar
shfRgtE :: Dp Word32 -> Dp Word32 -> Dp Word32
shfRgtE = prm2 shrWrdVar
shfLftE :: Dp Word32 -> Dp Word32 -> Dp Word32
shfLftE = prm2 shlWrdVar
complementE :: Dp Word32 -> Dp Word32
complementE = prm1 cmpWrdVar
i2fE :: Dp Word32 -> Dp Float
i2fE = prm1 i2fVar
cisE :: Dp Float -> Dp (Complex Float)
cisE = prm1 cisVar
ilog2E :: Dp Word32 -> Dp Word32
ilog2E = prm1 ilog2Var
hashTableE :: Dp (Ary Word32)
hashTableE = prm0 hshTblVar
sqrtE :: Dp Float -> Dp Float
sqrtE = prm1 sqrtFltVar
|
shayan-najd/QFeldspar
|
QFeldspar/Prelude/MiniFeldspar.hs
|
gpl-3.0
| 6,934
| 2
| 16
| 1,929
| 3,397
| 1,801
| 1,596
| -1
| -1
|
{-# language NoMonomorphismRestriction #-}
module FAnormalize
( normalizeBDFA
, normalizeTNFA
)
where
import FAmap
import FAtypes
import Ids
import Options
import FiniteMap
import Set
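-- Renumber the states of an automaton as 0, 1, .. by mapping each state
-- (in the order given by setToList) to its index; the automaton's structure
-- is preserved via mapTNFA / mapBDFA.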
normalizeTNFA :: Ord a => Opts -> TNFA a -> TNFA Int
normalizeTNFA opts a @ (TNFA cons all starts moves) =
let fm = listToFM $ zip ( setToList all ) [0 .. ]
fun = lookupWithDefaultFM fm ( error "normalize" )
in mapTNFA opts fun a
normalizeBDFA :: Ord a => Opts -> BDFA a -> BDFA Int
normalizeBDFA opts a @ (BDFA cons all starts moves) =
let fm = listToFM $ zip ( setToList all ) [0 .. ]
fun = lookupWithDefaultFM fm ( error "normalize" )
in mapBDFA opts fun a
|
jwaldmann/rx
|
src/FAnormalize.hs
|
gpl-3.0
| 702
| 2
| 13
| 178
| 245
| 123
| 122
| 20
| 1
|
--------------------------------------------------------------------------
-- Common operations on types
--------------------------------------------------------------------------
module Operations(
-- * Generalization, skolemization and instantiation
generalize
, skolemize
, instantiate
, instantiateAnnot
, quantify
, xgeneralize
, normalize, constructedForm
-- * Creat fresh type variables
, freshSkolems, freshInstanceVar
, uniqueReset
-- * Inference
, Infer, runInfer
, HasTypeVar
, freeTvs, freeSkolems, (|->), subst
-- * Helpers
, message
, check, failure
, onlyIf
, isTau, isFlexTau, isScheme, isUniVar, ground
, liftIO
, readRef, writeRef, newRef
, withRankInf, withRankIncrease, getRank
) where
import Debug.Trace( trace )
import Data.IORef( IORef, newIORef, modifyIORef, readIORef, writeIORef )
import System.IO.Unsafe( unsafePerformIO )
import Data.List( sort )
import PPrint
import Types
import Subst
import Gamma
--import Control.Applicative (Applicative(..))
import Control.Monad (ap)
--------------------------------------------------------------------------
-- Generalize
-- Uses efficient generalization by lambda-ranking unifiable variables.
-- See: George Kuan and David MacQueen, "Efficient ML type inference with ranked type variables"
--------------------------------------------------------------------------
generalize :: Gamma -> Infer Type -> Infer Type
generalize gamma inf
= do tp <- withRankInf inf
tvsTp <- freeTvs tp
(gtp,gtvs) <- xgeneralize tp
-- message $ "generalize over: " ++ show tp ++ ": " ++ show tvs
assertGen tvsTp gtvs -- assert that we generalize correctly
normalize gtp
where
assertGen tvsTp tvs
= do -- assert that we generalize correctly
tvsG <- freeTvs (gammaCoDomain gamma)
let tvsX = (tvsTp `diff` tvsG)
if (sort tvs /= sort tvsX)
            then message ("warning: different generalization:\n tvs: " ++ show (sort tvs) ++ "\ntvsX: " ++ show (sort tvsX))
else return ()
-- | "quantify" is used when type schemes are unified
quantify :: Type -> Infer Type
quantify tp
= do (gtp,_) <- xgeneralize tp
return gtp
xgeneralize :: Type -> Infer (Type,[TypeVar])
xgeneralize tp
= do tvsTp <- freeTvs tp
depth <- getRank
tvs <- genTvs depth tvsTp
qs <- freshQuantifiers tvs
stp <- subQuantifiers qs tvs tp
-- gtp <- mkForallQ qs stp
return (mkForallQ qs stp,tvs)
where
genTvs depth []
= return []
genTvs depth (tv@(TypeVar _ (Uni ref)):tvs)
= do bound <- readRef ref
case bound of
Instance tp rank | rank > depth
-> do tvsTp <- freeTvs tp
gtvs <- genTvs depth (tvsTp `union` tvs)
return ([tv] `union` gtvs)
_ -> genTvs depth tvs
subQuantifiers qs tvs tp
= -- subNew (tvIds tvs) (map (\q -> TVar (TypeVar (quantId q) Quantified)) qs) |-> tp
do mapM_ (\(quant,TypeVar _ (Uni ref)) -> writeRef ref (Equal (TVar (TypeVar (quantId quant) Quantified))))
(zip qs tvs)
subst tp
filterM pred []
= return []
filterM pred (x : xs)
= do keep <- pred x
xs' <- filterM pred xs
if keep then return (x:xs') else return xs'
--------------------------------------------------------------------------
-- Instantiation
--------------------------------------------------------------------------
-- | Instantiate a type
instantiate :: Type -> Infer Rho
instantiate tp
= do t <- ground tp
case t of
Forall q tp
-> do tv <- freshTVar q
stp <- subNew [quantId q] [tv] |-> tp
-- message $ "instantiate: " ++ show tp ++ ": " ++ show stp
instantiate stp
rho -> return rho
-- | Instantiate the "some" quantifiers of an annotation to fresh type variables
instantiateAnnot :: Annot -> Infer ([Type],Type)
instantiateAnnot (Annot [] tp)
= do ntp <- normalize tp
return ([],ntp)
instantiateAnnot (Annot qs tp)
= do tvs <- freshTVars qs
stp <- subNew (quantIds qs) tvs |-> tp
ntp <- normalize stp
return (tvs,ntp)
--------------------------------------------------------------------------
-- Skolemization
--------------------------------------------------------------------------
-- | Skolemize a quantified type and return the newly introduced skolem variables
skolemize :: Type -> Infer ([TypeVar],Rho)
skolemize tp
= do t <- ground tp
case splitQuants t of
(qs,rho) | not (null qs)
-> do sks <- freshSkolems (length qs)
srho <- subNew (quantIds qs) (map TVar sks) |-> rho
-- message ("skolemize: " ++ show tp ++ " to " ++ show srho)
return (sks, srho)
_ -> return ([],tp)
---------------------------------------------------------------------------
-- constructed form
---------------------------------------------------------------------------
constructedForm tp
= case tp of
Forall (Quant id bound) rho
-> do eq <- checkTVar id rho
if eq then constructedForm rho
else return tp
_ -> return tp
where
checkTVar id rho
= case rho of
TVar (TypeVar id2 (Uni ref))
-> do bound <- readRef ref
case bound of
Equal t -> checkTVar id t
_ -> return False
TVar (TypeVar id2 Quantified) | id == id2
-> return True
_ -> return False
--------------------------------------------------------------------------
-- normalization
--------------------------------------------------------------------------
normalize :: Type -> Infer Type
normalize tp
= do stp <- subst tp -- apply all substitutions
(tvs,ntp) <- normalizeSchemeFtv stp
return ntp
normalizeSchemeFtv :: Type -> Infer ([TypeVar],Type)
normalizeSchemeFtv tp
= case tp of
Forall (Quant id bound) rho
-> do (tvs,nrho) <- normalizeSchemeFtv rho
if (not (tvFromId id `elem` tvs))
then return (tvs,nrho)
else do (btvs,nbound) <- normalizeSchemeFtv bound
case nrho of
(TVar (TypeVar id2 Quantified)) | id == id2
-> return (btvs,nbound)
_ -> do tp <- if (isRho nbound)
then (subNew [id] [nbound]) |-> nrho
else return (Forall (Quant id nbound) nrho)
return (btvs `union` (remove (tvFromId id) tvs), tp)
_ -> do tvs <- ftv tp
return (tvs,tp)
isRho :: Type -> Bool
isRho tp
= case tp of
Forall _ _ -> False
Bottom -> False
_ -> True
--------------------------------------------------------------------------
-- Free type variables
--------------------------------------------------------------------------
freeSkolems :: HasTypeVar a => a -> Infer [TypeVar]
freeSkolems tp
= do tvs <- ftv tp
return [tv | tv <- tvs, isSkolem (tvFlavour tv)]
-- | return the free unifiable variables of a type
freeTvs :: HasTypeVar a => a -> Infer [TypeVar]
freeTvs tp
= do tvs <- ftv tp
return [tv | tv <- tvs, isUni (tvFlavour tv)]
--------------------------------------------------------------------------
-- Type variables
--------------------------------------------------------------------------
-- | Things that have type variables
class HasTypeVar a where
-- | Return the free type variables
ftv :: a -> Infer [TypeVar]
-- | Apply a substitution
(|->) :: Sub -> a -> Infer a
-- | substitute the free reference type variables
subst :: a -> Infer a
instance HasTypeVar a => HasTypeVar [a] where
ftv xs
= do tvss <- mapM ftv xs
return (foldl union [] tvss)
sub |-> xs
= mapM (sub |->) xs
subst xs
= mapM subst xs
instance HasTypeVar Type where
ftv tp
= case tp of
Forall (Quant id bound) rho
-> do tvs <- ftv rho
if (tvFromId id `elem` tvs)
then do btvs <- ftv bound
return (btvs `union` (remove (tvFromId id) tvs))
else return tvs
TApp t1 t2 -> do tvs1 <- ftv t1
tvs2 <- ftv t2
return (tvs1 `union` tvs2)
TVar tv -> case tv of
TypeVar id (Uni ref)
-> do bound <- readRef ref
case bound of
Instance _ _ -> return [tv]
Equal t -> ftv t
_ -> return [tv]
TCon _ -> return []
Bottom -> return []
sub |-> tp
= case tp of
Forall (Quant id bound) rho
-> do srho <- (subRemove sub [id]) |-> rho
sbnd <- sub |-> bound
return (Forall (Quant id sbnd) srho)
TApp t1 t2 -> do st1 <- sub |-> t1
st2 <- sub |-> t2
return (TApp st1 st2)
TCon name -> return tp
TVar tv -> case tv of
TypeVar id (Uni ref)
-> do bound <- readRef ref
case bound of
Equal t -> sub |-> t
Instance _ _ -> case subLookup sub tv of
Just newtp -> return newtp
Nothing -> return tp
_ -> case subLookup sub tv of -- replace even bound ones, useful for instantiation
Just newtp -> return newtp
Nothing -> return tp
Bottom -> return tp
subst tp
= case tp of
Forall (Quant id bound) rho
-> do srho <- subst rho
sbnd <- subst bound
return (Forall (Quant id sbnd) srho)
TApp tp1 tp2 -> do stp1 <- subst tp1
stp2 <- subst tp2
return (TApp stp1 stp2)
TVar (TypeVar _ (Uni ref))
-> do bound <- readRef ref
case bound of
Equal t -> do ft <- subst t
writeRef ref (Equal ft)
return ft
Instance _ _ -> return tp
_ -> return tp
instance HasTypeVar Bound where
ftv bound
= case bound of
Equal tp -> error "Operations.ftv: equality quantifier?"
Instance tp _ -> ftv tp
sub |-> bound
= case bound of
Equal tp -> error "Operations.|->: equality quantifier?"
Instance tp r -> do stp <- sub |-> tp
return (Instance stp r)
subst bound
= case bound of
Instance tp r -> do stp <- subst tp
return (Instance stp r)
Equal tp -> error "Operations.subst: equality quantifier?"
--------------------------------------------------------------------------
-- Helpers
--------------------------------------------------------------------------
check :: Bool -> String -> Infer ()
check pred msg
= if pred then return ()
else failure msg
failure :: String -> a
failure msg
= error ("error: " ++ msg)
onlyIf :: Bool -> Infer () -> Infer ()
onlyIf pred inf
= if pred then inf else return ()
--------------------------------------------------------------------------
-- Fresh type variables
--------------------------------------------------------------------------
-- | return fresh skolem variables
freshSkolems :: Int -> Infer [TypeVar]
freshSkolems
= freshTypeVars Skolem
-- | return fresh bound variables
freshQuantifiers :: [TypeVar] -> Infer [Quant]
freshQuantifiers tvs
= mapM freshQuantifier tvs
freshQuantifier (TypeVar _ (Uni ref))
= do bound <- readRef ref
id <- freshId
case bound of
Equal tp -> error "Operations.freshQuantifier: do subst?"
Instance tp rank -> return (Quant id tp)
-- | return fresh unifiable types
freshTVars :: [Quant] -> Infer [Type]
freshTVars qs
= mapM freshTVar qs
-- | return a fresh unifiable type
freshTVar :: Quant -> Infer Type
freshTVar (Quant id tp)
= freshInstanceVar tp
freshInstanceVar :: Type -> Infer Type
freshInstanceVar tp
= do rank <- getRank -- instantiate under the current rank
scheme <- isScheme tp
ref <- newRef (if scheme then Instance tp rank else Equal tp)
tv <- freshTypeVar (Uni ref)
return (TVar tv)
-- | return fresh type variables of a certain |Flavour|
freshTypeVars :: Flavour -> Int -> Infer [TypeVar]
freshTypeVars fl n
= mapM (\_ -> freshTypeVar fl) [1..n]
-- | return a fresh type variable
freshTypeVar :: Flavour -> Infer TypeVar
freshTypeVar fl
= do id <- freshId
return (TypeVar id fl)
-- | return a fresh identifier
freshId :: Infer Id
freshId
= do id <- unique
return id
--------------------------------------------------------------------------
--
--------------------------------------------------------------------------
-- | Is this a monotype?
isTau tp
= case tp of
Forall _ _ -> return False
TApp t1 t2 -> do b1 <- isTau t1
b2 <- isTau t2
return (b1 && b2)
TVar (TypeVar _ (Uni ref))
-> do bound <- readRef ref
case bound of
Equal t -> isTau t
Instance Bottom _ -> return True
Instance _ _ -> return False
Bottom -> return False
TCon _ -> return True
-- | Is this a monotype with flexible bounds only?
isFlexTau tp
= case tp of
Forall _ _ -> return False
TApp t1 t2 -> do b1 <- isFlexTau t1
b2 <- isFlexTau t2
return (b1 && b2)
TVar (TypeVar _ (Uni ref))
-> do bound <- readRef ref
case bound of
Equal t -> isFlexTau t
-- Instance Bottom _ -> return True
Instance _ _ -> return True
Bottom -> return False
TCon _ -> return True
-- | Is this a type scheme?
isScheme tp
= case tp of
Forall q rho -> return True
TVar (TypeVar _ (Uni ref))
-> do bound <- readRef ref
case bound of
Equal t -> isScheme t
Instance _ _ -> return False
Bottom -> return True
_ -> return False
ground :: Type -> Infer Type
ground tp@(TVar (TypeVar _ (Uni ref)))
= do bound <- readRef ref
case bound of
Equal t -> ground t
Instance _ _ -> return tp
ground tp
= return tp
isUniVar :: Type -> Infer Bool
isUniVar tp
= do t <- ground tp
case t of
TVar (TypeVar _ (Uni _))
-> return True
_ -> return False
--------------------------------------------------------------------------
-- Infer
--------------------------------------------------------------------------
-- | The type inference monad, just IO for convenience
newtype Infer a = Infer (Env -> IO a)
type Env = Rank
runInfer :: Infer a -> IO a
runInfer (Infer inf)
= inf 0
--instance Functor Infer where
-- fmap f (Infer inf) = Infer (\env -> fmap f (inf env))
--instance Monad Infer where
-- return x = Infer (\env -> return x)
-- (Infer inf) >>= f = Infer (\env -> do x <- inf env
-- case f x of
-- Infer inf2 -> inf2 env)
instance Functor Infer where
fmap f (Infer inf) = Infer (\env -> fmap f (inf env))
instance Monad Infer where
return = pure
(Infer inf) >>= f = Infer (\env -> do x <- inf env
case f x of
Infer inf2 -> inf2 env)
instance Applicative Infer where
pure x = Infer (\env -> return x)
(<*>) = ap
message :: String -> Infer ()
message msg
= liftIO $ putStrLn msg
-- Ranks
withRank :: Rank -> Infer a -> Infer a
withRank r (Infer inf)
= Infer (\rank -> inf r)
getRank :: Infer Rank
getRank
= Infer (\rank -> return rank)
withRankIncrease :: Infer a -> Infer a
withRankIncrease inf
= do r <- getRank
withRank (r+1) inf
withRankInf :: Infer a -> Infer a
withRankInf inf
= withRank rankInf inf
-- Refs
liftIO :: IO a -> Infer a
liftIO io
= Infer (\env -> io)
readRef :: IORef a -> Infer a
readRef ref
= liftIO (readIORef ref)
writeRef :: IORef a -> a -> Infer ()
writeRef ref x
= liftIO (writeIORef ref x)
newRef :: a -> Infer (IORef a)
newRef x
= liftIO (newIORef x)
{-# NOINLINE uniqueInt #-}
uniqueInt :: IORef Int
uniqueInt = unsafePerformIO (newIORef 0)
-- | Quick and dirty unique numbers :-)
unique :: Infer Int
unique = do u <- readRef uniqueInt
writeRef uniqueInt (u+1)
return u
uniqueReset :: Infer ()
uniqueReset
= writeRef uniqueInt 0
|
alvisespano/Lw
|
extras/hml-prototype/Operations.hs
|
gpl-3.0
| 18,574
| 0
| 24
| 7,045
| 4,868
| 2,373
| 2,495
| 394
| 7
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.CloudScheduler.Projects.Locations.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Gets information about a location.
--
-- /See:/ <https://cloud.google.com/scheduler/ Cloud Scheduler API Reference> for @cloudscheduler.projects.locations.get@.
module Network.Google.Resource.CloudScheduler.Projects.Locations.Get
(
-- * REST Resource
ProjectsLocationsGetResource
-- * Creating a Request
, projectsLocationsGet
, ProjectsLocationsGet
-- * Request Lenses
, plgXgafv
, plgUploadProtocol
, plgAccessToken
, plgUploadType
, plgName
, plgCallback
) where
import Network.Google.CloudScheduler.Types
import Network.Google.Prelude
-- | A resource alias for @cloudscheduler.projects.locations.get@ method which the
-- 'ProjectsLocationsGet' request conforms to.
type ProjectsLocationsGetResource =
"v1" :>
Capture "name" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :> Get '[JSON] Location
-- | Gets information about a location.
--
-- /See:/ 'projectsLocationsGet' smart constructor.
data ProjectsLocationsGet =
ProjectsLocationsGet'
{ _plgXgafv :: !(Maybe Xgafv)
, _plgUploadProtocol :: !(Maybe Text)
, _plgAccessToken :: !(Maybe Text)
, _plgUploadType :: !(Maybe Text)
, _plgName :: !Text
, _plgCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsLocationsGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'plgXgafv'
--
-- * 'plgUploadProtocol'
--
-- * 'plgAccessToken'
--
-- * 'plgUploadType'
--
-- * 'plgName'
--
-- * 'plgCallback'
projectsLocationsGet
:: Text -- ^ 'plgName'
-> ProjectsLocationsGet
projectsLocationsGet pPlgName_ =
ProjectsLocationsGet'
{ _plgXgafv = Nothing
, _plgUploadProtocol = Nothing
, _plgAccessToken = Nothing
, _plgUploadType = Nothing
, _plgName = pPlgName_
, _plgCallback = Nothing
}
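-- A usage sketch (not from the library documentation): build a request for a
-- specific location and set an optional field with the lenses below. The
-- location name is a made-up placeholder, and the lens operators (&) and (?~)
-- are assumed to come from a lens library such as Control.Lens.
--
-- > projectsLocationsGet "projects/my-project/locations/us-central1"
-- >   & plgCallback ?~ "myCallback"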
-- | V1 error format.
plgXgafv :: Lens' ProjectsLocationsGet (Maybe Xgafv)
plgXgafv = lens _plgXgafv (\ s a -> s{_plgXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
plgUploadProtocol :: Lens' ProjectsLocationsGet (Maybe Text)
plgUploadProtocol
= lens _plgUploadProtocol
(\ s a -> s{_plgUploadProtocol = a})
-- | OAuth access token.
plgAccessToken :: Lens' ProjectsLocationsGet (Maybe Text)
plgAccessToken
= lens _plgAccessToken
(\ s a -> s{_plgAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
plgUploadType :: Lens' ProjectsLocationsGet (Maybe Text)
plgUploadType
= lens _plgUploadType
(\ s a -> s{_plgUploadType = a})
-- | Resource name for the location.
plgName :: Lens' ProjectsLocationsGet Text
plgName = lens _plgName (\ s a -> s{_plgName = a})
-- | JSONP
plgCallback :: Lens' ProjectsLocationsGet (Maybe Text)
plgCallback
= lens _plgCallback (\ s a -> s{_plgCallback = a})
instance GoogleRequest ProjectsLocationsGet where
type Rs ProjectsLocationsGet = Location
type Scopes ProjectsLocationsGet =
'["https://www.googleapis.com/auth/cloud-platform"]
requestClient ProjectsLocationsGet'{..}
= go _plgName _plgXgafv _plgUploadProtocol
_plgAccessToken
_plgUploadType
_plgCallback
(Just AltJSON)
cloudSchedulerService
where go
= buildClient
(Proxy :: Proxy ProjectsLocationsGetResource)
mempty
|
brendanhay/gogol
|
gogol-cloudscheduler/gen/Network/Google/Resource/CloudScheduler/Projects/Locations/Get.hs
|
mpl-2.0
| 4,520
| 0
| 15
| 1,021
| 695
| 406
| 289
| 100
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Mirror.Settings.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Gets a single setting by ID.
--
-- /See:/ <https://developers.google.com/glass Google Mirror API Reference> for @mirror.settings.get@.
module Network.Google.Resource.Mirror.Settings.Get
(
-- * REST Resource
SettingsGetResource
-- * Creating a Request
, settingsGet
, SettingsGet
-- * Request Lenses
, sgId
) where
import Network.Google.Mirror.Types
import Network.Google.Prelude
-- | A resource alias for @mirror.settings.get@ method which the
-- 'SettingsGet' request conforms to.
type SettingsGetResource =
"mirror" :>
"v1" :>
"settings" :>
Capture "id" Text :>
QueryParam "alt" AltJSON :> Get '[JSON] Setting
-- | Gets a single setting by ID.
--
-- /See:/ 'settingsGet' smart constructor.
newtype SettingsGet =
SettingsGet'
{ _sgId :: Text
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'SettingsGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sgId'
settingsGet
:: Text -- ^ 'sgId'
-> SettingsGet
settingsGet pSgId_ = SettingsGet' {_sgId = pSgId_}
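-- A usage sketch (not from the library documentation): the only required
-- field is the setting ID, for example one of the IDs listed for 'sgId'
-- below.
--
-- > settingsGet "locale"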
-- | The ID of the setting. The following IDs are valid: - locale - The key
-- to the user’s language\/locale (BCP 47 identifier) that Glassware should
-- use to render localized content. - timezone - The key to the user’s
-- current time zone region as defined in the tz database. Example:
-- America\/Los_Angeles.
sgId :: Lens' SettingsGet Text
sgId = lens _sgId (\ s a -> s{_sgId = a})
instance GoogleRequest SettingsGet where
type Rs SettingsGet = Setting
type Scopes SettingsGet =
'["https://www.googleapis.com/auth/glass.timeline"]
requestClient SettingsGet'{..}
= go _sgId (Just AltJSON) mirrorService
where go
= buildClient (Proxy :: Proxy SettingsGetResource)
mempty
|
brendanhay/gogol
|
gogol-mirror/gen/Network/Google/Resource/Mirror/Settings/Get.hs
|
mpl-2.0
| 2,724
| 0
| 12
| 613
| 303
| 188
| 115
| 45
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.BigQueryDataTransfer.Projects.Locations.DataSources.CheckValidCreds
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Returns true if valid credentials exist for the given data source and
-- requesting user. Some data sources don\'t support service accounts, so
-- we need to talk to them on behalf of the end user. This API just checks
-- whether we have an OAuth token for the particular user, which is a
-- prerequisite before the user can create a transfer config.
--
-- /See:/ <https://cloud.google.com/bigquery-transfer/ BigQuery Data Transfer API Reference> for @bigquerydatatransfer.projects.locations.dataSources.checkValidCreds@.
module Network.Google.Resource.BigQueryDataTransfer.Projects.Locations.DataSources.CheckValidCreds
(
-- * REST Resource
ProjectsLocationsDataSourcesCheckValidCredsResource
-- * Creating a Request
, projectsLocationsDataSourcesCheckValidCreds
, ProjectsLocationsDataSourcesCheckValidCreds
-- * Request Lenses
, pldscvcXgafv
, pldscvcUploadProtocol
, pldscvcAccessToken
, pldscvcUploadType
, pldscvcPayload
, pldscvcName
, pldscvcCallback
) where
import Network.Google.BigQueryDataTransfer.Types
import Network.Google.Prelude
-- | A resource alias for @bigquerydatatransfer.projects.locations.dataSources.checkValidCreds@ method which the
-- 'ProjectsLocationsDataSourcesCheckValidCreds' request conforms to.
type ProjectsLocationsDataSourcesCheckValidCredsResource
=
"v1" :>
CaptureMode "name" "checkValidCreds" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] CheckValidCredsRequest :>
Post '[JSON] CheckValidCredsResponse
-- | Returns true if valid credentials exist for the given data source and
-- requesting user. Some data sources don\'t support service accounts, so
-- we need to talk to them on behalf of the end user. This API just checks
-- whether we have an OAuth token for the particular user, which is a
-- prerequisite before the user can create a transfer config.
--
-- /See:/ 'projectsLocationsDataSourcesCheckValidCreds' smart constructor.
data ProjectsLocationsDataSourcesCheckValidCreds =
ProjectsLocationsDataSourcesCheckValidCreds'
{ _pldscvcXgafv :: !(Maybe Xgafv)
, _pldscvcUploadProtocol :: !(Maybe Text)
, _pldscvcAccessToken :: !(Maybe Text)
, _pldscvcUploadType :: !(Maybe Text)
, _pldscvcPayload :: !CheckValidCredsRequest
, _pldscvcName :: !Text
, _pldscvcCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsLocationsDataSourcesCheckValidCreds' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pldscvcXgafv'
--
-- * 'pldscvcUploadProtocol'
--
-- * 'pldscvcAccessToken'
--
-- * 'pldscvcUploadType'
--
-- * 'pldscvcPayload'
--
-- * 'pldscvcName'
--
-- * 'pldscvcCallback'
projectsLocationsDataSourcesCheckValidCreds
:: CheckValidCredsRequest -- ^ 'pldscvcPayload'
-> Text -- ^ 'pldscvcName'
-> ProjectsLocationsDataSourcesCheckValidCreds
projectsLocationsDataSourcesCheckValidCreds pPldscvcPayload_ pPldscvcName_ =
ProjectsLocationsDataSourcesCheckValidCreds'
{ _pldscvcXgafv = Nothing
, _pldscvcUploadProtocol = Nothing
, _pldscvcAccessToken = Nothing
, _pldscvcUploadType = Nothing
, _pldscvcPayload = pPldscvcPayload_
, _pldscvcName = pPldscvcName_
, _pldscvcCallback = Nothing
}
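-- A usage sketch (not from the library documentation): build a request for a
-- data source. The resource name is a made-up placeholder, and
-- 'checkValidCredsRequest' is assumed to be the smart constructor for the
-- (empty) request body exported by the Types module.
--
-- > projectsLocationsDataSourcesCheckValidCreds
-- >   checkValidCredsRequest
-- >   "projects/my-project/locations/us/dataSources/my-source"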
-- | V1 error format.
pldscvcXgafv :: Lens' ProjectsLocationsDataSourcesCheckValidCreds (Maybe Xgafv)
pldscvcXgafv
= lens _pldscvcXgafv (\ s a -> s{_pldscvcXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
pldscvcUploadProtocol :: Lens' ProjectsLocationsDataSourcesCheckValidCreds (Maybe Text)
pldscvcUploadProtocol
= lens _pldscvcUploadProtocol
(\ s a -> s{_pldscvcUploadProtocol = a})
-- | OAuth access token.
pldscvcAccessToken :: Lens' ProjectsLocationsDataSourcesCheckValidCreds (Maybe Text)
pldscvcAccessToken
= lens _pldscvcAccessToken
(\ s a -> s{_pldscvcAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
pldscvcUploadType :: Lens' ProjectsLocationsDataSourcesCheckValidCreds (Maybe Text)
pldscvcUploadType
= lens _pldscvcUploadType
(\ s a -> s{_pldscvcUploadType = a})
-- | Multipart request metadata.
pldscvcPayload :: Lens' ProjectsLocationsDataSourcesCheckValidCreds CheckValidCredsRequest
pldscvcPayload
= lens _pldscvcPayload
(\ s a -> s{_pldscvcPayload = a})
-- | Required. The data source in the form:
-- \`projects\/{project_id}\/dataSources\/{data_source_id}\` or
-- \`projects\/{project_id}\/locations\/{location_id}\/dataSources\/{data_source_id}\`.
pldscvcName :: Lens' ProjectsLocationsDataSourcesCheckValidCreds Text
pldscvcName
= lens _pldscvcName (\ s a -> s{_pldscvcName = a})
-- | JSONP
pldscvcCallback :: Lens' ProjectsLocationsDataSourcesCheckValidCreds (Maybe Text)
pldscvcCallback
= lens _pldscvcCallback
(\ s a -> s{_pldscvcCallback = a})
instance GoogleRequest
ProjectsLocationsDataSourcesCheckValidCreds
where
type Rs ProjectsLocationsDataSourcesCheckValidCreds =
CheckValidCredsResponse
type Scopes
ProjectsLocationsDataSourcesCheckValidCreds
=
'["https://www.googleapis.com/auth/bigquery",
"https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/cloud-platform.read-only"]
requestClient
ProjectsLocationsDataSourcesCheckValidCreds'{..}
= go _pldscvcName _pldscvcXgafv
_pldscvcUploadProtocol
_pldscvcAccessToken
_pldscvcUploadType
_pldscvcCallback
(Just AltJSON)
_pldscvcPayload
bigQueryDataTransferService
where go
= buildClient
(Proxy ::
Proxy
ProjectsLocationsDataSourcesCheckValidCredsResource)
mempty
|
brendanhay/gogol
|
gogol-bigquerydatatransfer/gen/Network/Google/Resource/BigQueryDataTransfer/Projects/Locations/DataSources/CheckValidCreds.hs
|
mpl-2.0
| 7,093
| 0
| 16
| 1,457
| 794
| 468
| 326
| 125
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.Instances.GetScreenshot
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Returns the screenshot from the specified instance.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.instances.getScreenshot@.
module Network.Google.Resource.Compute.Instances.GetScreenshot
(
-- * REST Resource
InstancesGetScreenshotResource
-- * Creating a Request
, instancesGetScreenshot
, InstancesGetScreenshot
-- * Request Lenses
, igsProject
, igsZone
, igsInstance
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.instances.getScreenshot@ method which the
-- 'InstancesGetScreenshot' request conforms to.
type InstancesGetScreenshotResource =
"compute" :>
"v1" :>
"projects" :>
Capture "project" Text :>
"zones" :>
Capture "zone" Text :>
"instances" :>
Capture "instance" Text :>
"screenshot" :>
QueryParam "alt" AltJSON :> Get '[JSON] Screenshot
-- | Returns the screenshot from the specified instance.
--
-- /See:/ 'instancesGetScreenshot' smart constructor.
data InstancesGetScreenshot =
InstancesGetScreenshot'
{ _igsProject :: !Text
, _igsZone :: !Text
, _igsInstance :: !Text
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'InstancesGetScreenshot' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'igsProject'
--
-- * 'igsZone'
--
-- * 'igsInstance'
instancesGetScreenshot
:: Text -- ^ 'igsProject'
-> Text -- ^ 'igsZone'
-> Text -- ^ 'igsInstance'
-> InstancesGetScreenshot
instancesGetScreenshot pIgsProject_ pIgsZone_ pIgsInstance_ =
InstancesGetScreenshot'
{ _igsProject = pIgsProject_
, _igsZone = pIgsZone_
, _igsInstance = pIgsInstance_
}
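-- A usage sketch (not from the library documentation): all three fields are
-- required, so the smart constructor is enough on its own. The project, zone
-- and instance names are made-up placeholders.
--
-- > instancesGetScreenshot "my-project" "us-central1-a" "my-instance"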
-- | Project ID for this request.
igsProject :: Lens' InstancesGetScreenshot Text
igsProject
= lens _igsProject (\ s a -> s{_igsProject = a})
-- | The name of the zone for this request.
igsZone :: Lens' InstancesGetScreenshot Text
igsZone = lens _igsZone (\ s a -> s{_igsZone = a})
-- | Name of the instance scoping this request.
igsInstance :: Lens' InstancesGetScreenshot Text
igsInstance
= lens _igsInstance (\ s a -> s{_igsInstance = a})
instance GoogleRequest InstancesGetScreenshot where
type Rs InstancesGetScreenshot = Screenshot
type Scopes InstancesGetScreenshot =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/compute",
"https://www.googleapis.com/auth/compute.readonly"]
requestClient InstancesGetScreenshot'{..}
= go _igsProject _igsZone _igsInstance (Just AltJSON)
computeService
where go
= buildClient
(Proxy :: Proxy InstancesGetScreenshotResource)
mempty
|
brendanhay/gogol
|
gogol-compute/gen/Network/Google/Resource/Compute/Instances/GetScreenshot.hs
|
mpl-2.0
| 3,807
| 0
| 17
| 886
| 470
| 280
| 190
| 76
| 1
|
module Scanner where
import Data.List
import Data.Char
import Text.ParserCombinators.Parsec hiding (token, tokens)
import Control.Applicative ((<*), (*>), (<$>), (<*>))
data Token = Identifier String
| SConstant String
| IConstant Integer
| Operator
| Var
| Function
| If
| Else
| While
| Do
| Return
| Comma
| Colon
| Plus
| Minus
| Star
| Slash
| Dot
| LBracket | RBracket
| LPar | RPar
| LCurly | RCurly
deriving (Show, Eq)
type TokenPos = (Token, SourcePos)
parsePos :: Parser Token -> Parser TokenPos
parsePos p = (,) <$> p <*> getPosition
ident :: Parser TokenPos
ident = parsePos $ do
c <- letter
rest <- many (alphaNum <|> char '_')
return $ Identifier (c:rest)
stringConst :: Parser TokenPos
stringConst = parsePos $ do
q <- oneOf "'\""
s <- many $ chars q
char q <?> "closing quote"
return $ SConstant s
where
chars q = escaped q <|> noneOf [q]
escaped q = char '\\' >> choice (zipWith escapedChar (codes q) (replacements q))
escapedChar code replacement = char code >> return replacement
codes q = ['b', 'n', 'f', 'r', 't', '\\', q]
replacements q = ['\b', '\n', '\f', '\r', '\t', '\\', q]
intConst :: Parser TokenPos
intConst = parsePos (minInt <|> justDigits <?> "integer value")
where
minInt = (IConstant . negate . toInteger) <$> (char '-' *> many1 digit)
justDigits = (IConstant . toInteger) <$> many1 digit
toInteger s = fromIntegral $ foldl (\a i -> a * 10 + digitToInt i) 0 s
primitives :: Parser TokenPos
primitives = parsePos $ choice $ map (\(ch, tok) -> char ch >> return tok) [
( ';', Colon ), ( ',', Comma ), ( '.', Dot ),
( '+', Plus ), ( '-', Minus ), ( '*', Star ), ('/', Slash),
( '(', LPar ), ( ')', RPar ),
( '[', LBracket ), ( ']', RBracket ),
( '{', LCurly ), ( '}', RCurly )
]
reserved :: Parser TokenPos
-- Keywords must not run straight into a following identifier character;
-- otherwise an identifier such as "variable" would scan as the keyword Var
-- followed by the identifier "iable".
reserved = parsePos $ choice $ map (\(s, tok) -> try (string s <* notFollowedBy (alphaNum <|> char '_')) >> return tok) [
( "var", Var ), ( "function", Function ), ( "if", If ), ("else", Else),
( "while", While ), ( "do", Do ), ( "return", Return )
]
aToken :: Parser TokenPos
aToken = choice
[ try stringConst,
try reserved,
try intConst,
primitives,
ident
]
tokenStream :: Parser [TokenPos]
tokenStream = spaces *> many (aToken <* spaces)
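-- A usage sketch (illustrative only): run the scanner over a string with
-- Parsec's 'parse', which is already in scope from the import above.
--
-- > parse tokenStream "<input>" "var answer; return answer"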
|
lolepezy/hajs
|
src/Scanner.hs
|
unlicense
| 2,348
| 0
| 12
| 610
| 929
| 525
| 404
| 71
| 1
|
-- |
-- Module : DMSS.Storage.TH
-- License : Public Domain
--
-- Maintainer : daveparrish@tutanota.com
-- Stability : experimental
-- Portability : untested
--
-- Dead Man Switch System storage schema
--
{-# LANGUAGE GADTs #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
module DMSS.Storage.TH where
import Database.Persist.TH
import DMSS.Storage.Types
import qualified Data.ByteString.Char8 as BS8
share [mkPersist sqlSettings, mkMigrate "migrateAll"] [persistLowerCase|
User
name Name -- ^ User's name
hashSalt HashSalt -- ^ Password hash and salt storage
boxKeypairStore BoxKeypairStore -- ^ encrypted box keypair
signKeypairStore SignKeypairStore -- ^ encrypted sign keypair
UniqueName name
created UTCTimeStore -- ^ Creation time
deriving Show
CheckIn
userId UserId
raw_data BS8.ByteString
created UTCTimeStore -- ^ Creation time
deriving Show
Peer
host Host -- ^ Host of peer
port Port -- ^ Port to connect on
created UTCTimeStore -- ^ Creation time
deriving Show
|]
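-- A possible way to apply the schema above (a sketch only; it assumes a
-- SQLite backend via persistent-sqlite, which may not be what DMSS uses):
--
-- > import Database.Persist.Sqlite (runSqlite, runMigration)
-- >
-- > main :: IO ()
-- > main = runSqlite "storage.sqlite" (runMigration migrateAll)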
|
dmp1ce/DMSS
|
src-lib/DMSS/Storage/TH.hs
|
unlicense
| 1,241
| 0
| 7
| 364
| 66
| 47
| 19
| 9
| 0
|
-----------------------------------------------------------------------------
-- Copyright 2019, Ideas project team. This file is distributed under the
-- terms of the Apache License 2.0. For more information, see the files
-- "LICENSE.txt" and "NOTICE.txt", which are included in the distribution.
-----------------------------------------------------------------------------
-- |
-- Maintainer : bastiaan.heeren@ou.nl
-- Stability : provisional
-- Portability : portable (depends on ghc)
--
-- A minimal interface for constructing simple HTML pages
-- See http://www.w3.org/TR/html4/
--
-----------------------------------------------------------------------------
module Ideas.Text.HTML
( ToHTML(..), HTMLPage, HTMLBuilder
, addCSS, addScript, addStyle, changeBody, showHTML
, string, text
, htmlPage, link
, h1, h2, h3, h4, h5, h6
, preText, ul, table, keyValueTable
, image, space, spaces, (<#>), (<##>), (<###>), spaced, spacedBy
, highlightXML
, para, paras, ttText, hr, br, pre, bullet
, dv, divClass, spanClass
-- HTML generic attributes
, idA, classA, styleA, titleA
-- Font style elements
, tt, italic, bold, big, small
) where
import Data.Char
import Data.List
import Data.Monoid
import Ideas.Text.XML
import qualified Data.Map as M
import qualified Ideas.Text.XML as XML
type HTMLBuilder = XMLBuilder
class ToHTML a where
toHTML :: a -> HTMLBuilder
listToHTML :: [a] -> HTMLBuilder
-- default definitions
listToHTML = ul . map toHTML
instance ToHTML a => ToHTML [a] where
toHTML = listToHTML
instance (ToHTML a, ToHTML b) => ToHTML (Either a b) where
toHTML = either toHTML toHTML
instance (ToHTML a) => ToHTML (Maybe a) where
toHTML = maybe mempty toHTML
instance ToHTML () where
toHTML _ = mempty
instance (ToHTML a, ToHTML b) => ToHTML (a, b) where
toHTML (a, b) = toHTML a <#> toHTML b
instance (ToHTML a, ToHTML b, ToHTML c) => ToHTML (a, b, c) where
toHTML (a, b, c) = toHTML a <#> toHTML b <#> toHTML c
instance (ToHTML a, ToHTML b) => ToHTML (M.Map a b) where
toHTML = Ideas.Text.HTML.table False . map f . M.toList
where
f (a, b) = [toHTML a, toHTML b]
instance ToHTML Int where
toHTML = text
instance ToHTML Bool where
toHTML = text
instance ToHTML Char where
toHTML = string . return
listToHTML = string
data HTMLPage = HTMLPage
{ title :: String
, styleSheets :: [FilePath]
, scripts :: [FilePath]
, styleTxts :: [String]
, htmlBody :: HTMLBuilder
}
instance ToXML HTMLPage where
toXML page = makeXML "html" $
element "head"
[ tag "title" (string (title page))
, mconcat
[ element "link"
[ "rel" .=. "STYLESHEET"
, "href" .=. css
, "type" .=. "text/css"
]
| css <- styleSheets page
]
, mconcat
[ tag "style" (string txt)
| txt <- styleTxts page
]
, mconcat
[ element "script" ["src" .=. js, "type" .=. "text/javascript", string " "]
| js <- scripts page
]
]
<> tag "body" (htmlBody page)
showHTML :: HTMLPage -> String
showHTML = compactXML . toXML
addCSS :: FilePath -> HTMLPage -> HTMLPage
addCSS css page = page { styleSheets = css : styleSheets page }
addScript :: FilePath -> HTMLPage -> HTMLPage
addScript js page = page { scripts = js : scripts page }
addStyle :: String -> HTMLPage -> HTMLPage
addStyle txt page = page { styleTxts = txt : styleTxts page }
changeBody :: (HTMLBuilder -> HTMLBuilder) -> HTMLPage -> HTMLPage
changeBody f p = p { htmlBody = f (htmlBody p) }
-- html helper functions
htmlPage :: String -> HTMLBuilder -> HTMLPage
htmlPage s = HTMLPage s [] [] []
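-- A small usage sketch (illustrative only): build a page with a heading and
-- a bullet list, then render it with 'showHTML'.
--
-- > demo :: String
-- > demo = showHTML $ htmlPage "Demo" $
-- >   h1 "Hello" <> ul [string "one", string "two"]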
link :: BuildXML a => String -> a -> a
link url body = tag "a" $
("href" .=. url) <> body
h1, h2, h3, h4, h5, h6 :: BuildXML a => String -> a
h1 = tag "h1" . string
h2 = tag "h2" . string
h3 = tag "h3" . string
h4 = tag "h4" . string
h5 = tag "h5" . string
h6 = tag "h6" . string
para :: BuildXML a => a -> a
para = tag "p"
preText :: BuildXML a => String -> a
preText = pre . string
pre :: BuildXML a => a -> a
pre = tag "pre"
hr :: BuildXML a => a
hr = emptyTag "hr"
br :: BuildXML a => a
br = emptyTag "br"
ttText :: BuildXML a => String -> a
ttText = tt . string
ul :: BuildXML a => [a] -> a
ul xs
| null xs = mempty
| otherwise = element "ul" (map (tag "li") xs)
-- | First argument indicates whether the table has a header or not
table :: BuildXML a => Bool -> [[a]] -> a
table b rows
| null rows = mempty
| otherwise = element "table" $
("border" .=. "1") :
[ element "tr" $
("class" .=. getClass i) :
[ tag "td" c | c <- row ]
| (i, row) <- zip [0::Int ..] rows
]
where
getClass i
| i == 0 && b = "top-row"
| even i = "even-row"
| otherwise = "odd-row"
keyValueTable :: BuildXML a => [(String, a)] -> a
keyValueTable =
let f (s, a) = [spanClass "table-key" (string s), a]
in para . table False . map f
spaces :: BuildXML a => Int -> a
spaces n = mconcat (replicate n space)
space, bullet :: BuildXML a => a
space = XML.string [chr 160] -- non-breaking space
bullet = XML.string [chr 8226]
(<#>) :: BuildXML a => a -> a -> a
x <#> y = x <> space <> y
(<##>) :: BuildXML a => a -> a -> a
x <##> y = x <> spaces 2 <> y
(<###>) :: BuildXML a => a -> a -> a
x <###> y = x <> spaces 3 <> y
spaced :: BuildXML a => [a] -> a
spaced = spacedBy 1
spacedBy :: BuildXML a => Int -> [a] -> a
spacedBy n = mconcat . intersperse (spaces n)
paras :: BuildXML a => [a] -> a
paras = mconcat . map para
image :: BuildXML a => String -> a
image n = tag "img" ("src" .=. n)
dv :: BuildXML a => a -> a
dv = tag "div"
divClass :: BuildXML a => String -> a -> a
divClass n a = dv (classA n <> a)
spanClass :: BuildXML a => String -> a -> a
spanClass n a = tag "span" (classA n <> a)
-- A simple XML highlighter
highlightXML :: Bool -> XML -> HTMLBuilder
highlightXML nice
| nice = tag "pre" . f . prettyXML
| otherwise = tag "tt" . f . compactXML
where
-- find <
f :: String -> HTMLBuilder
f [] = mempty
f ('<':'/':xs) = g "</" [] xs
f ('<':xs) = g "<" [] xs
f (x:xs) = string [x] <> f xs
-- find >
g start acc [] = string (start ++ reverse acc)
g start acc ('/':'>':xs) = pp (start, reverse acc, "/>") <> f xs
g start acc ('>':xs) = pp (start, reverse acc, ">") <> f xs
g start acc (x:xs) = g start (x:acc) xs
pp (start, info, end) = blue (string (start ++ as)) <> rec bs <> blue (string end)
where
(as, bs) = span isAlphaNum info
rec [] = mempty
rec ('=':xs) = orange (string "=") <> rec xs
rec ('"':xs) = case break (== '"') xs of
(xs1, _:xs2) -> green (string ('"' : xs1 ++ ['"'])) <> rec xs2
_ -> string ('"':xs)
rec (x:xs) = string [x] <> rec xs
blue a = tag "font" ("color" .=. "blue" <> a)
orange a = tag "font" ("color" .=. "orange" <> a)
green a = tag "font" ("color" .=. "green" <> a)
{-
f [] = []
f list@(x:xs)
| "</" `isPrefixOf` list = -- close tag
let (as, bs) = span isAlphaNum (drop 5 list)
in "<font color='blue'></" ++ as ++ "<font color='green'>" ++ g bs
| "<" `isPrefixOf` list = -- open tag
let (as, bs) = span isAlphaNum (drop 4 list)
in "<font color='blue'><" ++ as ++ "<font color='green'>" ++ g bs
| otherwise = x : f xs
-- find >
g [] = []
g list@(x:xs)
| "/>" `isPrefixOf` list =
"</font>/></font>" ++ f (drop 5 list)
| ">" `isPrefixOf` list =
"</font>></font>" ++ f (drop 4 list)
| x=='=' = "<font color='orange'>=</font>" ++ g xs
| otherwise = x : g xs -}
-----------------------------------------------------------
-- * HTML generic attributes
idA, classA, styleA, titleA :: BuildXML a => String -> a
idA = ("id" .=.) -- document-wide unique id
classA = ("class" .=.) -- space-separated list of classes
styleA = ("style" .=.) -- associated style info
titleA = ("title" .=.) -- advisory title
-----------------------------------------------------------
-- * Font style elements
-- | Renders as teletype or monospaced Ideas.Text.
tt :: BuildXML a => a -> a
tt = tag "tt"
-- | Renders as italic text style.
italic :: BuildXML a => a -> a
italic = tag "i"
-- | Renders as bold text style.
bold :: BuildXML a => a -> a
bold = tag "b"
-- BIG: Renders text in a "large" font.
big :: BuildXML a => a -> a
big = tag "big"
-- SMALL: Renders text in a "small" font.
small :: BuildXML a => a -> a
small = tag "small"
|
ideas-edu/ideas
|
src/Ideas/Text/HTML.hs
|
apache-2.0
| 9,173
| 0
| 18
| 2,755
| 2,972
| 1,581
| 1,391
| 188
| 11
|
-- insertion sort
is k [] = [k]
is k (x:xs)
| k <= x = (k:x:xs)
| otherwise = x:(is k xs)
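-- ans n i xs performs insertion step i on xs and returns the list of
-- intermediate arrays after each remaining step; step 0 leaves the array
-- unchanged, so the original array appears first in the output.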
ans n i x
| n == i = []
| otherwise =
let k = x!!i
t = drop (i+1) x
h = is k $ take i x
r = h ++ t
in
(r:ans n (i+1) r)
main = do
n <- getLine
i <- getLine
let n' = read n :: Int
i' = map read $ words i :: [Int]
o = ans n' 0 i'
mapM_ putStrLn $ map unwords $ map (map show) o
|
a143753/AOJ
|
ALDS1_1_A.hs
|
apache-2.0
| 442
| 0
| 12
| 180
| 283
| 138
| 145
| 19
| 1
|
{-# LANGUAGE OverloadedStrings #-}
module Data.Geometry.ClipSpec where
import qualified Control.Monad as ControlMonad
import qualified Data.Aeson as Aeson
import qualified Data.Geometry.VectorTile.VectorTile as VectorTile
import qualified Data.Geospatial as Geospatial
import qualified Data.LinearRing as LinearRing
import qualified Data.Sequence as Sequence
import Test.Hspec (Spec, describe, it,
shouldBe)
import qualified Data.Geometry.Clip as GeometryClip
import qualified Data.Geometry.Clip.Internal.Line as InternalLine
import qualified Data.Geometry.Types.Geography as GeometryGeography
import qualified Data.SpecHelper as SpecHelper
polyPts :: [(Int, Int)]
polyPts = [ (50,150), (200, 50)
, (350,150), (350,300)
, (250,300), (200,250)
, (150,350), (100,250)
, (100,200)
]
innerPolyPts :: [(Int, Int)]
innerPolyPts = [(75,200),(250,250),(250,150),(75,150)]
poly :: VectorTile.Polygon
poly = VectorTile.Polygon (SpecHelper.tupleToPts polyPts) mempty
lineClipPts :: GeometryGeography.BoundingBoxPts
lineClipPts = GeometryGeography.BoundingBoxPts (VectorTile.Point 10 10) (VectorTile.Point 60 60)
linesTst :: Sequence.Seq VectorTile.LineString
linesTst = Sequence.fromList
[ VectorTile.LineString (SpecHelper.tupleToPts [(11, 11), (59, 59)])
, VectorTile.LineString (SpecHelper.tupleToPts [(0, 0), (0, 100)])
, VectorTile.LineString (SpecHelper.tupleToPts [(5, 5), (45, 50), (90, 140)])
, VectorTile.LineString (SpecHelper.tupleToPts [(0, 0), (10, 10)])
, VectorTile.LineString (SpecHelper.tupleToPts [(50, 50), (0, 10)])
, VectorTile.LineString (SpecHelper.tupleToPts [(0, 0), (60, 60)])]
resultLines :: Sequence.Seq VectorTile.LineString
resultLines = Sequence.fromList
[ VectorTile.LineString (SpecHelper.tupleToPts [(10, 10), (60, 60)])
, VectorTile.LineString (SpecHelper.tupleToPts [(50, 50), (10, 18)])
, VectorTile.LineString (SpecHelper.tupleToPts [(10, 10), (10, 10)])
, VectorTile.LineString (SpecHelper.tupleToPts [(10, 11), (45, 50), (50, 60)])
, VectorTile.LineString (SpecHelper.tupleToPts [(11, 11), (59, 59)])
]
resultPolyPts :: [(Int, Int)]
resultPolyPts = [(100,200),(100,116),(124,100),(275,100),(300,116),(300,300),(250,300),(200,250),(175,300),(125,300),(100,250),(100,200)]
innerPolyResultPts :: [(Int, Int)]
innerPolyResultPts = [(100,150),(100,207),(250,250),(250,150),(100,150)]
resultPolyWithInner :: VectorTile.Polygon
resultPolyWithInner = VectorTile.Polygon (SpecHelper.tupleToPts resultPolyPts) (Sequence.fromList [VectorTile.Polygon (SpecHelper.tupleToPts innerPolyResultPts) mempty])
geoLineTst :: Geospatial.GeoLine
geoLineTst = Geospatial.GeoLine (SpecHelper.mkLineString (5, 5) (45, 50) [(90, 140)])
geoLinesTst :: Geospatial.GeoMultiLine
geoLinesTst = Geospatial.GeoMultiLine $ Sequence.fromList
[ SpecHelper.mkLineString (11, 11) (59, 59) []
, SpecHelper.mkLineString (0, 0) (0, 100) []
, SpecHelper.mkLineString (5, 5) (45, 50) [(90, 140)]
, SpecHelper.mkLineString (0, 0) (10, 10) []
, SpecHelper.mkLineString (50, 50) (0, 10) []
, SpecHelper.mkLineString (0, 0) (60, 60) []
]
geoResultLine :: Geospatial.GeoLine
geoResultLine = Geospatial.GeoLine (SpecHelper.mkLineString (10, 10.625) (45, 50) [(50, 60)])
geoLinearRingTst1 :: LinearRing.LinearRing Geospatial.GeoPositionWithoutCRS
geoLinearRingTst1 = SpecHelper.mkLinearRing (50, 50) (50,150) (200, 50) [(350, 50), (350,150), (350, 350), (350,300), (250,300), (200,250), (50, 350), (150,350), (100,250), (100,200)]
geoLinearRingTst2 :: LinearRing.LinearRing Geospatial.GeoPositionWithoutCRS
geoLinearRingTst2 = SpecHelper.mkLinearRing (100,150) (100,207) (250,250) [(250,150),(100,150)]
geoBrokenLinearRingTst :: LinearRing.LinearRing Geospatial.GeoPositionWithoutCRS
geoBrokenLinearRingTst = SpecHelper.mkLinearRing (-512,-400) (96,-400) (96,-904) [(-512,-904),(-512,-400)]
geoGiantLinearRingTst :: LinearRing.LinearRing Geospatial.GeoPositionWithoutCRS
geoGiantLinearRingTst = SpecHelper.mkLinearRing (-128,-128) (2176,-128) (2176,2176) [(-128,2176), (-128,-128)]
geoTurningLinearRingTst :: LinearRing.LinearRing Geospatial.GeoPositionWithoutCRS
geoTurningLinearRingTst = SpecHelper.mkLinearRing (125,125) (175,175) (75,225) [(25,175), (125,125)]
geoPolyTst :: Geospatial.GeoPolygon
geoPolyTst = Geospatial.GeoPolygon (Sequence.singleton geoLinearRingTst1)
geoPolysTst :: Geospatial.GeoMultiPolygon
geoPolysTst = Geospatial.GeoMultiPolygon (Sequence.fromList [
Sequence.singleton geoLinearRingTst1,
Sequence.singleton geoLinearRingTst2
])
geoBrokenPolyTst :: Geospatial.GeoPolygon
geoBrokenPolyTst = Geospatial.GeoPolygon (Sequence.singleton geoBrokenLinearRingTst)
geoGiantPolyTst :: Geospatial.GeoPolygon
geoGiantPolyTst = Geospatial.GeoPolygon (Sequence.singleton geoGiantLinearRingTst)
geoTurningPolyTst :: Geospatial.GeoPolygon
geoTurningPolyTst = Geospatial.GeoPolygon (Sequence.singleton geoTurningLinearRingTst)
geoLineFeatureTst :: Geospatial.GeoFeature Aeson.Value
geoLineFeatureTst = Geospatial.GeoFeature Nothing (Geospatial.Line geoLineTst) Aeson.Null Nothing
geoLinesFeatureTst :: Geospatial.GeoFeature Aeson.Value
geoLinesFeatureTst = Geospatial.GeoFeature Nothing (Geospatial.MultiLine geoLinesTst) Aeson.Null Nothing
geoPolygonFeatureTst :: Geospatial.GeoFeature Aeson.Value
geoPolygonFeatureTst = Geospatial.GeoFeature Nothing (Geospatial.Polygon geoPolyTst) Aeson.Null Nothing
geoBrokenPolyFeatureTst :: Geospatial.GeoFeature Aeson.Value
geoBrokenPolyFeatureTst = Geospatial.GeoFeature Nothing (Geospatial.Polygon geoBrokenPolyTst) Aeson.Null Nothing
geoGiantPolyFeatureTst :: Geospatial.GeoFeature Aeson.Value
geoGiantPolyFeatureTst = Geospatial.GeoFeature Nothing (Geospatial.Polygon geoGiantPolyTst) Aeson.Null Nothing
geoTurningPolyFeatureTst :: Geospatial.GeoFeature Aeson.Value
geoTurningPolyFeatureTst = Geospatial.GeoFeature Nothing (Geospatial.Polygon geoTurningPolyTst) Aeson.Null Nothing
geoResultLines :: Geospatial.GeoMultiLine
geoResultLines = Geospatial.GeoMultiLine $ Sequence.fromList
[ SpecHelper.mkLineString (10, 10) (60, 60) []
, SpecHelper.mkLineString (50, 50) (10, 18) []
, SpecHelper.mkLineString (10, 10) (10, 10) []
, SpecHelper.mkLineString (10, 10.625) (45, 50) [(50, 60)]
, SpecHelper.mkLineString (11, 11) (59, 59) []
]
geoResultLineFeatureTst :: Sequence.Seq (Geospatial.GeoFeature Aeson.Value)
geoResultLineFeatureTst = Sequence.singleton $ Geospatial.GeoFeature Nothing (Geospatial.Line geoResultLine) Aeson.Null Nothing
geoResultLinesFeatureTst :: Sequence.Seq (Geospatial.GeoFeature Aeson.Value)
geoResultLinesFeatureTst = Sequence.singleton $ Geospatial.GeoFeature Nothing (Geospatial.MultiLine geoResultLines) Aeson.Null Nothing
geoResultLinearRing1 :: LinearRing.LinearRing Geospatial.GeoPositionWithoutCRS
geoResultLinearRing1 = SpecHelper.mkLinearRing (100,200) (100,116.66666666666667) (125.00000000000001,100) [(300,100),(300,300),(250,300),(200,250),(124.99999999999999,300),(125,300),(100,250),(100,200)]
offGeoResultLinearRing1 :: LinearRing.LinearRing Geospatial.GeoPositionWithoutCRS
offGeoResultLinearRing1 = SpecHelper.mkLinearRing (100,200) (100,116.66666666666666) (125.0,100) [(300,100),(300,300),(250,300),(200,250),(125,300),(125,300),(100,250),(100,200)]
geoResultLinearRing2 :: LinearRing.LinearRing Geospatial.GeoPositionWithoutCRS
geoResultLinearRing2 = SpecHelper.mkLinearRing (100,150) (100,207) (250,250) [(250,150),(100,150)]
geoResultGiantLinearRing :: LinearRing.LinearRing Geospatial.GeoPositionWithoutCRS
geoResultGiantLinearRing = SpecHelper.mkLinearRing (-128, -128) (2176, -128) (2176, 2176) [(-128, 2176), (-128, -128)]
geoResultTurningRing :: LinearRing.LinearRing Geospatial.GeoPositionWithoutCRS
geoResultTurningRing = SpecHelper.mkLinearRing (125,125) (175,175) (125,200) [(100,200),(100,137.5),(125,125)]
geoResultPolyFeatureTst :: Sequence.Seq (Geospatial.GeoFeature Aeson.Value)
geoResultPolyFeatureTst = Sequence.singleton $ Geospatial.GeoFeature Nothing (Geospatial.Polygon (Geospatial.GeoPolygon (Sequence.singleton geoResultLinearRing1))) Aeson.Null Nothing
offGeoResultPolyFeatureTst :: Sequence.Seq (Geospatial.GeoFeature Aeson.Value)
offGeoResultPolyFeatureTst = Sequence.singleton $ Geospatial.GeoFeature Nothing (Geospatial.Polygon (Geospatial.GeoPolygon (Sequence.singleton offGeoResultLinearRing1))) Aeson.Null Nothing
geoResultPolysFeatureTst :: Sequence.Seq (Geospatial.GeoFeature Aeson.Value)
geoResultPolysFeatureTst = Sequence.singleton $ Geospatial.GeoFeature Nothing (Geospatial.MultiPolygon (Geospatial.GeoMultiPolygon (Sequence.fromList [Sequence.singleton geoResultLinearRing1, Sequence.singleton geoResultLinearRing2]))) Aeson.Null Nothing
offGeoResultPolysFeatureTst :: Sequence.Seq (Geospatial.GeoFeature Aeson.Value)
offGeoResultPolysFeatureTst = Sequence.singleton $ Geospatial.GeoFeature Nothing (Geospatial.MultiPolygon (Geospatial.GeoMultiPolygon (Sequence.fromList [Sequence.singleton offGeoResultLinearRing1, Sequence.singleton geoResultLinearRing2]))) Aeson.Null Nothing
geoResultGiantPolyFeatureTst :: Sequence.Seq (Geospatial.GeoFeature Aeson.Value)
geoResultGiantPolyFeatureTst = Sequence.singleton $ Geospatial.GeoFeature Nothing (Geospatial.Polygon (Geospatial.GeoPolygon (Sequence.singleton geoResultGiantLinearRing))) Aeson.Null Nothing
geoResultTurningPolyFeatureTst :: Sequence.Seq (Geospatial.GeoFeature Aeson.Value)
geoResultTurningPolyFeatureTst = Sequence.singleton $ Geospatial.GeoFeature Nothing (Geospatial.Polygon (Geospatial.GeoPolygon (Sequence.singleton geoResultTurningRing))) Aeson.Null Nothing
lineClip :: GeometryGeography.BoundingBox
lineClip = GeometryGeography.BoundingBox 10 10 60 60
polyClip :: GeometryGeography.BoundingBox
polyClip = GeometryGeography.BoundingBox 100 100 300 300
brokenClip :: GeometryGeography.BoundingBox
brokenClip = GeometryGeography.BoundingBox (-128) (-128) 2176 2176
giantClip :: GeometryGeography.BoundingBox
giantClip = GeometryGeography.BoundingBox (-128) (-128) 2176 2176
turningClip :: GeometryGeography.BoundingBox
turningClip = GeometryGeography.BoundingBox 100 100 200 200
spec :: Spec
spec = do
testLineHelper
testClipLine
testClipPolygon
-- testClipPolygonWithInterior
-- testManyClipPolygon
testLineHelper :: Spec
testLineHelper =
describe "segmentToLine" $ do
it "Simple test" $ do
let inputPts = Sequence.fromList ([1,2,2,7,7,10,10,11] :: [Int])
expectedPts = Sequence.fromList ([1,2,7,10,11] :: [Int])
expectedPts `shouldBe` InternalLine.segmentToLine inputPts
it "Empty Test" $
Sequence.empty `shouldBe` InternalLine.segmentToLine (Sequence.empty :: Sequence.Seq Int)
it "Single element test" $
Sequence.empty `shouldBe` InternalLine.segmentToLine (Sequence.fromList ([1] :: [Int]))
lineClippingAlgorithms :: [GeometryGeography.BoundingBox -> Geospatial.GeoLine -> Geospatial.GeoFeature Aeson.Value -> Sequence.Seq (Geospatial.GeoFeature Aeson.Value) -> Sequence.Seq (Geospatial.GeoFeature Aeson.Value)]
lineClippingAlgorithms =
[GeometryClip.clipLineCs, GeometryClip.clipLineLb, GeometryClip.clipLineQc, GeometryClip.clipLineNLN]
multilineClippingAlgorithms :: [GeometryGeography.BoundingBox -> Geospatial.GeoMultiLine -> Geospatial.GeoFeature Aeson.Value -> Sequence.Seq (Geospatial.GeoFeature Aeson.Value) -> Sequence.Seq (Geospatial.GeoFeature Aeson.Value)]
multilineClippingAlgorithms =
[GeometryClip.clipLinesCs, GeometryClip.clipLinesLb, GeometryClip.clipLinesQc, GeometryClip.clipLinesNLN]
testClipLine :: Spec
testClipLine =
describe "simple line test" $ do
it "Algorithms returns clipped line" $ do
let actual f = f lineClip geoLineTst geoLineFeatureTst Sequence.empty
ControlMonad.forM_ lineClippingAlgorithms (\x -> actual x `shouldBe` geoResultLineFeatureTst)
it "Algorithms returns clipped line" $ do
let actual f = f lineClip geoLinesTst geoLineFeatureTst Sequence.empty
ControlMonad.forM_ multilineClippingAlgorithms (\x -> actual x `shouldBe` geoResultLinesFeatureTst)
polgonClippingAlgorithms :: [GeometryGeography.BoundingBox -> Geospatial.GeoPolygon -> Geospatial.GeoFeature Aeson.Value -> Sequence.Seq (Geospatial.GeoFeature Aeson.Value) -> Sequence.Seq (Geospatial.GeoFeature Aeson.Value)]
polgonClippingAlgorithms =
[GeometryClip.clipPolygonSh]
multipolgonClippingAlgorithms :: [GeometryGeography.BoundingBox -> Geospatial.GeoMultiPolygon -> Geospatial.GeoFeature Aeson.Value -> Sequence.Seq (Geospatial.GeoFeature Aeson.Value) -> Sequence.Seq (Geospatial.GeoFeature Aeson.Value)]
multipolgonClippingAlgorithms =
[GeometryClip.clipPolygonsSh]
testClipPolygon :: Spec
testClipPolygon =
describe "simple polygon test" $ do
it "Simple - Returns clipped polygon" $
ControlMonad.forM_ polgonClippingAlgorithms (\x -> x polyClip geoPolyTst geoPolygonFeatureTst Sequence.empty `shouldBe` geoResultPolyFeatureTst)
it "Simple - Returns clipped multipolygon" $
ControlMonad.forM_ multipolgonClippingAlgorithms (\x -> x polyClip geoPolysTst geoPolygonFeatureTst Sequence.empty `shouldBe` geoResultPolysFeatureTst)
it "Simple - Negative polygon" $
ControlMonad.forM_ polgonClippingAlgorithms (\x -> x brokenClip geoBrokenPolyTst geoBrokenPolyFeatureTst Sequence.empty `shouldBe` Sequence.empty)
it "Simple - Maximum polygon" $
ControlMonad.forM_ polgonClippingAlgorithms (\x -> x giantClip geoGiantPolyTst geoGiantPolyFeatureTst Sequence.empty `shouldBe` geoResultGiantPolyFeatureTst)
it "Simple - Turning point test" $
ControlMonad.forM_ polgonClippingAlgorithms (\x -> x turningClip geoTurningPolyTst geoTurningPolyFeatureTst Sequence.empty `shouldBe` geoResultTurningPolyFeatureTst)
|
sitewisely/zellige
|
test/Data/Geometry/ClipSpec.hs
|
apache-2.0
| 13,968
| 0
| 16
| 1,677
| 4,297
| 2,414
| 1,883
| 186
| 1
|
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE DeriveDataTypeable #-}
-----------------------------------------------------------------------------
--
-- Module : Database
-- Copyright :
-- License : AllRightsReserved
--
-- Maintainer :
-- Stability :
-- Portability :
--
-- |
--
-----------------------------------------------------------------------------
module Database (
DBUsers
, DBFiles
, GetUser (..)
, AddUser (..)
, DelUser (..)
, GetFile (..)
, GetFiles (..)
, AddFile (..)
, DelFile (..)
, DB (..)
, initFiles
, initUsers
) where
import qualified File as F
import File (File)
import qualified User as U
import User (User)
import qualified Data.Map as M
import Data.Map (Map)
import Data.List (find)
import Data.SafeCopy (deriveSafeCopy, base)
import Data.Acid (AcidState, Update, Query, makeAcidic)
import Control.Monad.Reader (ask)
import Control.Monad.State (get, put)
data Files = Files [File]
data Users = Users (Map String User)
$(deriveSafeCopy 0 'base ''Users)
addUser :: User -> Update Users ()
addUser user = do
Users users <- get
put $ Users $ M.insert (U.username user) user users
delUser :: User -> Update Users ()
delUser user = do
Users users <- get
let username = U.username user
put $ Users $ M.delete username users
getUser :: String -> Query Users (Maybe User)
getUser username = do
Users users <- ask
return $ M.lookup username users
$(makeAcidic ''Users ['addUser, 'getUser, 'delUser])
type DBUsers = AcidState Users
$(deriveSafeCopy 0 'base ''Files)
addFile :: File -> Update Files ()
addFile file = do
Files files <- get
put $ Files $ file : files
delFile :: File -> Update Files ()
delFile file = do
Files files <- get
put $ Files $ filter (/= file) files
getFiles :: Query Files [File]
getFiles = do
Files files <- ask
return files
getFile :: String -> Query Files (Maybe File)
getFile fname = do
Files files <- ask
return $ findFile fname files
findFile :: String -> [File] -> Maybe File
findFile fname = find ((== fname) . F.filename)
$(makeAcidic ''Files ['addFile, 'getFile, 'getFiles, 'delFile])
type DBFiles = AcidState Files
data DB = DB {
users :: DBUsers
, files :: DBFiles
}
initFiles :: Files
initFiles = Files []
initUsers :: Users
initUsers = Users $ M.fromList [("root", U.root), ("guest", U.guest)]
|
uvNikita/FileService
|
src/Database.hs
|
apache-2.0
| 2,480
| 0
| 11
| 594
| 817
| 450
| 367
| 71
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Kubernetes.V1.EnvVar where
import GHC.Generics
import Data.Text
import Kubernetes.V1.EnvVarSource
import qualified Data.Aeson
-- | EnvVar represents an environment variable present in a Container.
data EnvVar = EnvVar
{ name :: Text -- ^ Name of the environment variable. Must be a C_IDENTIFIER.
, value :: Maybe Text -- ^ Variable references $(VAR_NAME) are expanded using the previous defined environment variables in the container and any service environment variables. If a variable cannot be resolved, the reference in the input string will be unchanged. The $(VAR_NAME) syntax can be escaped with a double $$, ie: $$(VAR_NAME). Escaped references will never be expanded, regardless of whether the variable exists or not. Defaults to \"\".
, valueFrom :: Maybe EnvVarSource -- ^ Source for the environment variable's value. Cannot be used if value is not empty.
} deriving (Show, Eq, Generic)
instance Data.Aeson.FromJSON EnvVar
instance Data.Aeson.ToJSON EnvVar
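-- A round-trip sketch for the generic Aeson instances above (illustrative
-- only; 'envVarRoundTrip' and the sample value are made up): encoding and
-- then decoding an 'EnvVar' yields the original record.
envVarRoundTrip :: Bool
envVarRoundTrip = Data.Aeson.decode (Data.Aeson.encode example) == Just example
  where example = EnvVar "LOG_LEVEL" (Just "debug") Nothing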
|
minhdoboi/deprecated-openshift-haskell-api
|
kubernetes/lib/Kubernetes/V1/EnvVar.hs
|
apache-2.0
| 1,166
| 0
| 9
| 187
| 106
| 65
| 41
| 17
| 0
|
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE RecordWildCards #-}
module PrivateCloud.Aws.Monad where
import Aws.Aws
import Aws.Core
import Aws.S3.Core
import Control.Exception.Safe
import Control.Monad.IO.Class
import Control.Monad.Trans.Reader
import Control.Monad.Trans.Resource
import Network.HTTP.Client
import qualified Data.Text as T
data AwsContext = AwsContext
{ acConf :: Configuration
, acBucket :: Bucket
, acDomain :: T.Text
, acManager :: Manager
}
newtype AwsMonad a = AwsMonad (ReaderT AwsContext IO a)
deriving (Functor, Applicative, Monad, MonadIO, MonadThrow)
awsContext :: AwsMonad AwsContext
awsContext = AwsMonad ask
awsReq
:: (Transaction r a,
DefaultServiceConfiguration (ServiceConfiguration r NormalQuery))
=> r -> AwsMonad a
awsReq req = do
AwsContext{..} <- awsContext
liftIO $ runResourceT $ pureAws acConf defServiceConfig acManager req
|
rblaze/private-cloud
|
src/PrivateCloud/Aws/Monad.hs
|
apache-2.0
| 965
| 0
| 9
| 163
| 238
| 138
| 100
| 29
| 1
|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-| This module takes compute graph and turns them into a string representation
that is easy to display using TensorBoard. See the python and Haskell
frontends to see how to do that given the string representations.
-}
module Spark.Core.Internal.Display(
displayGraph
) where
import qualified Data.Map as M
import qualified Data.Vector as V
import qualified Data.Text as T
import Data.Text(Text)
import Data.Default
import Lens.Family2 ((&), (.~))
import Data.Monoid((<>))
import Data.Text.Encoding(encodeUtf8)
import Data.Functor.Identity(runIdentity, Identity)
import qualified Proto.Tensorflow.Core.Framework.Graph as PG
import qualified Proto.Tensorflow.Core.Framework.NodeDef as PN
import qualified Proto.Tensorflow.Core.Framework.AttrValue as PAV
import Spark.Core.Internal.ContextStructures(ComputeGraph)
import Spark.Core.Internal.ComputeDag(computeGraphMapVertices, cdVertices)
import Spark.Core.Internal.DAGStructures(Vertex(vertexData))
import Spark.Core.Internal.OpStructures(OpExtra(opContentDebug))
import Spark.Core.Internal.OpFunctions(simpleShowOp, extraNodeOpData)
import Spark.Core.Internal.DatasetStructures(OperatorNode(..), StructureEdge(..), onOp, onType, onLocality)
import Spark.Core.StructuresInternal(prettyNodePath)
import Spark.Core.Internal.Utilities(show')
{-| Converts a compute graph to a form that can be displayed by TensorBoard.
-}
displayGraph :: ComputeGraph -> PG.GraphDef
displayGraph cg = PG.GraphDef nodes where
f :: OperatorNode -> [(PN.NodeDef, StructureEdge)] -> Identity PN.NodeDef
f on l = pure $ _displayNode on parents logical where
f' edgeType = PN._NodeDef'name . fst <$> filter ((edgeType ==).snd) l
parents = f' ParentEdge
logical = f' LogicalEdge
cg2 = runIdentity $ computeGraphMapVertices cg f
nodes = vertexData <$> V.toList (cdVertices cg2)
_displayNode :: OperatorNode -> [Text] -> [Text] -> PN.NodeDef
_displayNode on parents logical = (def :: PN.NodeDef)
& PN.name .~ (trim . prettyNodePath . onPath $ on)
& PN.input .~ (trim <$> parents ++ (("^" <>) . trim <$> logical))
& PN.op .~ simpleShowOp (onOp on)
& PN.attr .~ _attributes on
& PN.device .~ "/spark:0" where
trim txt
| T.null txt = ""
| T.head txt == '/' = T.tail txt
| otherwise = txt
_attributes :: OperatorNode -> M.Map Text PAV.AttrValue
_attributes on = M.fromList [("type", t), ("locality", l), ("zextra", e)] where
l' = encodeUtf8 . show' . onLocality $ on
t' = encodeUtf8 . show' . onType $ on
e' = encodeUtf8 . _clean . opContentDebug . extraNodeOpData . onOp $ on
l = (def :: PAV.AttrValue) & PAV.s .~ l'
t = (def :: PAV.AttrValue) & PAV.s .~ t'
e = (def :: PAV.AttrValue) & PAV.s .~ e'
_clean :: Text -> Text
_clean = T.replace "\"" "." . T.replace "\n" "" . T.replace "\\" ""
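-- Illustrative behaviour of '_clean' (sketch only; '_cleanExample' is a new
-- binding): backslashes and newlines are dropped and double quotes become
-- dots, keeping the debug payload safe to embed in the attribute value above.
_cleanExample :: Bool
_cleanExample = _clean "say \"hi\"\n" == "say .hi."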
|
tjhunter/karps
|
haskell/src/Spark/Core/Internal/Display.hs
|
apache-2.0
| 2,926
| 0
| 17
| 480
| 865
| 499
| 366
| 55
| 1
|
-- http://www.codewars.com/kata/515e271a311df0350d00000f
module SquareSum where
squareSum :: [Integer] -> Integer
squareSum = sum . map (^2)
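-- Illustrative check (not part of the kata solution): 1^2 + 2^2 + 2^2 == 9.
squareSumExample :: Bool
squareSumExample = squareSum [1, 2, 2] == 9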
|
Bodigrim/katas
|
src/haskell/6-Squaren-Sum.hs
|
bsd-2-clause
| 141
| 0
| 7
| 16
| 33
| 20
| 13
| 3
| 1
|
{-# LANGUAGE BangPatterns #-}
-- | Replace a string by another string
--
-- Tested in this benchmark:
--
-- * Search and replace of a pattern in a text
--
module Benchmarks.Replace
( benchmark
, initEnv
) where
import Test.Tasty.Bench (Benchmark, bgroup, bench, nf)
import qualified Data.ByteString.Char8 as B
import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString.Lazy.Search as BL
import qualified Data.ByteString.Search as B
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.Text.Lazy as TL
import qualified Data.Text.Lazy.Encoding as TL
import qualified Data.Text.Lazy.IO as TL
type Env = (T.Text, B.ByteString, TL.Text, BL.ByteString)
initEnv :: FilePath -> IO Env
initEnv fp = do
tl <- TL.readFile fp
bl <- BL.readFile fp
let !t = TL.toStrict tl
!b = T.encodeUtf8 t
return (t, b, tl, bl)
benchmark :: String -> String -> Env -> Benchmark
benchmark pat sub ~(t, b, tl, bl) =
bgroup "Replace" [
bench "Text" $ nf (T.length . T.replace tpat tsub) t
, bench "ByteString" $ nf (BL.length . B.replace bpat bsub) b
, bench "LazyText" $ nf (TL.length . TL.replace tlpat tlsub) tl
, bench "LazyByteString" $ nf (BL.length . BL.replace blpat blsub) bl
]
where
tpat = T.pack pat
tsub = T.pack sub
tlpat = TL.pack pat
tlsub = TL.pack sub
bpat = T.encodeUtf8 tpat
bsub = T.encodeUtf8 tsub
blpat = B.concat $ BL.toChunks $ TL.encodeUtf8 tlpat
blsub = B.concat $ BL.toChunks $ TL.encodeUtf8 tlsub
|
bos/text
|
benchmarks/haskell/Benchmarks/Replace.hs
|
bsd-2-clause
| 1,597
| 0
| 12
| 376
| 517
| 289
| 228
| 37
| 1
|
module Text.RSS.Import.Tests where
import Test.HUnit (Assertion, assertEqual)
import Test.Framework (Test, mutuallyExclusive, testGroup)
import Test.Framework.Providers.HUnit (testCase)
import Text.RSS.Import
import Text.RSS.Syntax
import Text.XML.Light as XML
import Text.RSS.Utils (createContent, createQName)
import Text.RSS.Equals ()
rssImportTests :: Test
rssImportTests = testGroup "Text.RSS.Import"
[ mutuallyExclusive $ testGroup "RSS import"
[ testElementToCloudIsNotCreated
, testElementToCloud
]
]
testElementToCloudIsNotCreated :: Test
testElementToCloudIsNotCreated = testCase "should not create rss cloud" notCreateRSSCloud
where
notCreateRSSCloud :: Assertion
notCreateRSSCloud = do
let notXmlCloudElement = XML.Element { elName = createQName "notCloud", elAttribs = [], elContent = [], elLine = Nothing}
let expected = Nothing
assertEqual "not create rss cloud" expected (elementToCloud notXmlCloudElement)
testElementToCloud :: Test
testElementToCloud = testCase "should create rss cloud" createRSSCloud
where
createRSSCloud :: Assertion
createRSSCloud = do
let attr = XML.Attr { attrKey = createQName "attr" , attrVal = "text for attr" }
let xmlCloudElement = XML.Element {
elName = createQName "cloud"
, elAttribs = [
XML.Attr { attrKey = createQName "domain" , attrVal = "domain cloud" }
, XML.Attr { attrKey = createQName "port" , attrVal = "port cloud" }
, XML.Attr { attrKey = createQName "path" , attrVal = "path cloud" }
, XML.Attr { attrKey = createQName "registerProcedure" , attrVal = "register cloud" }
, XML.Attr { attrKey = createQName "protocol" , attrVal = "protocol cloud" }
, attr
] :: [ Attr ]
, elContent = [ createContent "" ]
, elLine = Nothing
}
let expected = Just RSSCloud {
rssCloudDomain = Just "domain cloud"
, rssCloudPort = Just "port cloud"
, rssCloudPath = Just "path cloud"
, rssCloudRegisterProcedure = Just "register cloud"
, rssCloudProtocol = Just "protocol cloud"
, rssCloudAttrs = [ attr ]
}
assertEqual "create rss cloud" expected (elementToCloud xmlCloudElement)
|
danfran/feed
|
tests/Text/RSS/Import/Tests.hs
|
bsd-3-clause
| 2,384
| 0
| 17
| 640
| 525
| 301
| 224
| 46
| 1
|
module TGA(createTGA,putTGApixels)
where
import System.IO
import qualified Data.ByteString as B
import Data.Word
pixeltopacked (u,v,w) =
B.pack (
[fromIntegral (w `mod` 256),
fromIntegral (v `mod` 256),
fromIntegral (u `mod` 256)]::[Word8]
)
createTGA :: String -> (Int, Int) -> IO Handle
createTGA filename (sizex,sizey) = do
fi <- openFile filename WriteMode
-- O L D
B.hPut fi zero -- 0 1 Number of Characters in Identification Field.
B.hPut fi zero -- 1 1 Color Map Type.
B.hPut fi two -- 2 1 Image Type Code.
B.hPut fi zero -- 3 5 Color Map Specification.
B.hPut fi zero --
B.hPut fi zero -- 5
B.hPut fi zero --
B.hPut fi zero -- 7
B.hPut fi zero -- 8 10 Image Specification.
B.hPut fi zero
B.hPut fi zero -- 10
B.hPut fi zero
B.hPut fi $ packint $ fromIntegral (sizex `mod` 256)
B.hPut fi $ packint $ fromIntegral (sizex `div` 256)
B.hPut fi $ packint $ fromIntegral (sizey `mod` 256)
B.hPut fi $ packint $ fromIntegral (sizey `div` 256)
B.hPut fi $ packint 24
B.hPut fi zero -- 17 1 Image Descriptor Byte.
-- B.hPut fi zero -- 18 var Image Identification Field.
return fi
where
packint i = B.pack ([i]::[Word8])
zero = packint 0
two = packint 2
putTGApixel :: Handle -> (Int,Int,Int) -> IO ()
putTGApixel h p = B.hPut h (pixeltopacked p)
putTGApixels :: Handle -> [(Int,Int,Int)] -> Int -> Int -> IO ()
putTGApixels h [] _ _ = putStrLn "Done."
putTGApixels h (x:t) progress max = do
if 100*progress `div` max == round (100*fromIntegral progress / fromIntegral max) then putStr (show (100*progress `div` max) ++ "%\r")
else return ()
B.hPut h $ pixeltopacked x
putTGApixels h t (progress+1) max
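-- Hypothetical usage sketch (file name and sizes made up): create a 2x2
-- image, write its four pixels in a single colour, then close the handle.
--
-- > do h <- createTGA "out.tga" (2, 2)
-- >    putTGApixels h (replicate 4 (200, 30, 30)) 0 4
-- >    hClose h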
|
hacxman/renderer
|
TGA.hs
|
bsd-3-clause
| 1,713
| 24
| 14
| 392
| 737
| 376
| 361
| 43
| 2
|
import System.IO
import Databases
|
davbaumgartner/flaskell
|
src/WebServer/Tempdb.hs
|
bsd-3-clause
| 35
| 0
| 4
| 5
| 9
| 5
| 4
| 2
| 0
|
{-# OPTIONS -fno-warn-tabs #-}
-- The above warning suppression flag is a temporary kludge.
-- While working on this module you are encouraged to remove it and
-- detab the module (please do the detabbing in a separate patch). See
-- http://hackage.haskell.org/trac/ghc/wiki/Commentary/CodingStyle#TabsvsSpaces
-- for details
module RegAlloc.Graph.SpillCost (
SpillCostRecord,
plusSpillCostRecord,
pprSpillCostRecord,
SpillCostInfo,
zeroSpillCostInfo,
plusSpillCostInfo,
slurpSpillCostInfo,
chooseSpill,
lifeMapFromSpillCostInfo
)
where
import RegAlloc.Liveness
import Instruction
import RegClass
import Reg
import GraphBase
import BlockId
import OldCmm
import UniqFM
import UniqSet
import Digraph (flattenSCCs)
import Outputable
import Platform
import State
import Data.List (nub, minimumBy)
import Data.Maybe
type SpillCostRecord
= ( VirtualReg -- register name
, Int -- number of writes to this reg
, Int -- number of reads from this reg
, Int) -- number of instrs this reg was live on entry to
type SpillCostInfo
= UniqFM SpillCostRecord
zeroSpillCostInfo :: SpillCostInfo
zeroSpillCostInfo = emptyUFM
-- | Add two spillCostInfos
plusSpillCostInfo :: SpillCostInfo -> SpillCostInfo -> SpillCostInfo
plusSpillCostInfo sc1 sc2
= plusUFM_C plusSpillCostRecord sc1 sc2
plusSpillCostRecord :: SpillCostRecord -> SpillCostRecord -> SpillCostRecord
plusSpillCostRecord (r1, a1, b1, c1) (r2, a2, b2, c2)
| r1 == r2 = (r1, a1 + a2, b1 + b2, c1 + c2)
| otherwise = error "RegSpillCost.plusRegInt: regs don't match"
-- | Slurp out information used for determining spill costs
-- for each vreg, the number of times it was written to, read from,
-- and the number of instructions it was live on entry to (lifetime)
--
slurpSpillCostInfo :: (Outputable instr, Instruction instr)
=> Platform
-> LiveCmmDecl statics instr
-> SpillCostInfo
slurpSpillCostInfo platform cmm
= execState (countCmm cmm) zeroSpillCostInfo
where
countCmm CmmData{} = return ()
countCmm (CmmProc info _ sccs)
= mapM_ (countBlock info)
$ flattenSCCs sccs
-- lookup the regs that are live on entry to this block in
-- the info table from the CmmProc
countBlock info (BasicBlock blockId instrs)
| LiveInfo _ _ (Just blockLive) _ <- info
, Just rsLiveEntry <- mapLookup blockId blockLive
, rsLiveEntry_virt <- takeVirtuals rsLiveEntry
= countLIs rsLiveEntry_virt instrs
| otherwise
= error "RegAlloc.SpillCost.slurpSpillCostInfo: bad block"
countLIs _ []
= return ()
-- skip over comment and delta pseudo instrs
countLIs rsLive (LiveInstr instr Nothing : lis)
| isMetaInstr instr
= countLIs rsLive lis
| otherwise
= pprPanic "RegSpillCost.slurpSpillCostInfo"
(text "no liveness information on instruction " <> ppr instr)
countLIs rsLiveEntry (LiveInstr instr (Just live) : lis)
= do
-- increment the lifetime counts for regs live on entry to this instr
mapM_ incLifetime $ uniqSetToList rsLiveEntry
-- increment counts for what regs were read/written from
let (RU read written) = regUsageOfInstr platform instr
mapM_ incUses $ catMaybes $ map takeVirtualReg $ nub read
mapM_ incDefs $ catMaybes $ map takeVirtualReg $ nub written
-- compute liveness for entry to next instruction.
let liveDieRead_virt = takeVirtuals (liveDieRead live)
let liveDieWrite_virt = takeVirtuals (liveDieWrite live)
let liveBorn_virt = takeVirtuals (liveBorn live)
let rsLiveAcross
= rsLiveEntry `minusUniqSet` liveDieRead_virt
let rsLiveNext
= (rsLiveAcross `unionUniqSets` liveBorn_virt)
`minusUniqSet` liveDieWrite_virt
countLIs rsLiveNext lis
incDefs reg = modify $ \s -> addToUFM_C plusSpillCostRecord s reg (reg, 1, 0, 0)
incUses reg = modify $ \s -> addToUFM_C plusSpillCostRecord s reg (reg, 0, 1, 0)
incLifetime reg = modify $ \s -> addToUFM_C plusSpillCostRecord s reg (reg, 0, 0, 1)
takeVirtuals :: UniqSet Reg -> UniqSet VirtualReg
takeVirtuals set = mapUniqSet get_virtual
$ filterUniqSet isVirtualReg set
where
get_virtual (RegVirtual vr) = vr
get_virtual _ = panic "getVirt"
-- | Choose a node to spill from this graph
chooseSpill
:: SpillCostInfo
-> Graph VirtualReg RegClass RealReg
-> VirtualReg
chooseSpill info graph
= let cost = spillCost_length info graph
node = minimumBy (\n1 n2 -> compare (cost $ nodeId n1) (cost $ nodeId n2))
$ eltsUFM $ graphMap graph
in nodeId node
-- | Chaitin's spill cost function is:
--
--      cost = sum_{u <- uses(v)} loadCost * freq(u) + sum_{d <- defs(v)} storeCost * freq(d)
--
-- There are no loops in our code at the moment, so we can set the freqs to 1.
-- We divide this cost by the degree of the node.
--
--
-- If we don't have live range splitting then Chaitin's function performs badly if we have
-- lots of nested live ranges and very few registers.
--
--              v1 v2 v3
--      def v1  .
--      use v1  .
--      def v2  .  .
--      def v3  .  .  .
--      use v1  .  .  .
--      use v3  .  .  .
--      use v2  .  .
--      use v1  .
--
--
--          defs uses degree   cost
--  v1:      1     3     3     1.33
--  v2:      1     2     3     1.0
--  v3:      1     1     3     0.666
--
-- v3 has the lowest cost, but if we only have 2 hardregs and we insert spill code for v3
-- then this isn't going to improve the colorability of the graph.
--
-- When compiling SHA1, which has very long basic blocks and some vregs with very long live ranges,
-- the allocator seems to try and spill from the inside out and eventually run out of stack slots.
--
-- Without live range splitting, it's better to spill from the outside in, so set the cost of very
-- long live ranges to zero.
--
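-- For instance, plugging the example above into the (commented-out)
-- spillCost_chaitin below with loadCost = storeCost = freq = 1 gives
-- cost v1 = (1 + 3) / 3 ~ 1.33 and cost v3 = (1 + 1) / 3 ~ 0.67, which is
-- where the figures in the table come from.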
{-
spillCost_chaitin
:: SpillCostInfo
-> Graph Reg RegClass Reg
-> Reg
-> Float
spillCost_chaitin info graph reg
-- Spilling a live range that only lives for 1 instruction isn't going to help
-- us at all - and we definitely want to avoid trying to re-spill previously
-- inserted spill code.
| lifetime <= 1 = 1/0
-- It's unlikely that we'll find a reg for a live range this long
-- better to spill it straight up and not risk trying to keep it around
-- and have to go through the build/color cycle again.
| lifetime > allocatableRegsInClass (regClass reg) * 10
= 0
-- otherwise revert to chaitin's regular cost function.
| otherwise = fromIntegral (uses + defs) / fromIntegral (nodeDegree graph reg)
where (_, defs, uses, lifetime)
= fromMaybe (reg, 0, 0, 0) $ lookupUFM info reg
-}
-- Just spill the longest live range.
spillCost_length
:: SpillCostInfo
-> Graph VirtualReg RegClass RealReg
-> VirtualReg
-> Float
spillCost_length info _ reg
| lifetime <= 1 = 1/0
| otherwise = 1 / fromIntegral lifetime
where (_, _, _, lifetime)
= fromMaybe (reg, 0, 0, 0)
$ lookupUFM info reg
lifeMapFromSpillCostInfo :: SpillCostInfo -> UniqFM (VirtualReg, Int)
lifeMapFromSpillCostInfo info
= listToUFM
$ map (\(r, _, _, life) -> (r, (r, life)))
$ eltsUFM info
-- | Work out the degree of this node, counting only neighbors that have the same class.
nodeDegree
:: (VirtualReg -> RegClass)
-> Graph VirtualReg RegClass RealReg
-> VirtualReg
-> Int
nodeDegree classOfVirtualReg graph reg
| Just node <- lookupUFM (graphMap graph) reg
, virtConflicts <- length
$ filter (\r -> classOfVirtualReg r == classOfVirtualReg reg)
$ uniqSetToList
$ nodeConflicts node
= virtConflicts + sizeUniqSet (nodeExclusions node)
| otherwise
= 0
-- | Show a spill cost record, including the degree from the graph and the final calculated spill cost.
pprSpillCostRecord
:: (VirtualReg -> RegClass)
-> (Reg -> SDoc)
-> Graph VirtualReg RegClass RealReg
-> SpillCostRecord
-> SDoc
pprSpillCostRecord regClass pprReg graph (reg, uses, defs, life)
= hsep
[ pprReg (RegVirtual reg)
, ppr uses
, ppr defs
, ppr life
, ppr $ nodeDegree regClass graph reg
, text $ show $ (fromIntegral (uses + defs)
/ fromIntegral (nodeDegree regClass graph reg) :: Float) ]
|
nomeata/ghc
|
compiler/nativeGen/RegAlloc/Graph/SpillCost.hs
|
bsd-3-clause
| 8,068
| 173
| 17
| 1,760
| 1,528
| 854
| 674
| 144
| 4
|
----------------------------------------------------------------------------
-- |
-- Module : Data.KeyMap
-- Copyright : (c) Sergey Vinokurov 2016
-- License : BSD3-style (see LICENSE)
-- Maintainer : serg.foo@gmail.com
-- Created : Monday, 19 September 2016
----------------------------------------------------------------------------
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
{-# OPTIONS_GHC -Wredundant-constraints #-}
{-# OPTIONS_GHC -Wsimplifiable-class-constraints #-}
module Data.KeyMap
( KeyMap
, unKeyMap
, HasKey(..)
, insert
, lookup
, member
, notMember
, fromList
, toMap
, toList
, elems
, restrictKeys
, keysSet
, empty
, null
, size
, intersectionWith
, differenceWith
) where
import Prelude hiding (lookup, null)
import Control.Arrow
import Control.DeepSeq
import Data.Coerce
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as M
import Data.Pointed
import Data.Set (Set)
import Data.Store (Store)
import GHC.Generics
import Data.Text.Prettyprint.Doc.Combinators
-- | Map that maintains sets of values that all share some key.
-- Every value must be a member of 'HasKey' typeclass.
newtype KeyMap f a = KeyMap { unKeyMap :: Map (Key a) (f a) }
deriving (Generic)
deriving instance (Eq (f a), Eq (Key a)) => Eq (KeyMap f a)
deriving instance (Ord (f a), Ord (Key a)) => Ord (KeyMap f a)
deriving instance (Show (f a), Show (Key a)) => Show (KeyMap f a)
instance (Store (f a), Store (Key a), Ord (Key a)) => Store (KeyMap f a)
instance (NFData (f a), NFData (Key a)) => NFData (KeyMap f a)
instance (Pretty (Key a), Pretty (f a)) => Pretty (KeyMap f a) where
pretty = ppAssocList . M.toList . unKeyMap
instance (Ord (Key a), Semigroup (f a)) => Semigroup (KeyMap f a) where
{-# INLINE (<>) #-}
KeyMap m <> KeyMap m' = KeyMap $ M.unionWith (<>) m m'
instance (Ord (Key a), Semigroup (f a)) => Monoid (KeyMap f a) where
{-# INLINE mempty #-}
{-# INLINE mappend #-}
mempty = KeyMap mempty
mappend = (<>)
instance Foldable f => Foldable (KeyMap f) where
{-# INLINE foldMap #-}
foldMap f = foldMap (foldMap f) . unKeyMap
class Ord (Key a) => HasKey a where
type Key a :: *
getKey :: a -> Key a
{-# INLINE insert #-}
insert
:: forall a f. (HasKey a, Pointed f, Semigroup (f a))
=> a -> KeyMap f a -> KeyMap f a
insert x = coerce $ M.insertWith (<>) (getKey x) (point @f x)
{-# INLINE lookup #-}
lookup :: forall a f . HasKey a => Key a -> KeyMap f a -> Maybe (f a)
lookup = coerce (M.lookup :: Key a -> Map (Key a) (f a) -> Maybe (f a))
{-# INLINE member #-}
member :: forall a f. HasKey a => Key a -> KeyMap f a -> Bool
member = coerce (M.member :: Key a -> Map (Key a) (f a) -> Bool)
{-# INLINE notMember #-}
notMember :: forall a f. HasKey a => Key a -> KeyMap f a -> Bool
notMember = coerce (M.notMember :: Key a -> Map (Key a) (f a) -> Bool)
fromList :: (HasKey a, Pointed f, Semigroup (f a)) => [a] -> KeyMap f a
fromList = KeyMap . M.fromListWith (<>) . map (getKey &&& point)
{-# INLINE toMap #-}
toMap :: KeyMap f a -> Map (Key a) (f a)
toMap = unKeyMap
{-# INLINE toList #-}
toList :: KeyMap f a -> [(Key a, f a)]
toList = M.toList . toMap
{-# INLINE elems #-}
elems :: forall a f. KeyMap f a -> [f a]
elems = coerce (M.elems :: Map (Key a) (f a) -> [f a])
{-# INLINE restrictKeys #-}
restrictKeys :: forall a f. HasKey a => KeyMap f a -> Set (Key a) -> KeyMap f a
restrictKeys =
coerce (M.restrictKeys :: Map (Key a) (f a) -> Set (Key a) -> Map (Key a) (f a))
{-# INLINE keysSet #-}
keysSet :: forall a f. KeyMap f a -> Set (Key a)
keysSet = coerce (M.keysSet :: Map (Key a) (f a) -> Set (Key a))
{-# INLINE empty #-}
empty :: forall a f. KeyMap f a
empty = coerce (M.empty :: Map (Key a) (f a))
{-# INLINE null #-}
null :: forall a f. KeyMap f a -> Bool
null = coerce (M.null :: Map (Key a) (f a) -> Bool)
{-# INLINE size #-}
size :: forall a f. KeyMap f a -> Int
size = coerce (M.size :: Map (Key a) (f a) -> Int)
{-# INLINE intersectionWith #-}
intersectionWith
:: forall a f. HasKey a
=> (f a -> f a -> f a)
-> KeyMap f a
-> KeyMap f a
-> KeyMap f a
intersectionWith =
coerce (M.intersectionWith :: (f a -> f a -> f a) -> Map (Key a) (f a) -> Map (Key a) (f a) -> Map (Key a) (f a))
{-# INLINE differenceWith #-}
differenceWith
:: forall a f. HasKey a
=> (f a -> f a -> Maybe (f a))
-> KeyMap f a
-> KeyMap f a
-> KeyMap f a
differenceWith =
coerce (M.differenceWith :: (f a -> f a -> Maybe (f a)) -> Map (Key a) (f a) -> Map (Key a) (f a) -> Map (Key a) (f a))
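-- A minimal usage sketch. The 'Example' type below is hypothetical and exists
-- only to illustrate the API: values sharing a key are collected with the
-- container's Semigroup, here plain lists (which are Pointed).
data Example = Example { exampleKey :: String, exampleVal :: Int }
  deriving (Eq, Show)
instance HasKey Example where
  type Key Example = String
  getKey = exampleKey
exampleMap :: KeyMap [] Example
exampleMap = fromList [Example "a" 1, Example "a" 2, Example "b" 3]
-- Both "a" values end up under one key:
-- fmap length (lookup "a" exampleMap) == Just 2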
|
sergv/tags-server
|
src/Data/KeyMap.hs
|
bsd-3-clause
| 4,785
| 0
| 13
| 1,063
| 1,982
| 1,043
| 939
| 121
| 1
|
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE ViewPatterns #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.HABSim.Grib2.CSVParse.Types
-- Copyright : (C) 2017 Ricky Elrod
-- License : (see project LICENSE file)
-- Maintainer : Ricky Elrod <ricky@elrod.me>
-- Stability : experimental
-- Portability : GeneralizedNewtypeDeriving, ViewPatterns
--
-- This module provides types for parsing CSV files generated by @wgrib2@.
-- It is used (and re-exported) by 'Data.HABSim.Grib2.CSVParse'.
----------------------------------------------------------------------------
module Data.HABSim.Grib2.CSVParse.Types where
import Control.Applicative
import Control.Monad (mzero)
import qualified Data.ByteString.Char8 as B
import Data.Char (isDigit)
import Data.Csv
import Data.HABSim.Types (Longitude (..), Latitude (..))
import Data.Hashable
import Data.List (isSuffixOf)
import Data.Time
import qualified Data.Vector as V
import Text.Read (readMaybe)
-- | The wind direction
--
-- 'UGRD' is North\/South, 'VGRD' is East\/West.
--
-- If we're given something other than @UGRD@ or @VGRD@ in this field, we store
-- the value in the 'Other' constructor.
data Direction = UGRD | VGRD | Other String deriving (Eq, Show)
instance Hashable Direction where
hashWithSalt s (Other str) = s `hashWithSalt` (0::Int) `hashWithSalt` str
hashWithSalt s UGRD = s `hashWithSalt` (1::Int)
hashWithSalt s VGRD = s `hashWithSalt` (2::Int)
-- | This is used for both the 'referenceTime' and the 'forecastTime'. The
-- reason it exists is solely so we can create a 'FromField' instance on
-- 'UTCTime' while avoiding orphan instances.
newtype GribTime = GribTime { _gribTime :: UTCTime }
deriving (Eq, Ord, Read, Show, ParseTime, FormatTime)
-- | This is a (mostly-)raw gribline, right after being parsed.
--
-- The "mostly-" comes from the fact that we wrap a few things (e.g. the times
-- into 'GribTime') and convert 'pressure' into an 'Int'.
data RawGribLine =
RawGribLine { _referenceTime :: GribTime
, _forecastTime :: GribTime
, _direction :: Direction
, _pressure :: Int
, _longitude :: Longitude
, _latitude :: Latitude
, _velocity :: Double
} deriving (Eq, Show)
-- | A single 'UGRD' line.
newtype UGRDLine = UGRDLine { _uGRDLineRaw :: RawGribLine } deriving (Eq, Show)
-- | A single 'VGRD' line.
newtype VGRDLine = VGRDLine { _vGRDLineRaw :: RawGribLine } deriving (Eq, Show)
-- | A single non-UGRD and non-VGRD line.
newtype OtherLine =
OtherLine { _otherLineRaw :: RawGribLine } deriving (Eq, Show)
-- | Either a 'UGRDLine' or a 'VGRDLine'. This is so we can parse and ultimately
-- return a 'V.Vector' containing both 'UGRD' and 'VGRD' lines. We return a
-- 'V.Vector' 'GribLine' (or rather, Cassava does) and we just know that a
-- 'GribLine' will either be a 'UGRDLine' or a 'VGRDLine'.
--
-- If the line is anything else, we return the raw line in 'OtherGribLine'.
data GribLine = UGRDGribLine UGRDLine
| VGRDGribLine VGRDLine
| OtherGribLine OtherLine
deriving (Eq, Show)
instance FromField Direction where
parseField (B.unpack -> "UGRD") = pure UGRD
parseField (B.unpack -> "VGRD") = pure VGRD
parseField s = pure (Other (B.unpack s))
{-# INLINE parseField #-}
instance FromField GribTime where
parseField t =
maybe mzero pure (parseTimeM True defaultTimeLocale "%F %T" (B.unpack t))
{-# INLINE parseField #-}
type Pressure = Int
type Key = (Longitude, Latitude, Pressure, Direction)
newtype KeyedGribLine =
KeyedGribLine { _keyedLine :: Either String (Key, GribLine) }
deriving (Eq, Show)
instance FromRecord KeyedGribLine where
parseRecord v
| V.length v == 7 =
do
refTime <- v .! 0
foreTime <- v .! 1
dir <- v .! 2
press <- parsePressure <$> v .! 3
lon <- v .! 4
lat <- v .! 5
vel <- v .! 6
case press of
Nothing -> return $ KeyedGribLine (Left "Invalid pressure")
Just press' ->
let rawLine = RawGribLine refTime foreTime dir press' lon lat vel
key = (lon, lat, press', dir)
in return $
case dir of
UGRD ->
KeyedGribLine (Right (key, (UGRDGribLine (UGRDLine rawLine))))
VGRD ->
KeyedGribLine (Right (key, (VGRDGribLine (VGRDLine rawLine))))
Other _ ->
KeyedGribLine (Right (key, (OtherGribLine (OtherLine rawLine))))
| otherwise = mzero
where
parsePressure s =
if " mb" `isSuffixOf` s
then readMaybe (takeWhile isDigit s)
else Nothing
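-- A hypothetical row of the shape this parser expects (illustrative only;
-- real wgrib2 CSV output may differ in detail). The seven columns line up
-- with the indices used in 'parseRecord' above:
--
-- > 2017-01-01 00:00:00,2017-01-01 06:00:00,UGRD,850 mb,-80.5,40.25,3.2
--
-- and would decode to a 'UGRDGribLine' keyed by (longitude, latitude, 850, UGRD).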
|
kg4sgp/habsim
|
haskell/src/Data/HABSim/Grib2/CSVParse/Types.hs
|
bsd-3-clause
| 4,787
| 0
| 25
| 1,144
| 981
| 561
| 420
| 81
| 0
|
module Shared where
import qualified Prelude
import Feldspar.Multicore
n' :: Word32
n' = 4
n :: Data Word32
n = value n'
shared :: Multicore ()
shared = do
s0 <- allocSem 0
b0 <- allocSArr n'
s1 <- allocSem 1
b1 <- allocLArr 1 n'
s2 <- allocSem 2
b2 <- allocSArr n'
onHost $ do
initSem s0
initSem s1
initSem s2
onCore 0 (f (s0, b0) (s1, b1))
onCore 1 (g (s1, b1) (s2, b2))
forever $ do
input <- newArr n
for (0, 1, Excl n) $ \i -> do
item <- lift $ fget stdin
setArr input i item
writeArr b0 (0, n - 1) input
release s0
output <- newArr n
acquire s2
readArr b2 (0, n - 1) output
for (0, 1, Excl n) $ \i -> do
item <- getArr output i
printf "> %d\n" item
f :: (Sem, DSArr Int32) -> (Sem, DLArr Int32) -> CoreComp ()
f (ri, input) (ro, output) = forever $ do
acquire ri
tmp <- newArr n
readArr input (0, n - 1) tmp
for (0, 1, Excl n) $ \i -> do
item <- getArr tmp i
setLArr output i (item + 1)
release ro
g :: (Sem, DLArr Int32) -> (Sem, DSArr Int32) -> CoreComp ()
g (ri, input) (ro, output) = forever $ do
acquire ri
tmp <- newArr n
for (0, 1, Excl n) $ \i -> do
item <- getLArr input i
setArr tmp i (item * 2)
writeArr output (0, n - 1) tmp
release ro
------------------------------------------------------------
test = shared
testAll = do
icompileAll `onParallella` test
let modules = compileAll `onParallella` test
forM_ modules $ \(name, contents) -> do
let name' = if name Prelude.== "main" then "host" else name
writeFile (name' Prelude.++ ".c") contents
runTestCompiled = runCompiled' def opts test
where
opts = def {externalFlagsPost = ["-lpthread"]}
|
kmate/raw-feldspar-mcs
|
examples/Shared.hs
|
bsd-3-clause
| 1,922
| 0
| 19
| 670
| 820
| 402
| 418
| 61
| 2
|
module Grammar where
import qualified Data.Map.Strict as DataMap
import qualified Data.Set as DataSet
data Expr a = Empty
| EmptySet
| Internal a
| Call a (Expr a)
| Return a
| Or (Expr a) (Expr a)
| And (Expr a) (Expr a)
| Not (Expr a)
| Concat (Expr a) (Expr a)
| Interleave (Expr a) (Expr a)
| ZeroOrMore (Expr a)
| Optional (Expr a)
| Contains (Expr a)
| Reference String
deriving (Eq, Ord, Show)
-- instance Eq a => Eq (Expr a) where
-- Empty == Empty = True
-- EmptySet == EmptySet = True
-- Node a b == Node a' b' = a == a' && b == b'
-- Or a b == Or a' b' = a == a' && b == b'
-- And a b == And a' b' = a == a' && b == b'
-- Not a == Not a' = a == a'
-- Concat a b == Concat a' b' = a == a' && b == b'
-- Interleave a b == Interleave a' b' = a == a' && b == b'
-- ZeroOrMore a == ZeroOrMore a' = a == a'
-- Optional a == Optional a' = a == a'
-- Contains a == Contains a' = a == a'
-- Reference a == Reference a' = a == a'
-- _ == _ = False
nullable :: Refs a -> Expr a -> Bool
nullable _ Empty = True
nullable _ EmptySet = False
nullable _ (Internal _ ) = False
nullable _ (Call _ _) = False
nullable _ (Return _ ) = False
nullable refs (Or l r) = nullable refs l || nullable refs r
nullable refs (And l r) = nullable refs l && nullable refs r
nullable refs (Not p) = not $ nullable refs p
nullable refs (Concat l r) = nullable refs l && nullable refs r
nullable refs (Interleave l r) = nullable refs l && nullable refs r
nullable _ (ZeroOrMore _) = True
nullable _ (Optional _) = True
nullable refs (Contains p) = nullable refs p
nullable refs (Reference name) = nullable refs $ lookupRef refs name
-- unescapable is used for short circuiting.
-- A part of the tree can be skipped if all Exprs are unescapable.
unescapable :: Expr a -> Bool
unescapable EmptySet = True
unescapable (Not EmptySet) = True
unescapable _ = False
newtype Refs a = Refs (DataMap.Map String (Expr a))
deriving (Show, Eq)
lookupRef :: Refs a -> String -> Expr a
lookupRef (Refs m) name = m DataMap.! name
reverseLookupRef :: (Eq a) => Expr a -> Refs a -> Maybe String
reverseLookupRef p (Refs m) = case DataMap.keys $ DataMap.filter (== p) m of
[] -> Nothing
(k:_) -> Just k
newRef :: String -> Expr a -> Refs a
newRef key value = Refs $ DataMap.singleton key value
emptyRef :: Refs a
emptyRef = Refs DataMap.empty
union :: Refs a -> Refs a -> Refs a
union (Refs m1) (Refs m2) = Refs $ DataMap.union m1 m2
hasRecursion :: Refs a -> Bool
hasRecursion refs = hasRec refs (DataSet.singleton "main") (lookupRef refs "main")
hasRec :: Refs a -> DataSet.Set String -> Expr a -> Bool
hasRec _ _ Empty = False
hasRec _ _ EmptySet = False
hasRec _ _ (Internal _) = False
hasRec _ _ (Call _ _) = False
hasRec _ _ (Return _) = False
hasRec refs set (Or l r) = hasRec refs set l || hasRec refs set r
hasRec refs set (And l r) = hasRec refs set l || hasRec refs set r
hasRec refs set (Not p) = hasRec refs set p
hasRec refs set (Concat l r) = hasRec refs set l || (nullable refs l && hasRec refs set r)
hasRec refs set (Interleave l r) = hasRec refs set l || hasRec refs set r
hasRec _ _ (ZeroOrMore _) = False
hasRec refs set (Optional p) = hasRec refs set p
hasRec refs set (Contains p) = hasRec refs set p
hasRec refs set (Reference name) = DataSet.member name set || hasRec refs (DataSet.insert name set) (lookupRef refs name)
|
katydid/nwe
|
src/Grammar.hs
|
bsd-3-clause
| 3,440
| 0
| 9
| 839
| 1,320
| 666
| 654
| 68
| 2
|
import Criterion.Main
import Control.Applicative
import Control.Monad
import Control.Monad.ST
import qualified Data.Sequence as S
import qualified Data.Vector as V
import Data.Random.Distribution.Uniform as Uni
import Data.Random
import Math.Probable.Random as Pr
import System.Random.MWC.Monad as MWC
import System.Random.MWC
import GA
import UtilsRandom
import PointMutation
import Selection
import Crossover
import Utils
import Examples
--TODO Operator benchmarks against naive versions
-- more fair pm for vectors? ST and/or unboxed?
-- Randomness benchmarks to decide default RNG
-- Algorithm benchmarks
-- Algorithm benchmarks against other frameworks
main = defaultMain
[
bgroup "Ones"
[
randBench "GA" $ (geneticAlgorithm 50 100 1000 0.01 0.6 simpleEval)
]
, bgroup "GA"
[
bgroup "GA Generations"
[
randBench "GA 100 gens" $ (geneticAlgorithm 25 50 100 0.01 0.6 simpleEval)
, randBench "GA 200 gens" $ (geneticAlgorithm 25 50 200 0.01 0.6 simpleEval)
, randBench "GA 300 gens" $ (geneticAlgorithm 25 50 300 0.01 0.6 simpleEval)
, randBench "GA 400 gens" $ (geneticAlgorithm 25 50 400 0.01 0.6 simpleEval)
]
, bgroup "GA Individual Sizes"
[
randBench "GA 100 Locuses" $ (geneticAlgorithm 50 100 100 0.01 0.6 simpleEval)
, randBench "GA 200 Locuses" $ (geneticAlgorithm 50 200 100 0.01 0.6 simpleEval)
, randBench "GA 300 Locuses" $ (geneticAlgorithm 50 300 100 0.01 0.6 simpleEval)
, randBench "GA 400 Locuses" $ (geneticAlgorithm 50 400 100 0.01 0.6 simpleEval)
]
, bgroup "GA Population Sizes"
[
randBench "GA 100 Individuals" $ (geneticAlgorithm 100 100 100 0.01 0.6 simpleEval)
, randBench "GA 200 Individuals" $ (geneticAlgorithm 200 100 100 0.01 0.6 simpleEval)
, randBench "GA 300 Individuals" $ (geneticAlgorithm 300 100 100 0.01 0.6 simpleEval)
, randBench "GA 400 Individuals" $ (geneticAlgorithm 400 100 100 0.01 0.6 simpleEval)
]
, bgroup "GA Mutation Rates"
[
bcompare
[
randBench "GA pm = 0.01" $ (geneticAlgorithm 100 100 100 0.01 0.6 simpleEval)
, randBench "GA pm = 0.25" $ (geneticAlgorithm 100 100 100 0.25 0.6 simpleEval)
, randBench "GA pm = 0.50" $ (geneticAlgorithm 100 100 100 0.50 0.6 simpleEval)
, randBench "GA pm = 0.75" $ (geneticAlgorithm 100 100 100 0.75 0.6 simpleEval)
, randBench "GA pm = 1.00" $ (geneticAlgorithm 100 100 100 1.00 0.6 simpleEval)
]
]
, bgroup "GA Crossover Rates"
[
randBench "GA pc = 0.25" $ (geneticAlgorithm 100 100 100 0.01 0.25 simpleEval)
, randBench "GA pc = 0.50" $ (geneticAlgorithm 100 100 100 0.01 0.50 simpleEval)
, randBench "GA pc = 0.75" $ (geneticAlgorithm 100 100 100 0.01 0.75 simpleEval)
, randBench "GA pc = 1.00" $ (geneticAlgorithm 100 100 100 0.01 1.00 simpleEval)
]
]
, bgroup "PM"
[
bgroup "PM Naive/Geometric"
[
randBench "PM Naive Seq" $ pointMutationNaive 0.01 8 $ pop32All0 100 10
, randBench "PM Naive Vect" $ pointMutationNaiveVector 0.01 8 (V.replicate 100 (V.replicate 10 0))
, randBench "PM Geometric" $ pointMutation 0.01 10 8 $ pop32All0 100 10
]
, bgroup "PM Mutation Rate"
[
bgroup "PM Naive Seq"
[
randBench "PM Naive Seq 0.2" $ pointMutationNaive 0.2 8 $ pop32All0 10 10
, randBench "PM Naive Seq 0.4" $ pointMutationNaive 0.4 8 $ pop32All0 10 10
, randBench "PM Naive Seq 0.6" $ pointMutationNaive 0.6 8 $ pop32All0 10 10
, randBench "PM Naive Seq 0.8" $ pointMutationNaive 0.8 8 $ pop32All0 10 10
, randBench "PM Naive Seq 1.0" $ pointMutationNaive 1.0 8 $ pop32All0 10 10
]
, bgroup "PM Naive Vect"
[
randBench "PM Naive Vect 0.2" $ pointMutationNaiveVector 0.2 8 (V.replicate 10 (V.replicate 10 0))
, randBench "PM Naive Vect 0.4" $ pointMutationNaiveVector 0.4 8 (V.replicate 10 (V.replicate 10 0))
, randBench "PM Naive Vect 0.6" $ pointMutationNaiveVector 0.6 8 (V.replicate 10 (V.replicate 10 0))
, randBench "PM Naive Vect 0.8" $ pointMutationNaiveVector 0.8 8 (V.replicate 10 (V.replicate 10 0))
, randBench "PM Naive Vect 1.0" $ pointMutationNaiveVector 1.0 8 (V.replicate 10 (V.replicate 10 0))
]
, bgroup "PM Geometric"
[
randBench "PM Geometric 0.2" $ pointMutation 0.2 10 8 $ pop32All0 10 10
, randBench "PM Geometric 0.4" $ pointMutation 0.4 10 8 $ pop32All0 10 10
, randBench "PM Geometric 0.6" $ pointMutation 0.6 10 8 $ pop32All0 10 10
, randBench "PM Geometric 0.8" $ pointMutation 0.8 10 8 $ pop32All0 10 10
, randBench "PM Geometric 1.0" $ pointMutation 1.0 10 8 $ pop32All0 10 10
]
]
, bgroup "PM Individual Size"
[
bgroup "Naive Seq"
[
randBench "PM Naive Seq 100" $ pointMutationNaive 0.01 8 $ pop32All0 10 100
, randBench "PM Naive Seq 200" $ pointMutationNaive 0.01 8 $ pop32All0 10 200
, randBench "PM Naive Seq 300" $ pointMutationNaive 0.01 8 $ pop32All0 10 300
, randBench "PM Naive Seq 400" $ pointMutationNaive 0.01 8 $ pop32All0 10 400
, randBench "PM Naive Seq 500" $ pointMutationNaive 0.01 8 $ pop32All0 10 500
]
, bgroup "Naive Vect"
[
randBench "PM Naive Vector 100" $ pointMutationNaiveVector 0.01 8 (V.replicate 10 (V.replicate 100 0))
, randBench "PM Naive Vector 200" $ pointMutationNaiveVector 0.01 8 (V.replicate 10 (V.replicate 200 0))
, randBench "PM Naive Vector 300" $ pointMutationNaiveVector 0.01 8 (V.replicate 10 (V.replicate 300 0))
, randBench "PM Naive Vector 400" $ pointMutationNaiveVector 0.01 8 (V.replicate 10 (V.replicate 400 0))
, randBench "PM Naive Vector 500" $ pointMutationNaiveVector 0.01 8 (V.replicate 10 (V.replicate 500 0))
]
, bgroup "Geometric"
[
randBench "PM Geometric 100" $ pointMutation 0.01 100 8 $ pop32All0 10 100
, randBench "PM Geometric 200" $ pointMutation 0.01 200 8 $ pop32All0 10 200
, randBench "PM Geometric 300" $ pointMutation 0.01 300 8 $ pop32All0 10 300
, randBench "PM Geometric 400" $ pointMutation 0.01 400 8 $ pop32All0 10 400
, randBench "PM Geometric 500" $ pointMutation 0.01 500 8 $ pop32All0 10 500
]
]
, bgroup "PM Population Size"
[
bgroup "Naive Seq"
[
randBench "PM Naive Seq 100" $ pointMutationNaive 0.01 8 $ pop32All0 100 10
, randBench "PM Naive Seq 200" $ pointMutationNaive 0.01 8 $ pop32All0 200 10
, randBench "PM Naive Seq 300" $ pointMutationNaive 0.01 8 $ pop32All0 300 10
, randBench "PM Naive Seq 400" $ pointMutationNaive 0.01 8 $ pop32All0 400 10
, randBench "PM Naive Seq 500" $ pointMutationNaive 0.01 8 $ pop32All0 500 10
]
, bgroup "Naive Vector"
[
randBench "PM Naive Vector 100" $ pointMutationNaiveVector 0.01 8 (V.replicate 100 (V.replicate 10 0))
, randBench "PM Naive Vector 200" $ pointMutationNaiveVector 0.01 8 (V.replicate 200 (V.replicate 10 0))
, randBench "PM Naive Vector 300" $ pointMutationNaiveVector 0.01 8 (V.replicate 300 (V.replicate 10 0))
, randBench "PM Naive Vector 400" $ pointMutationNaiveVector 0.01 8 (V.replicate 400 (V.replicate 10 0))
, randBench "PM Naive Vector 500" $ pointMutationNaiveVector 0.01 8 (V.replicate 500 (V.replicate 10 0))
]
, bgroup "Geometric"
[
randBench "PM Geometric 100" $ pointMutation 0.01 10 8 $ pop32All0 100 10
, randBench "PM Geometric 200" $ pointMutation 0.01 10 8 $ pop32All0 200 10
, randBench "PM Geometric 300" $ pointMutation 0.01 10 8 $ pop32All0 300 10
, randBench "PM Geometric 400" $ pointMutation 0.01 10 8 $ pop32All0 400 10
, randBench "PM Geometric 500" $ pointMutation 0.01 10 8 $ pop32All0 500 10
]
]
]
, let seqPop = S.replicate 100 (S.replicate 100 (0 :: Int))
vectPop = V.replicate 100 (V.replicate 100 (0 :: Int))
in
bgroup "Crossover"
[
bgroup "Crossover Naive/Geometric"
[
randBench "Crossover Naive Vector Seq" $ crossVectSeq 0.60 vectPop
, randBench "Crossover Naive Vector Par" $ crossVectPar 0.60 vectPop
, randBench "Crossover Naive Seq" $ crossoverSeq 0.60 seqPop
, randBench "Crossover Naive Par" $ crossoverPar 0.60 seqPop
, randBench "Crossover Geometric" $ crossover 0.60 seqPop
]
, bgroup "Crossover Rate"
[
bgroup "Crossover Naive Vector Sequential"
[
randBench "Crossover Naive Vector Seq 0.2" $ crossVectSeq 0.2 vectPop
, randBench "Crossover Naive Vector Seq 0.6" $ crossVectSeq 0.6 vectPop
, randBench "Crossover Naive Vector Seq 1.0" $ crossVectSeq 1.0 vectPop
]
, bgroup "Crossover Naive Vector Parallel"
[
randBench "Crossover Naive Vector Par 0.2" $ crossVectPar 0.2 vectPop
, randBench "Crossover Naive Vector Par 0.6" $ crossVectPar 0.6 vectPop
, randBench "Crossover Naive Vector Par 1.0" $ crossVectPar 1.0 vectPop
]
, bgroup "Crossover Naive Seq"
[
randBench "Crossover Naive Seq 0.2" $ crossoverSeq 0.2 seqPop
, randBench "Crossover Naive Seq 0.6" $ crossoverSeq 0.6 seqPop
, randBench "Crossover Naive Seq 1.0" $ crossoverSeq 1.0 seqPop
]
, bgroup "Crossover Naive Par"
[
randBench "Crossover Naive Par 0.2" $ crossoverPar 0.2 seqPop
, randBench "Crossover Naive Par 0.6" $ crossoverPar 0.6 seqPop
, randBench "Crossover Naive Par 1.0" $ crossoverPar 1.0 seqPop
]
, bgroup "Crossover Geometric"
[
randBench "Crossover Geometric 0.2" $ crossover 0.2 seqPop
, randBench "Crossover Geometric 0.6" $ crossover 0.6 seqPop
, randBench "Crossover Geometric 1.0" $ crossover 1.0 seqPop
]
]
, bgroup "Crossover Individual Size"
[
bgroup "Naive Vector Seq"
[
randBench "Crossover Naive Vector Seq 100" $ crossVectSeq 0.60 (V.replicate 100 (V.replicate 100 (0 :: Int)))
, randBench "Crossover Naive Vector Seq 250" $ crossVectSeq 0.60 (V.replicate 100 (V.replicate 250 (0 :: Int)))
, randBench "Crossover Naive Vector Seq 500" $ crossVectSeq 0.60 (V.replicate 100 (V.replicate 500 (0 :: Int)))
]
, bgroup "Naive Vector Par"
[
randBench "Crossover Naive Vector Par 100" $ crossVectPar 0.60 (V.replicate 100 (V.replicate 100 (0 :: Int)))
, randBench "Crossover Naive Vector Par 250" $ crossVectPar 0.60 (V.replicate 100 (V.replicate 250 (0 :: Int)))
, randBench "Crossover Naive Vector Par 500" $ crossVectPar 0.60 (V.replicate 100 (V.replicate 500 (0 :: Int)))
]
, bgroup "Naive Seq"
[
randBench "Crossover Naive Seq 100" $ crossoverSeq 0.60 (S.replicate 100 (S.replicate 100 (0 :: Int)))
, randBench "Crossover Naive Seq 250" $ crossoverSeq 0.60 (S.replicate 100 (S.replicate 250 (0 :: Int)))
, randBench "Crossover Naive Seq 500" $ crossoverSeq 0.60 (S.replicate 100 (S.replicate 500 (0 :: Int)))
]
, bgroup "Naive Par"
[
randBench "Crossover Naive Par 100" $ crossoverPar 0.60 (S.replicate 100 (S.replicate 100 (0 :: Int)))
, randBench "Crossover Naive Par 250" $ crossoverPar 0.60 (S.replicate 100 (S.replicate 250 (0 :: Int)))
, randBench "Crossover Naive Par 500" $ crossoverPar 0.60 (S.replicate 100 (S.replicate 500 (0 :: Int)))
]
, bgroup "Geometric"
[
randBench "Crossover Geometric 100" $ crossover 0.60 $ pop32All0 100 100
, randBench "Crossover Geometric 250" $ crossover 0.60 $ pop32All0 100 250
, randBench "Crossover Geometric 500" $ crossover 0.60 $ pop32All0 100 500
]
]
, bgroup "Crossover Population Size"
[
bgroup "Naive Vector Seq"
[
randBench "Crossover Naive Vector Seq 100" $ crossVectSeq 0.60 (V.replicate 100 (V.replicate 100 (0 :: Int)))
, randBench "Crossover Naive Vector Seq 250" $ crossVectSeq 0.60 (V.replicate 250 (V.replicate 100 (0 :: Int)))
, randBench "Crossover Naive Vector Seq 500" $ crossVectSeq 0.60 (V.replicate 500 (V.replicate 100 (0 :: Int)))
]
, bgroup "Naive Vector Par"
[
randBench "Crossover Naive Vector Par 100" $ crossVectPar 0.60 (V.replicate 100 (V.replicate 100 (0 :: Int)))
, randBench "Crossover Naive Vector Par 250" $ crossVectPar 0.60 (V.replicate 250 (V.replicate 100 (0 :: Int)))
, randBench "Crossover Naive Vector Par 500" $ crossVectPar 0.60 (V.replicate 500 (V.replicate 100 (0 :: Int)))
]
, bgroup "Naive Vector"
[
randBench "Crossover Naive Seq 100" $ crossoverSeq 0.60 (S.replicate 100 (S.replicate 100 (0 :: Int)))
, randBench "Crossover Naive Seq 250" $ crossoverSeq 0.60 (S.replicate 250 (S.replicate 100 (0 :: Int)))
, randBench "Crossover Naive Seq 500" $ crossoverSeq 0.60 (S.replicate 500 (S.replicate 100 (0 :: Int)))
]
, bgroup "Naive Vector"
[
randBench "Crossover Naive Par 100" $ crossoverPar 0.60 (S.replicate 100 (S.replicate 100 (0 :: Int)))
, randBench "Crossover Naive Par 250" $ crossoverPar 0.60 (S.replicate 250 (S.replicate 100 (0 :: Int)))
, randBench "Crossover Naive Par 500" $ crossoverPar 0.60 (S.replicate 500 (S.replicate 100 (0 :: Int)))
]
, bgroup "Geometric"
[
randBench "Crossover Geometric 100" $ crossover 0.60 $ pop32All0 100 100
, randBench "Crossover Geometric 250" $ crossover 0.60 $ pop32All0 250 100
, randBench "Crossover Geometric 500" $ crossover 0.60 $ pop32All0 500 100
]
]
]
, bgroup "Random"
[
bgroup "Random Lists IO"
[
bench "MWC 1 Doubles" $ nfIO $ runWithSystemRandom $ asRandIO $ replicateM 1 (MWC.uniform :: Rand IO Double)
, bench "PureMT 1 Doubles" $ nfIO $ runRandIO $ replicateM 1 (sample Uni.stdUniform :: RVar Double)
, bench "Probable 1 Doubles" $ nfIO $ mwc $ replicateM 1 double
, bench "MWC 100000 Doubles" $ nfIO $ runWithSystemRandom $ asRandIO $ replicateM 100000 (MWC.uniform :: Rand IO Double)
, bench "PureMT 100000 Doubles" $ nfIO $ runRandIO $ replicateM 100000 (sample Uni.stdUniform :: RVar Double)
, bench "Probable 100000 Doubles" $ nfIO $ mwc $ replicateM 100000 double
]
]
]
randBench name rand = bench name $ nfIO $ runRandIO rand
simpleEval = return . ones
|
nsmryan/Misc
|
bench/Bench.hs
|
bsd-3-clause
| 15,222
| 0
| 21
| 4,382
| 4,134
| 2,042
| 2,092
| 238
| 1
|
{-# LANGUAGE OverloadedStrings #-}
module ParserSpec where
import qualified Data.Attoparsec.Text as A
import Parser.PropType
import Test.HUnit
import Types.PropTypes as P
testParseInstanceOf = do
let parsedValue = A.parseOnly parseInstanceOf "instanceOf(Date)"
assertEqual "success - instanceOf(Date)" (Right $ InstanceOf "Date") parsedValue
let failedParsedValue = A.parseOnly parseInstanceOf "instanceof(Date)"
assertEqual "failure - instanceof(Date)" (Left "string") failedParsedValue
testParseArrayOf = do
let parsedValue = A.parseOnly parseArrayOf "arrayOf(string)"
assertEqual "success - arrayOf(string)" (Right $ ArrayOf P.String) parsedValue
let failedParsedValue = A.parseOnly parseArrayOf "arrayof(string)"
assertEqual "failure - arrayof(string)" (Left "string") failedParsedValue
testParseObjectOf = do
let parsedValue = A.parseOnly parseObjectOf "objectOf(bool)"
assertEqual "success - objectOf(string)" (Right $ ObjectOf P.Bool) parsedValue
let failedParsedValue = A.parseOnly parseObjectOf "objectof(string)"
assertEqual "failure - objectof(string)" (Left "string") failedParsedValue
testParseOneOfType = do
let parsedValue = A.parseOnly parseOneOfType "oneOfType([object, number, func])"
assertEqual "success - oneOfType([string, number, func])" (Right $ OneOfType [Object,Number,Func]) parsedValue
let missingBrackets = A.parseOnly parseOneOfType "oneOfType(string, noop, number)"
assertEqual "failure - missing brackets; oneOfType(string, noop, number)" (Left "string") missingBrackets
let failedParsedValue = A.parseOnly parseOneOfType "oneOfType(string, noop, number)"
assertEqual "failure - bad type; oneOfType([string, noop, number])" (Left "string") failedParsedValue
testParseOneOf = do
let parsedValue = A.parseOnly parseOneOf "oneOf([red, blue, green])"
assertEqual "success - oneOf([red, blue, green])" (Right $ OneOf ["red", "blue", "green"]) parsedValue
let missingBrackets = A.parseOnly parseOneOf "oneOf(red, blue, green)"
assertEqual "failure - missing brackets; oneOf(red, blue, green)" (Left "string") missingBrackets
testParseShape = do
let parsedValue = A.parseOnly parseShape "shape({name: string.isRequired, age: number, dob: instanceOf(Date)})"
let shape =
Shape [
Prop {_name = "name", _propType = P.String, _required = Required}
, Prop {_name = "age", _propType = P.Number, _required = Optional}
, Prop {_name = "dob", _propType = P.InstanceOf "Date", _required = Optional}
]
assertEqual "success - shape({...})" (Right $ shape) parsedValue
let malformedString = A.parseOnly parseShape "shapeOf({name: string.isRequired, age: number, dob: instanceOf(Date)})"
assertEqual "failure - malformed string; shape({...})" (Left "string") malformedString
|
tpoulsen/generate-component
|
test/ParserSpec.hs
|
bsd-3-clause
| 2,825
| 0
| 15
| 436
| 610
| 301
| 309
| 43
| 1
|
{-# LANGUAGE TypeSynonymInstances #-}
-- | Parses the input string into Haskell data.
-- Uses a parser combinator library called "Parsec",
-- which is commonly used in production code.
-- "Parsec" parsers produce good error messages
-- when given malformed input.
-- The style in which it's used is known as applicative -
-- compare 'p_location' and 'p_location_monadStyle' to
-- contrast applicative and monadic styles.
module Parser
where
import Commands
import Geometry
import Environment
import Text.ParserCombinators.Parsec hiding (optional)
import Control.Applicative hiding ((<|>))
-- |Returns the parsed 'OverallInput' from the input string.
-- Helper function that just runs the parser against the input.
parseInput :: String -> Either ParseError OverallInput
parseInput input = parse p_overallInput "(test string)" input
-- |A 'Plateau' of the given width and height (or 'PlateauError' if dimensions are bad)
p_plateau :: CharParser st PlateauOrError
p_plateau = mkPlateau <$> p_int <* p_whiteSpace <*> p_int
-- |One or more whitespace characters
p_whiteSpace :: CharParser st String
p_whiteSpace = many1 $ oneOf [' ','\t']
-- |End of line (with possible leading whitespace)
p_newLine :: CharParser st Char
p_newLine = optional p_whiteSpace *> newline
-- |A 'Rover' with its 'Location' and 'CompassDirection'
p_rover :: CharParser st RoverPos
p_rover = mkRover <$> p_location <* p_whiteSpace <*> p_heading
-- |Tuple up two integers. Written in applicative style.
p_location :: CharParser st Location
p_location = (,) <$> p_int <* p_whiteSpace <*> p_int
-- |Same as 'p_location' but using monadic style.
-- (For parsers, monadic style is longer than applicative style
-- but the named values (@x@ and @y@ in this case) can make
-- the code clearer, especially for readers who aren't used to
-- an applicative style.)
p_location_monadStyle :: CharParser st Location
p_location_monadStyle = do x <- p_int
p_whiteSpace
y <- p_int
return (x,y)
-- |A compass direction
p_heading :: CharParser st Heading
p_heading = N <$ char 'N'
<|> E <$ char 'E'
<|> S <$ char 'S'
<|> W <$ char 'W'
-- |A 'Command'
p_command :: CharParser st Command
p_command = constChar (Turn AntiClockwise) 'L'
<|> constChar (Turn Clockwise) 'R'
<|> constChar Forwards 'M'
-- |A parser that reads the given character and returns the given value
constChar :: a -> Char -> CharParser st a
constChar val ch = const val <$> char ch
-- |Parses an 'Int'. Allows an optional leading minus sign. Reads the digits
-- and calls the built-in 'read' function to convert them to an 'Int'.
p_int :: CharParser st Int
p_int = read <$> s <?> "integer"
where
s = do
maybeMinus <- optional $ char '-'
digits <- many1 digit
return $ maybe digits (: digits) maybeMinus
-- |The input for a single rover
p_roverInput :: CharParser st RoverInput
p_roverInput = RoverInput <$> p_rover <* p_newLine <*> many1 p_command
-- |Parses the overall input
p_overallInput :: CharParser st OverallInput
p_overallInput = OverallInput <$> p_plateau <* p_newLine <*> p_roverInput `sepEndBy` p_newLine
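-- Illustrative sketch only: running the complete parser on a typical
-- two-rover input. The concrete 'OverallInput' value produced depends on the
-- 'Commands' and 'Geometry' modules and is not shown here.
exampleParse :: Either ParseError OverallInput
exampleParse = parseInput "5 5\n1 2 N\nLMLMLMLMM\n3 3 E\nMMRMMRMRRM\n"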
|
garethrowlands/marsrover
|
src/Parser.hs
|
bsd-3-clause
| 3,240
| 0
| 12
| 679
| 551
| 295
| 256
| 45
| 1
|
{-# LANGUAGE GADTs, FlexibleInstances #-}
-- | Tags represent the coordinates of products. When we want to select a product we give a subset of its tags. An empty set of tags selects any product.
-- A Tags value is used either as a selector or as a definition; although mathematically identical, the two are kept separate for their semantics. The 'selezione' function distinguishes them in its signature.
module Tags (selezione, Tags, Selettore, Definizione, mkSelettore, mkDefinizione, addTag, removeTag) where
import Data.Set (Set, isSubsetOf, fromList, insert, delete)
import Data.Binary (Binary (..))
-- | A placeholder for selectors.
data Selettore
-- | A placeholder for definitions
data Definizione
-- | Tags are, mathematically, sets of mutually distinct elements
data Tags a t where
Selettore :: Set a -> Tags a Selettore
Definizione :: Set a -> Tags a Definizione
instance (Ord a, Binary a) => Binary (Tags a Selettore) where
put (Selettore x) = put x
get = Selettore `fmap` get
instance (Ord a, Binary a) => Binary (Tags a Definizione) where
put (Definizione x) = put x
get = Definizione `fmap` get
setOf (Definizione x) = x
-- | select the products that match a given choice of tags
selezione :: (Ord a) => (b -> Tags a Definizione) -> Tags a Selettore -> [b] -> [b]
selezione defOf (Selettore x) = filter $ isSubsetOf x . setOf . defOf
-- | a new selector from a list of tags
mkSelettore :: Ord a => [a] -> Tags a Selettore
mkSelettore = Selettore . fromList
-- | a new definition from a list of tags
mkDefinizione :: Ord a => [a] -> Tags a Definizione
mkDefinizione = Definizione . fromList
change :: Ord a => (a -> Set a -> Set a) -> a -> Tags a t -> Tags a t
change f a (Selettore s) = Selettore (a `f` s)
change f a (Definizione s) = Definizione (a `f` s)
addTag, removeTag :: Ord a => a -> Tags a t -> Tags a t
addTag = change insert
removeTag = change delete
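-- Illustrative usage sketch; the 'Product' type, its field names and
-- 'selectOrganic' are hypothetical, not part of this module. A selector
-- keeps exactly those products whose definition contains all of its tags.
data Product = Product
  { productName :: String
  , productTags :: Tags String Definizione
  }

selectOrganic :: [Product] -> [Product]
selectOrganic = selezione productTags (mkSelettore ["organic"])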
|
paolino/consumattori
|
Tags.hs
|
bsd-3-clause
| 1,918
| 2
| 10
| 358
| 560
| 302
| 258
| -1
| -1
|
{-# LANGUAGE OverloadedStrings, ScopedTypeVariables, ExistentialQuantification,
TemplateHaskell, RecordWildCards, FlexibleContexts #-}
-- |This module exports basic WD actions that can be used to interact with a
-- browser session.
module Test.WebDriver.Commands
( -- * Sessions
createSession, closeSession, sessions, getCaps
-- * Browser interaction
-- ** Web navigation
, openPage, forward, back, refresh
-- ** Page info
, getCurrentURL, getSource, getTitle, screenshot, screenshotBase64
-- * Timeouts
, setImplicitWait, setScriptTimeout, setPageLoadTimeout
-- * Web elements
, Element(..), Selector(..)
-- ** Searching for elements
, findElem, findElems, findElemFrom, findElemsFrom
-- ** Interacting with elements
, click, submit, getText
-- *** Sending key inputs to elements
, sendKeys, sendRawKeys, clearInput
-- ** Element information
, attr, cssProp, elemPos, elemSize
, isSelected, isEnabled, isDisplayed
, tagName, activeElem, elemInfo
-- ** Element equality
, (<==>), (</=>)
-- * Javascript
, executeJS, asyncJS
, JSArg(..)
-- * Windows
, WindowHandle(..), currentWindow
, getCurrentWindow, closeWindow, windows, focusWindow, maximize
, getWindowSize, setWindowSize, getWindowPos, setWindowPos
-- * Focusing on frames
, focusFrame, FrameSelector(..)
-- * Cookies
, Cookie(..), mkCookie
, cookies, setCookie, deleteCookie, deleteVisibleCookies, deleteCookieByName
-- * Alerts
, getAlertText, replyToAlert, acceptAlert, dismissAlert
-- * Mouse gestures
, moveTo, moveToCenter, moveToFrom
, clickWith, MouseButton(..)
, mouseDown, mouseUp, withMouseDown, doubleClick
-- * HTML 5 Web Storage
, WebStorageType(..), storageSize, getAllKeys, deleteAllKeys
, getKey, setKey, deleteKey
-- * HTML 5 Application Cache
, ApplicationCacheStatus(..)
, getApplicationCacheStatus
-- * Mobile device support
-- ** Screen orientation
, Orientation(..)
, getOrientation, setOrientation
-- ** Geo-location
, getLocation, setLocation
-- ** Touch gestures
, touchClick, touchDown, touchUp, touchMove
, touchScroll, touchScrollFrom, touchDoubleClick
, touchLongClick, touchFlick, touchFlickFrom
-- * IME support
, availableIMEEngines, activeIMEEngine, checkIMEActive
, activateIME, deactivateIME
-- * Uploading files to remote server
-- |These functions allow you to upload a file to a remote server.
-- Note that this operation isn't supported by all WebDriver servers,
-- and the location where the file is stored is not standardized.
, uploadFile, uploadRawFile, uploadZipEntry
-- * Server information and logs
, serverStatus
, getLogs, getLogTypes, LogType, LogEntry(..), LogLevel(..)
) where
import Test.WebDriver.Commands.Internal
import Test.WebDriver.Class
import Test.WebDriver.Session
import Test.WebDriver.JSON
import Test.WebDriver.Capabilities
import Test.WebDriver.Internal
import Test.WebDriver.Utils (urlEncode)
import Data.Aeson
import Data.Aeson.Types
import Data.Aeson.TH
import qualified Data.Text as T
import Data.Text (Text, append, toUpper, toLower)
import Data.ByteString.Base64.Lazy as B64
import Data.ByteString.Lazy as LBS (ByteString)
import Network.URI hiding (path) -- suppresses warnings
import Codec.Archive.Zip
import qualified Data.Text.Lazy.Encoding as TL
import Network.HTTP.Types.Header (RequestHeaders)
import Control.Applicative
import Control.Monad.State.Strict
import Control.Monad.Base
import Control.Exception (SomeException)
import Control.Exception.Lifted (throwIO, handle)
import qualified Control.Exception.Lifted as L
import Data.Word
import Data.String (fromString)
import Data.Maybe
import qualified Data.Char as C
-- |Convenience function to handle webdriver commands with no return value
noReturn :: WebDriver wd => wd NoReturn -> wd ()
noReturn = void
-- |Convenience function to ignore result of a webdriver command
ignoreReturn :: WebDriver wd => wd Value -> wd ()
ignoreReturn = void
-- |Create a new session with the given 'Capabilities'.
createSession :: WebDriver wd => RequestHeaders -> Capabilities -> wd WDSession
createSession headers caps = do
ignoreReturn . doCommand headers methodPost "/session" . single "desiredCapabilities" $ caps
getSession
-- |Retrieve a list of active sessions and their 'Capabilities'.
sessions :: WebDriver wd => RequestHeaders -> wd [(SessionId, Capabilities)]
sessions headers = do
objs <- doCommand headers methodGet "/sessions" Null
forM objs $ parsePair "id" "capabilities" "sessions"
-- |Get the actual 'Capabilities' of the current session.
getCaps :: WebDriver wd => wd Capabilities
getCaps = doSessCommand methodGet "" Null
-- |Close the current session and the browser associated with it.
closeSession :: WebDriver wd => wd ()
closeSession = do s@WDSession {..} <- getSession
noReturn $ doSessCommand methodDelete "" Null
putSession s { wdSessId = Nothing }
-- |Sets the amount of time (ms) we implicitly wait when searching for elements.
setImplicitWait :: WebDriver wd => Integer -> wd ()
setImplicitWait ms =
noReturn $ doSessCommand methodPost "/timeouts/implicit_wait" (object msField)
`L.catch` \(_ :: SomeException) ->
doSessCommand methodPost "/timeouts" (object allFields)
where msField = ["ms" .= ms]
allFields = ["type" .= ("implicit" :: String)] ++ msField
-- |Sets the amount of time (ms) we wait for an asynchronous script to return a
-- result.
setScriptTimeout :: WebDriver wd => Integer -> wd ()
setScriptTimeout ms =
noReturn $ doSessCommand methodPost "/timeouts/async_script" (object msField)
`L.catch` \( _ :: SomeException) ->
doSessCommand methodPost "/timeouts" (object allFields)
where msField = ["ms" .= ms]
allFields = ["type" .= ("script" :: String)] ++ msField
-- |Sets the amount of time (ms) to wait for a page to finish loading before throwing a 'Timeout' exception.
setPageLoadTimeout :: WebDriver wd => Integer -> wd ()
setPageLoadTimeout ms = noReturn $ doSessCommand methodPost "/timeouts" params
where params = object ["type" .= ("page load" :: String)
,"ms" .= ms ]
-- |Gets the URL of the current page.
getCurrentURL :: WebDriver wd => wd String
getCurrentURL = doSessCommand methodGet "/url" Null
-- |Opens a new page by the given URL.
openPage :: WebDriver wd => String -> wd ()
openPage url
| isURI url = noReturn . doSessCommand methodPost "/url" . single "url" $ url
| otherwise = throwIO . InvalidURL $ url
-- |Navigate forward in the browser history.
forward :: WebDriver wd => wd ()
forward = noReturn $ doSessCommand methodPost "/forward" Null
-- |Navigate backward in the browser history.
back :: WebDriver wd => wd ()
back = noReturn $ doSessCommand methodPost "/back" Null
-- |Refresh the current page
refresh :: WebDriver wd => wd ()
refresh = noReturn $ doSessCommand methodPost "/refresh" Null
-- |An existential wrapper for any 'ToJSON' instance. This allows us to pass
-- parameters of many different types to Javascript code.
data JSArg = forall a. ToJSON a => JSArg a
instance ToJSON JSArg where
toJSON (JSArg a) = toJSON a
{- |Inject a snippet of Javascript into the page for execution in the
context of the currently selected frame. The executed script is
assumed to be synchronous and the result of evaluating the script is
returned and converted to an instance of FromJSON.
The first parameter defines arguments to pass to the javascript
function. Arguments of type Element will be converted to the
corresponding DOM element. Likewise, any elements in the script result
will be returned to the client as Elements.
The second parameter defines the script itself in the form of a
function body. The value returned by that function will be returned to
the client. The function will be invoked with the provided argument
list and the values may be accessed via the arguments object in the
order specified.
-}
executeJS :: (WebDriver wd, FromJSON a) => [JSArg] -> Text -> wd a
executeJS a s = fromJSON' =<< getResult
where
getResult = doSessCommand methodPost "/execute" . pair ("args", "script") $ (a,s)
{- |Executes a snippet of Javascript code asynchronously. This function works
similarly to 'executeJS', except that the Javascript is passed a callback
function as its final argument. The script should call this function
to signal that it has finished executing, passing to it a value that will be
returned as the result of asyncJS. A result of Nothing indicates that the
Javascript function timed out (see 'setScriptTimeout')
-}
asyncJS :: (WebDriver wd, FromJSON a) => [JSArg] -> Text -> wd (Maybe a)
asyncJS a s = handle timeout $ Just <$> (fromJSON' =<< getResult)
where
getResult = doSessCommand methodPost "/execute_async" . pair ("args", "script")
$ (a,s)
timeout (FailedCommand Timeout _) = return Nothing
timeout (FailedCommand ScriptTimeout _) = return Nothing
timeout err = throwIO err
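-- Illustrative sketches only; the function names and Javascript snippets are
-- hypothetical. They show passing 'JSArg's and decoding the JSON result.
pageTitleViaJS :: WebDriver wd => wd Text
pageTitleViaJS = executeJS [] "return document.title;"

linkCount :: WebDriver wd => Element -> wd Integer
linkCount el =
  executeJS [JSArg el] "return arguments[0].getElementsByTagName('a').length;"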
-- |Grab a screenshot of the current page as a PNG image
screenshot :: WebDriver wd => wd LBS.ByteString
screenshot = B64.decodeLenient <$> screenshotBase64
-- |Grab a screenshot as a base-64 encoded PNG image. This is the protocol-defined format.
screenshotBase64 :: WebDriver wd => wd LBS.ByteString
screenshotBase64 = TL.encodeUtf8 <$> doSessCommand methodGet "/screenshot" Null
availableIMEEngines :: WebDriver wd => wd [Text]
availableIMEEngines = doSessCommand methodGet "/ime/available_engines" Null
activeIMEEngine :: WebDriver wd => wd Text
activeIMEEngine = doSessCommand methodGet "/ime/active_engine" Null
checkIMEActive :: WebDriver wd => wd Bool
checkIMEActive = doSessCommand methodGet "/ime/activated" Null
activateIME :: WebDriver wd => Text -> wd ()
activateIME = noReturn . doSessCommand methodPost "/ime/activate" . single "engine"
deactivateIME :: WebDriver wd => wd ()
deactivateIME = noReturn $ doSessCommand methodPost "/ime/deactivate" Null
-- |Specifies the frame used by 'Test.WebDriver.Commands.focusFrame'
data FrameSelector = WithIndex Integer
-- |focus on a frame by name or ID
| WithName Text
-- |focus on a frame 'Element'
| WithElement Element
-- |focus on the first frame, or the main document
-- if iframes are used.
| DefaultFrame
deriving (Eq, Show, Read)
instance ToJSON FrameSelector where
toJSON s = case s of
WithIndex i -> toJSON i
WithName n -> toJSON n
WithElement e -> toJSON e
DefaultFrame -> Null
-- |Switch focus to the frame specified by the FrameSelector.
focusFrame :: WebDriver wd => FrameSelector -> wd ()
focusFrame s = noReturn $ doSessCommand methodPost "/frame" . single "id" $ s
-- |Returns a handle to the currently focused window
getCurrentWindow :: WebDriver wd => wd WindowHandle
getCurrentWindow = doSessCommand methodGet "/window_handle" Null
-- |Returns a list of all windows available to the session
windows :: WebDriver wd => wd [WindowHandle]
windows = doSessCommand methodGet "/window_handles" Null
focusWindow :: WebDriver wd => WindowHandle -> wd ()
focusWindow w = noReturn $ doSessCommand methodPost "/window" . single "name" $ w
-- |Closes the given window
closeWindow :: WebDriver wd => WindowHandle -> wd ()
closeWindow = noReturn . doSessCommand methodDelete "/window" . single "name"
-- |Maximizes the current window if not already maximized
maximize :: WebDriver wd => wd ()
maximize = noReturn $ doWinCommand methodGet currentWindow "/maximize" Null
-- |Get the dimensions of the current window.
getWindowSize :: WebDriver wd => wd (Word, Word)
getWindowSize = doWinCommand methodGet currentWindow "/size" Null
>>= parsePair "width" "height" "getWindowSize"
-- |Set the dimensions of the current window.
setWindowSize :: WebDriver wd => (Word, Word) -> wd ()
setWindowSize = noReturn . doWinCommand methodPost currentWindow "/size"
. pair ("width", "height")
-- |Get the coordinates of the current window.
getWindowPos :: WebDriver wd => wd (Int, Int)
getWindowPos = doWinCommand methodGet currentWindow "/position" Null
>>= parsePair "x" "y" "getWindowPos"
-- |Set the coordinates of the current window.
setWindowPos :: WebDriver wd => (Int, Int) -> wd ()
setWindowPos = noReturn . doWinCommand methodPost currentWindow "/position" . pair ("x","y")
-- |Cookies are delicious delicacies. When sending cookies to the server, a value
-- of Nothing indicates that the server should use a default value. When receiving
-- cookies from the server, a value of Nothing indicates that the server is unable
-- to specify the value.
data Cookie = Cookie { cookName :: Text
                     , cookValue :: Text -- ^value of this cookie.
, cookPath :: Maybe Text -- ^path of this cookie.
-- if Nothing, defaults to /
, cookDomain :: Maybe Text -- ^domain of this cookie.
                                                  -- if Nothing, the current page's
-- domain is used
, cookSecure :: Maybe Bool -- ^Is this cookie secure?
, cookExpiry :: Maybe Integer -- ^Expiry date expressed as
-- seconds since the Unix epoch
-- Nothing indicates that the
-- cookie never expires
} deriving (Eq, Show)
-- |Creates a Cookie with only a name and value specified. All other
-- fields are set to Nothing, which tells the server to use default values.
mkCookie :: Text -> Text -> Cookie
mkCookie name value = Cookie { cookName = name, cookValue = value,
cookPath = Nothing, cookDomain = Nothing,
cookSecure = Nothing, cookExpiry = Nothing
}
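-- Illustrative sketch; 'rememberUser' and the cookie name are hypothetical.
-- It builds a cookie with 'mkCookie' and overrides one optional field before
-- sending it with 'setCookie' (defined below).
rememberUser :: WebDriver wd => Text -> wd ()
rememberUser token =
  setCookie $ (mkCookie "user_token" token) { cookSecure = Just True }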
instance FromJSON Cookie where
parseJSON (Object o) = Cookie <$> req "name"
<*> req "value"
<*> opt "path" Nothing
<*> opt "domain" Nothing
<*> opt "secure" Nothing
<*> opt "expiry" Nothing
where
req :: FromJSON a => Text -> Parser a
req = (o .:)
opt :: FromJSON a => Text -> a -> Parser a
opt k d = o .:? k .!= d
parseJSON v = typeMismatch "Cookie" v
-- |Retrieve all cookies visible to the current page.
cookies :: WebDriver wd => wd [Cookie]
cookies = doSessCommand methodGet "/cookie" Null
-- |Set a cookie. If the cookie path is not specified, it will default to \"/\".
-- Likewise, if the domain is omitted, it will default to the current page's
-- domain
setCookie :: WebDriver wd => Cookie -> wd ()
setCookie = noReturn . doSessCommand methodPost "/cookie" . single "cookie"
-- |Delete a cookie. This will do nothing if the cookie isn't visible to the
-- current page.
deleteCookie :: WebDriver wd => Cookie -> wd ()
deleteCookie c = noReturn $ doSessCommand methodDelete ("/cookie/" `append` urlEncode (cookName c)) Null
deleteCookieByName :: WebDriver wd => Text -> wd ()
deleteCookieByName n = noReturn $ doSessCommand methodDelete ("/cookie/" `append` n) Null
-- |Delete all visible cookies on the current page.
deleteVisibleCookies :: WebDriver wd => wd ()
deleteVisibleCookies = noReturn $ doSessCommand methodDelete "/cookie" Null
-- |Get the current page source
getSource :: WebDriver wd => wd Text
getSource = doSessCommand methodGet "/source" Null
-- |Get the title of the current page.
getTitle :: WebDriver wd => wd Text
getTitle = doSessCommand methodGet "/title" Null
-- |Specifies element(s) within a DOM tree using various selection methods.
data Selector = ById Text
| ByName Text
| ByClass Text -- ^ (Note: multiple classes are not
-- allowed. For more control, use 'ByCSS')
| ByTag Text
| ByLinkText Text
| ByPartialLinkText Text
| ByCSS Text
| ByXPath Text
deriving (Eq, Show, Ord)
instance ToJSON Selector where
toJSON s = case s of
ById t -> selector "id" t
ByName t -> selector "name" t
ByClass t -> selector "class name" t
ByTag t -> selector "tag name" t
ByLinkText t -> selector "link text" t
ByPartialLinkText t -> selector "partial link text" t
ByCSS t -> selector "css selector" t
ByXPath t -> selector "xpath" t
where
selector :: Text -> Text -> Value
selector sn t = object ["using" .= sn, "value" .= t]
-- |Find an element on the page using the given element selector.
findElem :: WebDriver wd => Selector -> wd Element
findElem = doSessCommand methodPost "/element"
-- |Find all elements on the page matching the given selector.
findElems :: WebDriver wd => Selector -> wd [Element]
findElems = doSessCommand methodPost "/elements"
-- |Return the element that currently has focus.
activeElem :: WebDriver wd => wd Element
activeElem = doSessCommand methodPost "/element/active" Null
-- |Search for an element using the given element as root.
findElemFrom :: WebDriver wd => Element -> Selector -> wd Element
findElemFrom e = doElemCommand methodPost e "/element"
-- |Find all elements matching a selector, using the given element as root.
findElemsFrom :: WebDriver wd => Element -> Selector -> wd [Element]
findElemsFrom e = doElemCommand methodPost e "/elements"
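-- Illustrative sketch; the CSS selectors and the function name are
-- hypothetical. It locates a form by selector and then searches within it.
clickFirstSubmit :: WebDriver wd => wd ()
clickFirstSubmit = do
  loginForm <- findElem (ByCSS "form#login")
  submitBtn <- findElemFrom loginForm (ByCSS "button[type=submit]")
  click submitBtn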
-- |Describe the element. Returns a JSON object whose meaning is currently
-- undefined by the WebDriver protocol.
elemInfo :: WebDriver wd => Element -> wd Value
elemInfo e = doElemCommand methodGet e "" Null
-- |Click on an element.
click :: WebDriver wd => Element -> wd ()
click e = noReturn $ doElemCommand methodPost e "/click" Null
-- |Submit a form element. This may be applied to descendants of a form element
-- as well.
submit :: WebDriver wd => Element -> wd ()
submit e = noReturn $ doElemCommand methodPost e "/submit" Null
-- |Get all visible text within this element.
getText :: WebDriver wd => Element -> wd Text
getText e = doElemCommand methodGet e "/text" Null
-- |Send a sequence of keystrokes to an element. All modifier keys are released
-- at the end of the function. For more information about modifier keys, see
-- <http://code.google.com/p/selenium/wiki/JsonWireProtocol#/session/:sessionId/element/:id/value>
sendKeys :: WebDriver wd => Text -> Element -> wd ()
sendKeys t e = noReturn . doElemCommand methodPost e "/value" . single "value" $ [t]
-- |Similar to sendKeys, but doesn't implicitly release modifier keys
-- afterwards. This allows you to combine modifiers with mouse clicks.
sendRawKeys :: WebDriver wd => Text -> Element -> wd ()
sendRawKeys t e = noReturn . doElemCommand methodPost e "/keys" . single "value" $ [t]
-- |Return the tag name of the given element.
tagName :: WebDriver wd => Element -> wd Text
tagName e = doElemCommand methodGet e "/name" Null
-- |Clear a textarea or text input element's value.
clearInput :: WebDriver wd => Element -> wd ()
clearInput e = noReturn $ doElemCommand methodPost e "/clear" Null
-- |Determine if the element is selected.
isSelected :: WebDriver wd => Element -> wd Bool
isSelected e = doElemCommand methodGet e "/selected" Null
-- |Determine if the element is enabled.
isEnabled :: WebDriver wd => Element -> wd Bool
isEnabled e = doElemCommand methodGet e "/enabled" Null
-- |Determine if the element is displayed.
isDisplayed :: WebDriver wd => Element -> wd Bool
isDisplayed e = doElemCommand methodGet e "/displayed" Null
-- |Retrieve the value of an element's attribute
attr :: WebDriver wd => Element -> Text -> wd (Maybe Text)
attr e t = doElemCommand methodGet e ("/attribute/" `append` urlEncode t) Null
-- |Retrieve the value of an element's computed CSS property
cssProp :: WebDriver wd => Element -> Text -> wd (Maybe Text)
cssProp e t = doElemCommand methodGet e ("/css/" `append` urlEncode t) Null
-- |Retrieve an element's current position.
elemPos :: WebDriver wd => Element -> wd (Int, Int)
elemPos e = doElemCommand methodGet e "/location" Null >>= parsePair "x" "y" "elemPos"
-- |Retrieve an element's current size.
elemSize :: WebDriver wd => Element -> wd (Word, Word)
elemSize e = doElemCommand methodGet e "/size" Null
>>= parsePair "width" "height" "elemSize"
infix 4 <==>
-- |Determines if two element identifiers refer to the same element.
(<==>) :: WebDriver wd => Element -> Element -> wd Bool
e1 <==> (Element e2) = doElemCommand methodGet e1 ("/equals/" `append` urlEncode e2) Null
-- |Determines if two element identifiers refer to different elements.
infix 4 </=>
(</=>) :: WebDriver wd => Element -> Element -> wd Bool
e1 </=> e2 = not <$> (e1 <==> e2)
-- |A screen orientation
data Orientation = Landscape | Portrait
deriving (Eq, Show, Ord, Bounded, Enum)
instance ToJSON Orientation where
toJSON = String . toUpper . fromString . show
instance FromJSON Orientation where
parseJSON (String jStr) = case toLower jStr of
"landscape" -> return Landscape
"portrait" -> return Portrait
err -> fail $ "Invalid Orientation string " ++ show err
parseJSON v = typeMismatch "Orientation" v
-- |Get the current screen orientation for rotatable display devices.
getOrientation :: WebDriver wd => wd Orientation
getOrientation = doSessCommand methodGet "/orientation" Null
-- |Set the current screen orientation for rotatable display devices.
setOrientation :: WebDriver wd => Orientation -> wd ()
setOrientation = noReturn . doSessCommand methodPost "/orientation" . single "orientation"
-- |Get the text of an alert dialog.
getAlertText :: WebDriver wd => wd Text
getAlertText = doSessCommand methodGet "/alert_text" Null
-- |Sends keystrokes to Javascript prompt() dialog.
replyToAlert :: WebDriver wd => Text -> wd ()
replyToAlert = noReturn . doSessCommand methodPost "/alert_text" . single "text"
-- |Accepts the currently displayed alert dialog.
acceptAlert :: WebDriver wd => wd ()
acceptAlert = noReturn $ doSessCommand methodPost "/accept_alert" Null
-- |Dismisses the currently displayed alert dialog.
dismissAlert :: WebDriver wd => wd ()
dismissAlert = noReturn $ doSessCommand methodPost "/dismiss_alert" Null
-- |Moves the mouse to the given position relative to the active element.
moveTo :: WebDriver wd => (Int, Int) -> wd ()
moveTo = noReturn . doSessCommand methodPost "/moveto" . pair ("xoffset","yoffset")
-- |Moves the mouse to the center of a given element.
moveToCenter :: WebDriver wd => Element -> wd ()
moveToCenter (Element e) =
noReturn . doSessCommand methodPost "/moveto" . single "element" $ e
-- |Moves the mouse to the given position relative to the given element.
moveToFrom :: WebDriver wd => (Int, Int) -> Element -> wd ()
moveToFrom (x,y) (Element e) =
noReturn . doSessCommand methodPost "/moveto"
. triple ("element","xoffset","yoffset") $ (e,x,y)
-- |A mouse button
data MouseButton = LeftButton | MiddleButton | RightButton
deriving (Eq, Show, Ord, Bounded, Enum)
instance ToJSON MouseButton where
toJSON = toJSON . fromEnum
instance FromJSON MouseButton where
parseJSON v = do
n <- parseJSON v
case n :: Integer of
0 -> return LeftButton
1 -> return MiddleButton
2 -> return RightButton
err -> fail $ "Invalid JSON for MouseButton: " ++ show err
-- |Click at the current mouse position with the given mouse button.
clickWith :: WebDriver wd => MouseButton -> wd ()
clickWith = noReturn . doSessCommand methodPost "/click" . single "button"
-- |Perform the given action with the left mouse button held down. The mouse
-- is automatically released afterwards.
withMouseDown :: WebDriver wd => wd a -> wd a
withMouseDown wd = mouseDown >> wd <* mouseUp
-- |Press and hold the left mouse button down. Note that undefined behavior
-- occurs if the next mouse command is not mouseUp.
mouseDown :: WebDriver wd => wd ()
mouseDown = noReturn $ doSessCommand methodPost "/buttondown" Null
-- |Release the left mouse button.
mouseUp :: WebDriver wd => wd ()
mouseUp = noReturn $ doSessCommand methodPost "/buttonup" Null
-- |Double click at the current mouse location.
doubleClick :: WebDriver wd => wd ()
doubleClick = noReturn $ doSessCommand methodPost "/doubleclick" Null
-- |Single tap on the touch screen at the given element's location.
touchClick :: WebDriver wd => Element -> wd ()
touchClick (Element e) =
noReturn . doSessCommand methodPost "/touch/click" . single "element" $ e
-- |Emulates pressing a finger down on the screen at the given location.
touchDown :: WebDriver wd => (Int, Int) -> wd ()
touchDown = noReturn . doSessCommand methodPost "/touch/down" . pair ("x","y")
-- |Emulates removing a finger from the screen at the given location.
touchUp :: WebDriver wd => (Int, Int) -> wd ()
touchUp = noReturn . doSessCommand methodPost "/touch/up" . pair ("x","y")
-- |Emulates moving a finger on the screen to the given location.
touchMove :: WebDriver wd => (Int, Int) -> wd ()
touchMove = noReturn . doSessCommand methodPost "/touch/move" . pair ("x","y")
-- |Emulate finger-based touch scroll. Use this function if you don't care where
-- the scroll begins
touchScroll :: WebDriver wd => (Int, Int) -> wd ()
touchScroll = noReturn . doSessCommand methodPost "/touch/scroll" . pair ("xoffset","yoffset")
-- |Emulate finger-based touch scroll, starting from the given location relative
-- to the given element.
touchScrollFrom :: WebDriver wd => (Int, Int) -> Element -> wd ()
touchScrollFrom (x, y) (Element e) =
noReturn
. doSessCommand methodPost "/touch/scroll"
. triple ("xoffset", "yoffset", "element")
$ (x, y, e)
-- |Emulate a double click on a touch device.
touchDoubleClick :: WebDriver wd => Element -> wd ()
touchDoubleClick (Element e) =
noReturn
. doSessCommand methodPost "/touch/doubleclick"
. single "element" $ e
-- |Emulate a long click on a touch device.
touchLongClick :: WebDriver wd => Element -> wd ()
touchLongClick (Element e) =
noReturn
. doSessCommand methodPost "/touch/longclick"
. single "element" $ e
-- |Emulate a flick on the touch screen. The coordinates indicate x and y
-- velocity, respectively. Use this function if you don't care where the
-- flick starts.
touchFlick :: WebDriver wd => (Int, Int) -> wd ()
touchFlick =
noReturn
. doSessCommand methodPost "/touch/flick"
. pair ("xSpeed", "ySpeed")
-- |Emulate a flick on the touch screen.
touchFlickFrom :: WebDriver wd =>
Int -- ^ flick velocity
-> (Int, Int) -- ^ a location relative to the given element
-> Element -- ^ the given element
-> wd ()
touchFlickFrom s (x,y) (Element e) =
noReturn
. doSessCommand methodPost "/touch/flick" . object $
["xoffset" .= x
,"yoffset" .= y
,"speed" .= s
,"element" .= e
]
-- |Get the current geographical location of the device.
getLocation :: WebDriver wd => wd (Int, Int, Int)
getLocation = doSessCommand methodGet "/location" Null
>>= parseTriple "latitude" "longitude" "altitude" "getLocation"
-- |Set the current geographical location of the device.
setLocation :: WebDriver wd => (Int, Int, Int) -> wd ()
setLocation = noReturn . doSessCommand methodPost "/location"
. triple ("latitude",
"longitude",
"altitude")
-- |Uploads a file from the local filesystem by its file path.
uploadFile :: WebDriver wd => FilePath -> wd ()
uploadFile path = uploadZipEntry =<< liftBase (readEntry [] path)
-- |Uploads a raw bytestring with associated file info.
uploadRawFile :: WebDriver wd =>
FilePath -- ^File path to use with this bytestring.
-> Integer -- ^Modification time
-- (in seconds since Unix epoch).
-> LBS.ByteString -- ^ The file contents as a lazy ByteString
-> wd ()
uploadRawFile path t str = uploadZipEntry (toEntry path t str)
-- |Lowest level interface to the file uploading mechanism.
-- This allows you to specify the exact details of
-- the zip entry sent across the network.
uploadZipEntry :: WebDriver wd => Entry -> wd ()
uploadZipEntry = noReturn . doSessCommand methodPost "/file" . single "file"
. TL.decodeUtf8 . B64.encode . fromArchive . (`addEntryToArchive` emptyArchive)
-- |Get the current number of keys in a web storage area.
storageSize :: WebDriver wd => WebStorageType -> wd Integer
storageSize s = doStorageCommand methodGet s "/size" Null
-- |Get a list of all keys from a web storage area.
getAllKeys :: WebDriver wd => WebStorageType -> wd [Text]
getAllKeys s = doStorageCommand methodGet s "" Null
-- |Delete all keys within a given web storage area.
deleteAllKeys :: WebDriver wd => WebStorageType -> wd ()
deleteAllKeys s = noReturn $ doStorageCommand methodDelete s "" Null
-- |An HTML 5 storage type
data WebStorageType = LocalStorage | SessionStorage
deriving (Eq, Show, Ord, Bounded, Enum)
-- |Get the value associated with a key in the given web storage area.
-- Unset keys result in empty strings, since the Web Storage spec
-- makes no distinction between the empty string and an undefined value.
getKey :: WebDriver wd => WebStorageType -> Text -> wd Text
getKey s k = doStorageCommand methodGet s ("/key/" `T.append` urlEncode k) Null
-- |Set a key in the given web storage area.
setKey :: WebDriver wd => WebStorageType -> Text -> Text -> wd Text
setKey s k v = doStorageCommand methodPost s "" . object $ ["key" .= k,
"value" .= v ]
-- |Delete a key in the given web storage area.
deleteKey :: WebDriver wd => WebStorageType -> Text -> wd ()
deleteKey s k = noReturn $ doStorageCommand methodPost s ("/key/" `T.append` urlEncode k) Null
-- |A wrapper around 'doStorageCommand' to create web storage URLs.
doStorageCommand :: (WebDriver wd, ToJSON a, FromJSON b) =>
Method -> WebStorageType -> Text -> a -> wd b
doStorageCommand m s path a = doSessCommand m (T.concat ["/", s', path]) a
where s' = case s of
LocalStorage -> "local_storage"
SessionStorage -> "session_storage"
-- |Get information from the server as a JSON 'Object'. For more information
-- about this object see
-- <http://code.google.com/p/selenium/wiki/JsonWireProtocol#/status>
serverStatus :: (WebDriver wd) => RequestHeaders -> wd Value -- todo: make this a record type
serverStatus headers = doCommand headers methodGet "/status" Null
-- |A record that represents a single log entry.
data LogEntry =
LogEntry { logTime :: Integer -- ^ timestamp for the log entry. The standard
-- does not specify the epoch or the unit of
-- time.
, logLevel :: LogLevel -- ^ log verbosity level
, logMsg :: Text
}
deriving (Eq, Ord, Show, Read)
instance FromJSON LogEntry where
parseJSON (Object o) =
LogEntry <$> o .: "timestamp"
<*> o .: "level"
<*> (fromMaybe "" <$> o .: "message")
parseJSON v = typeMismatch "LogEntry" v
type LogType = String
-- |Retrieve the log buffer for a given log type. The server-side log buffer is reset after each request.
--
-- Which log types are available is server defined, but the wire protocol lists these as common log types:
-- client, driver, browser, server
getLogs :: WebDriver wd => LogType -> wd [LogEntry]
getLogs t = doSessCommand methodPost "/log" . object $ ["type" .= t]
-- |Get a list of available log types.
getLogTypes :: WebDriver wd => wd [LogType]
getLogTypes = doSessCommand methodGet "/log/types" Null
data ApplicationCacheStatus = Uncached | Idle | Checking | Downloading | UpdateReady | Obsolete deriving (Eq, Enum, Bounded, Ord, Show, Read)
instance FromJSON ApplicationCacheStatus where
parseJSON val = do
n <- parseJSON val
case n :: Integer of
0 -> return Uncached
1 -> return Idle
2 -> return Checking
3 -> return Downloading
4 -> return UpdateReady
5 -> return Obsolete
err -> fail $ "Invalid JSON for ApplicationCacheStatus: " ++ show err
getApplicationCacheStatus :: (WebDriver wd) => wd ApplicationCacheStatus
getApplicationCacheStatus = doSessCommand methodGet "/application_cache/status" Null
-- Moving this closer to the definition of Cookie seems to cause strange compile
-- errors, so I'm leaving it here for now.
$( deriveToJSON (defaultOptions{fieldLabelModifier = map C.toLower . drop 4}) ''Cookie )
|
plow-technologies/hs-webdriver
|
src/Test/WebDriver/Commands.hs
|
bsd-3-clause
| 33,455
| 0
| 13
| 7,676
| 7,073
| 3,739
| 3,334
| 455
| 3
|
-- #hide
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.GL.GLboolean
-- Copyright : (c) Sven Panne 2002-2005
-- License : BSD-style (see the file libraries/OpenGL/LICENSE)
--
-- Maintainer : sven.panne@aedion.de
-- Stability : provisional
-- Portability : portable
--
-- This is a purely internal module for (un-)marshaling GLboolean.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.GL.GLboolean (
marshalGLboolean, unmarshalGLboolean
) where
--------------------------------------------------------------------------------
marshalGLboolean :: Num a => Bool -> a
marshalGLboolean False = 0
marshalGLboolean True = 1
unmarshalGLboolean :: Num a => a -> Bool
unmarshalGLboolean = (/= 0)
|
FranklinChen/hugs98-plus-Sep2006
|
packages/OpenGL/Graphics/Rendering/OpenGL/GL/GLboolean.hs
|
bsd-3-clause
| 860
| 0
| 6
| 115
| 89
| 57
| 32
| 7
| 1
|
{- |
Module : $Header$
Description : The propositional formula and the operations on it.
Copyright : (c) Till Theis
License : MIT
Maintainer : Till Theis <theis.till@gmail.com>
Stability : experimental
Portability : portable
This is the main module which implements the propositional 'Formula' type
itself and all the algorithms on it.
-}
module PropositionalLogic.Logic where
import Prelude hiding (foldr1)
import Data.Foldable (foldr1)
import Data.Set (Set)
import qualified Data.Set as Set
import qualified Data.Map as Map
import Data.Maybe (fromJust)
import Data.List hiding (foldr1)
import Data.Function (on)
import Data.Ord (comparing)
import Debug.Trace
-- | Propositional formulae in any form are represented by the atomic formulae
-- 'T' (true), 'F' (false) and 'Symbol' and by nesting those within the basic
-- logical connectives.
--
-- Each formula has its form type @t@ attached, i.e. whether it's a formula in the
-- negative normal form ('NNF') or any of the available formula types.
data Formula t = T -- ^ Atom (true)
| F -- ^ Atom (false)
| Symbol String -- ^ Atom
| Negation (Formula t) -- ^ Unary connective
-- (sometimes treated as atom, sometimes as connective)
| Conjunction (Formula t) (Formula t) -- ^ Connective
| Disjunction (Formula t) (Formula t) -- ^ Connective
| Implication (Formula t) (Formula t) -- ^ Connective
| Equivalence (Formula t) (Formula t) -- ^ Connective
deriving (Show, Ord, Eq)
-- * Types
-- | Default 'Formula' form type. Formulae of this type don't have any structure
-- constraints.
data Fancy = Fancy deriving (Show, Eq)
-- | 'Formula'e in the 'Normal' form don't include 'Implication's or
-- 'Equivalence's as those connectives can be created from the other ones.
data Normal = Normal deriving (Show, Eq)
-- | The negative normal form is similar to the 'Normal' 'Formula' type but
-- only allows 'Negation's on atoms.
data NNF = NNF deriving (Show, Eq)
-- | The conjunctive normal form extends the negative normal form in that
-- 'Disjunction's must not contain any 'Conjunction's.
data CNF = CNF deriving (Show, Eq)
-- | The disjunctive normal form extends the negative normal form in that
-- 'Conjunction's must not contain any 'Disjunction's.
data DNF = DNF deriving (Show, Eq)
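-- Illustrative example ('exampleFancy' is a hypothetical name): the formula
-- (X /\ ~Y) -> Y written with the constructors above. It is tagged 'Fancy'
-- because it still contains an 'Implication'.
exampleFancy :: Formula Fancy
exampleFancy =
  (Symbol "X" `Conjunction` Negation (Symbol "Y")) `Implication` Symbol "Y"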
-- * Equality
--instance Eq (Formula t) where
-- x == y = truthTable x == truthTable y
-- | Mapping from the 'Symbol's of a 'Formula' to the boolean values they
-- represent.
type SymbolMapping = Map.Map String Bool
-- | Mapping from unique combinations of 'SymbolMapping's to the boolean value
-- the related 'Formula' yields with each configuration.
type TruthTable = Map.Map SymbolMapping Bool
-- | The truth table (<http://en.wikipedia.org/wiki/Truth_table>) of the
-- 'Formula'.
truthTable :: Formula t -> TruthTable
truthTable x = foldr insert Map.empty . mappings $ Set.toList $ symbols x
where insert :: SymbolMapping -> TruthTable -> TruthTable
insert k = Map.insert k (eval k x)
-- | Generate the 'SymbolMapping' from the given 'Symbol's and
-- 'Formula'.
mappings :: [String] -> [SymbolMapping]
mappings = map (foldr Map.union Map.empty) . mappingMaps
-- | From a list of 'Symbol's generate a list of lists where each
-- inner one contains a Map, holding exactly one symbol with its
-- assigned boolean value. The union of the maps of a sublist will
        -- describe the complete 'SymbolMapping' for that particular row
-- in the truth table.
mappingMaps :: [String] -> [[SymbolMapping]]
mappingMaps syms = map (zipWith Map.singleton syms) $ combos syms
-- | The list of distinct value combinations for each symbol. The
-- values still need to be combined with the actual 'Symbol' strings.
combos :: [String] -> [[Bool]]
combos syms = cartProd . take (length syms) $ repeat [True, False]
-- | The cartesian product of the elements of the list.
--
-- >>> cartProd [[1,2], [8,9]]
-- [[1,8],[1,9],[2,8],[2,9]]
cartProd :: [[a]] -> [[a]]
cartProd [] = []
cartProd [[]] = [[]]
cartProd [xs] = [ [x] | x <- xs ]
cartProd (xs:xss) = [ x:ys | x <- xs, ys <- cartProd xss ]
-- | Extract all symbol names from the 'Formula'.
--
-- >>> symbols $ Symbol "Y" `Conjunction` Negation (Symbol "X")
-- fromList ["X","Y"]
symbols :: Formula t -> Set.Set String
symbols = foldFormula insert Set.empty
where insert (Symbol s) syms = Set.insert s syms
insert _ syms = syms
-- | Evaluate the 'Formula' with its corresponding symbol-to-boolean-mapping
-- to its truth value.
eval :: SymbolMapping -> Formula t -> Bool
eval mapping = toBool . deepTransform reduce
where reduce T = T
reduce F = F
reduce (Symbol s) = fromBool . fromJust $ Map.lookup s mapping
reduce (Negation T) = F
reduce (Negation F) = T
reduce val@(Conjunction _ _) = fromBool $ reconnectMap (&&) toBool val
reduce val@(Disjunction _ _) = fromBool $ reconnectMap (||) toBool val
reduce val@(Implication _ _) = fromBool . eval mapping $ mkNormalVal val
reduce val@(Equivalence _ _) = fromBool . eval mapping $ mkNormalVal val
reduce _ = error "Logic.eval.reduce: Impossible"
fromBool True = T
fromBool False = F
toBool T = True
toBool F = False
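-- Illustrative example ('evalExample' is a hypothetical name): evaluating a
-- formula under an explicit symbol assignment. With X = False and Y = True,
-- X \/ ~Y evaluates to False.
evalExample :: Bool
evalExample = eval assignment formula
  where formula    = Symbol "X" `Disjunction` Negation (Symbol "Y") :: Formula Fancy
        assignment = Map.fromList [("X", False), ("Y", True)]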
-- * Type Casts
-- | /Unsafe(!)/ cast a 'Formula' from any type to any other type.
-- Formulae are casted regardless of their actual values.
--
-- Bad example (remember the definition of 'Normal' which says that 'Normal'
-- 'Formulae' must not include 'Implication's):
--
-- >>> :t cast (T `Implication` T) :: Formula Normal
-- cast (T `Implication` T) :: Formula Normal :: Formula Normal
cast :: Formula t -> Formula u
cast = transform cast
-- | Cast any 'Formula' to a 'Fancy' 'Formula'. This is safe because 'Fancy'
-- allows all kinds of sub-'Formula'e.
fancy :: Formula t -> Formula Fancy
fancy = cast
-- | Cast any 'Formula' to a 'Normal' 'Formula'. The caller must ensure that
-- the given 'Formula' doesn't include sub-'Formula'e of incompatible types
-- (i.e. 'Fancy' with 'Implication's or 'Equivalence's).
normal :: Formula t -> Formula Normal
normal = cast
-- | Cast any 'Formula' to a 'NNF' 'Formula'. The caller must ensure that
-- the given 'Formula' doesn't include sub-'Formula'e of incompatible types.
nnf :: Formula t -> Formula NNF
nnf = cast
-- | Cast any 'Formula' to a 'CNF' 'Formula'. The caller must ensure that
-- the given 'Formula' doesn't include sub-'Formula'e of incompatible types.
cnf :: Formula t -> Formula CNF
cnf = cast
-- | Cast any 'Formula' to a 'DNF' 'Formula'. The caller must ensure that
-- the given 'Formula' doesn't include sub-'Formula'e of incompatible types.
dnf :: Formula t -> Formula DNF
dnf = cast
-- * Transformations
-- | Transform a 'Formula' by applying a function to the arguments of its
-- outer connective. Atoms will not be transformed.
--
-- Example:
--
-- > transform Negation (Conjunction T F) == Conjunction (Negation T) (Negation F)
transform :: (Formula t -> Formula u) -> Formula t -> Formula u
transform _ T = T
transform _ F = F
transform _ (Symbol s) = Symbol s
transform f (Negation x) = Negation $ f x
transform f (Conjunction x y) = Conjunction (f x) (f y)
transform f (Disjunction x y) = Disjunction (f x) (f y)
transform f (Implication x y) = Implication (f x) (f y)
transform f (Equivalence x y) = Equivalence (f x) (f y)
-- | Recursively 'transform' a 'Formula', beginning at the atoms. While the
-- normal 'transform' function only transforms the connective's arguments,
-- 'deepTransform' does also transform the connective itself.
deepTransform :: (Formula t -> Formula t) -> Formula t -> Formula t
deepTransform f x = f (transform (deepTransform f) x)
-- | Reduce a 'Formula' to a single value, beginning at the atoms. The order in
-- which the connective's arguments are traversed and combined is undefined.
foldFormula :: (Formula t -> a -> a) -> a -> Formula t -> a
foldFormula f z T = f T z
foldFormula f z F = f F z
foldFormula f z val@(Symbol _) = f val z
foldFormula f z val@(Negation x) = f val z'
where z' = foldFormula f z x
foldFormula f z val@(Conjunction x y) = foldConnective f z x y val
foldFormula f z val@(Disjunction x y) = foldConnective f z x y val
foldFormula f z val@(Implication x y) = foldConnective f z x y val
foldFormula f z val@(Equivalence x y) = foldConnective f z x y val
foldConnective f z x y val = f val z''
where z' = foldFormula f z y
z'' = foldFormula f z' x
-- | Change a 'Formula''s connective. The connective can be substituted with
-- another 'Formula' constructor or an arbitrary binary function. Note that
-- this function is only defined for binary connectives!
reconnect :: (Formula t -> Formula t -> a) -> Formula t -> a
reconnect f (Conjunction x y) = f x y
reconnect f (Disjunction x y) = f x y
reconnect f (Implication x y) = f x y
reconnect f (Equivalence x y) = f x y
reconnect _ _ = undefined
-- * Normal Forms
-- | Convert a 'Fancy' 'Formula' to 'Formula' in the 'Normal' form. All
-- 'Formula'e, except the 'Fancy' ones, are already in the 'Normal' form.
mkNormal :: Formula Fancy -> Formula Normal
mkNormal = normal . deepTransform mkNormalVal
-- | Translate 'Fancy' 'Formula' constructors to their 'Normal' equivalents.
-- This is a \'flat\' function and is meant to be used with 'deepTransform'.
mkNormalVal :: Formula t -> Formula t
mkNormalVal (Implication x y) = Negation x `Disjunction` y
mkNormalVal (Equivalence x y) =
transform mkNormalVal $ Implication x y `Conjunction` Implication y x
mkNormalVal x = x
-- | Convert a 'Normal' 'Formula' to 'Formula' in the negative normal form. All
-- 'Formula'e, except the 'Fancy' and 'Normal' ones, are already in the 'NNF'
-- form.
mkNNF :: Formula Normal -> Formula NNF
mkNNF = nnf . deepTransform doubleNegation . deepTransform deMorgan
-- | Remove 'Negation's in front of connectives by moving the 'Negation' into
-- the connective (for more information please refer to
-- <http://en.wikipedia.org/wiki/De_Morgan%27s_laws>).
-- This is a \'flat\' function and is meant to be used with 'deepTransform'.
deMorgan :: Formula t -> Formula t
deMorgan (Negation (Conjunction x y)) =
transform deMorgan $ Negation x `Disjunction` Negation y
deMorgan (Negation (Disjunction x y)) =
transform deMorgan $ Negation x `Conjunction` Negation y
deMorgan x = x
-- | Remove double 'Negation's as they neutralize themselves.
-- This is a \'flat\' function and is meant to be used with 'deepTransform'.
doubleNegation :: Formula t -> Formula t
doubleNegation (Negation (Negation x)) = x
doubleNegation x = x
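-- Illustrative check ('nnfExample' is a hypothetical name): one De Morgan
-- step followed by double-negation elimination turns ~(A /\ ~B) into ~A \/ B.
nnfExample :: Bool
nnfExample =
  mkNNF (Negation (Symbol "A" `Conjunction` Negation (Symbol "B")))
    == (Negation (Symbol "A") `Disjunction` Symbol "B")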
-- | Convert a 'Formula' in the negative normal form to a 'Formula' in the
-- conjunctive normal form.
mkCNF :: Formula NNF -> Formula CNF
mkCNF = cnf . deepTransform mkCNFVal
-- | Ensure that no 'Disjunction' contains any 'Conjunction's by making use of
-- the distributive law (<http://en.wikipedia.org/wiki/Distributivity>).
-- This is a \'flat\' function and is meant to be used with 'deepTransform'.
mkCNFVal :: Formula t -> Formula t
mkCNFVal (Disjunction x@(Conjunction _ _) y) =
transform (mkCNFVal . Disjunction y) x
mkCNFVal (Disjunction y x@(Conjunction _ _)) = mkCNFVal $ Disjunction x y
mkCNFVal x = x
-- | Simplify a 'CNF' 'Formula' but retain the CNF properties.
simplifyCNF :: Formula CNF -> Formula CNF
simplifyCNF = outerFromL . go [F, Negation T] [T, Negation F] . innerFromL . map (go [T, Negation F] [F, Negation T]) . toL
where toL disj@(Disjunction _ _) = [ innerToL disj ]
toL (Conjunction x y) = toL x ++ toL y
toL x = [[x]]
innerToL (Disjunction x y) = innerToL x ++ innerToL y
innerToL x = [x]
innerFromL = map (foldr1 Disjunction)
outerFromL = foldr1 Conjunction
go shortCircuits strippables xs =
if any (`elem` xs) shortCircuits || or [ isMutualExclusion x y | x <- xs, y <- reverse xs]
then [ head shortCircuits ]
else case nub xs \\ strippables of
[] -> [ head strippables ]
xs' -> xs'
isMutualExclusion T F = True
isMutualExclusion F T = True
isMutualExclusion x (Negation y) = x == y
isMutualExclusion (Negation x) y = x == y
isMutualExclusion _ _ = False
-- | Convert a 'Formula' in the negative normal form to a 'Formula' in the
-- disjunctive normal form.
mkDNF :: Formula NNF -> Formula DNF
mkDNF = dnf . deepTransform mkDNFVal
-- | Ensure that no 'Conjunction' contains any 'Disjunction's by making use of
-- the distributive law (<http://en.wikipedia.org/wiki/Distributivity>).
-- This is a \'flat\' function and is meant to be used with 'deepTransform'.
mkDNFVal :: Formula t -> Formula t
mkDNFVal (Conjunction x@(Disjunction _ _) y) =
transform (mkDNFVal . Conjunction y) x
mkDNFVal (Conjunction y x@(Disjunction _ _)) = mkDNFVal $ Conjunction x y
mkDNFVal x = x
-- | Simplify a 'DNF' 'Formula' but retain the DNF properties.
simplifyDNF :: Formula DNF -> Formula DNF
simplifyDNF = outerFromL . go [T, Negation F] [F, Negation T] . innerFromL . map (go [F, Negation T] [T, Negation F]) . toL
where toL disj@(Conjunction _ _) = [ innerToL disj ]
toL (Disjunction x y) = toL x ++ toL y
toL x = [[x]]
innerToL (Conjunction x y) = innerToL x ++ innerToL y
innerToL x = [x]
innerFromL = map (foldr1 Conjunction)
outerFromL = foldr1 Disjunction
go shortCircuits strippables xs =
if any (`elem` xs) shortCircuits || or [ isMutualExclusion x y | x <- xs, y <- reverse xs]
then [ head shortCircuits ]
else case nub xs \\ strippables of
[] -> [ head strippables ]
xs' -> xs'
isMutualExclusion T F = True
isMutualExclusion F T = True
isMutualExclusion x (Negation y) = x == y
isMutualExclusion (Negation x) y = x == y
isMutualExclusion _ _ = False
-- * Simplification
-- | Prime implicant chart: each prime implicant on the left is paired with the
-- indices of the minterms (true rows) it covers on the right.
type PIChart = [([QMVal], [Int])]
-- | Minimize a 'Formula': run the Quine-McCluskey algorithm on its models and
-- select a minimal cover of prime implicants with Petrick's method. The result
-- is in disjunctive normal form.
simplify :: Formula t -> Formula DNF
simplify x = let piChart = petrick . qm . qmMappings . trueMappings $ truthTable x
in qmMappingsToFormula x $ map fst piChart
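-- A hand-worked sketch of 'simplify' (illustrative only, ignoring the phantom
-- type tags): for the formula
--
--   (Symbol "p" `Conjunction` Symbol "q")
--     `Disjunction` (Symbol "p" `Conjunction` Negation (Symbol "q"))
--
-- the models are p = True, q = True and p = True, q = False; Quine-McCluskey
-- merges them into a single prime implicant in which q is a don't-care, so the
-- minimized result is just Symbol "p".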
-- | Find all the 'SymbolMapping's in a 'TruthTable' for which the Formula
-- returns true (the models).
trueMappings :: TruthTable -> [SymbolMapping]
trueMappings = Map.keys . Map.filter id
-- | Convert a 'TruthTable' to a matching 'Formula'.
toFormula :: TruthTable -> Formula DNF
toFormula = outerFold . trueMappings
where outerFold :: [SymbolMapping] -> Formula DNF
outerFold = foldr1 Disjunction . map innerFold
innerFold :: SymbolMapping -> Formula DNF
innerFold = foldr1 Conjunction . map toAtom . Map.toList
toAtom :: (String, Bool) -> Formula DNF
toAtom (s, True) = Symbol s
toAtom (s, False) = Negation $ Symbol s
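-- A hand-worked sketch of 'toFormula' (illustrative only): for a truth table
-- over "p" and "q" whose only true row is p = True, q = False, the result is
--
--   Symbol "p" `Conjunction` Negation (Symbol "q")
--
-- and multiple true rows would be joined with 'Disjunction'.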
-- | Map both sub-'Formula's with the given function and then combine the
-- results (like 'reconnect', but without requiring the mapping to yield
-- 'Formula' results).
reconnectMap :: (a -> a -> b) -> (Formula t -> a) -> Formula t -> b
reconnectMap fc fm x = fc (fm $ fst p) (fm $ snd p)
where p = reconnect (,) x
-- | Apply the function @f@ to the argument @x@ if the condition @cond@ holds.
-- Leave @x@ unchanged otherwise.
alterIf :: Bool -> (a -> a) -> a -> a
alterIf cond f x = if cond then f x else x
-- | Value representation for the Quine-McCluskey algorithm.
data QMVal = QMTrue | QMFalse | QMDontCare deriving (Show, Eq, Ord)
toQMVal :: Bool -> QMVal
toQMVal True = QMTrue
toQMVal False = QMFalse
-- | Encode each 'SymbolMapping' as a row of 'QMVal's, in symbol order.
qmMappings :: [SymbolMapping] -> [[QMVal]]
qmMappings = map (map toQMVal . Map.elems)
-- | Translate minimized rows of 'QMVal's back into a 'Formula' in disjunctive
-- normal form, naming the atoms after the symbols of the original 'Formula'.
qmMappingsToFormula :: Formula t -> [[QMVal]] -> Formula DNF
qmMappingsToFormula initialFormula mappings = fromL . disjunctions . disjAList $ mappings
where fromL :: [[Formula t]] -> Formula t
fromL [] = T
fromL xs = foldr1 Disjunction . map (foldr1 Conjunction) $ xs
disjAList :: [[QMVal]] -> [[(String, QMVal)]]
disjAList = map (zip $ Set.toList $ symbols initialFormula)
disjunctions :: [[(String, QMVal)]] -> [[Formula t]]
disjunctions = filter (not . null) . map (foldr go [])
go (_, QMDontCare) xs = xs
go (sym, QMTrue) xs = Symbol sym : xs
go (sym, QMFalse) xs = Negation (Symbol sym) : xs
-- | Quine–McCluskey algorithm
-- (<http://en.wikipedia.org/wiki/Quine%E2%80%93McCluskey_algorithm>,
-- <http://www.eetimes.com/discussion/programmer-s-toolbox/4025004/All-about-Quine-McClusky>):
-- compute the prime implicant chart for the given minterms, which 'petrick'
-- then reduces to a minimal cover.
qm :: [[QMVal]] -> PIChart
qm xs = let todo = flip zip [ [n] | n <- [0..] ] xs
in qm' [] todo todo []
qm' :: PIChart -- ^ values we are done with and relevant
-> PIChart -- ^ values we still have to work with
-> PIChart -- ^ values which have not been merged yet
-> PIChart -- ^ values for next round
-> PIChart -- ^ all the relevant values
qm' done [] unmerged [] = nub $ done ++ unmerged
qm' done [] unmerged next = qm' (done ++ unmerged) next next []
qm' done ((x, idxs):xs) unmerged next = qm' done' xs unmerged' next'
where mergeable = filter (\(y, _) -> isComparable x y && diff x y == 1) xs
done' = alterIf (null mergeable && (x, idxs) `elem` unmerged) ((x, idxs):) done
unmerged' = alterIf (not $ null mergeable) (filter (`notElem` (x, idxs):mergeable)) unmerged
next' = removeDuplicates $ next ++ map (\(y, yIdxs) -> (merge x y, idxs `union` yIdxs)) mergeable
removeDuplicates = nubBy ((==) `on` fst)
merge = zipWith (\a b -> if a == b then a else QMDontCare)
diff x y = length . filter not $ zipWith (==) x y
isComparable = (==) `on` elemIndices QMDontCare
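-- A hand-worked sketch of one merging step in qm' (illustrative only): two
-- rows whose don't-cares sit at the same positions and which differ in exactly
-- one remaining position are combined, and their minterm indices are united:
--
--   ([QMTrue, QMFalse, QMTrue], [0]) and ([QMTrue, QMTrue, QMTrue], [1])
--     ~> ([QMTrue, QMDontCare, QMTrue], [0,1])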
-- | Extract the essential prime implicants, i.e. those which are the only
-- cover for at least one minterm.
essentialPIs :: PIChart -> PIChart
essentialPIs pis = nub $ map (pis !!) essentialLocalIdxs
where piIdxs = nub . concat . map snd $ pis
idxToLocalIdx = map (\idx -> findIndices (elem idx . snd) pis) piIdxs
essentialLocalIdxs = map head $ filter ((==) 1 . length) idxToLocalIdx
essentialIdxs = map (pis !!) essentialLocalIdxs
-- | Petrick's method (<http://en.wikipedia.org/wiki/Petrick's_method>).
-- The result contains only the prime implicants needed for a minimal cover.
petrick :: PIChart -> PIChart
petrick pis = essentials ++ (shortestCombo $ fromReducable $ reduce $ toReducable $ productOfSums)
where essentials = essentialPIs pis
nonEssentials = pis \\ essentials
originalIdxs = map snd nonEssentials
allIdxs = nub $ concat originalIdxs
productOfSums = [ map (:[]) $ filter (elem idx . snd) nonEssentials | idx <- allIdxs ]
toReducable :: Ord a => [[[a]]] -> [Set (Set a)]
toReducable = map (Set.fromList . map Set.fromList)
fromReducable :: Ord a => Set (Set a) -> [[a]]
fromReducable = Set.toList . Set.map Set.toList
reduce :: Ord a => [Set (Set a)] -> Set (Set a)
reduce (xs:ys:zss) = reduce $ xOrXyIsX (expand xs ys) : zss
reduce [xs] = xs
reduce [] = Set.empty
expand :: Ord a => Set (Set a) -> Set (Set a) -> Set (Set a)
expand xs ys = Set.foldr outerFolder Set.empty xs
where outerFolder = \x prods -> (Set.foldr (innerFolder x) Set.empty ys) `Set.union` prods
innerFolder x = \y prod -> Set.insert (x `Set.union` y) prod
xOrXyIsX set = Set.foldr (\xs -> Set.filter $ not . Set.isSubsetOf xs) set set
shortestCombo [] = []
shortestCombo combos = maximumBy (comparing $ sum . map (length . elemIndices QMDontCare . fst)) combosWithFewestElems
where (combosWithFewestElems, _) = partition ((==) minNumElems . length) combos
minNumElems = length $ minimumBy (comparing length) combos
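-- A hand-worked sketch of the Petrick reduction above (illustrative only): if
-- minterm 0 is covered by prime implicants P1 and P2, and minterm 1 by P1 and
-- P3, the product of sums (P1 + P2)(P1 + P3) expands to
-- P1·P1 + P1·P3 + P2·P1 + P2·P3, which xOrXyIsX absorbs (X + X·Y = X) down to
-- P1 + P2·P3. shortestCombo then picks the cover with the fewest implicants,
-- breaking ties in favour of implicants with more don't-cares (fewer literals).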
|
tilltheis/propositional-logic
|
src/PropositionalLogic/Logic.hs
|
bsd-3-clause
| 20,255
| 0
| 14
| 4,813
| 5,325
| 2,801
| 2,524
| 263
| 12
|
module AnsiToPrompt (
main
) where
import Control.Monad.State.Strict as S
import qualified Control.Monad.State.Strict (State)
import System.Environment (getArgs)
main :: IO ()
main = do
args <- getArgs
let pt = case args of
["--bash"] -> Bash
["--haskeline"] -> Haskeline
_ -> error "Expecting --bash or --haskeline argument."
interact $ runAnsiToPrompt pt
data PromptType
= Bash
| Haskeline
data Status
= Normal
| Escaping
runAnsiToPrompt :: PromptType -> String -> String
runAnsiToPrompt pt = flip S.evalState Normal . ansiToPrompt pt
ansiToPrompt :: PromptType -> String -> State Status String
ansiToPrompt pt str = case str of
'\ESC' : rest -> do
S.put Escaping
let open = case pt of
Bash -> "\\[\ESC"
Haskeline -> "\ESC"
liftM (open ++) $ ansiToPrompt pt rest
'm' : rest -> do
status <- S.get
case status of
Normal -> liftM ('m' :) $ ansiToPrompt pt rest
Escaping -> do
S.put Normal
let close = case pt of
Bash -> "\\]"
Haskeline -> "\STX"
liftM (('m' : close) ++) $ ansiToPrompt pt rest
c : rest -> liftM (c :) $ ansiToPrompt pt rest
"" -> return ""
|
thomaseding/ansi2prompt
|
src/AnsiToPrompt.hs
|
bsd-3-clause
| 1,348
| 0
| 20
| 474
| 410
| 208
| 202
| 41
| 7
|
module Main where
import System.Random (newStdGen)
import Lib
main :: IO ()
main = do
gen <- newStdGen
let w = 20
h = 12
maze = createMaze gen w h
soln = solveMaze maze (1, 1) (w, h)
putStr . renderMaze maze $ soln
|
ford-prefect/haskell-maze
|
app/Main.hs
|
bsd-3-clause
| 242
| 0
| 11
| 72
| 103
| 55
| 48
| 11
| 1
|
{-# OPTIONS_HADDOCK hide #-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.GL.IOState
-- Copyright : (c) Sven Panne 2002-2013
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
-- This is a purely internal module for an IO monad with a pointer as an
-- additional state, basically a /StateT (Ptr s) IO a/.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.GL.IOState (
IOState(..), getIOState, peekIOState, evalIOState, nTimes
) where
import Control.Applicative ( Applicative(..) )
import Control.Monad ( ap, liftM, replicateM )
import Foreign.Ptr ( Ptr, plusPtr )
import Foreign.Storable ( Storable(sizeOf,peek) )
--------------------------------------------------------------------------------
newtype IOState s a = IOState { runIOState :: Ptr s -> IO (a, Ptr s) }
instance Applicative (IOState s) where
pure = return
(<*>) = ap
instance Functor (IOState s) where
fmap = liftM
instance Monad (IOState s) where
return a = IOState $ \s -> return (a, s)
m >>= k = IOState $ \s -> do (a, s') <- runIOState m s ; runIOState (k a) s'
fail str = IOState $ \_ -> fail str
getIOState :: IOState s (Ptr s)
getIOState = IOState $ \s -> return (s, s)
putIOState :: Ptr s -> IOState s ()
putIOState s = IOState $ \_ -> return ((), s)
peekIOState :: Storable a => IOState a a
peekIOState = do
ptr <- getIOState
x <- liftIOState $ peek ptr
putIOState (ptr `plusPtr` sizeOf x)
return x
liftIOState :: IO a -> IOState s a
liftIOState m = IOState $ \s -> do a <- m ; return (a, s)
evalIOState :: IOState s a -> Ptr s -> IO a
evalIOState m s = do (a, _) <- runIOState m s ; return a
nTimes :: Integral a => a -> IOState b c -> IOState b [c]
nTimes n = replicateM (fromIntegral n)
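-- A usage sketch (illustrative only, not part of the original module): read
-- three consecutive 'Float's starting at a pointer, letting 'peekIOState'
-- advance the pointer after each read:
--
--   readThree :: Ptr Float -> IO [Float]
--   readThree = evalIOState (nTimes (3 :: Int) peekIOState)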
|
hesiod/OpenGL
|
src/Graphics/Rendering/OpenGL/GL/IOState.hs
|
bsd-3-clause
| 1,947
| 0
| 12
| 377
| 613
| 332
| 281
| 33
| 1
|