| code (stringlengths 5 to 1.03M) | repo_name (stringlengths 5 to 90) | path (stringlengths 4 to 158) | license (stringclasses, 15 values) | size (int64, 5 to 1.03M) | n_ast_errors (int64, 0 to 53.9k) | ast_max_depth (int64, 2 to 4.17k) | n_whitespaces (int64, 0 to 365k) | n_ast_nodes (int64, 3 to 317k) | n_ast_terminals (int64, 1 to 171k) | n_ast_nonterminals (int64, 1 to 146k) | loc (int64, -1 to 37.3k) | cycloplexity (int64, -1 to 1.31k) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.RDS.DeleteDBClusterSnapshot
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes a DB cluster snapshot. If the snapshot is being copied, the copy
-- operation is terminated.
--
-- The DB cluster snapshot must be in the 'available' state to be deleted.
--
-- For more information on Amazon Aurora, see
-- <http://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/CHAP_Aurora.html Aurora on Amazon RDS>
-- in the /Amazon RDS User Guide./
--
-- /See:/ <http://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_DeleteDBClusterSnapshot.html AWS API Reference> for DeleteDBClusterSnapshot.
module Network.AWS.RDS.DeleteDBClusterSnapshot
(
-- * Creating a Request
deleteDBClusterSnapshot
, DeleteDBClusterSnapshot
-- * Request Lenses
, ddcsDBClusterSnapshotIdentifier
-- * Destructuring the Response
, deleteDBClusterSnapshotResponse
, DeleteDBClusterSnapshotResponse
-- * Response Lenses
, ddcsrsDBClusterSnapshot
, ddcsrsResponseStatus
) where
import Network.AWS.Prelude
import Network.AWS.RDS.Types
import Network.AWS.RDS.Types.Product
import Network.AWS.Request
import Network.AWS.Response
-- |
--
-- /See:/ 'deleteDBClusterSnapshot' smart constructor.
newtype DeleteDBClusterSnapshot = DeleteDBClusterSnapshot'
{ _ddcsDBClusterSnapshotIdentifier :: Text
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DeleteDBClusterSnapshot' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ddcsDBClusterSnapshotIdentifier'
deleteDBClusterSnapshot
:: Text -- ^ 'ddcsDBClusterSnapshotIdentifier'
-> DeleteDBClusterSnapshot
deleteDBClusterSnapshot pDBClusterSnapshotIdentifier_ =
DeleteDBClusterSnapshot'
{ _ddcsDBClusterSnapshotIdentifier = pDBClusterSnapshotIdentifier_
}
-- | The identifier of the DB cluster snapshot to delete.
--
-- Constraints: Must be the name of an existing DB cluster snapshot in the
-- 'available' state.
ddcsDBClusterSnapshotIdentifier :: Lens' DeleteDBClusterSnapshot Text
ddcsDBClusterSnapshotIdentifier = lens _ddcsDBClusterSnapshotIdentifier (\ s a -> s{_ddcsDBClusterSnapshotIdentifier = a});
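-- A usage sketch (illustrative, not part of the generated module): build the
-- request with the smart constructor and override the identifier through its
-- lens. Assumes the usual lens operators ('&', '.~') are in scope; the
-- snapshot names are made up.
--
-- > deleteDBClusterSnapshot "my-cluster-snapshot"
-- >   & ddcsDBClusterSnapshotIdentifier .~ "my-other-cluster-snapshot"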
instance AWSRequest DeleteDBClusterSnapshot where
type Rs DeleteDBClusterSnapshot =
DeleteDBClusterSnapshotResponse
request = postQuery rDS
response
= receiveXMLWrapper "DeleteDBClusterSnapshotResult"
(\ s h x ->
DeleteDBClusterSnapshotResponse' <$>
(x .@? "DBClusterSnapshot") <*> (pure (fromEnum s)))
instance ToHeaders DeleteDBClusterSnapshot where
toHeaders = const mempty
instance ToPath DeleteDBClusterSnapshot where
toPath = const "/"
instance ToQuery DeleteDBClusterSnapshot where
toQuery DeleteDBClusterSnapshot'{..}
= mconcat
["Action" =:
("DeleteDBClusterSnapshot" :: ByteString),
"Version" =: ("2014-10-31" :: ByteString),
"DBClusterSnapshotIdentifier" =:
_ddcsDBClusterSnapshotIdentifier]
-- | /See:/ 'deleteDBClusterSnapshotResponse' smart constructor.
data DeleteDBClusterSnapshotResponse = DeleteDBClusterSnapshotResponse'
{ _ddcsrsDBClusterSnapshot :: !(Maybe DBClusterSnapshot)
, _ddcsrsResponseStatus :: !Int
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DeleteDBClusterSnapshotResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ddcsrsDBClusterSnapshot'
--
-- * 'ddcsrsResponseStatus'
deleteDBClusterSnapshotResponse
:: Int -- ^ 'ddcsrsResponseStatus'
-> DeleteDBClusterSnapshotResponse
deleteDBClusterSnapshotResponse pResponseStatus_ =
DeleteDBClusterSnapshotResponse'
{ _ddcsrsDBClusterSnapshot = Nothing
, _ddcsrsResponseStatus = pResponseStatus_
}
-- | Undocumented member.
ddcsrsDBClusterSnapshot :: Lens' DeleteDBClusterSnapshotResponse (Maybe DBClusterSnapshot)
ddcsrsDBClusterSnapshot = lens _ddcsrsDBClusterSnapshot (\ s a -> s{_ddcsrsDBClusterSnapshot = a});
-- | The response status code.
ddcsrsResponseStatus :: Lens' DeleteDBClusterSnapshotResponse Int
ddcsrsResponseStatus = lens _ddcsrsResponseStatus (\ s a -> s{_ddcsrsResponseStatus = a});
| olorin/amazonka | amazonka-rds/gen/Network/AWS/RDS/DeleteDBClusterSnapshot.hs | mpl-2.0 | 5,016 | 0 | 13 | 924 | 558 | 340 | 218 | 73 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.Route53.DeleteHostedZone
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- This action deletes a hosted zone. To delete a hosted zone, send a
-- 'DELETE' request to the '2013-04-01\/hostedzone\/hosted zone ID'
-- resource.
--
-- For more information about deleting a hosted zone, see
-- <http://docs.aws.amazon.com/Route53/latest/DeveloperGuide/DeleteHostedZone.html Deleting a Hosted Zone>
-- in the /Amazon Route 53 Developer Guide/.
--
-- You can delete a hosted zone only if there are no resource record sets
-- other than the default SOA record and NS resource record sets. If your
-- hosted zone contains other resource record sets, you must delete them
-- before you can delete your hosted zone. If you try to delete a hosted
-- zone that contains other resource record sets, Route 53 will deny your
-- request with a 'HostedZoneNotEmpty' error. For information about
-- deleting records from your hosted zone, see ChangeResourceRecordSets.
--
-- /See:/ <http://docs.aws.amazon.com/Route53/latest/APIReference/API_DeleteHostedZone.html AWS API Reference> for DeleteHostedZone.
module Network.AWS.Route53.DeleteHostedZone
(
-- * Creating a Request
deleteHostedZone
, DeleteHostedZone
-- * Request Lenses
, dhzId
-- * Destructuring the Response
, deleteHostedZoneResponse
, DeleteHostedZoneResponse
-- * Response Lenses
, dhzrsResponseStatus
, dhzrsChangeInfo
) where
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
import Network.AWS.Route53.Types
import Network.AWS.Route53.Types.Product
-- | A complex type that contains information about the hosted zone that you
-- want to delete.
--
-- /See:/ 'deleteHostedZone' smart constructor.
newtype DeleteHostedZone = DeleteHostedZone'
{ _dhzId :: Text
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DeleteHostedZone' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dhzId'
deleteHostedZone
:: Text -- ^ 'dhzId'
-> DeleteHostedZone
deleteHostedZone pId_ =
DeleteHostedZone'
{ _dhzId = pId_
}
-- | The ID of the hosted zone you want to delete.
dhzId :: Lens' DeleteHostedZone Text
dhzId = lens _dhzId (\ s a -> s{_dhzId = a});
instance AWSRequest DeleteHostedZone where
type Rs DeleteHostedZone = DeleteHostedZoneResponse
request = delete route53
response
= receiveXML
(\ s h x ->
DeleteHostedZoneResponse' <$>
(pure (fromEnum s)) <*> (x .@ "ChangeInfo"))
instance ToHeaders DeleteHostedZone where
toHeaders = const mempty
instance ToPath DeleteHostedZone where
toPath DeleteHostedZone'{..}
= mconcat ["/2013-04-01/hostedzone/", toBS _dhzId]
instance ToQuery DeleteHostedZone where
toQuery = const mempty
-- | A complex type containing the response information for the request.
--
-- /See:/ 'deleteHostedZoneResponse' smart constructor.
data DeleteHostedZoneResponse = DeleteHostedZoneResponse'
{ _dhzrsResponseStatus :: !Int
, _dhzrsChangeInfo :: !ChangeInfo
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DeleteHostedZoneResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dhzrsResponseStatus'
--
-- * 'dhzrsChangeInfo'
deleteHostedZoneResponse
:: Int -- ^ 'dhzrsResponseStatus'
-> ChangeInfo -- ^ 'dhzrsChangeInfo'
-> DeleteHostedZoneResponse
deleteHostedZoneResponse pResponseStatus_ pChangeInfo_ =
DeleteHostedZoneResponse'
{ _dhzrsResponseStatus = pResponseStatus_
, _dhzrsChangeInfo = pChangeInfo_
}
-- | The response status code.
dhzrsResponseStatus :: Lens' DeleteHostedZoneResponse Int
dhzrsResponseStatus = lens _dhzrsResponseStatus (\ s a -> s{_dhzrsResponseStatus = a});
-- | A complex type that contains the ID, the status, and the date and time
-- of your delete request.
dhzrsChangeInfo :: Lens' DeleteHostedZoneResponse ChangeInfo
dhzrsChangeInfo = lens _dhzrsChangeInfo (\ s a -> s{_dhzrsChangeInfo = a});
| fmapfmapfmap/amazonka | amazonka-route53/gen/Network/AWS/Route53/DeleteHostedZone.hs | mpl-2.0 | 4,847 | 0 | 14 | 941 | 535 | 330 | 205 | 68 | 1 |
{-# LANGUAGE DefaultSignatures #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE RecordWildCards #-}
-- |
-- Module : Network.AWS.Data.ByteString
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : provisional
-- Portability : non-portable (GHC extensions)
--
module Network.AWS.Data.ByteString
(
-- * ByteString
ByteString
, LazyByteString
, ToByteString (..)
, showBS
, stripBS
) where
import Data.ByteString (ByteString)
import Data.ByteString.Builder (Builder)
import qualified Data.ByteString.Char8 as BS8
import qualified Data.ByteString.Lazy as LBS
import qualified Data.ByteString.Lazy.Builder as Build
import Data.CaseInsensitive (CI)
import qualified Data.CaseInsensitive as CI
import Data.Char
import qualified Data.Text.Encoding as Text
import Data.Time (UTCTime)
import Network.AWS.Data.Text
import Network.HTTP.Types
import Numeric
import Numeric.Natural
import Prelude
type LazyByteString = LBS.ByteString
showBS :: ToByteString a => a -> String
showBS = BS8.unpack . toBS
stripBS :: ByteString -> ByteString
stripBS = BS8.dropWhile isSpace . fst . BS8.spanEnd isSpace
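-- Illustrative examples (not part of the original module):
--
-- > showBS (42 :: Int)    -- "42"
-- > stripBS "  padded  "  -- "padded"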
class ToByteString a where
toBS :: a -> ByteString
default toBS :: ToText a => a -> ByteString
toBS = Text.encodeUtf8 . toText
instance ToByteString ByteString where toBS = id
instance ToByteString Builder where toBS = toBS . Build.toLazyByteString
instance ToByteString LazyByteString where toBS = LBS.toStrict
instance ToByteString Text where toBS = Text.encodeUtf8
instance ToByteString String where toBS = BS8.pack
instance ToByteString Int where toBS = toBS . Build.intDec
instance ToByteString Integer where toBS = toBS . Build.integerDec
instance ToByteString Natural where toBS = toBS . toInteger
instance ToByteString Double where toBS = toBS . ($ "") . showFFloat Nothing
instance ToByteString StdMethod where toBS = renderStdMethod
instance ToByteString UTCTime where toBS = BS8.pack . show
instance ToByteString a => ToByteString (CI a) where
toBS = toBS . CI.original
| fmapfmapfmap/amazonka | core/src/Network/AWS/Data/ByteString.hs | mpl-2.0 | 2,504 | 0 | 9 | 628 | 497 | 291 | 206 | 50 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.ECS.DiscoverPollEndpoint
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | This action is only used by the Amazon EC2 Container Service agent, and it is
-- not intended for use outside of the agent.
--
-- Returns an endpoint for the Amazon EC2 Container Service agent to poll for
-- updates.
--
-- <http://docs.aws.amazon.com/AmazonECS/latest/APIReference/API_DiscoverPollEndpoint.html>
module Network.AWS.ECS.DiscoverPollEndpoint
(
-- * Request
DiscoverPollEndpoint
-- ** Request constructor
, discoverPollEndpoint
-- ** Request lenses
, dpeCluster
, dpeContainerInstance
-- * Response
, DiscoverPollEndpointResponse
-- ** Response constructor
, discoverPollEndpointResponse
-- ** Response lenses
, dperEndpoint
) where
import Network.AWS.Data (Object)
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.ECS.Types
import qualified GHC.Exts
data DiscoverPollEndpoint = DiscoverPollEndpoint
{ _dpeCluster :: Maybe Text
, _dpeContainerInstance :: Maybe Text
} deriving (Eq, Ord, Read, Show)
-- | 'DiscoverPollEndpoint' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dpeCluster' @::@ 'Maybe' 'Text'
--
-- * 'dpeContainerInstance' @::@ 'Maybe' 'Text'
--
discoverPollEndpoint :: DiscoverPollEndpoint
discoverPollEndpoint = DiscoverPollEndpoint
{ _dpeContainerInstance = Nothing
, _dpeCluster = Nothing
}
-- | The cluster that the container instance belongs to.
dpeCluster :: Lens' DiscoverPollEndpoint (Maybe Text)
dpeCluster = lens _dpeCluster (\s a -> s { _dpeCluster = a })
-- | The container instance UUID or full Amazon Resource Name (ARN) of the
-- container instance. The ARN contains the 'arn:aws:ecs' namespace, followed by
-- the region of the container instance, the AWS account ID of the container
-- instance owner, the 'container-instance' namespace, and then the container
-- instance UUID. For example, arn:aws:ecs:/region/:/aws_account_id/:container-instance/
-- /container_instance_UUID/.
dpeContainerInstance :: Lens' DiscoverPollEndpoint (Maybe Text)
dpeContainerInstance =
lens _dpeContainerInstance (\s a -> s { _dpeContainerInstance = a })
newtype DiscoverPollEndpointResponse = DiscoverPollEndpointResponse
{ _dperEndpoint :: Maybe Text
} deriving (Eq, Ord, Read, Show, Monoid)
-- | 'DiscoverPollEndpointResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dperEndpoint' @::@ 'Maybe' 'Text'
--
discoverPollEndpointResponse :: DiscoverPollEndpointResponse
discoverPollEndpointResponse = DiscoverPollEndpointResponse
{ _dperEndpoint = Nothing
}
-- | The endpoint for the Amazon ECS agent to poll.
dperEndpoint :: Lens' DiscoverPollEndpointResponse (Maybe Text)
dperEndpoint = lens _dperEndpoint (\s a -> s { _dperEndpoint = a })
instance ToPath DiscoverPollEndpoint where
toPath = const "/"
instance ToQuery DiscoverPollEndpoint where
toQuery = const mempty
instance ToHeaders DiscoverPollEndpoint
instance ToJSON DiscoverPollEndpoint where
toJSON DiscoverPollEndpoint{..} = object
[ "containerInstance" .= _dpeContainerInstance
, "cluster" .= _dpeCluster
]
instance AWSRequest DiscoverPollEndpoint where
type Sv DiscoverPollEndpoint = ECS
type Rs DiscoverPollEndpoint = DiscoverPollEndpointResponse
request = post "DiscoverPollEndpoint"
response = jsonResponse
instance FromJSON DiscoverPollEndpointResponse where
parseJSON = withObject "DiscoverPollEndpointResponse" $ \o -> DiscoverPollEndpointResponse
<$> o .:? "endpoint"
| kim/amazonka | amazonka-ecs/gen/Network/AWS/ECS/DiscoverPollEndpoint.hs | mpl-2.0 | 4,633 | 0 | 9 | 920 | 533 | 324 | 209 | 62 | 1 |
{-# LANGUAGE DeriveGeneric, GeneralizedNewtypeDeriving #-}
-- | Actors perceiving other actors and the dungeon level.
--
-- Visibility works according to KISS. Everything that player sees is real.
-- There are no unmarked hidden tiles and only solid tiles can be marked,
-- so there are no invisible walls and to pass through an illusory wall,
-- you have use a turn bumping into it first. Only tiles marked with Suspect
-- can turn out to be another tile. (So, if all tiles are marked with
-- Suspect, the player knows nothing for sure, but this should be avoided,
-- because searching becomes too time-consuming.)
-- Each actor sees adjacent tiles, even when blind, so adjacent tiles are
-- known, so the actor can decide accurately whether to pass through
-- or alter, etc.
--
-- Items are always real and visible. Actors are real, but can be invisible.
-- Invisible actors in walls can't be hit, but are hinted at when altering
-- the tile, so the player can flee or block. Invisible actors in open
-- space can be hit.
module Game.LambdaHack.Common.Perception
( Perception(Perception), PerceptionVisible(PerceptionVisible)
, totalVisible, smellVisible
, nullPer, addPer, diffPer
, FactionPers, Pers
) where
import Data.Binary
import qualified Data.EnumMap.Strict as EM
import qualified Data.EnumSet as ES
import GHC.Generics (Generic)
import Game.LambdaHack.Common.Faction
import Game.LambdaHack.Common.Level
import Game.LambdaHack.Common.Point
newtype PerceptionVisible = PerceptionVisible
{pvisible :: ES.EnumSet Point}
deriving (Show, Eq, Binary)
-- TODO: if really needed, optimize by representing as a set of intervals
-- or a set of bitmaps, like the internal representation of IntSet.
-- | The type representing the perception of a faction on a level.
data Perception = Perception
{ ptotal :: !PerceptionVisible -- ^ sum over all actors
, psmell :: !PerceptionVisible -- ^ sum over actors that can smell
}
deriving (Show, Eq, Generic)
instance Binary Perception
-- | Perception of a single faction, indexed by level identifier.
type FactionPers = EM.EnumMap LevelId Perception
-- | Perception indexed by faction identifier.
-- This can't be added to @FactionDict@, because clients can't see it.
type Pers = EM.EnumMap FactionId FactionPers
-- | The set of tiles visible by at least one hero.
totalVisible :: Perception -> ES.EnumSet Point
totalVisible = pvisible . ptotal
-- | The set of tiles smelled by at least one hero.
smellVisible :: Perception -> ES.EnumSet Point
smellVisible = pvisible . psmell
nullPer :: Perception -> Bool
nullPer per = ES.null (totalVisible per) && ES.null (smellVisible per)
addPer :: Perception -> Perception -> Perception
addPer per1 per2 =
Perception
{ ptotal = PerceptionVisible
$ totalVisible per1 `ES.union` totalVisible per2
, psmell = PerceptionVisible
$ smellVisible per1 `ES.union` smellVisible per2
}
diffPer :: Perception -> Perception -> Perception
diffPer per1 per2 =
Perception
{ ptotal = PerceptionVisible
$ totalVisible per1 ES.\\ totalVisible per2
, psmell = PerceptionVisible
$ smellVisible per1 ES.\\ smellVisible per2
}
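-- A small property sketch (illustrative, not from the original module):
-- subtracting a perception from itself leaves nothing visible or smelled.
--
-- > nullPer (diffPer per per) == True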
| Concomitant/LambdaHack | Game/LambdaHack/Common/Perception.hs | bsd-3-clause | 3,209 | 0 | 9 | 599 | 461 | 272 | 189 | 52 | 1 |
-- |
-- Module : Crypto.Number.F2m
-- License : BSD-style
-- Maintainer : Danny Navarro <j@dannynavarro.net>
-- Stability : experimental
-- Portability : Good
--
-- This module provides basic arithmetic operations over F₂m. Performance is
-- not optimal and it doesn't provide protection against timing
-- attacks. The 'm' parameter is implicitly derived from the irreducible
-- polynomial where applicable.
module Crypto.Number.F2m
( BinaryPolynomial
, addF2m
, mulF2m
, squareF2m'
, squareF2m
, modF2m
, invF2m
, divF2m
) where
import Data.Bits (xor, shift, testBit, setBit)
import Data.List
import Crypto.Internal.Imports
import Crypto.Number.Basic
-- | Binary Polynomial represented by an integer
type BinaryPolynomial = Integer
-- | Addition over F₂m. This is just a synonym of 'xor'.
addF2m :: Integer
-> Integer
-> Integer
addF2m = xor
{-# INLINE addF2m #-}
-- | Reduction by modulo over F₂m.
--
-- This function is undefined for negative arguments, because their bit
-- representation is platform-dependent. Zero modulus is also prohibited.
modF2m :: BinaryPolynomial -- ^ Modulus
-> Integer
-> Integer
modF2m fx i
| fx < 0 || i < 0 = error "modF2m: negative numbers represent no binary polynomial"
| fx == 0 = error "modF2m: cannot divide by zero polynomial"
| fx == 1 = 0
| otherwise = go i
where
lfx = log2 fx
go n | s == 0 = n `addF2m` fx
| s < 0 = n
| otherwise = go $ n `addF2m` shift fx s
where s = log2 n - lfx
{-# INLINE modF2m #-}
-- | Multiplication over F₂m.
--
-- This function is undefined for negative arguments, because their bit
-- representation is platform-dependent. Zero modulus is also prohibited.
mulF2m :: BinaryPolynomial -- ^ Modulus
-> Integer
-> Integer
-> Integer
mulF2m fx n1 n2
| fx < 0
|| n1 < 0
|| n2 < 0 = error "mulF2m: negative numbers represent no binary polynomial"
| fx == 0 = error "mulF2m: cannot multiply modulo zero polynomial"
| otherwise = modF2m fx $ go (if n2 `mod` 2 == 1 then n1 else 0) (log2 n2)
where
go n s | s == 0 = n
| otherwise = if testBit n2 s
then go (n `addF2m` shift n1 s) (s - 1)
else go n (s - 1)
{-# INLINABLE mulF2m #-}
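-- Worked examples (illustrative, not part of the original module). Polynomials
-- are encoded as integers whose bits are the coefficients, e.g. 11 = 0b1011 =
-- x^3 + x + 1:
--
-- > addF2m 12 10   -- 6: coefficient-wise XOR
-- > modF2m 11 13   -- 6: (x^3+x^2+1) mod (x^3+x+1) = x^2+x
-- > mulF2m 11 6 7  -- 4: (x^2+x)*(x^2+x+1) mod (x^3+x+1) = x^2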
-- | Squaring over F₂m.
--
-- This function is undefined for negative arguments, because their bit
-- representation is platform-dependent. Zero modulus is also prohibited.
squareF2m :: BinaryPolynomial -- ^ Modulus
-> Integer
-> Integer
squareF2m fx = modF2m fx . squareF2m'
{-# INLINE squareF2m #-}
-- | Squaring over F₂m without reduction by modulo.
--
-- The implementation utilizes the fact that for binary polynomial S(x) we have
-- S(x)^2 = S(x^2). In other words, insert a zero bit between the bits of the argument: 1101 -> 1010001.
--
-- This function is undefined for negative arguments, because their bit
-- representation is platform-dependent.
squareF2m' :: Integer
-> Integer
squareF2m' n
| n < 0 = error "squareF2m': negative numbers represent no binary polynomial"
| otherwise = foldl' (\acc s -> if testBit n s then setBit acc (2 * s) else acc) 0 [0 .. log2 n]
{-# INLINE squareF2m' #-}
-- | Extended GCD algorithm for polynomials. For @a@ and @b@ returns @(g, u, v)@ such that @a * u + b * v == g@.
--
-- Reference: https://en.wikipedia.org/wiki/Polynomial_greatest_common_divisor#B.C3.A9zout.27s_identity_and_extended_GCD_algorithm
gcdF2m :: Integer
-> Integer
-> (Integer, Integer, Integer)
gcdF2m a b = go (a, b, 1, 0, 0, 1)
where
go (g, 0, u, _, v, _)
= (g, u, v)
go (r0, r1, s0, s1, t0, t1)
= go (r1, r0 `addF2m` shift r1 j, s1, s0 `addF2m` shift s1 j, t1, t0 `addF2m` shift t1 j)
where j = max 0 (log2 r0 - log2 r1)
-- | Modular inversion over F₂m.
-- If @n@ doesn't have an inverse, 'Nothing' is returned.
--
-- This function is undefined for negative arguments, because their bit
-- representation is platform-dependent. Zero modulus is also prohibited.
invF2m :: BinaryPolynomial -- ^ Modulus
-> Integer
-> Maybe Integer
invF2m fx n = if g == 1 then Just (modF2m fx u) else Nothing
where
(g, u, _) = gcdF2m n fx
{-# INLINABLE invF2m #-}
-- | Division over F₂m. If the dividend doesn't have an inverse it returns
-- 'Nothing'.
--
-- This function is undefined for negative arguments, because their bit
-- representation is platform-dependent. Zero modulus is also prohibited.
divF2m :: BinaryPolynomial -- ^ Modulus
-> Integer -- ^ Dividend
-> Integer -- ^ Divisor
-> Maybe Integer -- ^ Quotient
divF2m fx n1 n2 = mulF2m fx n1 <$> invF2m fx n2
{-# INLINE divF2m #-}
| tekul/cryptonite | Crypto/Number/F2m.hs | bsd-3-clause | 4,901 | 0 | 12 | 1,286 | 957 | 532 | 425 | 78 | 3 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="sr-CS">
<title>ToDo-List</title>
<maps>
<homeID>todo</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
| kingthorin/zap-extensions | addOns/todo/src/main/javahelp/help_sr_CS/helpset_sr_CS.hs | apache-2.0 | 955 | 77 | 67 | 155 | 408 | 207 | 201 | -1 | -1 |
module T14137 where
-- The point of this test is that we should inline 'thunk'
-- into j's RHS, and we can do so quite aggressively, even
-- when we aren't optimising. See the ticket.
--
-- It's not a big deal, because in the end FloatIn
-- does the same job, only later
f xs = let thunk = length xs
j = Just thunk
g 0 = j
g n = g (n-1)
in
g 7
| shlevy/ghc | testsuite/tests/simplCore/should_compile/T14137.hs | bsd-3-clause | 393 | 0 | 11 | 127 | 69 | 37 | 32 | 6 | 2 |
{-# LANGUAGE LambdaCase #-}
-----------------------------------------------------------------------------
-- |
-- Module : Language.Glambda.Parse
-- Copyright : (C) 2015 Richard Eisenberg
-- License : BSD-style (see LICENSE)
-- Maintainer : Richard Eisenberg (eir@cis.upenn.edu)
-- Stability : experimental
--
-- Parses tokens into the un-type-checked AST. "Parsing", in glambda,
-- also includes name resolution. This all might
-- conceivably be done in a later pass, but there doesn't seem to be
-- an incentive to do so.
--
----------------------------------------------------------------------------
module Language.Glambda.Parse (
parseStmtsG, parseStmts,
parseStmtG, parseExpG,
parseStmt, parseExp
) where
import Language.Glambda.Unchecked
import Language.Glambda.Statement
import Language.Glambda.Token
import Language.Glambda.Type
import Language.Glambda.Monad
import Language.Glambda.Util
import Text.Parsec.Prim as Parsec hiding ( parse )
import Text.Parsec.Pos
import Text.Parsec.Combinator
import Text.PrettyPrint.ANSI.Leijen hiding ( (<$>) )
import Data.List as List
import Control.Applicative
import Control.Arrow as Arrow ( left )
import Control.Monad.Reader
-- | Parse a sequence of semicolon-separated statements, aborting with
-- an error upon failure
parseStmtsG :: [LToken] -> GlamE [Statement]
parseStmtsG = eitherToGlamE . parseStmts
-- | Parse a sequence of semicolon-separated statements
parseStmts :: [LToken] -> Either String [Statement]
parseStmts = parse stmts
-- | Parse a 'Statement', aborting with an error upon failure
parseStmtG :: [LToken] -> GlamE Statement
parseStmtG = eitherToGlamE . parseStmt
-- | Parse a 'Statement'
parseStmt :: [LToken] -> Either String Statement
parseStmt = parse stmt
-- | Parse a 'UExp', aborting with an error upon failure
parseExpG :: [LToken] -> GlamE UExp
parseExpG = eitherToGlamE . parseExp
-- | Parse a 'UExp'
parseExp :: [LToken] -> Either String UExp
parseExp = parse expr
parse :: Parser a -> [LToken] -> Either String a
parse p tokens = Arrow.left show $
runReader (runParserT (p <* eof) () "" tokens) []
----------------------
-- Plumbing
-- the "state" is a list of bound names. Searching for a bound name in the list
-- gives you the correct de Bruijn index.
type Parser = ParsecT [LToken] () (Reader [String])
-- | Bind a name over an expression
bind :: String -> Parser a -> Parser a
bind bound_var thing_inside
= local (bound_var :) thing_inside
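-- For example (illustrative): while parsing the body of @\x:Int. \y:Int. x@,
-- the reader environment is @["y", "x"]@, so 'var' resolves @x@ via 'elemIndex'
-- to de Bruijn index 1 (and @y@ to index 0).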
-- | Parse the given nullary token
tok :: Token -> Parser ()
tok t = tokenPrim (render . pretty) next_pos (guard . (t ==) . unLoc)
-- | Parse the given unary token
tok' :: (Token -> Maybe thing) -> Parser thing
tok' matcher = tokenPrim (render . pretty) next_pos (matcher . unLoc)
-- | Parse one of a set of 'ArithOp's
arith_op :: [UArithOp] -> Parser UArithOp
arith_op ops = tokenPrim (render . pretty) next_pos
(\case L _ (ArithOp op) | op `elem` ops -> Just op
_ -> Nothing)
next_pos :: SourcePos -- ^ position of the current token
-> LToken -- ^ current token
-> [LToken] -- ^ remaining tokens
-> SourcePos -- ^ location of the next token
next_pos pos _ [] = pos
next_pos _ _ (L pos _ : _) = pos
--------------
-- Real work
stmts :: Parser [Statement]
stmts = stmt `sepEndBy` tok Semi
stmt :: Parser Statement
stmt = choice [ try $ NewGlobal <$> tok' unName <* tok Assign <*> expr
, BareExp <$> expr ]
expr :: Parser UExp
expr = choice [ lam
, cond
, int_exp `chainl1` bool_op ]
int_exp :: Parser UExp
int_exp = term `chainl1` add_op
term :: Parser UExp
term = apps `chainl1` mul_op
apps :: Parser UExp
apps = choice [ UFix <$ tok FixT <*> expr
, List.foldl1 UApp <$> some factor ]
factor :: Parser UExp
factor = choice [ between (tok LParen) (tok RParen) expr
, UIntE <$> tok' unInt
, UBoolE <$> tok' unBool
, var ]
lam :: Parser UExp
lam = do
tok Lambda
bound_var <- tok' unName
tok Colon
typ <- ty
tok Dot
e <- bind bound_var $ expr
return (ULam typ e)
cond :: Parser UExp
cond = UCond <$ tok If <*> expr <* tok Then <*> expr <* tok Else <*> expr
var :: Parser UExp
var = do
n <- tok' unName
m_index <- asks (elemIndex n)
case m_index of
Nothing -> return (UGlobal n)
Just i -> return (UVar i)
ty :: Parser Ty
ty = chainr1 arg_ty (Arr <$ tok Arrow)
arg_ty :: Parser Ty
arg_ty = choice [ between (tok LParen) (tok RParen) ty
, tycon ]
tycon :: Parser Ty
tycon = do
n <- tok' unName
case readTyCon n of
Nothing -> unexpected $ render $
text "type" <+> squotes (text n)
Just ty -> return ty
add_op, mul_op, bool_op :: Parser (UExp -> UExp -> UExp)
add_op = mk_op <$> arith_op [uPlus, uMinus]
mul_op = mk_op <$> arith_op [uTimes, uDivide, uMod]
bool_op = mk_op <$> arith_op [uLess, uLessE, uGreater, uGreaterE, uEquals]
mk_op :: UArithOp -> UExp -> UExp -> UExp
mk_op op = \e1 e2 -> UArith e1 op e2
| ajnsit/glambda | src/Language/Glambda/Parse.hs | bsd-3-clause | 5,116 | 0 | 13 | 1,188 | 1,420 | 763 | 657 | 109 | 2 |
{-# LANGUAGE OverloadedLists #-}
-- This will fail because type defaulting is not implemented yet.
main = print [1]
| forked-upstream-packages-for-ghcjs/ghc | testsuite/tests/overloadedlists/should_fail/overloadedlistsfail01.hs | bsd-3-clause | 129 | 0 | 6 | 24 | 14 | 8 | 6 | 2 | 1 |
module Network.SocketIO.Growler where
import Control.Monad.Trans (MonadIO)
import Control.Monad.Trans.Reader (ReaderT)
import Control.Monad.Trans.State.Strict (StateT)
import Network.SocketIO (initialize, RoutingTable, Socket)
import Web.Growler (GrowlerT, HandlerT, matchAny, literal)
import Network.EngineIO.Growler (growlerAPI)
initializeSocketIO :: MonadIO m => StateT RoutingTable (ReaderT Socket (HandlerT m)) a -> IO (HandlerT m ())
initializeSocketIO = initialize growlerAPI
socketIOHandler :: MonadIO m => HandlerT m () -> GrowlerT m ()
socketIOHandler = matchAny (literal "/socket.io")
| iand675/growler | extra/growler-socket-io/src/Network/SocketIO/Growler.hs | mit | 658 | 0 | 11 | 126 | 191 | 107 | 84 | 11 | 1 |
module Typing where
import qualified Data.Set as Set
import qualified Data.Map as Map
import Core
import Substitution
----------------------------------------------------------------------------
-- Unification
----------------------------------------------------------------------------
| robertkleffner/gruit | src/Typing.hs | mit | 293 | 0 | 4 | 27 | 29 | 22 | 7 | 5 | 0 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveGeneric #-}
module Models.MatchingResponse where
import GHC.Generics
import Data.Aeson (ToJSON)
newtype MatchingResponse = MatchingResponse { result :: String } deriving (Show, Generic)
instance ToJSON MatchingResponse
match :: MatchingResponse
match = MatchingResponse "match"
noMatch :: MatchingResponse
noMatch = MatchingResponse "no-match"
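-- With the generically derived instance, @encode match@ should produce JSON
-- along the lines of @{"result":"match"}@ (illustrative; the exact output
-- depends on the aeson version and options).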
| richardTowers/verify-sample-local-matching-services | haskell/Models/MatchingResponse.hs | mit | 404 | 0 | 6 | 51 | 80 | 47 | 33 | 11 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Main (main) where
import System.Process.Typed
import System.FilePath
import Data.Foldable
import System.Directory
import System.Environment (getArgs)
import Control.Exception
import Control.Monad
import qualified Data.Text as T
import qualified Data.Text.Lazy as TL
import Data.Text.Lazy.Encoding (decodeUtf8)
import Data.Yaml
import qualified Data.HashMap.Strict as HashMap
import qualified Data.Vector as V
import Data.Maybe
parseStackYaml :: IO ([String], [String])
parseStackYaml = do
val <- decodeFileEither "stack.yaml" >>= either throwIO return
let buildFirst = fromMaybe [] $ do
Object o1 <- Just val
Object o2 <- HashMap.lookup "x-stack-docker-image-build" o1
Array vals <- HashMap.lookup "build-first" o2
Just [T.unpack dep | String dep <- V.toList vals]
extraDeps = fromMaybe [] $ do
Object o <- Just val
Array vals <- HashMap.lookup "extra-deps" o
Just [T.unpack dep | String dep <- V.toList vals]
return (buildFirst, extraDeps)
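-- The stack.yaml shape this expects (an illustrative fragment; the package
-- names are made up):
--
-- > x-stack-docker-image-build:
-- >   build-first:
-- >     - foo-1.2.3
-- > extra-deps:
-- >   - bar-0.1.0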
stack :: [String] -> ProcessConfig () () ()
stack args = proc "stack" $ ["--no-install-ghc", "--system-ghc"] ++ args
runStack :: [String] -> IO ()
runStack = runProcess_ . stack
readStack :: [String] -> IO String
readStack = fmap (TL.unpack . decodeUtf8 . fst) . readProcess_ . stack
getDir :: String -> IO FilePath
getDir flag = do
dirRaw <- readStack ["path", flag]
return $ takeWhile (/= '\n') dirRaw
getDBDir :: String -> IO FilePath
getDBDir typ = getDir $ concat ["--", typ, "-pkg-db"]
getBinDir :: String -> IO FilePath
getBinDir typ = do
dir <- getDir $ concat ["--", typ, "-install-root"]
return $ dir </> "bin"
main :: IO ()
main = do
args <- getArgs
(buildFirst, deps) <- parseStackYaml
forM_ buildFirst $ \pkg -> do
putStrLn $ "Building " ++ pkg ++ " from build-first"
runStack $ "build" : words pkg ++ args
unless (null deps) $ do
putStrLn "Building extra-deps"
runStack $ "build" : deps ++ args
putStrLn "Performing build local"
runStack $ "build" : args
globaldb <- getDBDir "global"
forM_ (words "snapshot local") $ \typ -> do
bindir <- getBinDir typ
bindirexists <- doesDirectoryExist bindir
bincontents <- if bindirexists then getDirectoryContents bindir else return []
forM_ bincontents $ \file -> do
let fp = bindir </> file
exists <- doesFileExist fp
when exists $ do
putStrLn $ "Linking " ++ fp
runProcess_ $ proc "ln" [fp, "/usr/local/bin" </> file]
dbdir <- getDBDir typ
dbdirexists <- doesDirectoryExist dbdir
dbcontents <- if dbdirexists then getDirectoryContents dbdir else return []
forM_ dbcontents $ \file -> when (takeExtension file == ".conf") $ do
let fp = dbdir </> file
putStrLn $ "Registering: " ++ file
runStack
[ "exec"
, "--"
, "ghc-pkg"
, "register"
, fp
, "--package-db"
, globaldb
, "--force"
]
stackDir <- getAppUserDataDirectory "stack"
stackContents <- getDirectoryContents stackDir
let toKeep "." = True
toKeep ".." = True
toKeep "snapshots" = True
toKeep _ = False
forM_ (filter (not . toKeep) stackContents) $ \x ->
runProcess_ $ proc "rm" ["-rf", stackDir </> x]
| fpco/stack-docker-image-build | app/Main.hs | mit | 3,554 | 0 | 21 | 1,021 | 1,120 | 554 | 566 | 91 | 6 |
{-# LANGUAGE MultiParamTypeClasses #-}
module Reflex.Cocos2d.Types
( Mouse(Mouse)
, mouseCursorLocation
, mouseScroll
, mouseButton
, Touch(Touch)
, touchLocation
, Acceleration(Acceleration)
, accelerationVector
, accelerationTimestamp
, Outline(Outline)
, outlineColor
, outlineSize
, Shadow(Shadow)
, shadowColor
, shadowOffset
, shadowBlur
, Glow(Glow)
, glowColor
-- re-export cocos2d-hs types
, Size(..)
, size_width
, size_height
, Rect(..)
, rect_origin
, rect_size
) where
import Data.Default
import Diagrams (Point(..), P2, V2(..), V3(..))
import Control.Lens
import Data.Colour
import Data.Colour.Names
import Graphics.UI.Cocos2d (Decodable(..))
import Graphics.UI.Cocos2d.Event
import Graphics.UI.Cocos2d.Extra
data Mouse = Mouse
{ _mouseCursorLocation :: P2 Float
, _mouseScroll :: V2 Float
, _mouseButton :: Int
} deriving (Show, Eq)
mouseButton :: Lens' Mouse Int
mouseButton f (Mouse loc scroll but)
= fmap
(\ but' -> Mouse loc scroll but') (f but)
{-# INLINE mouseButton #-}
mouseCursorLocation :: Lens' Mouse (P2 Float)
mouseCursorLocation f (Mouse loc scroll but)
= fmap
(\ loc' -> Mouse loc' scroll but) (f loc)
{-# INLINE mouseCursorLocation #-}
mouseScroll :: Lens' Mouse (V2 Float)
mouseScroll f (Mouse loc scroll but)
= fmap
(\ scroll' -> Mouse loc scroll' but) (f scroll)
{-# INLINE mouseScroll #-}
instance Decodable EventMouse Mouse where
decode = decode . toEventMouseConst
instance Decodable EventMouseConst Mouse where
decode em = Mouse <$> (P <$> eventMouse_getLocation em)
<*> (V2 <$> eventMouse_getScrollX em <*> eventMouse_getScrollY em)
<*> eventMouse_getMouseButton em
data Touch = Touch
{ _touchLocation :: P2 Float
} deriving (Show, Eq)
touchLocation :: Lens' Touch (P2 Float)
touchLocation f (Touch loc) = fmap Touch (f loc)
{-# INLINE touchLocation #-}
instance Decodable EventTouch Touch where
decode = decode . toEventTouchConst
instance Decodable EventTouchConst Touch where
decode et = Touch . P <$> eventTouch_getLocation et
data Acceleration = Acceleration
{ _accelerationVector :: V3 Double
, _accelerationTimestamp :: Double
}
accelerationTimestamp :: Lens' Acceleration Double
accelerationTimestamp f (Acceleration vec ts)
= fmap
(\ ts' -> Acceleration vec ts') (f ts)
{-# INLINE accelerationTimestamp #-}
accelerationVector :: Lens' Acceleration (V3 Double)
accelerationVector f (Acceleration vec ts)
= fmap
(\ vec' -> Acceleration vec' ts) (f vec)
{-# INLINE accelerationVector #-}
instance Decodable EventAcceleration Acceleration where
decode = decode . toEventAccelerationConst
instance Decodable EventAccelerationConst Acceleration where
decode ea = Acceleration <$> (V3 <$> eventAcceleration_x_get ea
<*> eventAcceleration_y_get ea
<*> eventAcceleration_z_get ea)
<*> eventAcceleration_timestamp_get ea
-- Label
data Outline = Outline
{ _outlineColor :: AlphaColour Float
, _outlineSize :: Int
} deriving (Show, Eq)
outlineColor :: Lens' Outline (AlphaColour Float)
outlineColor f (Outline color size)
= fmap (\ color' -> Outline color' size) (f color)
{-# INLINE outlineColor #-}
outlineSize :: Lens' Outline Int
outlineSize f (Outline color size)
= fmap (\ size' -> Outline color size') (f size)
{-# INLINE outlineSize #-}
instance Default Outline where
def = Outline
{ _outlineColor = opaque white
, _outlineSize = 0
}
data Shadow = Shadow
{ _shadowColor :: AlphaColour Float
, _shadowOffset :: V2 Float
, _shadowBlur :: Int
} deriving (Show, Eq)
shadowBlur :: Lens' Shadow Int
shadowBlur f (Shadow color offset blur)
= fmap
(\ blur' -> Shadow color offset blur')
(f blur)
{-# INLINE shadowBlur #-}
shadowColor :: Lens' Shadow (AlphaColour Float)
shadowColor f (Shadow color offset blur)
= fmap
(\ color' -> Shadow color' offset blur)
(f color)
{-# INLINE shadowColor #-}
shadowOffset :: Lens' Shadow (V2 Float)
shadowOffset f (Shadow color offset blur)
= fmap
(\ offset' -> Shadow color offset' blur)
(f offset)
{-# INLINE shadowOffset #-}
instance Default Shadow where
def = Shadow
{ _shadowColor = white `withOpacity` 0.5
, _shadowOffset = 0
, _shadowBlur = 0
}
data Glow = Glow
{ _glowColor :: AlphaColour Float
} deriving (Show, Eq)
glowColor :: Lens' Glow (AlphaColour Float)
glowColor f (Glow color) = fmap Glow (f color)
{-# INLINE glowColor #-}
instance Default Glow where
def = Glow
{ _glowColor = white `withOpacity` 0.5
}
| lynnard/reflex-cocos2d | src/Reflex/Cocos2d/Types.hs | mit | 4,782 | 0 | 12 | 1,114 | 1,373 | 749 | 624 | 156 | 1 |
module GHCJS.DOM.SVGPathSegMovetoRel (
) where
| manyoo/ghcjs-dom | ghcjs-dom-webkit/src/GHCJS/DOM/SVGPathSegMovetoRel.hs | mit | 49 | 0 | 3 | 7 | 10 | 7 | 3 | 1 | 0 |
-- Copyright 2014 Alvaro J. Genial (http://alva.ro) -- see LICENSE.md for more.
module Text.Neat.Output.Haskell0 (output) where
import Data.Char (isSpace)
import Data.List (intercalate)
import Text.Neat.Template
import Text.Neat.Output
instance Output File where
output = file
file (File path (Block chunks)) = divide chunks where
divide [] = []
divide (c @ (Chunk l e) : rest) = element' e ++ divide rest
where element' (Text s) = location l ++ s
element' _ = chunk c
chunk (Chunk _ d @ (Define _ _)) = element d
chunk (Chunk l t @ (Text _)) = location l ++ element t
chunk (Chunk l e) = "(" ++ location l ++ element e ++ ")"
block (Block cs) = "(" ++ (nested $ case cs of
[c] -> "\n" ++ chunk c
_ -> "[]" ++ appendEach (fmap chunk cs)) ++ ")"
where appendEach = concatMap $ (++) ("\n++ ")
nested = intercalate "\n " . lines
function (Function _ n (Pattern _ p)) = n ++ p
value (Value _ v) = "(" ++ intercalate " $ " (reverse v) ++ ")"
pattern (Pattern l p) = location l ++ p
case' (Case p b) = pattern p ++ " -> " ++ block b
location (Location f l) = "{-# LINE " ++ show l ++ " " ++ show f ++ " #-}\n"
element (Output v) = "output " ++ value v
element (Comment b) = "{#" ++ output b ++ "#}"
element (Define f b) = function f ++ " = " ++ block b
element (Filter v b) = value v ++ " " ++ block b
element (For (Binding p v) b o) =
"let _l = list " ++ value v ++ " in if (not . null) _l"
++ "\n then _l >>= \\" ++ pattern p ++ " -> " ++ block b
++ "\n else " ++ maybe "[]" block o
element (If v b o) =
"if (not . zero) " ++ value v
++ "\n then " ++ block b
++ "\n else " ++ maybe "[]" block o
element (Switch v cs o) =
"case " ++ value v ++ " of "
++ (case' =<< cs)
++ maybe "" (("\n_ -> " ++) . block) o
element (With (Binding p v) b) =
"case " ++ value v ++ " of "
++ pattern p ++ " -> " ++ block b
element (Text s) = show . trimTrail $ s
trimTrail s | hasTrail = intercalate "\n" init'
| otherwise = s where
(lines', init', last') = (lines s, init lines', last lines')
hasTrail = length lines' > 1 && not (null last') && all isSpace last'
| ajg/neat | Text/Neat/Output/Haskell0.hs | mit | 2,164 | 5 | 15 | 573 | 993 | 488 | 505 | 49 | 3 |
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.HTMLOptionsCollection
(js_namedItem, namedItem, js_addBefore, addBefore, js_add, add,
js_remove, remove, js_setSelectedIndex, setSelectedIndex,
js_getSelectedIndex, getSelectedIndex, js_setLength, setLength,
js_getLength, getLength, HTMLOptionsCollection,
castToHTMLOptionsCollection, gTypeHTMLOptionsCollection)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSVal(..), JSString)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSVal(..), FromJSVal(..))
import GHCJS.Marshal.Pure (PToJSVal(..), PFromJSVal(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.JSFFI.Generated.Enums
foreign import javascript unsafe "$1[\"namedItem\"]($2)"
js_namedItem ::
HTMLOptionsCollection -> JSString -> IO (Nullable Node)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLOptionsCollection.namedItem Mozilla HTMLOptionsCollection.namedItem documentation>
namedItem ::
(MonadIO m, ToJSString name) =>
HTMLOptionsCollection -> name -> m (Maybe Node)
namedItem self name
= liftIO
(nullableToMaybe <$> (js_namedItem (self) (toJSString name)))
foreign import javascript unsafe "$1[\"add\"]($2, $3)" js_addBefore
::
HTMLOptionsCollection ->
Nullable HTMLElement -> Nullable HTMLElement -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLOptionsCollection.add Mozilla HTMLOptionsCollection.add documentation>
addBefore ::
(MonadIO m, IsHTMLElement element, IsHTMLElement before) =>
HTMLOptionsCollection -> Maybe element -> Maybe before -> m ()
addBefore self element before
= liftIO
(js_addBefore (self) (maybeToNullable (fmap toHTMLElement element))
(maybeToNullable (fmap toHTMLElement before)))
foreign import javascript unsafe "$1[\"add\"]($2, $3)" js_add ::
HTMLOptionsCollection -> Nullable HTMLElement -> Int -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLOptionsCollection.add Mozilla HTMLOptionsCollection.add documentation>
add ::
(MonadIO m, IsHTMLElement element) =>
HTMLOptionsCollection -> Maybe element -> Int -> m ()
add self element index
= liftIO
(js_add (self) (maybeToNullable (fmap toHTMLElement element))
index)
foreign import javascript unsafe "$1[\"remove\"]($2)" js_remove ::
HTMLOptionsCollection -> Word -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLOptionsCollection.remove Mozilla HTMLOptionsCollection.remove documentation>
remove :: (MonadIO m) => HTMLOptionsCollection -> Word -> m ()
remove self index = liftIO (js_remove (self) index)
foreign import javascript unsafe "$1[\"selectedIndex\"] = $2;"
js_setSelectedIndex :: HTMLOptionsCollection -> Int -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLOptionsCollection.selectedIndex Mozilla HTMLOptionsCollection.selectedIndex documentation>
setSelectedIndex ::
(MonadIO m) => HTMLOptionsCollection -> Int -> m ()
setSelectedIndex self val = liftIO (js_setSelectedIndex (self) val)
foreign import javascript unsafe "$1[\"selectedIndex\"]"
js_getSelectedIndex :: HTMLOptionsCollection -> IO Int
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLOptionsCollection.selectedIndex Mozilla HTMLOptionsCollection.selectedIndex documentation>
getSelectedIndex :: (MonadIO m) => HTMLOptionsCollection -> m Int
getSelectedIndex self = liftIO (js_getSelectedIndex (self))
foreign import javascript unsafe "$1[\"length\"] = $2;"
js_setLength :: HTMLOptionsCollection -> Word -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLOptionsCollection.length Mozilla HTMLOptionsCollection.length documentation>
setLength :: (MonadIO m) => HTMLOptionsCollection -> Word -> m ()
setLength self val = liftIO (js_setLength (self) val)
foreign import javascript unsafe "$1[\"length\"]" js_getLength ::
HTMLOptionsCollection -> IO Word
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLOptionsCollection.length Mozilla HTMLOptionsCollection.length documentation>
getLength :: (MonadIO m) => HTMLOptionsCollection -> m Word
getLength self = liftIO (js_getLength (self))
| manyoo/ghcjs-dom | ghcjs-dom-jsffi/src/GHCJS/DOM/JSFFI/Generated/HTMLOptionsCollection.hs | mit | 4,803 | 64 | 11 | 714 | 1,061 | 595 | 466 | 70 | 1 |
-- vim cheatsheet
-- Jumping words
* w / W -- (Word)
* e / E -- (End)
* b / B -- (Back)
* g + e / g + E
* $ -- (End)
* 0 -- (Pos1)
* [num]f + <char> -- (jump forward to num * char)
** ; -- (repeat)
** F + <char> -- (backwards)
* [num]t + <char> -- (jump forward till num * char)
** ; -- (repeat)
** T <char> -- (backwards)
--
| cirquit/Personal-Repository | Vim/vim-cheatsheet.hs | mit | 426 | 14 | 24 | 182 | 140 | 79 | 61 | -1 | -1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-
Copyright (C) 2006-8 John MacFarlane <jgm@berkeley.edu>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
{- |
Module : Text.Pandoc.Shared
Copyright : Copyright (C) 2006-8 John MacFarlane
License : GNU GPL, version 2 or above
Maintainer : John MacFarlane <jgm@berkeley.edu>
Stability : alpha
Portability : portable
Utility functions and definitions used by the various Pandoc modules.
-}
module Text.Pandoc.Shared (
-- * List processing
splitBy,
splitByIndices,
substitute,
-- * Text processing
backslashEscapes,
escapeStringUsing,
stripTrailingNewlines,
removeLeadingTrailingSpace,
removeLeadingSpace,
removeTrailingSpace,
stripFirstAndLast,
camelCaseToHyphenated,
toRomanNumeral,
wrapped,
wrapIfNeeded,
wrappedTeX,
wrapTeXIfNeeded,
BlockWrapper (..),
wrappedBlocksToDoc,
tabFilter,
-- * Parsing
(>>~),
anyLine,
many1Till,
notFollowedBy',
oneOfStrings,
spaceChar,
skipSpaces,
blankline,
blanklines,
enclosed,
stringAnyCase,
parseFromString,
lineClump,
charsInBalanced,
charsInBalanced',
romanNumeral,
emailAddress,
uri,
withHorizDisplacement,
nullBlock,
failIfStrict,
failUnlessLHS,
escaped,
anyOrderedListMarker,
orderedListMarker,
charRef,
readWith,
testStringWith,
ParserState (..),
defaultParserState,
HeaderType (..),
ParserContext (..),
QuoteContext (..),
NoteTable,
KeyTable,
lookupKeySrc,
refsMatch,
-- * Prettyprinting
hang',
prettyPandoc,
-- * Pandoc block and inline list processing
orderedListMarkers,
normalizeSpaces,
compactify,
Element (..),
hierarchicalize,
isHeaderBlock,
-- * Writer options
HTMLMathMethod (..),
ObfuscationMethod (..),
WriterOptions (..),
defaultWriterOptions,
-- * File handling
inDirectory,
readDataFile
) where
import Text.Pandoc.Definition
import Text.ParserCombinators.Parsec
import Text.PrettyPrint.HughesPJ ( Doc, fsep, ($$), (<>), empty, isEmpty, text, nest )
import qualified Text.PrettyPrint.HughesPJ as PP
import Text.Pandoc.CharacterReferences ( characterReference )
import Data.Char ( toLower, toUpper, ord, isLower, isUpper, isAlpha,
isPunctuation )
import Data.List ( find, isPrefixOf, intercalate )
import Network.URI ( parseURI, URI (..), isAllowedInURI )
import System.Directory
import System.FilePath ( (</>) )
-- Note: ghc >= 6.12 (base >=4.2) supports unicode through iconv
-- So we use System.IO.UTF8 only if we have an earlier version
#if MIN_VERSION_base(4,2,0)
#else
import Prelude hiding ( putStr, putStrLn, writeFile, readFile, getContents )
import System.IO.UTF8
#endif
import Data.Generics
import qualified Control.Monad.State as S
import Control.Monad (join)
import Paths_pandoc (getDataFileName)
--
-- List processing
--
-- | Split list by groups of one or more sep.
splitBy :: (Eq a) => a -> [a] -> [[a]]
splitBy _ [] = []
splitBy sep lst =
let (first, rest) = break (== sep) lst
rest' = dropWhile (== sep) rest
in first:(splitBy sep rest')
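-- For example (illustrative): @splitBy ',' "a,,b,c"@ yields @["a","b","c"]@;
-- runs of separators are collapsed, so no empty groups are produced.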
-- | Split list into chunks divided at specified indices.
splitByIndices :: [Int] -> [a] -> [[a]]
splitByIndices [] lst = [lst]
splitByIndices (x:xs) lst =
let (first, rest) = splitAt x lst in
first:(splitByIndices (map (\y -> y - x) xs) rest)
-- | Replace each occurrence of one sublist in a list with another.
substitute :: (Eq a) => [a] -> [a] -> [a] -> [a]
substitute _ _ [] = []
substitute [] _ lst = lst
substitute target replacement lst =
if target `isPrefixOf` lst
then replacement ++ (substitute target replacement $ drop (length target) lst)
else (head lst):(substitute target replacement $ tail lst)
--
-- Text processing
--
-- | Returns an association list of backslash escapes for the
-- designated characters.
backslashEscapes :: [Char] -- ^ list of special characters to escape
-> [(Char, String)]
backslashEscapes = map (\ch -> (ch, ['\\',ch]))
-- | Escape a string of characters, using an association list of
-- characters and strings.
escapeStringUsing :: [(Char, String)] -> String -> String
escapeStringUsing _ [] = ""
escapeStringUsing escapeTable (x:xs) =
case (lookup x escapeTable) of
Just str -> str ++ rest
Nothing -> x:rest
where rest = escapeStringUsing escapeTable xs
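-- For example (illustrative): @escapeStringUsing (backslashEscapes "#_") "a_b#c"@
-- yields @"a\\_b\\#c"@.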
-- | Strip trailing newlines from string.
stripTrailingNewlines :: String -> String
stripTrailingNewlines = reverse . dropWhile (== '\n') . reverse
-- | Remove leading and trailing space (including newlines) from string.
removeLeadingTrailingSpace :: String -> String
removeLeadingTrailingSpace = removeLeadingSpace . removeTrailingSpace
-- | Remove leading space (including newlines) from string.
removeLeadingSpace :: String -> String
removeLeadingSpace = dropWhile (`elem` " \n\t")
-- | Remove trailing space (including newlines) from string.
removeTrailingSpace :: String -> String
removeTrailingSpace = reverse . removeLeadingSpace . reverse
-- | Strip leading and trailing characters from string
stripFirstAndLast :: String -> String
stripFirstAndLast str =
drop 1 $ take ((length str) - 1) str
-- | Change CamelCase word to hyphenated lowercase (e.g., camel-case).
camelCaseToHyphenated :: String -> String
camelCaseToHyphenated [] = ""
camelCaseToHyphenated (a:b:rest) | isLower a && isUpper b =
a:'-':(toLower b):(camelCaseToHyphenated rest)
camelCaseToHyphenated (a:rest) = (toLower a):(camelCaseToHyphenated rest)
-- | Convert number < 4000 to uppercase roman numeral.
toRomanNumeral :: Int -> String
toRomanNumeral x =
if x >= 4000 || x < 0
then "?"
else case x of
_ | x >= 1000 -> "M" ++ toRomanNumeral (x - 1000)
_ | x >= 900 -> "CM" ++ toRomanNumeral (x - 900)
_ | x >= 500 -> "D" ++ toRomanNumeral (x - 500)
_ | x >= 400 -> "CD" ++ toRomanNumeral (x - 400)
_ | x >= 100 -> "C" ++ toRomanNumeral (x - 100)
_ | x >= 90 -> "XC" ++ toRomanNumeral (x - 90)
_ | x >= 50 -> "L" ++ toRomanNumeral (x - 50)
_ | x >= 40 -> "XL" ++ toRomanNumeral (x - 40)
_ | x >= 10 -> "X" ++ toRomanNumeral (x - 10)
_ | x >= 9 -> "IX" ++ toRomanNumeral (x - 9)
_ | x >= 5 -> "V" ++ toRomanNumeral (x - 5)
_ | x >= 4 -> "IV" ++ toRomanNumeral (x - 4)
_ | x >= 1 -> "I" ++ toRomanNumeral (x - 1)
_ -> ""
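-- For example (illustrative): @toRomanNumeral 1984@ yields @"MCMLXXXIV"@;
-- out-of-range inputs yield @"?"@.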
-- | Wrap inlines to line length.
wrapped :: Monad m => ([Inline] -> m Doc) -> [Inline] -> m Doc
wrapped listWriter sect = (mapM listWriter $ splitBy Space sect) >>=
return . fsep
-- | Wrap inlines if the text wrap option is selected.
wrapIfNeeded :: Monad m => WriterOptions -> ([Inline] -> m Doc) ->
[Inline] -> m Doc
wrapIfNeeded opts = if writerWrapText opts
then wrapped
else ($)
-- auxiliary function for wrappedTeX
isNote :: Inline -> Bool
isNote (Note _) = True
isNote _ = False
-- | Wrap inlines to line length, treating footnotes in a way that
-- makes sense in LaTeX and ConTeXt.
wrappedTeX :: Monad m
=> Bool
-> ([Inline] -> m Doc)
-> [Inline]
-> m Doc
wrappedTeX includePercent listWriter sect = do
let (firstpart, rest) = break isNote sect
firstpartWrapped <- wrapped listWriter firstpart
if null rest
then return firstpartWrapped
else do let (note:rest') = rest
let (rest1, rest2) = break (== Space) rest'
-- rest1 is whatever comes between the note and a Space.
-- if the note is followed directly by a Space, rest1 is null.
-- rest1 is printed after the note but before the line break,
-- to avoid spurious blank space between the note and immediately
-- following punctuation.
rest1Out <- if null rest1
then return empty
else listWriter rest1
rest2Wrapped <- if null rest2
then return empty
else wrappedTeX includePercent listWriter (tail rest2)
noteText <- listWriter [note]
return $ (firstpartWrapped <> if includePercent then PP.char '%' else empty) $$
(noteText <> rest1Out) $$
rest2Wrapped
-- | Wrap inlines if the text wrap option is selected, specialized
-- for LaTeX and ConTeXt.
wrapTeXIfNeeded :: Monad m
=> WriterOptions
-> Bool
-> ([Inline] -> m Doc)
-> [Inline]
-> m Doc
wrapTeXIfNeeded opts includePercent = if writerWrapText opts
then wrappedTeX includePercent
else ($)
-- | Indicates whether block should be surrounded by blank lines (@Pad@) or not (@Reg@).
data BlockWrapper = Pad Doc | Reg Doc
-- | Converts a list of wrapped blocks to a Doc, with appropriate spaces around blocks.
wrappedBlocksToDoc :: [BlockWrapper] -> Doc
wrappedBlocksToDoc = foldr addBlock empty
where addBlock (Pad d) accum | isEmpty accum = d
addBlock (Pad d) accum = d $$ text "" $$ accum
addBlock (Reg d) accum = d $$ accum
-- | Convert tabs to spaces and filter out DOS line endings.
-- Tabs will be preserved if tab stop is set to 0.
tabFilter :: Int -- ^ Tab stop
-> String -- ^ Input
-> String
tabFilter tabStop =
let go _ [] = ""
go _ ('\n':xs) = '\n' : go tabStop xs
go _ ('\r':'\n':xs) = '\n' : go tabStop xs
go _ ('\r':xs) = '\n' : go tabStop xs
go spsToNextStop ('\t':xs) =
if tabStop == 0
then '\t' : go tabStop xs
else replicate spsToNextStop ' ' ++ go tabStop xs
go 1 (x:xs) =
x : go tabStop xs
go spsToNextStop (x:xs) =
x : go (spsToNextStop - 1) xs
in go tabStop
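-- For example (illustrative): with a tab stop of 4, @tabFilter 4 "a\tb\r\n"@
-- yields @"a   b\n"@; with a tab stop of 0 the tab is preserved.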
--
-- Parsing
--
-- | Like >>, but returns the operation on the left.
-- (Suggested by Tillmann Rendel on Haskell-cafe list.)
(>>~) :: (Monad m) => m a -> m b -> m a
a >>~ b = a >>= \x -> b >> return x
-- | Parse any line of text
anyLine :: GenParser Char st [Char]
anyLine = manyTill anyChar newline
-- | Like @manyTill@, but reads at least one item.
many1Till :: GenParser tok st a
-> GenParser tok st end
-> GenParser tok st [a]
many1Till p end = do
first <- p
rest <- manyTill p end
return (first:rest)
-- | A more general form of @notFollowedBy@. This one allows any
-- type of parser to be specified, and succeeds only if that parser fails.
-- It does not consume any input.
notFollowedBy' :: Show b => GenParser a st b -> GenParser a st ()
notFollowedBy' p = try $ join $ do a <- try p
return (unexpected (show a))
<|>
return (return ())
-- (This version due to Andrew Pimlott on the Haskell mailing list.)
-- | Parses one of a list of strings (tried in order).
oneOfStrings :: [String] -> GenParser Char st String
oneOfStrings listOfStrings = choice $ map (try . string) listOfStrings
-- | Parses a space or tab.
spaceChar :: CharParser st Char
spaceChar = char ' ' <|> char '\t'
-- | Skips zero or more spaces or tabs.
skipSpaces :: GenParser Char st ()
skipSpaces = skipMany spaceChar
-- | Skips zero or more spaces or tabs, then reads a newline.
blankline :: GenParser Char st Char
blankline = try $ skipSpaces >> newline
-- | Parses one or more blank lines and returns a string of newlines.
blanklines :: GenParser Char st [Char]
blanklines = many1 blankline
-- | Parses material enclosed between start and end parsers.
enclosed :: GenParser Char st t -- ^ start parser
-> GenParser Char st end -- ^ end parser
-> GenParser Char st a -- ^ content parser (to be used repeatedly)
-> GenParser Char st [a]
enclosed start end parser = try $
start >> notFollowedBy space >> many1Till parser end
-- | Parse string, case insensitive.
stringAnyCase :: [Char] -> CharParser st String
stringAnyCase [] = string ""
stringAnyCase (x:xs) = do
firstChar <- char (toUpper x) <|> char (toLower x)
rest <- stringAnyCase xs
return (firstChar:rest)
-- | Parse contents of 'str' using 'parser' and return result.
parseFromString :: GenParser tok st a -> [tok] -> GenParser tok st a
parseFromString parser str = do
oldPos <- getPosition
oldInput <- getInput
setInput str
result <- parser
setInput oldInput
setPosition oldPos
return result
-- | Parse raw line block up to and including blank lines.
lineClump :: GenParser Char st String
lineClump = blanklines
<|> (many1 (notFollowedBy blankline >> anyLine) >>= return . unlines)
-- | Parse a string of characters between an open character
-- and a close character, including text between balanced
-- pairs of open and close, which must be different. For example,
-- @charsInBalanced '(' ')'@ will parse "(hello (there))"
-- and return "hello (there)". Stop if a blank line is
-- encountered.
charsInBalanced :: Char -> Char -> GenParser Char st String
charsInBalanced open close = try $ do
char open
raw <- many $ (many1 (noneOf [open, close, '\n']))
<|> (do res <- charsInBalanced open close
return $ [open] ++ res ++ [close])
<|> try (string "\n" >>~ notFollowedBy' blanklines)
char close
return $ concat raw
-- | Like @charsInBalanced@, but allow blank lines in the content.
charsInBalanced' :: Char -> Char -> GenParser Char st String
charsInBalanced' open close = try $ do
char open
raw <- many $ (many1 (noneOf [open, close]))
<|> (do res <- charsInBalanced' open close
return $ [open] ++ res ++ [close])
char close
return $ concat raw
-- Auxiliary functions for romanNumeral:
lowercaseRomanDigits :: [Char]
lowercaseRomanDigits = ['i','v','x','l','c','d','m']
uppercaseRomanDigits :: [Char]
uppercaseRomanDigits = map toUpper lowercaseRomanDigits
-- | Parses a roman numeral (uppercase or lowercase), returns number.
romanNumeral :: Bool -- ^ Uppercase if true
-> GenParser Char st Int
romanNumeral upperCase = do
let romanDigits = if upperCase
then uppercaseRomanDigits
else lowercaseRomanDigits
lookAhead $ oneOf romanDigits
let [one, five, ten, fifty, hundred, fivehundred, thousand] =
map char romanDigits
thousands <- many thousand >>= (return . (1000 *) . length)
ninehundreds <- option 0 $ try $ hundred >> thousand >> return 900
fivehundreds <- many fivehundred >>= (return . (500 *) . length)
fourhundreds <- option 0 $ try $ hundred >> fivehundred >> return 400
hundreds <- many hundred >>= (return . (100 *) . length)
nineties <- option 0 $ try $ ten >> hundred >> return 90
fifties <- many fifty >>= (return . (50 *) . length)
forties <- option 0 $ try $ ten >> fifty >> return 40
tens <- many ten >>= (return . (10 *) . length)
nines <- option 0 $ try $ one >> ten >> return 9
fives <- many five >>= (return . (5 *) . length)
fours <- option 0 $ try $ one >> five >> return 4
ones <- many one >>= (return . length)
let total = thousands + ninehundreds + fivehundreds + fourhundreds +
hundreds + nineties + fifties + forties + tens + nines +
fives + fours + ones
if total == 0
then fail "not a roman numeral"
else return total
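-- Illustrative use of the 'romanNumeral' parser via 'readWith' and
-- 'defaultParserState' (both defined later in this module); the input is
-- an assumed example value:
_romanNumeralParserExample :: Bool
_romanNumeralParserExample =
  readWith (romanNumeral True) defaultParserState "MMXIV" == 2014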
-- Parsers for email addresses and URIs
emailChar :: GenParser Char st Char
emailChar = alphaNum <|> oneOf "-+_."
domainChar :: GenParser Char st Char
domainChar = alphaNum <|> char '-'
domain :: GenParser Char st [Char]
domain = do
first <- many1 domainChar
dom <- many1 $ try (char '.' >> many1 domainChar )
return $ intercalate "." (first:dom)
-- | Parses an email address; returns string.
emailAddress :: GenParser Char st [Char]
emailAddress = try $ do
firstLetter <- alphaNum
restAddr <- many emailChar
let addr = firstLetter:restAddr
char '@'
dom <- domain
return $ addr ++ '@':dom
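-- Hypothetical example of 'emailAddress' (the address is made up):
_emailAddressExample :: Bool
_emailAddressExample =
  readWith emailAddress defaultParserState "user@example.com" == "user@example.com"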
-- | Parses a URI.
uri :: GenParser Char st String
uri = try $ do
str <- many1 $ satisfy isAllowedInURI
case parseURI str of
Just uri' -> if uriScheme uri' `elem` [ "http:", "https:", "ftp:",
"file:", "mailto:",
"news:", "telnet:" ]
then return $ show uri'
else fail "not a URI"
Nothing -> fail "not a URI"
-- | Applies a parser, returns tuple of its results and its horizontal
-- displacement (the difference between the source column at the end
-- and the source column at the beginning). Vertical displacement
-- (source row) is ignored.
withHorizDisplacement :: GenParser Char st a -- ^ Parser to apply
-> GenParser Char st (a, Int) -- ^ (result, displacement)
withHorizDisplacement parser = do
pos1 <- getPosition
result <- parser
pos2 <- getPosition
return (result, sourceColumn pos2 - sourceColumn pos1)
-- | Parses a character and returns 'Null' (so that the parser can move on
-- if it gets stuck).
nullBlock :: GenParser Char st Block
nullBlock = anyChar >> return Null
-- | Fail if reader is in strict markdown syntax mode.
failIfStrict :: GenParser Char ParserState ()
failIfStrict = do
state <- getState
if stateStrict state then fail "strict mode" else return ()
-- | Fail unless we're in literate haskell mode.
failUnlessLHS :: GenParser tok ParserState ()
failUnlessLHS = do
state <- getState
if stateLiterateHaskell state then return () else fail "Literate haskell feature"
-- | Parses backslash, then applies character parser.
escaped :: GenParser Char st Char -- ^ Parser for character to escape
-> GenParser Char st Inline
escaped parser = try $ do
char '\\'
result <- parser
return (Str [result])
-- | Parses an uppercase roman numeral and returns (UpperRoman, number).
upperRoman :: GenParser Char st (ListNumberStyle, Int)
upperRoman = do
num <- romanNumeral True
return (UpperRoman, num)
-- | Parses a lowercase roman numeral and returns (LowerRoman, number).
lowerRoman :: GenParser Char st (ListNumberStyle, Int)
lowerRoman = do
num <- romanNumeral False
return (LowerRoman, num)
-- | Parses a decimal numeral and returns (Decimal, number).
decimal :: GenParser Char st (ListNumberStyle, Int)
decimal = do
num <- many1 digit
return (Decimal, read num)
-- | Parses a '#' and returns (DefaultStyle, 1).
defaultNum :: GenParser Char st (ListNumberStyle, Int)
defaultNum = do
char '#'
return (DefaultStyle, 1)
-- | Parses a lowercase letter and returns (LowerAlpha, number).
lowerAlpha :: GenParser Char st (ListNumberStyle, Int)
lowerAlpha = do
ch <- oneOf ['a'..'z']
return (LowerAlpha, ord ch - ord 'a' + 1)
-- | Parses an uppercase letter and returns (UpperAlpha, number).
upperAlpha :: GenParser Char st (ListNumberStyle, Int)
upperAlpha = do
ch <- oneOf ['A'..'Z']
return (UpperAlpha, ord ch - ord 'A' + 1)
-- | Parses a roman numeral i or I
romanOne :: GenParser Char st (ListNumberStyle, Int)
romanOne = (char 'i' >> return (LowerRoman, 1)) <|>
(char 'I' >> return (UpperRoman, 1))
-- | Parses an ordered list marker and returns list attributes.
anyOrderedListMarker :: GenParser Char st ListAttributes
anyOrderedListMarker = choice $
[delimParser numParser | delimParser <- [inPeriod, inOneParen, inTwoParens],
numParser <- [decimal, defaultNum, romanOne,
lowerAlpha, lowerRoman, upperAlpha, upperRoman]]
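-- Sketch of 'anyOrderedListMarker' on two assumed inputs (this relies on
-- the usual Eq instances for ListNumberStyle and ListNumberDelim):
_anyOrderedListMarkerExamples :: Bool
_anyOrderedListMarkerExamples =
  readWith anyOrderedListMarker defaultParserState "3)"  == (3, Decimal, OneParen) &&
  readWith anyOrderedListMarker defaultParserState "ii." == (2, LowerRoman, Period)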
-- | Parses a list number (num) followed by a period, returns list attributes.
inPeriod :: GenParser Char st (ListNumberStyle, Int)
-> GenParser Char st ListAttributes
inPeriod num = try $ do
(style, start) <- num
char '.'
let delim = if style == DefaultStyle
then DefaultDelim
else Period
return (start, style, delim)
-- | Parses a list number (num) followed by a paren, returns list attributes.
inOneParen :: GenParser Char st (ListNumberStyle, Int)
-> GenParser Char st ListAttributes
inOneParen num = try $ do
(style, start) <- num
char ')'
return (start, style, OneParen)
-- | Parses a list number (num) enclosed in parens, returns list attributes.
inTwoParens :: GenParser Char st (ListNumberStyle, Int)
-> GenParser Char st ListAttributes
inTwoParens num = try $ do
char '('
(style, start) <- num
char ')'
return (start, style, TwoParens)
-- | Parses an ordered list marker with a given style and delimiter,
-- returns number.
orderedListMarker :: ListNumberStyle
-> ListNumberDelim
-> GenParser Char st Int
orderedListMarker style delim = do
let num = defaultNum <|> -- # can continue any kind of list
case style of
DefaultStyle -> decimal
Decimal -> decimal
UpperRoman -> upperRoman
LowerRoman -> lowerRoman
UpperAlpha -> upperAlpha
LowerAlpha -> lowerAlpha
let context = case delim of
DefaultDelim -> inPeriod
Period -> inPeriod
OneParen -> inOneParen
TwoParens -> inTwoParens
(start, _, _) <- context num
return start
-- | Parses a character reference and returns a Str element.
charRef :: GenParser Char st Inline
charRef = do
c <- characterReference
return $ Str [c]
-- | Parse a string with a given parser and state.
readWith :: GenParser Char ParserState a -- ^ parser
-> ParserState -- ^ initial state
-> String -- ^ input string
-> a
readWith parser state input =
case runParser parser state "source" input of
Left err -> error $ "\nError:\n" ++ show err
Right result -> result
-- | Parse a string with @parser@ (for testing).
testStringWith :: (Show a) => GenParser Char ParserState a
-> String
-> IO ()
testStringWith parser str = putStrLn $ show $
readWith parser defaultParserState str
-- | Parsing options.
data ParserState = ParserState
{ stateParseRaw :: Bool, -- ^ Parse raw HTML and LaTeX?
stateParserContext :: ParserContext, -- ^ Inside list?
stateQuoteContext :: QuoteContext, -- ^ Inside quoted environment?
stateSanitizeHTML :: Bool, -- ^ Sanitize HTML?
stateKeys :: KeyTable, -- ^ List of reference keys
#ifdef _CITEPROC
stateCitations :: [String], -- ^ List of available citations
#endif
stateNotes :: NoteTable, -- ^ List of notes
stateTabStop :: Int, -- ^ Tab stop
stateStandalone :: Bool, -- ^ Parse bibliographic info?
stateTitle :: [Inline], -- ^ Title of document
stateAuthors :: [[Inline]], -- ^ Authors of document
stateDate :: [Inline], -- ^ Date of document
stateStrict :: Bool, -- ^ Use strict markdown syntax?
stateSmart :: Bool, -- ^ Use smart typography?
stateLiterateHaskell :: Bool, -- ^ Treat input as literate haskell
stateColumns :: Int, -- ^ Number of columns in terminal
stateHeaderTable :: [HeaderType], -- ^ Ordered list of header types used
stateIndentedCodeClasses :: [String] -- ^ Classes to use for indented code blocks
}
deriving Show
defaultParserState :: ParserState
defaultParserState =
ParserState { stateParseRaw = False,
stateParserContext = NullState,
stateQuoteContext = NoQuote,
stateSanitizeHTML = False,
stateKeys = [],
#ifdef _CITEPROC
stateCitations = [],
#endif
stateNotes = [],
stateTabStop = 4,
stateStandalone = False,
stateTitle = [],
stateAuthors = [],
stateDate = [],
stateStrict = False,
stateSmart = False,
stateLiterateHaskell = False,
stateColumns = 80,
stateHeaderTable = [],
stateIndentedCodeClasses = [] }
data HeaderType
= SingleHeader Char -- ^ Single line of characters underneath
| DoubleHeader Char -- ^ Lines of characters above and below
deriving (Eq, Show)
data ParserContext
= ListItemState -- ^ Used when running parser on list item contents
| NullState -- ^ Default state
deriving (Eq, Show)
data QuoteContext
= InSingleQuote -- ^ Used when parsing inside single quotes
| InDoubleQuote -- ^ Used when parsing inside double quotes
| NoQuote -- ^ Used when not parsing inside quotes
deriving (Eq, Show)
type NoteTable = [(String, String)]
type KeyTable = [([Inline], Target)]
-- | Look up key in key table and return target object.
lookupKeySrc :: KeyTable -- ^ Key table
-> [Inline] -- ^ Key
-> Maybe Target
lookupKeySrc table key = case find (refsMatch key . fst) table of
Nothing -> Nothing
Just (_, src) -> Just src
-- | Returns @True@ if keys match (case insensitive).
refsMatch :: [Inline] -> [Inline] -> Bool
refsMatch ((Str x):restx) ((Str y):resty) =
((map toLower x) == (map toLower y)) && refsMatch restx resty
refsMatch ((Emph x):restx) ((Emph y):resty) =
refsMatch x y && refsMatch restx resty
refsMatch ((Strong x):restx) ((Strong y):resty) =
refsMatch x y && refsMatch restx resty
refsMatch ((Strikeout x):restx) ((Strikeout y):resty) =
refsMatch x y && refsMatch restx resty
refsMatch ((Superscript x):restx) ((Superscript y):resty) =
refsMatch x y && refsMatch restx resty
refsMatch ((Subscript x):restx) ((Subscript y):resty) =
refsMatch x y && refsMatch restx resty
refsMatch ((SmallCaps x):restx) ((SmallCaps y):resty) =
refsMatch x y && refsMatch restx resty
refsMatch ((Quoted t x):restx) ((Quoted u y):resty) =
t == u && refsMatch x y && refsMatch restx resty
refsMatch ((Code x):restx) ((Code y):resty) =
((map toLower x) == (map toLower y)) && refsMatch restx resty
refsMatch ((Math t x):restx) ((Math u y):resty) =
((map toLower x) == (map toLower y)) && t == u && refsMatch restx resty
refsMatch ((TeX x):restx) ((TeX y):resty) =
((map toLower x) == (map toLower y)) && refsMatch restx resty
refsMatch ((HtmlInline x):restx) ((HtmlInline y):resty) =
((map toLower x) == (map toLower y)) && refsMatch restx resty
refsMatch (x:restx) (y:resty) = (x == y) && refsMatch restx resty
refsMatch [] x = null x
refsMatch x [] = null x
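-- Illustration of the case-insensitive matching (assumed example values):
_refsMatchExample :: Bool
_refsMatchExample = refsMatch [Str "Foo", Space, Str "Bar"] [Str "foo", Space, Str "bar"]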
--
-- Prettyprinting
--
-- | A version of hang that works like the version in pretty-1.0.0.0
hang' :: Doc -> Int -> Doc -> Doc
hang' d1 n d2 = d1 $$ (nest n d2)
-- | Indent string as a block.
indentBy :: Int -- ^ Number of spaces to indent the block
-> Int -- ^ Number of spaces (rel to block) to indent first line
-> String -- ^ Contents of block to indent
-> String
indentBy _ _ [] = ""
indentBy num first str =
let (firstLine:restLines) = lines str
firstLineIndent = num + first
in (replicate firstLineIndent ' ') ++ firstLine ++ "\n" ++
(intercalate "\n" $ map ((replicate num ' ') ++ ) restLines)
-- | Prettyprint list of Pandoc blocks elements.
prettyBlockList :: Int -- ^ Number of spaces to indent list of blocks
-> [Block] -- ^ List of blocks
-> String
prettyBlockList indent [] = indentBy indent 0 "[]"
prettyBlockList indent blocks = indentBy indent (-2) $ "[ " ++
(intercalate "\n, " (map prettyBlock blocks)) ++ " ]"
-- | Prettyprint Pandoc block element.
prettyBlock :: Block -> String
prettyBlock (BlockQuote blocks) = "BlockQuote\n " ++
(prettyBlockList 2 blocks)
prettyBlock (OrderedList attribs blockLists) =
"OrderedList " ++ show attribs ++ "\n" ++ indentBy 2 0 ("[ " ++
(intercalate ", " $ map (\blocks -> prettyBlockList 2 blocks)
blockLists)) ++ " ]"
prettyBlock (BulletList blockLists) = "BulletList\n" ++
indentBy 2 0 ("[ " ++ (intercalate ", "
(map (\blocks -> prettyBlockList 2 blocks) blockLists))) ++ " ]"
prettyBlock (DefinitionList items) = "DefinitionList\n" ++
indentBy 2 0 ("[ " ++ (intercalate "\n, "
(map (\(term, defs) -> "(" ++ show term ++ ",\n" ++
indentBy 3 0 ("[ " ++ (intercalate ", "
(map (\blocks -> prettyBlockList 2 blocks) defs)) ++ "]") ++
")") items))) ++ " ]"
prettyBlock (Table caption aligns widths header rows) =
"Table " ++ show caption ++ " " ++ show aligns ++ " " ++
show widths ++ "\n" ++ prettyRow header ++ " [\n" ++
(intercalate ",\n" (map prettyRow rows)) ++ " ]"
where prettyRow cols = indentBy 2 0 ("[ " ++ (intercalate ", "
(map (\blocks -> prettyBlockList 2 blocks)
cols))) ++ " ]"
prettyBlock block = show block
-- | Prettyprint Pandoc document.
prettyPandoc :: Pandoc -> String
prettyPandoc (Pandoc meta blocks) = "Pandoc " ++ "(" ++ show meta ++
")\n" ++ (prettyBlockList 0 blocks) ++ "\n"
--
-- Pandoc block and inline list processing
--
-- | Generate infinite lazy list of markers for an ordered list,
-- depending on list attributes.
orderedListMarkers :: (Int, ListNumberStyle, ListNumberDelim) -> [String]
orderedListMarkers (start, numstyle, numdelim) =
let singleton c = [c]
nums = case numstyle of
DefaultStyle -> map show [start..]
Decimal -> map show [start..]
UpperAlpha -> drop (start - 1) $ cycle $
map singleton ['A'..'Z']
LowerAlpha -> drop (start - 1) $ cycle $
map singleton ['a'..'z']
UpperRoman -> map toRomanNumeral [start..]
LowerRoman -> map (map toLower . toRomanNumeral) [start..]
inDelim str = case numdelim of
DefaultDelim -> str ++ "."
Period -> str ++ "."
OneParen -> str ++ ")"
TwoParens -> "(" ++ str ++ ")"
in map inDelim nums
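-- Sketch of the generated markers (illustrative values only):
_orderedListMarkersExamples :: Bool
_orderedListMarkersExamples =
  take 3 (orderedListMarkers (1, UpperRoman, Period))   == ["I.", "II.", "III."] &&
  take 3 (orderedListMarkers (3, LowerAlpha, OneParen)) == ["c)", "d)", "e)"]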
-- | Normalize a list of inline elements: remove leading and trailing
-- @Space@ elements, collapse double @Space@s into singles, and
-- remove empty Str elements.
normalizeSpaces :: [Inline] -> [Inline]
normalizeSpaces [] = []
normalizeSpaces list =
let removeDoubles [] = []
removeDoubles (Space:Space:rest) = removeDoubles (Space:rest)
removeDoubles (Space:(Str ""):Space:rest) = removeDoubles (Space:rest)
removeDoubles ((Str ""):rest) = removeDoubles rest
removeDoubles (x:rest) = x:(removeDoubles rest)
removeLeading (Space:xs) = removeLeading xs
removeLeading x = x
removeTrailing [] = []
removeTrailing lst = if (last lst == Space)
then init lst
else lst
in removeLeading $ removeTrailing $ removeDoubles list
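-- Illustration of 'normalizeSpaces' on an assumed input:
_normalizeSpacesExample :: Bool
_normalizeSpacesExample =
  normalizeSpaces [Space, Str "a", Space, Space, Str "b", Space] == [Str "a", Space, Str "b"]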
-- | Change final list item from @Para@ to @Plain@ if the list contains
-- no other @Para@ blocks.
compactify :: [[Block]] -- ^ List of list items (each a list of blocks)
-> [[Block]]
compactify [] = []
compactify items =
case (init items, last items) of
(_,[]) -> items
(others, final) ->
case last final of
Para a -> case (filter isPara $ concat items) of
-- if this is only Para, change to Plain
[_] -> others ++ [init final ++ [Plain a]]
_ -> items
_ -> items
isPara :: Block -> Bool
isPara (Para _) = True
isPara _ = False
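-- Sketch of 'compactify': when the only 'Para' in the whole list is the
-- final block of the final item, it is demoted to 'Plain'
-- (hypothetical helper name, added only as an example):
_compactifyExample :: Bool
_compactifyExample =
  compactify [[Plain [Str "a"]], [Para [Str "b"]]] == [[Plain [Str "a"]], [Plain [Str "b"]]]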
-- | Data structure for defining hierarchical Pandoc documents
data Element = Blk Block
| Sec Int [Int] String [Inline] [Element]
-- lvl num ident label contents
deriving (Eq, Read, Show, Typeable, Data)
-- | Convert Pandoc inline list to plain text identifier.
inlineListToIdentifier :: [Inline] -> String
inlineListToIdentifier = dropWhile (not . isAlpha) . inlineListToIdentifier'
inlineListToIdentifier' :: [Inline] -> [Char]
inlineListToIdentifier' [] = ""
inlineListToIdentifier' (x:xs) =
xAsText ++ inlineListToIdentifier' xs
where xAsText = case x of
Str s -> filter (\c -> c `elem` "_-.~" || not (isPunctuation c)) $
intercalate "-" $ words $ map toLower s
Emph lst -> inlineListToIdentifier' lst
Strikeout lst -> inlineListToIdentifier' lst
Superscript lst -> inlineListToIdentifier' lst
SmallCaps lst -> inlineListToIdentifier' lst
Subscript lst -> inlineListToIdentifier' lst
Strong lst -> inlineListToIdentifier' lst
Quoted _ lst -> inlineListToIdentifier' lst
Cite _ lst -> inlineListToIdentifier' lst
Code s -> s
Space -> "-"
EmDash -> "-"
EnDash -> "-"
Apostrophe -> ""
Ellipses -> ""
LineBreak -> "-"
Math _ _ -> ""
TeX _ -> ""
HtmlInline _ -> ""
Link lst _ -> inlineListToIdentifier' lst
Image lst _ -> inlineListToIdentifier' lst
Note _ -> ""
-- | Convert list of Pandoc blocks into (hierarchical) list of Elements
hierarchicalize :: [Block] -> [Element]
hierarchicalize blocks = S.evalState (hierarchicalizeWithIds blocks) ([],[])
hierarchicalizeWithIds :: [Block] -> S.State ([Int],[String]) [Element]
hierarchicalizeWithIds [] = return []
hierarchicalizeWithIds ((Header level title'):xs) = do
(lastnum, usedIdents) <- S.get
let ident = uniqueIdent title' usedIdents
let lastnum' = take level lastnum
let newnum = if length lastnum' >= level
then init lastnum' ++ [last lastnum' + 1]
else lastnum ++ replicate (level - length lastnum - 1) 0 ++ [1]
S.put (newnum, (ident : usedIdents))
let (sectionContents, rest) = break (headerLtEq level) xs
sectionContents' <- hierarchicalizeWithIds sectionContents
rest' <- hierarchicalizeWithIds rest
return $ Sec level newnum ident title' sectionContents' : rest'
hierarchicalizeWithIds (x:rest) = do
rest' <- hierarchicalizeWithIds rest
return $ (Blk x) : rest'
headerLtEq :: Int -> Block -> Bool
headerLtEq level (Header l _) = l <= level
headerLtEq _ _ = False
uniqueIdent :: [Inline] -> [String] -> String
uniqueIdent title' usedIdents =
let baseIdent = inlineListToIdentifier title'
numIdent n = baseIdent ++ "-" ++ show n
in if baseIdent `elem` usedIdents
then case find (\x -> numIdent x `notElem` usedIdents) ([1..60000] :: [Int]) of
Just x -> numIdent x
Nothing -> baseIdent -- if we have more than 60,000, allow repeats
else baseIdent
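-- Sketch of how 'uniqueIdent' avoids collisions (illustrative values):
_uniqueIdentExample :: Bool
_uniqueIdentExample = uniqueIdent [Str "Intro"] ["intro"] == "intro-1"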
-- | True if block is a Header block.
isHeaderBlock :: Block -> Bool
isHeaderBlock (Header _ _) = True
isHeaderBlock _ = False
--
-- Writer options
--
data HTMLMathMethod = PlainMath
| LaTeXMathML (Maybe String) -- url of LaTeXMathML.js
| JsMath (Maybe String) -- url of jsMath load script
| GladTeX
| MimeTeX String -- url of mimetex.cgi
deriving (Show, Read, Eq)
-- | Methods for obfuscating email addresses in HTML.
data ObfuscationMethod = NoObfuscation
| ReferenceObfuscation
| JavascriptObfuscation
deriving (Show, Read, Eq)
-- | Options for writers
data WriterOptions = WriterOptions
{ writerStandalone :: Bool -- ^ Include header and footer
, writerTemplate :: String -- ^ Template to use in standalone mode
, writerVariables :: [(String, String)] -- ^ Variables to set in template
, writerIncludeBefore :: String -- ^ Text to include before the body
, writerIncludeAfter :: String -- ^ Text to include after the body
, writerTabStop :: Int -- ^ Tabstop for conversion btw spaces and tabs
, writerTableOfContents :: Bool -- ^ Include table of contents
, writerS5 :: Bool -- ^ We're writing S5
, writerXeTeX :: Bool -- ^ Create latex suitable for use by xetex
, writerHTMLMathMethod :: HTMLMathMethod -- ^ How to print math in HTML
, writerIgnoreNotes :: Bool -- ^ Ignore footnotes (used in making toc)
, writerIncremental :: Bool -- ^ Incremental S5 lists
, writerNumberSections :: Bool -- ^ Number sections in LaTeX
, writerStrictMarkdown :: Bool -- ^ Use strict markdown syntax
, writerReferenceLinks :: Bool -- ^ Use reference links in writing markdown, rst
, writerWrapText :: Bool -- ^ Wrap text to line length
, writerLiterateHaskell :: Bool -- ^ Write as literate haskell
, writerEmailObfuscation :: ObfuscationMethod -- ^ How to obfuscate emails
, writerIdentifierPrefix :: String -- ^ Prefix for section & note ids in HTML
} deriving Show
-- | Default writer options.
defaultWriterOptions :: WriterOptions
defaultWriterOptions =
WriterOptions { writerStandalone = False
, writerTemplate = ""
, writerVariables = []
, writerIncludeBefore = ""
, writerIncludeAfter = ""
, writerTabStop = 4
, writerTableOfContents = False
, writerS5 = False
, writerXeTeX = False
, writerHTMLMathMethod = PlainMath
, writerIgnoreNotes = False
, writerIncremental = False
, writerNumberSections = False
, writerStrictMarkdown = False
, writerReferenceLinks = False
, writerWrapText = True
, writerLiterateHaskell = False
, writerEmailObfuscation = JavascriptObfuscation
, writerIdentifierPrefix = ""
}
--
-- File handling
--
-- | Perform an IO action in a directory, returning to starting directory.
inDirectory :: FilePath -> IO a -> IO a
inDirectory path action = do
oldDir <- getCurrentDirectory
setCurrentDirectory path
result <- action
setCurrentDirectory oldDir
return result
-- | Read file from specified user data directory or, if not found there, from
-- Cabal data directory.
readDataFile :: Maybe FilePath -> FilePath -> IO String
readDataFile userDir fname =
case userDir of
Nothing -> getDataFileName fname >>= readFile
Just u -> catch (readFile $ u </> fname)
(\_ -> getDataFileName fname >>= readFile)
|
kowey/pandoc-old
|
src/Text/Pandoc/Shared.hs
|
gpl-2.0
| 41,778
| 407
| 28
| 13,104
| 9,923
| 5,352
| 4,571
| 768
| 22
|
module EcuacionesNoLineales where
import GramaticaAbstracta
import GramaticaConcreta
import Semantica
import UU.Parsing
{-AUXILIARY FUNCTIONS
These functions are used by the different methods of this module; they are adapted from the 2010-1 semester lab work by Santiago Rodriguez and Carolina Campillo.
-}
--Function that determines whether there is a sign change in the function evaluated at two points
signo :: Func -> Func -> Func -> Bool
signo f a b
| reduccion ((sust f ('x',a)) */ (sust f ('x',b))) < (ton 0) = True
| otherwise = False
-- Function that determines whether the given point is already a root
raiz :: Func -> Func -> Bool
raiz f x
| (eval f ('x',x)) == (ton 0) = True
| otherwise = False
--Function that returns the value of the derivative of a function evaluated at a point
derivada :: Func -> Func -> Func
derivada f x = reduccion ((eval f ('x',( x +/ h))) -/ (eval f ('x', x)) // h)
where h = ton 0.0001
--Function that returns the second derivative of a function, evaluated at a point.
sdaDerivada :: Func -> Func -> Func
sdaDerivada f n = reduccion(((eval f ('x', n +/ h))) -/ (ton 2 */ (eval f ('x',n))) +/ (eval f ('x',n -/ h)) // (h^/ (ton 2)))
where h = ton 0.0001
--Function that receives the type of error to compute and the current and previous values, and returns the desired error
error' :: String -> Func -> Func -> Func
error' t act ant
| t == "Error Absoluto" = eAbs act ant
| t == "Error Relativo" = eRel act ant
| otherwise = error "No existe ese tipo de error"
--Function that computes the absolute error
eAbs :: Func -> Func -> Func
eAbs act ant = abs' (act -/ ant)
--Function that computes the relative error
eRel :: Func -> Func -> Func
eRel act ant = abs' (reduccion (act -/ ant) // act)
{-INCREMENTAL SEARCH METHOD
NOTE: Adapted from the method implemented by Santiago Rodriguez and Carolina Campillo in the 2010-1 semester lab; the changes needed to use it with our function grammar were made.
-}
--Function that performs an incremental search for an interval containing at least one root
busqdIncremental :: Func -> Func -> Func -> Integer -> [(Func,Func)]
busqdIncremental f a d 0 = []
busqdIncremental f a d i
| (raiz f a) = [((reduccion a),(reduccion a))]
| (signo f a (a +/ d)) = [((reduccion a),(reduccion (a +/ d)))]
| (otherwise) = (busqdIncremental f (a +/ d) d (i-1))
{-BISECTION METHOD
NOTE: Adapted from the method implemented by Santiago Rodriguez and Carolina Campillo in the 2010-1 semester lab; the changes needed to use it with our function grammar were made.
-}
--Function that performs the input checks and then starts the main loop
biseccion :: Func -> Func -> Func -> Func -> Integer -> String -> String
biseccion f xi xs tol n typErr
| (raiz f xi) = (show xi) ++ " es raiz"
| (raiz f xs) = (show xs) ++ " es raiz"
| (not (signo f xi xs)) = "Intervalo incorrecto"
| (otherwise) = (biseccion' f xi xs (ton 0) (reduccion ((tol) +/ (ton 1))) tol (n-1) typErr)
--Function that performs the bisection
biseccion' :: Func -> Func -> Func -> Func -> Func -> Func -> Integer -> String -> String
biseccion' f xi xs xm' e tol i typErr
| ((not (raiz f xm)) && e > tol && i>0) = if (not (signo f xi xm))
then (biseccion' f xm xs xm err tol (i-1) typErr)
else (biseccion' f xi xm xm err tol (i-1) typErr)
| (raiz f xm) = (show xm) ++ " es raiz"
| (e <= tol) = (show xm) ++ " es raiz con un error " ++ (show e)
| (otherwise) = "El metodo no converge en las iteraciones dadas"
where xm = (reduccion ((xi +/ xs) // (ton 2)))
err = error' typErr xm xm'
{-FALSE POSITION (REGULA FALSI) METHOD
NOTE: Adapted from the method implemented by Santiago Rodriguez and Carolina Campillo in the 2010-1 semester lab; the changes needed to use it with our function grammar were made.
-}
--Function that performs the input checks and then starts the main loop
reglaFalsa :: Func -> Func -> Func -> Func -> Integer -> String -> String
reglaFalsa f a b tol n typErr
| (raiz f a) = (show a) ++ " es raiz"
| (raiz f b) = (show b) ++ " es raiz"
| (not (signo f a b)) = "Intervalo incorrecto"
| (otherwise) = (reglaFalsa' f a b (ton 0) (reduccion ((tol) +/ (ton 1))) tol (n-1) typErr)
--Function that performs the false position method
reglaFalsa' :: Func -> Func -> Func -> Func -> Func -> Func -> Integer -> String -> String
reglaFalsa' f a b p' e tol i typErr
| ((not (raiz f p)) && e > tol && i>0) = if (not (signo f a p))
                                              then (reglaFalsa' f p b p err tol (i-1) typErr)
                                              else (reglaFalsa' f a p p err tol (i-1) typErr)
| (raiz f p) = (show p) ++ " es raiz"
| (e <= tol) = (show p) ++ " es raiz con un error " ++ (show e)
| otherwise = "El metodo no converge en las iteraciones dadas"
where p = (reduccion (a -/ (((eval f ('x', a)) */ (b -/ a)) // ((eval f ('x',b)) -/ (eval f ('x',a))))))
err = error' typErr p p'
{-FIXED-POINT METHOD
NOTE: Adapted from the method implemented by Santiago Rodriguez and Carolina Campillo in the 2010-1 semester lab; the changes needed to use it with our function grammar were made.
-}
--Function that receives the data and starts the main loop
puntoFijo :: Func -> Func -> Func -> Func -> Integer -> String -> String
puntoFijo f g x0 tol n typErr = (puntoFijo' f g x0 (reduccion ((tol) +/ (ton 1))) tol n typErr)
--Function that performs the fixed-point method
puntoFijo' :: Func -> Func -> Func -> Func -> Func -> Integer -> String -> String
puntoFijo' f g x0 e tol i typErr
| ((not (raiz f x1)) && e > tol && i > 0) = (puntoFijo' f g x1 err tol (i-1) typErr)
| (raiz f x1) = (show x1) ++ " es raiz"
| (e <= tol) = (show x1) ++ " es raiz con un error " ++ (show e)
| (otherwise) = "El metodo no converge en las iteraciones dadas"
where x1 = eval g ('x', x0)
err = error' typErr x1 x0
{-NEWTON'S METHOD
NOTE: Adapted from the method implemented by Santiago Rodriguez and Carolina Campillo in the 2010-1 semester lab; the changes needed to use it with our function grammar were made.
-}
--Function that receives the data and starts the main loop
newton :: Func -> Func -> Func -> Func -> Integer -> String -> String
newton f f' x0 tol n typErr = (newton' f f' x0 (reduccion ((tol) +/ (ton 1))) tol n typErr)
--Function that performs Newton's method
newton' :: Func -> Func -> Func -> Func -> Func -> Integer -> String -> String
newton' f f' x0 e tol i typErr
| ((not (raiz f x1)) && e > tol && (not (raiz f' x1)) && i > 0) = (newton' f f' x1 err tol (i-1) typErr)
| (raiz f x1) = (show x1) ++ "es raiz"
| (e <= tol) = (show x1) ++ " es raiz con un error " ++ (show e)
| (raiz f' x1)= "La derivada se hizo cero -> Division por cero"
| (otherwise) = "El metodo no converge en las iteraciones dadas"
where x1 = reduccion (x0 -/ ((sust f ('x', x0)) // (sust f' ('x', x0))))
err = error' typErr x1 x0
{-SECANT METHOD
NOTE: Adapted from the method implemented by Santiago Rodriguez and Carolina Campillo in the 2010-1 semester lab; the changes needed to use it with our function grammar were made.
-}
--Function that performs the input checks and then starts the main loop
secante :: Func -> Func -> Func -> Func -> Integer -> String -> String
secante f x0 x1 tol n typErr
| (raiz f x0) = (show x0) ++ "es raiz"
| (otherwise) = (secante' f x0 x1 (reduccion ((tol) +/ (ton 1))) tol (n-1) typErr)
--Function that performs the secant method
secante' :: Func -> Func -> Func -> Func -> Func -> Integer -> String -> String
secante' f x0 x1 e tol i typErr
| ((not (raiz f x1)) && e > tol && denom /= (ton 0) && i>0) = (secante' f x1 x2 err tol (i-1) typErr)
| (raiz f x1) = (show x1) ++ "es raiz"
| (e <= tol) = (show x1) ++ "es raiz con un error " ++ (show e)
| (denom == (ton 0)) = "El denominador se hizo cero"
| (otherwise) = "El metodo no converge en las iteraciones dadas"
where x2 = reduccion (x1 -/ ((y1 */ (x1 -/ x0)) // denom))
denom = reduccion (y1 -/ y0)
y0 = eval f ('x', x0)
y1 = eval f ('x', x1)
err = error' typErr x2 x1
{-MULTIPLE ROOTS METHOD
This method was developed entirely by us, since it is not defined in the 2010-1 semester lab
-}
--Function that receives the data and starts the main multiple-roots loop.
raicesMult :: Func -> Func -> Func ->Integer -> String -> String
raicesMult f a tol n typErr = (raicesMult' f a (reduccion tol +/ ton 1) tol n typErr)
--Multiple-roots function that uses numerical approximations for the first and second derivatives of the function.
raicesMult' :: Func -> Func -> Func -> Func -> Integer -> String -> String
raicesMult' f x0 e tol i typErr
| ((not(raiz f x1)) && e > tol && den /= (ton 0) && i > 0) = (raicesMult' f x1 err tol (i-1) typErr)
| (raiz f x1) = (show x1) ++ " es raiz"
| (e <= tol) = (show x1) ++ " es raiz con un error " ++ (show e)
    | (den == (ton 0)) = "Denominador igual a 0"
| otherwise = "El metodo no converge en las iteraciones dadas"
where y = eval f ('x',x0)
den = reduccion (((derivada f x0) ^/ ton 2) -/ (y */ (sdaDerivada f x0)))
x1 = reduccion (x0 -/ ((y */ (derivada f x0)) // den))
err = error' typErr x1 x0
{-MULTIPLE ROOTS METHOD (version taking explicit first and second derivatives)
This method was developed entirely by us, since it is not defined in the 2010-1 semester lab
-}
--Function that receives the data and starts the main loop
raicesMultiples :: Func -> Func -> Func -> Func -> Func -> Integer -> String -> String
raicesMultiples f f' f'' x0 tol n typErr = (raicesMultiples' f f' f'' x0 (reduccion ((tol) `FSum` (FConst 1))) tol n typErr)
raicesMultiples' :: Func -> Func -> Func -> Func -> Func -> Func -> Integer -> String -> String
raicesMultiples' f f' f'' x0 e tol i typErr
| ((not (raiz f x0)) && e > tol && i > 0) = (raicesMultiples' f f' f'' x1 err tol (i-1) typErr)
| (raiz f x0) = (show x0) ++ "es raiz"
| (e <= tol) = (show x0) ++ "es raiz con un error " ++ (show e)
| (otherwise) = "El metodo no converge en las iteraciones dadas"
where x1 = reduccion (x0 `FRes` ((y `FMult` y') `FDiv` ((y' `FPot` (FConst 2)) `FRes` (y `FMult` y''))))
y = eval f ('x', x0)
y' = eval f' ('x', x0)
y'' = eval f'' ('x', x0)
err = error' typErr x1 x0
{-The different methods can be tested with these parameters-}
--(X^2)-3
f :: Func
f = ((tov 'x') `FPot` (ton 2)) -/ (ton 3)
g :: Func
g = FRes (tov 'x') (FDiv (FRes (FPot (tov 'x') (ton 2.0)) (ton 3.0)) (FMult (ton 2.0) (tov 'x')))
f' :: Func
f' = (ton 2) */ (tov 'x')
a :: Func
a = (ton (-2))
b :: Func
b = (ton (-1.5))
tol :: Func
tol = (ton (1e-5))
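--Illustrative calls of the methods above using these test parameters
--(the helper names are hypothetical and added only as examples; they assume
--the Func grammar operators behave as used throughout this module):
_exampleBisection :: String
_exampleBisection = biseccion f a b tol 100 "Error Absoluto"
_exampleNewton :: String
_exampleNewton = newton f f' b tol 100 "Error Relativo"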
{-The multiple-roots method can be tested with these functions-}
--COS^2(x)-2xCOS(x)+x^2
fr :: Func
fr = (((FCos (tov 'x')) ^/ (ton 2))) -/ (((ton 2)*/ (tov 'x'))*/ (FCos (tov 'x'))) +/ ((tov 'x') ^/ (ton 2))
fr' :: Func
fr' = FSum (FRes (FPot (FCos (FVar 'x')) (FConst 2.0)) (FMult (FMult (FConst 2.0) (FVar 'x')) (FCos (FVar 'x')))) (FPot (FVar 'x') (FConst 2.0))
--EXP(-2x)-2xEXP(-x)+x^2
fn :: Func
fn = FSum (FRes (FExp (FMult (FConst (-2.0)) (FVar 'x'))) (FMult (FMult (FConst 2.0) (FVar 'x')) (FExp (FMult (FConst (-1.0)) (FVar 'x'))))) (FPot (FVar 'x') (FConst 2.0))
fcos = (cos_(tov 'x') -/ (ton 2))
ff :: Func
ff = ((tov 'x') ^/ (ton 2)) -/ (ton 2)
aleja :: Func
aleja = ((tov 'x' */ (sec_ (tov 'x'))^/ ton(2) ))
parcial = (sen_ (tov 'x')) +/ cos_((ton 1) +/ (tov 'x'^/ (ton 2))) -/ (ton 1.3)
|
dmuneras/LambdaMethods
|
src/EcuacionesNoLineales.hs
|
gpl-2.0
| 12,133
| 5
| 19
| 3,120
| 4,461
| 2,322
| 2,139
| -1
| -1
|
{-# LANGUAGE OverloadedStrings #-}
module Pages.Base where
import Lucid
import Lucid.Base (makeAttribute)
import Pages.Bootstrap
---
-- | The basic template for all pages.
base :: Html () -> Html ()
base content = do
doctype_
html_ $ do
head_ $ do
meta_ [charset_ "utf-8"]
title_ "NanQ - Analyse Japanese Text"
link_ [ href_ "/assets/bootstrap.min.css"
, rel_ "stylesheet"
, media_ "all"
, type_ "text/css" ]
body_ [style_ "padding-top: 20px;"] $
containerFluid_ $ do
row_ $
col6_ [class_ "col-md-offset-3"] $
div_ [class_ "header clearfix"] $ do
nav_ $ ul_ [class_ "nav nav-pills pull-right"] $
li_ [role_ "presentation"] $
a_ [href_ "/about", class_ "btn btn-default"] "About"
h3_ [ class_ "text-muted"
, style_ "margin-top: 0;margin-bottom: 0;line-height: 40px;"
] $ do
a_ [href_ "/"] $ img_ [src_ "/assets/logo-small.png"]
"・NanQ - Analyse Japanese Text・漢字分析"
hr_ []
content
row_ $
col6_ [class_ "col-md-offset-3"] $ do
hr_ []
center_ ghButton
center_ $ a_ [href_ "/"] $ img_ [src_ "/assets/logo.png"]
ghButton :: Html ()
ghButton = iframe_
[ src_ "https://ghbtns.com/github-btn.html?user=fosskers&type=follow&count=true&size=large"
, makeAttribute "frameborder" "0"
, makeAttribute "scrolling" "0"
, width_ "180px"
, height_ "30px"] ""
-- | The Home Page, accessible through the `/` endpoint.
home :: Html ()
home = row_ $ do
col4_ [class_ "col-md-offset-1"] explanation
col6_ form
form :: Html ()
form = form_ [action_ "/analyse", method_ "POST"] $
div_ [class_ "input"] $ do
label_ [for_ "japText"] "Japanese Text・日本語入力"
textarea_ [ id_ "japText"
, class_ "form-control"
, name_ "japText"
, rows_ "15"
, placeholder_ "Paste your text here."
] ""
center_ [style_ "padding-top:15px;"] $
button_ [ class_ "btn btn-primary btn-lg", type_ "submit" ] "Analyse・分析"
explanation :: Html ()
explanation = div_ [class_ "jumbotron"] $ do
p_ . toHtml $ unwords [ "Are you a learner or native speaker of Japanese?"
, "You can use this website to analyse Japanese text"
, "for its Kanji difficulty."
]
p_ $ mconcat [ "日本語のネイティブも学生も、日本語の文章の漢字難易度を"
, "分析するためにこのサイトを無料に利用できます。" ]
|
fosskers/nanq
|
nanq-site/Pages/Base.hs
|
gpl-3.0
| 2,654
| 0
| 22
| 763
| 639
| 301
| 338
| 66
| 1
|
module Util where
import Data.Either
--import Data.ByteString.Char8(unpack)
import qualified Data.ByteString.Lazy as L
import Control.Exception as X
import Data.Char(chr)
import Data.Word(Word8)
import Network.URI
import Data.Maybe(fromJust, fromMaybe)
import Debug.Trace
import Network.HTTP.Conduit
grabUrl :: String -> IO (Maybe String)
grabUrl url = do
body <- simpleHttp url `X.catch` exceptionHandler
return
(if body == L.empty then Nothing else
Just $ bytesToString $ L.unpack body)
where exceptionHandler :: HttpException -> IO L.ByteString
exceptionHandler e = putStr "An error occured downloading article: " >> print e >> return L.empty
--for error handling Eithers - crap haskell stdlib again?
isLeft (Left _) = True
isLeft _ = False
bytesToString :: [Word8] -> String
bytesToString = map (chr . fromIntegral)
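-- Quick sanity sketch (assumed values): 72 and 105 are the ASCII codes of
-- 'H' and 'i'.
_bytesToStringExample :: Bool
_bytesToStringExample = bytesToString [72, 105] == "Hi"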
--haskell doesn't have basic triple-or-above manip funcs, wtf?
fst3::(a,b,c)->a
fst3 (a,b,c) = a
|
bcorrigan/PipeFeed
|
src/Util.hs
|
gpl-3.0
| 1,049
| 0
| 12
| 257
| 284
| 158
| 126
| 24
| 2
|
module Types where
import Data.Int
import Data.Word
import Data.Vector.Unboxed (Vector)
boardSize = 20 :: Int8
type Turn = Int8
newtype Player = Player Int deriving (Eq,Ord,Enum,Bounded,Show)
(none:red:green:yellow:blue:_) = [Player 0 ..]
allPlayers = [red,green,yellow,blue] :: [Player]
numPlayers = length allPlayers :: Int
type Coord = Int8
type Offset = Int8
data Coords = Coords !Coord !Coord deriving (Show)
data Offsets = Offsets !Offset !Offset deriving (Show)
data CornerType = UpperRight | UpperLeft | LowerRight | LowerLeft deriving (Eq,Ord,Enum,Bounded,Show)
allCornerTypes = [minBound..maxBound] :: [CornerType]
numCorners = length allCornerTypes
type ValidityBitmap = Word32
data Piece =
OnePiece |
TwoPiece |
ThreePiece |
CrookedThree |
SquarePiece |
ShortI |
ShortT |
ShortL |
ShortZ |
LongI |
LongT |
LongL |
LongZ |
PPiece |
FPiece |
XPiece |
VPiece |
UPiece |
YPiece |
NPiece |
WPiece
deriving (Show,Eq,Enum,Bounded,Ord)
allPieces = [minBound..maxBound] :: [Piece]
numPieces = length allPieces
data PieceCorner = PieceCorner !Offsets !CornerType deriving (Show)
data Placement = Placement !Piece !CornerType ![Offsets] ![PieceCorner] !ValidityBitmap
newtype Board = Board (Vector Word64)
data TerritoryCorner = TerritoryCorner Coords CornerType ValidityBitmap
data Move = Move Coords Placement
data GameState = State !Turn Board !([TerritoryCorner],[TerritoryCorner],[TerritoryCorner],[TerritoryCorner]) !([Piece],[Piece],[Piece],[Piece])
|
djudd/blokus
|
Types.hs
|
gpl-3.0
| 1,564
| 0
| 10
| 296
| 527
| 304
| 223
| 77
| 1
|
module HEP.Physics.TTBar.Print where
import Data.List
hline :: IO ()
hline = putStrLn "============================"
title1 :: [String]
title1 = ["nocut", "photon-veto", "1 btag", "1 lepton", "MET > 20GeV", "hard lepton eta < 1.1, pt>20", "four tight jets", "one central b jet", "5 tight jets"]
prettyprint :: (Show a) => [String] -> [a] -> [a] -> [a] -> String
prettyprint title r0 r1 r2 = concat $ zipWith4 f title r0 r1 r2
where f t1 r01 r11 r21 = "|" ++ t1 ++ "|"
++ (show r01) ++ "|"
++ (show r11) ++ "|"
++ (show r21) ++ "|" ++ "\n"
|
wavewave/ttbar
|
lib/HEP/Physics/TTBar/Print.hs
|
gpl-3.0
| 645
| 0
| 15
| 206
| 212
| 117
| 95
| 12
| 1
|
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE IncoherentInstances #-}
{-# LANGUAGE DeriveDataTypeable #-}
module TypeHelper (
decodeTypeHelper
,TypeQuery (..)
,_type
,_browse
) where
import Language.Haskell.Interpreter hiding (get)
import Data.Generics hiding (typeOf)
import Plugins.Html
data TypeQuery = FunctionType String
| ModuleElems String
deriving (Data,Typeable)
decodeTypeHelper (FunctionType string) = typeOf string
decodeTypeHelper (ModuleElems string) = getModuleExports string >>= return . show . hTable . map (:[])
_type = FunctionType
_browse = ModuleElems
|
xpika/interpreter-haskell
|
TypeHelper.hs
|
gpl-3.0
| 605
| 0
| 9
| 99
| 147
| 85
| 62
| 18
| 1
|
{-|
Module : Bench
Description : Memory benchmark of Multilinear library
Copyright : (c) Artur M. Brodzki, 2018
License : BSD3
Maintainer : artur@brodzki.org
Stability : experimental
Portability : Windows/POSIX
-}
module Main (
main
) where
import Weigh
import Multilinear.Generic.MultiCore
import qualified Multilinear.Matrix as Matrix
import qualified Multilinear.Vector as Vector
-- | Simple generator function for benchmarked matrices
gen :: Int -> Int -> Double
gen j k = sin (fromIntegral j) + cos (fromIntegral k)
-- matrix sizes
s1 :: Int
s1 = 64
s2 :: Int
s2 = 256
s3 :: Int
s3 = 1024
-- | ENTRY POINT
main :: IO ()
main = mainWith (do
setColumns [Case, Allocated, GCs, Live, Max]
-- Benchmarking small vectors
value "vector 1 elem generation" (Vector.fromIndices "i" 1 fromIntegral :: Tensor Double)
value "vector 2 elem generation" (Vector.fromIndices "i" 2 fromIntegral :: Tensor Double)
value "vector 3 elem generation" (Vector.fromIndices "i" 3 fromIntegral :: Tensor Double)
-- Benchmarking matrix generators
value "matrix 64 x 64 generation"
(Matrix.fromIndices "ij" s1 s1 gen :: Tensor Double)
value "matrix 256 x 256 generation"
(Matrix.fromIndices "ij" s2 s2 gen :: Tensor Double)
value "matrix 1024 x 1024 generation"
(Matrix.fromIndices "ij" s3 s3 gen :: Tensor Double)
-- Benchmarking matrix addition
func "matrix 64 x 64 addition"
(+ Matrix.fromIndices "ab" s1 s1 gen)
(Matrix.fromIndices "ab" s1 s1 (\a b -> fromIntegral a + fromIntegral b) :: Tensor Double)
func "matrix 256 x 256 addition"
(+ Matrix.fromIndices "ab" s2 s2 gen)
(Matrix.fromIndices "ab" s2 s2 (\a b -> fromIntegral a + fromIntegral b) :: Tensor Double)
func "matrix 1024 x 1024 addition"
(+ Matrix.fromIndices "ab" s3 s3 gen)
(Matrix.fromIndices "ab" s3 s3 (\a b -> fromIntegral a + fromIntegral b) :: Tensor Double)
-- Benchmarking matrix multiplication
func "matrix 40 x 4,000 multiplication"
(* Matrix.fromIndices "jk" 4000 40 gen)
(Matrix.fromIndices "ij" 40 4000 gen :: Tensor Double)
func "matrix 40 x 16,000 multiplication"
(* Matrix.fromIndices "jk" 16000 40 gen)
(Matrix.fromIndices "ij" 40 16000 gen :: Tensor Double)
func "matrix 40 x 64,000 multiplication"
(* Matrix.fromIndices "jk" 64000 40 gen)
(Matrix.fromIndices "ij" 40 64000 gen :: Tensor Double)
)
|
ArturB/Multilinear
|
benchmark/multicore/memory/Bench.hs
|
gpl-3.0
| 2,507
| 0
| 15
| 599
| 651
| 332
| 319
| 44
| 1
|
{- The EvoChess Game
Copyright (C) Bogdan Penkovsky 2017
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Evochess
========
== Rules
See rules.txt
== Implementation
This is a board-centric approach which guarantees that
- each piece has unique coordinates.
The game stores the following information:
1. Board
2. Turn number. Odd numbers correspond to the moves of White,
even numbers, of Black.
3. Number of Pawn moves (including captures by Pawns), required
for a Pawn-minor piece transformation (#1).
4. Number of piece captures, required for rook promotion (#2).
5. The information about the last long Pawn move
(the en passant rule).
6. Location of both kings to quickly check for pins.
Alternatively, the kings' locations need not be stored if every
opponent piece is instead checked for whether the player's king lies in its attack range.
Information about castling is not stored since it is
not defined for the evochess.
-}
{-# LANGUAGE FlexibleInstances #-}
module Lib where
import Data.List
import Data.Array
import Data.Maybe ( fromJust )
-- Min number of pawn moves to promote #1
_N = 3
-- Min number of piece captures to promote #2
_K = 2
data Kind = Rook | Knight | Bishop | Queen | King | Pawn
deriving Eq
data Color = White | Black
deriving Eq
type Square = Maybe Piece
data Piece = Piece Color Kind
deriving Eq
type Coord = (Int, Int)
data Game = Game { player1 :: Player
, player2 :: Player
, turn :: Int
, board :: Board }
initial = Game { player1 = player0 { color = White }
, player2 = player0 { color = Black }
, turn = 1
, board = evoboard8 }
data Player = Player { pawnMoves :: Int
, color :: Color
, capturedPieces :: Int}
player0 = Player { pawnMoves = 0
, color = White
, capturedPieces = 0 }
type Move = (Coord, Coord, Attr Char)
data Attr a = None | Promote a
play :: Move -> Game -> Either String Game
play m g@Game
{ turn = turn
, player1 = player1
, player2 = player2
, board = board } | validMove m activePlayer board = Right g'
| otherwise = Left "Invalid move"
where g' = g { turn = turn + 1
, player1 = player1'
, player2 = player2'
, board = board' }
-- Update players state
player1' = if odd turn then activePlayer' else player1
player2' = if even turn then player2 else activePlayer'
activePlayer = if odd turn then player1 else player2 :: Player
activePlayer' = countMoves activePlayer move
-- Update the board state
board' = board -- TODO
countMoves player move = player -- TODO
validMove (c1, c2, at) player b =
-- TODO Check the attributes at
  inBoard c1 && inBoard c2 && canMoveTo c1 c2 b && myPiece c1 player b
canMoveTo c1 c2 board = True -- TODO
myPiece c1 player board = True -- TODO: is this check necessary?
showP (Piece White Rook) = "R"
showP (Piece White Knight) = "N"
showP (Piece White Bishop) = "B"
showP (Piece White Queen) = "Q"
showP (Piece White King) = "K"
showP (Piece White Pawn) = "P"
showP (Piece Black Rook) = "r"
showP (Piece Black Knight) = "n"
showP (Piece Black Bishop) = "b"
showP (Piece Black Queen) = "q"
showP (Piece Black King) = "k"
showP (Piece Black Pawn) = "p"
showS Nothing = " "
showS (Just p) = showP p
minX, maxX, minY, maxY :: Int
minX = 1
maxX = 8
minY = 1
maxY = 8
boardBounds :: ((Int, Int), (Int, Int))
boardBounds = ((minX, minY), (maxX, maxY))
instance Show Board where
show (Board b) = concat $ intersperse "\n" s
where s = [(intersperse ' ' [(head $ showS $ b ! (maxX - i + 1, j)) | j <- [minY..maxY]]) | i <- [minX..maxX]]
newtype Board = Board { getBoard :: Array Coord Square }
inBoard :: Coord -> Bool
inBoard (x, y) = x >= minX && x <= maxX
&& y >= minY && y <= maxY
{-# INLINE inBoard #-}
-- Pieces in the very beginning of the game on a classical 8x8 board
board8 :: Board
board8 = Board $ listArray ((1, 1), (8, 8)) b
where
b = bw ++ pw ++ b0 ++ b0 ++ b0 ++ b0 ++ pb ++ bb
bw = map Just [Piece White Rook, Piece White Knight, Piece White Bishop, Piece White Queen
, Piece White King, Piece White Bishop, Piece White Knight, Piece White Rook]
bb = map Just [Piece Black Rook, Piece Black Knight, Piece Black Bishop, Piece Black Queen
, Piece Black King, Piece Black Bishop, Piece Black Knight, Piece Black Rook]
pw = replicate 8 (Just $ Piece White Pawn)
pb = replicate 8 (Just $ Piece Black Pawn)
b0 = replicate 8 Nothing
evoboard8 :: Board
evoboard8 = Board $ listArray ((1, 1), (8, 8)) b
where
b = b1 ++ b2 ++ b0 ++ b0 ++ b0 ++ b0 ++ b7 ++ b8
b0 = replicate 8 Nothing
b1 = [Nothing, Nothing, Nothing, Nothing, Just $ Piece White King, Nothing, Nothing, Nothing]
b2 = replicate 8 (Just $ Piece White Pawn) -- White pawns only
b7 = replicate 8 (Just $ Piece Black Pawn)
b8 = [Nothing, Nothing, Nothing, Nothing, Just $ Piece Black King, Nothing, Nothing, Nothing]
emptyBoard :: Board
emptyBoard = Board $ listArray boardBounds (repeat Nothing)
pawnDirection :: Color -> Int
pawnDirection White = 1
pawnDirection Black = -1
-- Verify if a piece given by coord is under attack.
-- Especially useful to see if a king is in check.
-- That is needed to verify if a piece is pinned.
-- A piece is pinned if after its move the king of
-- the same color would be in check.
-- Thus, the game should not only retain the board
-- information, but also it should be able to quickly
-- find the position of both kings.
isAttacked :: Board -> Coord -> Bool
isAttacked board@(Board brd) coord@(x, y) = pawn1 || pawn2 || other
where
-- Using fromJust as we are sure the piece exists
color = getColor $ fromJust $ getSquare board coord
-- Check if there are any enemy pawns
pawn1 = pawnAttacks (x - 1, y - opdirection)
pawn2 = pawnAttacks (x + 1, y - opdirection)
pawnAttacks coord' = (getSquare board coord') == (Just $ Piece opcolor Pawn)
opcolor = opposite color
opdirection = pawnDirection opcolor
other = False
getSquare :: Board -> Coord -> Square
getSquare (Board brd) coord | not (inBoard coord) = Nothing
| otherwise = brd ! coord
getColor :: Piece -> Color
getColor (Piece White _) = White
getColor _ = Black
opposite :: Color -> Color
opposite White = Black
opposite _ = White
isValidMove :: Board -> Coord -> Coord -> Bool
isValidMove _ _ _ = False
-- Do not forget en passant
move :: Board -> Coord -> Coord -> Board
move _ _ _ = emptyBoard
-- Move geometry based on the partial game state (no en passant, no castling)
geometry :: Board -> Piece -> Coord -> [Coord]
geometry _ (Piece _ Knight) (x, y) = [ (x - 1, y + 2)
, (x + 1, y + 2)
, (x + 2, y - 1)
, (x + 2, y + 1)
, (x - 1, y - 2)
                                       , (x + 1, y - 2)
, (x - 2, y - 1)
, (x - 2, y + 1) ]
-- Now, the idea is to use the board state and boardBounds
-- in order to get piece's visibility range.
geometry board (Piece _ Bishop) (x, y) = []
geometry board (Piece _ Rook) (x, y) = []
geometry board (Piece color Queen) coord = bishop ++ rook
where bishop = geometry board (Piece color Bishop) coord
rook = geometry board (Piece color Rook) coord
geometry _ (Piece _ King) (x, y) = [ (x + 1, y )
, (x - 1, y )
, (x , y + 1)
, (x , y - 1)
, (x + 1, y + 1)
, (x + 1, y - 1)
, (x - 1, y + 1)
, (x - 1, y - 1) ]
geometry board (Piece White Pawn) (x, y) = [(x, y + 1)] ++ attack1 ++ attack2 ++ longmove
where
attack coord' | (board `hasEnemyPiece` White) coord' = [coord']
| otherwise = []
attack1 = attack (x + 1, y + 1)
attack2 = attack (x - 1, y + 1)
longmove | y == 2 = [(x, 4)]
| otherwise = []
geometry board (Piece Black Pawn) (x, y) = [(x, y - 1)] ++ attack1 ++ attack2 ++ longmove
where
attack coord' | (board `hasEnemyPiece` Black) coord' = [coord']
| otherwise = []
attack1 = attack (x + 1, y - 1)
attack2 = attack (x - 1, y - 1)
longmove | y == 7 = [(x, 5)]
| otherwise = []
-- Check if the square contains a piece of given color
hasMyPiece :: Board -> Color -> Coord -> Bool
hasMyPiece board mycolor coord = f sq
  where sq = getSquare board coord
        -- compare the colour explicitly; a pattern variable here would
        -- shadow 'mycolor' and match any piece
        f (Just (Piece c _)) = c == mycolor
        f _ = False
hasEnemyPiece :: Board -> Color -> Coord -> Bool
hasEnemyPiece board mycolor coord = hasMyPiece board (opposite mycolor) coord
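-- A minimal usage sketch of the definitions above (hypothetical helper
-- names, added only for illustration): print the initial evochess position
-- and list the on-board knight-move targets from square (4, 4).
_printInitialBoard :: IO ()
_printInitialBoard = print (board initial)
_knightTargets :: [Coord]
_knightTargets = filter inBoard (geometry emptyBoard (Piece White Knight) (4, 4))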
|
masterdezign/evochess
|
src/Lib.hs
|
gpl-3.0
| 9,669
| 0
| 16
| 2,931
| 2,822
| 1,549
| 1,273
| 169
| 4
|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE ViewPatterns #-}
-- | Storage for Notary's data.
module RSCoin.Notary.Storage
( Storage (_allocationEndurance, _masterKeys, _transactionEndurance)
, addSignedTransaction
, allocateMSAddress
, announceNewPeriod
, batchUpdatePeriods
, checkIfSynchronized
, emptyNotaryStorage
, getPeriodId
, getSignatures
, outdatedAllocs
, pollPendingTxs
, queryAllMSAdresses
, queryCompleteMSAdresses
, queryMyMSRequests
, removeCompleteMSAddresses
, setSynchronization
) where
import Control.Lens (Getter, Lens', at, makeLenses, to, use,
uses, view, views, (%=), (%~), (&),
(.=), (?=), (^.))
import Control.Monad (forM_, unless, when)
import Control.Monad.Catch (MonadThrow (throwM))
import Control.Monad.Extra (unlessM, whenJust, whenM)
import Data.Acid (Query, Update, liftQuery)
import qualified Data.Foldable as F
import Data.Hashable (Hashable)
import Data.HashMap.Strict (HashMap)
import qualified Data.HashMap.Strict as HM hiding (HashMap)
import Data.HashSet (HashSet)
import qualified Data.HashSet as HS hiding (HashSet)
import Data.IntMap.Strict (IntMap)
import qualified Data.IntMap.Strict as IM hiding (IntMap)
import qualified Data.Map.Strict as M hiding (Map)
import Data.Maybe (fromJust, fromMaybe, mapMaybe)
import qualified Data.Set as S
import Formatting (build, int, sformat, (%))
import RSCoin.Core (Address (..), HBlock (..), PeriodId,
PublicKey, Signature, Transaction (..),
TxVerdict (..), Utxo,
computeOutputAddrids, maxStrategySize,
validateSignature, validateTxPure,
verify)
import RSCoin.Core.Strategy (AddressToTxStrategyMap,
AllocationAddress (..),
AllocationInfo (..),
AllocationStrategy (..), MSAddress,
PartyAddress (..), TxStrategy (..),
allParties, allocateTxFromAlloc,
allocationStrategy,
currentConfirmations,
partyToAllocation)
import RSCoin.Notary.Defaults (allocationAttemptsLimit,
defaultAllocationEndurance,
defaultTransactionEndurance)
import RSCoin.Notary.Error (NotaryError (..))
type TxPoolSignatureBundle = HashMap Address (Signature Transaction)
data Storage = Storage
    { -- | Pool of transactions to be signed. Maps transaction to already
-- collected signatures.
_txPool :: !(HashMap Transaction TxPoolSignatureBundle)
-- | Mapping from newly allocated multisignature addresses. This Map is
-- used only during multisignature address allocation process.
, _allocationStrategyPool :: !(HashMap MSAddress AllocationInfo)
-- | Mapping PeriodId -> MSAddresses that were allocated during period.
, _discardMSAddresses :: !(IntMap [MSAddress])
-- | Mapping PeriodId -> Transactions that were submitted during period.
, _discardTransactions :: !(IntMap [Transaction])
-- | Constant that defines how many periods we should store MS address allocation
-- requests (i.e. information in '_discardMSAddresses').
, _allocationEndurance :: !PeriodId
-- | Constant that defines how many periods we should store transactions
-- requests (i.e. information in '_discardTransactions').
, _transactionEndurance :: !PeriodId
-- | Number of attempts for user per period to allocate multisig address.
, _periodStats :: !(HashMap Address Int)
-- | Non-default addresses, registered in system (published to bank).
, _addresses :: !AddressToTxStrategyMap
-- | Mapping between addrid and address.
, _utxo :: !Utxo
-- | Trusted master keys to check for signatures in MS address allocation &
-- transaction signing.
, _masterKeys :: ![PublicKey] -- @TODO: replace with HashSet
-- | PeriodId of block which Notary expects.
, _periodId :: !PeriodId
-- | This flag is @True@ if 'Notary' has up-to-date 'Storage'.
, _isSynchronized :: !Bool
} deriving (Show)
makeLenses ''Storage
emptyNotaryStorage :: Storage
emptyNotaryStorage =
Storage
{ _txPool = mempty
, _allocationStrategyPool = mempty
, _discardMSAddresses = mempty
, _discardTransactions = mempty
, _allocationEndurance = defaultAllocationEndurance
, _transactionEndurance = defaultTransactionEndurance
, _periodStats = mempty
, _addresses = mempty
, _utxo = mempty
, _masterKeys = mempty
, _periodId = 0
, _isSynchronized = False
    }
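-- A rough usage sketch (not part of the original module): because the record
-- fields '_masterKeys', '_allocationEndurance' and '_transactionEndurance'
-- are exported, a deployment can start from 'emptyNotaryStorage' and override
-- them with a plain record update.  The 'trustedKey' argument below is a
-- hypothetical 'PublicKey' supplied by the operator.
--
-- @
-- initialStorage :: PublicKey -> Storage
-- initialStorage trustedKey = emptyNotaryStorage { _masterKeys = [trustedKey] }
-- @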
guardIfSynchronized :: Query Storage ()
guardIfSynchronized = unlessM (view isSynchronized) $
throwM $ NENotUpdated "Notary is not synchronized!"
-- ==============
-- UPDATE SECTION
-- ==============
-----------------
-- ms allocation
-----------------
-- | Throws NEBlocked if user reaches limit of attempts (DoS protection).
guardMaxAttemps :: Address -> Update Storage ()
guardMaxAttemps userAddr = do
periodStats %=
HM.insertWith (\new old -> min (old + new) allocationAttemptsLimit) userAddr 1
    currentAttempts <- uses periodStats $ fromJust . HM.lookup userAddr
    when (currentAttempts >= allocationAttemptsLimit) $ throwM NEBlocked
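-- Informal illustration of the counter above (a sketch, not part of the
-- original code): 'HM.insertWith' receives the /new/ value first and the
-- /old/ value second, so every call bumps the per-address counter by one
-- while capping it at 'allocationAttemptsLimit'.  Assuming a limit of 3 for
-- the sake of the example:
--
-- @
-- -- 1st call: counter = 1
-- -- 2nd call: counter = min (1 + 1) 3 = 2
-- -- 3rd call: counter = min (2 + 1) 3 = 3, so 'NEBlocked' is thrown
-- @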
type MSSignature = Signature (MSAddress, AllocationStrategy)
type MaybePKSignature = Maybe (PublicKey, Signature PublicKey)
-- | Allocate new multisignature address by chosen strategy and
-- given chain of certificates.
-- TODO: split into 2 functions -- initialize and confirm
allocateMSAddress
:: MSAddress -- ^ New multisig address itself
-> PartyAddress -- ^ Address of party who call this
-> AllocationStrategy -- ^ Strategy for MS address allocation
-> MSSignature -- ^ 'Signature' of @(msAddr, argStrategy)@
-> MaybePKSignature -- ^ Party address authorization.
-- 1. cold master public key
-- 2. signature of party by master key
-> Update Storage ()
allocateMSAddress
msAddr
argPartyAddress
argStrategy@AllocationStrategy{..}
requestSig
mMasterSlavePair
= do
liftQuery $ guardIfSynchronized
    -- too many checks :( I wish I knew which ones we could safely skip,
    -- but the order of the checks matters!!!
let partyAddr@(Address partyPk) = partyAddress argPartyAddress
let signedData = (msAddr, argStrategy)
let slavePk = case argPartyAddress of
TrustParty{..} -> hotTrustKey
UserParty{..} -> partyPk
trustedKeys <- use masterKeys
unless (null trustedKeys) $ case mMasterSlavePair of
Nothing -> throwM $ NEInvalidArguments "You should provide master pk and slave signature"
Just (masterPk, masterSlaveSig) -> do
unless (verify masterPk masterSlaveSig slavePk) $
throwM $ NEUnrelatedSignature $ sformat
("Invalid signature " % build %
" of slave address " % build %
" with master " % build)
masterSlaveSig
slavePk
masterPk
when (masterPk `notElem` trustedKeys) $
throwM $ NEInvalidArguments "provided master pk is not a trusted key"
unless (verify slavePk requestSig signedData) $
throwM $ NEUnrelatedSignature $ sformat
("(msAddr, strategy) not signed with proper sk for pk: " % build) slavePk
when (S.size _allParties > maxStrategySize) $
throwM $ NEInvalidStrategy
(sformat ("multisignature address can't have more than " % int % " parties")
(maxStrategySize :: Word))
when (S.size _allParties < 2) $
throwM $ NEInvalidStrategy "multisignature address should have at least two members"
when (_sigNumber <= 0) $
throwM $ NEInvalidStrategy "number of signatures to sign tx should be positive"
when (_sigNumber > S.size _allParties) $
        throwM $ NEInvalidStrategy
              "number of signatures to sign tx is greater than the number of members"
whenM (uses addresses $ M.member msAddr) $
throwM $ NEInvalidArguments $ sformat
("ms address " % build % " already registered; please, regenerate new") msAddr
let allocAddress = partyToAllocation argPartyAddress
    -- TODO: this needs a lot of refactoring, but that requires changing the arguments
case argPartyAddress of
TrustParty{..} -> case mMasterSlavePair of
Nothing -> throwM $ NEInvalidArguments "trust didn't provide master key"
Just (TrustAlloc . Address -> masterAlloc, _) ->
unless (masterAlloc `S.member` _allParties) $
throwM $ NEInvalidArguments $ sformat
("master key is not a trust member of strategy")
UserParty{..} ->
unless (allocAddress `S.member` _allParties) $
throwM $ NEInvalidArguments $ sformat
("user address " % build % " is not a member of strategy")
argPartyAddress
guardMaxAttemps partyAddr
mMSAddressInfo <- uses allocationStrategyPool $ HM.lookup msAddr
case mMSAddressInfo of
Nothing -> do
pId <- use periodId
discardMSAddresses %= IM.alter (Just . (msAddr :) . fromMaybe []) pId
allocationStrategyPool.at msAddr ?=
AllocationInfo { _allocationStrategy = argStrategy
, _currentConfirmations = HM.singleton allocAddress partyAddr }
Just ainfo -> do
when (ainfo^.allocationStrategy /= argStrategy) $
throwM $ NEInvalidArguments "result strategy for MS address is not equal to yours"
allocationStrategyPool.at msAddr ?=
(ainfo & currentConfirmations %~ HM.insert allocAddress partyAddr)
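-- A rough usage sketch (not part of the original module): each party signs
-- the pair @(msAddr, strategy)@ with its own secret key and submits one
-- confirmation through the acid-state interface.  The handle @st@ and the
-- 'AllocateMSAddress' event name below are hypothetical; the event is assumed
-- to be generated elsewhere by @makeAcidic@.
--
-- @
-- confirmAllocation st msAddr party strategy sig mMaster =
--     update st (AllocateMSAddress msAddr party strategy sig mMaster)
-- @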
-- | Remove all given complete addresses from the allocation pool (bank-only usage).
removeCompleteMSAddresses :: PublicKey -> [MSAddress] -> Signature [MSAddress] -> Update Storage ()
removeCompleteMSAddresses bankPublicKey completeAddrs signedAddrs = do
unless (verify bankPublicKey signedAddrs completeAddrs) $
throwM $
NEUnrelatedSignature "addr list in remove MS query not signed by bank"
forM_ completeAddrs $
        \address ->
             allocationStrategyPool %= HM.delete address
---------------
-- transactions
---------------
-- | Receives @tx@, @msAddr@ and a @(partyAddr, sig)@ pair, checks their
-- validity, and adds @(partyAddr, sig)@ to the list of signatures already
-- collected in storage for this particular @tx@.
addSignedTransaction :: Transaction
-> MSAddress
-> (Address, Signature Transaction)
-> Update Storage ()
addSignedTransaction tx@Transaction{..} msAddr (partyAddr, sig) = do
liftQuery $ guardIfSynchronized
checkTransactionValidity
checkAddrIdsKnown
checkSigRelativeToAddr
pId <- use periodId
discardTransactions %= IM.alter (Just . (tx :) . fromMaybe []) pId
txPool %= HM.alter (Just . HM.insert partyAddr sig . fromMaybe HM.empty) tx
where
checkTransactionValidity = case validateTxPure tx of
TxValid -> return ()
TxInvalid err -> throwM $ NEInvalidArguments $
sformat ("Transaction doesn't pass validity check (" % build % "): " % build)
err tx
    -- | Throws an error (with the corresponding label) if some addrid isn't known.
    -- The user should retry the transaction after some timeout.
checkAddrIdsKnown = do
curUtxo <- use utxo
unless (all (`HM.member` curUtxo) txInputs) $
use periodId >>= throwM . NEAddrIdNotInUtxo
checkSigRelativeToAddr = do
strategy <- liftQuery (getStrategy msAddr)
case strategy of
DefaultStrategy ->
throwM $ NEStrategyNotSupported "DefaultStrategy"
MOfNStrategy _ addrs ->
when (partyAddr `notElem` addrs) $
throwM $ NEUnrelatedSignature "party address not a member of this MS address"
unless (validateSignature sig partyAddr tx) $
throwM NEInvalidSignature
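-- A rough flow sketch (not part of the original module): after every party
-- has submitted its signature with 'addSignedTransaction', a client can poll
-- 'getSignatures' until enough signatures are collected to satisfy the
-- 'MOfNStrategy'.  The handle @st@, the 'GetSignatures' event, the threshold
-- @m@ and @submitToMintettes@ below are all placeholders; the event is
-- assumed to be generated elsewhere by @makeAcidic@.
--
-- @
-- collected <- query st (GetSignatures tx)
-- when (length collected >= m) $ submitToMintettes tx collected
-- @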
--------------------
-- handle period end
--------------------
-- | Update the Notary 'Storage' with the last HBlock known to the Bank.
-- This function checks whether the Notary really has all previous HBlocks
-- and throws an exception if it doesn't.
updateWithLastHBlock
:: PeriodId
-> HBlock
-> Update Storage ()
updateWithLastHBlock bankPid hb = do
expectedBlockId <- use periodId
guardIncomingHBlock bankPid expectedBlockId hb
guardIncomingHBlock
:: PeriodId
-> PeriodId
-> HBlock
-> Update Storage ()
guardIncomingHBlock bankPid expectedBlockId HBlock{..}
| bankPid < expectedBlockId = return ()
| bankPid /= expectedBlockId = do
isSynchronized .= False
throwM $ NENotUpdated $ sformat
("Got HBlock from period id " % int % " but Notary expected " % int)
bankPid
expectedBlockId
| otherwise = do
isSynchronized .= True
addresses %= M.union hbAddresses
periodStats .= HM.empty
periodId .= bankPid + 1
    forM_ hbTransactions processPublishedTransaction
  where
    processPublishedTransaction tx@Transaction{..} = do
txPool %= HM.delete tx
let txOuts = computeOutputAddrids tx
forM_ txInputs $ \addrId ->
utxo %= HM.delete addrId
forM_ txOuts $ \(addrId, addr) ->
utxo %= HM.insert addrId addr
-- | Announce HBlocks not yet known to the Notary.
batchUpdatePeriods
:: PeriodId -- ^ Last period id; i.e. id of head of HBlocks
-> [HBlock] -- ^ Blocks; head corresponds to the latest block
-> Update Storage ()
batchUpdatePeriods lastPid hBlocks = do
let indexedBlocks = zip [lastPid, lastPid - 1 ..] hBlocks
mapM_ (uncurry updateWithLastHBlock) $ reverse indexedBlocks
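-- Worked example of the indexing above (a sketch, not part of the original
-- code): with @lastPid = 7@ and three blocks @[b7, b6, b5]@ (head is the
-- latest), @indexedBlocks@ is @[(7, b7), (6, b6), (5, b5)]@, and the
-- 'reverse' makes 'updateWithLastHBlock' consume them oldest-first:
-- @(5, b5)@, then @(6, b6)@, then @(7, b7)@.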
-- | Clear all outdated information from `discard*` Maps.
removeOutdatedInfo
:: (Eq info, Hashable info)
=> PeriodId
-> Getter Storage PeriodId
-> Lens' Storage (IntMap [info])
-> Lens' Storage (HashMap info value)
-> Update Storage ()
removeOutdatedInfo pId enduranceLens discardLens poolLens = do
aliveInterval <- use enduranceLens
unlessM (uses discardLens IM.null) $ do
(oldestSavedPid, _) <- uses discardLens IM.findMin
let deleteLookup = const $ const Nothing
forM_ [oldestSavedPid .. pId - aliveInterval] $ \oldPid -> do
(mInfoList, newDiscard) <- uses discardLens
$ IM.updateLookupWithKey deleteLookup oldPid
discardLens .= newDiscard -- @TODO: optimize and set only once
whenJust mInfoList $ \infoList -> forM_ infoList $ \info ->
poolLens %= HM.delete info
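-- Worked example (a sketch, not part of the original code): with @pId = 10@,
-- an endurance of @3@ and discard entries for periods @[5 .. 9]@, the loop
-- walks @[oldestSavedPid .. pId - aliveInterval]@, i.e. periods @5, 6, 7@,
-- deleting those entries from the discard map together with the matching
-- items in the pool; the entries for periods @8@ and @9@ survive.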
-- | Announce a new period to the Notary from the Bank.
announceNewPeriod :: PeriodId -> HBlock -> Update Storage ()
announceNewPeriod lastPid hb = do
updateWithLastHBlock lastPid hb
-- Discard old MS address allocation requests
removeOutdatedInfo
lastPid
allocationEndurance
discardMSAddresses
allocationStrategyPool
-- Discard old pending transactions
removeOutdatedInfo
lastPid
transactionEndurance
discardTransactions
txPool
-- =============
-- QUERY SECTION
-- =============
-----------------
-- ms allocation
-----------------
outdatedAllocs :: Query Storage (IntMap [MSAddress])
outdatedAllocs = view discardMSAddresses
queryMSAddressesHelper
:: Lens' Storage (HashMap MSAddress info)
-> (info -> Bool)
-> (info -> meta)
-> Query Storage [(MSAddress, meta)]
queryMSAddressesHelper poolLens selector mapper =
view
$ poolLens
. to (HM.filter selector)
. to (HM.map mapper)
. to HM.toList
-- | Query all Multisignature addresses.
queryAllMSAdresses :: Query Storage [(MSAddress, AllocationInfo)]
queryAllMSAdresses = queryMSAddressesHelper allocationStrategyPool (const True) id
-- | Query all completed multisignature addresses
queryCompleteMSAdresses :: Query Storage [(MSAddress, TxStrategy)]
queryCompleteMSAdresses = queryMSAddressesHelper
allocationStrategyPool
(\ainfo ->
ainfo^.allocationStrategy.allParties.to S.size ==
ainfo^.currentConfirmations.to HM.size)
(allocateTxFromAlloc . _allocationStrategy)
-- | Request all addresses that contain 'allocAddress' as a party.
queryMyMSRequests :: AllocationAddress -> Query Storage [(MSAddress, AllocationInfo)]
queryMyMSRequests allocAddress = queryMSAddressesHelper
allocationStrategyPool
(\ainfo -> ainfo^.allocationStrategy.allParties.to (S.member allocAddress))
id
---------------
-- transactions
---------------
-- | Get address strategy.
getStrategy :: Address -> Query Storage TxStrategy
getStrategy addr = fromMaybe DefaultStrategy . M.lookup addr <$> view addresses
-- | For the given @tx@ get the list of collected signatures (or an empty list
-- if @tx@ is not registered or has already been removed from the Notary).
getSignatures :: Transaction -> Query Storage [(Address, Signature Transaction)]
getSignatures tx = HM.toList . (HM.lookupDefault HM.empty tx) <$> view txPool
-- | Collect all pending multisignature transactions for which at least one
-- party is a member of the given list.
-- @TODO: replace [Address] with @HashSet Address@ for faster checks
pollPendingTxs :: [Address] -> Query Storage [Transaction]
pollPendingTxs parties = do
guardIfSynchronized
pendingTxs <- views txPool HM.keys
curUtxo <- view utxo
let resultTxSet = F.foldl' (partyFold curUtxo) HS.empty pendingTxs
return $ HS.toList resultTxSet
where
partyFold :: Utxo
-> HashSet Transaction
-> Transaction
-> HashSet Transaction
partyFold addrIdResolve txSet tx@Transaction{..} =
if any (`elem` parties)
$ mapMaybe (`HM.lookup` addrIdResolve) txInputs
then HS.insert tx txSet
else txSet
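-- A rough usage sketch (not part of the original module): a pending @tx@ is
-- returned when at least one of its 'txInputs' resolves, through the current
-- 'Utxo', to an address in the given list.  The handle @st@ and the
-- 'PollPendingTxs' event below are hypothetical and assumed to be generated
-- elsewhere by @makeAcidic@.
--
-- @
-- myPending <- query st (PollPendingTxs [myAddress, sharedMSAddress])
-- @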
------------------------
-- storage miscellaneous
------------------------
-- | Get last known periodId of Notary (interface for bank).
getPeriodId :: Query Storage PeriodId
getPeriodId = view periodId
checkIfSynchronized :: Query Storage Bool
checkIfSynchronized = view isSynchronized
setSynchronization :: Bool -> Update Storage ()
setSynchronization isUpdated = isSynchronized .= isUpdated
|
input-output-hk/rscoin-haskell
|
src/RSCoin/Notary/Storage.hs
|
gpl-3.0
| 19,776
| 0
| 20
| 5,832
| 3,698
| 1,966
| 1,732
| -1
| -1
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.SQLAdmin.Types.Sum
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.SQLAdmin.Types.Sum where
import Network.Google.Prelude hiding (Bytes)
-- | The database engine type and version. The **databaseVersion** field
-- cannot be changed after instance creation. MySQL instances:
-- **MYSQL_8_0**, **MYSQL_5_7** (default), or **MYSQL_5_6**. PostgreSQL
-- instances: **POSTGRES_9_6**, **POSTGRES_10**, **POSTGRES_11** or
-- **POSTGRES_12** (default). SQL Server instances:
-- **SQLSERVER_2017_STANDARD** (default), **SQLSERVER_2017_ENTERPRISE**,
-- **SQLSERVER_2017_EXPRESS**, or **SQLSERVER_2017_WEB**.
data ConnectSettingsDatabaseVersion
= SQLDatabaseVersionUnspecified
-- ^ @SQL_DATABASE_VERSION_UNSPECIFIED@
-- This is an unknown database version.
| Mysql51
-- ^ @MYSQL_5_1@
-- The database version is MySQL 5.1.
| Mysql55
-- ^ @MYSQL_5_5@
-- The database version is MySQL 5.5.
| Mysql56
-- ^ @MYSQL_5_6@
-- The database version is MySQL 5.6.
| Mysql57
-- ^ @MYSQL_5_7@
-- The database version is MySQL 5.7.
| Postgres96
-- ^ @POSTGRES_9_6@
-- The database version is PostgreSQL 9.6.
| Postgres11
-- ^ @POSTGRES_11@
-- The database version is PostgreSQL 11.
| SQLserver2017Standard
-- ^ @SQLSERVER_2017_STANDARD@
-- The database version is SQL Server 2017 Standard.
| SQLserver2017Enterprise
-- ^ @SQLSERVER_2017_ENTERPRISE@
-- The database version is SQL Server 2017 Enterprise.
| SQLserver2017Express
-- ^ @SQLSERVER_2017_EXPRESS@
-- The database version is SQL Server 2017 Express.
| SQLserver2017Web
-- ^ @SQLSERVER_2017_WEB@
-- The database version is SQL Server 2017 Web.
| Postgres10
-- ^ @POSTGRES_10@
-- The database version is PostgreSQL 10.
| Postgres12
-- ^ @POSTGRES_12@
-- The database version is PostgreSQL 12.
| Postgres13
-- ^ @POSTGRES_13@
-- The database version is PostgreSQL 13.
| SQLserver2019Standard
-- ^ @SQLSERVER_2019_STANDARD@
-- The database version is SQL Server 2019 Standard.
| SQLserver2019Enterprise
-- ^ @SQLSERVER_2019_ENTERPRISE@
-- The database version is SQL Server 2019 Enterprise.
| SQLserver2019Express
-- ^ @SQLSERVER_2019_EXPRESS@
-- The database version is SQL Server 2019 Express.
| SQLserver2019Web
-- ^ @SQLSERVER_2019_WEB@
-- The database version is SQL Server 2019 Web.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable ConnectSettingsDatabaseVersion
instance FromHttpApiData ConnectSettingsDatabaseVersion where
parseQueryParam = \case
"SQL_DATABASE_VERSION_UNSPECIFIED" -> Right SQLDatabaseVersionUnspecified
"MYSQL_5_1" -> Right Mysql51
"MYSQL_5_5" -> Right Mysql55
"MYSQL_5_6" -> Right Mysql56
"MYSQL_5_7" -> Right Mysql57
"POSTGRES_9_6" -> Right Postgres96
"POSTGRES_11" -> Right Postgres11
"SQLSERVER_2017_STANDARD" -> Right SQLserver2017Standard
"SQLSERVER_2017_ENTERPRISE" -> Right SQLserver2017Enterprise
"SQLSERVER_2017_EXPRESS" -> Right SQLserver2017Express
"SQLSERVER_2017_WEB" -> Right SQLserver2017Web
"POSTGRES_10" -> Right Postgres10
"POSTGRES_12" -> Right Postgres12
"POSTGRES_13" -> Right Postgres13
"SQLSERVER_2019_STANDARD" -> Right SQLserver2019Standard
"SQLSERVER_2019_ENTERPRISE" -> Right SQLserver2019Enterprise
"SQLSERVER_2019_EXPRESS" -> Right SQLserver2019Express
"SQLSERVER_2019_WEB" -> Right SQLserver2019Web
x -> Left ("Unable to parse ConnectSettingsDatabaseVersion from: " <> x)
instance ToHttpApiData ConnectSettingsDatabaseVersion where
toQueryParam = \case
SQLDatabaseVersionUnspecified -> "SQL_DATABASE_VERSION_UNSPECIFIED"
Mysql51 -> "MYSQL_5_1"
Mysql55 -> "MYSQL_5_5"
Mysql56 -> "MYSQL_5_6"
Mysql57 -> "MYSQL_5_7"
Postgres96 -> "POSTGRES_9_6"
Postgres11 -> "POSTGRES_11"
SQLserver2017Standard -> "SQLSERVER_2017_STANDARD"
SQLserver2017Enterprise -> "SQLSERVER_2017_ENTERPRISE"
SQLserver2017Express -> "SQLSERVER_2017_EXPRESS"
SQLserver2017Web -> "SQLSERVER_2017_WEB"
Postgres10 -> "POSTGRES_10"
Postgres12 -> "POSTGRES_12"
Postgres13 -> "POSTGRES_13"
SQLserver2019Standard -> "SQLSERVER_2019_STANDARD"
SQLserver2019Enterprise -> "SQLSERVER_2019_ENTERPRISE"
SQLserver2019Express -> "SQLSERVER_2019_EXPRESS"
SQLserver2019Web -> "SQLSERVER_2019_WEB"
instance FromJSON ConnectSettingsDatabaseVersion where
parseJSON = parseJSONText "ConnectSettingsDatabaseVersion"
instance ToJSON ConnectSettingsDatabaseVersion where
toJSON = toJSONText
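-- A small round-trip sketch (not part of the generated module): the
-- 'ToHttpApiData' and 'FromHttpApiData' instances above are mutual inverses
-- on the wire representation, e.g.
--
-- >>> toQueryParam Postgres13
-- "POSTGRES_13"
--
-- >>> parseQueryParam "MYSQL_5_7" :: Either Text ConnectSettingsDatabaseVersion
-- Right Mysql57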
-- | The type of the flag. Flags are typed to being **BOOLEAN**, **STRING**,
-- **INTEGER** or **NONE**. **NONE** is used for flags which do not take a
-- value, such as **skip_grant_tables**.
data FlagType
= SQLFlagTypeUnspecified
-- ^ @SQL_FLAG_TYPE_UNSPECIFIED@
-- This is an unknown flag type.
| Boolean
-- ^ @BOOLEAN@
-- Boolean type flag.
| String
-- ^ @STRING@
-- String type flag.
| Integer
-- ^ @INTEGER@
-- Integer type flag.
| None
-- ^ @NONE@
-- Flag type used for a server startup option.
| MysqlTimezoneOffSet
-- ^ @MYSQL_TIMEZONE_OFFSET@
-- Type introduced specially for MySQL TimeZone offset. Accept a string
-- value with the format [-12:59, 13:00].
| Float
-- ^ @FLOAT@
-- Float type flag.
| RepeatedString
-- ^ @REPEATED_STRING@
-- Comma-separated list of the strings in a SqlFlagType enum.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable FlagType
instance FromHttpApiData FlagType where
parseQueryParam = \case
"SQL_FLAG_TYPE_UNSPECIFIED" -> Right SQLFlagTypeUnspecified
"BOOLEAN" -> Right Boolean
"STRING" -> Right String
"INTEGER" -> Right Integer
"NONE" -> Right None
"MYSQL_TIMEZONE_OFFSET" -> Right MysqlTimezoneOffSet
"FLOAT" -> Right Float
"REPEATED_STRING" -> Right RepeatedString
x -> Left ("Unable to parse FlagType from: " <> x)
instance ToHttpApiData FlagType where
toQueryParam = \case
SQLFlagTypeUnspecified -> "SQL_FLAG_TYPE_UNSPECIFIED"
Boolean -> "BOOLEAN"
String -> "STRING"
Integer -> "INTEGER"
None -> "NONE"
MysqlTimezoneOffSet -> "MYSQL_TIMEZONE_OFFSET"
Float -> "FLOAT"
RepeatedString -> "REPEATED_STRING"
instance FromJSON FlagType where
parseJSON = parseJSONText "FlagType"
instance ToJSON FlagType where
toJSON = toJSONText
-- | The type of this run; can be either \"AUTOMATED\" or \"ON_DEMAND\". This
-- field defaults to \"ON_DEMAND\" and is ignored, when specified for
-- insert requests.
data BackupRunType
= SQLBackupRunTypeUnspecified
-- ^ @SQL_BACKUP_RUN_TYPE_UNSPECIFIED@
-- This is an unknown BackupRun type.
| Automated
-- ^ @AUTOMATED@
-- The backup schedule automatically triggers a backup.
| OnDemand
-- ^ @ON_DEMAND@
-- The user manually triggers a backup.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable BackupRunType
instance FromHttpApiData BackupRunType where
parseQueryParam = \case
"SQL_BACKUP_RUN_TYPE_UNSPECIFIED" -> Right SQLBackupRunTypeUnspecified
"AUTOMATED" -> Right Automated
"ON_DEMAND" -> Right OnDemand
x -> Left ("Unable to parse BackupRunType from: " <> x)
instance ToHttpApiData BackupRunType where
toQueryParam = \case
SQLBackupRunTypeUnspecified -> "SQL_BACKUP_RUN_TYPE_UNSPECIFIED"
Automated -> "AUTOMATED"
OnDemand -> "ON_DEMAND"
instance FromJSON BackupRunType where
parseJSON = parseJSONText "BackupRunType"
instance ToJSON BackupRunType where
toJSON = toJSONText
-- | **SECOND_GEN**: Cloud SQL database instance. **EXTERNAL**: A database
-- server that is not managed by Google. This property is read-only; use
-- the **tier** property in the **settings** object to determine the
-- database type.
data ConnectSettingsBackendType
= SQLBackendTypeUnspecified
-- ^ @SQL_BACKEND_TYPE_UNSPECIFIED@
-- This is an unknown backend type for instance.
| FirstGen
-- ^ @FIRST_GEN@
-- V1 speckle instance.
| SecondGen
-- ^ @SECOND_GEN@
-- V2 speckle instance.
| External
-- ^ @EXTERNAL@
-- On premises instance.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable ConnectSettingsBackendType
instance FromHttpApiData ConnectSettingsBackendType where
parseQueryParam = \case
"SQL_BACKEND_TYPE_UNSPECIFIED" -> Right SQLBackendTypeUnspecified
"FIRST_GEN" -> Right FirstGen
"SECOND_GEN" -> Right SecondGen
"EXTERNAL" -> Right External
x -> Left ("Unable to parse ConnectSettingsBackendType from: " <> x)
instance ToHttpApiData ConnectSettingsBackendType where
toQueryParam = \case
SQLBackendTypeUnspecified -> "SQL_BACKEND_TYPE_UNSPECIFIED"
FirstGen -> "FIRST_GEN"
SecondGen -> "SECOND_GEN"
External -> "EXTERNAL"
instance FromJSON ConnectSettingsBackendType where
parseJSON = parseJSONText "ConnectSettingsBackendType"
instance ToJSON ConnectSettingsBackendType where
toJSON = toJSONText
-- | The unit that \'retained_backups\' represents.
data BackupRetentionSettingsRetentionUnit
= RetentionUnitUnspecified
-- ^ @RETENTION_UNIT_UNSPECIFIED@
-- Backup retention unit is unspecified, will be treated as COUNT.
| Count
-- ^ @COUNT@
      -- Retention will be by count, e.g. \"retain the most recent 7 backups\".
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable BackupRetentionSettingsRetentionUnit
instance FromHttpApiData BackupRetentionSettingsRetentionUnit where
parseQueryParam = \case
"RETENTION_UNIT_UNSPECIFIED" -> Right RetentionUnitUnspecified
"COUNT" -> Right Count
x -> Left ("Unable to parse BackupRetentionSettingsRetentionUnit from: " <> x)
instance ToHttpApiData BackupRetentionSettingsRetentionUnit where
toQueryParam = \case
RetentionUnitUnspecified -> "RETENTION_UNIT_UNSPECIFIED"
Count -> "COUNT"
instance FromJSON BackupRetentionSettingsRetentionUnit where
parseJSON = parseJSONText "BackupRetentionSettingsRetentionUnit"
instance ToJSON BackupRetentionSettingsRetentionUnit where
toJSON = toJSONText
data FlagAppliesToItem
= FATISQLDatabaseVersionUnspecified
-- ^ @SQL_DATABASE_VERSION_UNSPECIFIED@
-- This is an unknown database version.
| FATIMysql51
-- ^ @MYSQL_5_1@
-- The database version is MySQL 5.1.
| FATIMysql55
-- ^ @MYSQL_5_5@
-- The database version is MySQL 5.5.
| FATIMysql56
-- ^ @MYSQL_5_6@
-- The database version is MySQL 5.6.
| FATIMysql57
-- ^ @MYSQL_5_7@
-- The database version is MySQL 5.7.
| FATIPostgres96
-- ^ @POSTGRES_9_6@
-- The database version is PostgreSQL 9.6.
| FATIPostgres11
-- ^ @POSTGRES_11@
-- The database version is PostgreSQL 11.
| FATISQLserver2017Standard
-- ^ @SQLSERVER_2017_STANDARD@
-- The database version is SQL Server 2017 Standard.
| FATISQLserver2017Enterprise
-- ^ @SQLSERVER_2017_ENTERPRISE@
-- The database version is SQL Server 2017 Enterprise.
| FATISQLserver2017Express
-- ^ @SQLSERVER_2017_EXPRESS@
-- The database version is SQL Server 2017 Express.
| FATISQLserver2017Web
-- ^ @SQLSERVER_2017_WEB@
-- The database version is SQL Server 2017 Web.
| FATIPostgres10
-- ^ @POSTGRES_10@
-- The database version is PostgreSQL 10.
| FATIPostgres12
-- ^ @POSTGRES_12@
-- The database version is PostgreSQL 12.
| FATIPostgres13
-- ^ @POSTGRES_13@
-- The database version is PostgreSQL 13.
| FATISQLserver2019Standard
-- ^ @SQLSERVER_2019_STANDARD@
-- The database version is SQL Server 2019 Standard.
| FATISQLserver2019Enterprise
-- ^ @SQLSERVER_2019_ENTERPRISE@
-- The database version is SQL Server 2019 Enterprise.
| FATISQLserver2019Express
-- ^ @SQLSERVER_2019_EXPRESS@
-- The database version is SQL Server 2019 Express.
| FATISQLserver2019Web
-- ^ @SQLSERVER_2019_WEB@
-- The database version is SQL Server 2019 Web.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable FlagAppliesToItem
instance FromHttpApiData FlagAppliesToItem where
parseQueryParam = \case
"SQL_DATABASE_VERSION_UNSPECIFIED" -> Right FATISQLDatabaseVersionUnspecified
"MYSQL_5_1" -> Right FATIMysql51
"MYSQL_5_5" -> Right FATIMysql55
"MYSQL_5_6" -> Right FATIMysql56
"MYSQL_5_7" -> Right FATIMysql57
"POSTGRES_9_6" -> Right FATIPostgres96
"POSTGRES_11" -> Right FATIPostgres11
"SQLSERVER_2017_STANDARD" -> Right FATISQLserver2017Standard
"SQLSERVER_2017_ENTERPRISE" -> Right FATISQLserver2017Enterprise
"SQLSERVER_2017_EXPRESS" -> Right FATISQLserver2017Express
"SQLSERVER_2017_WEB" -> Right FATISQLserver2017Web
"POSTGRES_10" -> Right FATIPostgres10
"POSTGRES_12" -> Right FATIPostgres12
"POSTGRES_13" -> Right FATIPostgres13
"SQLSERVER_2019_STANDARD" -> Right FATISQLserver2019Standard
"SQLSERVER_2019_ENTERPRISE" -> Right FATISQLserver2019Enterprise
"SQLSERVER_2019_EXPRESS" -> Right FATISQLserver2019Express
"SQLSERVER_2019_WEB" -> Right FATISQLserver2019Web
x -> Left ("Unable to parse FlagAppliesToItem from: " <> x)
instance ToHttpApiData FlagAppliesToItem where
toQueryParam = \case
FATISQLDatabaseVersionUnspecified -> "SQL_DATABASE_VERSION_UNSPECIFIED"
FATIMysql51 -> "MYSQL_5_1"
FATIMysql55 -> "MYSQL_5_5"
FATIMysql56 -> "MYSQL_5_6"
FATIMysql57 -> "MYSQL_5_7"
FATIPostgres96 -> "POSTGRES_9_6"
FATIPostgres11 -> "POSTGRES_11"
FATISQLserver2017Standard -> "SQLSERVER_2017_STANDARD"
FATISQLserver2017Enterprise -> "SQLSERVER_2017_ENTERPRISE"
FATISQLserver2017Express -> "SQLSERVER_2017_EXPRESS"
FATISQLserver2017Web -> "SQLSERVER_2017_WEB"
FATIPostgres10 -> "POSTGRES_10"
FATIPostgres12 -> "POSTGRES_12"
FATIPostgres13 -> "POSTGRES_13"
FATISQLserver2019Standard -> "SQLSERVER_2019_STANDARD"
FATISQLserver2019Enterprise -> "SQLSERVER_2019_ENTERPRISE"
FATISQLserver2019Express -> "SQLSERVER_2019_EXPRESS"
FATISQLserver2019Web -> "SQLSERVER_2019_WEB"
instance FromJSON FlagAppliesToItem where
parseJSON = parseJSONText "FlagAppliesToItem"
instance ToJSON FlagAppliesToItem where
toJSON = toJSONText
-- | The type of the operation. Valid values are: **CREATE** **DELETE**
-- **UPDATE** **RESTART** **IMPORT** **EXPORT** **BACKUP_VOLUME**
-- **RESTORE_VOLUME** **CREATE_USER** **DELETE_USER** **CREATE_DATABASE**
-- **DELETE_DATABASE**
data OperationOperationType
= SQLOperationTypeUnspecified
-- ^ @SQL_OPERATION_TYPE_UNSPECIFIED@
-- Unknown operation type.
| Import
-- ^ @IMPORT@
-- Imports data into a Cloud SQL instance.
| Export
-- ^ @EXPORT@
-- Exports data from a Cloud SQL instance to a Cloud Storage bucket.
| Create
-- ^ @CREATE@
-- Creates a new Cloud SQL instance.
| Update
-- ^ @UPDATE@
-- Updates the settings of a Cloud SQL instance.
| Delete'
-- ^ @DELETE@
-- Deletes a Cloud SQL instance.
| Restart
-- ^ @RESTART@
-- Restarts the Cloud SQL instance.
| Backup
-- ^ @BACKUP@
| Snapshot
-- ^ @SNAPSHOT@
| BackupVolume
-- ^ @BACKUP_VOLUME@
-- Performs instance backup.
| DeleteVolume
-- ^ @DELETE_VOLUME@
-- Deletes an instance backup.
| RestoreVolume
-- ^ @RESTORE_VOLUME@
-- Restores an instance backup.
| InjectUser
-- ^ @INJECT_USER@
-- Injects a privileged user in mysql for MOB instances.
| Clone
-- ^ @CLONE@
-- Clones a Cloud SQL instance.
| StopReplica
-- ^ @STOP_REPLICA@
-- Stops replication on a Cloud SQL read replica instance.
| StartReplica
-- ^ @START_REPLICA@
-- Starts replication on a Cloud SQL read replica instance.
| PromoteReplica
-- ^ @PROMOTE_REPLICA@
-- Promotes a Cloud SQL replica instance.
| CreateReplica
-- ^ @CREATE_REPLICA@
-- Creates a Cloud SQL replica instance.
| CreateUser
-- ^ @CREATE_USER@
-- Creates a new user in a Cloud SQL instance.
| DeleteUser
-- ^ @DELETE_USER@
-- Deletes a user from a Cloud SQL instance.
| UpdateUser
-- ^ @UPDATE_USER@
-- Updates an existing user in a Cloud SQL instance.
| CreateDatabase
-- ^ @CREATE_DATABASE@
-- Creates a database in the Cloud SQL instance.
| DeleteDatabase
-- ^ @DELETE_DATABASE@
-- Deletes a database in the Cloud SQL instance.
| UpdateDatabase
-- ^ @UPDATE_DATABASE@
-- Updates a database in the Cloud SQL instance.
| Failover
-- ^ @FAILOVER@
-- Performs failover of an HA-enabled Cloud SQL failover replica.
| DeleteBackup
-- ^ @DELETE_BACKUP@
-- Deletes the backup taken by a backup run.
| RecreateReplica
-- ^ @RECREATE_REPLICA@
| TruncateLog
-- ^ @TRUNCATE_LOG@
-- Truncates a general or slow log table in MySQL.
| DemoteMaster
-- ^ @DEMOTE_MASTER@
-- Demotes the stand-alone instance to be a Cloud SQL read replica for an
-- external database server.
| Maintenance
-- ^ @MAINTENANCE@
-- Indicates that the instance is currently in maintenance. Maintenance
-- typically causes the instance to be unavailable for 1-3 minutes.
| EnablePrivateIP
-- ^ @ENABLE_PRIVATE_IP@
      -- This field is deprecated and will be removed in a future version of the API.
| DeferMaintenance
-- ^ @DEFER_MAINTENANCE@
| CreateClone
-- ^ @CREATE_CLONE@
-- Creates clone instance.
| RescheduleMaintenance
-- ^ @RESCHEDULE_MAINTENANCE@
-- Reschedule maintenance to another time.
| StartExternalSync
-- ^ @START_EXTERNAL_SYNC@
-- Starts external sync of a Cloud SQL EM replica to an external primary
-- instance.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable OperationOperationType
instance FromHttpApiData OperationOperationType where
parseQueryParam = \case
"SQL_OPERATION_TYPE_UNSPECIFIED" -> Right SQLOperationTypeUnspecified
"IMPORT" -> Right Import
"EXPORT" -> Right Export
"CREATE" -> Right Create
"UPDATE" -> Right Update
"DELETE" -> Right Delete'
"RESTART" -> Right Restart
"BACKUP" -> Right Backup
"SNAPSHOT" -> Right Snapshot
"BACKUP_VOLUME" -> Right BackupVolume
"DELETE_VOLUME" -> Right DeleteVolume
"RESTORE_VOLUME" -> Right RestoreVolume
"INJECT_USER" -> Right InjectUser
"CLONE" -> Right Clone
"STOP_REPLICA" -> Right StopReplica
"START_REPLICA" -> Right StartReplica
"PROMOTE_REPLICA" -> Right PromoteReplica
"CREATE_REPLICA" -> Right CreateReplica
"CREATE_USER" -> Right CreateUser
"DELETE_USER" -> Right DeleteUser
"UPDATE_USER" -> Right UpdateUser
"CREATE_DATABASE" -> Right CreateDatabase
"DELETE_DATABASE" -> Right DeleteDatabase
"UPDATE_DATABASE" -> Right UpdateDatabase
"FAILOVER" -> Right Failover
"DELETE_BACKUP" -> Right DeleteBackup
"RECREATE_REPLICA" -> Right RecreateReplica
"TRUNCATE_LOG" -> Right TruncateLog
"DEMOTE_MASTER" -> Right DemoteMaster
"MAINTENANCE" -> Right Maintenance
"ENABLE_PRIVATE_IP" -> Right EnablePrivateIP
"DEFER_MAINTENANCE" -> Right DeferMaintenance
"CREATE_CLONE" -> Right CreateClone
"RESCHEDULE_MAINTENANCE" -> Right RescheduleMaintenance
"START_EXTERNAL_SYNC" -> Right StartExternalSync
x -> Left ("Unable to parse OperationOperationType from: " <> x)
instance ToHttpApiData OperationOperationType where
toQueryParam = \case
SQLOperationTypeUnspecified -> "SQL_OPERATION_TYPE_UNSPECIFIED"
Import -> "IMPORT"
Export -> "EXPORT"
Create -> "CREATE"
Update -> "UPDATE"
Delete' -> "DELETE"
Restart -> "RESTART"
Backup -> "BACKUP"
Snapshot -> "SNAPSHOT"
BackupVolume -> "BACKUP_VOLUME"
DeleteVolume -> "DELETE_VOLUME"
RestoreVolume -> "RESTORE_VOLUME"
InjectUser -> "INJECT_USER"
Clone -> "CLONE"
StopReplica -> "STOP_REPLICA"
StartReplica -> "START_REPLICA"
PromoteReplica -> "PROMOTE_REPLICA"
CreateReplica -> "CREATE_REPLICA"
CreateUser -> "CREATE_USER"
DeleteUser -> "DELETE_USER"
UpdateUser -> "UPDATE_USER"
CreateDatabase -> "CREATE_DATABASE"
DeleteDatabase -> "DELETE_DATABASE"
UpdateDatabase -> "UPDATE_DATABASE"
Failover -> "FAILOVER"
DeleteBackup -> "DELETE_BACKUP"
RecreateReplica -> "RECREATE_REPLICA"
TruncateLog -> "TRUNCATE_LOG"
DemoteMaster -> "DEMOTE_MASTER"
Maintenance -> "MAINTENANCE"
EnablePrivateIP -> "ENABLE_PRIVATE_IP"
DeferMaintenance -> "DEFER_MAINTENANCE"
CreateClone -> "CREATE_CLONE"
RescheduleMaintenance -> "RESCHEDULE_MAINTENANCE"
StartExternalSync -> "START_EXTERNAL_SYNC"
instance FromJSON OperationOperationType where
parseJSON = parseJSONText "OperationOperationType"
instance ToJSON OperationOperationType where
toJSON = toJSONText
-- | The pricing plan for this instance. This can be either **PER_USE** or
-- **PACKAGE**. Only **PER_USE** is supported for Second Generation
-- instances.
data SettingsPricingPlan
= SQLPricingPlanUnspecified
-- ^ @SQL_PRICING_PLAN_UNSPECIFIED@
-- This is an unknown pricing plan for this instance.
| Package
-- ^ @PACKAGE@
-- The instance is billed at a monthly flat rate.
| PerUse
-- ^ @PER_USE@
-- The instance is billed per usage.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable SettingsPricingPlan
instance FromHttpApiData SettingsPricingPlan where
parseQueryParam = \case
"SQL_PRICING_PLAN_UNSPECIFIED" -> Right SQLPricingPlanUnspecified
"PACKAGE" -> Right Package
"PER_USE" -> Right PerUse
x -> Left ("Unable to parse SettingsPricingPlan from: " <> x)
instance ToHttpApiData SettingsPricingPlan where
toQueryParam = \case
SQLPricingPlanUnspecified -> "SQL_PRICING_PLAN_UNSPECIFIED"
Package -> "PACKAGE"
PerUse -> "PER_USE"
instance FromJSON SettingsPricingPlan where
parseJSON = parseJSONText "SettingsPricingPlan"
instance ToJSON SettingsPricingPlan where
toJSON = toJSONText
-- | Specifies the kind of backup, PHYSICAL or DEFAULT_SNAPSHOT.
data BackupRunBackupKind
= BRBKSQLBackupKindUnspecified
-- ^ @SQL_BACKUP_KIND_UNSPECIFIED@
-- This is an unknown BackupKind.
| BRBKSnapshot
-- ^ @SNAPSHOT@
      -- Snapshot-based backups.
| BRBKPhysical
-- ^ @PHYSICAL@
      -- Physical backups.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable BackupRunBackupKind
instance FromHttpApiData BackupRunBackupKind where
parseQueryParam = \case
"SQL_BACKUP_KIND_UNSPECIFIED" -> Right BRBKSQLBackupKindUnspecified
"SNAPSHOT" -> Right BRBKSnapshot
"PHYSICAL" -> Right BRBKPhysical
x -> Left ("Unable to parse BackupRunBackupKind from: " <> x)
instance ToHttpApiData BackupRunBackupKind where
toQueryParam = \case
BRBKSQLBackupKindUnspecified -> "SQL_BACKUP_KIND_UNSPECIFIED"
BRBKSnapshot -> "SNAPSHOT"
BRBKPhysical -> "PHYSICAL"
instance FromJSON BackupRunBackupKind where
parseJSON = parseJSONText "BackupRunBackupKind"
instance ToJSON BackupRunBackupKind where
toJSON = toJSONText
-- | The status of an operation. Valid values are: **PENDING** **RUNNING**
-- **DONE** **SQL_OPERATION_STATUS_UNSPECIFIED**
data OperationStatus
= SQLOperationStatusUnspecified
-- ^ @SQL_OPERATION_STATUS_UNSPECIFIED@
-- The state of the operation is unknown.
| Pending
-- ^ @PENDING@
-- The operation has been queued, but has not started yet.
| Running
-- ^ @RUNNING@
-- The operation is running.
| Done
-- ^ @DONE@
-- The operation completed.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable OperationStatus
instance FromHttpApiData OperationStatus where
parseQueryParam = \case
"SQL_OPERATION_STATUS_UNSPECIFIED" -> Right SQLOperationStatusUnspecified
"PENDING" -> Right Pending
"RUNNING" -> Right Running
"DONE" -> Right Done
x -> Left ("Unable to parse OperationStatus from: " <> x)
instance ToHttpApiData OperationStatus where
toQueryParam = \case
SQLOperationStatusUnspecified -> "SQL_OPERATION_STATUS_UNSPECIFIED"
Pending -> "PENDING"
Running -> "RUNNING"
Done -> "DONE"
instance FromJSON OperationStatus where
parseJSON = parseJSONText "OperationStatus"
instance ToJSON OperationStatus where
toJSON = toJSONText
-- | The activation policy specifies when the instance is activated; it is
-- applicable only when the instance state is RUNNABLE. Valid values:
-- **ALWAYS**: The instance is on, and remains so even in the absence of
-- connection requests. **NEVER**: The instance is off; it is not
-- activated, even if a connection request arrives.
data SettingsActivationPolicy
= SAPSQLActivationPolicyUnspecified
-- ^ @SQL_ACTIVATION_POLICY_UNSPECIFIED@
-- Unknown activation plan.
| SAPAlways
-- ^ @ALWAYS@
-- The instance is always up and running.
| SAPNever
-- ^ @NEVER@
-- The instance never starts.
| SAPOnDemand
-- ^ @ON_DEMAND@
-- The instance starts upon receiving requests.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable SettingsActivationPolicy
instance FromHttpApiData SettingsActivationPolicy where
parseQueryParam = \case
"SQL_ACTIVATION_POLICY_UNSPECIFIED" -> Right SAPSQLActivationPolicyUnspecified
"ALWAYS" -> Right SAPAlways
"NEVER" -> Right SAPNever
"ON_DEMAND" -> Right SAPOnDemand
x -> Left ("Unable to parse SettingsActivationPolicy from: " <> x)
instance ToHttpApiData SettingsActivationPolicy where
toQueryParam = \case
SAPSQLActivationPolicyUnspecified -> "SQL_ACTIVATION_POLICY_UNSPECIFIED"
SAPAlways -> "ALWAYS"
SAPNever -> "NEVER"
SAPOnDemand -> "ON_DEMAND"
instance FromJSON SettingsActivationPolicy where
parseJSON = parseJSONText "SettingsActivationPolicy"
instance ToJSON SettingsActivationPolicy where
toJSON = toJSONText
-- | The file type for the specified uri. **SQL**: The file contains SQL
-- statements. **CSV**: The file contains CSV data.
data ImportContextFileType
= SQLFileTypeUnspecified
-- ^ @SQL_FILE_TYPE_UNSPECIFIED@
-- Unknown file type.
| SQL
-- ^ @SQL@
-- File containing SQL statements.
| CSV
-- ^ @CSV@
-- File in CSV format.
| Bak
-- ^ @BAK@
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable ImportContextFileType
instance FromHttpApiData ImportContextFileType where
parseQueryParam = \case
"SQL_FILE_TYPE_UNSPECIFIED" -> Right SQLFileTypeUnspecified
"SQL" -> Right SQL
"CSV" -> Right CSV
"BAK" -> Right Bak
x -> Left ("Unable to parse ImportContextFileType from: " <> x)
instance ToHttpApiData ImportContextFileType where
toQueryParam = \case
SQLFileTypeUnspecified -> "SQL_FILE_TYPE_UNSPECIFIED"
SQL -> "SQL"
CSV -> "CSV"
Bak -> "BAK"
instance FromJSON ImportContextFileType where
parseJSON = parseJSONText "ImportContextFileType"
instance ToJSON ImportContextFileType where
toJSON = toJSONText
-- | Availability type. Potential values: **ZONAL**: The instance serves data
-- from only one zone. Outages in that zone affect data accessibility.
-- **REGIONAL**: The instance can serve data from more than one zone in a
-- region (it is highly available). For more information, see [Overview of
-- the High Availability
-- Configuration](https:\/\/cloud.google.com\/sql\/docs\/mysql\/high-availability).
data SettingsAvailabilityType
= SQLAvailabilityTypeUnspecified
-- ^ @SQL_AVAILABILITY_TYPE_UNSPECIFIED@
-- This is an unknown Availability type.
| Zonal
-- ^ @ZONAL@
-- Zonal available instance.
| Regional
-- ^ @REGIONAL@
-- Regional available instance.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable SettingsAvailabilityType
instance FromHttpApiData SettingsAvailabilityType where
parseQueryParam = \case
"SQL_AVAILABILITY_TYPE_UNSPECIFIED" -> Right SQLAvailabilityTypeUnspecified
"ZONAL" -> Right Zonal
"REGIONAL" -> Right Regional
x -> Left ("Unable to parse SettingsAvailabilityType from: " <> x)
instance ToHttpApiData SettingsAvailabilityType where
toQueryParam = \case
SQLAvailabilityTypeUnspecified -> "SQL_AVAILABILITY_TYPE_UNSPECIFIED"
Zonal -> "ZONAL"
Regional -> "REGIONAL"
instance FromJSON SettingsAvailabilityType where
parseJSON = parseJSONText "SettingsAvailabilityType"
instance ToJSON SettingsAvailabilityType where
toJSON = toJSONText
-- | Maintenance timing setting: **canary** (Earlier) or **stable** (Later).
-- [Learn more]
-- (https:\/\/cloud.google.com\/sql\/docs\/mysql\/instance-settings#maintenance-timing-2ndgen).
data MaintenanceWindowUpdateTrack
= SQLUpdateTrackUnspecified
-- ^ @SQL_UPDATE_TRACK_UNSPECIFIED@
-- This is an unknown maintenance timing preference.
| Canary
-- ^ @canary@
      -- For an instance update that requires a restart, this update track
      -- indicates that your instance prefers to restart for the new version
      -- early in the maintenance window.
| Stable
-- ^ @stable@
      -- For an instance update that requires a restart, this update track
      -- indicates that your instance prefers to let Cloud SQL choose the
      -- timing of the restart (within its maintenance window, if applicable).
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable MaintenanceWindowUpdateTrack
instance FromHttpApiData MaintenanceWindowUpdateTrack where
parseQueryParam = \case
"SQL_UPDATE_TRACK_UNSPECIFIED" -> Right SQLUpdateTrackUnspecified
"canary" -> Right Canary
"stable" -> Right Stable
x -> Left ("Unable to parse MaintenanceWindowUpdateTrack from: " <> x)
instance ToHttpApiData MaintenanceWindowUpdateTrack where
toQueryParam = \case
SQLUpdateTrackUnspecified -> "SQL_UPDATE_TRACK_UNSPECIFIED"
Canary -> "canary"
Stable -> "stable"
instance FromJSON MaintenanceWindowUpdateTrack where
parseJSON = parseJSONText "MaintenanceWindowUpdateTrack"
instance ToJSON MaintenanceWindowUpdateTrack where
toJSON = toJSONText
-- | Code to uniquely identify the warning type.
data APIWarningCode
= SQLAPIWarningCodeUnspecified
-- ^ @SQL_API_WARNING_CODE_UNSPECIFIED@
-- An unknown or unset warning type from Cloud SQL API.
| RegionUnreachable
-- ^ @REGION_UNREACHABLE@
-- Warning when one or more regions are not reachable. The returned result
-- set may be incomplete.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable APIWarningCode
instance FromHttpApiData APIWarningCode where
parseQueryParam = \case
"SQL_API_WARNING_CODE_UNSPECIFIED" -> Right SQLAPIWarningCodeUnspecified
"REGION_UNREACHABLE" -> Right RegionUnreachable
x -> Left ("Unable to parse APIWarningCode from: " <> x)
instance ToHttpApiData APIWarningCode where
toQueryParam = \case
SQLAPIWarningCodeUnspecified -> "SQL_API_WARNING_CODE_UNSPECIFIED"
RegionUnreachable -> "REGION_UNREACHABLE"
instance FromJSON APIWarningCode where
parseJSON = parseJSONText "APIWarningCode"
instance ToJSON APIWarningCode where
toJSON = toJSONText
-- | External sync mode.
data ProjectsInstancesStartExternalSyncSyncMode
= ExternalSyncModeUnspecified
-- ^ @EXTERNAL_SYNC_MODE_UNSPECIFIED@
-- Unknown external sync mode, will be defaulted to ONLINE mode
| Online
-- ^ @ONLINE@
-- Online external sync will set up replication after initial data external
-- sync
| Offline
-- ^ @OFFLINE@
-- Offline external sync only dumps and loads a one-time snapshot of the
-- primary instance\'s data
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable ProjectsInstancesStartExternalSyncSyncMode
instance FromHttpApiData ProjectsInstancesStartExternalSyncSyncMode where
parseQueryParam = \case
"EXTERNAL_SYNC_MODE_UNSPECIFIED" -> Right ExternalSyncModeUnspecified
"ONLINE" -> Right Online
"OFFLINE" -> Right Offline
x -> Left ("Unable to parse ProjectsInstancesStartExternalSyncSyncMode from: " <> x)
instance ToHttpApiData ProjectsInstancesStartExternalSyncSyncMode where
toQueryParam = \case
ExternalSyncModeUnspecified -> "EXTERNAL_SYNC_MODE_UNSPECIFIED"
Online -> "ONLINE"
Offline -> "OFFLINE"
instance FromJSON ProjectsInstancesStartExternalSyncSyncMode where
parseJSON = parseJSONText "ProjectsInstancesStartExternalSyncSyncMode"
instance ToJSON ProjectsInstancesStartExternalSyncSyncMode where
toJSON = toJSONText
-- | V1 error format.
data Xgafv
= X1
-- ^ @1@
-- v1 error format
| X2
-- ^ @2@
-- v2 error format
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable Xgafv
instance FromHttpApiData Xgafv where
parseQueryParam = \case
"1" -> Right X1
"2" -> Right X2
x -> Left ("Unable to parse Xgafv from: " <> x)
instance ToHttpApiData Xgafv where
toQueryParam = \case
X1 -> "1"
X2 -> "2"
instance FromJSON Xgafv where
parseJSON = parseJSONText "Xgafv"
instance ToJSON Xgafv where
toJSON = toJSONText
-- | Identifies the specific error that occurred.
data SQLExternalSyncSettingErrorType
= SQLExternalSyncSettingErrorTypeUnspecified
-- ^ @SQL_EXTERNAL_SYNC_SETTING_ERROR_TYPE_UNSPECIFIED@
| ConnectionFailure
-- ^ @CONNECTION_FAILURE@
| BinlogNotEnabled
-- ^ @BINLOG_NOT_ENABLED@
| IncompatibleDatabaseVersion
-- ^ @INCOMPATIBLE_DATABASE_VERSION@
| ReplicaAlreadySetup
-- ^ @REPLICA_ALREADY_SETUP@
| InsufficientPrivilege
-- ^ @INSUFFICIENT_PRIVILEGE@
| UnsupportedMigrationType
-- ^ @UNSUPPORTED_MIGRATION_TYPE@
-- Unsupported migration type.
| NoPglogicalInstalled
-- ^ @NO_PGLOGICAL_INSTALLED@
-- No pglogical extension installed on databases, applicable for postgres.
| PglogicalNodeAlreadyExists
-- ^ @PGLOGICAL_NODE_ALREADY_EXISTS@
-- pglogical node already exists on databases, applicable for postgres.
| InvalidWalLevel
-- ^ @INVALID_WAL_LEVEL@
-- The value of parameter wal_level is not set to logical.
| InvalidSharedPreLoadLibrary
-- ^ @INVALID_SHARED_PRELOAD_LIBRARY@
-- The value of parameter shared_preload_libraries does not include
-- pglogical.
| InsufficientMaxReplicationSlots
-- ^ @INSUFFICIENT_MAX_REPLICATION_SLOTS@
-- The value of parameter max_replication_slots is not sufficient.
| InsufficientMaxWalSenders
-- ^ @INSUFFICIENT_MAX_WAL_SENDERS@
-- The value of parameter max_wal_senders is not sufficient.
| InsufficientMaxWorkerProcesses
-- ^ @INSUFFICIENT_MAX_WORKER_PROCESSES@
-- The value of parameter max_worker_processes is not sufficient.
| UnsupportedExtensions
-- ^ @UNSUPPORTED_EXTENSIONS@
-- Extensions installed are either not supported or having unsupported
-- versions
| InvalidRdsLogicalReplication
-- ^ @INVALID_RDS_LOGICAL_REPLICATION@
-- The value of parameter rds.logical_replication is not set to 1.
| InvalidLoggingSetup
-- ^ @INVALID_LOGGING_SETUP@
-- The primary instance logging setup doesn\'t allow EM sync.
| InvalidDBParam
-- ^ @INVALID_DB_PARAM@
-- The primary instance database parameter setup doesn\'t allow EM sync.
| UnsupportedGtidMode
-- ^ @UNSUPPORTED_GTID_MODE@
-- The gtid_mode is not supported, applicable for MySQL.
| SQLserverAgentNotRunning
-- ^ @SQLSERVER_AGENT_NOT_RUNNING@
-- SQL Server Agent is not running.
| UnsupportedTableDefinition
-- ^ @UNSUPPORTED_TABLE_DEFINITION@
-- The table definition is not support due to missing primary key or
-- replica identity, applicable for postgres.
| UnsupportedDefiner
-- ^ @UNSUPPORTED_DEFINER@
-- The customer has a definer that will break EM setup.
| SQLserverServernameMismatch
-- ^ @SQLSERVER_SERVERNAME_MISMATCH@
-- SQL Server \@\@SERVERNAME does not match actual host name
| PrimaryAlreadySetup
-- ^ @PRIMARY_ALREADY_SETUP@
-- The primary instance has been setup and will fail the setup.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable SQLExternalSyncSettingErrorType
instance FromHttpApiData SQLExternalSyncSettingErrorType where
parseQueryParam = \case
"SQL_EXTERNAL_SYNC_SETTING_ERROR_TYPE_UNSPECIFIED" -> Right SQLExternalSyncSettingErrorTypeUnspecified
"CONNECTION_FAILURE" -> Right ConnectionFailure
"BINLOG_NOT_ENABLED" -> Right BinlogNotEnabled
"INCOMPATIBLE_DATABASE_VERSION" -> Right IncompatibleDatabaseVersion
"REPLICA_ALREADY_SETUP" -> Right ReplicaAlreadySetup
"INSUFFICIENT_PRIVILEGE" -> Right InsufficientPrivilege
"UNSUPPORTED_MIGRATION_TYPE" -> Right UnsupportedMigrationType
"NO_PGLOGICAL_INSTALLED" -> Right NoPglogicalInstalled
"PGLOGICAL_NODE_ALREADY_EXISTS" -> Right PglogicalNodeAlreadyExists
"INVALID_WAL_LEVEL" -> Right InvalidWalLevel
"INVALID_SHARED_PRELOAD_LIBRARY" -> Right InvalidSharedPreLoadLibrary
"INSUFFICIENT_MAX_REPLICATION_SLOTS" -> Right InsufficientMaxReplicationSlots
"INSUFFICIENT_MAX_WAL_SENDERS" -> Right InsufficientMaxWalSenders
"INSUFFICIENT_MAX_WORKER_PROCESSES" -> Right InsufficientMaxWorkerProcesses
"UNSUPPORTED_EXTENSIONS" -> Right UnsupportedExtensions
"INVALID_RDS_LOGICAL_REPLICATION" -> Right InvalidRdsLogicalReplication
"INVALID_LOGGING_SETUP" -> Right InvalidLoggingSetup
"INVALID_DB_PARAM" -> Right InvalidDBParam
"UNSUPPORTED_GTID_MODE" -> Right UnsupportedGtidMode
"SQLSERVER_AGENT_NOT_RUNNING" -> Right SQLserverAgentNotRunning
"UNSUPPORTED_TABLE_DEFINITION" -> Right UnsupportedTableDefinition
"UNSUPPORTED_DEFINER" -> Right UnsupportedDefiner
"SQLSERVER_SERVERNAME_MISMATCH" -> Right SQLserverServernameMismatch
"PRIMARY_ALREADY_SETUP" -> Right PrimaryAlreadySetup
x -> Left ("Unable to parse SQLExternalSyncSettingErrorType from: " <> x)
instance ToHttpApiData SQLExternalSyncSettingErrorType where
toQueryParam = \case
SQLExternalSyncSettingErrorTypeUnspecified -> "SQL_EXTERNAL_SYNC_SETTING_ERROR_TYPE_UNSPECIFIED"
ConnectionFailure -> "CONNECTION_FAILURE"
BinlogNotEnabled -> "BINLOG_NOT_ENABLED"
IncompatibleDatabaseVersion -> "INCOMPATIBLE_DATABASE_VERSION"
ReplicaAlreadySetup -> "REPLICA_ALREADY_SETUP"
InsufficientPrivilege -> "INSUFFICIENT_PRIVILEGE"
UnsupportedMigrationType -> "UNSUPPORTED_MIGRATION_TYPE"
NoPglogicalInstalled -> "NO_PGLOGICAL_INSTALLED"
PglogicalNodeAlreadyExists -> "PGLOGICAL_NODE_ALREADY_EXISTS"
InvalidWalLevel -> "INVALID_WAL_LEVEL"
InvalidSharedPreLoadLibrary -> "INVALID_SHARED_PRELOAD_LIBRARY"
InsufficientMaxReplicationSlots -> "INSUFFICIENT_MAX_REPLICATION_SLOTS"
InsufficientMaxWalSenders -> "INSUFFICIENT_MAX_WAL_SENDERS"
InsufficientMaxWorkerProcesses -> "INSUFFICIENT_MAX_WORKER_PROCESSES"
UnsupportedExtensions -> "UNSUPPORTED_EXTENSIONS"
InvalidRdsLogicalReplication -> "INVALID_RDS_LOGICAL_REPLICATION"
InvalidLoggingSetup -> "INVALID_LOGGING_SETUP"
InvalidDBParam -> "INVALID_DB_PARAM"
UnsupportedGtidMode -> "UNSUPPORTED_GTID_MODE"
SQLserverAgentNotRunning -> "SQLSERVER_AGENT_NOT_RUNNING"
UnsupportedTableDefinition -> "UNSUPPORTED_TABLE_DEFINITION"
UnsupportedDefiner -> "UNSUPPORTED_DEFINER"
SQLserverServernameMismatch -> "SQLSERVER_SERVERNAME_MISMATCH"
PrimaryAlreadySetup -> "PRIMARY_ALREADY_SETUP"
instance FromJSON SQLExternalSyncSettingErrorType where
parseJSON = parseJSONText "SQLExternalSyncSettingErrorType"
instance ToJSON SQLExternalSyncSettingErrorType where
toJSON = toJSONText
-- | The instance type. This can be one of the following.
-- *CLOUD_SQL_INSTANCE*: A Cloud SQL instance that is not replicating from
-- a primary instance. *ON_PREMISES_INSTANCE*: An instance running on the
-- customer\'s premises. *READ_REPLICA_INSTANCE*: A Cloud SQL instance
-- configured as a read-replica.
data DatabaseInstanceInstanceType
= SQLInstanceTypeUnspecified
-- ^ @SQL_INSTANCE_TYPE_UNSPECIFIED@
-- This is an unknown Cloud SQL instance type.
| CloudSQLInstance
-- ^ @CLOUD_SQL_INSTANCE@
-- A regular Cloud SQL instance.
| OnPremisesInstance
-- ^ @ON_PREMISES_INSTANCE@
-- An instance running on the customer\'s premises that is not managed by
-- Cloud SQL.
| ReadReplicaInstance
-- ^ @READ_REPLICA_INSTANCE@
-- A Cloud SQL instance acting as a read-replica.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable DatabaseInstanceInstanceType
instance FromHttpApiData DatabaseInstanceInstanceType where
parseQueryParam = \case
"SQL_INSTANCE_TYPE_UNSPECIFIED" -> Right SQLInstanceTypeUnspecified
"CLOUD_SQL_INSTANCE" -> Right CloudSQLInstance
"ON_PREMISES_INSTANCE" -> Right OnPremisesInstance
"READ_REPLICA_INSTANCE" -> Right ReadReplicaInstance
x -> Left ("Unable to parse DatabaseInstanceInstanceType from: " <> x)
instance ToHttpApiData DatabaseInstanceInstanceType where
toQueryParam = \case
SQLInstanceTypeUnspecified -> "SQL_INSTANCE_TYPE_UNSPECIFIED"
CloudSQLInstance -> "CLOUD_SQL_INSTANCE"
OnPremisesInstance -> "ON_PREMISES_INSTANCE"
ReadReplicaInstance -> "READ_REPLICA_INSTANCE"
instance FromJSON DatabaseInstanceInstanceType where
parseJSON = parseJSONText "DatabaseInstanceInstanceType"
instance ToJSON DatabaseInstanceInstanceType where
toJSON = toJSONText
-- | The type of this IP address. A **PRIMARY** address is a public address
-- that can accept incoming connections. A **PRIVATE** address is a private
-- address that can accept incoming connections. An **OUTGOING** address is
-- the source address of connections originating from the instance, if
-- supported.
data IPMAppingType
= SQLIPAddressTypeUnspecified
-- ^ @SQL_IP_ADDRESS_TYPE_UNSPECIFIED@
-- This is an unknown IP address type.
| Primary
-- ^ @PRIMARY@
-- IP address the customer is supposed to connect to. Usually this is the
-- load balancer\'s IP address
| Outgoing
-- ^ @OUTGOING@
-- Source IP address of the connection a read replica establishes to its
-- external primary instance. This IP address can be allowlisted by the
      -- customer in case it has a firewall that filters incoming connections to
      -- its on-premises primary instance.
| Private
-- ^ @PRIVATE@
-- Private IP used when using private IPs and network peering.
| Migrated1STGen
-- ^ @MIGRATED_1ST_GEN@
-- V1 IP of a migrated instance. We want the user to decommission this IP
      -- as soon as the migration is complete. Note: V1 instances with V1 IP
      -- addresses will be counted as PRIMARY.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable IPMAppingType
instance FromHttpApiData IPMAppingType where
parseQueryParam = \case
"SQL_IP_ADDRESS_TYPE_UNSPECIFIED" -> Right SQLIPAddressTypeUnspecified
"PRIMARY" -> Right Primary
"OUTGOING" -> Right Outgoing
"PRIVATE" -> Right Private
"MIGRATED_1ST_GEN" -> Right Migrated1STGen
x -> Left ("Unable to parse IPMAppingType from: " <> x)
instance ToHttpApiData IPMAppingType where
toQueryParam = \case
SQLIPAddressTypeUnspecified -> "SQL_IP_ADDRESS_TYPE_UNSPECIFIED"
Primary -> "PRIMARY"
Outgoing -> "OUTGOING"
Private -> "PRIVATE"
Migrated1STGen -> "MIGRATED_1ST_GEN"
instance FromJSON IPMAppingType where
parseJSON = parseJSONText "IPMAppingType"
instance ToJSON IPMAppingType where
toJSON = toJSONText
-- | The file type for the specified uri. **SQL**: The file contains SQL
-- statements. **CSV**: The file contains CSV data. **BAK**: The file
-- contains backup data for a SQL Server instance.
data ExportContextFileType
= ECFTSQLFileTypeUnspecified
-- ^ @SQL_FILE_TYPE_UNSPECIFIED@
-- Unknown file type.
| ECFTSQL
-- ^ @SQL@
-- File containing SQL statements.
| ECFTCSV
-- ^ @CSV@
-- File in CSV format.
| ECFTBak
-- ^ @BAK@
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable ExportContextFileType
instance FromHttpApiData ExportContextFileType where
parseQueryParam = \case
"SQL_FILE_TYPE_UNSPECIFIED" -> Right ECFTSQLFileTypeUnspecified
"SQL" -> Right ECFTSQL
"CSV" -> Right ECFTCSV
"BAK" -> Right ECFTBak
x -> Left ("Unable to parse ExportContextFileType from: " <> x)
instance ToHttpApiData ExportContextFileType where
toQueryParam = \case
ECFTSQLFileTypeUnspecified -> "SQL_FILE_TYPE_UNSPECIFIED"
ECFTSQL -> "SQL"
ECFTCSV -> "CSV"
ECFTBak -> "BAK"
instance FromJSON ExportContextFileType where
parseJSON = parseJSONText "ExportContextFileType"
instance ToJSON ExportContextFileType where
toJSON = toJSONText
-- | Required. The type of the reschedule.
data RescheduleRescheduleType
= RescheduleTypeUnspecified
-- ^ @RESCHEDULE_TYPE_UNSPECIFIED@
| Immediate
-- ^ @IMMEDIATE@
-- Reschedules maintenance to happen now (within 5 minutes).
| NextAvailableWindow
-- ^ @NEXT_AVAILABLE_WINDOW@
-- Reschedules maintenance to occur within one week from the originally
-- scheduled day and time.
| SpecificTime
-- ^ @SPECIFIC_TIME@
-- Reschedules maintenance to a specific time and day.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable RescheduleRescheduleType
instance FromHttpApiData RescheduleRescheduleType where
parseQueryParam = \case
"RESCHEDULE_TYPE_UNSPECIFIED" -> Right RescheduleTypeUnspecified
"IMMEDIATE" -> Right Immediate
"NEXT_AVAILABLE_WINDOW" -> Right NextAvailableWindow
"SPECIFIC_TIME" -> Right SpecificTime
x -> Left ("Unable to parse RescheduleRescheduleType from: " <> x)
instance ToHttpApiData RescheduleRescheduleType where
toQueryParam = \case
RescheduleTypeUnspecified -> "RESCHEDULE_TYPE_UNSPECIFIED"
Immediate -> "IMMEDIATE"
NextAvailableWindow -> "NEXT_AVAILABLE_WINDOW"
SpecificTime -> "SPECIFIC_TIME"
instance FromJSON RescheduleRescheduleType where
parseJSON = parseJSONText "RescheduleRescheduleType"
instance ToJSON RescheduleRescheduleType where
toJSON = toJSONText
-- | External sync mode
data ProjectsInstancesVerifyExternalSyncSettingsSyncMode
= PIVESSSMExternalSyncModeUnspecified
-- ^ @EXTERNAL_SYNC_MODE_UNSPECIFIED@
-- Unknown external sync mode, will be defaulted to ONLINE mode
| PIVESSSMOnline
-- ^ @ONLINE@
-- Online external sync will set up replication after initial data external
-- sync
| PIVESSSMOffline
-- ^ @OFFLINE@
-- Offline external sync only dumps and loads a one-time snapshot of the
-- primary instance\'s data
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable ProjectsInstancesVerifyExternalSyncSettingsSyncMode
instance FromHttpApiData ProjectsInstancesVerifyExternalSyncSettingsSyncMode where
parseQueryParam = \case
"EXTERNAL_SYNC_MODE_UNSPECIFIED" -> Right PIVESSSMExternalSyncModeUnspecified
"ONLINE" -> Right PIVESSSMOnline
"OFFLINE" -> Right PIVESSSMOffline
x -> Left ("Unable to parse ProjectsInstancesVerifyExternalSyncSettingsSyncMode from: " <> x)
instance ToHttpApiData ProjectsInstancesVerifyExternalSyncSettingsSyncMode where
toQueryParam = \case
PIVESSSMExternalSyncModeUnspecified -> "EXTERNAL_SYNC_MODE_UNSPECIFIED"
PIVESSSMOnline -> "ONLINE"
PIVESSSMOffline -> "OFFLINE"
instance FromJSON ProjectsInstancesVerifyExternalSyncSettingsSyncMode where
parseJSON = parseJSONText "ProjectsInstancesVerifyExternalSyncSettingsSyncMode"
instance ToJSON ProjectsInstancesVerifyExternalSyncSettingsSyncMode where
toJSON = toJSONText
-- | The user type. It determines the method to authenticate the user during
-- login. The default is the database\'s built-in user type.
data UsersListBodyType
= BuiltIn
-- ^ @BUILT_IN@
-- The database\'s built-in user type.
| CloudIAMUser
-- ^ @CLOUD_IAM_USER@
-- Cloud IAM user.
| CloudIAMServiceAccount
-- ^ @CLOUD_IAM_SERVICE_ACCOUNT@
-- Cloud IAM service account.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable UsersListBodyType
instance FromHttpApiData UsersListBodyType where
parseQueryParam = \case
"BUILT_IN" -> Right BuiltIn
"CLOUD_IAM_USER" -> Right CloudIAMUser
"CLOUD_IAM_SERVICE_ACCOUNT" -> Right CloudIAMServiceAccount
x -> Left ("Unable to parse UsersListBodyType from: " <> x)
instance ToHttpApiData UsersListBodyType where
toQueryParam = \case
BuiltIn -> "BUILT_IN"
CloudIAMUser -> "CLOUD_IAM_USER"
CloudIAMServiceAccount -> "CLOUD_IAM_SERVICE_ACCOUNT"
instance FromJSON UsersListBodyType where
parseJSON = parseJSONText "UsersListBodyType"
instance ToJSON UsersListBodyType where
toJSON = toJSONText
-- | The type of data disk: **PD_SSD** (default) or **PD_HDD**.
data SettingsDataDiskType
= SQLDataDiskTypeUnspecified
-- ^ @SQL_DATA_DISK_TYPE_UNSPECIFIED@
-- This is an unknown data disk type.
| PdSsd
-- ^ @PD_SSD@
-- An SSD data disk.
| PdHdd
-- ^ @PD_HDD@
-- An HDD data disk.
| ObsoleteLocalSsd
-- ^ @OBSOLETE_LOCAL_SSD@
-- This field is deprecated and will be removed from a future version of
-- the API.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable SettingsDataDiskType
instance FromHttpApiData SettingsDataDiskType where
parseQueryParam = \case
"SQL_DATA_DISK_TYPE_UNSPECIFIED" -> Right SQLDataDiskTypeUnspecified
"PD_SSD" -> Right PdSsd
"PD_HDD" -> Right PdHdd
"OBSOLETE_LOCAL_SSD" -> Right ObsoleteLocalSsd
x -> Left ("Unable to parse SettingsDataDiskType from: " <> x)
instance ToHttpApiData SettingsDataDiskType where
toQueryParam = \case
SQLDataDiskTypeUnspecified -> "SQL_DATA_DISK_TYPE_UNSPECIFIED"
PdSsd -> "PD_SSD"
PdHdd -> "PD_HDD"
ObsoleteLocalSsd -> "OBSOLETE_LOCAL_SSD"
instance FromJSON SettingsDataDiskType where
parseJSON = parseJSONText "SettingsDataDiskType"
instance ToJSON SettingsDataDiskType where
toJSON = toJSONText
-- | *SECOND_GEN*: Cloud SQL database instance. *EXTERNAL*: A database server
-- that is not managed by Google. This property is read-only; use the
-- *tier* property in the *settings* object to determine the database type.
data DatabaseInstanceBackendType
= DIBTSQLBackendTypeUnspecified
-- ^ @SQL_BACKEND_TYPE_UNSPECIFIED@
      -- This is an unknown backend type for the instance.
| DIBTFirstGen
-- ^ @FIRST_GEN@
-- V1 speckle instance.
| DIBTSecondGen
-- ^ @SECOND_GEN@
-- V2 speckle instance.
| DIBTExternal
-- ^ @EXTERNAL@
-- On premises instance.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable DatabaseInstanceBackendType
instance FromHttpApiData DatabaseInstanceBackendType where
parseQueryParam = \case
"SQL_BACKEND_TYPE_UNSPECIFIED" -> Right DIBTSQLBackendTypeUnspecified
"FIRST_GEN" -> Right DIBTFirstGen
"SECOND_GEN" -> Right DIBTSecondGen
"EXTERNAL" -> Right DIBTExternal
x -> Left ("Unable to parse DatabaseInstanceBackendType from: " <> x)
instance ToHttpApiData DatabaseInstanceBackendType where
toQueryParam = \case
DIBTSQLBackendTypeUnspecified -> "SQL_BACKEND_TYPE_UNSPECIFIED"
DIBTFirstGen -> "FIRST_GEN"
DIBTSecondGen -> "SECOND_GEN"
DIBTExternal -> "EXTERNAL"
instance FromJSON DatabaseInstanceBackendType where
parseJSON = parseJSONText "DatabaseInstanceBackendType"
instance ToJSON DatabaseInstanceBackendType where
toJSON = toJSONText
data DatabaseInstanceSuspensionReasonItem
= SQLSuspensionReasonUnspecified
-- ^ @SQL_SUSPENSION_REASON_UNSPECIFIED@
-- This is an unknown suspension reason.
| BillingIssue
-- ^ @BILLING_ISSUE@
      -- The instance is suspended due to billing issues (for example, a GCP
      -- account issue).
| LegalIssue
-- ^ @LEGAL_ISSUE@
      -- The instance is suspended due to illegal content (for example, child
      -- pornography, copyrighted material, etc.).
| OperationalIssue
-- ^ @OPERATIONAL_ISSUE@
      -- The instance is causing operational issues (for example, causing the
      -- database to crash).
| KmsKeyIssue
-- ^ @KMS_KEY_ISSUE@
      -- The KMS key used by the instance has been revoked, or access to it
      -- has been denied.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable DatabaseInstanceSuspensionReasonItem
instance FromHttpApiData DatabaseInstanceSuspensionReasonItem where
parseQueryParam = \case
"SQL_SUSPENSION_REASON_UNSPECIFIED" -> Right SQLSuspensionReasonUnspecified
"BILLING_ISSUE" -> Right BillingIssue
"LEGAL_ISSUE" -> Right LegalIssue
"OPERATIONAL_ISSUE" -> Right OperationalIssue
"KMS_KEY_ISSUE" -> Right KmsKeyIssue
x -> Left ("Unable to parse DatabaseInstanceSuspensionReasonItem from: " <> x)
instance ToHttpApiData DatabaseInstanceSuspensionReasonItem where
toQueryParam = \case
SQLSuspensionReasonUnspecified -> "SQL_SUSPENSION_REASON_UNSPECIFIED"
BillingIssue -> "BILLING_ISSUE"
LegalIssue -> "LEGAL_ISSUE"
OperationalIssue -> "OPERATIONAL_ISSUE"
KmsKeyIssue -> "KMS_KEY_ISSUE"
instance FromJSON DatabaseInstanceSuspensionReasonItem where
parseJSON = parseJSONText "DatabaseInstanceSuspensionReasonItem"
instance ToJSON DatabaseInstanceSuspensionReasonItem where
toJSON = toJSONText
-- | The status of this run.
data BackupRunStatus
= BRSSQLBackupRunStatusUnspecified
-- ^ @SQL_BACKUP_RUN_STATUS_UNSPECIFIED@
-- The status of the run is unknown.
| BRSEnQueued
-- ^ @ENQUEUED@
-- The backup operation was enqueued.
| BRSOverdue
-- ^ @OVERDUE@
-- The backup is overdue across a given backup window. Indicates a problem.
-- Example: Long-running operation in progress during the whole window.
| BRSRunning
-- ^ @RUNNING@
-- The backup is in progress.
| BRSFailed
-- ^ @FAILED@
-- The backup failed.
| BRSSuccessful
-- ^ @SUCCESSFUL@
-- The backup was successful.
| BRSSkipped
-- ^ @SKIPPED@
-- The backup was skipped (without problems) for a given backup window.
-- Example: Instance was idle.
| BRSDeletionPending
-- ^ @DELETION_PENDING@
-- The backup is about to be deleted.
| BRSDeletionFailed
-- ^ @DELETION_FAILED@
-- The backup deletion failed.
| BRSDeleted
-- ^ @DELETED@
-- The backup has been deleted.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable BackupRunStatus
instance FromHttpApiData BackupRunStatus where
parseQueryParam = \case
"SQL_BACKUP_RUN_STATUS_UNSPECIFIED" -> Right BRSSQLBackupRunStatusUnspecified
"ENQUEUED" -> Right BRSEnQueued
"OVERDUE" -> Right BRSOverdue
"RUNNING" -> Right BRSRunning
"FAILED" -> Right BRSFailed
"SUCCESSFUL" -> Right BRSSuccessful
"SKIPPED" -> Right BRSSkipped
"DELETION_PENDING" -> Right BRSDeletionPending
"DELETION_FAILED" -> Right BRSDeletionFailed
"DELETED" -> Right BRSDeleted
x -> Left ("Unable to parse BackupRunStatus from: " <> x)
instance ToHttpApiData BackupRunStatus where
toQueryParam = \case
BRSSQLBackupRunStatusUnspecified -> "SQL_BACKUP_RUN_STATUS_UNSPECIFIED"
BRSEnQueued -> "ENQUEUED"
BRSOverdue -> "OVERDUE"
BRSRunning -> "RUNNING"
BRSFailed -> "FAILED"
BRSSuccessful -> "SUCCESSFUL"
BRSSkipped -> "SKIPPED"
BRSDeletionPending -> "DELETION_PENDING"
BRSDeletionFailed -> "DELETION_FAILED"
BRSDeleted -> "DELETED"
instance FromJSON BackupRunStatus where
parseJSON = parseJSONText "BackupRunStatus"
instance ToJSON BackupRunStatus where
toJSON = toJSONText
-- | The current serving state of the Cloud SQL instance. This can be one of
-- the following. *SQL_INSTANCE_STATE_UNSPECIFIED*: The state of the
-- instance is unknown. *RUNNABLE*: The instance is running, or has been
-- stopped by owner. *SUSPENDED*: The instance is not available, for
-- example due to problems with billing. *PENDING_DELETE*: The instance is
-- being deleted. *PENDING_CREATE*: The instance is being created.
-- *MAINTENANCE*: The instance is down for maintenance. *FAILED*: The
-- instance creation failed.
data DatabaseInstanceState
= DISSQLInstanceStateUnspecified
-- ^ @SQL_INSTANCE_STATE_UNSPECIFIED@
-- The state of the instance is unknown.
| DISRunnable
-- ^ @RUNNABLE@
-- The instance is running, or has been stopped by owner.
| DISSuspended
-- ^ @SUSPENDED@
-- The instance is not available, for example due to problems with billing.
| DISPendingDelete
-- ^ @PENDING_DELETE@
-- The instance is being deleted.
| DISPendingCreate
-- ^ @PENDING_CREATE@
-- The instance is being created.
| DISMaintenance
-- ^ @MAINTENANCE@
-- The instance is down for maintenance.
| DISFailed
-- ^ @FAILED@
-- The creation of the instance failed or a fatal error occurred during
-- maintenance.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable DatabaseInstanceState
instance FromHttpApiData DatabaseInstanceState where
parseQueryParam = \case
"SQL_INSTANCE_STATE_UNSPECIFIED" -> Right DISSQLInstanceStateUnspecified
"RUNNABLE" -> Right DISRunnable
"SUSPENDED" -> Right DISSuspended
"PENDING_DELETE" -> Right DISPendingDelete
"PENDING_CREATE" -> Right DISPendingCreate
"MAINTENANCE" -> Right DISMaintenance
"FAILED" -> Right DISFailed
x -> Left ("Unable to parse DatabaseInstanceState from: " <> x)
instance ToHttpApiData DatabaseInstanceState where
toQueryParam = \case
DISSQLInstanceStateUnspecified -> "SQL_INSTANCE_STATE_UNSPECIFIED"
DISRunnable -> "RUNNABLE"
DISSuspended -> "SUSPENDED"
DISPendingDelete -> "PENDING_DELETE"
DISPendingCreate -> "PENDING_CREATE"
DISMaintenance -> "MAINTENANCE"
DISFailed -> "FAILED"
instance FromJSON DatabaseInstanceState where
parseJSON = parseJSONText "DatabaseInstanceState"
instance ToJSON DatabaseInstanceState where
toJSON = toJSONText
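-- A hedged usage sketch (not part of the generated module): parsing the
-- serving state from its wire form, including the failure case for an
-- unrecognised value.
--
-- >>> parseQueryParam "RUNNABLE" :: Either Text DatabaseInstanceState
-- Right DISRunnable
-- >>> parseQueryParam "STOPPED" :: Either Text DatabaseInstanceState
-- Left "Unable to parse DatabaseInstanceState from: STOPPED"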
-- | The database engine type and version. The *databaseVersion* field cannot
-- be changed after instance creation. MySQL instances: *MYSQL_8_0*,
-- *MYSQL_5_7* (default), or *MYSQL_5_6*. PostgreSQL instances:
-- *POSTGRES_9_6*, *POSTGRES_10*, *POSTGRES_11*, *POSTGRES_12*,
-- *POSTGRES_13* (default). SQL Server instances:
-- *SQLSERVER_2019_STANDARD*, *SQLSERVER_2019_ENTERPRISE*,
-- *SQLSERVER_2019_EXPRESS*, or *SQLSERVER_2019_WEB*,
-- *SQLSERVER_2017_STANDARD* (default), *SQLSERVER_2017_ENTERPRISE*,
-- *SQLSERVER_2017_EXPRESS*, or *SQLSERVER_2017_WEB*.
data DatabaseInstanceDatabaseVersion
= DIDVSQLDatabaseVersionUnspecified
-- ^ @SQL_DATABASE_VERSION_UNSPECIFIED@
-- This is an unknown database version.
| DIDVMysql51
-- ^ @MYSQL_5_1@
-- The database version is MySQL 5.1.
| DIDVMysql55
-- ^ @MYSQL_5_5@
-- The database version is MySQL 5.5.
| DIDVMysql56
-- ^ @MYSQL_5_6@
-- The database version is MySQL 5.6.
| DIDVMysql57
-- ^ @MYSQL_5_7@
-- The database version is MySQL 5.7.
| DIDVPostgres96
-- ^ @POSTGRES_9_6@
-- The database version is PostgreSQL 9.6.
| DIDVPostgres11
-- ^ @POSTGRES_11@
-- The database version is PostgreSQL 11.
| DIDVSQLserver2017Standard
-- ^ @SQLSERVER_2017_STANDARD@
-- The database version is SQL Server 2017 Standard.
| DIDVSQLserver2017Enterprise
-- ^ @SQLSERVER_2017_ENTERPRISE@
-- The database version is SQL Server 2017 Enterprise.
| DIDVSQLserver2017Express
-- ^ @SQLSERVER_2017_EXPRESS@
-- The database version is SQL Server 2017 Express.
| DIDVSQLserver2017Web
-- ^ @SQLSERVER_2017_WEB@
-- The database version is SQL Server 2017 Web.
| DIDVPostgres10
-- ^ @POSTGRES_10@
-- The database version is PostgreSQL 10.
| DIDVPostgres12
-- ^ @POSTGRES_12@
-- The database version is PostgreSQL 12.
| DIDVPostgres13
-- ^ @POSTGRES_13@
-- The database version is PostgreSQL 13.
| DIDVSQLserver2019Standard
-- ^ @SQLSERVER_2019_STANDARD@
-- The database version is SQL Server 2019 Standard.
| DIDVSQLserver2019Enterprise
-- ^ @SQLSERVER_2019_ENTERPRISE@
-- The database version is SQL Server 2019 Enterprise.
| DIDVSQLserver2019Express
-- ^ @SQLSERVER_2019_EXPRESS@
-- The database version is SQL Server 2019 Express.
| DIDVSQLserver2019Web
-- ^ @SQLSERVER_2019_WEB@
-- The database version is SQL Server 2019 Web.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable DatabaseInstanceDatabaseVersion
instance FromHttpApiData DatabaseInstanceDatabaseVersion where
parseQueryParam = \case
"SQL_DATABASE_VERSION_UNSPECIFIED" -> Right DIDVSQLDatabaseVersionUnspecified
"MYSQL_5_1" -> Right DIDVMysql51
"MYSQL_5_5" -> Right DIDVMysql55
"MYSQL_5_6" -> Right DIDVMysql56
"MYSQL_5_7" -> Right DIDVMysql57
"POSTGRES_9_6" -> Right DIDVPostgres96
"POSTGRES_11" -> Right DIDVPostgres11
"SQLSERVER_2017_STANDARD" -> Right DIDVSQLserver2017Standard
"SQLSERVER_2017_ENTERPRISE" -> Right DIDVSQLserver2017Enterprise
"SQLSERVER_2017_EXPRESS" -> Right DIDVSQLserver2017Express
"SQLSERVER_2017_WEB" -> Right DIDVSQLserver2017Web
"POSTGRES_10" -> Right DIDVPostgres10
"POSTGRES_12" -> Right DIDVPostgres12
"POSTGRES_13" -> Right DIDVPostgres13
"SQLSERVER_2019_STANDARD" -> Right DIDVSQLserver2019Standard
"SQLSERVER_2019_ENTERPRISE" -> Right DIDVSQLserver2019Enterprise
"SQLSERVER_2019_EXPRESS" -> Right DIDVSQLserver2019Express
"SQLSERVER_2019_WEB" -> Right DIDVSQLserver2019Web
x -> Left ("Unable to parse DatabaseInstanceDatabaseVersion from: " <> x)
instance ToHttpApiData DatabaseInstanceDatabaseVersion where
toQueryParam = \case
DIDVSQLDatabaseVersionUnspecified -> "SQL_DATABASE_VERSION_UNSPECIFIED"
DIDVMysql51 -> "MYSQL_5_1"
DIDVMysql55 -> "MYSQL_5_5"
DIDVMysql56 -> "MYSQL_5_6"
DIDVMysql57 -> "MYSQL_5_7"
DIDVPostgres96 -> "POSTGRES_9_6"
DIDVPostgres11 -> "POSTGRES_11"
DIDVSQLserver2017Standard -> "SQLSERVER_2017_STANDARD"
DIDVSQLserver2017Enterprise -> "SQLSERVER_2017_ENTERPRISE"
DIDVSQLserver2017Express -> "SQLSERVER_2017_EXPRESS"
DIDVSQLserver2017Web -> "SQLSERVER_2017_WEB"
DIDVPostgres10 -> "POSTGRES_10"
DIDVPostgres12 -> "POSTGRES_12"
DIDVPostgres13 -> "POSTGRES_13"
DIDVSQLserver2019Standard -> "SQLSERVER_2019_STANDARD"
DIDVSQLserver2019Enterprise -> "SQLSERVER_2019_ENTERPRISE"
DIDVSQLserver2019Express -> "SQLSERVER_2019_EXPRESS"
DIDVSQLserver2019Web -> "SQLSERVER_2019_WEB"
instance FromJSON DatabaseInstanceDatabaseVersion where
parseJSON = parseJSONText "DatabaseInstanceDatabaseVersion"
instance ToJSON DatabaseInstanceDatabaseVersion where
toJSON = toJSONText
-- | The user type. It determines the method to authenticate the user during
-- login. The default is the database\'s built-in user type.
data UserType
= UTBuiltIn
-- ^ @BUILT_IN@
-- The database\'s built-in user type.
| UTCloudIAMUser
-- ^ @CLOUD_IAM_USER@
-- Cloud IAM user.
| UTCloudIAMServiceAccount
-- ^ @CLOUD_IAM_SERVICE_ACCOUNT@
-- Cloud IAM service account.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable UserType
instance FromHttpApiData UserType where
parseQueryParam = \case
"BUILT_IN" -> Right UTBuiltIn
"CLOUD_IAM_USER" -> Right UTCloudIAMUser
"CLOUD_IAM_SERVICE_ACCOUNT" -> Right UTCloudIAMServiceAccount
x -> Left ("Unable to parse UserType from: " <> x)
instance ToHttpApiData UserType where
toQueryParam = \case
UTBuiltIn -> "BUILT_IN"
UTCloudIAMUser -> "CLOUD_IAM_USER"
UTCloudIAMServiceAccount -> "CLOUD_IAM_SERVICE_ACCOUNT"
instance FromJSON UserType where
parseJSON = parseJSONText "UserType"
instance ToJSON UserType where
toJSON = toJSONText
-- | The type of replication this instance uses. This can be either
-- **ASYNCHRONOUS** or **SYNCHRONOUS**. (Deprecated) This property was only
-- applicable to First Generation instances.
data SettingsReplicationType
= SQLReplicationTypeUnspecified
-- ^ @SQL_REPLICATION_TYPE_UNSPECIFIED@
-- This is an unknown replication type for a Cloud SQL instance.
| Synchronous
-- ^ @SYNCHRONOUS@
-- The synchronous replication mode for First Generation instances. It is
-- the default value.
| Asynchronous
-- ^ @ASYNCHRONOUS@
-- The asynchronous replication mode for First Generation instances. It
-- provides a slight performance gain, but if an outage occurs while this
-- option is set to asynchronous, you can lose up to a few seconds of
-- updates to your data.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable SettingsReplicationType
instance FromHttpApiData SettingsReplicationType where
parseQueryParam = \case
"SQL_REPLICATION_TYPE_UNSPECIFIED" -> Right SQLReplicationTypeUnspecified
"SYNCHRONOUS" -> Right Synchronous
"ASYNCHRONOUS" -> Right Asynchronous
x -> Left ("Unable to parse SettingsReplicationType from: " <> x)
instance ToHttpApiData SettingsReplicationType where
toQueryParam = \case
SQLReplicationTypeUnspecified -> "SQL_REPLICATION_TYPE_UNSPECIFIED"
Synchronous -> "SYNCHRONOUS"
Asynchronous -> "ASYNCHRONOUS"
instance FromJSON SettingsReplicationType where
parseJSON = parseJSONText "SettingsReplicationType"
instance ToJSON SettingsReplicationType where
toJSON = toJSONText
-- | This field represents the state generated by the proactive database
-- wellness job for OutOfDisk issues. Writers: the proactive database wellness
-- job for OOD. Readers: the Pantheon frontend and the proactive database
-- wellness job.
data SQLOutOfDiskReportSQLOutOfDiskState
= SQLOutOfDiskStateUnspecified
-- ^ @SQL_OUT_OF_DISK_STATE_UNSPECIFIED@
-- Unspecified state
| Normal
-- ^ @NORMAL@
-- The instance has plenty space on data disk
| SoftShutdown
-- ^ @SOFT_SHUTDOWN@
      -- The data disk is almost used up. The instance is shut down to prevent
      -- data corruption.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable SQLOutOfDiskReportSQLOutOfDiskState
instance FromHttpApiData SQLOutOfDiskReportSQLOutOfDiskState where
parseQueryParam = \case
"SQL_OUT_OF_DISK_STATE_UNSPECIFIED" -> Right SQLOutOfDiskStateUnspecified
"NORMAL" -> Right Normal
"SOFT_SHUTDOWN" -> Right SoftShutdown
x -> Left ("Unable to parse SQLOutOfDiskReportSQLOutOfDiskState from: " <> x)
instance ToHttpApiData SQLOutOfDiskReportSQLOutOfDiskState where
toQueryParam = \case
SQLOutOfDiskStateUnspecified -> "SQL_OUT_OF_DISK_STATE_UNSPECIFIED"
Normal -> "NORMAL"
SoftShutdown -> "SOFT_SHUTDOWN"
instance FromJSON SQLOutOfDiskReportSQLOutOfDiskState where
parseJSON = parseJSONText "SQLOutOfDiskReportSQLOutOfDiskState"
instance ToJSON SQLOutOfDiskReportSQLOutOfDiskState where
toJSON = toJSONText
|
brendanhay/gogol
|
gogol-sqladmin/gen/Network/Google/SQLAdmin/Types/Sum.hs
|
mpl-2.0
| 71,085
| 0
| 11
| 15,806
| 8,606
| 4,646
| 3,960
| 1,073
| 0
|
module Moonbase.Panel.Items.Tray
( systemTray
) where
import Control.Monad
import Control.Applicative
import qualified Graphics.UI.Gtk as Gtk
import qualified Graphics.UI.Gtk.Misc.TrayManager as Gtk
import Moonbase.Panel
import Moonbase.DBus
import Moonbase.Util
import Moonbase.Util.Gtk
systemTray :: PanelItems
systemTray = item $ do
trayBox <- liftIO $ do
box <- Gtk.hBoxNew False 5
trayManager <- Gtk.trayManagerNew
Just screen <- Gtk.screenGetDefault
Gtk.trayManagerManageScreen trayManager screen
Gtk.on trayManager Gtk.trayIconAdded $ \w -> do
ioasync $ Gtk.widgetShowAll w
Gtk.boxPackStart box w Gtk.PackNatural 0
return box
return $ PanelItem "systemTray" (Gtk.toWidget trayBox) Gtk.PackNatural
|
felixsch/moonbase-ng
|
src/Moonbase/Panel/Items/Tray.hs
|
lgpl-2.1
| 761
| 0
| 18
| 138
| 213
| 111
| 102
| 22
| 1
|
module Network.Proxy.Types where
import Data.Attoparsec.ByteString (Parser)
import Data.ByteString (ByteString)
data Proxy a = Proxy (Parser ByteString) (IO (Maybe a))
type Port = Int
type Host = String
|
mwotton/proxomatic
|
src/Network/Proxy/Types.hs
|
bsd-2-clause
| 236
| 0
| 10
| 60
| 70
| 42
| 28
| 6
| 0
|
{-# LANGUAGE TemplateHaskell, ExistentialQuantification #-}
{-| Implements Template Haskell generation of RPC server components from Haskell
functions.
-}
{-
Copyright (C) 2013 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.THH.RPC
( Request(..)
, RpcServer
, dispatch
, mkRpcM
) where
import Prelude ()
import Ganeti.Prelude
import Control.Arrow ((&&&))
import Control.Monad
import Control.Monad.Error.Class (MonadError(..))
import Data.Map (Map)
import qualified Data.Map as Map
import Language.Haskell.TH
import qualified Text.JSON as J
import Ganeti.BasicTypes
import Ganeti.Errors
import Ganeti.JSON
import Ganeti.THH.Types
import qualified Ganeti.UDSServer as US
data RpcFn m = forall i o . (J.JSON i, J.JSON o) => RpcFn (i -> m o)
type RpcServer m = US.Handler Request m J.JSValue
-- | An RPC request consisting of a method and its argument(s).
data Request = Request { rMethod :: String, rArgs :: J.JSValue }
deriving (Eq, Ord, Show)
decodeRequest :: J.JSValue -> J.JSValue -> Result Request
decodeRequest method args = Request <$> fromJVal method <*> pure args
dispatch :: (Monad m)
=> Map String (RpcFn (ResultT GanetiException m)) -> RpcServer m
dispatch fs =
US.Handler { US.hParse = decodeRequest
, US.hInputLogShort = rMethod
, US.hInputLogLong = rMethod
, US.hExec = liftToHandler . exec
}
where
orError :: (MonadError e m, FromString e) => Maybe a -> e -> m a
orError m e = maybe (throwError e) return m
exec (Request m as) = do
(RpcFn f) <- orError (Map.lookup m fs)
(mkFromString $ "No such method: " ++ m)
i <- fromJResultE "RPC input" . J.readJSON $ as
o <- f i -- lift $ f i
return $ J.showJSON o
liftToHandler :: (Monad m)
=> ResultT GanetiException m J.JSValue
-> US.HandlerResult m J.JSValue
liftToHandler = liftM ((,) True) . runResultT
-- | Converts a function into the appropriate @RpcFn m@ expression.
-- The function's result must be monadic.
toRpcFn :: Name -> Q Exp
toRpcFn name = [| RpcFn $( uncurryVar name ) |]
-- | Convert a list of named expressions into an expression containing a list
-- of name/expression pairs.
rpcFnsList :: [(String, Q Exp)] -> Q Exp
rpcFnsList = listE . map (\(name, expr) -> tupE [stringE name, expr])
-- | Takes a list of function names and creates a RPC handler that delegates
-- calls to them.
--
-- The functions must conform to
-- @(J.JSON i, J.JSON o) => i -> ResultT GanetiException m o@. The @m@
-- monad types of all the functions must unify.
--
-- The result expression is of type @RpcServer m@.
mkRpcM
:: [Name] -- ^ the names of functions to include
-> Q Exp
mkRpcM names = [| dispatch . Map.fromList $
$( rpcFnsList . map (nameBase &&& toRpcFn) $ names ) |]
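-- A hedged usage sketch (the method below is hypothetical, not part of this
-- module): each exposed function must have a type of the form
-- @(J.JSON i, J.JSON o) => i -> ResultT GanetiException m o@, and the splice
-- assembles them into an 'RpcServer'.
--
-- > echoInt :: Monad m => Int -> ResultT GanetiException m Int
-- > echoInt = return
-- >
-- > myServer :: Monad m => RpcServer m
-- > myServer = $(mkRpcM ['echoInt])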
|
leshchevds/ganeti
|
src/Ganeti/THH/RPC.hs
|
bsd-2-clause
| 4,129
| 0
| 13
| 890
| 707
| 397
| 310
| 54
| 1
|
-- http://www.codewars.com/kata/5259b20d6021e9e14c0010d4
module Reverse where
import Data.List.Split
reverseWords :: String -> String
reverseWords = unwords . (map reverse) . (splitOn " ")
|
Bodigrim/katas
|
src/haskell/6-Reverse-words.hs
|
bsd-2-clause
| 190
| 0
| 8
| 23
| 45
| 26
| 19
| 4
| 1
|
{-# LANGUAGE OverloadedStrings #-}
-- | Module to catch uncaught exceptions and send a notification email
module System.ExceptionMailer
( exceptionMailerTag
, setupExceptionMailer, setupExceptionMailer', setupExceptionMailer_adv
, mkAddress
, mailError
-- * Re-exported for convenience
, Address
) where
import Prelude hiding (catch)
import System.Environment (getProgName)
import Data.String (fromString)
import Data.Maybe
import qualified Data.Text.Lazy as LT
import Control.Exception (SomeException, catch)
import GHC.Conc (setUncaughtExceptionHandler)
import Network.Mail.Mime
import System.Log.Logger (errorM)
-- | String tag used for logging to "System.Log.Logger"
exceptionMailerTag :: String
exceptionMailerTag = "ExceptionMailer"
-- | Setup the global exception notifier. This will catch any otherwise uncaught exceptions and send an email to the
-- given address.
--
-- For example,
--
-- > setupExceptionMailer (mkAddress "My Program" "noreply@example.com")
-- > (mkAddress "Sysadmin" "sysadmin@example.com")
setupExceptionMailer :: Address -- ^ Make the email appear to be from this address
-> Address -- ^ Send the email to here
-> Maybe String -- ^ Subject
-> String -- ^ Prefix to put in the email head
-> IO ()
setupExceptionMailer from to subj pre =
setupExceptionMailer_adv from to subj pre (\_ -> return ())
-- | Convenience version of 'setupExceptionMailer' that just accepts the email addresses
setupExceptionMailer' :: String -- ^ Make the email appear to be from this address
-> String -- ^ Send the email to here
-> Maybe String -- ^ Subject
-> String -- ^ Prefix to put in the email head
-> IO ()
setupExceptionMailer' from to subj pre = setupExceptionMailer (Address Nothing $ fromString from) (Address Nothing $ fromString to) subj pre
-- | Setup the global exception notifier. Like 'setupExceptionMailer' but allows a
-- custom action after the email is sent.
setupExceptionMailer_adv :: Address -- ^ Make the email appear to be from this address
-> Address -- ^ Send the email to here
-> Maybe String -- ^ Subject
-> String -- ^ Prefix to put in the email head
-> (SomeException -> IO ())
-> IO ()
setupExceptionMailer_adv from to subj pre action =
setUncaughtExceptionHandler $ \e -> do
emailException from to subj pre e
action e
-- | Helper function to convert a name and email address into a proper 'Address'
mkAddress :: String -> String -> Address
mkAddress name email = Address (Just $ fromString name) $ fromString email
-- | Send an error email. Exported so that it may be re-used from your own exception handling routines
mailError :: Address -> Address -> Maybe String -> String -> IO ()
mailError from to subj msg = do
prog <- getProgName
let m = simpleMail' to from (fromString $ fromMaybe "Exception Mailer" subj)
(LT.concat ["Program: ", fromString $ prog ++ "\n"
,"Exception:\n", fromString msg])
renderSendMail m
emailException :: Show a => Address -> Address -> Maybe String -> String -> a -> IO ()
emailException from to subj pre e = do
errorM exceptionMailerTag $ "Uncaught exception. emailing ("++
show (addressEmail to)++") : "++show e
catch (mailError from to subj (pre ++ show e))
(\e2 -> errorM exceptionMailerTag $ "Unable to send email : "++show (e2 :: SomeException))
return ()
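-- A hedged end-to-end sketch (addresses, subject and 'runTheRealProgram' are
-- assumptions): install the handler once at startup, then run the program.
--
-- > main :: IO ()
-- > main = do
-- >   setupExceptionMailer (mkAddress "My Program" "noreply@example.com")
-- >                        (mkAddress "Sysadmin" "sysadmin@example.com")
-- >                        (Just "My Program crashed")
-- >                        "An uncaught exception reached the top level:\n"
-- >   runTheRealProgram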
|
drpowell/exception-mailer
|
System/ExceptionMailer.hs
|
bsd-2-clause
| 3,694
| 0
| 15
| 967
| 701
| 371
| 330
| 57
| 1
|
-- | Includes the Clipping class for objects that support clipping rectangles,
-- its instances, and monadic convenience functions.
module Tea.Clipping ( Clipping (clip)
, clipM
) where
import Control.Monad.State
import Control.Monad.Trans
import Graphics.UI.SDL (withClipRect, Surface, Rect (..))
import Tea.Tea
import Tea.Screen
import Tea.Bitmap
import Tea.TeaState
withTea :: Tea s a -> s -> TeaState -> IO ((a, s), TeaState)
withTea = (runStateT .) . runStateT . extractTea
-- | A class instantiated over all types that support Clipping rectangles.
class Clipping v where
-- | Run a specified Tea action where the provided Clipping type has been
-- clipped to the size provided. The clipping only lasts for the duration
-- of the action.
clip :: v -- ^ Buffer to clip
-> (Int, Int) -- ^ Left-hand corner of clipping rectangle coordinates
-> Int -- ^ Width of clipping rectangle
-> Int -- ^ Height of clipping rectangle
-> Tea s z -- ^ Tea action to run
-> Tea s z
clip surf (x, y) w h m = do
scr <- getT
s <- get
((v, s'),st') <- liftIO $ withClipRect (clipping_buffer surf) (Just $ Rect x y w h) (withTea m s scr)
putT st'
put s'
return v
clipping_buffer :: v -> Surface
-- | A convenience version of 'clip' that takes a Tea action instead of a raw buffer.
clipM :: (Clipping v) => Tea s v -> (Int, Int) -> Int -> Int -> Tea s z
-> Tea s z
clipM v a b c d = v >>= \v' -> clip v' a b c d
instance Clipping Screen where
clipping_buffer = screenBuffer
instance Clipping Bitmap where
clipping_buffer = buffer
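-- A hedged usage sketch ('drawSomething' is a placeholder, not a real Tea
-- action): clip the screen to a 100x80 region before running a drawing action.
--
-- > clippedDraw :: Screen -> Tea s ()
-- > clippedDraw scr = clip scr (10, 10) 100 80 drawSomething
-- >   where drawSomething = return ()  -- stand-in for real drawing code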
|
liamoc/tea-hs
|
Tea/Clipping.hs
|
bsd-3-clause
| 1,826
| 0
| 14
| 598
| 429
| 234
| 195
| 35
| 1
|
module Data.List.SlowSubstring(longestSubstring,sharedPrefix) where
import Data.List(sortBy,tails)
import Data.Ord
longestSubstring ::(Eq a, Ord a) => [a] -> [a] -> [a]
longestSubstring first second = head $ reverse $ sortBy (comparing length) comparisons
where comparisons = concatMap (\x -> map (sharedPrefix x) $ tails second) (tails first)
sharedPrefix :: (Eq a, Ord a) => [a] -> [a] -> [a]
sharedPrefix (a:as) (b:bs)
| a==b = a:sharedPrefix as bs
| otherwise = []
sharedPrefix _ _ = []
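-- Small usage examples (doctest-style, not in the original file):
--
-- >>> longestSubstring "hello" "yellow"
-- "ello"
-- >>> sharedPrefix "grape" "grapefruit"
-- "grape"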
|
mwotton/string-similarity
|
Data/List/SlowSubstring.hs
|
bsd-3-clause
| 499
| 0
| 13
| 82
| 244
| 130
| 114
| 11
| 1
|
module Gloss
( module Graphics.Gloss.Interface.Game
, DPixel
, translate
, pixel
, line
, circle
, polygon
) where
import Graphics.Gloss.Interface.Game hiding (Vector, translate, line, circle, polygon)
import qualified Graphics.Gloss.Interface.Game as GL
import Numeric.NumType
import Numeric.Units.Dimensional.Extensible
import Numeric.Units.Dimensional
import DimensionalVector
newtype TPixel = TPixel TPixel
type DPixel = DExt TPixel Pos1 DOne
pixel :: Num a => Unit DPixel a
pixel = Dimensional 1
translate :: Vector DPixel -> Picture -> Picture
translate (x, y) = Translate (realToFrac $ x /~ pixel) (realToFrac $ y /~ pixel)
line :: [Vector DPixel] -> Picture
line = GL.line . map (mapV (realToFrac . (/~ pixel)))
polygon :: [Vector DPixel] -> Picture
polygon = GL.polygon . map (mapV (realToFrac . (/~ pixel)))
circle :: Scalar DPixel -> Picture
circle = GL.circle . (realToFrac . (/~ pixel))
|
Rotsor/wheeled-vehicle
|
Gloss.hs
|
bsd-3-clause
| 936
| 0
| 11
| 169
| 317
| 184
| 133
| 26
| 1
|
-- | Syntactic constructs for basic atomic values.
module CC.Atomic where
import Language.Syntactic
-- | An atomic plain value of a particular type. Note that this differs from
-- Syntactic's built-in Literal construct since One distinguishes between
-- different types of literals in the symbol domain.
data One a t where
One :: a -> One a (Full a)
-- | An atomic empty/unit value. Isomorphic to @One ()@.
data None t where
None :: None (Full a)
-- | Construct a One AST node.
one :: (One a :<: l) => a -> ASTF l a
one = inj . One
-- | Construct a None AST node.
none :: (None :<: l) => ASTF l a
none = inj None
-- | Construct an AST node from a Maybe value.
fromMaybe :: (One a :<: l, None :<: l) => Maybe a -> ASTF l a
fromMaybe (Just a) = one a
fromMaybe Nothing = none
instance Show a => Render (One a) where
renderSym (One a) = show a
instance Render None where
renderSym None = "●"
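-- A hedged sketch (the symbol domain @l@ is left abstract; this declaration is
-- an illustration, not part of the original module): 'fromMaybe' specialised
-- to integer leaves.
--
-- > intLeaf :: (One Int :<: l, None :<: l) => Maybe Int -> ASTF l Int
-- > intLeaf = fromMaybe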
|
walkie/CC-Syntactic
|
src/CC/Atomic.hs
|
bsd-3-clause
| 916
| 0
| 9
| 204
| 248
| 132
| 116
| -1
| -1
|
module Language.Granule.Syntax.ExprSpec where
import Test.Hspec hiding (Spec)
import qualified Test.Hspec as Test
import Language.Granule.Syntax.Expr
import Language.Granule.Syntax.Helpers
import Language.Granule.Syntax.Identifiers
import Language.Granule.Syntax.Pattern
import Language.Granule.Syntax.Span
spec :: Test.Spec
spec = do
describe "Expression helpers" $
it "free variable test" $
freeVars (Val nullSpanNoFile ()
(Abs () (PVar nullSpanNoFile () $ mkId "x") Nothing
(Val nullSpanNoFile ()
(Abs () (PVar nullSpanNoFile () $ mkId "y") Nothing
(Val nullSpanNoFile () (Var () $ mkId "z")))))) `shouldBe` [mkId "z"]
|
dorchard/gram_lang
|
frontend/tests/hspec/Language/Granule/Syntax/ExprSpec.hs
|
bsd-3-clause
| 697
| 0
| 23
| 148
| 219
| 121
| 98
| 17
| 1
|
{-# LANGUAGE OverloadedStrings, QuasiQuotes #-}
module Marshalling where
import Control.Applicative
import Data.Aeson
import qualified Data.ByteString.Lazy as LBS
import qualified Data.ByteString as BS
import Text.RawString.QQ
import Data.Text (Text)
import qualified Data.Text as T
import Data.Scientific (floatingOrInteger)
sectionJson :: LBS.ByteString
sectionJson = [r|
{ "section": {"host": "wikipedia.org"},
"whatisit": {"red": "intoothandclaw"}
}
|]
data TestData = TestData {
section :: Host
, what :: Color
} deriving (Eq, Show)
newtype Host = Host String deriving (Eq, Show)
type Annotation = String
data Color = Red Annotation
| Blue Annotation
| Yellow Annotation deriving (Eq, Show)
instance FromJSON TestData where
parseJSON (Object v) =
TestData <$> v .: "section"
<*> v .: "whatisit"
parseJSON _ = fail "Expected an object for TestData"
instance FromJSON Host where
parseJSON (Object v) = Host <$> v .: "host"
parseJSON _ = fail "Expected an object for Host"
instance FromJSON Color where
parseJSON (Object v) =
(Red <$> v .: "red")
<|> (Blue <$> v .: "blue")
<|> (Yellow <$> v .: "yellow")
parseJSON _ = fail "Expected an object for Color"
-- FromJSON
-- ByteString -> Value -> yourType
-- parse -> unmarshall
-- ToJSON
-- yourtype -> Value -> ByteString
-- marshall -> serialize
data NumberOrString = Numba Integer | Stringy Text deriving (Eq, Show)
instance FromJSON NumberOrString where
parseJSON (Number i) =
case floatingOrInteger i of
(Left _) -> fail "Must be integral number"
(Right integer) -> return $ Numba integer
parseJSON (String s) = return $ Stringy s
parseJSON _ = fail "NumberOrString must be number or string"
dec :: LBS.ByteString -> Maybe NumberOrString
dec = decode
eitherDec :: LBS.ByteString -> Either String NumberOrString
eitherDec = eitherDecode
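-- A minimal sketch of the ToJSON direction described above (this instance is
-- an addition for illustration, not part of the original exercise): it maps
-- each constructor back to a JSON number or string.
instance ToJSON NumberOrString where
  toJSON (Numba i)   = toJSON i
  toJSON (Stringy s) = toJSON s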
main = do
let blah :: Maybe Value
blah = decode sectionJson
print blah
let d = decode sectionJson :: Maybe TestData
print d
let e1 = dec "13e10"
print e1
let e2 = eitherDec "testString"
print e2
|
chengzh2008/hpffp
|
src/ch24-Parser/marshalling.hs
|
bsd-3-clause
| 2,172
| 0
| 10
| 498
| 590
| 313
| 277
| 57
| 1
|
module LaTeXGrapher.Data.Tabular
(
showTabular
) where
import Data.List(intersperse, concat)
showTabular :: [[String]] -> String
showTabular rs = concat $ [(showRow cs) ++ "\n" | cs <- rs]
where ws = colWidths rs
showRow cs = concat $ map (uncurry showLeft) (zip ws cs)
showTabularWithRowSep :: String -> [[String]] -> String
showTabularWithRowSep sep rs = concat $ [(showRow cs) ++ "\n" | cs <- rs]
where ws = colWidths rs
showRow cs = concat $ intersperse sep $ map (uncurry showLeft) (zip ws cs)
showLeft :: Int -> String -> String
showLeft w s | n < 0 = take w s
| otherwise = concat [s, map (const ' ') [1..n]]
where n = w - length s
colWidths :: [[[a]]] -> [Int]
colWidths rs = [maximum $ (colWidth n rs) | n <- [0..]]
colWidth :: Int -> [[[a]]] -> [Int]
colWidth n rs = map length (cols n rs)
col :: Int -> [[a]] -> [a]
col 0 xs = safeHead xs
col n [] = []
col n (x:xs) = col (n-1) xs
cols :: Int -> [[[a]]] -> [[a]]
cols n rs = map (col n) rs
safeHead :: [[a]] -> [a]
safeHead [] = []
safeHead (a:_) = a
-- test = [["hello", "world"],["A greating","a place"],["","world"],["hello",""],[],["","","Third Column!"]]
|
fryguybob/LaTeXGrapher
|
src/LaTeXGrapher/Data/Tabular.hs
|
bsd-3-clause
| 1,236
| 0
| 10
| 327
| 578
| 307
| 271
| 29
| 1
|
module Experiment (experiment, generators) where
import Data.Monoid
import Simulation.Aivika
import Simulation.Aivika.Experiment
import Simulation.Aivika.Experiment.Chart
specs = Specs { spcStartTime = 0,
spcStopTime = 10000,
spcDT = 1,
spcMethod = RungeKutta4,
spcGeneratorType = SimpleGenerator }
experiment :: Experiment
experiment =
defaultExperiment {
experimentSpecs = specs,
experimentRunCount = 1,
experimentTitle = "Difference Equations",
experimentDescription = "Difference Equations as described in " ++
"the corresponded tutorial of Berkeley-Madonna " ++
"with small modification for calculating std." }
t = resultByName "t"
x = resultByName "x"
sumX = resultByName "sumX"
sumX2 = resultByName "sumX2"
avg = resultByName "avg"
std = resultByName "std"
generators :: ChartRendering r => [WebPageGenerator r]
generators =
[outputView defaultExperimentSpecsView,
outputView defaultInfoView,
outputView $ defaultTableView {
tableSeries =
t <> x <> sumX <> sumX2 <> avg <> std },
outputView $ defaultTimeSeriesView {
timeSeriesTitle = "Time Series",
timeSeriesLeftYSeries = x <> avg },
outputView $ defaultTimingStatsView {
timingStatsSeries = x },
outputView $ defaultTimeSeriesView {
timeSeriesTitle = "Sums",
timeSeriesLeftYSeries = sumX,
timeSeriesRightYSeries = sumX2 },
outputView $ defaultTimeSeriesView {
timeSeriesTitle = "Standard Deviation",
timeSeriesLeftYSeries = std } ]
|
dsorokin/aivika-experiment-chart
|
examples/DifferenceEquations/Experiment.hs
|
bsd-3-clause
| 1,625
| 0
| 13
| 410
| 322
| 189
| 133
| 44
| 1
|
{-# LANGUAGE CPP #-}
#if defined(__GLASGOW_HASKELL__) && (__GLASGOW_HASKELL__ >= 702)
{-# LANGUAGE Safe #-}
#endif
module Data.IterIO.Http.Support.Utils where
import Data.ByteString.Lazy.Char8
import Data.IterIO
import Data.IterIO.Http
import Data.IterIO.Http.Support.Action
-- | For 'Action's where the body type is a 'ByteString', parse the
-- body with 'parseParams\'' and prepend the result to the 'Action''s
-- 'Param's
parseParams :: Monad m => Action t ByteString m [Param]
parseParams = do
req <- getHttpReq
body <- getBody
prms0 <- params
prms1 <- parseParams' req body
setParams $ prms1 ++ prms0
-- | Parse url-encoded or form-encoded parameters from an HTTP
-- body.
parseParams' :: Monad m => HttpReq a -> ByteString -> m [Param]
parseParams' req body = inumPure body |$ foldForm req handle []
where handle accm field = do
val <- pureI
return $ (Param (ffName field) val (ffHeaders field)):accm
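-- A hedged usage sketch (the handler below is hypothetical): call
-- 'parseParams' before reading 'params' so that body fields are merged in.
--
-- > handlePost :: Monad m => Action t ByteString m ()
-- > handlePost = do
-- >   _ <- parseParams   -- prepends body fields to the Action's Params
-- >   ps <- params       -- now includes the form/url-encoded body fields
-- >   ...                -- look up whatever fields the handler needs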
|
scslab/iterio-server
|
Data/IterIO/Http/Support/Utils.hs
|
bsd-3-clause
| 943
| 0
| 14
| 175
| 224
| 119
| 105
| 18
| 1
|
{-# LANGUAGE TemplateHaskell #-}
module Parser
( parse
, Parser
, ParseResult
, ParseError
, whole
, digit
, char
, eof
, tests
) where
import Types
import Control.Applicative
import Data.Char
import Data.Either
import Test.Framework
import Test.Framework.Providers.QuickCheck2 (testProperty)
import Test.Framework.TH
import Test.QuickCheck
type ParseError = String
type ParseResult a = Either ParseError (String, a)
newtype Parser a = Parser { applyParser :: String -> ParseResult a }
instance Alternative Parser where
empty = error "Unknown error"
a <|> b = parserPlus a b
where
parserPlus :: Parser a -> Parser a -> Parser a
parserPlus (Parser a) (Parser b) = Parser $ \input ->
a input `parseResultPlus` b input
parseResultPlus :: ParseResult a -> ParseResult a -> ParseResult a
parseResultPlus result@(Right _) _ = result
parseResultPlus _ result = result
instance Applicative Parser where
pure a = Parser $ \input -> Right (input, a)
a <*> b = parserAp a b
where
parserAp :: Parser (a -> b) -> Parser a -> Parser b
parserAp (Parser pf) pa = Parser $ \input -> pf input `parseResultAp` pa
where
parseResultAp :: ParseResult (a -> b) -> Parser a -> ParseResult b
parseResultAp (Right (cont, f)) (Parser p) = case p cont of
Right (cont', a) -> Right (cont', f a)
Left error -> Left error
parseResultAp (Left error) _ = Left error
instance Functor Parser where
fmap = parserMap
where
parserMap :: (a -> b) -> Parser a -> Parser b
parserMap f (Parser p) = Parser $ \input ->
case p input of
Right (cont, a) -> Right (cont, f a)
Left error -> Left error
parse :: Parser a -> String -> Either String a
parse (Parser f) input = case f input of
Right (cont, a) -> Right a
Left error -> Left error
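-- Small usage examples (doctest-style, not in the original module):
--
-- >>> parse digit "7x"
-- Right '7'
-- >>> parse (char 'a') "b"
-- Left "'b' is unexpected, wanted 'a'"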
eof :: Parser ()
eof = Parser eof'
where
eof' :: String -> ParseResult ()
eof' [] = Right ([], ())
eof' cs = Left ("Expected end-of-input, remaining: " ++ cs)
whole :: Parser (Positive Integer)
whole = Positive <$> read <$> some digit
prop_whole :: Positive Integer -> Bool
prop_whole (Positive i) = Right (Positive i) == parse whole (show i)
digit :: Parser Char
digit = Parser digit'
where
digit' :: String -> ParseResult Char
digit' [] = Left "End of input"
digit' (c:cs)
| isDigit c = Right (cs, c)
| otherwise = Left (show c ++ " is not a digit")
char :: Char -> Parser Char
char c = Parser char'
where
char' :: String -> ParseResult Char
char' [] = Left "End of input"
char' (c':cs')
| c == c' = Right (cs', c)
| otherwise = Left (show c' ++ " is unexpected, wanted " ++ show c)
prop_char_matching :: Char -> Bool
prop_char_matching c = Right c == parse (char c) [c]
prop_char_nonmatching :: Char -> Char -> Bool
prop_char_nonmatching c c' = c == c' || isLeft (parse (char c') [c])
tests = $(testGroupGenerator)
|
danstiner/infix-string-calculator
|
src/Parser.hs
|
bsd-3-clause
| 3,243
| 0
| 15
| 1,021
| 1,164
| 591
| 573
| 80
| 2
|
module Analysis where
import qualified Data.Conduit.List as CL
import qualified LHCDataParser as LHCP
import qualified Common as CM
import qualified Data.Conduit.Binary as CB
import qualified EarthQuakeParser as EQP
import qualified System.IO as SIO
import qualified Control.Exception as E
import Data.Conduit
import qualified Data.ByteString.Char8 as BC
posData = "/home/blazej/Programowanie/EarthQake/ROW_DATAAll.txt"
interval = 5*60*60 -- 5 hours, in seconds
output = "/home/blazej/Programowanie/EarthQake/FILTERED DATA REPEAT/"
data TimeStatus =
Earlier
| InWindow
| Later
deriving (Show, Eq)
timeRange :: Double -> LHCP.POS_MEAN_H -> EQP.EarthQuake -> TimeStatus
timeRange interval pos eQ =
toRange dT (toRational interval)
where
dT = toRational $ ( LHCP.time pos) `CM.diffUTCTime` (EQP.time eQ)
toRange dt timeWindow
| dt < 0.0 = Earlier
| dt <= timeWindow = InWindow
| dt > timeWindow = Later
process ::
(CM.MonadThrow m, CM.MonadIO m)
=> [EQP.EarthQuake]
-> Conduit (CM.Perhaps LHCP.POS_MEAN_H) m (EQP.EarthQuake, LHCP.POS_MEAN_H)
process ls=
await >>= maybe (return()) (\x -> filterLHCData x ls)
where
filterLHCData (Left a) _ =
CM.liftIO $ print ("******* ERROR ************ " ++ show a)
filterLHCData (Right a) [] =
return ()
filterLHCData (Right a) (x:xs) =
do
let k = timeRange interval a x
case k of
Earlier -> process (x:xs)
InWindow -> do
yield $ (x, a)
process (x:xs)
Later -> filterLHCData (Right a) xs
coll ::
Monad m =>
Maybe(EQP.EarthQuake, [LHCP.POS_MEAN_H])
-> Conduit (EQP.EarthQuake, LHCP.POS_MEAN_H) m (EQP.EarthQuake, [LHCP.POS_MEAN_H])
coll st =
await >>= maybe (return()) (\x -> fun x st)
where
fun (e,l) Nothing = coll $ Just (e, [l])
fun (e,l) (Just(a, b)) | e == a = coll $ Just(a, l:b)
fun (e,l) (Just(a, b)) | e /= a = do
yield (a, reverse b)
coll $ Just (e, l : (fIsInWindow e b))
fIsInWindow e b = filter (\ x -> (timeRange interval x e) == InWindow) b
saveToFile :: CM.MonadResource m
=> Sink (EQP.EarthQuake, [LHCP.POS_MEAN_H]) m ()
saveToFile = do
xM <- await
case xM of
Nothing -> return ()
Just (fN,ls) -> if (length ls > 50)
then
(save fN ls) >> saveToFile
else
saveToFile
where
save fN ls =
CM.liftIO $ CM.runResourceT $
(CL.sourceList ls)
=$= (CL.map (\x -> LHCP.encode x))
=$= unlinesC
$$ (CB.sinkFile (output ++ show fN))
unlinesC :: Monad m => Conduit BC.ByteString m BC.ByteString
unlinesC = awaitForever (\x -> (yield x) >> (yield $ BC.singleton '\n'))
lhcData ::
CM.MonadResource m
=> FilePath
-> [EQP.EarthQuake] -> m ()--[(EQP.EarthQuake, [LHCP.POS_MEAN_H])]
lhcData fn ls = do
CB.sourceFile fn
=$= CB.lines
=$= CM.skip 3
=$= (CM.parserC LHCP.parsePosition)
=$= (process ls)
=$= (coll Nothing)
-- =$= CM.debug
$$ saveToFile-- CL.consume
main = do
ls <- EQP.earthQuakeList
case ls of
Left l -> print l
Right r -> do k <- CM.runResourceT $ lhcData posData (reverse r)
print k
-- tR = (timeRange (60.0*60)) <$> LHCP.lhcD <*> EQP.eqP
--start :: IO (Either String [(EQP.EarthQuake, LHCP.POS_MEAN_H)])
--start = CM.runResourceT (sequence <$> lhcData posData)
|
bkolad/earthquake
|
Analysis.hs
|
bsd-3-clause
| 3,779
| 0
| 16
| 1,251
| 1,255
| 657
| 598
| 92
| 5
|
module Numeric.Interpolate.Spline (
spline) where
import Data.Array.Repa as R
import Control.Monad
import Control.Monad.Identity
linInterp :: (Floating b, Num b) => b -> b -> b -> b -> b -> b
linInterp x1 x2 y1 y2 x = (x-x1)*(y2-y1)/(x2 - x1) + y1
interpFromKnots :: Array U DIM1 Double ->
Array U DIM1 Double -> Double -> Double
interpFromKnots x0 y0 x = runIdentity $
do
let (Z :. j) = R.extent x0
(n,_) = head $ filter (\x' -> snd x' <= x) ((zip [0,1..]) $ toList x0)
let x0' = x0 ! (Z:. n)
x1' = x0 ! (Z:. n+1)
y0' = y0 ! (Z:. n)
y1' = y0 ! (Z:. n+1)
return $ linInterp x0' x1' y0' y1' x
spline :: Array U DIM1 Double -> Array U DIM1 Double ->
Int -> Array D DIM1 Double -> Array D DIM1 Double
spline x0 y0 1 x = R.map (interpFromKnots x0 y0) x
|
cmiller730/NumHs
|
Numeric/Interpolate/Spline.hs
|
bsd-3-clause
| 845
| 0
| 17
| 252
| 419
| 218
| 201
| 21
| 1
|
module Todo where
data Diff a = Diff a
| DiffGroup [Diff a]
data TaskSeq a = Begin { diffTS :: Diff a
, continueTS :: TaskSeq a }
| DoneForNow
infixr 5 `andThen`
andThen :: Diff a -> TaskSeq a -> TaskSeq a
diff `andThen` continue =
Begin { diffTS = diff
, continueTS = continue }
project :: TaskSeq String
project =
Diff "be able to assemble & disassemble & reassemble stages aka arrows"
`andThen`
Diff "track all signals"
`andThen`
DiffGroup [ Diff "make uncontrolled IO controlled"
, Diff "make ugliness mechanically traceable" ]
`andThen`
Diff ( "make TODO dependencies statically defined so that working on "
++ "a task that depends on a more closely upcoming task is "
++ "forbidden by the compiler" )
`andThen`
DoneForNow
|
eallik/haskell-hangman
|
src/Todo.hs
|
bsd-3-clause
| 855
| 0
| 10
| 257
| 184
| 103
| 81
| 25
| 1
|
{-# LANGUAGE TypeFamilies, MultiParamTypeClasses, FlexibleContexts #-}
-- |
-- Module : Simulation.Aivika.Experiment.Types
-- Copyright : Copyright (c) 2012-2017, David Sorokin <david.sorokin@gmail.com>
-- License : BSD3
-- Maintainer : David Sorokin <david.sorokin@gmail.com>
-- Stability : experimental
-- Tested with: GHC 8.0.1
--
-- The module defines the simulation experiments. They automate
-- the process of generating and analyzing the results. Moreover,
-- this module is open to extensions, allowing you to define
-- your own output views for the simulation results, for example,
-- such views that would allow saving the results in PDF or as
-- charts. To decrease the number of dependencies, such possible
-- extensions are not included in this package, although simple
-- views are provided.
--
module Simulation.Aivika.Experiment.Types where
import Control.Monad
import Control.Monad.Trans
import Control.Exception
import Control.Concurrent.ParallelIO.Local
import Data.Maybe
import Data.Monoid
import Data.Either
import GHC.Conc (getNumCapabilities)
import Simulation.Aivika
import Simulation.Aivika.Trans.Exception
-- | It defines the simulation experiment with the specified rendering backend and its bound data.
data Experiment =
Experiment { experimentSpecs :: Specs,
-- ^ The simulation specs for the experiment.
experimentTransform :: ResultTransform,
-- ^ How the results must be transformed before rendering.
experimentLocalisation :: ResultLocalisation,
-- ^ Specifies a localisation applied when rendering the experiment.
experimentRunCount :: Int,
-- ^ How many simulation runs should be launched.
experimentTitle :: String,
-- ^ The experiment title.
experimentDescription :: String,
-- ^ The experiment description.
experimentVerbose :: Bool,
-- ^ Whether the process of generating the results is verbose.
experimentNumCapabilities :: IO Int
-- ^ The number of threads used for the Monte-Carlo simulation
-- if the executable was compiled with the support of multi-threading.
}
-- | The default experiment.
defaultExperiment :: Experiment
defaultExperiment =
Experiment { experimentSpecs = Specs 0 10 0.01 RungeKutta4 SimpleGenerator,
experimentTransform = id,
experimentLocalisation = englishResultLocalisation,
experimentRunCount = 1,
experimentTitle = "Simulation Experiment",
experimentDescription = "",
experimentVerbose = True,
experimentNumCapabilities = getNumCapabilities }
-- | It allows rendering the simulation results in an arbitrary way.
class ExperimentRendering r where
-- | Defines a context used when rendering the experiment.
data ExperimentContext r :: *
-- | Defines the experiment environment.
type ExperimentEnvironment r :: *
-- | Defines the experiment monad type.
type ExperimentMonad r :: * -> *
-- | Lift the experiment computation.
liftExperiment :: r -> ExperimentMonad r a -> IO a
-- | Prepare before rendering the experiment.
prepareExperiment :: Experiment -> r -> ExperimentMonad r (ExperimentEnvironment r)
-- | Render the experiment after the simulation is finished, for example,
-- creating the @index.html@ file in the specified directory.
renderExperiment :: Experiment -> r -> [ExperimentReporter r] -> ExperimentEnvironment r -> ExperimentMonad r ()
-- | It is called when the experiment has been completed.
onExperimentCompleted :: Experiment -> r -> ExperimentEnvironment r -> ExperimentMonad r ()
-- | It is called when the experiment rendering has failed.
onExperimentFailed :: Exception e => Experiment -> r -> ExperimentEnvironment r -> e -> ExperimentMonad r ()
-- | This is a generator of the reporter with the specified rendering backend.
data ExperimentGenerator r =
ExperimentGenerator { generateReporter :: Experiment -> r -> ExperimentEnvironment r -> ExperimentMonad r (ExperimentReporter r)
-- ^ Generate a reporter.
}
-- | Defines a view in which the simulation results should be saved.
-- You should extend this type class to define your own views such
-- as the PDF document.
class ExperimentRendering r => ExperimentView v r where
-- | Create a generator of the reporter.
outputView :: v -> ExperimentGenerator r
-- | It describes the source simulation data used in the experiment.
data ExperimentData =
ExperimentData { experimentResults :: Results,
-- ^ The simulation results used in the experiment.
experimentPredefinedSignals :: ResultPredefinedSignals
-- ^ The predefined signals provided by every model.
}
-- | Defines what creates the simulation reports by the specified renderer.
data ExperimentReporter r =
ExperimentReporter { reporterInitialise :: ExperimentMonad r (),
-- ^ Initialise the reporting before
-- the simulation runs are started.
reporterFinalise :: ExperimentMonad r (),
-- ^ Finalise the reporting after
-- all simulation runs are finished.
reporterSimulate :: ExperimentData -> Composite (),
-- ^ Start the simulation run in the start time.
reporterContext :: ExperimentContext r
-- ^ Return a context used by the renderer.
}
-- | Run the simulation experiment sequentially. For example,
-- it can be a Monte-Carlo simulation dependent on the external
-- 'Parameter' values.
runExperiment :: (ExperimentRendering r,
Monad (ExperimentMonad r),
MonadIO (ExperimentMonad r),
MonadException (ExperimentMonad r))
=> Experiment
-- ^ the simulation experiment to run
-> [ExperimentGenerator r]
-- ^ generators used for rendering
-> r
-- ^ the rendering backend
-> Simulation Results
-- ^ the simulation results received from the model
-> IO (Either SomeException ())
{-# INLINABLE runExperiment #-}
runExperiment e generators r simulation =
runExperimentWithExecutor sequence_ e generators r simulation
-- | Run the simulation experiment in parallel.
--
-- Make sure that you compile with @-threaded@ and supply @+RTS -N2 -RTS@
-- to the generated Haskell executable on a dual-core processor,
-- or you won't get any parallelism. Generally, the mentioned
-- @N@ parameter should correspond to the number of cores for
-- your processor.
--
-- In case of need you might want to specify the number of
-- threads directly with help of 'experimentNumCapabilities',
-- although the real number of parallel threads can depend on many
-- factors.
runExperimentParallel :: (ExperimentRendering r,
Monad (ExperimentMonad r),
MonadIO (ExperimentMonad r),
MonadException (ExperimentMonad r))
=> Experiment
-- ^ the simulation experiment to run
-> [ExperimentGenerator r]
-- ^ generators used for rendering
-> r
-- ^ the rendering backend
-> Simulation Results
-- ^ the simulation results received from the model
-> IO (Either SomeException ())
{-# INLINABLE runExperimentParallel #-}
runExperimentParallel e generators r simulation =
do x <- runExperimentWithExecutor executor e generators r simulation
return (x >> return ())
where executor tasks =
do n <- experimentNumCapabilities e
withPool n $ \pool ->
parallel_ pool tasks
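-- A hedged driver sketch ('myGenerators', 'myRenderer' and 'myModel' are
-- assumptions, not defined in this module): run the experiment in parallel
-- and report a failure, if any.
--
-- > main :: IO ()
-- > main = do
-- >   r <- runExperimentParallel defaultExperiment myGenerators myRenderer myModel
-- >   either print (const (return ())) r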
-- | Run the simulation experiment with the specified executor.
runExperimentWithExecutor :: (ExperimentRendering r,
Monad (ExperimentMonad r),
MonadIO (ExperimentMonad r),
MonadException (ExperimentMonad r))
=> ([IO ()] -> IO a)
-- ^ an executor that allows parallelizing the simulation if required
-> Experiment
-- ^ the simulation experiment to run
-> [ExperimentGenerator r]
-- ^ generators used for rendering
-> r
-- ^ the rendering backend
-> Simulation Results
-- ^ the simulation results received from the model
-> IO (Either SomeException a)
{-# INLINABLE runExperimentWithExecutor #-}
runExperimentWithExecutor executor e generators r simulation =
liftExperiment r $
do let specs = experimentSpecs e
runCount = experimentRunCount e
env <- prepareExperiment e r
let c1 =
do reporters <- mapM (\x -> generateReporter x e r env)
generators
forM_ reporters reporterInitialise
let simulate :: Simulation ()
simulate =
do signals <- newResultPredefinedSignals
results <- simulation
let d = ExperimentData { experimentResults = experimentTransform e results,
experimentPredefinedSignals = signals }
((), fs) <- runDynamicsInStartTime $
runEventWith EarlierEvents $
flip runComposite mempty $
forM_ reporters $ \reporter ->
reporterSimulate reporter d
let m1 =
runEventInStopTime $
return ()
m2 =
runEventInStopTime $
disposeEvent fs
mh (SimulationAbort e') =
return ()
finallySimulation (catchSimulation m1 mh) m2
a <- liftIO $
executor $ runSimulations simulate specs runCount
forM_ reporters reporterFinalise
renderExperiment e r reporters env
onExperimentCompleted e r env
return (Right a)
ch z@(SomeException e') =
do onExperimentFailed e r env e'
return (Left z)
catchComp c1 ch
|
dsorokin/aivika-experiment
|
Simulation/Aivika/Experiment/Types.hs
|
bsd-3-clause
| 11,190
| 0
| 23
| 3,903
| 1,411
| 762
| 649
| 128
| 1
|
module Yesod.Helpers.Upload where
import Prelude
import Yesod
import Yesod.Core.Types (fileSourceRaw)
import Data.Conduit (($=), ($$))
import qualified Data.Conduit.Binary as CB
import qualified Data.ByteString.Lazy as LB
-- | Modify the fileSourceRaw member
-- to restrict the maximum number of bytes that can be read from it.
fiRestrictSourceSize :: Int -> FileInfo -> FileInfo
fiRestrictSourceSize max_size fi = fi { fileSourceRaw = new_source }
where
new_source = fileSourceRaw fi $= CB.isolate max_size
-- | Read all content from the source in a FileInfo,
-- restricted by a maximum byte count;
-- if the content length exceeds that limit, return Nothing.
fiReadLimited :: (MonadResource m) =>
Int -- ^ maximum bytes allowed
-> FileInfo
-> m (Maybe LB.ByteString)
fiReadLimited max_size fi = do
let fi' = fiRestrictSourceSize (max_size + 1) fi
lbs <- fiReadUnlimited fi'
if LB.length lbs > fromIntegral max_size
then return Nothing
else return $ Just lbs
fiReadUnlimited :: (MonadResource m) =>
FileInfo -> m LB.ByteString
fiReadUnlimited fi = fileSource fi $$ CB.sinkLbs
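-- A hedged handler sketch (the route name and the 1 MiB limit are
-- assumptions): read the first uploaded file, rejecting anything larger
-- than the limit.
--
-- > postUploadR :: Handler ()
-- > postUploadR = do
-- >   (_, files) <- runRequestBody
-- >   case files of
-- >     ((_, fi):_) -> do
-- >       mbody <- fiReadLimited (1024 * 1024) fi
-- >       maybe (invalidArgs ["file too large"]) (const (return ())) mbody
-- >     _ -> invalidArgs ["no file uploaded"]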
|
yoo-e/yesod-helpers
|
Yesod/Helpers/Upload.hs
|
bsd-3-clause
| 1,191
| 0
| 12
| 287
| 262
| 145
| 117
| 23
| 2
|
{-# LANGUAGE CPP #-}
module GHC.SourceGen.Pat.Internal where
import GHC.Hs.Pat (Pat(..))
#if MIN_VERSION_ghc(9,0,0)
import GHC.Hs.Type (HsConDetails(..))
import GHC.Types.SrcLoc (unLoc)
#else
import GHC.Hs.Type (HsConDetails(..))
import SrcLoc (unLoc)
#endif
import GHC.SourceGen.Lit.Internal (litNeedsParen, overLitNeedsParen)
import GHC.SourceGen.Syntax.Internal
-- Note: GHC>=8.6 inserts parentheses automatically when pretty-printing patterns.
-- When we stop supporting lower versions, we may be able to simplify this.
parenthesize :: Pat' -> Pat'
parenthesize p
| needsPar p = parPat p
| otherwise = p
needsPar :: Pat' -> Bool
#if MIN_VERSION_ghc(8,6,0)
needsPar (LitPat _ l) = litNeedsParen l
needsPar (NPat _ l _ _) = overLitNeedsParen $ unLoc l
#else
needsPar (LitPat l) = litNeedsParen l
needsPar (NPat l _ _ _) = overLitNeedsParen $ unLoc l
#endif
#if MIN_VERSION_ghc(9,2,0)
needsPar (ConPat _ _ (PrefixCon _ xs)) = not $ null xs
#elif MIN_VERSION_ghc(9,0,0)
needsPar (ConPat _ _ (PrefixCon xs)) = not $ null xs
#else
needsPar (ConPatIn _ (PrefixCon xs)) = not $ null xs
#endif
#if MIN_VERSION_ghc(9,0,0)
needsPar (ConPat _ _ (InfixCon _ _)) = True
#else
needsPar (ConPatIn _ (InfixCon _ _)) = True
needsPar ConPatOut{} = True
#endif
#if MIN_VERSION_ghc(8,6,0)
needsPar SigPat{} = True
#else
needsPar SigPatIn{} = True
needsPar SigPatOut{} = True
#endif
needsPar _ = False
parPat :: Pat' -> Pat'
parPat = withEpAnnNotUsed ParPat . builtPat
|
google/ghc-source-gen
|
src/GHC/SourceGen/Pat/Internal.hs
|
bsd-3-clause
| 1,466
| 0
| 9
| 226
| 282
| 160
| 122
| 22
| 1
|
{-# OPTIONS_GHC -fno-warn-type-defaults #-}
{-# LANGUAGE RecordWildCards #-}
import Data.Foldable (for_)
import Test.Hspec (Spec, describe, it, shouldBe)
import Test.Hspec.Runner (configFastFail, defaultConfig, hspecWith)
import LeapYear (isLeapYear)
main :: IO ()
main = hspecWith defaultConfig {configFastFail = True} specs
specs :: Spec
specs = describe "isLeapYear" $ for_ cases test
where
test Case{..} = it explanation assertion
where
explanation = unwords [show input, "-", description]
assertion = isLeapYear (fromIntegral input) `shouldBe` expected
data Case = Case { description :: String
, input :: Integer
, expected :: Bool
}
cases :: [Case]
cases = [ Case { description = "year not divisible by 4: common year"
, input = 2015
, expected = False
}
, Case { description = "year divisible by 4, not divisible by 100: leap year"
, input = 1996
, expected = True
}
, Case { description = "year divisible by 100, not divisible by 400: common year"
, input = 2100
, expected = False
}
, Case { description = "year divisible by 400: leap year"
, input = 2000
, expected = True
}
]
|
enolive/exercism
|
haskell/leap/test/Tests.hs
|
mit
| 1,443
| 0
| 10
| 538
| 302
| 180
| 122
| 29
| 1
|
-------------------------------------------------------------------------
--
-- StoreTest.hs
--
-- An abstract data type of stores of integers, together with
-- a QuickCheck generator.
--
-- (c) Addison-Wesley, 1996-2011.
--
-------------------------------------------------------------------------
module StoreTest
( Store,
initial, -- Store
value, -- Store -> Var -> Integer
update -- Store -> Var -> Integer -> Store
) where
import Test.QuickCheck
-- Var is the type of variables.
type Var = Char
-- The implementation is given by a newtype declaration, with one
-- constructor, taking an argument of type [ (Integer,Var) ].
data Store = Store [ (Integer,Var) ]
instance Eq Store where
(Store sto1) == (Store sto2) = (sto1 == sto2)
instance Show Store where
showsPrec n (Store sto) = showsPrec n sto
--
initial :: Store
initial = Store []
value :: Store -> Var -> Integer
value (Store []) v = 0
value (Store ((n,w):sto)) v
| v==w = n
| otherwise = value (Store sto) v
update :: Store -> Var -> Integer -> Store
update (Store sto) v n = Store ((n,v):sto)
-- QuickCheck stuff
instance Arbitrary Store where
arbitrary = do
list <- listOf element
return $ Store list
where
element =
do
n <- arbitrary
v <- elements ['a'..'z']
return (n,v)
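-- An illustrative property (added sketch, not part of the original exercise):
-- a value written by 'update' is immediately visible through 'value'.
prop_updateThenValue :: Store -> Var -> Integer -> Bool
prop_updateThenValue st v n = value (update st v n) v == n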
|
Numberartificial/workflow
|
snipets/src/Craft/Chapter16/StoreTest.hs
|
mit
| 1,625
| 0
| 12
| 590
| 362
| 196
| 166
| 30
| 1
|
{-# LANGUAGE PatternGuards, OverloadedStrings #-}
{-
Copyright (C) 2014 Jesse Rosenthal <jrosenthal@jhu.edu>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
{- |
Module : Text.Pandoc.Readers.Docx
Copyright : Copyright (C) 2014 Jesse Rosenthal
License : GNU GPL, version 2 or above
Maintainer : Jesse Rosenthal <jrosenthal@jhu.edu>
Stability : alpha
Portability : portable
Conversion of Docx type (defined in Text.Pandoc.Readers.Docx.Parse)
to 'Pandoc' document. -}
{-
Current state of implementation of Docx entities ([x] means
implemented, [-] means partially implemented):
* Blocks
- [X] Para
- [X] CodeBlock (styled with `SourceCode`)
- [X] BlockQuote (styled with `Quote`, `BlockQuote`, or, optionally,
indented)
- [X] OrderedList
- [X] BulletList
- [X] DefinitionList (styled with adjacent `DefinitionTerm` and `Definition`)
- [X] Header (styled with `Heading#`)
- [ ] HorizontalRule
- [-] Table (column widths and alignments not yet implemented)
* Inlines
- [X] Str
- [X] Emph (From italics. `underline` currently read as span. In
future, it might optionally be emph as well)
- [X] Strong
- [X] Strikeout
- [X] Superscript
- [X] Subscript
- [X] SmallCaps
- [ ] Quoted
- [ ] Cite
- [X] Code (styled with `VerbatimChar`)
- [X] Space
- [X] LineBreak (these are invisible in Word: entered with Shift-Return)
- [ ] Math
- [X] Link (links to an arbitrary bookmark create a span with the target as
id and "anchor" class)
- [-] Image (Links to path in archive. Future option for
data-encoded URI likely.)
- [X] Note (Footnotes and Endnotes are silently combined.)
-}
module Text.Pandoc.Readers.Docx
( readDocx
) where
import Codec.Archive.Zip
import Text.Pandoc.Definition
import Text.Pandoc.Options
import Text.Pandoc.Builder
import Text.Pandoc.Walk
import Text.Pandoc.Readers.Docx.Parse
import Text.Pandoc.Readers.Docx.Lists
import Text.Pandoc.Readers.Docx.Reducible
import Text.Pandoc.Shared
import Text.Pandoc.MediaBag (insertMedia, MediaBag)
import Data.List (delete, (\\), intersect)
import Text.TeXMath (writeTeX)
import Data.Default (Default)
import qualified Data.ByteString.Lazy as B
import qualified Data.Map as M
import Control.Monad.Reader
import Control.Monad.State
import Data.Sequence (ViewL(..), viewl)
import qualified Data.Sequence as Seq (null)
import Text.Pandoc.Error
import Text.Pandoc.Compat.Except
readDocx :: ReaderOptions
-> B.ByteString
-> Either PandocError (Pandoc, MediaBag)
readDocx opts bytes =
case archiveToDocx (toArchive bytes) of
Right docx -> (\(meta, blks, mediaBag) -> (Pandoc meta blks, mediaBag))
<$> (docxToOutput opts docx)
Left _ -> Left (ParseFailure "couldn't parse docx file")
data DState = DState { docxAnchorMap :: M.Map String String
, docxMediaBag :: MediaBag
, docxDropCap :: Inlines
}
instance Default DState where
def = DState { docxAnchorMap = M.empty
, docxMediaBag = mempty
, docxDropCap = mempty
}
data DEnv = DEnv { docxOptions :: ReaderOptions
, docxInHeaderBlock :: Bool }
instance Default DEnv where
def = DEnv def False
type DocxContext = ExceptT PandocError (ReaderT DEnv (State DState))
evalDocxContext :: DocxContext a -> DEnv -> DState -> Either PandocError a
evalDocxContext ctx env st = flip evalState st . flip runReaderT env . runExceptT $ ctx
-- This is empty, but we put it in for future-proofing.
spansToKeep :: [String]
spansToKeep = []
divsToKeep :: [String]
divsToKeep = ["list-item", "Definition", "DefinitionTerm"]
metaStyles :: M.Map String String
metaStyles = M.fromList [ ("Title", "title")
, ("Subtitle", "subtitle")
, ("Author", "author")
, ("Date", "date")
, ("Abstract", "abstract")]
sepBodyParts :: [BodyPart] -> ([BodyPart], [BodyPart])
sepBodyParts = span (\bp -> (isMetaPar bp || isEmptyPar bp))
isMetaPar :: BodyPart -> Bool
isMetaPar (Paragraph pPr _) =
not $ null $ intersect (pStyle pPr) (M.keys metaStyles)
isMetaPar _ = False
isEmptyPar :: BodyPart -> Bool
isEmptyPar (Paragraph _ parParts) =
all isEmptyParPart parParts
where
isEmptyParPart (PlainRun (Run _ runElems)) = all isEmptyElem runElems
isEmptyParPart _ = False
isEmptyElem (TextRun s) = trim s == ""
isEmptyElem _ = True
isEmptyPar _ = False
bodyPartsToMeta' :: [BodyPart] -> DocxContext (M.Map String MetaValue)
bodyPartsToMeta' [] = return M.empty
bodyPartsToMeta' (bp : bps)
| (Paragraph pPr parParts) <- bp
, (c : _)<- intersect (pStyle pPr) (M.keys metaStyles)
, (Just metaField) <- M.lookup c metaStyles = do
inlines <- concatReduce <$> mapM parPartToInlines parParts
remaining <- bodyPartsToMeta' bps
let
f (MetaInlines ils) (MetaInlines ils') = MetaBlocks [Para ils, Para ils']
f (MetaInlines ils) (MetaBlocks blks) = MetaBlocks ((Para ils) : blks)
f m (MetaList mv) = MetaList (m : mv)
f m n = MetaList [m, n]
return $ M.insertWith f metaField (MetaInlines (toList inlines)) remaining
bodyPartsToMeta' (_ : bps) = bodyPartsToMeta' bps
bodyPartsToMeta :: [BodyPart] -> DocxContext Meta
bodyPartsToMeta bps = do
mp <- bodyPartsToMeta' bps
let mp' =
case M.lookup "author" mp of
Just mv -> M.insert "author" (fixAuthors mv) mp
Nothing -> mp
return $ Meta mp'
fixAuthors :: MetaValue -> MetaValue
fixAuthors (MetaBlocks blks) =
MetaList $ map g $ filter f blks
where f (Para _) = True
f _ = False
g (Para ils) = MetaInlines ils
g _ = MetaInlines []
fixAuthors mv = mv
codeStyles :: [String]
codeStyles = ["VerbatimChar"]
codeDivs :: [String]
codeDivs = ["SourceCode"]
runElemToInlines :: RunElem -> Inlines
runElemToInlines (TextRun s) = text s
runElemToInlines (LnBrk) = linebreak
runElemToInlines (Tab) = space
runElemToInlines (SoftHyphen) = text "\xad"
runElemToInlines (NoBreakHyphen) = text "\x2011"
runElemToString :: RunElem -> String
runElemToString (TextRun s) = s
runElemToString (LnBrk) = ['\n']
runElemToString (Tab) = ['\t']
runElemToString (SoftHyphen) = ['\xad']
runElemToString (NoBreakHyphen) = ['\x2011']
runToString :: Run -> String
runToString (Run _ runElems) = concatMap runElemToString runElems
runToString _ = ""
parPartToString :: ParPart -> String
parPartToString (PlainRun run) = runToString run
parPartToString (InternalHyperLink _ runs) = concatMap runToString runs
parPartToString (ExternalHyperLink _ runs) = concatMap runToString runs
parPartToString _ = ""
blacklistedCharStyles :: [String]
blacklistedCharStyles = ["Hyperlink"]
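-- | Resolve a run's character style against its parent style: properties set
-- explicitly on the run win, anything left unset is inherited from the
-- (recursively resolved) parent, and blacklisted styles such as Hyperlink are
-- left untouched.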
resolveDependentRunStyle :: RunStyle -> RunStyle
resolveDependentRunStyle rPr
| Just (s, _) <- rStyle rPr, s `elem` blacklistedCharStyles =
rPr
| Just (_, cs) <- rStyle rPr =
let rPr' = resolveDependentRunStyle cs
in
RunStyle { isBold = case isBold rPr of
Just bool -> Just bool
Nothing -> isBold rPr'
, isItalic = case isItalic rPr of
Just bool -> Just bool
Nothing -> isItalic rPr'
, isSmallCaps = case isSmallCaps rPr of
Just bool -> Just bool
Nothing -> isSmallCaps rPr'
, isStrike = case isStrike rPr of
Just bool -> Just bool
Nothing -> isStrike rPr'
, rVertAlign = case rVertAlign rPr of
Just valign -> Just valign
Nothing -> rVertAlign rPr'
, rUnderline = case rUnderline rPr of
Just ulstyle -> Just ulstyle
Nothing -> rUnderline rPr'
, rStyle = rStyle rPr }
| otherwise = rPr
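-- | Turn a (resolved) run style into a transformation on inlines, peeling off
-- one property at a time: kept span styles, italics, bold, small caps,
-- strikeout, super/subscript, and single underline (mapped to emphasis here).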
runStyleToTransform :: RunStyle -> (Inlines -> Inlines)
runStyleToTransform rPr
| Just (s, _) <- rStyle rPr
, s `elem` spansToKeep =
let rPr' = rPr{rStyle = Nothing}
in
(spanWith ("", [s], [])) . (runStyleToTransform rPr')
| Just True <- isItalic rPr =
emph . (runStyleToTransform rPr {isItalic = Nothing})
| Just True <- isBold rPr =
strong . (runStyleToTransform rPr {isBold = Nothing})
| Just True <- isSmallCaps rPr =
smallcaps . (runStyleToTransform rPr {isSmallCaps = Nothing})
| Just True <- isStrike rPr =
strikeout . (runStyleToTransform rPr {isStrike = Nothing})
| Just SupScrpt <- rVertAlign rPr =
superscript . (runStyleToTransform rPr {rVertAlign = Nothing})
| Just SubScrpt <- rVertAlign rPr =
subscript . (runStyleToTransform rPr {rVertAlign = Nothing})
| Just "single" <- rUnderline rPr =
emph . (runStyleToTransform rPr {rUnderline = Nothing})
| otherwise = id
runToInlines :: Run -> DocxContext Inlines
runToInlines (Run rs runElems)
| Just (s, _) <- rStyle rs
, s `elem` codeStyles =
let rPr = resolveDependentRunStyle rs
codeString = code $ concatMap runElemToString runElems
in
return $ case rVertAlign rPr of
Just SupScrpt -> superscript codeString
Just SubScrpt -> subscript codeString
_ -> codeString
| otherwise = do
let ils = concatReduce (map runElemToInlines runElems)
return $ (runStyleToTransform $ resolveDependentRunStyle rs) ils
runToInlines (Footnote bps) = do
blksList <- concatReduce <$> (mapM bodyPartToBlocks bps)
return $ note blksList
runToInlines (Endnote bps) = do
blksList <- concatReduce <$> (mapM bodyPartToBlocks bps)
return $ note blksList
runToInlines (InlineDrawing fp bs) = do
mediaBag <- gets docxMediaBag
modify $ \s -> s { docxMediaBag = insertMedia fp Nothing bs mediaBag }
return $ image fp "" ""
parPartToInlines :: ParPart -> DocxContext Inlines
parPartToInlines (PlainRun r) = runToInlines r
parPartToInlines (Insertion _ author date runs) = do
opts <- asks docxOptions
case readerTrackChanges opts of
AcceptChanges -> concatReduce <$> mapM runToInlines runs
RejectChanges -> return mempty
AllChanges -> do
ils <- concatReduce <$> mapM runToInlines runs
let attr = ("", ["insertion"], [("author", author), ("date", date)])
return $ spanWith attr ils
parPartToInlines (Deletion _ author date runs) = do
opts <- asks docxOptions
case readerTrackChanges opts of
AcceptChanges -> return mempty
RejectChanges -> concatReduce <$> mapM runToInlines runs
AllChanges -> do
ils <- concatReduce <$> mapM runToInlines runs
let attr = ("", ["deletion"], [("author", author), ("date", date)])
return $ spanWith attr ils
parPartToInlines (BookMark _ anchor) | anchor `elem` dummyAnchors =
return mempty
parPartToInlines (BookMark _ anchor) =
-- We record these, so we can make sure not to overwrite
-- user-defined anchor links with header auto ids.
do
-- get whether we're in a header.
inHdrBool <- asks docxInHeaderBlock
-- Get the anchor map.
anchorMap <- gets docxAnchorMap
-- We don't want to rewrite if we're in a header, since we'll take
-- care of that later, when we make the header anchor. If the
-- bookmark were already in uniqueIdent form, this would lead to a
-- duplication. Otherwise, we check to see if the id is already in
-- there. Rewrite if necessary. This will have the possible effect
-- of rewriting user-defined anchor links. However, since these
-- are not defined in pandoc, it seems like a necessary evil to
-- avoid an extra pass.
let newAnchor =
if not inHdrBool && anchor `elem` (M.elems anchorMap)
then uniqueIdent [Str anchor] (M.elems anchorMap)
else anchor
unless inHdrBool
(modify $ \s -> s { docxAnchorMap = M.insert anchor newAnchor anchorMap})
return $ spanWith (newAnchor, ["anchor"], []) mempty
parPartToInlines (Drawing fp bs) = do
mediaBag <- gets docxMediaBag
modify $ \s -> s { docxMediaBag = insertMedia fp Nothing bs mediaBag }
return $ image fp "" ""
parPartToInlines (InternalHyperLink anchor runs) = do
ils <- concatReduce <$> mapM runToInlines runs
return $ link ('#' : anchor) "" ils
parPartToInlines (ExternalHyperLink target runs) = do
ils <- concatReduce <$> mapM runToInlines runs
return $ link target "" ils
parPartToInlines (PlainOMath exps) = do
return $ math $ writeTeX exps
isAnchorSpan :: Inline -> Bool
isAnchorSpan (Span (_, classes, kvs) ils) =
classes == ["anchor"] &&
null kvs &&
null ils
isAnchorSpan _ = False
dummyAnchors :: [String]
dummyAnchors = ["_GoBack"]
makeHeaderAnchor :: Blocks -> DocxContext Blocks
makeHeaderAnchor bs = case viewl $ unMany bs of
(x :< xs) -> do
x' <- (makeHeaderAnchor' x)
xs' <- (makeHeaderAnchor $ Many xs)
return $ (singleton x') <> xs'
EmptyL -> return mempty
makeHeaderAnchor' :: Block -> DocxContext Block
-- If there is an anchor already there (an anchor span in the header,
-- to be exact), we rename and associate the new id with the old one.
makeHeaderAnchor' (Header n (_, classes, kvs) ils)
| (c:cs) <- filter isAnchorSpan ils
, (Span (ident, ["anchor"], _) _) <- c = do
hdrIDMap <- gets docxAnchorMap
let newIdent = uniqueIdent ils (M.elems hdrIDMap)
modify $ \s -> s {docxAnchorMap = M.insert ident newIdent hdrIDMap}
return $ Header n (newIdent, classes, kvs) (ils \\ (c:cs))
-- Otherwise we just give it a name, and register that name (associate
-- it with itself.)
makeHeaderAnchor' (Header n (_, classes, kvs) ils) =
do
hdrIDMap <- gets docxAnchorMap
let newIdent = uniqueIdent ils (M.elems hdrIDMap)
modify $ \s -> s {docxAnchorMap = M.insert newIdent newIdent hdrIDMap}
return $ Header n (newIdent, classes, kvs) ils
makeHeaderAnchor' blk = return blk
-- Rewrite a standalone paragraph block as a Plain block.
singleParaToPlain :: Blocks -> Blocks
singleParaToPlain blks
| (Para (ils) :< seeq) <- viewl $ unMany blks
, Seq.null seeq =
singleton $ Plain ils
singleParaToPlain blks = blks
cellToBlocks :: Cell -> DocxContext Blocks
cellToBlocks (Cell bps) = do
blks <- concatReduce <$> mapM bodyPartToBlocks bps
return $ fromList $ blocksToDefinitions $ blocksToBullets $ toList blks
rowToBlocksList :: Row -> DocxContext [Blocks]
rowToBlocksList (Row cells) = do
blksList <- mapM cellToBlocks cells
return $ map singleParaToPlain blksList
trimLineBreaks :: [Inline] -> [Inline]
trimLineBreaks [] = []
trimLineBreaks (LineBreak : ils) = trimLineBreaks ils
trimLineBreaks ils
| (LineBreak : ils') <- reverse ils = trimLineBreaks (reverse ils')
trimLineBreaks ils = ils
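-- | Turn a paragraph style into a transformation on blocks: selected styles
-- become divs, block-quote styles and positive (non-hanging) indentation
-- become block quotes, and any remaining styles are dropped.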
parStyleToTransform :: ParagraphStyle -> (Blocks -> Blocks)
parStyleToTransform pPr
| (c:cs) <- pStyle pPr
, c `elem` divsToKeep =
let pPr' = pPr { pStyle = cs }
in
(divWith ("", [c], [])) . (parStyleToTransform pPr')
| (c:cs) <- pStyle pPr,
c `elem` listParagraphDivs =
let pPr' = pPr { pStyle = cs, indentation = Nothing}
in
(divWith ("", [c], [])) . (parStyleToTransform pPr')
| (_:cs) <- pStyle pPr
, Just True <- pBlockQuote pPr =
let pPr' = pPr { pStyle = cs }
in
blockQuote . (parStyleToTransform pPr')
| (_:cs) <- pStyle pPr =
let pPr' = pPr { pStyle = cs}
in
parStyleToTransform pPr'
| null (pStyle pPr)
, Just left <- indentation pPr >>= leftParIndent
, Just hang <- indentation pPr >>= hangingParIndent =
let pPr' = pPr { indentation = Nothing }
in
case (left - hang) > 0 of
True -> blockQuote . (parStyleToTransform pPr')
False -> parStyleToTransform pPr'
| null (pStyle pPr),
Just left <- indentation pPr >>= leftParIndent =
let pPr' = pPr { indentation = Nothing }
in
case left > 0 of
True -> blockQuote . (parStyleToTransform pPr')
False -> parStyleToTransform pPr'
parStyleToTransform _ = id
bodyPartToBlocks :: BodyPart -> DocxContext Blocks
bodyPartToBlocks (Paragraph pPr parparts)
| not $ null $ codeDivs `intersect` (pStyle pPr) =
return
$ parStyleToTransform pPr
$ codeBlock
$ concatMap parPartToString parparts
| Just (style, n) <- pHeading pPr = do
ils <- local (\s-> s{docxInHeaderBlock=True}) $
(concatReduce <$> mapM parPartToInlines parparts)
makeHeaderAnchor $
headerWith ("", delete style (pStyle pPr), []) n ils
| otherwise = do
ils <- concatReduce <$> mapM parPartToInlines parparts >>=
(return . fromList . trimLineBreaks . normalizeSpaces . toList)
dropIls <- gets docxDropCap
let ils' = dropIls <> ils
if dropCap pPr
then do modify $ \s -> s { docxDropCap = ils' }
return mempty
else do modify $ \s -> s { docxDropCap = mempty }
return $ case isNull ils' of
True -> mempty
_ -> parStyleToTransform pPr $ para ils'
bodyPartToBlocks (ListItem pPr numId lvl levelInfo parparts) = do
let
kvs = case levelInfo of
(_, fmt, txt, Just start) -> [ ("level", lvl)
, ("num-id", numId)
, ("format", fmt)
, ("text", txt)
, ("start", (show start))
]
(_, fmt, txt, Nothing) -> [ ("level", lvl)
, ("num-id", numId)
, ("format", fmt)
, ("text", txt)
]
blks <- bodyPartToBlocks (Paragraph pPr parparts)
return $ divWith ("", ["list-item"], kvs) blks
bodyPartToBlocks (Tbl _ _ _ []) =
return $ para mempty
bodyPartToBlocks (Tbl cap _ look (r:rs)) = do
let caption = text cap
(hdr, rows) = case firstRowFormatting look of
True -> (Just r, rs)
False -> (Nothing, r:rs)
hdrCells <- case hdr of
Just r' -> rowToBlocksList r'
Nothing -> return []
cells <- mapM rowToBlocksList rows
let size = case null hdrCells of
True -> length $ head cells
False -> length $ hdrCells
--
-- The two following variables (horizontal column alignment and
-- relative column widths) go to the default at the
-- moment. Width information is in the TblGrid field of the Tbl,
-- so should be possible. Alignment might be more difficult,
-- since there doesn't seem to be a column entity in docx.
alignments = replicate size AlignDefault
widths = replicate size 0 :: [Double]
return $ table caption (zip alignments widths) hdrCells cells
bodyPartToBlocks (OMathPara e) = do
return $ para $ displayMath (writeTeX e)
-- replace targets with generated anchors.
rewriteLink' :: Inline -> DocxContext Inline
rewriteLink' l@(Link ils ('#':target, title)) = do
anchorMap <- gets docxAnchorMap
return $ case M.lookup target anchorMap of
Just newTarget -> (Link ils ('#':newTarget, title))
Nothing -> l
rewriteLink' il = return il
rewriteLinks :: [Block] -> DocxContext [Block]
rewriteLinks = mapM (walkM rewriteLink')
bodyToOutput :: Body -> DocxContext (Meta, [Block], MediaBag)
bodyToOutput (Body bps) = do
let (metabps, blkbps) = sepBodyParts bps
meta <- bodyPartsToMeta metabps
blks <- concatReduce <$> mapM bodyPartToBlocks blkbps
blks' <- rewriteLinks $ blocksToDefinitions $ blocksToBullets $ toList blks
mediaBag <- gets docxMediaBag
return $ (meta,
blks',
mediaBag)
docxToOutput :: ReaderOptions -> Docx -> Either PandocError (Meta, [Block], MediaBag)
docxToOutput opts (Docx (Document _ body)) =
let dEnv = def { docxOptions = opts} in
evalDocxContext (bodyToOutput body) dEnv def
|
alexvong1995/pandoc
|
src/Text/Pandoc/Readers/Docx.hs
|
gpl-2.0
| 20,525
| 0
| 17
| 5,133
| 5,869
| 2,993
| 2,876
| 403
| 7
|
{- |
Module : ./HasCASL/RawSym.hs
Description : raw symbol functions
Copyright : (c) Christian Maeder and Uni Bremen 2002-2003
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Christian.Maeder@dfki.de
Stability : provisional
Portability : portable
Raw symbols bridge symb items and the symbols of a signature.
-}
module HasCASL.RawSym where
import HasCASL.As
import HasCASL.AsUtils
import HasCASL.Builtin
import HasCASL.ClassAna
import HasCASL.Le
import HasCASL.Merge (addUnit)
import HasCASL.PrintLe (addClassMap)
import HasCASL.VarDecl
import Common.DocUtils
import Common.Id
import Common.Result
import Common.Lib.State
import qualified Data.Map as Map
statSymbMapItems :: Env -> [SymbMapItems] -> Result RawSymbolMap
statSymbMapItems e sl = do
rs <- mapM ( \ (SymbMapItems kind l _ _)
-> mapM (symbOrMapToRaw e kind) l) sl
foldr ( \ (r1, r2) mm -> do
m <- mm
if Map.member r1 m then do
Result [Diag Error ("duplicate mapping for: " ++
showDoc r1 "\n ignoring: " ++ showDoc r2 "")
$ posOfId $ rawSymName r2] $ Just ()
return m
else return $ Map.insert r1 r2 m)
(return Map.empty) $ concat rs
symbOrMapToRaw :: Env -> SymbKind -> SymbOrMap -> Result (RawSymbol, RawSymbol)
symbOrMapToRaw e k (SymbOrMap s mt _) = do
s1 <- symbToRaw (Just e) k s
s2 <- case mt of
Nothing -> return s1
Just t -> symbToRaw Nothing k t
return (s1, s2)
statSymbItems :: Env -> [SymbItems] -> Result [RawSymbol]
statSymbItems e sl = do
rs <- mapM (\ (SymbItems kind l _ _)
-> mapM (symbToRaw (Just e) kind) l) sl
return $ concat rs
symbToRaw :: Maybe Env -> SymbKind -> Symb -> Result RawSymbol
symbToRaw me k (Symb idt mt _) = case mt of
Nothing -> return $ symbKindToRaw k idt
Just (SymbType sc@(TypeScheme vs t _)) -> case me of
Nothing ->
hint (symbKindToRaw k idt) "ignoring target symbol qualification"
$ getRange sc
Just e ->
let qi ty = ASymbol $ Symbol idt ty
rsc = if k == SyKpred then predTypeScheme (posOfId idt) sc else sc
r = do
let cm = addClassMap cpoMap (classMap e)
(mtysc, rLe) = runState (anaTypeScheme rsc) e
{ typeMap = addUnit cm $ typeMap e
, classMap = cm }
case mtysc of
Nothing -> Result (reverse
$ mkDiag Error "no function type" rsc : envDiags rLe) Nothing
Just asc -> return $ qi $ OpAsItemType asc
rk = if null vs then do
(_, ck) <- convTypeToKind t
maybeResult $ anaKindM ck $ classMap e
else Nothing
in case rk of
Nothing -> case k of
Implicit -> r
SyKop -> r
SyKpred -> r
_ -> mkError "not a valid kind" t
Just fk -> case k of
Implicit -> -- check here which symbol is in the signature
case maybeResult r of
Just sy -> return sy
_ -> return $ qi $ TypeAsItemType fk
SyKop -> r
SyKpred -> r
SyKclass -> return $ qi $ ClassAsItemType fk
_ -> return $ qi $ TypeAsItemType fk
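-- | Check whether a signature symbol matches a raw symbol: the names must
-- coincide, a kinded raw symbol additionally fixes the kind of the symbol
-- type, and a fully qualified raw symbol must be equal to the symbol itself.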
matchSymb :: Symbol -> RawSymbol -> Bool
matchSymb sy rsy = let ty = symType sy in
symName sy == rawSymName rsy && case rsy of
AnID _ -> True
AKindedId k _ -> symbTypeToKind ty == k
ASymbol sy2 -> sy == sy2
instance GetRange RawSymbol where
getRange = getRange . rawSymName
|
spechub/Hets
|
HasCASL/RawSym.hs
|
gpl-2.0
| 3,695
| 0
| 25
| 1,267
| 1,126
| 556
| 570
| 85
| 15
|
module Solarized where
solarizedBase03 = "#002b36"
solarizedBase02 = "#073642"
solarizedBase01 = "#586e75"
solarizedBase00 = "#657b83"
solarizedBase0 = "#839496"
solarizedBase1 = "#93a1a1"
solarizedBase2 = "#eee8d5"
solarizedBase3 = "#fdf6e3"
solarizedYellow = "#b58900"
solarizedOrange = "#cb4b16"
solarizedRed = "#dc322f"
solarizedMagenta = "#d33682"
solarizedViolet = "#6c71c4"
solarizedBlue = "#268bd2"
solarizedCyan = "#2aa198"
solarizedGreen = "#859900"
|
NorfairKing/sus-depot
|
shared/shared/xmonad/Solarized.hs
|
gpl-2.0
| 488
| 0
| 4
| 79
| 84
| 51
| 33
| 17
| 1
|
-----------------------------------------------------------------------------
-- |
-- Module : Text.BlogLiterately
-- Copyright : (c) 2012 Brent Yorgey
-- License : GPL (see LICENSE)
-- Maintainer : Brent Yorgey <byorgey@gmail.com>
--
-- This module is provided as a convenient wrapper which re-exports
-- all the other @Text.BlogLiterately.*@ modules.
--
-----------------------------------------------------------------------------
module Text.BlogLiterately
( module Text.BlogLiterately.Block
, module Text.BlogLiterately.Ghci
, module Text.BlogLiterately.Highlight
, module Text.BlogLiterately.Image
, module Text.BlogLiterately.LaTeX
, module Text.BlogLiterately.Options
, module Text.BlogLiterately.Post
, module Text.BlogLiterately.Run
, module Text.BlogLiterately.Transform
) where
import Text.BlogLiterately.Block
import Text.BlogLiterately.Ghci
import Text.BlogLiterately.Highlight
import Text.BlogLiterately.Image
import Text.BlogLiterately.LaTeX
import Text.BlogLiterately.Options
import Text.BlogLiterately.Post
import Text.BlogLiterately.Run
import Text.BlogLiterately.Transform
|
jwiegley/BlogLiterately
|
src/Text/BlogLiterately.hs
|
gpl-3.0
| 1,152
| 0
| 5
| 153
| 136
| 97
| 39
| 19
| 0
|
---------------------------------------------------------
-- The main program for the hpc-markup tool, part of HPC.
-- Andy Gill and Colin Runciman, June 2006
---------------------------------------------------------
module HpcMarkup (markup_plugin) where
import Trace.Hpc.Mix
import Trace.Hpc.Tix
import Trace.Hpc.Util (HpcPos, fromHpcPos, writeFileUtf8)
import HpcFlags
import HpcUtils
import System.FilePath
import Data.List
import Data.Maybe(fromJust)
import Data.Semigroup as Semi
import Data.Array
import Control.Monad
import qualified Data.Set as Set
------------------------------------------------------------------------------
markup_options :: FlagOptSeq
markup_options
= excludeOpt
. includeOpt
. srcDirOpt
. hpcDirOpt
. resetHpcDirsOpt
. funTotalsOpt
. altHighlightOpt
. destDirOpt
. verbosityOpt
markup_plugin :: Plugin
markup_plugin = Plugin { name = "markup"
, usage = "[OPTION] .. <TIX_FILE> [<MODULE> [<MODULE> ..]]"
, options = markup_options
, summary = "Markup Haskell source with program coverage"
, implementation = markup_main
, init_flags = default_flags
, final_flags = default_final_flags
}
------------------------------------------------------------------------------
markup_main :: Flags -> [String] -> IO ()
markup_main flags (prog:modNames) = do
let hpcflags1 = flags
{ includeMods = Set.fromList modNames
`Set.union`
includeMods flags }
let Flags
{ funTotals = theFunTotals
, altHighlight = invertOutput
, destDir = dest_dir
} = hpcflags1
mtix <- readTix (getTixFileName prog)
Tix tixs <- case mtix of
Nothing -> hpcError markup_plugin $ "unable to find tix file for: " ++ prog
Just a -> return a
mods <-
sequence [ genHtmlFromMod dest_dir hpcflags1 tix theFunTotals invertOutput
| tix <- tixs
, allowModule hpcflags1 (tixModuleName tix)
]
let index_name = "hpc_index"
index_fun = "hpc_index_fun"
index_alt = "hpc_index_alt"
index_exp = "hpc_index_exp"
let writeSummary filename cmp = do
let mods' = sortBy cmp mods
unless (verbosity flags < Normal) $
putStrLn $ "Writing: " ++ (filename <.> "html")
writeFileUtf8 (dest_dir </> filename <.> "html") $
"<html>" ++
"<head>" ++
"<meta http-equiv=\"Content-Type\" content=\"text/html; charset=UTF-8\">" ++
"<style type=\"text/css\">" ++
"table.bar { background-color: #f25913; }\n" ++
"td.bar { background-color: #60de51; }\n" ++
"td.invbar { background-color: #f25913; }\n" ++
"table.dashboard { border-collapse: collapse ; border: solid 1px black }\n" ++
".dashboard td { border: solid 1px black }\n" ++
".dashboard th { border: solid 1px black }\n" ++
"</style>\n" ++
"</head>" ++
"<body>" ++
"<table class=\"dashboard\" width=\"100%\" border=1>\n" ++
"<tr>" ++
"<th rowspan=2><a href=\"" ++ index_name ++ ".html\">module</a></th>" ++
"<th colspan=3><a href=\"" ++ index_fun ++ ".html\">Top Level Definitions</a></th>" ++
"<th colspan=3><a href=\"" ++ index_alt ++ ".html\">Alternatives</a></th>" ++
"<th colspan=3><a href=\"" ++ index_exp ++ ".html\">Expressions</a></th>" ++
"</tr>" ++
"<tr>" ++
"<th>%</th>" ++
"<th colspan=2>covered / total</th>" ++
"<th>%</th>" ++
"<th colspan=2>covered / total</th>" ++
"<th>%</th>" ++
"<th colspan=2>covered / total</th>" ++
"</tr>" ++
concat [ showModuleSummary (modName,fileName,modSummary)
| (modName,fileName,modSummary) <- mods'
] ++
"<tr></tr>" ++
showTotalSummary (mconcat
[ modSummary
| (_,_,modSummary) <- mods'
])
++ "</table></body></html>\n"
writeSummary index_name $ \ (n1,_,_) (n2,_,_) -> compare n1 n2
writeSummary index_fun $ \ (_,_,s1) (_,_,s2) ->
compare (percent (topFunTicked s2) (topFunTotal s2))
(percent (topFunTicked s1) (topFunTotal s1))
writeSummary index_alt $ \ (_,_,s1) (_,_,s2) ->
compare (percent (altTicked s2) (altTotal s2))
(percent (altTicked s1) (altTotal s1))
writeSummary index_exp $ \ (_,_,s1) (_,_,s2) ->
compare (percent (expTicked s2) (expTotal s2))
(percent (expTicked s1) (expTotal s1))
markup_main _ []
= hpcError markup_plugin $ "no .tix file or executable name specified"
-- Add characters to the left of a string until it is at least as
-- large as requested.
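-- For example, padLeft 5 '0' "42" yields "00042".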
padLeft :: Int -> Char -> String -> String
padLeft n c str = go n str
where
-- If the string is already long enough, stop traversing it.
go 0 _ = str
go k [] = replicate k c ++ str
go k (_:xs) = go (k-1) xs
genHtmlFromMod
:: String
-> Flags
-> TixModule
-> Bool
-> Bool
-> IO (String, [Char], ModuleSummary)
genHtmlFromMod dest_dir flags tix theFunTotals invertOutput = do
let theHsPath = srcDirs flags
let modName0 = tixModuleName tix
(Mix origFile _ _ tabStop mix') <- readMixWithFlags flags (Right tix)
let arr_tix :: Array Int Integer
arr_tix = listArray (0,length (tixModuleTixs tix) - 1)
$ tixModuleTixs tix
let tickedWith :: Int -> Integer
tickedWith n = arr_tix ! n
isTicked n = tickedWith n /= 0
let info = [ (pos,theMarkup)
| (gid,(pos,boxLabel)) <- zip [0 ..] mix'
, let binBox = case (isTicked gid,isTicked (gid+1)) of
(False,False) -> []
(True,False) -> [TickedOnlyTrue]
(False,True) -> [TickedOnlyFalse]
(True,True) -> []
, let tickBox = if isTicked gid
then [IsTicked]
else [NotTicked]
, theMarkup <- case boxLabel of
ExpBox {} -> tickBox
TopLevelBox {}
-> TopLevelDecl theFunTotals (tickedWith gid) : tickBox
LocalBox {} -> tickBox
BinBox _ True -> binBox
_ -> []
]
let modSummary = foldr (.) id
[ \ st ->
case boxLabel of
ExpBox False
-> st { expTicked = ticked (expTicked st)
, expTotal = succ (expTotal st)
}
ExpBox True
-> st { expTicked = ticked (expTicked st)
, expTotal = succ (expTotal st)
, altTicked = ticked (altTicked st)
, altTotal = succ (altTotal st)
}
TopLevelBox _ ->
st { topFunTicked = ticked (topFunTicked st)
, topFunTotal = succ (topFunTotal st)
}
_ -> st
| (gid,(_pos,boxLabel)) <- zip [0 ..] mix'
, let ticked = if isTicked gid
then succ
else id
] $ mempty
-- add prefix to modName argument
content <- readFileFromPath (hpcError markup_plugin) origFile theHsPath
let content' = markup tabStop info content
let addLine n xs = "<span class=\"lineno\">" ++ padLeft 5 ' ' (show n) ++ " </span>" ++ xs
let addLines = unlines . map (uncurry addLine) . zip [1 :: Int ..] . lines
let fileName = modName0 <.> "hs" <.> "html"
unless (verbosity flags < Normal) $
putStrLn $ "Writing: " ++ fileName
writeFileUtf8 (dest_dir </> fileName) $
unlines ["<html>",
"<head>",
"<meta http-equiv=\"Content-Type\" content=\"text/html; charset=UTF-8\">",
"<style type=\"text/css\">",
"span.lineno { color: white; background: #aaaaaa; border-right: solid white 12px }",
if invertOutput
then "span.nottickedoff { color: #404040; background: white; font-style: oblique }"
else "span.nottickedoff { background: " ++ yellow ++ "}",
if invertOutput
then "span.istickedoff { color: black; background: #d0c0ff; font-style: normal; }"
else "span.istickedoff { background: white }",
"span.tickonlyfalse { margin: -1px; border: 1px solid " ++ red ++ "; background: " ++ red ++ " }",
"span.tickonlytrue { margin: -1px; border: 1px solid " ++ green ++ "; background: " ++ green ++ " }",
"span.funcount { font-size: small; color: orange; z-index: 2; position: absolute; right: 20 }",
if invertOutput
then "span.decl { font-weight: bold; background: #d0c0ff }"
else "span.decl { font-weight: bold }",
"span.spaces { background: white }",
"</style>",
"</head>",
"<body>",
"<pre>",
concat [
"<span class=\"decl\">",
"<span class=\"nottickedoff\">never executed</span> ",
"<span class=\"tickonlytrue\">always true</span> ",
"<span class=\"tickonlyfalse\">always false</span></span>"],
"</pre>",
"<pre>"] ++ addLines content' ++ "\n</pre>\n</body>\n</html>\n";
modSummary `seq` return (modName0,fileName,modSummary)
data Loc = Loc !Int !Int
deriving (Eq,Ord,Show)
data Markup
= NotTicked
| TickedOnlyTrue
| TickedOnlyFalse
| IsTicked
| TopLevelDecl
Bool -- display entry totals
Integer
deriving (Eq,Show)
markup :: Int -- ^tabStop
-> [(HpcPos,Markup)] -- random list of tick location pairs
-> String -- text to mark up
-> String
markup tabStop mix str = addMarkup tabStop str (Loc 1 1) [] sortedTickLocs
where
tickLocs = [ (Loc ln1 c1,Loc ln2 c2,mark)
| (pos,mark) <- mix
, let (ln1,c1,ln2,c2) = fromHpcPos pos
]
sortedTickLocs = sortBy (\(locA1,locZ1,_) (locA2,locZ2,_) ->
(locA1,locZ2) `compare` (locA2,locZ1)) tickLocs
addMarkup :: Int -- tabStop
-> String -- text to mark up
-> Loc -- current location
-> [(Loc,Markup)] -- stack of open ticks, with closing location
-> [(Loc,Loc,Markup)] -- sorted list of tick location pairs
-> String
-- check the pre-condition.
--addMarkup tabStop cs loc os ticks
-- | not (isSorted (map fst os)) = error $ "addMarkup: bad closing ordering: " ++ show os
--addMarkup tabStop cs loc os@(_:_) ticks
-- | trace (show (loc,os,take 10 ticks)) False = undefined
-- close all open ticks, if we have reached the end
addMarkup _ [] _loc os [] =
concatMap (const closeTick) os
addMarkup tabStop cs loc ((o,_):os) ticks | loc > o =
closeTick ++ addMarkup tabStop cs loc os ticks
--addMarkup tabStop cs loc os ((t1,t2,tik@(TopLevelDecl {})):ticks) | loc == t1 =
-- openTick tik ++ closeTick ++ addMarkup tabStop cs loc os ticks
addMarkup tabStop cs loc os ((t1,t2,tik0):ticks) | loc == t1 =
case os of
((_,tik'):_)
| not (allowNesting tik0 tik')
-> addMarkup tabStop cs loc os ticks -- already marked or bool within marked bool
_ -> openTick tik0 ++ addMarkup tabStop cs loc (addTo (t2,tik0) os) ticks
where
addTo (t,tik) [] = [(t,tik)]
addTo (t,tik) ((t',tik'):xs) | t <= t' = (t,tik):(t',tik'):xs
| otherwise = (t',tik):(t',tik'):xs
addMarkup tabStop0 cs loc os ((t1,_t2,_tik):ticks) | loc > t1 =
-- throw away this tick, because it is from a previous place ??
addMarkup tabStop0 cs loc os ticks
addMarkup tabStop0 ('\n':cs) loc@(Loc ln col) os@((Loc ln2 col2,_):_) ticks
| ln == ln2 && col < col2
= addMarkup tabStop0 (' ':'\n':cs) loc os ticks
addMarkup tabStop0 (c0:cs) loc@(Loc _ p) os ticks =
if c0=='\n' && os/=[] then
concatMap (const closeTick) (downToTopLevel os) ++
c0 : "<span class=\"spaces\">" ++ expand 1 w ++ "</span>" ++
concatMap (openTick.snd) (reverse (downToTopLevel os)) ++
addMarkup tabStop0 cs' loc' os ticks
else if c0=='\t' then
expand p "\t" ++ addMarkup tabStop0 cs (incBy c0 loc) os ticks
else
escape c0 ++ addMarkup tabStop0 cs (incBy c0 loc) os ticks
where
(w,cs') = span (`elem` " \t") cs
loc' = foldl (flip incBy) loc (c0:w)
      escape '>' = "&gt;"
      escape '<' = "&lt;"
      escape '"' = "&quot;"
      escape '&' = "&amp;"
escape c = [c]
expand :: Int -> String -> String
expand _ "" = ""
expand c ('\t':s) = replicate (c' - c) ' ' ++ expand c' s
where
c' = tabStopAfter 8 c
expand c (' ':s) = ' ' : expand (c+1) s
expand _ _ = error "bad character in string for expansion"
incBy :: Char -> Loc -> Loc
incBy '\n' (Loc ln _c) = Loc (succ ln) 1
incBy '\t' (Loc ln c) = Loc ln (tabStopAfter tabStop0 c)
incBy _ (Loc ln c) = Loc ln (succ c)
tabStopAfter :: Int -> Int -> Int
tabStopAfter tabStop c = fromJust (find (>c) [1,(tabStop + 1)..])
addMarkup tabStop cs loc os ticks = "ERROR: " ++ show (take 10 cs,tabStop,loc,take 10 os,take 10 ticks)
openTick :: Markup -> String
openTick NotTicked = "<span class=\"nottickedoff\">"
openTick IsTicked = "<span class=\"istickedoff\">"
openTick TickedOnlyTrue = "<span class=\"tickonlytrue\">"
openTick TickedOnlyFalse = "<span class=\"tickonlyfalse\">"
openTick (TopLevelDecl False _) = openTopDecl
openTick (TopLevelDecl True 0)
= "<span class=\"funcount\">-- never entered</span>" ++
openTopDecl
openTick (TopLevelDecl True 1)
= "<span class=\"funcount\">-- entered once</span>" ++
openTopDecl
openTick (TopLevelDecl True n0)
= "<span class=\"funcount\">-- entered " ++ showBigNum n0 ++ " times</span>" ++ openTopDecl
where showBigNum n | n <= 9999 = show n
| otherwise = case n `quotRem` 1000 of
(q, r) -> showBigNum' q ++ "," ++ showWith r
showBigNum' n | n <= 999 = show n
| otherwise = case n `quotRem` 1000 of
(q, r) -> showBigNum' q ++ "," ++ showWith r
showWith n = padLeft 3 '0' $ show n
closeTick :: String
closeTick = "</span>"
openTopDecl :: String
openTopDecl = "<span class=\"decl\">"
downToTopLevel :: [(Loc,Markup)] -> [(Loc,Markup)]
downToTopLevel ((_,TopLevelDecl {}):_) = []
downToTopLevel (o : os) = o : downToTopLevel os
downToTopLevel [] = []
-- build in logic for nesting bin boxes
allowNesting :: Markup -- innermost
-> Markup -- outermost
-> Bool
allowNesting n m | n == m = False -- no need to double nest
allowNesting IsTicked TickedOnlyFalse = False
allowNesting IsTicked TickedOnlyTrue = False
allowNesting _ _ = True
------------------------------------------------------------------------------
data ModuleSummary = ModuleSummary
{ expTicked :: !Int
, expTotal :: !Int
, topFunTicked :: !Int
, topFunTotal :: !Int
, altTicked :: !Int
, altTotal :: !Int
}
deriving (Show)
showModuleSummary :: (String, String, ModuleSummary) -> String
showModuleSummary (modName,fileName,modSummary) =
"<tr>\n" ++
"<td> <tt>module <a href=\"" ++ fileName ++ "\">"
++ modName ++ "</a></tt></td>\n" ++
showSummary (topFunTicked modSummary) (topFunTotal modSummary) ++
showSummary (altTicked modSummary) (altTotal modSummary) ++
showSummary (expTicked modSummary) (expTotal modSummary) ++
"</tr>\n"
showTotalSummary :: ModuleSummary -> String
showTotalSummary modSummary =
"<tr style=\"background: #e0e0e0\">\n" ++
"<th align=left> Program Coverage Total</tt></th>\n" ++
showSummary (topFunTicked modSummary) (topFunTotal modSummary) ++
showSummary (altTicked modSummary) (altTotal modSummary) ++
showSummary (expTicked modSummary) (expTotal modSummary) ++
"</tr>\n"
showSummary :: (Integral t, Show t) => t -> t -> String
showSummary ticked total =
"<td align=\"right\">" ++ showP (percent ticked total) ++ "</td>" ++
"<td>" ++ show ticked ++ "/" ++ show total ++ "</td>" ++
"<td width=100>" ++
(case percent ticked total of
         Nothing -> "&nbsp;"
Just w -> bar w "bar"
) ++ "</td>"
where
      showP Nothing = "-&nbsp;"
showP (Just x) = show x ++ "%"
bar 0 _ = bar 100 "invbar"
bar w inner = "<table cellpadding=0 cellspacing=0 width=\"100\" class=\"bar\">" ++
"<tr><td><table cellpadding=0 cellspacing=0 width=\"" ++ show w ++ "%\">" ++
"<tr><td height=12 class=" ++ show inner ++ "></td></tr>" ++
"</table></td></tr></table>"
percent :: (Integral a) => a -> a -> Maybe a
percent ticked total = if total == 0 then Nothing else Just (ticked * 100 `div` total)
instance Semi.Semigroup ModuleSummary where
(ModuleSummary eTik1 eTot1 tTik1 tTot1 aTik1 aTot1) <> (ModuleSummary eTik2 eTot2 tTik2 tTot2 aTik2 aTot2)
= ModuleSummary (eTik1 + eTik2) (eTot1 + eTot2) (tTik1 + tTik2) (tTot1 + tTot2) (aTik1 + aTik2) (aTot1 + aTot2)
instance Monoid ModuleSummary where
mempty = ModuleSummary
{ expTicked = 0
, expTotal = 0
, topFunTicked = 0
, topFunTotal = 0
, altTicked = 0
, altTotal = 0
}
mappend = (<>)
------------------------------------------------------------------------------
-- global color palette
red,green,yellow :: String
red = "#f20913"
green = "#60de51"
yellow = "yellow"
|
sdiehl/ghc
|
utils/hpc/HpcMarkup.hs
|
bsd-3-clause
| 19,027
| 0
| 55
| 6,428
| 4,768
| 2,510
| 2,258
| 390
| 16
|
<?xml version='1.0' encoding='ISO-8859-1' ?>
<!DOCTYPE helpset
PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN"
"../dtd/helpset_2_0.dtd">
<helpset version="1.0">
<!-- title -->
<title>LightZone - Hjælp</title>
<!-- maps -->
<maps>
<homeID>top</homeID>
<mapref location="Danish/Map.jhm"/>
</maps>
<!-- views -->
<view>
<name>TOC</name>
    <label>Indholdsfortegnelse</label>
<type>javax.help.TOCView</type>
<data>Danish/LightZoneTOC.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
Danish/JavaHelpSearch
</data>
</view>
<presentation default="true" displayviewimages="false">
<name>main window</name>
<size width="700" height="400" />
<location x="200" y="200" />
<title>LightZone - Hjælp</title>
<toolbar>
<helpaction>javax.help.BackAction</helpaction>
<helpaction>javax.help.ForwardAction</helpaction>
<helpaction>javax.help.SeparatorAction</helpaction>
<helpaction>javax.help.HomeAction</helpaction>
<helpaction>javax.help.ReloadAction</helpaction>
<helpaction>javax.help.SeparatorAction</helpaction>
<helpaction>javax.help.PrintAction</helpaction>
<helpaction>javax.help.PrintSetupAction</helpaction>
</toolbar>
</presentation>
<presentation>
<name>main</name>
<size width="400" height="400" />
<location x="200" y="200" />
<title>LightZone - Hjælp</title>
</presentation>
</helpset>
|
MarinnaCole/LightZone
|
linux/help/LightZone_da.hs
|
bsd-3-clause
| 1,649
| 120
| 48
| 302
| 668
| 335
| 333
| -1
| -1
|
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-}
{-# LANGUAGE UnboxedTuples #-}
module UniqSupply (
-- * Main data type
UniqSupply, -- Abstractly
-- ** Operations on supplies
uniqFromSupply, uniqsFromSupply, -- basic ops
takeUniqFromSupply,
mkSplitUniqSupply,
splitUniqSupply, listSplitUniqSupply,
-- * Unique supply monad and its abstraction
UniqSM, MonadUnique(..),
-- ** Operations on the monad
initUs, initUs_,
lazyThenUs, lazyMapUs,
) where
import Unique
import GHC.IO
import MonadUtils
import Control.Monad
import Data.Bits
import Data.Char
{-
************************************************************************
* *
\subsection{Splittable Unique supply: @UniqSupply@}
* *
************************************************************************
-}
-- | A value of type 'UniqSupply' is unique, and it can
-- supply /one/ distinct 'Unique'. Also, from the supply, one can
-- manufacture an arbitrary number of further 'UniqSupply' values,
-- which will be distinct from the first and from all others.
data UniqSupply
= MkSplitUniqSupply {-# UNPACK #-} !Int -- make the Unique with this
UniqSupply UniqSupply
-- when split => these two supplies
mkSplitUniqSupply :: Char -> IO UniqSupply
-- ^ Create a unique supply out of thin air. The character given must
-- be distinct from those of all calls to this function in the compiler
-- for the values generated to be truly unique.
splitUniqSupply :: UniqSupply -> (UniqSupply, UniqSupply)
-- ^ Build two 'UniqSupply' from a single one, each of which
-- can supply its own 'Unique'.
listSplitUniqSupply :: UniqSupply -> [UniqSupply]
-- ^ Create an infinite list of 'UniqSupply' from a single one
uniqFromSupply :: UniqSupply -> Unique
-- ^ Obtain the 'Unique' from this particular 'UniqSupply'
uniqsFromSupply :: UniqSupply -> [Unique] -- Infinite
-- ^ Obtain an infinite list of 'Unique' that can be generated by constant splitting of the supply
takeUniqFromSupply :: UniqSupply -> (Unique, UniqSupply)
-- ^ Obtain the 'Unique' from this particular 'UniqSupply', and a new supply
mkSplitUniqSupply c
= case ord c `shiftL` 24 of
mask -> let
-- here comes THE MAGIC:
-- This is one of the most hammered bits in the whole compiler
mk_supply
-- NB: Use unsafeInterleaveIO for thread-safety.
= unsafeInterleaveIO (
genSym >>= \ u ->
mk_supply >>= \ s1 ->
mk_supply >>= \ s2 ->
return (MkSplitUniqSupply (mask .|. u) s1 s2)
)
in
mk_supply
foreign import ccall unsafe "genSym" genSym :: IO Int
splitUniqSupply (MkSplitUniqSupply _ s1 s2) = (s1, s2)
listSplitUniqSupply (MkSplitUniqSupply _ s1 s2) = s1 : listSplitUniqSupply s2
uniqFromSupply (MkSplitUniqSupply n _ _) = mkUniqueGrimily n
uniqsFromSupply (MkSplitUniqSupply n _ s2) = mkUniqueGrimily n : uniqsFromSupply s2
takeUniqFromSupply (MkSplitUniqSupply n s1 _) = (mkUniqueGrimily n, s1)
{-
************************************************************************
* *
\subsubsection[UniqSupply-monad]{@UniqSupply@ monad: @UniqSM@}
* *
************************************************************************
-}
-- | A monad which just gives the ability to obtain 'Unique's
newtype UniqSM result = USM { unUSM :: UniqSupply -> (# result, UniqSupply #) }
instance Monad UniqSM where
return = returnUs
(>>=) = thenUs
(>>) = thenUs_
instance Functor UniqSM where
fmap f (USM x) = USM (\us -> case x us of
(# r, us' #) -> (# f r, us' #))
instance Applicative UniqSM where
pure = returnUs
(USM f) <*> (USM x) = USM $ \us -> case f us of
(# ff, us' #) -> case x us' of
(# xx, us'' #) -> (# ff xx, us'' #)
(*>) = thenUs_
-- | Run the 'UniqSM' action, returning the final 'UniqSupply'
initUs :: UniqSupply -> UniqSM a -> (a, UniqSupply)
initUs init_us m = case unUSM m init_us of { (# r, us #) -> (r,us) }
-- | Run the 'UniqSM' action, discarding the final 'UniqSupply'
initUs_ :: UniqSupply -> UniqSM a -> a
initUs_ init_us m = case unUSM m init_us of { (# r, _ #) -> r }
{-# INLINE thenUs #-}
{-# INLINE lazyThenUs #-}
{-# INLINE returnUs #-}
{-# INLINE splitUniqSupply #-}
-- @thenUs@ is where we split the @UniqSupply@.
liftUSM :: UniqSM a -> UniqSupply -> (a, UniqSupply)
liftUSM (USM m) us = case m us of (# a, us' #) -> (a, us')
instance MonadFix UniqSM where
mfix m = USM (\us -> let (r,us') = liftUSM (m r) us in (# r,us' #))
thenUs :: UniqSM a -> (a -> UniqSM b) -> UniqSM b
thenUs (USM expr) cont
= USM (\us -> case (expr us) of
(# result, us' #) -> unUSM (cont result) us')
lazyThenUs :: UniqSM a -> (a -> UniqSM b) -> UniqSM b
lazyThenUs expr cont
= USM (\us -> let (result, us') = liftUSM expr us in unUSM (cont result) us')
thenUs_ :: UniqSM a -> UniqSM b -> UniqSM b
thenUs_ (USM expr) (USM cont)
= USM (\us -> case (expr us) of { (# _, us' #) -> cont us' })
returnUs :: a -> UniqSM a
returnUs result = USM (\us -> (# result, us #))
getUs :: UniqSM UniqSupply
getUs = USM (\us -> case splitUniqSupply us of (us1,us2) -> (# us1, us2 #))
-- | A monad for generating unique identifiers
class Monad m => MonadUnique m where
    -- | Get a new UniqSupply
getUniqueSupplyM :: m UniqSupply
-- | Get a new unique identifier
getUniqueM :: m Unique
-- | Get an infinite list of new unique identifiers
getUniquesM :: m [Unique]
-- This default definition of getUniqueM, while correct, is not as
-- efficient as it could be since it needlessly generates and throws away
-- an extra Unique. For your instances consider providing an explicit
-- definition for 'getUniqueM' which uses 'takeUniqFromSupply' directly.
getUniqueM = liftM uniqFromSupply getUniqueSupplyM
getUniquesM = liftM uniqsFromSupply getUniqueSupplyM
instance MonadUnique UniqSM where
getUniqueSupplyM = getUs
getUniqueM = getUniqueUs
getUniquesM = getUniquesUs
getUniqueUs :: UniqSM Unique
getUniqueUs = USM (\us -> case takeUniqFromSupply us of
(u,us') -> (# u, us' #))
getUniquesUs :: UniqSM [Unique]
getUniquesUs = USM (\us -> case splitUniqSupply us of
(us1,us2) -> (# uniqsFromSupply us1, us2 #))
-- {-# SPECIALIZE mapM :: (a -> UniqSM b) -> [a] -> UniqSM [b] #-}
-- {-# SPECIALIZE mapAndUnzipM :: (a -> UniqSM (b,c)) -> [a] -> UniqSM ([b],[c]) #-}
-- {-# SPECIALIZE mapAndUnzip3M :: (a -> UniqSM (b,c,d)) -> [a] -> UniqSM ([b],[c],[d]) #-}
lazyMapUs :: (a -> UniqSM b) -> [a] -> UniqSM [b]
lazyMapUs _ [] = returnUs []
lazyMapUs f (x:xs)
= f x `lazyThenUs` \ r ->
lazyMapUs f xs `lazyThenUs` \ rs ->
returnUs (r:rs)
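-- A minimal usage sketch (added for illustration; 'exampleThreeUniques' is not
-- part of the original module): create a fresh supply tagged with the
-- character 'x' and draw three uniques from it via the 'UniqSM' monad.
exampleThreeUniques :: IO [Unique]
exampleThreeUniques = do
  us <- mkSplitUniqSupply 'x'
  return (initUs_ us (liftM (take 3) getUniquesM))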
|
acowley/ghc
|
compiler/basicTypes/UniqSupply.hs
|
bsd-3-clause
| 7,267
| 0
| 24
| 1,972
| 1,516
| 828
| 688
| 103
| 1
|
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
Desugaring list comprehensions, monad comprehensions and array comprehensions
-}
{-# LANGUAGE CPP, NamedFieldPuns #-}
module DsListComp ( dsListComp, dsPArrComp, dsMonadComp ) where
#include "HsVersions.h"
import {-# SOURCE #-} DsExpr ( dsExpr, dsLExpr, dsLocalBinds, dsSyntaxExpr )
import HsSyn
import TcHsSyn
import CoreSyn
import MkCore
import DsMonad -- the monadery used in the desugarer
import DsUtils
import DynFlags
import CoreUtils
import Id
import Type
import TysWiredIn
import Match
import PrelNames
import SrcLoc
import Outputable
import TcType
import ListSetOps( getNth )
import Util
{-
List comprehensions may be desugared in one of two ways: ``ordinary''
(as you would expect if you read SLPJ's book) and ``with foldr/build
turned on'' (if you read Gill {\em et al.}'s paper on the subject).
There will be at least one ``qualifier'' in the input.
-}
dsListComp :: [ExprLStmt Id]
-> Type -- Type of entire list
-> DsM CoreExpr
dsListComp lquals res_ty = do
dflags <- getDynFlags
let quals = map unLoc lquals
elt_ty = case tcTyConAppArgs res_ty of
[elt_ty] -> elt_ty
_ -> pprPanic "dsListComp" (ppr res_ty $$ ppr lquals)
if not (gopt Opt_EnableRewriteRules dflags) || gopt Opt_IgnoreInterfacePragmas dflags
-- Either rules are switched off, or we are ignoring what there are;
-- Either way foldr/build won't happen, so use the more efficient
-- Wadler-style desugaring
|| isParallelComp quals
-- Foldr-style desugaring can't handle parallel list comprehensions
then deListComp quals (mkNilExpr elt_ty)
else mkBuildExpr elt_ty (\(c, _) (n, _) -> dfListComp c n quals)
-- Foldr/build should be enabled, so desugar
-- into foldrs and builds
where
-- We must test for ParStmt anywhere, not just at the head, because an extension
-- to list comprehensions would be to add brackets to specify the associativity
-- of qualifier lists. This is really easy to do by adding extra ParStmts into the
-- mix of possibly a single element in length, so we do this to leave the possibility open
isParallelComp = any isParallelStmt
isParallelStmt (ParStmt {}) = True
isParallelStmt _ = False
-- This function desugars an inner list comprehension, together with the binders
-- of that comprehension that the outer comprehension needs, into the corresponding
-- expression and the type of the elements that it outputs (tuples of binders)
dsInnerListComp :: (ParStmtBlock Id Id) -> DsM (CoreExpr, Type)
dsInnerListComp (ParStmtBlock stmts bndrs _)
= do { let bndrs_tuple_type = mkBigCoreVarTupTy bndrs
-- really use original bndrs below!
; expr <- dsListComp (stmts ++ [noLoc $ mkLastStmt (mkBigLHsVarTupId bndrs)])
(mkListTy bndrs_tuple_type)
; return (expr, bndrs_tuple_type) }
-- This function factors out commonality between the desugaring strategies for GroupStmt.
-- Given such a statement it gives you back an expression representing how to compute the transformed
-- list and the tuple that you need to bind from that list in order to proceed with your desugaring
dsTransStmt :: ExprStmt Id -> DsM (CoreExpr, LPat Id)
dsTransStmt (TransStmt { trS_form = form, trS_stmts = stmts, trS_bndrs = binderMap
, trS_by = by, trS_using = using }) = do
let (from_bndrs, to_bndrs) = unzip binderMap
let from_bndrs_tys = map idType from_bndrs
to_bndrs_tys = map idType to_bndrs
to_bndrs_tup_ty = mkBigCoreTupTy to_bndrs_tys
-- Desugar an inner comprehension which outputs a list of tuples of the "from" binders
(expr', from_tup_ty) <- dsInnerListComp (ParStmtBlock stmts from_bndrs noSyntaxExpr)
-- Work out what arguments should be supplied to that expression: i.e. is an extraction
-- function required? If so, create that desugared function and add to arguments
usingExpr' <- dsLExpr using
usingArgs' <- case by of
Nothing -> return [expr']
Just by_e -> do { by_e' <- dsLExpr by_e
; lam' <- matchTuple from_bndrs by_e'
; return [lam', expr'] }
-- Create an unzip function for the appropriate arity and element types and find "map"
unzip_stuff' <- mkUnzipBind form from_bndrs_tys
map_id <- dsLookupGlobalId mapName
-- Generate the expressions to build the grouped list
let -- First we apply the grouping function to the inner list
inner_list_expr' = mkApps usingExpr' usingArgs'
-- Then we map our "unzip" across it to turn the lists of tuples into tuples of lists
-- We make sure we instantiate the type variable "a" to be a list of "from" tuples and
-- the "b" to be a tuple of "to" lists!
-- Then finally we bind the unzip function around that expression
bound_unzipped_inner_list_expr'
= case unzip_stuff' of
Nothing -> inner_list_expr'
Just (unzip_fn', unzip_rhs') ->
Let (Rec [(unzip_fn', unzip_rhs')]) $
mkApps (Var map_id) $
[ Type (mkListTy from_tup_ty)
, Type to_bndrs_tup_ty
, Var unzip_fn'
, inner_list_expr' ]
-- Build a pattern that ensures the consumer binds into the NEW binders,
-- which hold lists rather than single values
let pat = mkBigLHsVarPatTupId to_bndrs -- NB: no '!
return (bound_unzipped_inner_list_expr', pat)
dsTransStmt _ = panic "dsTransStmt: Not given a TransStmt"
{-
************************************************************************
* *
\subsection[DsListComp-ordinary]{Ordinary desugaring of list comprehensions}
* *
************************************************************************
Just as in Phil's chapter~7 in SLPJ, using the rules for
optimally-compiled list comprehensions. This is what Kevin followed
as well, and I quite happily do the same. The TQ translation scheme
transforms a list of qualifiers (either boolean expressions or
generators) into a single expression which implements the list
comprehension. Because we are generating 2nd-order polymorphic
lambda-calculus, calls to NIL and CONS must be applied to a type
argument, as well as their usual value arguments.
\begin{verbatim}
TE << [ e | qs ] >> = TQ << [ e | qs ] ++ Nil (typeOf e) >>
(Rule C)
TQ << [ e | ] ++ L >> = Cons (typeOf e) TE <<e>> TE <<L>>
(Rule B)
TQ << [ e | b , qs ] ++ L >> =
if TE << b >> then TQ << [ e | qs ] ++ L >> else TE << L >>
(Rule A')
TQ << [ e | p <- L1, qs ] ++ L2 >> =
letrec
h = \ u1 ->
case u1 of
[] -> TE << L2 >>
(u2 : u3) ->
(( \ TE << p >> -> ( TQ << [e | qs] ++ (h u3) >> )) u2)
[] (h u3)
in
h ( TE << L1 >> )
"h", "u1", "u2", and "u3" are new variables.
\end{verbatim}
@deListComp@ is the TQ translation scheme. Roughly speaking, @dsExpr@
is the TE translation scheme. Note that we carry around the @L@ list
already desugared. @dsListComp@ does the top TE rule mentioned above.
To the above, we add an additional rule to deal with parallel list
comprehensions. The translation goes roughly as follows:
[ e | p1 <- e11, let v1 = e12, p2 <- e13
| q1 <- e21, let v2 = e22, q2 <- e23]
=>
[ e | ((x1, .., xn), (y1, ..., ym)) <-
zip [(x1,..,xn) | p1 <- e11, let v1 = e12, p2 <- e13]
[(y1,..,ym) | q1 <- e21, let v2 = e22, q2 <- e23]]
where (x1, .., xn) are the variables bound in p1, v1, p2
(y1, .., ym) are the variables bound in q1, v2, q2
In the translation below, the ParStmt branch translates each parallel branch
into a sub-comprehension, and desugars each independently. The resulting lists
are fed to a zip function, we create a binding for all the variables bound in all
the comprehensions, and then we hand things off to the desugarer for bindings.
The zip function is generated here a) because it's small, and b) because then we
don't have to deal with arbitrary limits on the number of zip functions in the
prelude, nor which library the zip function came from.
The introduced tuples are Boxed, but only because I couldn't get it to work
with the Unboxed variety.
-}
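-- For instance, a comprehension such as [ x+1 | x <- xs, odd x ] desugars by
-- rule A' into a local recursive function that walks xs; for each element,
-- rule B turns the guard (odd x) into an if-expression, and rule C conses
-- x+1 onto the recursive call when the guard holds.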
deListComp :: [ExprStmt Id] -> CoreExpr -> DsM CoreExpr
deListComp [] _ = panic "deListComp"
deListComp (LastStmt body _ _ : quals) list
= -- Figure 7.4, SLPJ, p 135, rule C above
ASSERT( null quals )
do { core_body <- dsLExpr body
; return (mkConsExpr (exprType core_body) core_body list) }
-- Non-last: must be a guard
deListComp (BodyStmt guard _ _ _ : quals) list = do -- rule B above
core_guard <- dsLExpr guard
core_rest <- deListComp quals list
return (mkIfThenElse core_guard core_rest list)
-- [e | let B, qs] = let B in [e | qs]
deListComp (LetStmt (L _ binds) : quals) list = do
core_rest <- deListComp quals list
dsLocalBinds binds core_rest
deListComp (stmt@(TransStmt {}) : quals) list = do
(inner_list_expr, pat) <- dsTransStmt stmt
deBindComp pat inner_list_expr quals list
deListComp (BindStmt pat list1 _ _ _ : quals) core_list2 = do -- rule A' above
core_list1 <- dsLExpr list1
deBindComp pat core_list1 quals core_list2
deListComp (ParStmt stmtss_w_bndrs _ _ _ : quals) list
= do { exps_and_qual_tys <- mapM dsInnerListComp stmtss_w_bndrs
; let (exps, qual_tys) = unzip exps_and_qual_tys
; (zip_fn, zip_rhs) <- mkZipBind qual_tys
-- Deal with [e | pat <- zip l1 .. ln] in example above
; deBindComp pat (Let (Rec [(zip_fn, zip_rhs)]) (mkApps (Var zip_fn) exps))
quals list }
where
bndrs_s = [bs | ParStmtBlock _ bs _ <- stmtss_w_bndrs]
-- pat is the pattern ((x1,..,xn), (y1,..,ym)) in the example above
pat = mkBigLHsPatTupId pats
pats = map mkBigLHsVarPatTupId bndrs_s
deListComp (RecStmt {} : _) _ = panic "deListComp RecStmt"
deListComp (ApplicativeStmt {} : _) _ =
panic "deListComp ApplicativeStmt"
deBindComp :: OutPat Id
-> CoreExpr
-> [ExprStmt Id]
-> CoreExpr
-> DsM (Expr Id)
deBindComp pat core_list1 quals core_list2 = do
let u3_ty@u1_ty = exprType core_list1 -- two names, same thing
-- u1_ty is a [alpha] type, and u2_ty = alpha
let u2_ty = hsLPatType pat
let res_ty = exprType core_list2
h_ty = u1_ty `mkFunTy` res_ty
[h, u1, u2, u3] <- newSysLocalsDs [h_ty, u1_ty, u2_ty, u3_ty]
-- the "fail" value ...
let
core_fail = App (Var h) (Var u3)
letrec_body = App (Var h) core_list1
rest_expr <- deListComp quals core_fail
core_match <- matchSimply (Var u2) (StmtCtxt ListComp) pat rest_expr core_fail
let
rhs = Lam u1 $
Case (Var u1) u1 res_ty
[(DataAlt nilDataCon, [], core_list2),
(DataAlt consDataCon, [u2, u3], core_match)]
-- Increasing order of tag
return (Let (Rec [(h, rhs)]) letrec_body)
{-
************************************************************************
* *
\subsection[DsListComp-foldr-build]{Foldr/Build desugaring of list comprehensions}
* *
************************************************************************
@dfListComp@ are the rules used with foldr/build turned on:
\begin{verbatim}
TE[ e | ]            c n = c e n
TE[ e | b , q ]      c n = if b then TE[ e | q ] c n else n
TE[ e | p <- l , q ] c n = let
                              f = \ x b -> case x of
                                             p -> TE[ e | q ] c b
                                             _ -> b
                           in
                           foldr f n l
\end{verbatim}
-}
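-- A source-level sketch of these rules for one generator and one guard. The names
-- 'dfExample' and 'dfExampleDesugared' are illustrative only, and the desugared form
-- is shown with 'c' and 'n' already instantiated to (:) and [], i.e. after the
-- surrounding 'build' wrapper has been inlined away:
dfExample :: [Int] -> [Int]
dfExample xs = [ x * 2 | x <- xs, even x ]

dfExampleDesugared :: [Int] -> [Int]
dfExampleDesugared xs = foldr f [] xs
  where
    f x b | even x    = x * 2 : b      -- TE[ e | b , q ] c n: test the guard, then c e b
          | otherwise = b              -- guard fails: just the rest, b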
dfListComp :: Id -> Id -- 'c' and 'n'
-> [ExprStmt Id] -- the rest of the qual's
-> DsM CoreExpr
dfListComp _ _ [] = panic "dfListComp"
dfListComp c_id n_id (LastStmt body _ _ : quals)
= ASSERT( null quals )
do { core_body <- dsLExpr body
; return (mkApps (Var c_id) [core_body, Var n_id]) }
-- Non-last: must be a guard
dfListComp c_id n_id (BodyStmt guard _ _ _ : quals) = do
core_guard <- dsLExpr guard
core_rest <- dfListComp c_id n_id quals
return (mkIfThenElse core_guard core_rest (Var n_id))
dfListComp c_id n_id (LetStmt (L _ binds) : quals) = do
-- new in 1.3, local bindings
core_rest <- dfListComp c_id n_id quals
dsLocalBinds binds core_rest
dfListComp c_id n_id (stmt@(TransStmt {}) : quals) = do
(inner_list_expr, pat) <- dsTransStmt stmt
-- Anyway, we bind the newly grouped list via the generic binding function
dfBindComp c_id n_id (pat, inner_list_expr) quals
dfListComp c_id n_id (BindStmt pat list1 _ _ _ : quals) = do
-- evaluate the two lists
core_list1 <- dsLExpr list1
-- Do the rest of the work in the generic binding builder
dfBindComp c_id n_id (pat, core_list1) quals
dfListComp _ _ (ParStmt {} : _) = panic "dfListComp ParStmt"
dfListComp _ _ (RecStmt {} : _) = panic "dfListComp RecStmt"
dfListComp _ _ (ApplicativeStmt {} : _) =
panic "dfListComp ApplicativeStmt"
dfBindComp :: Id -> Id -- 'c' and 'n'
-> (LPat Id, CoreExpr)
-> [ExprStmt Id] -- the rest of the qual's
-> DsM CoreExpr
dfBindComp c_id n_id (pat, core_list1) quals = do
-- find the required type
let x_ty = hsLPatType pat
let b_ty = idType n_id
-- create some new local id's
[b, x] <- newSysLocalsDs [b_ty, x_ty]
    -- build the rest of the comprehension
core_rest <- dfListComp c_id b quals
-- build the pattern match
core_expr <- matchSimply (Var x) (StmtCtxt ListComp)
pat core_rest (Var b)
-- now build the outermost foldr, and return
mkFoldrExpr x_ty b_ty (mkLams [x, b] core_expr) (Var n_id) core_list1
{-
************************************************************************
* *
\subsection[DsFunGeneration]{Generation of zip/unzip functions for use in desugaring}
* *
************************************************************************
-}
mkZipBind :: [Type] -> DsM (Id, CoreExpr)
-- mkZipBind [t1, t2]
-- = (zip, \as1:[t1] as2:[t2]
-- -> case as1 of
-- [] -> []
-- (a1:as'1) -> case as2 of
-- [] -> []
-- (a2:as'2) -> (a1, a2) : zip as'1 as'2)]
mkZipBind elt_tys = do
ass <- mapM newSysLocalDs elt_list_tys
as' <- mapM newSysLocalDs elt_tys
as's <- mapM newSysLocalDs elt_list_tys
zip_fn <- newSysLocalDs zip_fn_ty
let inner_rhs = mkConsExpr elt_tuple_ty
(mkBigCoreVarTup as')
(mkVarApps (Var zip_fn) as's)
zip_body = foldr mk_case inner_rhs (zip3 ass as' as's)
return (zip_fn, mkLams ass zip_body)
where
elt_list_tys = map mkListTy elt_tys
elt_tuple_ty = mkBigCoreTupTy elt_tys
elt_tuple_list_ty = mkListTy elt_tuple_ty
zip_fn_ty = mkFunTys elt_list_tys elt_tuple_list_ty
mk_case (as, a', as') rest
= Case (Var as) as elt_tuple_list_ty
[(DataAlt nilDataCon, [], mkNilExpr elt_tuple_ty),
(DataAlt consDataCon, [a', as'], rest)]
-- Increasing order of tag
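-- The commented specification above, written as ordinary Haskell for the two-list
-- case (an illustrative sketch of the shape of the generated binding; the real
-- binding is built directly as Core, and 'zip2Example' is our name):
zip2Example :: [a] -> [b] -> [(a, b)]
zip2Example as1 as2 =
  case as1 of
    []          -> []
    (a1 : as1') -> case as2 of
      []          -> []
      (a2 : as2') -> (a1, a2) : zip2Example as1' as2'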
mkUnzipBind :: TransForm -> [Type] -> DsM (Maybe (Id, CoreExpr))
-- mkUnzipBind [t1, t2]
-- = (unzip, \ys :: [(t1, t2)] -> foldr (\ax :: (t1, t2) axs :: ([t1], [t2])
-- -> case ax of
-- (x1, x2) -> case axs of
-- (xs1, xs2) -> (x1 : xs1, x2 : xs2))
-- ([], [])
-- ys)
--
-- We use foldr here in all cases, even if rules are turned off, because we may as well!
mkUnzipBind ThenForm _
= return Nothing -- No unzipping for ThenForm
mkUnzipBind _ elt_tys
= do { ax <- newSysLocalDs elt_tuple_ty
; axs <- newSysLocalDs elt_list_tuple_ty
; ys <- newSysLocalDs elt_tuple_list_ty
; xs <- mapM newSysLocalDs elt_tys
; xss <- mapM newSysLocalDs elt_list_tys
; unzip_fn <- newSysLocalDs unzip_fn_ty
; [us1, us2] <- sequence [newUniqueSupply, newUniqueSupply]
; let nil_tuple = mkBigCoreTup (map mkNilExpr elt_tys)
concat_expressions = map mkConcatExpression (zip3 elt_tys (map Var xs) (map Var xss))
tupled_concat_expression = mkBigCoreTup concat_expressions
folder_body_inner_case = mkTupleCase us1 xss tupled_concat_expression axs (Var axs)
folder_body_outer_case = mkTupleCase us2 xs folder_body_inner_case ax (Var ax)
folder_body = mkLams [ax, axs] folder_body_outer_case
; unzip_body <- mkFoldrExpr elt_tuple_ty elt_list_tuple_ty folder_body nil_tuple (Var ys)
; return (Just (unzip_fn, mkLams [ys] unzip_body)) }
where
elt_tuple_ty = mkBigCoreTupTy elt_tys
elt_tuple_list_ty = mkListTy elt_tuple_ty
elt_list_tys = map mkListTy elt_tys
elt_list_tuple_ty = mkBigCoreTupTy elt_list_tys
unzip_fn_ty = elt_tuple_list_ty `mkFunTy` elt_list_tuple_ty
mkConcatExpression (list_element_ty, head, tail) = mkConsExpr list_element_ty head tail
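-- Likewise, the unzip binding specialised to pairs, as ordinary Haskell. This is a
-- sketch of the shape mkUnzipBind generates, not the Core itself; the textbook
-- Prelude definition of unzip differs only in using an irrefutable pattern on the
-- accumulator. 'unzip2Example' is an illustrative name.
unzip2Example :: [(a, b)] -> ([a], [b])
unzip2Example ys =
  foldr (\(x1, x2) (xs1, xs2) -> (x1 : xs1, x2 : xs2)) ([], []) ys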
{-
************************************************************************
* *
\subsection[DsPArrComp]{Desugaring of array comprehensions}
* *
************************************************************************
-}
-- entry point for desugaring a parallel array comprehension
--
-- [:e | qss:] = <<[:e | qss:]>> () [:():]
--
dsPArrComp :: [ExprStmt Id]
-> DsM CoreExpr
-- Special case for parallel comprehension
dsPArrComp (ParStmt qss _ _ _ : quals) = dePArrParComp qss quals
-- Special case for simple generators:
--
-- <<[:e' | p <- e, qs:]>> = <<[: e' | qs :]>> p e
--
-- if matching against p cannot fail, or else
--
-- <<[:e' | p <- e, qs:]>> =
-- <<[:e' | qs:]>> p (filterP (\x -> case x of {p -> True; _ -> False}) e)
--
dsPArrComp (BindStmt p e _ _ _ : qs) = do
filterP <- dsDPHBuiltin filterPVar
ce <- dsLExpr e
let ety'ce = parrElemType ce
false = Var falseDataConId
true = Var trueDataConId
v <- newSysLocalDs ety'ce
pred <- matchSimply (Var v) (StmtCtxt PArrComp) p true false
let gen | isIrrefutableHsPat p = ce
| otherwise = mkApps (Var filterP) [Type ety'ce, mkLams [v] pred, ce]
dePArrComp qs p gen
dsPArrComp qs = do -- no ParStmt in `qs'
sglP <- dsDPHBuiltin singletonPVar
let unitArray = mkApps (Var sglP) [Type unitTy, mkCoreTup []]
dePArrComp qs (noLoc $ WildPat unitTy) unitArray
-- the work horse
--
dePArrComp :: [ExprStmt Id]
-> LPat Id -- the current generator pattern
-> CoreExpr -- the current generator expression
-> DsM CoreExpr
dePArrComp [] _ _ = panic "dePArrComp"
--
-- <<[:e' | :]>> pa ea = mapP (\pa -> e') ea
--
dePArrComp (LastStmt e' _ _ : quals) pa cea
= ASSERT( null quals )
do { mapP <- dsDPHBuiltin mapPVar
; let ty = parrElemType cea
; (clam, ty'e') <- deLambda ty pa e'
; return $ mkApps (Var mapP) [Type ty, Type ty'e', clam, cea] }
--
-- <<[:e' | b, qs:]>> pa ea = <<[:e' | qs:]>> pa (filterP (\pa -> b) ea)
--
dePArrComp (BodyStmt b _ _ _ : qs) pa cea = do
filterP <- dsDPHBuiltin filterPVar
let ty = parrElemType cea
(clam,_) <- deLambda ty pa b
dePArrComp qs pa (mkApps (Var filterP) [Type ty, clam, cea])
--
-- <<[:e' | p <- e, qs:]>> pa ea =
-- let ef = \pa -> e
-- in
-- <<[:e' | qs:]>> (pa, p) (crossMap ea ef)
--
-- if matching against p cannot fail, or else
--
-- <<[:e' | p <- e, qs:]>> pa ea =
-- let ef = \pa -> filterP (\x -> case x of {p -> True; _ -> False}) e
-- in
-- <<[:e' | qs:]>> (pa, p) (crossMapP ea ef)
--
dePArrComp (BindStmt p e _ _ _ : qs) pa cea = do
filterP <- dsDPHBuiltin filterPVar
crossMapP <- dsDPHBuiltin crossMapPVar
ce <- dsLExpr e
let ety'cea = parrElemType cea
ety'ce = parrElemType ce
false = Var falseDataConId
true = Var trueDataConId
v <- newSysLocalDs ety'ce
pred <- matchSimply (Var v) (StmtCtxt PArrComp) p true false
let cef | isIrrefutableHsPat p = ce
| otherwise = mkApps (Var filterP) [Type ety'ce, mkLams [v] pred, ce]
(clam, _) <- mkLambda ety'cea pa cef
let ety'cef = ety'ce -- filter doesn't change the element type
pa' = mkLHsPatTup [pa, p]
dePArrComp qs pa' (mkApps (Var crossMapP)
[Type ety'cea, Type ety'cef, cea, clam])
--
-- <<[:e' | let ds, qs:]>> pa ea =
-- <<[:e' | qs:]>> (pa, (x_1, ..., x_n))
-- (mapP (\v@pa -> let ds in (v, (x_1, ..., x_n))) ea)
-- where
-- {x_1, ..., x_n} = DV (ds) -- Defined Variables
--
dePArrComp (LetStmt (L _ ds) : qs) pa cea = do
mapP <- dsDPHBuiltin mapPVar
let xs = collectLocalBinders ds
ty'cea = parrElemType cea
v <- newSysLocalDs ty'cea
clet <- dsLocalBinds ds (mkCoreTup (map Var xs))
let'v <- newSysLocalDs (exprType clet)
let projBody = mkCoreLet (NonRec let'v clet) $
mkCoreTup [Var v, Var let'v]
errTy = exprType projBody
errMsg = text "DsListComp.dePArrComp: internal error!"
cerr <- mkErrorAppDs pAT_ERROR_ID errTy errMsg
ccase <- matchSimply (Var v) (StmtCtxt PArrComp) pa projBody cerr
let pa' = mkLHsPatTup [pa, mkLHsPatTup (map nlVarPat xs)]
proj = mkLams [v] ccase
dePArrComp qs pa' (mkApps (Var mapP)
[Type ty'cea, Type errTy, proj, cea])
--
-- The parser guarantees that parallel comprehensions can only appear as
-- singleton qualifier lists, which we already special case in the caller.
-- So, encountering one here is a bug.
--
dePArrComp (ParStmt {} : _) _ _ =
panic "DsListComp.dePArrComp: malformed comprehension AST: ParStmt"
dePArrComp (TransStmt {} : _) _ _ = panic "DsListComp.dePArrComp: TransStmt"
dePArrComp (RecStmt {} : _) _ _ = panic "DsListComp.dePArrComp: RecStmt"
dePArrComp (ApplicativeStmt {} : _) _ _ =
panic "DsListComp.dePArrComp: ApplicativeStmt"
-- <<[:e' | qs | qss:]>> pa ea =
-- <<[:e' | qss:]>> (pa, (x_1, ..., x_n))
-- (zipP ea <<[:(x_1, ..., x_n) | qs:]>>)
-- where
-- {x_1, ..., x_n} = DV (qs)
--
dePArrParComp :: [ParStmtBlock Id Id] -> [ExprStmt Id] -> DsM CoreExpr
dePArrParComp qss quals = do
(pQss, ceQss) <- deParStmt qss
dePArrComp quals pQss ceQss
where
deParStmt [] =
-- empty parallel statement lists have no source representation
panic "DsListComp.dePArrComp: Empty parallel list comprehension"
deParStmt (ParStmtBlock qs xs _:qss) = do -- first statement
let res_expr = mkLHsVarTuple xs
cqs <- dsPArrComp (map unLoc qs ++ [mkLastStmt res_expr])
parStmts qss (mkLHsVarPatTup xs) cqs
---
parStmts [] pa cea = return (pa, cea)
parStmts (ParStmtBlock qs xs _:qss) pa cea = do -- subsequent statements (zip'ed)
zipP <- dsDPHBuiltin zipPVar
let pa' = mkLHsPatTup [pa, mkLHsVarPatTup xs]
ty'cea = parrElemType cea
res_expr = mkLHsVarTuple xs
cqs <- dsPArrComp (map unLoc qs ++ [mkLastStmt res_expr])
let ty'cqs = parrElemType cqs
cea' = mkApps (Var zipP) [Type ty'cea, Type ty'cqs, cea, cqs]
parStmts qss pa' cea'
-- generate Core corresponding to `\p -> e'
--
deLambda :: Type -- type of the argument
-> LPat Id -- argument pattern
-> LHsExpr Id -- body
-> DsM (CoreExpr, Type)
deLambda ty p e =
mkLambda ty p =<< dsLExpr e
-- generate Core for a lambda pattern match, where the body is already in Core
--
mkLambda :: Type -- type of the argument
-> LPat Id -- argument pattern
-> CoreExpr -- desugared body
-> DsM (CoreExpr, Type)
mkLambda ty p ce = do
v <- newSysLocalDs ty
let errMsg = text "DsListComp.deLambda: internal error!"
ce'ty = exprType ce
cerr <- mkErrorAppDs pAT_ERROR_ID ce'ty errMsg
res <- matchSimply (Var v) (StmtCtxt PArrComp) p ce cerr
return (mkLams [v] res, ce'ty)
-- obtain the element type of the parallel array produced by the given Core
-- expression
--
parrElemType :: CoreExpr -> Type
parrElemType e =
case splitTyConApp_maybe (exprType e) of
Just (tycon, [ty]) | tycon == parrTyCon -> ty
_ -> panic
"DsListComp.parrElemType: not a parallel array type"
-- Translation for monad comprehensions
-- Entry point for monad comprehension desugaring
dsMonadComp :: [ExprLStmt Id] -> DsM CoreExpr
dsMonadComp stmts = dsMcStmts stmts
dsMcStmts :: [ExprLStmt Id] -> DsM CoreExpr
dsMcStmts [] = panic "dsMcStmts"
dsMcStmts (L loc stmt : lstmts) = putSrcSpanDs loc (dsMcStmt stmt lstmts)
---------------
dsMcStmt :: ExprStmt Id -> [ExprLStmt Id] -> DsM CoreExpr
dsMcStmt (LastStmt body _ ret_op) stmts
= ASSERT( null stmts )
do { body' <- dsLExpr body
; dsSyntaxExpr ret_op [body'] }
-- [ .. | let binds, stmts ]
dsMcStmt (LetStmt (L _ binds)) stmts
= do { rest <- dsMcStmts stmts
; dsLocalBinds binds rest }
-- [ .. | a <- m, stmts ]
dsMcStmt (BindStmt pat rhs bind_op fail_op bind_ty) stmts
= do { rhs' <- dsLExpr rhs
; dsMcBindStmt pat rhs' bind_op fail_op bind_ty stmts }
-- Apply `guard` to the `exp` expression
--
-- [ .. | exp, stmts ]
--
dsMcStmt (BodyStmt exp then_exp guard_exp _) stmts
= do { exp' <- dsLExpr exp
; rest <- dsMcStmts stmts
; guard_exp' <- dsSyntaxExpr guard_exp [exp']
; dsSyntaxExpr then_exp [guard_exp', rest] }
-- Group statements desugar like this:
--
-- [| (q, then group by e using f); rest |]
-- ---> f {qt} (\qv -> e) [| q; return qv |] >>= \ n_tup ->
-- case unzip n_tup of qv' -> [| rest |]
--
-- where variables (v1:t1, ..., vk:tk) are bound by q
-- qv = (v1, ..., vk)
-- qt = (t1, ..., tk)
-- (>>=) :: m2 a -> (a -> m3 b) -> m3 b
-- f :: forall a. (a -> t) -> m1 a -> m2 (n a)
-- n_tup :: n qt
-- unzip :: n qt -> (n t1, ..., n tk) (needs Functor n)
dsMcStmt (TransStmt { trS_stmts = stmts, trS_bndrs = bndrs
, trS_by = by, trS_using = using
, trS_ret = return_op, trS_bind = bind_op
, trS_bind_arg_ty = n_tup_ty' -- n (a,b,c)
, trS_fmap = fmap_op, trS_form = form }) stmts_rest
= do { let (from_bndrs, to_bndrs) = unzip bndrs
; let from_bndr_tys = map idType from_bndrs -- Types ty
-- Desugar an inner comprehension which outputs a list of tuples of the "from" binders
; expr' <- dsInnerMonadComp stmts from_bndrs return_op
-- Work out what arguments should be supplied to that expression: i.e. is an extraction
-- function required? If so, create that desugared function and add to arguments
; usingExpr' <- dsLExpr using
; usingArgs' <- case by of
Nothing -> return [expr']
Just by_e -> do { by_e' <- dsLExpr by_e
; lam' <- matchTuple from_bndrs by_e'
; return [lam', expr'] }
-- Generate the expressions to build the grouped list
-- Build a pattern that ensures the consumer binds into the NEW binders,
-- which hold monads rather than single values
; let tup_n_ty' = mkBigCoreVarTupTy to_bndrs
; body <- dsMcStmts stmts_rest
; n_tup_var' <- newSysLocalDs n_tup_ty'
; tup_n_var' <- newSysLocalDs tup_n_ty'
; tup_n_expr' <- mkMcUnzipM form fmap_op n_tup_var' from_bndr_tys
; us <- newUniqueSupply
; let rhs' = mkApps usingExpr' usingArgs'
body' = mkTupleCase us to_bndrs body tup_n_var' tup_n_expr'
; dsSyntaxExpr bind_op [rhs', Lam n_tup_var' body'] }
-- Parallel statements. Use `Control.Monad.Zip.mzip` to zip parallel
-- statements, for example:
--
-- [ body | qs1 | qs2 | qs3 ]
-- -> [ body | (bndrs1, (bndrs2, bndrs3))
-- <- [bndrs1 | qs1] `mzip` ([bndrs2 | qs2] `mzip` [bndrs3 | qs3]) ]
--
-- where `mzip` has type
-- mzip :: forall a b. m a -> m b -> m (a,b)
-- NB: we need a polymorphic mzip because we call it several times
dsMcStmt (ParStmt blocks mzip_op bind_op bind_ty) stmts_rest
= do { exps_w_tys <- mapM ds_inner blocks -- Pairs (exp :: m ty, ty)
; mzip_op' <- dsExpr mzip_op
; let -- The pattern variables
pats = [ mkBigLHsVarPatTupId bs | ParStmtBlock _ bs _ <- blocks]
-- Pattern with tuples of variables
-- [v1,v2,v3] => (v1, (v2, v3))
pat = foldr1 (\p1 p2 -> mkLHsPatTup [p1, p2]) pats
(rhs, _) = foldr1 (\(e1,t1) (e2,t2) ->
(mkApps mzip_op' [Type t1, Type t2, e1, e2],
mkBoxedTupleTy [t1,t2]))
exps_w_tys
; dsMcBindStmt pat rhs bind_op noSyntaxExpr bind_ty stmts_rest }
where
ds_inner (ParStmtBlock stmts bndrs return_op)
= do { exp <- dsInnerMonadComp stmts bndrs return_op
; return (exp, mkBigCoreVarTupTy bndrs) }
dsMcStmt stmt _ = pprPanic "dsMcStmt: unexpected stmt" (ppr stmt)
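-- For lists, 'mzip' is just 'zip', so for three branches the translation sketched in
-- the comment above amounts to the following source-level equivalence. Illustrative
-- only: 'mcParExample' is our name and corresponds to
-- [ a + b + c | a <- qs1 | b <- qs2 | c <- qs3 ]
-- with MonadComprehensions and ParallelListComp enabled.
mcParExample :: [Int] -> [Int] -> [Int] -> [(Int)]
mcParExample qs1 qs2 qs3 =
  [ a + b + c | (a, (b, c)) <- zip qs1 (zip qs2 qs3) ]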
matchTuple :: [Id] -> CoreExpr -> DsM CoreExpr
-- (matchTuple [a,b,c] body)
-- returns the Core term
-- \x. case x of (a,b,c) -> body
matchTuple ids body
= do { us <- newUniqueSupply
; tup_id <- newSysLocalDs (mkBigCoreVarTupTy ids)
; return (Lam tup_id $ mkTupleCase us ids body tup_id (Var tup_id)) }
-- general `rhs' >>= \pat -> stmts` desugaring where `rhs'` is already a
-- desugared `CoreExpr`
dsMcBindStmt :: LPat Id
-> CoreExpr -- ^ the desugared rhs of the bind statement
-> SyntaxExpr Id
-> SyntaxExpr Id
-> Type -- ^ S in (>>=) :: Q -> (R -> S) -> T
-> [ExprLStmt Id]
-> DsM CoreExpr
dsMcBindStmt pat rhs' bind_op fail_op res1_ty stmts
= do { body <- dsMcStmts stmts
; var <- selectSimpleMatchVarL pat
; match <- matchSinglePat (Var var) (StmtCtxt DoExpr) pat
res1_ty (cantFailMatchResult body)
; match_code <- handle_failure pat match fail_op
; dsSyntaxExpr bind_op [rhs', Lam var match_code] }
where
-- In a monad comprehension expression, pattern-match failure just calls
-- the monadic `fail` rather than throwing an exception
handle_failure pat match fail_op
| matchCanFail match
= do { dflags <- getDynFlags
; fail_msg <- mkStringExpr (mk_fail_msg dflags pat)
; fail_expr <- dsSyntaxExpr fail_op [fail_msg]
; extractMatchResult match fail_expr }
| otherwise
= extractMatchResult match (error "It can't fail")
mk_fail_msg :: DynFlags -> Located e -> String
mk_fail_msg dflags pat
= "Pattern match failure in monad comprehension at " ++
showPpr dflags (getLoc pat)
-- Desugar nested monad comprehensions, for example in `then..` constructs
-- dsInnerMonadComp quals [a,b,c] ret_op
-- returns the desugaring of
-- [ (a,b,c) | quals ]
dsInnerMonadComp :: [ExprLStmt Id]
-> [Id] -- Return a tuple of these variables
-> SyntaxExpr Id -- The monomorphic "return" operator
-> DsM CoreExpr
dsInnerMonadComp stmts bndrs ret_op
= dsMcStmts (stmts ++ [noLoc (LastStmt (mkBigLHsVarTupId bndrs) False ret_op)])
-- The `unzip` function for `GroupStmt` in a monad comprehension
--
-- unzip :: m (a,b,..) -> (m a,m b,..)
-- unzip m_tuple = ( liftM selN1 m_tuple
-- , liftM selN2 m_tuple
-- , .. )
--
-- mkMcUnzipM fmap ys [t1, t2]
-- = ( fmap (selN1 :: (t1, t2) -> t1) ys
-- , fmap (selN2 :: (t1, t2) -> t2) ys )
mkMcUnzipM :: TransForm
-> HsExpr TcId -- fmap
-> Id -- Of type n (a,b,c)
-> [Type] -- [a,b,c]
-> DsM CoreExpr -- Of type (n a, n b, n c)
mkMcUnzipM ThenForm _ ys _
= return (Var ys) -- No unzipping to do
mkMcUnzipM _ fmap_op ys elt_tys
= do { fmap_op' <- dsExpr fmap_op
; xs <- mapM newSysLocalDs elt_tys
; let tup_ty = mkBigCoreTupTy elt_tys
; tup_xs <- newSysLocalDs tup_ty
; let mk_elt i = mkApps fmap_op' -- fmap :: forall a b. (a -> b) -> n a -> n b
[ Type tup_ty, Type (getNth elt_tys i)
, mk_sel i, Var ys]
mk_sel n = Lam tup_xs $
mkTupleSelector xs (getNth xs n) tup_xs (Var tup_xs)
; return (mkBigCoreTup (map mk_elt [0..length elt_tys - 1])) }
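-- The same unzip, specialised to pairs and written with ordinary 'fmap' (a sketch of
-- the shape mkMcUnzipM generates; 'functorUnzip2' is an illustrative name):
functorUnzip2 :: Functor n => n (a, b) -> (n a, n b)
functorUnzip2 ys = (fmap fst ys, fmap snd ys)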
|
olsner/ghc
|
compiler/deSugar/DsListComp.hs
|
bsd-3-clause
| 34,002
| 1
| 19
| 10,190
| 6,688
| 3,418
| 3,270
| -1
| -1
|
{-# OPTIONS_GHC -Wno-redundant-constraints -Wno-simplifiable-class-constraints #-}
-- trac #1406: Constraint doesn't reduce in the presence of quantified
-- type variables
{-# LANGUAGE FlexibleInstances, UndecidableInstances, RankNTypes,
MultiParamTypeClasses, FunctionalDependencies #-}
module Problem where
data Z
data S a
class HPrefix l
instance (NSub (S Z) ndiff, HDrop ndiff l l) => HPrefix l
-- Weird test case: (NSub (S Z) ndiff) is simplifiable
class NSub n1 n3 | n1 -> n3
instance NSub Z Z
instance NSub n1 n3 => NSub (S n1) n3
class HDrop n l1 l2 | n l1 -> l2
instance HDrop Z l l
t_hPrefix :: HPrefix l => l -> ()
-- Weird test case: (HPrefix l) is simplifiable
t_hPrefix = undefined
-- In ghc 6.6.1 this works...
thr' :: (forall r. l -> a) -> a
thr' f = f undefined
thP4' = thr' t_hPrefix
-- ... but this doesn't work...?
thr :: (forall r. r -> a) -> a
thr f = f undefined
thP4 = thr t_hPrefix
|
ezyang/ghc
|
testsuite/tests/typecheck/should_compile/tc229.hs
|
bsd-3-clause
| 943
| 0
| 8
| 199
| 240
| 128
| 112
| -1
| -1
|
-- trac #2806
{-# LANGUAGE MagicHash, UnboxedTuples, BangPatterns #-}
module Foo where
import GHC.Base
pass1 = 'a'
where !x = 5#
pass2 = 'a'
where !(I# x) = 5
pass3 = 'a'
where !(b, I# x) = (True, 5)
pass4 = 'a'
where !(# b, I# x #) = (# True, 5 #)
pass5 = 'a'
where !(# b, x #) = (# True, 5# #)
fail1 = 'a'
where x = 5#
fail2 = 'a'
where (I# x) = 5
fail3 = 'a'
where (b, I# x) = (True, 5)
fail4 = 'a'
where (# b, I# x #) = (# True, 5 #)
fail5 = 'a'
where (# b, x #) = (# True, 5# #)
fail6 = 'a'
where (I# !x) = 5
fail7 = 'a'
where (b, !(I# x)) = (True, 5)
fail8 = 'a'
where (# b, !(I# x) #) = (# True, 5 #)
fail9 = 'a'
where (# b, !x #) = (# True, 5# #)
{-
-- Now in tcfail203a.hs, because it's an error
fail10 = 'a'
where !(b, ~(c, (I# x))) = (True, (False, 5))
-}
|
ghc-android/ghc
|
testsuite/tests/typecheck/should_fail/tcfail203.hs
|
bsd-3-clause
| 844
| 0
| 10
| 262
| 340
| 186
| 154
| 31
| 1
|
-- exporting everything but the Foo dcon.
module Mod132_B (module Mod132_A) where
import Mod132_A hiding (Foo)
import Mod132_A (Foo)
|
urbanslug/ghc
|
testsuite/tests/module/Mod132_B.hs
|
bsd-3-clause
| 134
| 0
| 5
| 20
| 28
| 19
| 9
| 3
| 0
|
-- Implementation of a Binary Search Tree in Haskell
-- author : Thomas Minier
data BST a = EmptyTree | Node a (BST a) (BST a) deriving (Show, Read, Eq)
-- Insert a value in a Binary Search Tree
insert :: (Ord a) => a -> BST a -> BST a
insert x EmptyTree = Node x EmptyTree EmptyTree
insert x (Node y left right)
| x == y = Node x left right
| x < y = Node y (insert x left) right
| x > y = Node y left (insert x right)
-- Test if a Binary Search Tree contains a specific value
contains :: (Ord a) => a -> BST a -> Bool
contains x EmptyTree = False
contains x (Node y left right)
| x == y = True
| x < y = contains x left
| x > y = contains x right
-- Remove a value from a Binary Search Tree
delete :: (Ord a) => a -> BST a -> BST a
delete _ EmptyTree = EmptyTree
delete x (Node y left right)
| x < y = Node y (delete x left) right
| x > y = Node y left (delete x right)
| x == y = case (left, right) of
(EmptyTree, _) -> right
(_, EmptyTree) -> left
(Node _ _ _, _) -> Node y' left' right where (y', left') = deleteMax left
-- Remove the max value of a non-empty Binary Search Tree, returning both this value and the new tree
deleteMax :: (Ord a) => BST a -> (a, BST a)
deleteMax EmptyTree = error "Cannot delete the maximum of an empty tree"
deleteMax (Node x left EmptyTree) = (x, left)
deleteMax (Node x left right) = (x', Node x left right') where (x', right') = deleteMax right
-- Test if a Binary Search Tree is correctly built
validTree :: (Ord a) => BST a -> Bool
validTree EmptyTree = True
validTree (Node _ EmptyTree EmptyTree) = True
validTree (Node _ left EmptyTree) = validTree left
validTree (Node _ EmptyTree right) = validTree right
validTree (Node x left@(Node y _ _) right@(Node z _ _))
  | (y < x) && (x < z) = validTree left && validTree right
  | otherwise = False
-- Find the minimum value of a Binary Search Tree
minTree :: (Ord a) => BST a -> a
minTree (Node x EmptyTree _) = x
minTree (Node _ left _) = minTree left
-- Find the maximum value of a Binary search Tree
maxTree :: (Ord a) => BST a -> a
maxTree (Node x _ EmptyTree) = x
maxTree (Node _ _ right) = maxTree right
-- Construct a Binary Search Tree from a list of values
fromList :: (Ord a) => [a] -> BST a
fromList x = foldr insert EmptyTree x
-- Construct a list from a Binary Search Tree
toList :: (Ord a) => BST a -> [a]
toList EmptyTree = []
toList (Node x left right) = (toList left) ++ [x] ++ (toList right)
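-- A small usage sketch (the example values are ours): building a tree with fromList
-- and flattening it with toList yields a sorted list.
exampleSorted :: [Int]
exampleSorted = toList (fromList [5, 3, 8, 1, 4]) -- == [1,3,4,5,8]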
|
Callidon/toolkit
|
haskell/binaryTree.hs
|
mit
| 2,448
| 6
| 10
| 565
| 1,017
| 514
| 503
| -1
| -1
|
-- Copyright (c) 2016-present, SoundCloud Ltd.
-- All rights reserved.
--
-- This source code is distributed under the terms of a MIT license,
-- found in the LICENSE file.
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE TemplateHaskell #-}
module Kubernetes.Model.V1.ConfigMapKeySelector
( ConfigMapKeySelector (..)
, name
, key
, mkConfigMapKeySelector
) where
import Control.Lens.TH (makeLenses)
import Data.Aeson.TH (defaultOptions, deriveJSON,
fieldLabelModifier)
import Data.Text (Text)
import GHC.Generics (Generic)
import Prelude hiding (drop, error, max, min)
import qualified Prelude as P
import Test.QuickCheck (Arbitrary, arbitrary)
import Test.QuickCheck.Instances ()
-- | Selects a key from a ConfigMap.
data ConfigMapKeySelector = ConfigMapKeySelector
{ _name :: !(Maybe Text)
, _key :: !(Text)
} deriving (Show, Eq, Generic)
makeLenses ''ConfigMapKeySelector
$(deriveJSON defaultOptions{fieldLabelModifier = (\n -> if n == "_type_" then "type" else P.drop 1 n)} ''ConfigMapKeySelector)
instance Arbitrary ConfigMapKeySelector where
arbitrary = ConfigMapKeySelector <$> arbitrary <*> arbitrary
-- | Use this method to build a ConfigMapKeySelector
mkConfigMapKeySelector :: Text -> ConfigMapKeySelector
mkConfigMapKeySelector xkeyx = ConfigMapKeySelector Nothing xkeyx
|
soundcloud/haskell-kubernetes
|
lib/Kubernetes/Model/V1/ConfigMapKeySelector.hs
|
mit
| 1,603
| 0
| 14
| 443
| 277
| 166
| 111
| 31
| 1
|
{-# LANGUAGE MultiParamTypeClasses #-}
module Language.SpirV.SourceLanguage where
import Data.Word (Word32)
import Language.SpirV.SpirEnum
import qualified Language.SpirV.Capability as Capability
data SourceLanguage = Unknown
| ESSL
| GLSL
| OpenCL
deriving(Read, Show, Eq, Ord)
instance SpirEnum SourceLanguage Word32 where
toWord Unknown = 0
toWord ESSL = 1
toWord GLSL = 2
toWord OpenCL = 3
fromWord 0 = Just Unknown
fromWord 1 = Just ESSL
fromWord 2 = Just GLSL
fromWord 3 = Just OpenCL
fromWord _ = Nothing
requiredCapabilities _ = []
|
expipiplus1/spir-v
|
Language/SpirV/SourceLanguage.hs
|
mit
| 633
| 0
| 6
| 173
| 172
| 93
| 79
| 21
| 0
|
module Main where
import Control.Monad
import Control.Monad.Error
import System.Environment
import Text.ParserCombinators.Parsec hiding (spaces)
instance Show LispVal where show = showVal
instance Show LispError where show = showError
instance Error LispError where
  noMsg = Default "An error has occurred"
  strMsg = Default
type ThrowsError = Either LispError
trapError action = catchError action (return . show)
extractValue :: ThrowsError a -> a
extractValue (Right val) = val
main :: IO ()
main = do
args <- getArgs
evaled <- return $ liftM show $ readExpr (args !! 0) >>= eval
putStrLn $ extractValue $ trapError evaled
symbol :: Parser Char
symbol = oneOf "!$%&|*+-/:<=?>@^_~#"
readExpr :: String -> ThrowsError LispVal
readExpr input =
case parse parseExpr "lisp" input of
Left err -> throwError $ Parser err
Right val -> return val
eval :: LispVal -> ThrowsError LispVal
eval val@(String _) = return val -- Wtf???
eval val@(Number _) = return val
eval val@(Bool _) = return val
eval (List [Atom "quote", val]) = return val
eval (List (Atom func:args)) = mapM eval args >>= apply func -- apply func $ map eval args
eval badForm = throwError $ BadSpecialForm "Unrecognized special form" badForm
apply :: String -> [LispVal] -> ThrowsError LispVal
apply func args =
maybe
(throwError $ NotFunction "Unrecognized primitive function args" func)
($ args)
(lookup func primitives)
primitives :: [(String, [LispVal] -> ThrowsError LispVal)]
primitives = [("+", numericBinop (+)),
("-", numericBinop (-)),
("*", numericBinop (*)),
("/", numericBinop div),
("mod", numericBinop mod),
("quotient", numericBinop quot),
("remainder", numericBinop rem)]
numericBinop :: (Integer -> Integer -> Integer) -> [LispVal] -> ThrowsError LispVal
numericBinop op singleVal@[_] = throwError $ NumArgs 2 singleVal
numericBinop op params = mapM unpackNum params >>= return . Number . foldl1 op
unpackNum :: LispVal -> ThrowsError Integer
unpackNum (Number n) = return n
unpackNum (String n) =
let parsed = reads n
in if null parsed
then throwError $ TypeMismatch "number" $ String n
else return $ fst $ parsed !! 0
unpackNum (List [n]) = unpackNum n
unpackNum notNum = throwError $ TypeMismatch "number" notNum
showVal :: LispVal -> String
showVal (String contents) = "\"" ++ contents ++ "\""
showVal (Atom name) = name
showVal (Number contents) = show contents
showVal (Bool True) = "#t"
showVal (Bool False) = "#f"
showVal (List contents) = "(" ++ unwordsList contents ++ ")"
showVal (DottedList head tail) = "(" ++ unwordsList head ++ ". " ++ showVal tail ++ ")"
unwordsList :: [LispVal] -> String
unwordsList = unwords . map showVal
spaces :: Parser ()
spaces = skipMany1 space
parseList :: Parser LispVal
parseList = liftM List $ sepBy parseExpr spaces
parseDottedList :: Parser LispVal
parseDottedList = do
head <- endBy parseExpr spaces
tail <- char '.' >> spaces >> parseExpr
return $ DottedList head tail
parseExpr :: Parser LispVal -- Why??? We pass parseExpr into parse but it doesn't take any arguments
parseExpr =
parseAtom <|> parseString <|> parseNumber <|> parseQuoted <|> do
char '('
x <- (try parseList) <|> parseDottedList
char ')'
return x
parseQuoted :: Parser LispVal
parseQuoted = do
char '\''
x <- parseExpr
return $ List [Atom "quote", x]
parseString :: Parser LispVal
parseString = do
char '"'
x <- many (noneOf "\"")
char '"'
return $ String x
parseAtom :: Parser LispVal
parseAtom = do
first <- letter <|> symbol
rest <- many (letter <|> digit <|> symbol)
let atom = [first] ++ rest
return $
case atom of
"#t" -> Bool True
"#f" -> Bool False
otherwise -> Atom atom
parseNumber :: Parser LispVal
parseNumber = liftM (Number . read) $ many1 digit
data LispVal = Atom String
| List [LispVal]
| DottedList [LispVal] LispVal
| Number Integer
| String String
| Bool Bool
data LispError = NumArgs Integer [LispVal]
| TypeMismatch String LispVal
| Parser ParseError
| BadSpecialForm String LispVal
| NotFunction String String
| UnboundVar String String
| Default String
showError :: LispError -> String
showError (UnboundVar message varname) = message ++ ": " ++ varname
showError (BadSpecialForm message form) = message ++ ": " ++ show form
showError (NotFunction message func) = message ++ ": " ++ show func
showError (NumArgs expected found) = "Expected " ++ show expected ++ " args: found values " ++ unwordsList found
showError (TypeMismatch expected found) = "Invalid type: expected " ++ expected ++ ", found " ++ show found
showError (Parser parseErr) = "Parse error at " ++ show parseErr
|
mortum5/programming
|
haskell/usefull/parser/errorcheck.hs
|
mit
| 4,989
| 0
| 12
| 1,221
| 1,657
| 834
| 823
| 128
| 3
|
module Shakespeare.Ophelia.QQ (
gertrude
, module Shakespeare.Ophelia.Parser.VDOM.Types
, module Shakespeare.Ophelia.Parser.VDOM.Live
) where
import Language.Haskell.TH
import Language.Haskell.TH.Quote
import Shakespeare.Ophelia.Parser
import Shakespeare.Ophelia.Parser.VDOM.Live
import Shakespeare.Ophelia.Parser.VDOM.Types
import Text.Trifecta.Result
-- opheliaExp :: String -> Q Exp
-- opheliaExp s = do
-- rN <- parseVNodeS s
-- case rN of
-- Success vn -> lift vn
-- Failure fString -> fail $ show fString
-- ophelia :: QuasiQuoter
-- ophelia = QuasiQuoter opheliaExp undefined undefined undefined
-- | Parser from string to LiveVDom
liveGertrude :: String -> Q Exp
liveGertrude s = do
rN <- parseStringTrees parsePLiveVDom s
case rN of
Success vn -> if length vn > 1
then fail "One or more nodes can not be the main html. Maybe you're trying to use ophelia?"
else if length vn < 1
then fail "Unable to parse empty template"
else toLiveVDomTH $ vn !! 0
Failure fString -> fail $ show fString
-- | Quasiquoter used to parse HTML similar to hamlet
-- but allowing it to be rendered live
gertrude :: QuasiQuoter
gertrude = QuasiQuoter liveGertrude undefined undefined undefined
|
plow-technologies/shakespeare-dynamic
|
ghcjs-shakespeare-dynamic/src/Shakespeare/Ophelia/QQ.hs
|
mit
| 1,357
| 0
| 13
| 356
| 209
| 123
| 86
| 22
| 4
|
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE CPP #-}
module Yesod.Core.Dispatch
( -- * Quasi-quoted routing
parseRoutes
, parseRoutesNoCheck
, parseRoutesFile
, parseRoutesFileNoCheck
, mkYesod
, mkYesodWith
-- ** More fine-grained
, mkYesodData
, mkYesodSubData
, mkYesodDispatch
, mkYesodSubDispatch
-- *** Helpers
, getGetMaxExpires
-- ** Path pieces
, PathPiece (..)
, PathMultiPiece (..)
, Texts
-- * Convert to WAI
, toWaiApp
, toWaiAppPlain
, toWaiAppYre
, warp
, warpDebug
, warpEnv
, mkDefaultMiddlewares
, defaultMiddlewaresNoLogging
-- * WAI subsites
, WaiSubsite (..)
, WaiSubsiteWithAuth (..)
) where
import Prelude hiding (exp)
import Yesod.Core.Internal.TH
import Language.Haskell.TH.Syntax (qLocation)
import Web.PathPieces
import qualified Network.Wai as W
import Data.ByteString.Lazy.Char8 ()
import Data.Text (Text)
#if __GLASGOW_HASKELL__ < 710
import Data.Monoid (mappend)
#endif
import qualified Data.ByteString as S
import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString.Char8 as S8
import Data.ByteString.Builder (byteString, toLazyByteString)
import Network.HTTP.Types (status301, status307)
import Yesod.Routes.Parse
import Yesod.Core.Types
import Yesod.Core.Class.Yesod
import Yesod.Core.Class.Dispatch
import Yesod.Core.Internal.Run
import Safe (readMay)
import System.Environment (getEnvironment)
import qualified System.Random as Random
import Control.AutoUpdate (mkAutoUpdate, defaultUpdateSettings, updateAction, updateFreq)
import Yesod.Core.Internal.Util (getCurrentMaxExpiresRFC1123)
import Network.Wai.Middleware.Autohead
import Network.Wai.Middleware.AcceptOverride
import Network.Wai.Middleware.RequestLogger
import Network.Wai.Middleware.Gzip
import Network.Wai.Middleware.MethodOverride
import qualified Network.Wai.Handler.Warp
import System.Log.FastLogger
import Control.Monad.Logger
import Control.Monad (when)
import qualified Paths_yesod_core
import Data.Version (showVersion)
-- | Convert the given argument into a WAI application, executable with any WAI
-- handler. This function will provide no middlewares; if you want commonly
-- used middlewares, please use 'toWaiApp'.
toWaiAppPlain :: YesodDispatch site => site -> IO W.Application
toWaiAppPlain site = do
logger <- makeLogger site
sb <- makeSessionBackend site
getMaxExpires <- getGetMaxExpires
return $ toWaiAppYre YesodRunnerEnv
{ yreLogger = logger
, yreSite = site
, yreSessionBackend = sb
, yreGen = defaultGen
, yreGetMaxExpires = getMaxExpires
}
defaultGen :: IO Int
defaultGen = Random.getStdRandom Random.next
-- | Pure low-level function to construct a WAI application. Useful
-- when you need a non-standard way to run your app, or want to embed it
-- inside another app.
--
-- @since 1.4.29
toWaiAppYre :: YesodDispatch site => YesodRunnerEnv site -> W.Application
toWaiAppYre yre req =
case cleanPath site $ W.pathInfo req of
Left pieces -> sendRedirect site pieces req
Right pieces -> yesodDispatch yre req
{ W.pathInfo = pieces
}
where
site = yreSite yre
sendRedirect :: Yesod master => master -> [Text] -> W.Application
sendRedirect y segments' env sendResponse =
sendResponse $ W.responseLBS status
[ ("Content-Type", "text/plain")
, ("Location", BL.toStrict $ toLazyByteString dest')
] "Redirecting"
where
-- Ensure that non-GET requests get redirected correctly. See:
-- https://github.com/yesodweb/yesod/issues/951
status
| W.requestMethod env == "GET" = status301
| otherwise = status307
dest = joinPath y (resolveApproot y env) segments' []
dest' =
if S.null (W.rawQueryString env)
then dest
else dest `mappend`
byteString (W.rawQueryString env)
-- | Same as 'toWaiAppPlain', but provides a default set of middlewares. This
-- set may change with future releases, but currently covers:
--
-- * Logging
--
-- * GZIP compression
--
-- * Automatic HEAD method handling
--
-- * Request method override with the _method query string parameter
--
-- * Accept header override with the _accept query string parameter
toWaiApp :: YesodDispatch site => site -> IO W.Application
toWaiApp site = do
logger <- makeLogger site
toWaiAppLogger logger site
toWaiAppLogger :: YesodDispatch site => Logger -> site -> IO W.Application
toWaiAppLogger logger site = do
sb <- makeSessionBackend site
getMaxExpires <- getGetMaxExpires
let yre = YesodRunnerEnv
{ yreLogger = logger
, yreSite = site
, yreSessionBackend = sb
, yreGen = defaultGen
, yreGetMaxExpires = getMaxExpires
}
messageLoggerSource
site
logger
$(qLocation >>= liftLoc)
"yesod-core"
LevelInfo
(toLogStr ("Application launched" :: S.ByteString))
middleware <- mkDefaultMiddlewares logger
return $ middleware $ toWaiAppYre yre
-- | A convenience method to run an application using the Warp webserver on the
-- specified port. Automatically calls 'toWaiApp'. Provides a default set of
-- middlewares. This set may change at any point without a breaking version
-- number. Currently, it includes the middlewares installed by 'mkDefaultMiddlewares'.
--
-- If you need more fine-grained control of middlewares, please use 'toWaiApp'
-- directly.
--
-- Since 1.2.0
warp :: YesodDispatch site => Int -> site -> IO ()
warp port site = do
logger <- makeLogger site
toWaiAppLogger logger site >>= Network.Wai.Handler.Warp.runSettings (
Network.Wai.Handler.Warp.setPort port $
Network.Wai.Handler.Warp.setServerName serverValue $
Network.Wai.Handler.Warp.setOnException (\_ e ->
when (shouldLog' e) $
messageLoggerSource
site
logger
$(qLocation >>= liftLoc)
"yesod-core"
LevelError
(toLogStr $ "Exception from Warp: " ++ show e))
Network.Wai.Handler.Warp.defaultSettings)
where
shouldLog' = Network.Wai.Handler.Warp.defaultShouldDisplayException
serverValue :: S8.ByteString
serverValue = S8.pack $ concat
[ "Warp/"
, Network.Wai.Handler.Warp.warpVersion
, " + Yesod/"
, showVersion Paths_yesod_core.version
, " (core)"
]
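-- A minimal usage sketch for 'warp' (illustrative only: the site type, route and
-- handler below are assumptions taken from the standard Yesod quick-start, and need
-- the usual TemplateHaskell/QuasiQuotes machinery from the wider yesod package):
--
-- > data HelloWorld = HelloWorld
-- > mkYesod "HelloWorld" [parseRoutes| / HomeR GET |]
-- > instance Yesod HelloWorld
-- > getHomeR :: Handler Html
-- > getHomeR = defaultLayout [whamlet|Hello, World!|]
-- >
-- > main :: IO ()
-- > main = warp 3000 HelloWorld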
-- | A default set of middlewares.
--
-- Since 1.2.0
mkDefaultMiddlewares :: Logger -> IO W.Middleware
mkDefaultMiddlewares logger = do
logWare <- mkRequestLogger def
{ destination = Network.Wai.Middleware.RequestLogger.Logger $ loggerSet logger
, outputFormat = Apache FromSocket
}
return $ logWare . defaultMiddlewaresNoLogging
-- | All of the default middlewares, excluding logging.
--
-- Since 1.2.12
defaultMiddlewaresNoLogging :: W.Middleware
defaultMiddlewaresNoLogging = acceptOverride . autohead . gzip def . methodOverride
-- | Deprecated synonym for 'warp'.
warpDebug :: YesodDispatch site => Int -> site -> IO ()
warpDebug = warp
{-# DEPRECATED warpDebug "Please use warp instead" #-}
-- | Runs your application using default middlewares (i.e., via 'toWaiApp'). It
-- reads port information from the PORT environment variable, as used by tools
-- such as Keter and the FP Complete School of Haskell.
--
-- Note that the exact behavior of this function may be modified slightly over
-- time to work correctly with external tools, without a change to the type
-- signature.
warpEnv :: YesodDispatch site => site -> IO ()
warpEnv site = do
env <- getEnvironment
case lookup "PORT" env of
Nothing -> error "warpEnv: no PORT environment variable found"
Just portS ->
case readMay portS of
Nothing -> error $ "warpEnv: invalid PORT environment variable: " ++ show portS
Just port -> warp port site
-- | Default constructor for 'yreGetMaxExpires' field. Low level
-- function for simple manual construction of 'YesodRunnerEnv'.
--
-- @since 1.4.29
getGetMaxExpires :: IO (IO Text)
getGetMaxExpires = mkAutoUpdate defaultUpdateSettings
{ updateAction = getCurrentMaxExpiresRFC1123
, updateFreq = 24 * 60 * 60 * 1000000 -- Update once per day
}
|
psibi/yesod
|
yesod-core/Yesod/Core/Dispatch.hs
|
mit
| 8,654
| 0
| 18
| 2,047
| 1,554
| 886
| 668
| 172
| 3
|
import Control.Monad
-- so the idea is to follow wiki
-- http://en.wikipedia.org/wiki/Fibonacci_number#Recognizing_Fibonacci_numbers
-- and this http://math.stackexchange.com/questions/9999/checking-if-a-number-is-a-fibonacci-or-not
-- n is fibo if (5*n^2 + 4) or (5*n^2 - 4) is perfect square
-- for square test check sherlock and squares implementation
-- ternary operator
data Cond a = a :? a
infixl 0 ?
infixl 1 :?
(?) :: Bool -> Cond a -> a
True ? (x :? _) = x
False ? (_ :? y) = y
isqrt 0 = 0
isqrt 1 = 1
isqrt n = head $ dropWhile (\x -> x*x > n) $ iterate (\x -> (x + n `div` x) `div` 2) (n `div` 2)
isSquare :: Integer -> Bool
isSquare x = sr * sr == x where sr = isqrt x
isFibo n = if isSquare (t - 4) || isSquare (t + 4) then "IsFibo" else "IsNotFibo"
where t = 5 * (n ^ 2)
main :: IO ()
main = do
n <- readLn :: IO Int
list <- replicateM n getLine
-- must be Integer, not Int, or some values will overflow
let numbers = map read list :: [Integer]
-- print numbers
let ans = map isFibo numbers
mapM_ putStrLn ans
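-- A quick sanity check over a few small values (illustrative only; 'checkSmall' is
-- not part of the original solution). Expected: 1, 2, 3, 5 and 8 map to "IsFibo",
-- while 4, 6 and 7 map to "IsNotFibo".
checkSmall :: [(Integer, String)]
checkSmall = [ (n, isFibo n) | n <- [1, 2, 3, 4, 5, 6, 7, 8] ]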
|
mgrebenets/hackerrank
|
alg/warmup/is-fibo.hs
|
mit
| 1,049
| 0
| 12
| 228
| 357
| 190
| 167
| 21
| 2
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
module Grammar.Greek.Morph.Clitic.Types where
import Prelude hiding (Word)
import GHC.Generics (Generic)
import Data.Data
import Data.Serialize (Serialize)
import Grammar.Greek.Morph.Types
import Grammar.Greek.Script.Types
import Grammar.Greek.Script.Word
data WordClitic = WordClitic
CoreWord
(Maybe WordAccent)
Enclitic
Proclitic
Crasis
Elision
MarkPreservation
DiaeresisConvention
Capitalization
HasWordPunctuation
deriving (Eq, Ord, Show, Generic, Data, Typeable)
instance Serialize WordClitic
|
ancientlanguage/haskell-analysis
|
greek-morph/src/Grammar/Greek/Morph/Clitic/Types.hs
|
mit
| 590
| 0
| 8
| 79
| 133
| 79
| 54
| 23
| 0
|
{-# LANGUAGE TupleSections #-}
module Hakyll.Web.Template.Numeric
( byNumericFieldAsc
, byNumericFieldDesc
) where
import Hakyll
import Data.List as L
import Data.Maybe (fromMaybe)
import Data.Ord (comparing)
import Text.Read (readMaybe)
byNumericFieldAsc :: MonadMetadata m => String -> [Item a] -> m [Item a]
byNumericFieldAsc key = sortOnM $ \i -> do
maybeInt <- getMetadataField (itemIdentifier i) key
return $ fromMaybe (0 :: Int) (readMaybe =<< maybeInt)
where
sortOnM :: (Monad m, Ord k) => (a -> m k) -> [a] -> m [a]
sortOnM f xs = map fst . L.sortBy (comparing snd) <$> mapM (\ x -> (x,) <$> f x) xs
byNumericFieldDesc :: MonadMetadata m => String -> [Item a] -> m [Item a]
byNumericFieldDesc key is = reverse <$> byNumericFieldAsc key is
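-- A usage sketch inside a Hakyll 'Compiler' (illustrative only; the "weight"
-- metadata field and the "posts/*" pattern are assumptions):
--
-- > ordered <- byNumericFieldAsc "weight" =<< loadAll "posts/*"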
|
wenkokke/sf
|
hs/Hakyll/Web/Template/Numeric.hs
|
mit
| 778
| 0
| 12
| 152
| 311
| 164
| 147
| 17
| 1
|
module Writer (
printByteString
, writeByteString
, writeByteStrings
, writeByteStringPairs
) where
import Data.ByteString
import Data.ByteString.Char8
printByteString :: Maybe ByteString -> IO ()
printByteString (Just s) = (Data.ByteString.Char8.putStrLn s)
printByteString Nothing = error "Can't display nothing"
writeByteString :: String -> Maybe ByteString -> IO ()
writeByteString f (Just s) = Data.ByteString.writeFile f s
writeByteString f Nothing = error "Can't write nothing"
writeByteStringPairs :: String -> String -> [(Maybe ByteString, Maybe ByteString)] -> IO ()
writeByteStringPairs f1 f2 (((Just b1), (Just b2)):bs) = do
Data.ByteString.Char8.appendFile f1 b1
Data.ByteString.Char8.appendFile f2 b2
writeByteStringPairs f1 f2 bs
writeByteStringPairs f1 f2 ((_, _):bs) = writeByteStringPairs f1 f2 bs
writeByteStringPairs f1 f2 [] = return ()
writeByteStrings :: String -> [Maybe ByteString] -> IO ()
writeByteStrings f b = Prelude.head (Prelude.map (writeByteString f) b)
|
chris-wood/ccnx-pktgen
|
src/Writer.hs
|
mit
| 1,022
| 0
| 10
| 162
| 353
| 181
| 172
| 22
| 1
|
module Proxy.AI.AIControl where
import Control.Concurrent.MVar
import Proxy.Server.Messages
import Settings
type FrameCalculation aiState = GameInfo -> GameState -> [GameState] -> Maybe aiState -> ([Command],Maybe aiState)
type StartCalculation = GameInfo -> GameInfo
aiThread :: MVar GameState -> MVar [Command] -> GameInfo -> FrameCalculation a -> IO ()
aiThread stateVar commVar onStartData onFrame = aiLoop [] Nothing
where aiLoop history aiState = do
gameState <- takeMVar stateVar
let (commands, newAIState) = onFrame onStartData gameState history aiState
putMVar commVar commands
let newHistory = history `seq` (take Settings.historyLength (gameState : history))
aiLoop newHistory newAIState
|
mapinguari/SC_HS_Proxy
|
src/Proxy/AI/AIControl.hs
|
mit
| 754
| 0
| 16
| 144
| 227
| 117
| 110
| 14
| 1
|
{-# LANGUAGE RecordWildCards #-}
import Data.Foldable (for_)
import Test.Hspec (Spec, describe, it, shouldBe)
import Test.Hspec.Runner (configFastFail, defaultConfig, hspecWith)
import RailFenceCipher (encode, decode)
main :: IO ()
main = hspecWith defaultConfig {configFastFail = True} specs
specs :: Spec
specs = do
describe "encode" $ for_ encodeCases testE
describe "decode" $ for_ decodeCases testD
where
testE Case{..} = it description $ encode key text `shouldBe` expected
testD Case{..} = it description $ decode key text `shouldBe` expected
data Case = Case { description :: String
, key :: Int
, text :: String
, expected :: String
}
encodeCases :: [Case]
encodeCases = [ Case { description = "encode with two rails"
, key = 2
, text = "XOXOXOXOXOXOXOXOXO"
, expected = "XXXXXXXXXOOOOOOOOO"
}
, Case { description = "encode with three rails"
, key = 3
, text = "WEAREDISCOVEREDFLEEATONCE"
, expected = "WECRLTEERDSOEEFEAOCAIVDEN"
}
, Case { description = "encode with ending in the middle"
, key = 4
, text = "EXERCISES"
, expected = "ESXIEECSR"
}
]
decodeCases :: [Case]
decodeCases = [ Case { description = "decode with three rails"
, key = 3
, text = "TEITELHDVLSNHDTISEIIEA"
, expected = "THEDEVILISINTHEDETAILS"
}
, Case { description = "decode with five rails"
, key = 5
, text = "EIEXMSMESAORIWSCE"
, expected = "EXERCISMISAWESOME"
}
, Case { description = "decode with six rails"
, key = 6
, text = "133714114238148966225439541018335470986172518171757571896261"
, expected = "112358132134558914423337761098715972584418167651094617711286"
}
]
-- edc42842b767d721fc307033b3d5b04c2c6ac8ce
|
exercism/xhaskell
|
exercises/practice/rail-fence-cipher/test/Tests.hs
|
mit
| 2,435
| 0
| 9
| 1,075
| 427
| 255
| 172
| 43
| 1
|
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
module Views.Pages.Comics
( comicsPageView
, comicsPageContentView
) where
import BasicPrelude
import Text.Blaze (AttributeValue)
import Text.Blaze.Html5 (Html, toValue, (!))
import qualified Text.Blaze.Html5 as H
import qualified Text.Blaze.Html5.Attributes as A
import Models.Comic (Comic)
import Models.Pagination (Pagination)
import Routes
( Route
, RouteUrl(ComicsUrl)
, PaginationQuery(..)
)
import Views.Components.ComicsList (comicsListView)
import Views.Components.ResultsPagination (resultsPaginationView)
import Views.Layout (layoutView)
comicsPageView :: Route -> Text -> Pagination -> [Comic] -> Html
comicsPageView currentRoute pageTitle pagination comics =
layoutView currentRoute pageTitle
(comicsPageContentView pagination comics)
comicsPageContentView :: Pagination -> [Comic] -> Html
comicsPageContentView pagination comics = do
H.div ! A.class_ "page-header" $ H.h1 "Comics"
resultsPaginationView makePaginationUrl pagination
comicsListView comics
makePaginationUrl :: Int -> AttributeValue
makePaginationUrl _offset =
toValue (ComicsUrl PaginationQuery { offset=Just _offset })
|
nicolashery/example-marvel-haskell
|
Views/Pages/Comics.hs
|
mit
| 1,204
| 0
| 10
| 157
| 295
| 170
| 125
| 31
| 1
|
{-
**************************************************************
* Filename : Reversal.hs *
* Author : Markus Forsberg *
* d97forma@dtek.chalmers.se *
* Last Modified : 7 July, 2001 *
* Lines : 28 *
**************************************************************
-}
module FST.Reversal ( reversal -- Reverse an automaton.
) where
import FST.Automaton
import Data.Array
reversal :: Eq a => Automaton a -> Automaton a
reversal automaton = reverseTrans (rename (transitionTable automaton)
(alphabet automaton)
(finals automaton)
(initials automaton)
(firstState automaton))
reverseTrans :: Eq a => Automaton a -> Automaton a
reverseTrans automaton = let bs = (firstState automaton, lastState automaton)
table = assocs $ accumArray (\tl1 tl2 -> tl1 ++ tl2) []
bs [(s1,[(a,s)]) | (s,tl) <- transitionTable automaton,
(a,s1) <- tl]
in construct bs table (alphabet automaton) (initials automaton) (finals automaton)
|
SAdams601/ParRegexSearch
|
test/fst-0.9.0.1/FST/Reversal.hs
|
mit
| 1,460
| 0
| 14
| 673
| 265
| 138
| 127
| 15
| 1
|
{-# LANGUAGE OverloadedStrings, BangPatterns#-}
module Types where
import Data.Time
import System.Locale
import Prelude hiding (FilePath)
import Safe
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as BC
import Data.Csv
import Data.Text
import System.Directory
import Filesystem
import Filesystem.Path
data OnpingTagHistory = OnpingTagHistory {
time :: Maybe UTCTime,
pid:: Maybe Int,
val :: Maybe Double
} deriving (Read, Show, Eq,Ord)
instance ToNamedRecord OnpingTagHistory where
toNamedRecord (OnpingTagHistory time pid val) = namedRecord [ "time" .= (encodeArchiveTime time),"pid" .= pid , "val" .= val]
instance ToRecord OnpingTagHistory where
toRecord (OnpingTagHistory t p v) = record [toField (encodeArchiveTime t), toField v]
data NameAndLine = NameAndLine { nlName::Text, nlLine::Text}
data BuildableObject = B_OTH !OnpingTagHistory
deriving (Read,Show,Eq,Ord)
type Buildable a = NameAndLine -> Either String (a,Text)
newtype FileFilter = FileFilter { getFileFilter :: (ParamFile -> Maybe ParamFile)}
newtype StartTime a = StartTime { getStartTime :: a}
deriving (Eq,Read,Show)
newtype EndTime a = EndTime { getEndTime :: a }
deriving (Eq,Read,Show)
-- | Simple Parsers for Time and Value
parseFileDate :: String -> Maybe UTCTime
parseFileDate = (parseTime defaultTimeLocale "%F.txt")
parseArchiveTime::Text -> Maybe UTCTime
parseArchiveTime = (parseTime defaultTimeLocale "%F %X").unpack.fst.(breakOn ",")
parseArchiveTime':: String -> Maybe UTCTime
parseArchiveTime' = (parseTime defaultTimeLocale "%F %X")
encodeArchiveTime :: Maybe UTCTime -> String
encodeArchiveTime (Just t) = (formatTime defaultTimeLocale "%F %X") t
encodeArchiveTime Nothing = ""
parseArchiveValue :: Text -> Maybe Double
parseArchiveValue = readMay.unpack.strip.snd.(breakOnEnd ",")
parseArchiveValue' :: String -> Maybe Double
parseArchiveValue' = readMay
parsePidValue :: Text -> Maybe Int
parsePidValue = readMay.unpack
-- | Helper Type to pull the date out to the front for sorting against
-- the File Touch Time
data DatedFile = DatedFile { touchDate :: UTCTime,
touchFile :: FilePath
}
deriving (Eq,Show,Ord)
-- | Newtypes for Location and PID folders
newtype LocationPath = LocationPath {getLocationPath :: DatedFile}
deriving (Eq,Show,Ord)
newtype ParamPath = ParamPath {getParamPath :: DatedFile}
deriving (Eq,Show,Ord)
newtype ParamFile = ParamFile {getParamFile :: DatedFile}
deriving (Eq,Show,Ord)
-- | Mongo Config options
data MongoConfig = MongoConfig {
mongoHost :: String
,mongoDB :: Text
,mongoCollection :: Text
} deriving (Eq,Read,Show)
data ConfigOptions = Test | Help | Run RunConfig | Fail
data OS = Windows | Linux
data RunConfig = RunConfig {
startDate :: Maybe UTCTime
,endDate :: Maybe UTCTime
,archivePath :: FilePath}
deriving (Show)
|
smurphy8/dbTransport
|
Types.hs
|
mit
| 3,112
| 2
| 10
| 700
| 816
| 464
| 352
| -1
| -1
|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
module Buchhaltung.Import
where
import Buchhaltung.Common
import Buchhaltung.Uniques
import Control.Monad.RWS.Strict
import qualified Data.HashMap.Strict as M
import Data.List
import Data.Ord
import qualified Data.Text as T
import qualified Data.Text.IO as T
import Data.Time.LocalTime
import Hledger.Data
import Hledger.Read
import System.IO
import qualified System.IO.Strict as S
import Text.ParserCombinators.Parsec
import Text.Printf
assertParseEqual' :: (Either ParseError a) -> String
assertParseEqual' = const "a"
-- | convert a batch of importedEntries to Ledger Transactions
fillTxn
:: (MonadError Msg m, MonadReader (Options User Config env) m) =>
T.Text -- ^ current time string
-> ImportedEntry -> m FilledEntry
fillTxn datetime e@(ImportedEntry t postings source) = do
tag <- askTag
todo <- readConfig cTodoAccount
postings' <- mapM toPosting postings
let amount = sum $ pamount <$> postings'
tx = injectSource tag source $
t{tcomment = "generated by 'buchhaltung' "
<> datetime <> com (tcomment t)
,tpostings = postings' ++
if isZeroMixedAmount amount then []
else
[ nullposting
{paccount= todo <> ":" <> todoAcc
(isNegativeMixedAmount amount)
,pamount = missingmixedamt }
-- leaves amount missing. (alternative: use
-- balanceTransaction Nothing)
]}
todoAcc Nothing = "Mixed"
todoAcc (Just False) = "Negative"
todoAcc (Just True) = "Positive"
return $ e{ieT = either (const tx) id $ balanceTransaction Nothing tx
-- try to balance transaction. leave missing amount if
-- this fails, which should never happen
, iePostings=()}
where
com "" = ""
com b = " (" <> b <> ")"
toPosting (accId, am, suff, negateQ) = do
acc <- lookupErrM "Account not configured" M.lookup accId
=<< askAccountMap
return nullposting{paccount= acc <> maybe "" (":" <>) suff
,pamount = (if negateQ
then Mixed . fmap negate . amounts else id)
$ mamountp' $ T.unpack am }
-- use this to debug amount parsing: mamountp'
-- | read entries from handle linewise, process and add to ledger
importCat ::
Maybe FilePath
-- ^ File to check for already processed transactions
-> CommonM env [ImportedEntry]
-> CommonM env Journal
importCat journalPath ientries = do
oldJ <- liftIO $ maybe (return mempty)
(fmap (either error id) . readJournalFile definputopts)
journalPath
datetime <- liftIO $ fshow <$> getZonedTime
entries <- mapM (fillTxn datetime) =<< ientries
newTxns <- addNewEntriesToJournal entries oldJ
liftIO $ hPutStrLn stderr $ printf "found %d new of %d total transactions"
(length newTxns - length (jtxns oldJ)) $ length entries
comp <- dateAmountSource <$> askTag
return oldJ{jtxns = sortBy comp $ ieT <$> newTxns}
dateAmountSource
:: ImportTag -> Transaction -> Transaction -> Ordering
dateAmountSource tag a b =
comparing tdate a b
<> comparing (pamount . head . tpostings) a b
<> comparing (fmap wSource . extractSource tag) a b
importWrite :: CommonM env [ImportedEntry] -> CommonM env ()
importWrite ientries = do
journalPath <- absolute =<< readLedger imported
liftIO . writeJournal journalPath
=<< importCat (Just journalPath) ientries
importReadWrite
:: Importer env -> FullOptions (env, Maybe Version) -> FilePath -> ErrorT IO ()
importReadWrite conv options file =
withFileM file ReadMode $ \handle ->
void $ runRWST (importWrite $ conv $ Right handle) options ()
writeJournal :: FilePath -> Journal -> IO ()
writeJournal journalPath = writeFile journalPath . showTransactions
-- testCat :: Maybe FilePath -- ^ journal
-- -> FilePath -- ^ import
-- -> CustomImport
-- -> Bool -- ^ overwrite
-- -> IO Journal
-- testCat journalPath testfile ci overwrite =
-- withFile testfile ReadMode $ \h -> do
-- j <- importCat def journalPath ci h
-- when overwrite $ maybe mempty (flip writeJournal j) journalPath
-- return j
testRaw _ testfile (f,chH) = withFile testfile ReadMode (\h ->
maybe (return ()) ($ h) chH >> S.hGetContents h >>= return . show . f)
-- main = readFile "/tmp/a" >>=
-- addNew "VISA" [] "/home/data/finanzen/jo/bankimport.dat" . lines
|
johannesgerer/buchhaltung
|
src/Buchhaltung/Import.hs
|
mit
| 4,693
| 0
| 21
| 1,250
| 1,139
| 594
| 545
| 88
| 6
|
-- | Exercises for the slides here:
--
-- http://www.scs.stanford.edu/16wi-cs240h/slides/basics-slides.html
module Basics where
data Move = Rock | Paper | Scissors
deriving (Eq, Read, Show, Enum, Bounded)
parseMove :: String -> Maybe Move
parseMove "Rock" = Just Rock
parseMove "Paper" = Just Paper
parseMove "Scissors" = Just Scissors
parseMove _ = Nothing
-- | But you can also use `reads`.
parseMove' :: String -> Maybe Move
parseMove' m =
case reads m of
[(mv, "")] -> Just mv
_ -> Nothing
-- | Note that the definitions of @parseMove@ and @parseMove'@ are not
-- equivalent: @reads@ skips leading whitespace, but the empty-rest pattern
-- still rejects any trailing characters. The definition below additionally
-- accepts trailing blanks.
parseMove'' :: String -> Maybe Move
parseMove'' str | [(m, rest)] <- reads str, ok rest = Just m
| otherwise = Nothing
where ok = all (`elem` " \r\n")
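-- | A minimal sketch (a hypothetical helper, not part of the original
-- exercises) contrasting the three parsers on whitespace handling.
parseMoveExamples :: [Maybe Move]
parseMoveExamples =
  [ parseMove   "Rock "   -- Nothing: exact string match only
  , parseMove'  " Rock"   -- Just Rock: 'reads' skips leading whitespace
  , parseMove'  "Rock\n"  -- Nothing: leftover input fails the empty-rest pattern
  , parseMove'' "Rock\n"  -- Just Rock: trailing blanks are accepted
  ]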
-- * Strict evaluation.
factorial n0 = loop 1 n0
where loop acc 0 = acc
loop acc m = loop (acc * m) (m - 1)
-- Can we define a strict version of factorial using @$!@?
factorialStrict n0 = loop 1 n0
where loop acc 0 = acc
loop acc m = loop ((acc *) $! m) (m - 1)
-- | Is the version given by the instructor better?
--
-- Indeed it is. In my version I only evaluate @m@ to weak head normal form,
-- which does nothing, since it is already a number. The version below
-- instead reduces @acc * n@ to WHNF, which prevents the build-up of thunks!
factorialStrict' n0 = loop 1 n0
where loop acc n | n > 1 = (loop $! acc * n) (n - 1)
| otherwise = acc
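-- | A minimal sketch (not part of the original exercises): the same
-- accumulator strictness expressed with 'seq' instead of @$!@, forcing the
-- product before the recursive call so no thunk chain builds up.
factorialSeq :: Integer -> Integer
factorialSeq n0 = loop 1 n0
  where loop acc n | n > 1     = let acc' = acc * n
                                 in acc' `seq` loop acc' (n - 1)
                   | otherwise = acc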
|
capitanbatata/functional-systems-in-haskell
|
fsh-exercises/src/Basics.hs
|
mit
| 1,618
| 0
| 11
| 412
| 413
| 216
| 197
| 26
| 2
|
{-# LANGUAGE OverloadedStrings,
ScopedTypeVariables,
DeriveDataTypeable #-}
module DatabaseTests where
import Prelude hiding (lookup)
import Test.Framework (Test, testGroup)
import Test.Framework.Providers.HUnit (testCase)
import Test.Framework.Providers.QuickCheck2 (testProperty)
import Test.HUnit hiding (Test)
import Test.QuickCheck hiding (label)
import Test.QuickCheck.Monadic
import qualified Test.QuickCheck.Monadic as Q
import Data.Maybe
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Monoid (mempty)
import Data.Text ()
import qualified Database.MongoDB as Mongo
import Control.Monad
import Control.Exception hiding (catch)
import Data.Typeable
import LIO
import LIO.Labeled
import LIO.TCB
import LIO.DCLabel
import LIO.DCLabel.Instances ()
import Hails.Data.Hson
import Hails.Data.Hson.TCB
import Hails.Data.Hson.Instances
import Hails.PolicyModule
import Hails.PolicyModule.TCB
import Hails.Database.Core
import Hails.Database.TCB
import Hails.Database.Query
import System.Posix.Env (setEnv)
import System.IO.Unsafe
tests :: [Test]
tests = [
testGroup "withPolicy tests" [
testCase "Succeed loading existent policy module"
test_withPolicyModuleP_ok
, testCase "Fail loading non-existent policy module"
test_withPolicyModuleP_fail
]
, testGroup "Label database/collection-set tests" [
testProperty "Can label database with bounded label"
prop_labelDatabase_ok
, testProperty "Cannot label database with unbounded label"
prop_setDatabaseLabel_fail
, testProperty "Cannot label collection-set with unbounded label"
prop_setCollectionSetLabel_fail
]
, testGroup "Creating collections" [
testProperty "Create ok with empty policies and bounded labels"
prop_createCollection_empty_ok
, testProperty "Create fail with empty policies and unbounded labels"
prop_createCollection_empty_fail
]
, testGroup "Type check documents against policy" [
testProperty "Type check ok when all fields exist and well-formed"
typeCheckDocument_all_named_exist
, testProperty "Type check fail when all fields don't exist and well-formed 1"
typeCheckDocument_all_named_exist_fail1
, testProperty "Type check fail when all fields don't exist and well-formed 2"
typeCheckDocument_all_named_exist_fail2
]
, testGroup "Policy application" [
testProperty "Simple, all-public policy"
test_applyCollectionPolicyP_allPub
, testProperty "Simple, all-public policy. Field is pre-labeled to bottom"
test_applyCollectionPolicyP_allPub_field_bottom_fail
, testProperty "Document label is above the collection clearance"
test_applyCollectionPolicyP_allPub_bad_doc_policy_fail
, testProperty "Field label is above the collection clearance"
test_applyCollectionPolicyP_allPub_bad_field_policy_fail
, testProperty "Simple data-dependent policy"
test_applyCollectionPolicyP_label_by_field
]
, testGroup "Insert" [
testProperty "Simple, all-public policy insert"
test_basic_insert
, testProperty "Simple, insert with policy on document and field"
test_basic_insert_with_pl
, testProperty "Test insert after taint: failure"
test_basic_insert_fail
]
, testGroup "Find" [
testProperty "Simple, all-public find"
test_basic_find
, testProperty "Simple, find with policy on document and field"
test_basic_find_with_pl
]
, testGroup "Save" [
testProperty "Simple, all-public save"
test_basic_save
, testProperty "Simple, save with policy on document and field"
test_basic_save_with_pl
]
, testGroup "Labeled insert" [
testProperty "Simple, all-public policy insert of already-labeled document"
test_basic_labeled_insert
, testProperty "Simple, all-public policy insert of already-labeled document fail"
test_basic_labeled_insert_fail
, testProperty "Simple, insert with policy on already-labeled document and field"
test_basic_labeled_insert_with_pl
, testProperty "Simple, fail insert with policy on already-labeled document and field"
test_basic_labeled_insert_with_pl_fail
]
, testGroup "Labeled save" [
testProperty "Simple, all-public policy save of already-labeled document"
test_basic_labeled_save
, testProperty "Simple, all-public policy save of already-labeled document fail"
test_basic_labeled_save_fail
, testProperty "Simple, save with policy on already-labeled document and field"
test_basic_labeled_save_with_pl
]
]
--
-- Pretend app running on behalf of some user
--
-- | Default clearance
defClr :: DCLabel
defClr = ("A" :: String) %% True
-- | Initial clearance is set to some user's principal
doEvalDC :: DC a -> IO a
doEvalDC act = evalLIO act $
LIOState { lioLabel = dcPublic, lioClearance = defClr }
unlabelTCB :: Labeled DCLabel a -> a
unlabelTCB (LabeledTCB _ a) = a
--
-- Define test policy module
--
-- | Empty registered policy module
newtype TestPM1 = TestPM1TCB DCPriv deriving (Show, Typeable)
instance PolicyModule TestPM1 where
initPolicyModule p = return (TestPM1TCB p)
withTestPM1 :: (TestPM1 -> DBAction a) -> DC a
withTestPM1 = withPolicyModule
-- | Policy module that is not registered
data TestPM1Fake = TestPM1FakeTCB deriving Typeable
instance PolicyModule TestPM1Fake where
initPolicyModule _ = return TestPM1FakeTCB
withTestPM1Fake :: (TestPM1Fake -> DBAction a) -> DC a
withTestPM1Fake = withPolicyModule
--
-- Create policy module configuration file
--
-- | DB config file path
dbConfFile :: FilePath
dbConfFile = "/tmp/test_hails_database.conf"
testPM1Principal :: String
testPM1Principal = '_' : mkName (TestPM1TCB undefined)
-- | TestPM1's privileges
testPM1Priv :: DCPriv
testPM1Priv = PrivTCB . toCNF $ testPM1Principal
-- | Register TestPM1 and TestPM2 in the database configuration file
mkDBConfFile :: IO ()
mkDBConfFile = do
writeFile dbConfFile (unlines [show tpm1, show tpm2])
setEnv "DATABASE_CONFIG_FILE" dbConfFile False
where tpm1,tpm2 :: (String, String)
tpm1 = (mkName (TestPM1TCB undefined), "testPM1_db")
tpm2 = (mkName (TestPM2TCB undefined), "testPM2_db")
mkName :: PolicyModule pm => pm -> TypeName
mkName x = tyConPackage tp ++ ":" ++ tyConModule tp ++ "." ++ tyConName tp
where tp = typeRepTyCon $ typeOf x
constCatch :: a -> SomeException -> a
constCatch a _ = a
--
-- withPolicy tests
--
-- | Test that the loading of the TestPM1 policy module does not throw an
-- exception
test_withPolicyModuleP_ok :: Assertion
test_withPolicyModuleP_ok = do
mkDBConfFile
doEvalDC . withTestPM1 . const $ return ()
-- | Test that the loading of the TestPM1Fake policy module throws an
-- exception
test_withPolicyModuleP_fail :: Assertion
test_withPolicyModuleP_fail = do
mkDBConfFile
(r, _) <- tryDC $ withTestPM1Fake $ \_ -> return ()
case r of
Left _ -> return ()
Right _ -> assertFailure "withPolicyModule should fail with non-existent DB"
--
-- Testing label database
--
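-- | Run a monadic QuickCheck property in the 'DC' monad as a plain
-- 'Property' by evaluating the underlying computation with 'doEvalDC'
-- ('unsafePerformIO' is used to embed the result in the pure 'Property').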
monadicDC :: PropertyM DC a -> Property
monadicDC (MkPropertyM m) =
property $ unsafePerformIO `liftM` doEvalDC `liftM` m f
where f = const . return . return . property $ True
-- | As if done in 'initPolicyModule' without bracket
initTestPM1 :: PMAction a -> DC a
initTestPM1 act = do
ioTCB mkDBConfFile
withTestPM1 . const . unPMActionTCB $
withClearanceP' testPM1Priv $ act
where withClearanceP' priv io = do
c <- liftLIO $ getClearance
let lpriv = (%%) (privDesc priv) (privDesc priv) `lub` c
liftLIO $ setClearanceP priv lpriv
res <- io
c' <- liftLIO $ getClearance
liftLIO $ setClearanceP priv (downgradeP priv c' `lub` c)
return res
-- | Execute a monadic quickcheck action against policy module TestPM1
monadicPM1 :: (DCPriv -> PropertyM PMAction a) -> Property
monadicPM1 g =
let (MkPropertyM m) = g testPM1Priv
in property $ unsafePerformIO `liftM` doEvalDC
`liftM` initTestPM1
`liftM` m f
where f = const . return . return . property $ True
-- | Execute a monadic QuickCheck action against policy module TestPM1,
-- converting an exception in the underlying action into a passing property
-- (used for the negative tests)
monadicPM1_fail :: (DCPriv -> PropertyM PMAction a) -> Property
monadicPM1_fail g =
let (MkPropertyM m) = g testPM1Priv
in property $ unsafePerformIO `liftM` doEvalDC
`liftM` initTestPM1'
`liftM` m f
where f = const . return . return . property $ True
initTestPM1' act = (initTestPM1 act)
`catch` (constCatch $ return (property True))
--
-- Label database and collection-set
--
-- | Can label database with label bounded by current label and clearance
prop_labelDatabase_ok :: Property
prop_labelDatabase_ok = monadicPM1 $ \priv ->
forAllM arbitrary $ \ldb ->
forAllM arbitrary $ \lcol -> do
l <- run $ liftDB $ liftLIO getLabel
c <- run $ liftDB $ liftLIO $ getClearance
pre $ canFlowToP priv l ldb && ldb `canFlowTo` c
pre $ canFlowToP priv l lcol && lcol `canFlowTo` c
run $ labelDatabaseP priv ldb lcol
Q.assert True
-- | Cannot label database with label outside current label/clearance
prop_setDatabaseLabel_fail :: Property
prop_setDatabaseLabel_fail = monadicPM1_fail $ \priv -> do
forAllM arbitrary $ \ldb -> do
l <- run $ liftDB $ liftLIO getLabel
c <- run $ liftDB $ liftLIO getClearance
pre . not $ canFlowToP priv l ldb && ldb `canFlowTo` c
run $ setDatabaseLabelP priv ldb
Q.assert False
-- | Cannot label collection-set with label outside current label/clearance
prop_setCollectionSetLabel_fail :: Property
prop_setCollectionSetLabel_fail = monadicPM1_fail $ \priv -> do
forAllM arbitrary $ \lcol -> do
l <- run $ liftDB $ liftLIO getLabel
c <- run $ liftDB $ liftLIO getClearance
pre . not $ canFlowToP priv l lcol && lcol `canFlowTo` c
run $ setCollectionSetLabelP priv lcol
Q.assert False
--
-- Create collections
--
prop_createCollection_empty_ok :: Property
prop_createCollection_empty_ok = monadicPM1 $ \priv ->
forAllM arbitrary $ \lcol ->
forAllM arbitrary $ \ccol -> do
l <- run $ liftDB $ liftLIO getLabel
c <- run $ liftDB $ liftLIO getClearance
pre $ canFlowToP priv l lcol && lcol `canFlowTo` c
pre $ canFlowToP priv l ccol && ccol `canFlowTo` c
let policy = CollectionPolicy {
documentLabelPolicy = const dcPublic
, fieldLabelPolicies = Map.empty }
run $ createCollectionP priv "somefuncollection" lcol ccol policy
Q.assert True
prop_createCollection_empty_fail :: Property
prop_createCollection_empty_fail = monadicPM1_fail $ \priv ->
forAllM arbitrary $ \lcol ->
forAllM arbitrary $ \ccol -> do
l <- run $ liftDB $ liftLIO getLabel
c <- run $ liftDB $ liftLIO getClearance
pre . not $ (canFlowToP priv l lcol && lcol `canFlowTo` c) &&
(canFlowToP priv l ccol && ccol `canFlowTo` c)
let policy = CollectionPolicy {
documentLabelPolicy = const dcPublic
, fieldLabelPolicies = Map.empty }
run $ createCollectionP priv "somefuncollection" lcol ccol policy
Q.assert False
--
-- Type check doc
--
-- | Remove any policy labeled values
removePolicyLabeled :: HsonDocument -> HsonDocument
removePolicyLabeled = filter (not . isPolicyLabeled)
labelOfPL :: PolicyLabeled -> DCLabel
labelOfPL (HasPolicyTCB lv) = labelOf lv
labelOfPL _ = error "should have been labeled"
isPolicyLabeled :: HsonField -> Bool
isPolicyLabeled (HsonField _ (HsonLabeled _)) = True
isPolicyLabeled _ = False
-- | Policies used by the typeCheckDocument tests
typeCheckDoc_policies :: Map FieldName FieldPolicy
typeCheckDoc_policies =
Map.fromList [ ("s1", SearchableField)
, ("s2", SearchableField)
, ("p1", FieldPolicy (const dcPublic))
, ("p2", FieldPolicy (const dcPublic)) ]
-- | Check that all fields of 'typeCheckDoc_policies' exist in a
-- document and are typed-correctly.
typeCheckDocument_all_named_exist :: Property
typeCheckDocument_all_named_exist = monadicDC $ do
doc2 <- removePolicyLabeled `liftM` pick arbitrary
pl1 <- hasPolicy `liftM` pick arbitrary
pl2 <- needPolicy `liftM` pick arbitrary
let doc = [ "s1" -: (1 :: Int), "s3" -: (3 :: Int), "s2" -: (2 :: Int)
, "x1" -: (4 :: Int), "p1" -: pl1, "p2" -: pl2]
`merge` doc2
run $ typeCheckDocument typeCheckDoc_policies doc
Q.assert True
-- | Check that all fields of 'typeCheckDoc_policies' exist in a
-- document and are typed-correctly: fail
-- A searchable/policy labeled field does not exist
typeCheckDocument_all_named_exist_fail1 :: Property
typeCheckDocument_all_named_exist_fail1 = monadicDC $ do
doc2 <- removePolicyLabeled `liftM` pick arbitrary
pl1 <- hasPolicy `liftM` pick arbitrary
pl2 <- needPolicy `liftM` pick arbitrary
rm <- pick $ elements ["s1", "s2", "p1", "p2" ]
let doc = exclude [rm] $
[ "s1" -: (1 :: Int), "s3" -: (3 :: Int), "s2" -: (2 :: Int)
, "x1" -: (4 :: Int), "p1" -: pl1, "p2" -: pl2]
`merge` doc2
res <- run $ (typeCheckDocument typeCheckDoc_policies doc >> return False)
`catch` (constCatch $ return True)
Q.assert res
-- | Check that all fields of 'typeCheckDoc_policies' exist in a
-- document and are typed-correctly: fail
-- A policy labeled field not named by the policy exists
typeCheckDocument_all_named_exist_fail2 :: Property
typeCheckDocument_all_named_exist_fail2 = monadicDC $ do
doc2 <- pick arbitrary
pre $ any isPolicyLabeled doc2
pl1 <- hasPolicy `liftM` pick arbitrary
pl2 <- needPolicy `liftM` pick arbitrary
let doc = [ "s1" -: (1 :: Int), "s3" -: (3 :: Int), "s2" -: (2 :: Int)
, "x1" -: (4 :: Int), "p1" -: pl1, "p2" -: pl2]
`merge` doc2
res <- run $ (typeCheckDocument typeCheckDoc_policies doc >> return False)
`catch` (constCatch $ return True)
Q.assert res
--
-- Test applyCollection policy
--
-- | Apply all-public policies
test_applyCollectionPolicyP_allPub :: Property
test_applyCollectionPolicyP_allPub = monadicDC $ do
doc2 <- removePolicyLabeled `liftM` pick arbitrary
pl1 <- needPolicy `liftM` pick arbitrary
pl2 <- needPolicy `liftM` pick arbitrary
let doc = [ "s1" -: (1 :: Int), "s3" -: (3 :: Int), "s2" -: (2 :: Int)
, "x1" -: (4 :: Int), "p1" -: pl1, "p2" -: pl2]
`merge` doc2
ldoc <- run $ applyCollectionPolicyP mempty col doc
Q.assert $ labelOf ldoc == dcPublic
let doc' = unlabelTCB ldoc
Q.assert $ labelOfPL (at "p1" doc') == dcPublic
Q.assert $ labelOfPL (at "p2" doc') == dcPublic
Q.assert . not $ any isPolicyLabeled $ exclude ["p1", "p2"] doc'
where col = collectionTCB "myColl" dcPublic dcPublic cPolicy
cPolicy = CollectionPolicy {
documentLabelPolicy = const dcPublic
, fieldLabelPolicies = typeCheckDoc_policies }
-- | Apply all-public policies, field has higher integrity: fail
test_applyCollectionPolicyP_allPub_field_bottom_fail :: Property
test_applyCollectionPolicyP_allPub_field_bottom_fail = monadicDC $ do
doc2 <- removePolicyLabeled `liftM` pick arbitrary
pl1 <- needPolicy `liftM` pick arbitrary
pl2 <- hasPolicy `liftM` pick arbitrary
pre $ labelOfPL pl2 /= dcPublic
let doc = [ "s1" -: (1 :: Int), "s3" -: (3 :: Int), "s2" -: (2 :: Int)
, "x1" -: (4 :: Int), "p1" -: pl1, "p2" -: pl2]
`merge` doc2
res <- run $ (applyCollectionPolicyP mempty col doc >> return False)
`catch` (constCatch $ return True)
Q.assert res
where col = collectionTCB "myColl" dcPublic dcPublic cPolicy
cPolicy = CollectionPolicy {
documentLabelPolicy = const dcPublic
, fieldLabelPolicies = typeCheckDoc_policies }
-- | Apply all-public policies, document policy is above clearance
test_applyCollectionPolicyP_allPub_bad_doc_policy_fail :: Property
test_applyCollectionPolicyP_allPub_bad_doc_policy_fail = monadicDC $ do
doc2 <- removePolicyLabeled `liftM` pick arbitrary
pl1 <- needPolicy `liftM` pick arbitrary
pl2 <- hasPolicy `liftM` pick arbitrary
pre $ labelOfPL pl2 /= dcPublic
let doc = [ "s1" -: (1 :: Int), "s3" -: (3 :: Int), "s2" -: (2 :: Int)
, "x1" -: (4 :: Int), "p1" -: pl1, "p2" -: pl2]
`merge` doc2
res <- run $ (applyCollectionPolicyP mempty col doc >> return False)
`catch` (constCatch $ return True)
Q.assert res
where col = collectionTCB "myColl" dcPublic dcPublic cPolicy
cPolicy = CollectionPolicy {
documentLabelPolicy = const defClr
, fieldLabelPolicies = typeCheckDoc_policies }
-- | Apply all-public policies, field policy is above the collection clearance
test_applyCollectionPolicyP_allPub_bad_field_policy_fail :: Property
test_applyCollectionPolicyP_allPub_bad_field_policy_fail = monadicDC $ do
doc2 <- removePolicyLabeled `liftM` pick arbitrary
pl1 <- needPolicy `liftM` pick arbitrary
pl2 <- needPolicy `liftM` pick arbitrary
let doc = [ "s1" -: (1 :: Int), "s3" -: (3 :: Int), "s2" -: (2 :: Int)
, "x1" -: (4 :: Int), "p1" -: pl1, "p2" -: pl2]
`merge` doc2
res <- run $ (applyCollectionPolicyP mempty col doc >> return False)
`catch` (constCatch $ return True)
Q.assert res
where col = collectionTCB "myColl" dcPublic dcPublic cPolicy
cPolicy = CollectionPolicy {
documentLabelPolicy = const dcPublic
, fieldLabelPolicies = Map.fromList [ ("s1", SearchableField)
, ("s2", SearchableField)
, ("p1", FieldPolicy (const dcPublic))
, ("p2", FieldPolicy (const defClr)) ] }
-- | Apply a data-dependent policy that labels the document and the
-- policy-labeled fields by the "s1" field
test_applyCollectionPolicyP_label_by_field :: Property
test_applyCollectionPolicyP_label_by_field = monadicDC $ do
doc2 <- removePolicyLabeled `liftM` pick arbitrary
pl1 <- needPolicy `liftM` pick arbitrary
pl2 <- needPolicy `liftM` pick arbitrary
let doc = [ "s1" -: prin, "s3" -: (3 :: Int), "s2" -: (2 :: Int)
, "x1" -: (4 :: Int), "p1" -: pl1, "p2" -: pl2]
`merge` doc2
ldoc <- run $ applyCollectionPolicyP priv col doc
Q.assert $ labelOf ldoc == lbl
let doc' = unlabelTCB ldoc
Q.assert $ labelOfPL (at "p1" doc') == lbl
Q.assert $ labelOfPL (at "p2" doc') == lbl
Q.assert . not $ any isPolicyLabeled $ exclude ["p1", "p2"] doc'
Q.assert True
where col = collectionTCB "myColl" dcPublic lbl cPolicy
fpol d = let n = at "s1" d :: String
in (%%) (n \/ ("A" :: String)) cTrue
lbl = (%%) (prin \/ ("A" :: String)) cTrue
prin = "w00t" :: String
priv = PrivTCB . toCNF $ prin
cPolicy = CollectionPolicy {
documentLabelPolicy = fpol
, fieldLabelPolicies = Map.fromList [ ("s1", SearchableField)
, ("s2", SearchableField)
, ("p1", FieldPolicy fpol)
, ("p2", FieldPolicy fpol)] }
--
-- Test insert and find
--
-- | Execute a mongo action against the testPM2 database
withMongo :: Mongo.Action IO a -> IO a
withMongo act = do
pipe <- Mongo.runIOE $ Mongo.connect (Mongo.host "localhost")
res <- Mongo.access pipe Mongo.master "testPM2_db" act
Mongo.close pipe
case res of
Left f -> throwIO (userError $ "Failed with " ++ show f)
Right v -> return v
testPM2Principal :: String
testPM2Principal = '_' : mkName (TestPM2TCB undefined)
-- | TestPM2's privileges
testPM2Priv :: DCPriv
testPM2Priv = PrivTCB . toCNF $ testPM2Principal
-- | Registered policy module that creates a public collection and one with
-- policy-labeled documents and fields
newtype TestPM2 = TestPM2TCB DCPriv deriving (Show, Typeable)
instance PolicyModule TestPM2 where
initPolicyModule p = do
-- label db & collection-set
labelDatabaseP p dcPublic lDB
-- create public storage
createCollectionP p "public" dcPublic dcPublic cPubPolicy
-- create collection with a policy-label for document and field
createCollectionP p "simple_pl" dcPublic cCol cSimplePlPolicy
return $ TestPM2TCB p
where this = privDesc p
cCol = (%%) this cTrue
lDB = (%%) cTrue (privDesc p)
cPubPolicy = CollectionPolicy { documentLabelPolicy = const dcPublic
, fieldLabelPolicies = Map.empty }
fpol d = let n = at "s" d :: String
in (%%) (n \/ this) cTrue
cSimplePlPolicy = CollectionPolicy {
documentLabelPolicy = fpol
, fieldLabelPolicies = Map.fromList [ ("pl", FieldPolicy fpol)
, ("s", SearchableField)] }
withTestPM2 :: (TestPM2 -> DBAction a) -> DC a
withTestPM2 f = do
ioTCB mkDBConfFile
--ioTCB $ withMongo $ Mongo.delete (Mongo.select [] "public")
--ioTCB $ withMongo $ Mongo.delete (Mongo.select [] "simple_pl")
withPolicyModule f
-- | Test insert in all-public collection
test_basic_insert :: Property
test_basic_insert = monadicDC $ do
doc <- (removePolicyLabeled . clean) `liftM` pick arbitrary
_id <- run $ withTestPM2 $ const $ do
insert "public" doc
mdoc <- run $ ioTCB $ withMongo $ Mongo.findOne
(Mongo.select ["_id" Mongo.=: _id] "public")
let bdoc = fromJust mdoc
doc' = sortDoc $ merge ["_id" -: _id] doc
Q.assert $ isJust mdoc &&
(sortDoc (dataBsonDocToHsonDocTCB bdoc) == doc')
-- | Test insert containing a policy labeled value
test_basic_insert_with_pl :: Property
test_basic_insert_with_pl = monadicDC $ do
doc0 <- (removePolicyLabeled . clean) `liftM` pick arbitrary
pl <- needPolicy `liftM` pick arbitrary
let s = "A" :: String
let doc = merge ["s" -: s, "pl" -: pl] doc0
_id <- run $ withTestPM2 $ const $ do
insert "simple_pl" doc
mdoc <- run $ ioTCB $ withMongo $ Mongo.findOne
(Mongo.select ["_id" Mongo.=: _id] "simple_pl")
let bdoc = fromJust mdoc
doc' = sortDoc $ merge ["_id" -: _id] doc
Q.assert $ isJust mdoc &&
(sortDoc (dataBsonDocToHsonDocTCB bdoc) == doc')
-- | Test insert after taint: failure.
test_basic_insert_fail :: Property
test_basic_insert_fail = monadicDC $ do
res <- run $ (withTestPM2 $ const $ do
liftLIO $ getClearance >>= taint
insert_ "public" (["my" -: (1::Int)] :: HsonDocument)
return False) `catch` (\(_::SomeException) -> return True)
Q.assert res
-- | Test find in all-public collection
test_basic_find :: Property
test_basic_find = monadicDC $ do
let doc = [ "s1" -: (1 :: Int), "s3" -: (3 :: Int), "s2" -: (2 :: Int)
, "x1" -: (4 :: Int)]
_id <- run $ withTestPM2 $ const $ insert "public" doc
mdoc <- run $ withTestPM2 $ const $ findOne (select ["_id" -: _id] "public")
Q.assert $ isJust mdoc
doc' <- run $ unlabel $ fromJust mdoc
Q.assert $ sortDoc doc' == sortDoc (merge ["_id" -: _id] doc)
-- | Test find containing a policy labeled value
test_basic_find_with_pl :: Property
test_basic_find_with_pl = monadicDC $ do
doc0 <- (removePolicyLabeled . clean) `liftM` pick arbitrary
plv <- pick arbitrary
let pl = needPolicy (plv :: BsonValue)
s = "A" :: String
doc = merge ["s" -: s , "pl" -: pl] doc0
_id <- run $ withTestPM2 $ const $ insert "simple_pl" doc
mdoc <- run $ withTestPM2 $ const $ findOne (select ["_id" -: _id] "simple_pl")
Q.assert $ isJust mdoc
let priv = PrivTCB . toCNF $ s
doc' <- run $ unlabelP priv $ fromJust mdoc
Q.assert $ (sortDoc . exclude ["pl"] $ doc') ==
(sortDoc . merge ["_id" -: _id] . exclude ["pl"] $ doc)
let ~mlv@(Just lv) = getPolicyLabeled $ "pl" `at` doc'
Q.assert $ isJust mlv && unlabelTCB lv == plv
-- | Test save in all-public collection
test_basic_save :: Property
test_basic_save = monadicDC $ do
doc0 <- (removePolicyLabeled . clean) `liftM` pick arbitrary
doc1 <- (removePolicyLabeled . clean) `liftM` pick arbitrary
_id <- run $ withTestPM2 $ const $ insert "public" doc0
let doc = merge ["_id" -: _id] doc1
run $ withTestPM2 $ const $ save "public" doc
mdoc <- run $ withTestPM2 $ const $ findOne (select ["_id" -: _id] "public")
Q.assert $ isJust mdoc
doc' <- run $ unlabel $ fromJust mdoc
Q.assert $ sortDoc doc == sortDoc doc'
-- | Test save containing a policy labeled value
test_basic_save_with_pl :: Property
test_basic_save_with_pl = monadicDC $ do
doc0 <- (removePolicyLabeled . clean) `liftM` pick arbitrary
plv <- pick arbitrary
let pl = needPolicy (plv :: BsonValue)
let s = "A" :: String
priv = PrivTCB . toCNF $ s
doc1 = merge ["s" -: s , "pl" -: pl] doc0
_id <- run $ withTestPM2 $ const $ insert "simple_pl" doc1
let doc2 = merge ["_id" -: _id, "x" -: ("f00ba12" :: String)] doc1
run $ withTestPM2 $ const $ saveP priv "simple_pl" $ doc2
mdoc <- run $ withTestPM2 $ const $ findOne (select ["_id" -: _id] "simple_pl")
Q.assert $ isJust mdoc
doc' <- run $ unlabelP priv $ fromJust mdoc
Q.assert $ (sortDoc . exclude ["pl"] $ doc') ==
(sortDoc . exclude ["pl"] $ doc2)
let ~mlv@(Just lv) = getPolicyLabeled $ "pl" `at` doc'
Q.assert $ isJust mlv && unlabelTCB lv == plv
-- | Test labeled insert in all-public collection
test_basic_labeled_insert :: Property
test_basic_labeled_insert = monadicDC $ do
doc <- (removePolicyLabeled . clean) `liftM` pick arbitrary
ldoc <- run $ label dcPublic doc
_id <- run $ withTestPM2 $ const $ do
insert "public" ldoc
mdoc <- run $ withTestPM2 $ const $ findOne (select ["_id" -: _id] "public")
Q.assert $ isJust mdoc && labelOf (fromJust mdoc) == dcPublic
doc' <- run $ unlabel $ fromJust mdoc
Q.assert $ sortDoc doc' == sortDoc (merge ["_id" -: _id] doc)
-- | Test labeled insert containing a policy labeled value
test_basic_labeled_insert_with_pl :: Property
test_basic_labeled_insert_with_pl = monadicDC $ do
doc0 <- (removePolicyLabeled . clean) `liftM` pick arbitrary
let s = "A" :: String
l = (%%) (s \/ testPM2Principal) cTrue
plv <- pick arbitrary
pl <- run $ label l (plv :: BsonValue)
let doc = merge ["s" -: s, "pl" -: pl] doc0
ldoc <- run $ label l doc
_id <- run $ withTestPM2 $ const $ insert "simple_pl" ldoc
mdoc <- run $ withTestPM2 $ const $ findOne (select ["_id" -: _id] "simple_pl")
Q.assert $ isJust mdoc
let doc' = unlabelTCB $ fromJust mdoc
doc'' = merge ["_id" -: _id] doc
Q.assert $ (sortDoc . exclude ["pl"] $ doc') ==
(sortDoc . exclude ["pl"] $ doc'')
let ~mlv@(Just lv) = getPolicyLabeled $ "pl" `at` doc'
Q.assert $ isJust mlv && unlabelTCB lv == plv
-- | Test labeled insert in all-public collection: failure, the document is
-- labeled at the clearance rather than public
test_basic_labeled_insert_fail :: Property
test_basic_labeled_insert_fail = monadicDC $ do
doc <- (removePolicyLabeled . clean) `liftM` pick arbitrary
clr <- run $ getClearance
ldoc <- run $ label clr doc
res <- run $ (withTestPM2 $ const $ do
insert_ "public" ldoc
return False) `catch` (\(_::SomeException) -> return True)
Q.assert res
-- | Test labeled insert containing a policy labeled value, fail
test_basic_labeled_insert_with_pl_fail :: Property
test_basic_labeled_insert_with_pl_fail = monadicDC $ do
doc0 <- (removePolicyLabeled . clean) `liftM` pick arbitrary
let s = "A" :: String
lOfdoc = (%%) (s \/ testPM2Principal) cTrue
l = (%%) (s \/ testPM2Principal \/ ("failureCause" :: String)) cTrue
plv <- pick arbitrary
pl <- run $ label l (plv :: BsonValue)
let doc = merge ["s" -: s, "pl" -: pl] doc0
ldoc <- run $ label lOfdoc doc
res <- run $ (withTestPM2 $ const $ do
insert_ "simple_pl" ldoc
return False) `catch` (\(_::SomeException) -> return True)
Q.assert res
-- | Test labeled save in all-public collection
test_basic_labeled_save :: Property
test_basic_labeled_save = monadicDC $ do
doc0 <- (removePolicyLabeled . clean) `liftM` pick arbitrary
doc1 <- (removePolicyLabeled . clean) `liftM` pick arbitrary
ldoc0 <- run $ label dcPublic doc0
_id <- run $ withTestPM2 $ const $ insert "public" ldoc0
ldoc1 <- run $ label dcPublic $ merge ["_id" -: _id] doc1
run $ withTestPM2 $ const $ save "public" ldoc1
mdoc <- run $ withTestPM2 $ const $ findOne (select ["_id" -: _id] "public")
Q.assert $ isJust mdoc && labelOf (fromJust mdoc) == dcPublic
doc' <- run $ unlabel $ fromJust mdoc
Q.assert $ sortDoc doc' == sortDoc (unlabelTCB ldoc1)
-- | Test labeled save containing a policy labeled value
test_basic_labeled_save_with_pl :: Property
test_basic_labeled_save_with_pl = monadicDC $ do
doc0 <- (removePolicyLabeled . clean) `liftM` pick arbitrary
doc1 <- (removePolicyLabeled . clean) `liftM` pick arbitrary
let s = "A" :: String
lOfdoc = (%%) (s \/ testPM2Principal \/ ("B" :: String)) cTrue
l = (%%) (s \/ testPM2Principal) cTrue
plv <- pick arbitrary
pl <- run $ label l (plv :: BsonValue)
ldoc0 <- run $ label lOfdoc $ merge ["s" -: s, "pl" -: pl] doc0
_id <- run $ withTestPM2 $ const $ insert "simple_pl" ldoc0
ldoc1 <- run $ label dcPublic $ merge ["_id" -: _id,"s" -: s, "pl" -: pl] doc1
run $ withTestPM2 $ const $ save "simple_pl" ldoc1
mdoc <- run $ withTestPM2 $ const $ findOne (select ["_id" -: _id] "simple_pl")
Q.assert $ isJust mdoc
let doc' = unlabelTCB $ fromJust mdoc
doc'' = merge ["_id" -: _id] $ unlabelTCB ldoc1
Q.assert $ (sortDoc . exclude ["pl"] $ doc') ==
(sortDoc . exclude ["pl"] $ doc'')
let ~mlv@(Just lv) = getPolicyLabeled $ "pl" `at` doc'
Q.assert $ isJust mlv && unlabelTCB lv == plv
-- | Test labeled save in all-public collection: failure, the document is
-- labeled at the clearance rather than public
test_basic_labeled_save_fail :: Property
test_basic_labeled_save_fail = monadicDC $ do
doc0 <- (removePolicyLabeled . clean) `liftM` pick arbitrary
doc1 <- (removePolicyLabeled . clean) `liftM` pick arbitrary
ldoc0 <- run $ label dcPublic doc0
_id <- run $ withTestPM2 $ const $ insert "public" ldoc0
clr <- run $ getClearance
ldoc1 <- run $ label clr $ merge ["_id" -: _id] doc1
res <- run $ (withTestPM2 $ const $ do
save "public" ldoc1
return False) `catch` (constCatch $ return True)
Q.assert res
|
scslab/hails
|
tests/DatabaseTests.hs
|
mit
| 30,980
| 0
| 19
| 7,578
| 8,661
| 4,462
| 4,199
| 597
| 2
|
{-# Language FlexibleInstances #-}
-----------------------------------------------------------------------------
-- |
-- Module : Language.Krill.Parser.Syntax
-- Description : Abstract, un-elaborated syntax of Krill
-- Maintainer : coskuacay@gmail.com
-- Stability : experimental
-----------------------------------------------------------------------------
module Language.Krill.Parser.Syntax
( File (..)
, Module (..)
, Declaration (..)
, Type (..)
, Exp (..)
, ExpLine (..)
, Ident (..)
, Constructor (..)
, Channel (..)
, Label (..)
, Branch (..)
, branchLabel
, branchUnpack
, branchMap
, branchLookup
) where
import Data.Function (on)
import Text.PrettyPrint
import Text.PrettyPrint.HughesPJClass (Pretty (..), prettyShow)
import Language.Krill.Utility.Pretty
import Language.Krill.Parser.Annotated (Annotated (..))
import Language.Krill.Parser.Named (Named (..))
data File annot = File annot [Module annot]
data Module annot = Module annot (Ident annot) [Declaration annot]
data Declaration annot
= TypeDef annot (Constructor annot) (Type annot)
| TypeSig annot (Ident annot) (Type annot)
| FunClause annot (Channel annot) (Ident annot) [Channel annot] (Exp annot)
data Type annot = TVar annot (Constructor annot)
| TUnit annot
| TProduct annot (Type annot) (Type annot)
| TArrow annot (Type annot) (Type annot)
| TInternal annot [Branch Type annot]
| TExternal annot [Branch Type annot]
| TIntersect annot (Type annot) (Type annot)
| TUnion annot (Type annot) (Type annot)
data Exp annot = Exp annot [ExpLine annot]
data ExpLine annot = ECut annot (Channel annot) (Ident annot) [Channel annot]
| EFwd annot (Channel annot) (Channel annot)
| EClose annot (Channel annot)
| EWait annot (Channel annot)
| ESend annot (Channel annot) (Channel annot, Exp annot)
| ESendChannel annot (Channel annot) (Channel annot)
| ERecv annot (Channel annot) (Channel annot)
| ESelect annot (Channel annot) (Label annot)
| ECase annot (Channel annot) [Branch Exp annot]
data Ident annot = Ident annot String
data Constructor annot = Constructor annot String
data Channel annot = Channel annot String
data Label annot = Label annot String
data Branch t annot = Branch annot (Label annot) (t annot)
branchLabel :: Branch t annot -> Label annot
branchLabel (Branch _ lab _) = lab
branchUnpack :: Branch t annot -> (Label annot, t annot)
branchUnpack (Branch _ lab t) = (lab, t)
branchMap :: (t1 annot -> t2 annot) -> Branch t1 annot -> Branch t2 annot
branchMap f (Branch annot lab t) = Branch annot lab (f t)
branchLookup :: Label annot -> [Branch t annot] -> Maybe (t annot)
branchLookup lab = lookup lab . map branchUnpack
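-- A minimal usage sketch (hypothetical values, not part of the original
-- module): selecting the body of the branch labelled "cons".
--
-- > branchLookup (Label () "cons")
-- >              [ Branch () (Label () "nil")  (TUnit ())
-- >              , Branch () (Label () "cons") (TUnit ()) ]
--
-- evaluates to @Just (TUnit ())@; labels compare by name only, so the
-- annotations are irrelevant to the lookup.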
{--------------------------------------------------------------------------
Instances
--------------------------------------------------------------------------}
instance Eq (Ident annot) where
(==) = (==) `on` name
instance Eq (Constructor annot) where
(==) = (==) `on` name
instance Eq (Channel annot) where
(==) = (==) `on` name
instance Eq (Label annot) where
(==) = (==) `on` name
instance Ord (Ident annot) where
compare = compare `on` name
instance Ord (Constructor annot) where
compare = compare `on` name
instance Ord (Channel annot) where
compare = compare `on` name
instance Ord (Label annot) where
compare = compare `on` name
{--------------------------------------------------------------------------
Annotations
--------------------------------------------------------------------------}
instance Annotated File where
annot (File annot _) = annot
instance Annotated Module where
annot (Module annot _ _) = annot
instance Annotated Declaration where
annot (TypeDef annot _ _) = annot
annot (TypeSig annot _ _) = annot
annot (FunClause annot _ _ _ _) = annot
instance Annotated Type where
annot (TVar annot _) = annot
annot (TUnit annot) = annot
annot (TProduct annot _ _) = annot
annot (TArrow annot _ _) = annot
annot (TInternal annot _) = annot
annot (TExternal annot _) = annot
annot (TIntersect annot _ _) = annot
annot (TUnion annot _ _) = annot
instance Annotated Exp where
annot (Exp annot _) = annot
instance Annotated ExpLine where
annot (ECut annot _ _ _) = annot
annot (EFwd annot _ _) = annot
annot (EClose annot _) = annot
annot (EWait annot _) = annot
annot (ESend annot _ _) = annot
annot (ESendChannel annot _ _) = annot
annot (ERecv annot _ _) = annot
annot (ESelect annot _ _) = annot
annot (ECase annot _ _) = annot
instance Annotated Ident where
annot (Ident annot _) = annot
instance Annotated Constructor where
annot (Constructor annot _) = annot
instance Annotated Channel where
annot (Channel annot _) = annot
instance Annotated Label where
annot (Label annot _) = annot
instance Annotated (Branch t) where
annot (Branch annot _ _) = annot
{--------------------------------------------------------------------------
Names
--------------------------------------------------------------------------}
instance Named (Module annot) where
name (Module _ ident _) = name ident
instance Named (Declaration annot) where
name (TypeDef _ con _) = name con
name (TypeSig _ ident _) = name ident
name (FunClause _ _ ident _ _) = name ident
instance Named (Ident annot) where
name (Ident _ n) = n
instance Named (Constructor annot) where
name (Constructor _ n) = n
instance Named (Channel annot) where
name (Channel _ n) = n
instance Named (Label annot) where
name (Label _ n) = n
{--------------------------------------------------------------------------
Printing
--------------------------------------------------------------------------}
instance Pretty (File annot) where
pPrint (File _ ms) = vcat (punctuate nl $ map pPrint ms)
instance Pretty (Module annot) where
pPrint (Module _ name decls) = text "module" <+> pPrint name <+> text "where"
$$ nest indentation (vcat $ map pPrint decls)
instance Pretty (Declaration annot) where
pPrint (TypeDef _ con t) =
text "" $+$ text "type" <+> pPrint con <+> text "=" <+> pPrint t
pPrint (TypeSig _ ident t) = text "" $+$ pPrint ident <+> colon <+> pPrint t
pPrint (FunClause _ c ident args e) = pPrint c <+> leftArrow
<+> pPrint ident <+> hsep (map pPrint args) <+> text "=" <+> text "do"
$$ nest indentation (pPrint e)
-- TODO: better parens
instance Pretty (Type annot) where
pPrint (TVar _ con) = pPrint con
pPrint (TUnit _) = text "1"
pPrint (TProduct _ a b) = parens (pPrint a <+> text "*" <+> pPrint b)
pPrint (TArrow _ a b) = parens (pPrint a <+> lolli <+> pPrint b)
pPrint (TInternal _ br) =
text "+" <> braces (hsep $ punctuate comma $ map pPrint br)
pPrint (TExternal _ br) =
text "&" <> braces (hsep $ punctuate comma $ map pPrint br)
pPrint (TIntersect _ a b) = parens (pPrint a <+> text "and" <+> pPrint b)
pPrint (TUnion _ a b) = parens (pPrint a <+> text "or" <+> pPrint b)
instance Pretty (Exp annot) where
pPrint (Exp _ es) = vcat (map pPrint es)
instance Pretty (ExpLine annot) where
pPrint (ECut _ c ident args) = pPrint c <+> leftArrow <+> pPrint ident
<+> hsep (map pPrint args)
pPrint (EFwd _ c d) = pPrint c <+> leftArrow <+> pPrint d
pPrint (EClose _ c) = text "close" <+> pPrint c
pPrint (EWait _ c) = text "wait" <+> pPrint c
pPrint (ESend _ c (d, e)) =
text "send" <+> pPrint c <+> parens (pPrint d <+> leftArrow <+> pPrint e)
pPrint (ESendChannel _ c d) = text "send" <+> pPrint c <+> pPrint d
pPrint (ERecv _ c d) = pPrint c <+> leftArrow <+> text "recv" <+> pPrint d
pPrint (ESelect _ c lab) = pPrint c <> char '.' <> pPrint lab
pPrint (ECase _ c br) = text "case" <+> pPrint c <+> text "of"
$$ nest indentation (vcat $ map pPrint br)
instance Pretty (Ident annot) where
pPrint (Ident _ ident) = text ident
instance Pretty (Constructor annot) where
pPrint (Constructor _ con) = text con
instance Pretty (Channel annot) where
pPrint (Channel _ c) = text c
instance Pretty (Label annot) where
pPrint (Label _ lab) = text lab
instance Pretty (Branch Type annot) where
pPrint (Branch _ lab t) = pPrint lab <+> colon <+> pPrint t
instance Pretty (Branch Exp annot) where
pPrint (Branch _ lab e) = pPrint lab <+> rightArrow <+> pPrint e
{--------------------------------------------------------------------------
Showing
--------------------------------------------------------------------------}
instance Show (File annot) where
show = prettyShow
instance Show (Module annot) where
show = prettyShow
instance Show (Declaration annot) where
show = prettyShow
instance Show (Type annot) where
show = prettyShow
instance Show (Exp annot) where
show = prettyShow
instance Show (Ident annot) where
show = prettyShow
instance Show (Constructor annot) where
show = prettyShow
instance Show (Channel annot) where
show = prettyShow
instance Show (Label annot) where
show = prettyShow
|
cacay/language-sill
|
Language/Krill/Parser/Syntax.hs
|
mit
| 9,187
| 0
| 12
| 1,947
| 3,383
| 1,735
| 1,648
| 197
| 1
|
{-# LANGUAGE RankNTypes, StandaloneDeriving, TupleSections, DeriveGeneric, DeriveAnyClass, TypeSynonymInstances, FlexibleInstances, MultiParamTypeClasses, ViewPatterns, GADTs, ConstraintKinds, FlexibleContexts, ScopedTypeVariables #-}
module Language.Jasmin.Transformation.SBV where
import Data.IORef
import Data.Proxy
import Data.Bifunctor
import Data.SBV.Exts hiding ((<+>))
import Data.Map (Map(..))
import qualified Data.Map as Map
import Language.Vars
import Data.Generics hiding (Generic,typeOf)
import GHC.Generics
import Data.Maybe
import Safe
import Control.Monad.Except
import Control.Monad.State (StateT(..))
import qualified Control.Monad.State as State
import Language.Jasmin.Syntax
import Language.Position
import Language.Location
import Language.Jasmin.Error
import Language.Jasmin.TypeChecker.TyInfo
import Language.Jasmin.Transformation.Simplify
import Language.Jasmin.Transformation.VCGen
import Text.PrettyPrint.Exts
import Options
import System.IO
import Utils
isOkThmResult :: ThmResult -> Bool
isOkThmResult (ThmResult (Unsatisfiable _)) = True
isOkThmResult _ = False
pprogramToSBV :: Options -> Pprogram TyInfo -> IO [ThmResult]
pprogramToSBV opts prog@(Pprogram xs) = do
concatMapM (pitemToSBV opts) xs
pitemToSBV :: Options -> Pitem TyInfo -> IO [ThmResult]
pitemToSBV opts (PFundef f) = pfundefToSBV opts f
pitemToSBV opts (PParam i) = error "sbv param not supported"
pfundefToSBV :: Options -> Pfundef TyInfo -> IO [ThmResult]
pfundefToSBV opts f = do
vcs <- genVCsPfundef f
mapM (uncurry (vcToSBV opts)) vcs
vcToSBV :: Options -> [Pinstr TyInfo] -> Pexpr TyInfo -> IO ThmResult
vcToSBV opts is assert = do
let go = do
mapM_ pinstrToSBV is
pexprToSBVBool assert
proveSBVMWith opts (infoLoc $ loc assert) go
panninstr_rToSBV :: TyInfo -> StatementAnnotation_r TyInfo -> SBVM Symbolic ()
panninstr_rToSBV t (AssumeAnn False e) = do
sbve <- pexprToSBVBool e
lift2 $ constrain sbve
panninstr_rToSBV t (AssertAnn False e) = genError (infoLoc t) $ text "no SMT asserts"
panninstr_rToSBV t (EmbedAnn isLeak i) = unless isLeak $ pinstrToSBV i
panninstr_rToSBV t (VarDefAnn arg) = annargToSBV arg
annargToSBV :: Annarg TyInfo -> SBVM Symbolic ()
annargToSBV (Annarg ty n e) = do
e' <- mapM pexprToSBV e
addSBVar (infoLoc $ loc n) ty (funit n) e'
pblockToSBV :: Pblock TyInfo -> SBVM Symbolic ()
pblockToSBV (Pblock t is) = mapM_ pinstrToSBV is
pinstrToSBV :: Pinstr TyInfo -> SBVM Symbolic ()
pinstrToSBV (Pinstr t i) = pinstr_rToSBV t i
pinstr_rToSBV :: TyInfo -> Pinstr_r TyInfo -> SBVM Symbolic ()
pinstr_rToSBV t (PIIf _ c s1 s2) = do
c' <- pexprToSBVBool c
let m1 = pblockToSBV s1
let m2 = maybe (return ()) pblockToSBV s2
sbvIf c' m1 m2
pinstr_rToSBV t (PIAssign lvs RawEq e Nothing) = do
e' <- pexprToSBV e
passignToSBV t lvs [e']
pinstr_rToSBV t (Copn ls o es) = popn_rToSBV t ls o es
pinstr_rToSBV t (Anninstr i) = panninstr_rToSBV t i
pinstr_rToSBV t i = genError (infoLoc t) $ text "instruction can't be converted to SMT"
passignToSBV :: TyInfo -> [Plvalue TyInfo] -> [SBVar] -> SBVM Symbolic ()
passignToSBV t [Plvalue ty (PLVar n)] [v] = addSBVar (infoLoc t) (infoTy ty) (funit n) (Just v)
passignToSBV t [Plvalue ty (PLArray n e)] [v] = do
e' <- pexprToSBVInteger e
writeSBArr (infoLoc t) (funit n) e' v
passignToSBV t ls vs | length ls > 1 && length ls == length vs = do
forM_ (zip ls vs) $ \(l,v) -> passignToSBV t [l] [v]
passignToSBV t lv v = genError (infoLoc t) $ text "lvalue not supported in SMT"
popn_rToSBV :: TyInfo -> [Plvalue TyInfo] -> Op -> [Pexpr TyInfo] -> SBVM Symbolic ()
popn_rToSBV t ls Oaddcarry [e1,e2,cf] = do
e1' <- pexprToSBV e1
e2' <- pexprToSBV e2
cf' <- pexprToSBVBool cf
let (vcf,ve) = sbvAdc e1' e2' cf'
passignToSBV t ls [SBBool vcf,ve]
popn_rToSBV t ls o es = error "popn_rToSBV"
sbvAdc :: SBVar -> SBVar -> SBool -> (SBool,SBVar)
sbvAdc (SBWord64 x) (SBWord64 y) cf = let (cf',z) = adc x y cf in (cf',SBWord64 z)
-- | Add with carry: returns the outgoing carry flag and the wrapped sum.
adc :: SIntegral a => SBV a -> SBV a -> SBool -> (SBool,SBV a)
adc x y cf = (cf',s')
where
-- wrapped sum of the operands, plus the incoming carry (0 or 1)
s = x + y
scf = ite cf 1 0
s' = s + scf
-- unsigned overflow occurred iff the wrapped sum is smaller than an
-- operand, or adding the carry bit wrapped around again
cf' = s .< x ||| s .< y ||| s' .< s
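-- | Execute both branch actions against the current state and merge the
-- resulting states with 'ite' on the symbolic condition.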
sbvIf :: SBool -> SBVM Symbolic () -> SBVM Symbolic () -> SBVM Symbolic ()
sbvIf c m1 m2 = do
env1 <- liftM snd $ blockSBVM m1
env2 <- liftM snd $ blockSBVM m2
let env' = ite c env1 env2
State.put env'
pexprToSBVBool :: Pexpr TyInfo -> SBVM Symbolic SBool
pexprToSBVBool e = do
SBBool e' <- pexprToSBV e
return e'
pexprToSBVInteger :: Pexpr TyInfo -> SBVM Symbolic SInteger
pexprToSBVInteger e = do
SBInteger e' <- pexprToSBV e
return e'
pexprToSBV :: Pexpr TyInfo -> SBVM Symbolic SBVar
pexprToSBV (Pexpr t e) = pexpr_rToSBV t e
pexpr_rToSBV :: TyInfo -> Pexpr_r TyInfo -> SBVM Symbolic SBVar
pexpr_rToSBV t (PEVar v) = getSBVar (infoLoc t) (funit v)
pexpr_rToSBV t (PEBool b) = return $ SBBool $ literal b
pexpr_rToSBV t (PEInt i) = return $ sbvInteger (infoTy t) i
pexpr_rToSBV t (PEOp1 o e1) = op1_rToSBV t o e1
pexpr_rToSBV t (PEOp2 o e1 e2) = op2_rToSBV t o e1 e2
pexpr_rToSBV t (PEGet n e) = do
e' <- pexprToSBVInteger e
readSBArr (infoLoc t) (funit n) e'
pexpr_rToSBV t (Pcast te e) = pcast_rToSBV t te e
pexpr_rToSBV t (PEParens [e]) = pexprToSBV e
--pexpr_rToSBV t (QuantifiedExpr q args e) = forAll
pexpr_rToSBV t e = do
pe <- pp e
genError (infoLoc t) $ text "expression not encoded in SBV: " <+> pe
pcast_rToSBV :: TyInfo -> Ptype TyInfo -> Pexpr TyInfo -> SBVM Symbolic SBVar
pcast_rToSBV l t e = do
e' <- pexprToSBV e
return $ sbvCast t e'
sbvInteger :: Ptype TyInfo -> Integer -> SBVar
sbvInteger (TInt Nothing) i = SBInteger $ literal i
sbvInteger (TWord 8) i = SBWord8 $ literal $ fromIntegral i
sbvInteger (TWord 16) i = SBWord16 $ literal $ fromIntegral i
sbvInteger (TWord 32) i = SBWord32 $ literal $ fromIntegral i
sbvInteger (TWord 64) i = SBWord64 $ literal $ fromIntegral i
sbvInteger (TWord 128) i = SBWord128 $ literal $ fromIntegral i
sbvInteger (TWord 256) i = SBWord256 $ literal $ fromIntegral i
sbvInteger (TWord 512) i = SBWord512 $ literal $ fromIntegral i
sbvInteger t i = error $ "sbvInteger " ++ pprid t
op1_rToSBV :: TyInfo -> Peop1 -> Pexpr TyInfo -> SBVM Symbolic SBVar
op1_rToSBV t Not1 e1 = nativeOp1 (sbvBoolean1 bnot) e1
op1_rToSBV t o e1 = error "op1_rToSBV"
sbvCast :: Ptype TyInfo -> SBVar -> SBVar
sbvCast (TInt Nothing) (SBInteger i) = SBInteger $ sFromIntegral i
sbvCast (TInt Nothing) (SBWord8 i) = SBInteger $ sFromIntegral i
sbvCast (TInt Nothing) (SBWord16 i) = SBInteger $ sFromIntegral i
sbvCast (TInt Nothing) (SBWord32 i) = SBInteger $ sFromIntegral i
sbvCast (TInt Nothing) (SBWord64 i) = SBInteger $ sFromIntegral i
sbvCast (TInt Nothing) (SBWord128 i) = SBInteger $ sFromIntegral i
sbvCast (TInt Nothing) (SBWord256 i) = SBInteger $ sFromIntegral i
sbvCast (TInt Nothing) (SBWord512 i) = SBInteger $ sFromIntegral i
sbvCast (TWord 64) (SBInteger i) = SBWord64 $ sFromIntegral i
sbvCast (TWord 64) (SBWord8 i) = SBWord64 $ extend $ extend $ extend i
sbvCast (TWord 64) (SBWord16 i) = SBWord64 $ extend $ extend i
sbvCast (TWord 64) (SBWord32 i) = SBWord64 $ extend i
sbvCast (TWord 64) (SBWord64 i) = SBWord64 $ i
sbvCast (TWord 512) (SBInteger i) = SBWord512 $ sFromIntegral i
sbvCast (TWord 512) (SBWord8 i) = SBWord512 $ extend $ extend $ extend $ extend $ extend $ extend i
sbvCast (TWord 512) (SBWord16 i) = SBWord512 $ extend $ extend $ extend $ extend $ extend i
sbvCast (TWord 512) (SBWord32 i) = SBWord512 $ extend $ extend $ extend $ extend i
sbvCast (TWord 512) (SBWord64 i) = SBWord512 $ extend $ extend $ extend i
sbvCast (TWord 512) (SBWord128 i) = SBWord512 $ extend $ extend i
sbvCast (TWord 512) (SBWord256 i) = SBWord512 $ extend i
sbvCast (TWord 512) (SBWord512 i) = SBWord512 i
sbvCast t x = error $ show $ text "sbvCast" <+> ppid t <+> ppid x
op2_rToSBV :: TyInfo -> Peop2 -> Pexpr TyInfo -> Pexpr TyInfo -> SBVM Symbolic SBVar
op2_rToSBV t Add2 e1 e2 = nativeOp2 (sbvNum2 (+)) e1 e2
op2_rToSBV t Sub2 e1 e2 = nativeOp2 (sbvNum2 (-)) e1 e2
op2_rToSBV t Mul2 e1 e2 = nativeOp2 (sbvNum2 (*)) e1 e2
op2_rToSBV t And2 e1 e2 = nativeOp2 (sbvBoolean2 (&&&)) e1 e2
op2_rToSBV t Or2 e1 e2 = nativeOp2 (sbvBoolean2 (|||)) e1 e2
op2_rToSBV t BAnd2 e1 e2 = nativeOp2 (sbvBits2 (.&.)) e1 e2
op2_rToSBV t BOr2 e1 e2 = nativeOp2 (sbvBits2 (.|.)) e1 e2
op2_rToSBV t BXor2 e1 e2 = nativeOp2 (sbvBits2 xor) e1 e2
op2_rToSBV t (Shr2 Unsigned) e1 e2 = nativeOp2 (sbvIntegral2 sShiftRight) e1 e2
op2_rToSBV t Shl2 e1 e2 = nativeOp2 (sbvIntegral2 sShiftLeft) e1 e2
op2_rToSBV t Eq2 e1 e2 = nativeOp2Bool (sbvEqSymbolic2 (.==)) e1 e2
op2_rToSBV t Neq2 e1 e2 = nativeOp2Bool (sbvEqSymbolic2 (./=)) e1 e2
op2_rToSBV t (Lt2 Unsigned) e1 e2 = nativeOp2Bool (sbvOrdSymbolic2 (.<)) e1 e2
op2_rToSBV t (Le2 Unsigned) e1 e2 = nativeOp2Bool (sbvOrdSymbolic2 (.<=)) e1 e2
op2_rToSBV t (Gt2 Unsigned) e1 e2 = nativeOp2Bool (sbvOrdSymbolic2 (.>)) e1 e2
op2_rToSBV t (Ge2 Unsigned) e1 e2 = nativeOp2Bool (sbvOrdSymbolic2 (.>=)) e1 e2
op2_rToSBV t Mod2 e1 e2 = nativeOp2 (sbvDivisible2 sMod) e1 e2
op2_rToSBV t o e1 e2 = error "op2_rToSBV"
nativeOp1 :: (SBVar -> SBVar) -> Pexpr TyInfo -> SBVM Symbolic SBVar
nativeOp1 f e1 = do
e1' <- pexprToSBV e1
return $ f e1'
nativeOp2Bool :: (SBVar -> SBVar -> SBool) -> Pexpr TyInfo -> Pexpr TyInfo -> SBVM Symbolic SBVar
nativeOp2Bool f e1 e2 = nativeOp2 f' e1 e2
where
f' x y = SBBool $ f x y
nativeOp2 :: (SBVar -> SBVar -> SBVar) -> Pexpr TyInfo -> Pexpr TyInfo -> SBVM Symbolic SBVar
nativeOp2 f e1 e2 = do
e1' <- pexprToSBV e1
e2' <- pexprToSBV e2
return $ f e1' e2'
-- * State
solverCfg :: Solver -> SMTConfig
solverCfg Boolector = boolector
solverCfg CVC4 = cvc4
solverCfg Yices = yices
solverCfg Z3 = z3
solverCfg MathSAT = mathSAT
solverCfg ABC = abc
proveSBVMWith :: Options -> Position -> SBVM Symbolic SBool -> IO ThmResult
proveSBVMWith opts p m = do
let cfg = solverCfg (solver' opts)
when (debugVerification opts) $ liftIO $ hPutStrLn stderr $ "Solving SMT verification condition at " ++ pprid p
res <- proveWith cfg mgoal
when (debugVerification opts) $ liftIO $ hPutStrLn stderr $ show res
return res
where
mgoal = do
e <- runSBVM m emptySBVSt
case e of
Left err -> do
when (debugVerification opts) $ liftIO $ hPutStrLn stderr $ pprid err
return false
Right (b,_) -> return b
runSBVM :: SBVK m => SBVM m a -> SBVSt -> m (Either JasminError (a,SBVSt))
runSBVM m st = runExceptT $ runStateT m st
blockSBVM :: SBVK m => SBVM m a -> SBVM m (a,SBVSt)
blockSBVM m = do
env <- State.get
e <- lift2 $ runSBVM m env
case e of
Left err -> throwError err
Right (x,env') -> return (x,env')
emptySBVSt = SBVSt Map.empty
type SBVK m = (MonadIO m,GenVar Piden m)
type SBVM m = StateT SBVSt (ExceptT JasminError m)
addSBVar :: Position -> Ptype TyInfo -> Piden -> Maybe SBVar -> SBVM Symbolic ()
addSBVar p t n mbv = do
val <- maybe (sbVar p (pprid n) t) return mbv
State.modify $ \env -> env { vars = Map.insert n val (vars env) }
sbVar :: Position -> String -> Ptype TyInfo -> SBVM Symbolic SBVar
sbVar p n TBool = liftM SBBool $ lift2 $ sBool n
sbVar p n (TInt Nothing) = liftM SBInteger $ lift2 $ sInteger n
sbVar p n (TWord 8) = liftM SBWord8 $ lift2 $ sWord8 n
sbVar p n (TWord 16) = liftM SBWord16 $ lift2 $ sWord16 n
sbVar p n (TWord 32) = liftM SBWord32 $ lift2 $ sWord32 n
sbVar p n (TWord 64) = liftM SBWord64 $ lift2 $ sWord64 n
sbVar p n (TWord 128) = liftM SBWord128 $ lift2 $ sWord128 n
sbVar p n (TWord 256) = liftM SBWord256 $ lift2 $ sWord256 n
sbVar p n (TWord 512) = liftM SBWord512 $ lift2 $ sWord512 n
sbVar p n (TArray w sz) = do
sz' <- pexprToSBVInteger sz
case unliteral sz' of
Just szi -> do
vs <- forM [0..szi-1] $ \wi -> do
liftM (literal wi,) $ sbVar p ("n"++show wi) (TWord $ fromEnum w)
return $ SBArr $ list2fun vs
Nothing -> return $ SBArr $ list2fun []
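-- | Encode a finite association list as a symbolic lookup function: a chain
-- of 'ite's comparing the index to each stored key; indices outside the list
-- fall through to the error case.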
list2fun :: [(SInteger,SBVar)] -> (SInteger -> SBVar)
list2fun [] = const $ error "uninitialized array"
list2fun ((x,y):xs) = \a -> ite (x .== a) y (list2fun xs a)
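-- | Look up an array-valued variable and apply its lookup function to the
-- symbolic index.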
readSBArr :: Position -> Piden -> SInteger -> SBVM Symbolic SBVar
readSBArr p n i = do
vs <- State.gets vars
case Map.lookup n vs of
Just (SBArr arr) -> return $ arr i
otherwise -> genError p $ text "readSBArr" <+> ppid n
writeSBArr :: Position -> Piden -> SInteger -> SBVar -> SBVM Symbolic ()
writeSBArr p n (i) (e) = do
State.modify $ \env -> env { vars = Map.update writeArr n (vars env) }
where writeArr (SBArr f) = Just $ SBArr $ \a' -> ite (i .== a') e (f a')
getSBVar :: Position -> Piden -> SBVM Symbolic SBVar
getSBVar p n = do
vs <- State.gets vars
case Map.lookup n vs of
Just x -> return x
otherwise -> genError p $ text "getSBVal" <+> ppid n
data SBVSt = SBVSt
{ vars :: Map Piden SBVar
}
deriving (Generic,Mergeable)
instance (Ord k,Mergeable b) => Mergeable (Map k b) where
symbolicMerge f b m1 m2 = Map.intersectionWith (symbolicMerge f b) m1 m2
instance Mergeable SBVar where
symbolicMerge f b (SBBool v1) (SBBool v2) = SBBool $ symbolicMerge f b v1 v2
symbolicMerge f b (SBInteger v1) (SBInteger v2) = SBInteger $ symbolicMerge f b v1 v2
symbolicMerge f b (SBWord8 v1) (SBWord8 v2) = SBWord8 $ symbolicMerge f b v1 v2
symbolicMerge f b (SBWord16 v1) (SBWord16 v2) = SBWord16 $ symbolicMerge f b v1 v2
symbolicMerge f b (SBWord32 v1) (SBWord32 v2) = SBWord32 $ symbolicMerge f b v1 v2
symbolicMerge f b (SBWord64 v1) (SBWord64 v2) = SBWord64 $ symbolicMerge f b v1 v2
symbolicMerge f b (SBWord128 v1) (SBWord128 v2) = SBWord128 $ symbolicMerge f b v1 v2
symbolicMerge f b (SBWord256 v1) (SBWord256 v2) = SBWord256 $ symbolicMerge f b v1 v2
symbolicMerge f b (SBWord512 v1) (SBWord512 v2) = SBWord512 $ symbolicMerge f b v1 v2
symbolicMerge _ b (SBArr g) (SBArr h) = SBArr $ \x -> ite b (g x) (h x)
sbvEqual :: SBVar -> SBVar -> SBool
sbvEqual = undefined
data SBVar where
SBBool :: SBool -> SBVar
SBInteger :: SInteger -> SBVar
SBWord8 :: SWord8 -> SBVar
SBWord16 :: SWord16 -> SBVar
SBWord32 :: SWord32 -> SBVar
SBWord64 :: SWord64 -> SBVar
SBWord128 :: SWord128 -> SBVar
SBWord256 :: SWord256 -> SBVar
SBWord512 :: SWord512 -> SBVar
SBArr :: (SInteger -> SBVar) -> SBVar
deriving (Generic)
instance Show SBVar where
show = pprid
instance Monad m => PP m SBVar where
pp (SBBool b) = do
return $ parens (text "SBBool" <+> text "*")
pp (SBInteger b) = do
return $ parens (text "SBInteger" <+> text "*")
pp (SBWord8 b) = do
return $ parens (text "SBWord8" <+> text "*")
pp (SBWord16 b) = do
return $ parens (text "SBWord16" <+> text "*")
pp (SBWord32 b) = do
return $ parens (text "SBWord32" <+> text "*")
pp (SBWord64 b) = do
return $ parens (text "SBWord64" <+> text "*")
pp (SBWord128 b) = do
return $ parens (text "SBWord128" <+> text "*")
pp (SBWord256 b) = do
return $ parens (text "SBWord256" <+> text "*")
pp (SBWord512 b) = do
return $ parens (text "SBWord512" <+> text "*")
pp (SBArr b) = do
return $ parens (text "SBArr" <+> text "*")
instance GenVar Piden Symbolic where
mkVar str = liftIO $ mkVar str
newVar x = liftIO $ newVar x
sbvNum2 :: (forall a . Num a => a -> a -> a) -> SBVar -> SBVar -> SBVar
sbvNum2 f (SBInteger i1) (SBInteger i2) = SBInteger $ f i1 i2
sbvNum2 f (SBWord8 i1) (SBWord8 i2) = SBWord8 $ f i1 i2
sbvNum2 f (SBWord16 i1) (SBWord16 i2) = SBWord16 $ f i1 i2
sbvNum2 f (SBWord32 i1) (SBWord32 i2) = SBWord32 $ f i1 i2
sbvNum2 f (SBWord64 i1) (SBWord64 i2) = SBWord64 $ f i1 i2
sbvNum2 f (SBWord128 i1) (SBWord128 i2) = SBWord128 $ f i1 i2
sbvNum2 f (SBWord256 i1) (SBWord256 i2) = SBWord256 $ f i1 i2
sbvNum2 f (SBWord512 i1) (SBWord512 i2) = SBWord512 $ f i1 i2
sbvNum2 f x y = error $ "sbvNum2 " ++ show x ++ " " ++ show y
sbvDivisible2 :: (forall a . SDivisible a => a -> a -> a) -> SBVar -> SBVar -> SBVar
sbvDivisible2 f (SBWord8 i1) (SBWord8 i2) = SBWord8 $ f i1 i2
sbvDivisible2 f (SBWord16 i1) (SBWord16 i2) = SBWord16 $ f i1 i2
sbvDivisible2 f (SBWord32 i1) (SBWord32 i2) = SBWord32 $ f i1 i2
sbvDivisible2 f (SBWord64 i1) (SBWord64 i2) = SBWord64 $ f i1 i2
sbvDivisible2 f (SBWord128 i1) (SBWord128 i2) = SBWord128 $ f i1 i2
sbvDivisible2 f (SBWord256 i1) (SBWord256 i2) = SBWord256 $ f i1 i2
sbvDivisible2 f (SBWord512 i1) (SBWord512 i2) = SBWord512 $ f i1 i2
sbvDivisible2 f (SBWord8 i1) (SBInteger i2) = SBWord8 $ f i1 (sFromIntegral i2)
sbvDivisible2 f (SBWord16 i1) (SBInteger i2) = SBWord16 $ f i1 (sFromIntegral i2)
sbvDivisible2 f (SBWord32 i1) (SBInteger i2) = SBWord32 $ f i1 (sFromIntegral i2)
sbvDivisible2 f (SBWord64 i1) (SBInteger i2) = SBWord64 $ f i1 (sFromIntegral i2)
sbvDivisible2 f (SBWord128 i1) (SBInteger i2) = SBWord128 $ f i1 (sFromIntegral i2)
sbvDivisible2 f (SBWord256 i1) (SBInteger i2) = SBWord256 $ f i1 (sFromIntegral i2)
sbvDivisible2 f (SBWord512 i1) (SBInteger i2) = SBWord512 $ f i1 (sFromIntegral i2)
sbvDivisible2 f x y = error $ "sbvDivisible2 " ++ show x ++ " " ++ show y
sbvIntegral2 :: (forall a b. (SIntegral a,SIntegral b) => SBV a -> SBV b -> SBV a) -> SBVar -> SBVar -> SBVar
sbvIntegral2 f (SBWord8 i1) (SBWord8 i2) = SBWord8 $ f i1 i2
sbvIntegral2 f (SBWord16 i1) (SBWord16 i2) = SBWord16 $ f i1 i2
sbvIntegral2 f (SBWord32 i1) (SBWord32 i2) = SBWord32 $ f i1 i2
sbvIntegral2 f (SBWord64 i1) (SBWord64 i2) = SBWord64 $ f i1 i2
sbvIntegral2 f (SBWord128 i1) (SBWord128 i2) = SBWord128 $ f i1 i2
sbvIntegral2 f (SBWord256 i1) (SBWord256 i2) = SBWord256 $ f i1 i2
sbvIntegral2 f (SBWord512 i1) (SBWord512 i2) = SBWord512 $ f i1 i2
sbvIntegral2 f (SBWord8 i1) (SBInteger i2) = SBWord8 $ f i1 i2
sbvIntegral2 f (SBWord16 i1) (SBInteger i2) = SBWord16 $ f i1 i2
sbvIntegral2 f (SBWord32 i1) (SBInteger i2) = SBWord32 $ f i1 i2
sbvIntegral2 f (SBWord64 i1) (SBInteger i2) = SBWord64 $ f i1 i2
sbvIntegral2 f (SBWord128 i1) (SBInteger i2) = SBWord128 $ f i1 i2
sbvIntegral2 f (SBWord256 i1) (SBInteger i2) = SBWord256 $ f i1 i2
sbvIntegral2 f (SBWord512 i1) (SBInteger i2) = SBWord512 $ f i1 i2
sbvIntegral2 f x y = error $ "sbvIntegral2 " ++ show x ++ " " ++ show y
sbvBoolean1 :: (forall a . Boolean a => a -> a) -> SBVar -> SBVar
sbvBoolean1 f (SBBool x) = SBBool $ f x
sbvBoolean2 :: (forall a . Boolean a => a -> a -> a) -> SBVar -> SBVar -> SBVar
sbvBoolean2 f (SBBool x) (SBBool y) = SBBool (f x y)
sbvEqSymbolic2 :: (forall a. EqSymbolic a => a -> a -> SBool) -> SBVar -> SBVar -> SBool
sbvEqSymbolic2 f (SBBool i1) (SBBool i2) = f i1 i2
sbvEqSymbolic2 f (SBInteger i1) (SBInteger i2) = f i1 i2
sbvEqSymbolic2 f (SBWord8 i1) (SBWord8 i2) = f i1 i2
sbvEqSymbolic2 f (SBWord16 i1) (SBWord16 i2) = f i1 i2
sbvEqSymbolic2 f (SBWord32 i1) (SBWord32 i2) = f i1 i2
sbvEqSymbolic2 f (SBWord64 i1) (SBWord64 i2) = f i1 i2
sbvEqSymbolic2 f (SBWord128 i1) (SBWord128 i2) = f i1 i2
sbvEqSymbolic2 f (SBWord256 i1) (SBWord256 i2) = f i1 i2
sbvEqSymbolic2 f (SBWord512 i1) (SBWord512 i2) = f i1 i2
sbvEqSymbolic2 f x y = error $ "sbvEqSymbolic2 " ++ show x ++ " " ++ show y
sbvOrdSymbolic2 :: (forall a. OrdSymbolic a => a -> a -> SBool) -> SBVar -> SBVar -> SBool
sbvOrdSymbolic2 f (SBBool i1) (SBBool i2) = f i1 i2
sbvOrdSymbolic2 f (SBInteger i1) (SBInteger i2) = f i1 i2
sbvOrdSymbolic2 f (SBWord8 i1) (SBWord8 i2) = f i1 i2
sbvOrdSymbolic2 f (SBWord16 i1) (SBWord16 i2) = f i1 i2
sbvOrdSymbolic2 f (SBWord32 i1) (SBWord32 i2) = f i1 i2
sbvOrdSymbolic2 f (SBWord64 i1) (SBWord64 i2) = f i1 i2
sbvOrdSymbolic2 f (SBWord128 i1) (SBWord128 i2) = f i1 i2
sbvOrdSymbolic2 f (SBWord256 i1) (SBWord256 i2) = f i1 i2
sbvOrdSymbolic2 f (SBWord512 i1) (SBWord512 i2) = f i1 i2
sbvOrdSymbolic2 f x y = error $ "sbvOrdSymbolic2 " ++ show x ++ " " ++ show y
sbvBits2 :: (forall a. Bits a => a -> a -> a) -> SBVar -> SBVar -> SBVar
sbvBits2 f (SBWord8 i1) (SBWord8 i2) = SBWord8 $ f i1 i2
sbvBits2 f (SBWord16 i1) (SBWord16 i2) = SBWord16 $ f i1 i2
sbvBits2 f (SBWord32 i1) (SBWord32 i2) = SBWord32 $ f i1 i2
sbvBits2 f (SBWord64 i1) (SBWord64 i2) = SBWord64 $ f i1 i2
sbvBits2 f (SBWord128 i1) (SBWord128 i2) = SBWord128 $ f i1 i2
sbvBits2 f (SBWord256 i1) (SBWord256 i2) = SBWord256 $ f i1 i2
sbvBits2 f (SBWord512 i1) (SBWord512 i2) = SBWord512 $ f i1 i2
sbvBits2 f x y = error $ "sbvBits2 " ++ show x ++ " " ++ show y
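-- A usage sketch, not part of the original module: each dispatcher applies a
-- generic SBV operation to the dynamically tagged 'SBVar' wrappers, so a
-- caller can write, e.g.,
--
--   sbvBits2 (.&.) (SBWord8 a) (SBWord8 b)   -- evaluates to SBWord8 (a .&. b)
--
-- while mismatched tags fall through to the error case above.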
|
hpacheco/jasminv
|
src/Language/Jasmin/Transformation/SBV.hs
|
gpl-3.0
| 20,690
| 1
| 22
| 4,500
| 9,188
| 4,481
| 4,707
| -1
| -1
|
-----------------------------------------------------------------------------
-- |
-- Module : Hie.Language.Haskell.Exts
-- Copyright : (c) Niklas Broberg 2004-2009
-- License : BSD-style (see the file LICENSE.txt)
--
-- Maintainer : Niklas Broberg, d00nibro@chalmers.se
-- Stability : stable
-- Portability : portable
--
-- An umbrella module for the various functionality
-- of the package. Also provides some convenient
-- functionality for dealing directly with source files.
--
-----------------------------------------------------------------------------
module Hie.Language.Haskell.Exts.Annotated (
-- * Re-exported modules
module Hie.Language.Haskell.Exts.Annotated.Syntax
, module Hie.Language.Haskell.Exts.Annotated.Build
, module Hie.Language.Haskell.Exts.Parser
, module Hie.Language.Haskell.Exts.Pretty
, module Hie.Language.Haskell.Exts.Annotated.Fixity
, module Hie.Language.Haskell.Exts.Annotated.ExactPrint
, module Hie.Language.Haskell.Exts.SrcLoc
, module Hie.Language.Haskell.Exts.Comments
, module Hie.Language.Haskell.Exts.Extension
-- * Parsing of Haskell source files
, parseFile
, parseFileWithMode
, parseFileWithExts
, parseFileWithComments
, parseFileContents
, parseFileContentsWithMode
, parseFileContentsWithExts
, parseFileContentsWithComments
-- * Parsing of Haskell source elements,
, parseModule, parseModuleWithMode, parseModuleWithComments
, parseExp, parseExpWithMode, parseExpWithComments
, parseStmt, parseStmtWithMode, parseStmtWithComments
, parsePat, parsePatWithMode, parsePatWithComments
, parseDecl, parseDeclWithMode, parseDeclWithComments
, parseType, parseTypeWithMode, parseTypeWithComments
-- * Read extensions declared in LANGUAGE pragmas
, readExtensions
) where
import Hie.Language.Haskell.Exts.Annotated.Build
import Hie.Language.Haskell.Exts.Annotated.Syntax
import Hie.Language.Haskell.Exts.Parser ( Parseable(..), ParseResult(..), fromParseResult, ParseMode(..), defaultParseMode )
import Hie.Language.Haskell.Exts.Pretty
import Hie.Language.Haskell.Exts.Annotated.Fixity
import Hie.Language.Haskell.Exts.Annotated.ExactPrint
import Hie.Language.Haskell.Exts.SrcLoc
import Hie.Language.Haskell.Exts.Extension
import Hie.Language.Haskell.Exts.Comments
import Hie.Language.Haskell.Exts.InternalParser
import Data.List
import Language.Preprocessor.Unlit
-- | Parse a source file on disk, using the default parse mode.
parseFile :: FilePath -> IO (ParseResult (Module SrcSpanInfo))
parseFile fp = parseFileWithMode (defaultParseMode { parseFilename = fp }) fp
-- | Parse a source file on disk, with an extra set of extensions to know about
-- on top of what the file itself declares.
parseFileWithExts :: [Extension] -> FilePath -> IO (ParseResult (Module SrcSpanInfo))
parseFileWithExts exts fp = parseFileWithMode (defaultParseMode { extensions = exts, parseFilename = fp }) fp
-- | Parse a source file on disk, supplying a custom parse mode.
parseFileWithMode :: ParseMode -> FilePath -> IO (ParseResult (Module SrcSpanInfo))
parseFileWithMode p fp = readFile fp >>= (return . parseFileContentsWithMode p)
parseFileWithComments :: ParseMode -> FilePath -> IO (ParseResult (Module SrcSpanInfo, [Comment]))
parseFileWithComments p fp = readFile fp >>= (return . parseFileContentsWithComments p)
-- | Parse a source file from a string using the default parse mode.
parseFileContents :: String -> ParseResult (Module SrcSpanInfo)
parseFileContents = parseFileContentsWithMode defaultParseMode
-- | Parse a source file from a string, with an extra set of extensions to know about
-- on top of what the file itself declares.
parseFileContentsWithExts :: [Extension] -> String -> ParseResult (Module SrcSpanInfo)
parseFileContentsWithExts exts = parseFileContentsWithMode (defaultParseMode { extensions = exts })
-- | Parse a source file from a string using a custom parse mode.
parseFileContentsWithMode :: ParseMode -> String -> ParseResult (Module SrcSpanInfo)
parseFileContentsWithMode p@(ParseMode fn exts ign _ _) rawStr =
let md = delit fn $ ppContents rawStr
allExts = -- impliesExts $
case (ign, readExtensions md) of
(False,Just es) -> exts ++ es
_ -> exts
in parseModuleWithMode (p { extensions = allExts }) md
parseFileContentsWithComments :: ParseMode -> String -> ParseResult (Module SrcSpanInfo, [Comment])
parseFileContentsWithComments p@(ParseMode fn exts ign _ _) rawStr =
let md = delit fn $ ppContents rawStr
allExts = impliesExts $ case (ign, readExtensions md) of
(False,Just es) -> exts ++ es
_ -> exts
in parseModuleWithComments (p { extensions = allExts }) md
-- | Gather the extensions declared in LANGUAGE pragmas
-- at the top of the file. Returns 'Nothing' if the
-- parse of the pragmas fails.
readExtensions :: String -> Maybe [Extension]
readExtensions str = case getTopPragmas str of
ParseOk pgms -> Just (concatMap getExts pgms)
_ -> Nothing
where getExts :: ModulePragma l -> [Extension]
getExts (LanguagePragma _ ns) = map readExt ns
getExts _ = []
readExt (Ident _ e) = classifyExtension e
ppContents :: String -> String
ppContents = unlines . f . lines
where f (('#':_):rest) = rest
f x = x
delit :: String -> String -> String
delit fn = if ".lhs" `isSuffixOf` fn then unlit fn else id
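-- A usage sketch, not part of the original module: parse a module held in a
-- String and scrutinise the 'ParseResult'.
--
--   case parseFileContents "module M where\nx = 1\n" of
--     ParseOk ast         -> ...   -- ast :: Module SrcSpanInfo
--     ParseFailed loc err -> ...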
|
monsanto/hie
|
Hie/Language/Haskell/Exts/Annotated.hs
|
gpl-3.0
| 5,661
| 0
| 14
| 1,128
| 1,115
| 654
| 461
| 78
| 3
|
-----------------------------------------------------------------------------
-- |
-- Module : Hoodle.ModelAction.Adjustment
-- Copyright : (c) 2011, 2012 Ian-Woo Kim
--
-- License : BSD3
-- Maintainer : Ian-Woo Kim <ianwookim@gmail.com>
-- Stability : experimental
-- Portability : GHC
--
-----------------------------------------------------------------------------
module Hoodle.ModelAction.Adjustment where
-- from other package
import Graphics.UI.Gtk
-- from hoodle-platform
import Data.Hoodle.BBox (BBox(..))
import Data.Hoodle.Simple (Dimension(..))
-- from this package
import Hoodle.Type.PageArrangement
import Hoodle.View.Coordinate
-- | adjust values, upper limit and page size according to canvas geometry
adjustScrollbarWithGeometry :: CanvasGeometry
-> ((Adjustment,Maybe (ConnectId Adjustment))
,(Adjustment,Maybe (ConnectId Adjustment)))
-> IO ()
adjustScrollbarWithGeometry geometry ((hadj,mconnidh),(vadj,mconnidv)) = do
let DesktopDimension (Dim w h) = desktopDim geometry
ViewPortBBox (BBox (x0,y0) (x1,y1)) = canvasViewPort geometry
xsize = x1-x0
ysize = y1-y0
maybe (return ()) signalBlock mconnidh
maybe (return ()) signalBlock mconnidv
adjustmentSetUpper hadj w
adjustmentSetUpper vadj h
adjustmentSetValue hadj x0
adjustmentSetValue vadj y0
adjustmentSetPageSize hadj xsize -- (min xsize w)
adjustmentSetPageSize vadj ysize -- (min ysize h)
adjustmentSetPageIncrement hadj (xsize*0.9)
adjustmentSetPageIncrement vadj (ysize*0.9)
maybe (return ()) signalUnblock mconnidh
maybe (return ()) signalUnblock mconnidv
-- |
setAdjustments :: ((Adjustment,Maybe (ConnectId Adjustment))
,(Adjustment,Maybe (ConnectId Adjustment)))
-> (Double,Double)
-> (Double,Double)
-> (Double,Double)
-> (Double,Double)
-> IO ()
setAdjustments ((hadj,mconnidh),(vadj,mconnidv))
(upperx,uppery) (lowerx,lowery)
(valuex,valuey) (pagex,pagey) = do
maybe (return ()) signalBlock mconnidh
maybe (return ()) signalBlock mconnidv
adjustmentSetUpper hadj upperx
adjustmentSetUpper vadj uppery
adjustmentSetLower hadj lowerx
adjustmentSetLower vadj lowery
adjustmentSetValue hadj valuex
adjustmentSetValue vadj valuey
adjustmentSetPageSize hadj pagex
adjustmentSetPageSize vadj pagey
maybe (return ()) signalUnblock mconnidh
maybe (return ()) signalUnblock mconnidv
|
wavewave/hoodle-core
|
src/Hoodle/ModelAction/Adjustment.hs
|
gpl-3.0
| 2,641
| 0
| 13
| 621
| 663
| 345
| 318
| 49
| 1
|
{-# LANGUAGE MultiParamTypeClasses, TypeSynonymInstances, FlexibleInstances #-}
module While.Domain.Sign.Domain where
import Test.QuickCheck hiding (Positive)
import Abstat.Interface.GaloisConnection
import Abstat.Common.FlatDomain
data Sign
= Positive
| Zero
| Negative
deriving (Eq,Show,Ord)
type Domain = FlatDomain Sign
instance GaloisConnection Integer Domain where
concretization _ = error "not implemented"
singleAbstraction n | n == 0 = Val Zero
singleAbstraction n | n < 0 = Val Negative
singleAbstraction _ = Val Positive
instance Arbitrary Sign where
arbitrary = elements [Positive, Zero, Negative]
|
fpoli/abstat
|
src/While/Domain/Sign/Domain.hs
|
gpl-3.0
| 652
| 0
| 9
| 118
| 168
| 90
| 78
| 18
| 0
|
----------------------------------------------------------------------
-- |
-- Module : Text.TeX.Filter.Plain
-- Copyright : 2015-2017 Mathias Schenner,
-- 2015-2016 Language Science Press.
-- License : GPL-3
--
-- Maintainer : mschenner.dev@gmail.com
-- Stability : experimental
-- Portability : GHC
--
-- Data maps containing symbols and diacritics defined in the
-- Plain TeX format.
----------------------------------------------------------------------
module Text.TeX.Filter.Plain
( symbols
, diacritics
, dbldiacritics
) where
import Data.Map.Strict (Map, fromList)
-- | Symbols defined in the Plain TeX format.
--
-- We do not distinguish math mode from text mode at this point (yet).
symbols :: Map String String
symbols = fromList $
[ -- text symbols
("%", "%") -- PERCENT SIGN
, ("&", "&") -- AMPERSAND
, ("#", "#") -- NUMBER SIGN
, ("$", "$") -- DOLLAR SIGN
, ("_", "_") -- LOW LINE
, ("i", "\x0131") -- LATIN SMALL LETTER DOTLESS I
, ("j", "\x0237") -- LATIN SMALL LETTER DOTLESS J
, ("ss", "\x00DF") -- LATIN SMALL LETTER SHARP S
, ("aa", "\x00E5") -- LATIN SMALL LETTER A WITH RING ABOVE
, ("AA", "\x00C5") -- LATIN CAPITAL LETTER A WITH RING ABOVE
, ("ae", "\x00E6") -- LATIN SMALL LETTER AE
, ("AE", "\x00C6") -- LATIN CAPITAL LETTER AE
, ("oe", "\x0153") -- LATIN SMALL LIGATURE OE
, ("OE", "\x0152") -- LATIN CAPITAL LIGATURE OE
, ("o", "\x00F8") -- LATIN SMALL LETTER O WITH STROKE
, ("O", "\x00D8") -- LATIN CAPITAL LETTER O WITH STROKE
, ("l", "\x0142") -- LATIN SMALL LETTER L WITH STROKE
, ("L", "\x0141") -- LATIN CAPITAL LETTER L WITH STROKE
, ("dag", "\x2020") -- DAGGER
, ("ddag", "\x2021") -- DOUBLE DAGGER
, ("S", "\x00A7") -- SECTION SIGN
, ("P", "\x00B6") -- PILCROW SIGN
, ("Orb", "\x25CB") -- WHITE CIRCLE
, ("copyright", "\x00A9") -- COPYRIGHT SIGN
, ("dots", "\x2026") -- HORIZONTAL ELLIPSIS
-- text symbols, multiple characters
, ("TeX", "TeX")
-- math symbols: greek letters
, ("alpha", "\x03B1") -- GREEK SMALL LETTER ALPHA
, ("beta", "\x03B2") -- GREEK SMALL LETTER BETA
, ("gamma", "\x03B3") -- GREEK SMALL LETTER GAMMA
, ("delta", "\x03B4") -- GREEK SMALL LETTER DELTA
, ("epsilon", "\x03F5") -- GREEK LUNATE EPSILON SYMBOL
, ("varepsilon", "\x03B5") -- GREEK SMALL LETTER EPSILON
, ("zeta", "\x03B6") -- GREEK SMALL LETTER ZETA
, ("eta", "\x03B7") -- GREEK SMALL LETTER ETA
, ("theta", "\x03B8") -- GREEK SMALL LETTER THETA
, ("vartheta", "\x03D1") -- GREEK THETA SYMBOL
, ("iota", "\x03B9") -- GREEK SMALL LETTER IOTA
, ("kappa", "\x03BA") -- GREEK SMALL LETTER KAPPA
, ("lambda", "\x03BB") -- GREEK SMALL LETTER LAMDA
, ("mu", "\x03BC") -- GREEK SMALL LETTER MU
, ("nu", "\x03BD") -- GREEK SMALL LETTER NU
, ("xi", "\x03BE") -- GREEK SMALL LETTER XI
, ("pi", "\x03C0") -- GREEK SMALL LETTER PI
, ("varpi", "\x03D6") -- GREEK PI SYMBOL
, ("rho", "\x03C1") -- GREEK SMALL LETTER RHO
, ("varrho", "\x03F1") -- GREEK RHO SYMBOL
, ("sigma", "\x03C3") -- GREEK SMALL LETTER SIGMA
, ("varsigma", "\x03C2") -- GREEK SMALL LETTER FINAL SIGMA
, ("tau", "\x03C4") -- GREEK SMALL LETTER TAU
, ("upsilon", "\x03C5") -- GREEK SMALL LETTER UPSILON
, ("phi", "\x03D5") -- GREEK PHI SYMBOL
, ("varphi", "\x03C6") -- GREEK SMALL LETTER PHI
, ("chi", "\x03C7") -- GREEK SMALL LETTER CHI
, ("psi", "\x03C8") -- GREEK SMALL LETTER PSI
, ("omega", "\x03C9") -- GREEK SMALL LETTER OMEGA
, ("Gamma", "\x0393") -- GREEK CAPITAL LETTER GAMMA
, ("Delta", "\x0394") -- GREEK CAPITAL LETTER DELTA
, ("Theta", "\x0398") -- GREEK CAPITAL LETTER THETA
, ("Lambda", "\x039B") -- GREEK CAPITAL LETTER LAMDA
, ("Xi", "\x039E") -- GREEK CAPITAL LETTER XI
, ("Pi", "\x03A0") -- GREEK CAPITAL LETTER PI
, ("Sigma", "\x03A3") -- GREEK CAPITAL LETTER SIGMA
, ("Upsilon", "\x03A5") -- GREEK CAPITAL LETTER UPSILON
, ("Phi", "\x03A6") -- GREEK CAPITAL LETTER PHI
, ("Psi", "\x03A8") -- GREEK CAPITAL LETTER PSI
, ("Omega", "\x03A9") -- GREEK CAPITAL LETTER OMEGA
-- math symbols
, ("aleph", "\x2135") -- ALEF SYMBOL
, ("imath", "\x0131") -- LATIN SMALL LETTER DOTLESS I
, ("jmath", "\x0237") -- LATIN SMALL LETTER DOTLESS J
, ("ell", "\x2113") -- SCRIPT SMALL L
, ("hbar", "\x210F") -- PLANCK CONSTANT OVER TWO PI
, ("surd", "\x221A") -- SQUARE ROOT
, ("angle", "\x2220") -- ANGLE
, ("wp", "\x2118") -- SCRIPT CAPITAL P
, ("Re", "\x211C") -- BLACK-LETTER CAPITAL R
, ("Im", "\x2111") -- BLACK-LETTER CAPITAL I
, ("partial", "\x2202") -- PARTIAL DIFFERENTIAL
, ("infty", "\x221E") -- INFINITY
, ("prime", "\x2032") -- PRIME
, ("emptyset", "\x2205") -- EMPTY SET
, ("nabla", "\x2207") -- NABLA
, ("top", "\x22A4") -- DOWN TACK
, ("bot", "\x22A5") -- UP TACK
, ("forall", "\x2200") -- FOR ALL
, ("exists", "\x2203") -- THERE EXISTS
, ("neg", "\x00AC") -- NOT SIGN
, ("lnot", "\x00AC") -- NOT SIGN
, ("coprod", "\x2210") -- N-ARY COPRODUCT
, ("bigvee", "\x22C1") -- N-ARY LOGICAL OR
, ("bigwedge", "\x22C0") -- N-ARY LOGICAL AND
, ("biguplus", "\x228E") -- MULTISET UNION
, ("bigcap", "\x22C2") -- N-ARY INTERSECTION
, ("bigcup", "\x22C3") -- N-ARY UNION
, ("int", "\x222B") -- INTEGRAL
, ("intop", "\x222B") -- INTEGRAL
, ("prod", "\x220F") -- N-ARY PRODUCT
, ("sum", "\x2211") -- N-ARY SUMMATION
, ("bigotimes", "\x2297") -- CIRCLED TIMES
, ("bigoplus", "\x2295") -- CIRCLED PLUS
, ("bigodot", "\x2299") -- CIRCLED DOT OPERATOR
, ("oint", "\x222E") -- CONTOUR INTEGRAL
, ("ointop", "\x222E") -- CONTOUR INTEGRAL
, ("bigsqcup", "\x2294") -- SQUARE CUP
, ("smallint", "\x222B") -- INTEGRAL
, ("wedge", "\x2227") -- LOGICAL AND
, ("land", "\x2227") -- LOGICAL AND
, ("vee", "\x2228") -- LOGICAL OR
, ("lor", "\x2228") -- LOGICAL OR
, ("cap", "\x2229") -- INTERSECTION
, ("cup", "\x222A") -- UNION
, ("dagger", "\x2020") -- DAGGER
, ("ddagger", "\x2021") -- DOUBLE DAGGER
, ("sqcap", "\x2293") -- SQUARE CAP
, ("sqcup", "\x2294") -- SQUARE CUP
, ("uplus", "\x228E") -- MULTISET UNION
, ("amalg", "\x2210") -- N-ARY COPRODUCT
, ("diamond", "\x22C4") -- DIAMOND OPERATOR
, ("bullet", "\x2219") -- BULLET OPERATOR
, ("wr", "\x2240") -- WREATH PRODUCT
, ("div", "\x00F7") -- DIVISION SIGN
, ("odot", "\x2299") -- CIRCLED DOT OPERATOR
, ("oslash", "\x2298") -- CIRCLED DIVISION SLASH
, ("otimes", "\x2297") -- CIRCLED TIMES
, ("ominus", "\x2296") -- CIRCLED MINUS
, ("oplus", "\x2295") -- CIRCLED PLUS
, ("mp", "\x2213") -- MINUS-OR-PLUS SIGN
, ("pm", "\x00B1") -- PLUS-MINUS SIGN
, ("circ", "\x2218") -- RING OPERATOR
, ("bigcirc", "\x25CB") -- WHITE CIRCLE
, ("setminus", "\x2216") -- SET MINUS
, ("cdot", "\x22C5") -- DOT OPERATOR
, ("ast", "\x2217") -- ASTERISK OPERATOR
, ("times", "\x00D7") -- MULTIPLICATION SIGN
, ("star", "\x22C6") -- STAR OPERATOR
, ("propto", "\x221D") -- PROPORTIONAL TO
, ("sqsubseteq", "\x2291") -- SQUARE IMAGE OF OR EQUAL TO
, ("sqsupseteq", "\x2292") -- SQUARE ORIGINAL OF OR EQUAL TO
, ("parallel", "\x2225") -- PARALLEL TO
, ("mid", "\x2223") -- DIVIDES
, ("dashv", "\x22A3") -- LEFT TACK
, ("vdash", "\x22A2") -- RIGHT TACK
, ("nearrow", "\x2197") -- NORTH EAST ARROW
, ("searrow", "\x2198") -- SOUTH EAST ARROW
, ("nwarrow", "\x2196") -- NORTH WEST ARROW
, ("swarrow", "\x2199") -- SOUTH WEST ARROW
, ("neq", "\x2260") -- NOT EQUAL TO
, ("ne", "\x2260") -- NOT EQUAL TO
, ("leq", "\x2264") -- LESS-THAN OR EQUAL TO
, ("le", "\x2264") -- LESS-THAN OR EQUAL TO
, ("geq", "\x2265") -- GREATER-THAN OR EQUAL TO
, ("ge", "\x2265") -- GREATER-THAN OR EQUAL TO
, ("succ", "\x227B") -- SUCCEEDS
, ("prec", "\x227A") -- PRECEDES
, ("approx", "\x2248") -- ALMOST EQUAL TO
, ("succeq", "\x2AB0") -- SUCCEEDS ABOVE SINGLE-LINE EQUALS SIGN
, ("preceq", "\x2AAF") -- PRECEDES ABOVE SINGLE-LINE EQUALS SIGN
, ("supset", "\x2283") -- SUPERSET OF
, ("subset", "\x2282") -- SUBSET OF
, ("supseteq", "\x2287") -- SUPERSET OF OR EQUAL TO
, ("subseteq", "\x2286") -- SUBSET OF OR EQUAL TO
, ("in", "\x2208") -- ELEMENT OF
, ("ni", "\x220B") -- CONTAINS AS MEMBER
, ("owns", "\x220B") -- CONTAINS AS MEMBER
, ("gg", "\x226B") -- MUCH GREATER-THAN
, ("ll", "\x226A") -- MUCH LESS-THAN
, ("sim", "\x223C") -- TILDE OPERATOR
, ("simeq", "\x2243") -- ASYMPTOTICALLY EQUAL TO
, ("perp", "\x27C2") -- PERPENDICULAR
, ("equiv", "\x2261") -- IDENTICAL TO
, ("asymp", "\x224D") -- EQUIVALENT TO
, ("smile", "\x2323") -- SMILE
, ("frown", "\x2322") -- FROWN
, ("bowtie", "\x22C8") -- BOWTIE
, ("models", "\x22A7") -- MODELS
, ("iff", "\x21D4") -- LEFT RIGHT DOUBLE ARROW
, ("ldotp", "\x002E") -- FULL STOP
, ("cdotp", "\x22C5") -- DOT OPERATOR
, ("colon", "\x003A") -- COLON
, ("ldots", "\x2026") -- HORIZONTAL ELLIPSIS
, ("cdots", "\x22EF") -- MIDLINE HORIZONTAL ELLIPSIS
, ("vdots", "\x22EE") -- VERTICAL ELLIPSIS
, ("ddots", "\x22F1") -- DOWN RIGHT DIAGONAL ELLIPSIS
, ("lgroup", "\x0028") -- LEFT PARENTHESIS
, ("rgroup", "\x0029") -- RIGHT PARENTHESIS
, ("{", "\x007B") -- LEFT CURLY BRACKET
, ("}", "\x007D") -- RIGHT CURLY BRACKET
, ("lbrace", "\x007B") -- LEFT CURLY BRACKET
, ("rbrace", "\x007D") -- RIGHT CURLY BRACKET
, ("langle", "\x27E8") -- MATHEMATICAL LEFT ANGLE BRACKET
, ("rangle", "\x27E9") -- MATHEMATICAL RIGHT ANGLE BRACKET
, ("lceil", "\x2308") -- LEFT CEILING
, ("rceil", "\x2309") -- RIGHT CEILING
, ("lfloor", "\x230A") -- LEFT FLOOR
, ("rfloor", "\x230B") -- RIGHT FLOOR
, ("lmoustache", "\x23B0") -- UPPER LEFT OR LOWER RIGHT CURLY BRACKET SECTION
, ("rmoustache", "\x23B1") -- UPPER RIGHT OR LOWER LEFT CURLY BRACKET SECTION
, ("arrowvert", "\x007C") -- VERTICAL LINE
, ("Arrowvert", "\x2225") -- PARALLEL TO
, ("bracevert", "\x007C") -- VERTICAL LINE
, ("Vert", "\x2225") -- PARALLEL TO
, ("vert", "\x007C") -- VERTICAL LINE
, ("backslash", "\x005C") -- REVERSE SOLIDUS
, ("sqrt", "\x221A") -- SQUARE ROOT
, ("cong", "\x2245") -- APPROXIMATELY EQUAL TO
, ("notin", "\x2209") -- NOT AN ELEMENT OF
, ("doteq", "\x2250") -- APPROACHES THE LIMIT
, ("flat", "\x266D") -- MUSIC FLAT SIGN
, ("natural", "\x266E") -- MUSIC NATURAL SIGN
, ("sharp", "\x266F") -- MUSIC SHARP SIGN
, ("clubsuit", "\x2663") -- BLACK CLUB SUIT
, ("diamondsuit", "\x2662") -- WHITE DIAMOND SUIT
, ("heartsuit", "\x2661") -- WHITE HEART SUIT
, ("spadesuit", "\x2660") -- BLACK SPADE SUIT
, ("triangle", "\x25B3") -- WHITE UP-POINTING TRIANGLE
, ("triangleleft", "\x25C1") -- WHITE LEFT-POINTING TRIANGLE
, ("triangleright", "\x25B7") -- WHITE RIGHT-POINTING TRIANGLE
, ("bigtriangleup", "\x25B3") -- WHITE UP-POINTING TRIANGLE
, ("bigtriangledown", "\x25BD") -- WHITE DOWN-POINTING TRIANGLE
, ("leftarrow", "\x2190") -- LEFTWARDS ARROW
, ("gets", "\x2190") -- LEFTWARDS ARROW
, ("Leftarrow", "\x21D0") -- LEFTWARDS DOUBLE ARROW
, ("longleftarrow", "\x27F5") -- LONG LEFTWARDS ARROW
, ("Longleftarrow", "\x27F8") -- LONG LEFTWARDS DOUBLE ARROW
, ("rightarrow", "\x2192") -- RIGHTWARDS ARROW
, ("to", "\x2192") -- RIGHTWARDS ARROW
, ("mapsto", "\x21A6") -- RIGHTWARDS ARROW FROM BAR
, ("Rightarrow", "\x21D2") -- RIGHTWARDS DOUBLE ARROW
, ("longrightarrow", "\x27F6") -- LONG RIGHTWARDS ARROW
, ("longmapsto", "\x27FC") -- LONG RIGHTWARDS ARROW FROM BAR
, ("Longrightarrow", "\x27F9") -- LONG RIGHTWARDS DOUBLE ARROW
, ("leftrightarrow", "\x2194") -- LEFT RIGHT ARROW
, ("Leftrightarrow", "\x21D4") -- LEFT RIGHT DOUBLE ARROW
, ("longleftrightarrow", "\x27F7") -- LONG LEFT RIGHT ARROW
, ("Longleftrightarrow", "\x27FA") -- LONG LEFT RIGHT DOUBLE ARROW
, ("leftharpoonup", "\x21BC") -- LEFTWARDS HARPOON WITH BARB UPWARDS
, ("leftharpoondown", "\x21BD") -- LEFTWARDS HARPOON WITH BARB DOWNWARDS
, ("rightharpoonup", "\x21C0") -- RIGHTWARDS HARPOON WITH BARB UPWARDS
, ("rightharpoondown", "\x21C1") -- RIGHTWARDS HARPOON WITH BARB DOWNWARDS
, ("rightleftharpoons", "\x21CC") -- RIGHTWARDS HARPOON OVER LEFTWARDS HARPOON
, ("hookleftarrow", "\x21A9") -- LEFTWARDS ARROW WITH HOOK
, ("hookrightarrow", "\x21AA") -- RIGHTWARDS ARROW WITH HOOK
, ("uparrow", "\x2191") -- UPWARDS ARROW
, ("Uparrow", "\x21D1") -- UPWARDS DOUBLE ARROW
, ("downarrow", "\x2193") -- DOWNWARDS ARROW
, ("Downarrow", "\x21D3") -- DOWNWARDS DOUBLE ARROW
, ("updownarrow", "\x2195") -- UP DOWN ARROW
, ("Updownarrow", "\x21D5") -- UP DOWN DOUBLE ARROW
] ++ map (\x -> (x,x)) namedFunctions
namedFunctions :: [String]
namedFunctions =
[ "arccos"
, "arcsin"
, "arctan"
, "arg"
, "cos"
, "cosh"
, "cot"
, "coth"
, "csc"
, "deg"
, "det"
, "dim"
, "exp"
, "gcd"
, "hom"
, "inf"
, "ker"
, "lg"
, "lim"
, "liminf"
, "limsup"
, "ln"
, "log"
, "max"
, "min"
, "Pr"
, "sec"
, "sin"
, "sinh"
, "sup"
, "tan"
, "tanh"
]
-- | Diacritics (accents) defined in the Plain TeX format.
--
-- We do not distinguish math mode from text mode at this point (yet).
diacritics :: Map String String
diacritics = fromList
[ -- diacritics in text mode
("`", "\x0300") -- COMBINING GRAVE ACCENT
, ("'", "\x0301") -- COMBINING ACUTE ACCENT
, ("^", "\x0302") -- COMBINING CIRCUMFLEX ACCENT
, ("~", "\x0303") -- COMBINING TILDE
, ("=", "\x0304") -- COMBINING MACRON
, ("u", "\x0306") -- COMBINING BREVE
, (".", "\x0307") -- COMBINING DOT ABOVE
, ("\"", "\x0308") -- COMBINING DIAERESIS
, ("r", "\x030A") -- COMBINING RING ABOVE
, ("H", "\x030B") -- COMBINING DOUBLE ACUTE ACCENT
, ("v", "\x030C") -- COMBINING CARON
, ("d", "\x0323") -- COMBINING DOT BELOW
, ("c", "\x0327") -- COMBINING CEDILLA
, ("k", "\x0328") -- COMBINING OGONEK
, ("b", "\x0331") -- COMBINING MACRON BELOW
-- diacritics in math mode
, ("grave", "\x0300") -- COMBINING GRAVE ACCENT
, ("acute", "\x0301") -- COMBINING ACUTE ACCENT
, ("hat", "\x0302") -- COMBINING CIRCUMFLEX ACCENT
, ("tilde", "\x0303") -- COMBINING TILDE
, ("bar", "\x0304") -- COMBINING MACRON
, ("breve", "\x0306") -- COMBINING BREVE
, ("dot", "\x0307") -- COMBINING DOT ABOVE
, ("ddot", "\x0308") -- COMBINING DIAERESIS
, ("check", "\x030C") -- COMBINING CARON
, ("vec", "\x20d7") -- COMBINING RIGHT ARROW ABOVE
-- combining symbols in math mode
, ("not", "\x0338") -- COMBINING LONG SOLIDUS OVERLAY
]
-- More relevant tie-like commands:
-- widetilde, widehat,
-- overrightarrow, overleftarrow,
-- overbrace, underbrace.
--
-- | Double diacritics defined in the Plain TeX format.
--
-- Double diacritics (or ties) are diacritics that combine two
-- adjacent base characters.
--
-- We do not distinguish math mode from text mode at this point (yet).
dbldiacritics :: Map String String
dbldiacritics = fromList
[ ("t", "\x0361") -- COMBINING DOUBLE INVERTED BREVE
]
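-- A usage sketch, not part of the original module: callers are expected to
-- resolve TeX control sequences against these maps with ordinary
-- 'Data.Map.Strict.lookup', e.g.
--
-- >>> Data.Map.Strict.lookup "alpha" symbols
-- Just "\945"
--
-- >>> Data.Map.Strict.lookup "acute" diacritics
-- Just "\769"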
|
synsem/texhs
|
src/Text/TeX/Filter/Plain.hs
|
gpl-3.0
| 16,699
| 0
| 9
| 4,779
| 2,997
| 2,090
| 907
| 322
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.Support.RefreshTrustedAdvisorCheck
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Requests a refresh of the Trusted Advisor check that has the specified check
-- ID. Check IDs can be obtained by calling 'DescribeTrustedAdvisorChecks'.
--
-- The response contains a 'TrustedAdvisorCheckRefreshStatus' object, which
-- contains these fields:
--
-- Status. The refresh status of the check: "none", "enqueued", "processing",
-- "success", or "abandoned". MillisUntilNextRefreshable. The amount of time,
-- in milliseconds, until the check is eligible for refresh. CheckId. The
-- unique identifier for the check.
--
-- <http://docs.aws.amazon.com/awssupport/latest/APIReference/API_RefreshTrustedAdvisorCheck.html>
module Network.AWS.Support.RefreshTrustedAdvisorCheck
(
-- * Request
RefreshTrustedAdvisorCheck
-- ** Request constructor
, refreshTrustedAdvisorCheck
-- ** Request lenses
, rtacCheckId
-- * Response
, RefreshTrustedAdvisorCheckResponse
-- ** Response constructor
, refreshTrustedAdvisorCheckResponse
-- ** Response lenses
, rtacrStatus
) where
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.Support.Types
import qualified GHC.Exts
newtype RefreshTrustedAdvisorCheck = RefreshTrustedAdvisorCheck
{ _rtacCheckId :: Text
} deriving (Eq, Ord, Read, Show, Monoid, IsString)
-- | 'RefreshTrustedAdvisorCheck' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'rtacCheckId' @::@ 'Text'
--
refreshTrustedAdvisorCheck :: Text -- ^ 'rtacCheckId'
-> RefreshTrustedAdvisorCheck
refreshTrustedAdvisorCheck p1 = RefreshTrustedAdvisorCheck
{ _rtacCheckId = p1
}
-- | The unique identifier for the Trusted Advisor check.
rtacCheckId :: Lens' RefreshTrustedAdvisorCheck Text
rtacCheckId = lens _rtacCheckId (\s a -> s { _rtacCheckId = a })
newtype RefreshTrustedAdvisorCheckResponse = RefreshTrustedAdvisorCheckResponse
{ _rtacrStatus :: TrustedAdvisorCheckRefreshStatus
} deriving (Eq, Read, Show)
-- | 'RefreshTrustedAdvisorCheckResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'rtacrStatus' @::@ 'TrustedAdvisorCheckRefreshStatus'
--
refreshTrustedAdvisorCheckResponse :: TrustedAdvisorCheckRefreshStatus -- ^ 'rtacrStatus'
-> RefreshTrustedAdvisorCheckResponse
refreshTrustedAdvisorCheckResponse p1 = RefreshTrustedAdvisorCheckResponse
{ _rtacrStatus = p1
}
-- | The current refresh status for a check, including the amount of time until
-- the check is eligible for refresh.
rtacrStatus :: Lens' RefreshTrustedAdvisorCheckResponse TrustedAdvisorCheckRefreshStatus
rtacrStatus = lens _rtacrStatus (\s a -> s { _rtacrStatus = a })
instance ToPath RefreshTrustedAdvisorCheck where
toPath = const "/"
instance ToQuery RefreshTrustedAdvisorCheck where
toQuery = const mempty
instance ToHeaders RefreshTrustedAdvisorCheck
instance ToJSON RefreshTrustedAdvisorCheck where
toJSON RefreshTrustedAdvisorCheck{..} = object
[ "checkId" .= _rtacCheckId
]
instance AWSRequest RefreshTrustedAdvisorCheck where
type Sv RefreshTrustedAdvisorCheck = Support
type Rs RefreshTrustedAdvisorCheck = RefreshTrustedAdvisorCheckResponse
request = post "RefreshTrustedAdvisorCheck"
response = jsonResponse
instance FromJSON RefreshTrustedAdvisorCheckResponse where
parseJSON = withObject "RefreshTrustedAdvisorCheckResponse" $ \o -> RefreshTrustedAdvisorCheckResponse
<$> o .: "status"
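-- A usage sketch, hedged and not part of the generated module: the check ID
-- comes from a prior 'DescribeTrustedAdvisorChecks' call, the request is built
-- with the smart constructor, and 'rtacrStatus' is read off the response:
--
--   refreshTrustedAdvisorCheck someCheckId   -- someCheckId :: Text (assumed)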
|
dysinger/amazonka
|
amazonka-support/gen/Network/AWS/Support/RefreshTrustedAdvisorCheck.hs
|
mpl-2.0
| 4,565
| 0
| 9
| 880
| 447
| 276
| 171
| 56
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.CloudShell.Users.Environments.Authorize
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Sends OAuth credentials to a running environment on behalf of a user.
-- When this completes, the environment will be authorized to run various
-- Google Cloud command line tools without requiring the user to manually
-- authenticate.
--
-- /See:/ <https://cloud.google.com/shell/docs/ Cloud Shell API Reference> for @cloudshell.users.environments.authorize@.
module Network.Google.Resource.CloudShell.Users.Environments.Authorize
(
-- * REST Resource
UsersEnvironmentsAuthorizeResource
-- * Creating a Request
, usersEnvironmentsAuthorize
, UsersEnvironmentsAuthorize
-- * Request Lenses
, ueaXgafv
, ueaUploadProtocol
, ueaAccessToken
, ueaUploadType
, ueaPayload
, ueaName
, ueaCallback
) where
import Network.Google.CloudShell.Types
import Network.Google.Prelude
-- | A resource alias for @cloudshell.users.environments.authorize@ method which the
-- 'UsersEnvironmentsAuthorize' request conforms to.
type UsersEnvironmentsAuthorizeResource =
"v1" :>
CaptureMode "name" "authorize" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] AuthorizeEnvironmentRequest :>
Post '[JSON] Operation
-- | Sends OAuth credentials to a running environment on behalf of a user.
-- When this completes, the environment will be authorized to run various
-- Google Cloud command line tools without requiring the user to manually
-- authenticate.
--
-- /See:/ 'usersEnvironmentsAuthorize' smart constructor.
data UsersEnvironmentsAuthorize =
UsersEnvironmentsAuthorize'
{ _ueaXgafv :: !(Maybe Xgafv)
, _ueaUploadProtocol :: !(Maybe Text)
, _ueaAccessToken :: !(Maybe Text)
, _ueaUploadType :: !(Maybe Text)
, _ueaPayload :: !AuthorizeEnvironmentRequest
, _ueaName :: !Text
, _ueaCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'UsersEnvironmentsAuthorize' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ueaXgafv'
--
-- * 'ueaUploadProtocol'
--
-- * 'ueaAccessToken'
--
-- * 'ueaUploadType'
--
-- * 'ueaPayload'
--
-- * 'ueaName'
--
-- * 'ueaCallback'
usersEnvironmentsAuthorize
:: AuthorizeEnvironmentRequest -- ^ 'ueaPayload'
-> Text -- ^ 'ueaName'
-> UsersEnvironmentsAuthorize
usersEnvironmentsAuthorize pUeaPayload_ pUeaName_ =
UsersEnvironmentsAuthorize'
{ _ueaXgafv = Nothing
, _ueaUploadProtocol = Nothing
, _ueaAccessToken = Nothing
, _ueaUploadType = Nothing
, _ueaPayload = pUeaPayload_
, _ueaName = pUeaName_
, _ueaCallback = Nothing
}
-- | V1 error format.
ueaXgafv :: Lens' UsersEnvironmentsAuthorize (Maybe Xgafv)
ueaXgafv = lens _ueaXgafv (\ s a -> s{_ueaXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
ueaUploadProtocol :: Lens' UsersEnvironmentsAuthorize (Maybe Text)
ueaUploadProtocol
= lens _ueaUploadProtocol
(\ s a -> s{_ueaUploadProtocol = a})
-- | OAuth access token.
ueaAccessToken :: Lens' UsersEnvironmentsAuthorize (Maybe Text)
ueaAccessToken
= lens _ueaAccessToken
(\ s a -> s{_ueaAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
ueaUploadType :: Lens' UsersEnvironmentsAuthorize (Maybe Text)
ueaUploadType
= lens _ueaUploadType
(\ s a -> s{_ueaUploadType = a})
-- | Multipart request metadata.
ueaPayload :: Lens' UsersEnvironmentsAuthorize AuthorizeEnvironmentRequest
ueaPayload
= lens _ueaPayload (\ s a -> s{_ueaPayload = a})
-- | Name of the resource that should receive the credentials, for example
-- \`users\/me\/environments\/default\` or
-- \`users\/someone\@example.com\/environments\/default\`.
ueaName :: Lens' UsersEnvironmentsAuthorize Text
ueaName = lens _ueaName (\ s a -> s{_ueaName = a})
-- | JSONP
ueaCallback :: Lens' UsersEnvironmentsAuthorize (Maybe Text)
ueaCallback
= lens _ueaCallback (\ s a -> s{_ueaCallback = a})
instance GoogleRequest UsersEnvironmentsAuthorize
where
type Rs UsersEnvironmentsAuthorize = Operation
type Scopes UsersEnvironmentsAuthorize =
'["https://www.googleapis.com/auth/cloud-platform"]
requestClient UsersEnvironmentsAuthorize'{..}
= go _ueaName _ueaXgafv _ueaUploadProtocol
_ueaAccessToken
_ueaUploadType
_ueaCallback
(Just AltJSON)
_ueaPayload
cloudShellService
where go
= buildClient
(Proxy :: Proxy UsersEnvironmentsAuthorizeResource)
mempty
|
brendanhay/gogol
|
gogol-cloudshell/gen/Network/Google/Resource/CloudShell/Users/Environments/Authorize.hs
|
mpl-2.0
| 5,704
| 0
| 16
| 1,231
| 785
| 461
| 324
| 112
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.People.OtherContacts.CopyOtherContactToMyContactsGroup
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Copies an \"Other contact\" to a new contact in the user\'s
-- \"myContacts\" group
--
-- /See:/ <https://developers.google.com/people/ People API Reference> for @people.otherContacts.copyOtherContactToMyContactsGroup@.
module Network.Google.Resource.People.OtherContacts.CopyOtherContactToMyContactsGroup
(
-- * REST Resource
OtherContactsCopyOtherContactToMyContactsGroupResource
-- * Creating a Request
, otherContactsCopyOtherContactToMyContactsGroup
, OtherContactsCopyOtherContactToMyContactsGroup
-- * Request Lenses
, occoctmcgXgafv
, occoctmcgUploadProtocol
, occoctmcgResourceName
, occoctmcgAccessToken
, occoctmcgUploadType
, occoctmcgPayload
, occoctmcgCallback
) where
import Network.Google.People.Types
import Network.Google.Prelude
-- | A resource alias for @people.otherContacts.copyOtherContactToMyContactsGroup@ method which the
-- 'OtherContactsCopyOtherContactToMyContactsGroup' request conforms to.
type OtherContactsCopyOtherContactToMyContactsGroupResource
=
"v1" :>
CaptureMode "resourceName"
"copyOtherContactToMyContactsGroup"
Text
:>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON]
CopyOtherContactToMyContactsGroupRequest
:> Post '[JSON] Person
-- | Copies an \"Other contact\" to a new contact in the user\'s
-- \"myContacts\" group
--
-- /See:/ 'otherContactsCopyOtherContactToMyContactsGroup' smart constructor.
data OtherContactsCopyOtherContactToMyContactsGroup =
OtherContactsCopyOtherContactToMyContactsGroup'
{ _occoctmcgXgafv :: !(Maybe Xgafv)
, _occoctmcgUploadProtocol :: !(Maybe Text)
, _occoctmcgResourceName :: !Text
, _occoctmcgAccessToken :: !(Maybe Text)
, _occoctmcgUploadType :: !(Maybe Text)
, _occoctmcgPayload :: !CopyOtherContactToMyContactsGroupRequest
, _occoctmcgCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'OtherContactsCopyOtherContactToMyContactsGroup' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'occoctmcgXgafv'
--
-- * 'occoctmcgUploadProtocol'
--
-- * 'occoctmcgResourceName'
--
-- * 'occoctmcgAccessToken'
--
-- * 'occoctmcgUploadType'
--
-- * 'occoctmcgPayload'
--
-- * 'occoctmcgCallback'
otherContactsCopyOtherContactToMyContactsGroup
:: Text -- ^ 'occoctmcgResourceName'
-> CopyOtherContactToMyContactsGroupRequest -- ^ 'occoctmcgPayload'
-> OtherContactsCopyOtherContactToMyContactsGroup
otherContactsCopyOtherContactToMyContactsGroup pOccoctmcgResourceName_ pOccoctmcgPayload_ =
OtherContactsCopyOtherContactToMyContactsGroup'
{ _occoctmcgXgafv = Nothing
, _occoctmcgUploadProtocol = Nothing
, _occoctmcgResourceName = pOccoctmcgResourceName_
, _occoctmcgAccessToken = Nothing
, _occoctmcgUploadType = Nothing
, _occoctmcgPayload = pOccoctmcgPayload_
, _occoctmcgCallback = Nothing
}
-- | V1 error format.
occoctmcgXgafv :: Lens' OtherContactsCopyOtherContactToMyContactsGroup (Maybe Xgafv)
occoctmcgXgafv
= lens _occoctmcgXgafv
(\ s a -> s{_occoctmcgXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
occoctmcgUploadProtocol :: Lens' OtherContactsCopyOtherContactToMyContactsGroup (Maybe Text)
occoctmcgUploadProtocol
= lens _occoctmcgUploadProtocol
(\ s a -> s{_occoctmcgUploadProtocol = a})
-- | Required. The resource name of the \"Other contact\" to copy.
occoctmcgResourceName :: Lens' OtherContactsCopyOtherContactToMyContactsGroup Text
occoctmcgResourceName
= lens _occoctmcgResourceName
(\ s a -> s{_occoctmcgResourceName = a})
-- | OAuth access token.
occoctmcgAccessToken :: Lens' OtherContactsCopyOtherContactToMyContactsGroup (Maybe Text)
occoctmcgAccessToken
= lens _occoctmcgAccessToken
(\ s a -> s{_occoctmcgAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
occoctmcgUploadType :: Lens' OtherContactsCopyOtherContactToMyContactsGroup (Maybe Text)
occoctmcgUploadType
= lens _occoctmcgUploadType
(\ s a -> s{_occoctmcgUploadType = a})
-- | Multipart request metadata.
occoctmcgPayload :: Lens' OtherContactsCopyOtherContactToMyContactsGroup CopyOtherContactToMyContactsGroupRequest
occoctmcgPayload
= lens _occoctmcgPayload
(\ s a -> s{_occoctmcgPayload = a})
-- | JSONP
occoctmcgCallback :: Lens' OtherContactsCopyOtherContactToMyContactsGroup (Maybe Text)
occoctmcgCallback
= lens _occoctmcgCallback
(\ s a -> s{_occoctmcgCallback = a})
instance GoogleRequest
OtherContactsCopyOtherContactToMyContactsGroup
where
type Rs
OtherContactsCopyOtherContactToMyContactsGroup
= Person
type Scopes
OtherContactsCopyOtherContactToMyContactsGroup
=
'["https://www.googleapis.com/auth/contacts",
"https://www.googleapis.com/auth/contacts.other.readonly"]
requestClient
OtherContactsCopyOtherContactToMyContactsGroup'{..}
= go _occoctmcgResourceName _occoctmcgXgafv
_occoctmcgUploadProtocol
_occoctmcgAccessToken
_occoctmcgUploadType
_occoctmcgCallback
(Just AltJSON)
_occoctmcgPayload
peopleService
where go
= buildClient
(Proxy ::
Proxy
OtherContactsCopyOtherContactToMyContactsGroupResource)
mempty
|
brendanhay/gogol
|
gogol-people/gen/Network/Google/Resource/People/OtherContacts/CopyOtherContactToMyContactsGroup.hs
|
mpl-2.0
| 6,722
| 0
| 16
| 1,418
| 781
| 456
| 325
| 131
| 1
|
{-# LANGUAGE TupleSections, OverloadedStrings #-}
module Handler.Home where
import Import
getHomeR :: Handler Html
getHomeR =
defaultLayout $ do
setTitle "Snowdrift.coop | clearing the path to a Free/Libre/Open world"
$(widgetFile "homepage")
|
Happy0/snowdrift
|
Handler/Home.hs
|
agpl-3.0
| 265
| 0
| 10
| 53
| 43
| 22
| 21
| 8
| 1
|
module Codewars.Kata.VampireNumbers where
import Data.List
isVampire :: Integer -> Integer -> Bool
isVampire a b = let x = sort $ show a ++ show b
y = sort $ show $ a * b in
x == y
--
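-- A usage sketch, not part of the original solution: 21 and 60 form a vampire
-- pair, because 21 * 60 = 1260 reuses exactly the digits 2, 1, 6 and 0.
--
-- >>> isVampire 21 60
-- True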
|
ice1000/OI-codes
|
codewars/1-100/vampire-numbers-1.hs
|
agpl-3.0
| 208
| 0
| 11
| 64
| 82
| 43
| 39
| 6
| 1
|
{-# OPTIONS -fno-warn-orphans #-}
module Test.SwiftNav.SBP.Encoding
( tests
) where
import BasicPrelude
import qualified Data.Aeson as A
import Data.ByteString as BS
import SwiftNav.SBP.Encoding ()
import Test.QuickCheck
import Test.Tasty
import Test.Tasty.HUnit
import Test.Tasty.QuickCheck as QC
instance Arbitrary BS.ByteString where
arbitrary = BS.pack <$> arbitrary
shrink xs = BS.pack <$> shrink (BS.unpack xs)
instance CoArbitrary BS.ByteString where
coarbitrary = coarbitrary . BS.unpack
testParse :: TestTree
testParse =
testGroup "Empty Test"
[ testCase "Empty data" $ do
(A.decode . A.encode $ ("dddd" :: BS.ByteString)) @?= Just ("dddd" :: BS.ByteString)
]
testRoundtrip :: TestTree
testRoundtrip = QC.testProperty "Aeson" prop
where prop ws = (A.decode . A.encode $ (ws :: BS.ByteString)) === Just (ws :: BS.ByteString)
tests :: TestTree
tests = testGroup "Roundtrip JSON serialization"
[ testRoundtrip
, testParse
]
|
swift-nav/libsbp
|
haskell/test/Test/SwiftNav/SBP/Encoding.hs
|
lgpl-3.0
| 1,084
| 0
| 14
| 279
| 284
| 161
| 123
| 28
| 1
|
{-# LANGUAGE GADTs, RecordWildCards #-}
module Generate where
import Control.Exception (SomeException)
import Data.Aeson (encode)
import Data.ByteString.Lazy (writeFile)
import Data.Either (isLeft)
import Data.Monoid ((<>))
import Prelude hiding (writeFile)
import Crypto.Noise
import Crypto.Noise.DH
import Keys
import Types
import VectorFile
genMessage :: (Cipher c, DH d, Hash h)
=> Bool -- ^ True if we are writing a message
-> [ScrubbedBytes] -- ^ List of PSKs available for use
-> ScrubbedBytes -- ^ The payload to write/read
-> NoiseState c d h -- ^ The NoiseState to use
-> ([ScrubbedBytes], NoiseResult c d h)
genMessage write psks payload state = case result of
NoiseResultNeedPSK s -> if null psks
then (psks, NoiseResultException . error $ "not enough PSKs provided for handshake pattern")
else genMessage write (tail psks) (head psks) s
r -> (psks, r)
where
operation = if write then writeMessage else readMessage
result = operation payload state
genMessages :: (Cipher c, DH d, Hash h)
=> Bool -- ^ Set to False for one-way patterns
-> NoiseState c d h -- ^ Initiator NoiseState
-> NoiseState c d h -- ^ Responder NoiseState
-> [ScrubbedBytes] -- ^ Initiator PSKs
-> [ScrubbedBytes] -- ^ Responder PSKs
-> [ScrubbedBytes] -- ^ Payloads
-> ([Either SomeException Message], ScrubbedBytes)
genMessages swap = go []
where
go acc s _ _ _ [] = (acc, handshakeHash s)
go acc sendingState receivingState spsks rpsks (payload : rest) =
case genMessage True spsks payload sendingState of
(spsks', NoiseResultMessage ct sendingState') ->
case genMessage False rpsks ct receivingState of
(rpsks', NoiseResultMessage pt receivingState') ->
if swap
then go (acc <> [Right (Message pt ct)]) receivingState' sendingState' rpsks' spsks' rest
else go (acc <> [Right (Message pt ct)]) sendingState' receivingState' spsks' rpsks' rest
(_, NoiseResultException ex) -> (acc <> [Left ex], handshakeHash sendingState)
_ -> undefined -- the genMessage function should handle this
(_, NoiseResultException ex) -> (acc <> [Left ex], handshakeHash sendingState)
_ -> undefined -- the genMessage function should handle this
genNoiseStates :: (Cipher c, DH d, Hash h)
=> CipherType c
-> HashType h
-> PatternName
-> (HandshakeOpts d, HandshakeOpts d)
-> (NoiseState c d h, NoiseState c d h)
genNoiseStates _ _ pat (iopts, ropts) =
(noiseState iopts hs, noiseState ropts hs)
where
hs = patternToHandshake pat
genOpts :: DH d
=> DHType d
-> Vector
-> (HandshakeOpts d, HandshakeOpts d)
genOpts _ Vector{..} = (iopts, ropts)
where
idho = defaultHandshakeOpts InitiatorRole viPrologue
rdho = defaultHandshakeOpts ResponderRole vrPrologue
iopts = setLocalEphemeral (dhBytesToPair =<< viEphemeral)
. setLocalStatic (dhBytesToPair =<< viStatic)
. setRemoteStatic (dhBytesToPub =<< virStatic)
$ idho
ropts = setLocalEphemeral (dhBytesToPair =<< vrEphemeral)
. setLocalStatic (dhBytesToPair =<< vrStatic)
. setRemoteStatic (dhBytesToPub =<< vrrStatic)
$ rdho
populateVector :: SomeCipherType
-> SomeDHType
-> SomeHashType
-> [ScrubbedBytes]
-> Vector
-> Either [Either SomeException Message] Vector
populateVector (WrapCipherType c)
(WrapDHType d)
(WrapHashType h)
payloads
v@Vector{..} = do
let (msgs, hsHash) = genMessages swap ins rns viPSKs vrPSKs payloads
if any isLeft msgs
then Left msgs
else pure $ v { vHash = Just hsHash
, vMessages = either undefined id <$> msgs
}
where
pat = hsPatternName vProtoName
swap = pat /= PatternN && pat /= PatternK && pat /= PatternX &&
pat /= PatternNpsk0 && pat /= PatternKpsk0 && pat /= PatternXpsk1
opts = genOpts d v
(ins, rns) = genNoiseStates c h pat opts
genVector :: HandshakeName
-> [ScrubbedBytes]
-> Vector
genVector pat payloads = finalVector
where
emptyVector = Vector
{ vName = Nothing
, vProtoName = pat
, vFail = False
, viPrologue = "John Galt"
, viPSKs = []
, viEphemeral = Nothing
, viStatic = Nothing
, virStatic = Nothing
, vrPrologue = "John Galt"
, vrPSKs = []
, vrEphemeral = Nothing
, vrStatic = Nothing
, vrrStatic = Nothing
, vHash = Nothing
, vMessages = []
}
c = hsCipher pat
d = hsDH pat
h = hsHash pat
finalVector = either (\err -> error ("Pattern: " <> (show pat) <> " Error: " <> (show err)))
id
(populateVector c d h payloads . setKeys $ emptyVector)
allHandshakes :: [HandshakeName]
allHandshakes = do
pattern <- [minBound .. maxBound]
cipher <- [ WrapCipherType AESGCM
, WrapCipherType ChaChaPoly1305
]
dh <- [ WrapDHType Curve25519
, WrapDHType Curve448
]
hash <- [ WrapHashType BLAKE2b
, WrapHashType BLAKE2s
, WrapHashType SHA256
, WrapHashType SHA512
]
return $ HandshakeName pattern cipher dh hash
genVectorFile :: FilePath
-> IO ()
genVectorFile f = do
let payloads = [ "Ludwig von Mises"
, "Murray Rothbard"
, "F. A. Hayek"
, "Carl Menger"
, "Jean-Baptiste Say"
, "Eugen Böhm von Bawerk"
]
vectors = [ genVector hs payloads | hs <- allHandshakes ]
writeFile f . encode . VectorFile $ vectors
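-- A usage sketch with a hypothetical output path, not part of the original
-- file: writes one JSON-encoded vector per entry of 'allHandshakes'.
--
--   genVectorFile "cacophony-vectors.json"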
|
centromere/cacophony
|
tests/vectors/Generate.hs
|
unlicense
| 6,169
| 0
| 20
| 2,086
| 1,624
| 865
| 759
| 142
| 7
|
module HigherOrder where
data Employee = Coder | Manager | Veep | CEO deriving (Eq, Ord, Show)
reportBoss :: Employee -> Employee -> IO ()
reportBoss e e' = putStrLn $ show e ++ " is the boss of " ++ show e'
employeeRank :: Employee -> Employee -> IO ()
employeeRank e e' =
case compare e e' of
GT -> reportBoss e e'
EQ -> putStrLn "no boss"
LT -> (flip reportBoss) e e'
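-- A usage sketch, not part of the original exercise: the derived Ord instance
-- ranks CEO above Coder, so
--
-- >>> employeeRank Coder CEO
-- CEO is the boss of Coder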
|
thewoolleyman/haskellbook
|
07/05/maor/higherOrder.hs
|
unlicense
| 397
| 0
| 10
| 92
| 162
| 79
| 83
| -1
| -1
|
{-# LANGUAGE OverloadedStrings, CPP #-}
module FormStructure.Chapter4 (ch4DataStorage) where
#ifndef __HASTE__
--import Data.Text.Lazy (pack)
#endif
import FormEngine.FormItem
import FormStructure.Common
ch4DataStorage :: FormItem
ch4DataStorage = Chapter
{ chDescriptor = defaultFIDescriptor { iLabel = Just "4.Storage " }
, chItems = [volumes, providers, remark]
}
where
volumes :: FormItem
volumes = SimpleGroup
{ sgDescriptor = defaultFIDescriptor
{ iLabel = Just "Data volumes"
, iShortDescription = Just
    "Only scientific data volumes (without backups and scratch/tmp) are in question."
, iMandatory = True
}
, sgLevel = 0
, sgItems = [ NumberFI
{ nfiDescriptor = defaultFIDescriptor
{ iLabel = Just "Total volume produced in 2016"
, iIdent = Just "total-volume"
, iRules = [ReadOnlyRule]
}
, nfiUnit = SingleUnit "TB"
}
, NumberFI
{ nfiDescriptor = defaultFIDescriptor
{ iLabel = Just "Total volume of data stored at the end of 2016"
, iMandatory = True
}
, nfiUnit = MultipleUnit ["MB", "GB", "TB", "PB"]
}
, NumberFI
{ nfiDescriptor = defaultFIDescriptor
{ iLabel = Just "Total volume of backups"
, iMandatory = True
}
, nfiUnit = MultipleUnit ["MB", "GB", "TB", "PB"]
}
]
}
providers :: FormItem
providers = SimpleGroup
{ sgDescriptor = defaultFIDescriptor
{ iLabel = Just "Storage providers"
, iMandatory = True
}
, sgLevel = 0
, sgItems = [ NumberFI
{ nfiDescriptor = defaultFIDescriptor
{ iLabel = Just "Group's local"
, iIdent = Just "storage-provider-group"
, iRules = [storageSumRule, NumValueRule (\n -> n >= 0 && n <= 100)]
, iMandatory = True
}
, nfiUnit = SingleUnit "%"
}
, NumberFI
{ nfiDescriptor = defaultFIDescriptor
{ iLabel = Just "Institutional"
, iIdent = Just "storage-provider-institutional"
, iRules = [storageSumRule, NumValueRule (\n -> n >= 0 && n <= 100)]
, iMandatory = True
}
, nfiUnit = SingleUnit "%"
}
, NumberFI
{ nfiDescriptor = defaultFIDescriptor
{ iLabel = Just "External Provider"
, iIdent = Just "storage-provider-external"
, iRules = [storageSumRule, NumValueRule (\n -> n >= 0 && n <= 100)]
, iMandatory = True
}
, nfiUnit = SingleUnit "%"
}
, NumberFI
{ nfiDescriptor = defaultFIDescriptor
{ iLabel = Just "Sum"
, iIdent = Just "storage-providers-sum"
, iRules = [ReadOnlyRule, NumValueRule (== 100)]
, iMandatory = True
}
, nfiUnit = SingleUnit "%"
}
]
}
where
storageSumRule :: FormRule
storageSumRule = SumRule
[ "storage-provider-group"
, "storage-provider-institutional"
, "storage-provider-external"
]
"storage-providers-sum"
|
DataStewardshipPortal/ds-elixir-cz
|
FormStructure/Chapter4.hs
|
apache-2.0
| 4,036
| 0
| 20
| 1,942
| 655
| 392
| 263
| 72
| 1
|
import Data.List
hit [] [] = 0
hit (a0:as) (b0:bs) =
let s = if a0 == b0
then 1
else 0
in
s + (hit as bs)
blow :: [Int] -> [Int] -> Int
blow x y =
let b = sum $ map (\e -> if elem e y then 1 else 0) x
h = hit x y
in
b-h
play :: [[Int]] -> [[Int]]
play [] = []
play (x:y:xs) =
let h = hit x y
b = blow x y
r = play xs
in
[h,b]:r
main = do
c <- getContents
let i = map (map read) $ map words $ lines c :: [[Int]]
o = play i
mapM_ putStrLn $ map unwords $ map (map show) o
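-- A usage note with an illustrative run, not an official test case: guesses
-- are read pairwise from stdin (one line per player) and each pair yields a
-- "hits blows" line, e.g. the pair 9 0 1 2 / 9 0 1 3 prints "3 0".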
|
a143753/AOJ
|
0025.hs
|
apache-2.0
| 549
| 0
| 14
| 205
| 348
| 178
| 170
| 24
| 2
|
import qualified Common.Matrix.Matrix as M
import qualified Common.Numbers.Numbers as N
import Common.NumMod.NumMod
modulo = 1307674368000 :: Int
fibonacci :: Int -> Int -> IntMod
fibonacci n m = head $ M.fromList 2 2 (map (fromInt m) [1, 1, 1, 0]) `M.power` (n-1)
where
head = \m -> m M.! (1, 1)
f :: Int -> Int -> IntMod
f n x = fromInt modulo $ b `div` a
where
m = modulo * a :: Int
x' = fromInt m x :: IntMod
a = x * x + x - 1 :: Int
b = toInt $ f1 * (x' `N.fastpow` (n+2)) + f2 * (x' `N.fastpow` (n+1)) - x'
f1 = fibonacci n m
f2 = fibonacci (n + 1) m
main = print $ sum [ f (10^15) i | i <- [1 .. 100] ]
|
foreverbell/project-euler-solutions
|
src/435.hs
|
bsd-3-clause
| 657
| 0
| 14
| 186
| 346
| 195
| 151
| 16
| 1
|
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE ForeignFunctionInterface #-}
{-# LANGUAGE DeriveDataTypeable #-}
module Objective.C.Object
( Object(..), Class_, Meta_
, setClass
, getClassByName
, getMetaClassByName
, copyObject
) where
import Data.Data
import Data.Hashable
import Foreign.C.String
import Foreign.C.Types
import Foreign.ForeignPtr
import Foreign.ForeignPtr.Unsafe as Unsafe
import Foreign.Ptr
import Objective.C.Prim
import Objective.C.Class
import Objective.C.Id
import Objective.C.Util
-- Objects with unknown classes
newtype Object = Object (ForeignPtr ObjcObject) deriving (Eq,Ord,Show,Data,Typeable,Nil)
instance Hashable Object where
hashWithSalt n (Object fp) = hashWithSalt n (fromIntegral (ptrToIntPtr (Unsafe.unsafeForeignPtrToPtr fp)) :: Int)
instance Id Object where
withId (Object fp) = withForeignPtr fp
-- | Unknown Class
type Class_ = Class Object
-- | Unknown MetaClass
type Meta_ = Meta Object
getClassByName :: String -> IO Class_
getClassByName n = withCString n objc_getClass
getMetaClassByName :: String -> IO Meta_
getMetaClassByName n = withCString n objc_getMetaClass
setClass :: Object -> Class a -> IO (Class a)
setClass a c = withId a $ \p -> object_setClass p c
copyObject :: Id a => a -> CSize -> IO Object
copyObject a s = withId a $ \p -> do
p' <- object_copy p s
fp' <- newForeignPtr_ p'
return $ Object fp'
foreign import ccall unsafe "objc/runtime.h" objc_getClass :: CString -> IO Class_
foreign import ccall unsafe "objc/runtime.h" objc_getMetaClass :: CString -> IO Meta_
foreign import ccall unsafe "objc/runtime.h" object_setClass :: CId -> Class a -> IO (Class a)
foreign import ccall unsafe "objc/runtime.h" object_copy :: CId -> CSize -> IO CId
{-
getAssociatedObject :: Id a => a -> String -> IO Object
setAssociatedObject :: (Id a, Id b) => a -> String -> IO b
removeAssociatedObjects :: Id a => a -> IO ()
-}
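-- A usage sketch, not part of the original module: resolving the runtime
-- class object of a well-known Objective-C class by name (runs in IO).
--
--   nsObject <- getClassByName "NSObject"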
|
ekmett/objective-c
|
Objective/C/Object.hs
|
bsd-3-clause
| 1,927
| 0
| 13
| 321
| 521
| 278
| 243
| 42
| 1
|