| code | repo_name | path | license | size | n_ast_errors | ast_max_depth | n_whitespaces | n_ast_nodes | n_ast_terminals | n_ast_nonterminals | loc | cycloplexity |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
| string (5 to 1.03M chars) | string (5 to 90 chars) | string (4 to 158 chars) | string (15 classes) | int64 (5 to 1.03M) | int64 (0 to 53.9k) | int64 (2 to 4.17k) | int64 (0 to 365k) | int64 (3 to 317k) | int64 (1 to 171k) | int64 (1 to 146k) | int64 (-1 to 37.3k) | int64 (-1 to 1.31k) |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.DFAReporting.AdvertiserGroups.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Gets one advertiser group by ID.
--
-- /See:/ <https://developers.google.com/doubleclick-advertisers/ DCM/DFA Reporting And Trafficking API Reference> for @dfareporting.advertiserGroups.get@.
module Network.Google.Resource.DFAReporting.AdvertiserGroups.Get
(
-- * REST Resource
AdvertiserGroupsGetResource
-- * Creating a Request
, advertiserGroupsGet
, AdvertiserGroupsGet
-- * Request Lenses
, agggProFileId
, agggId
) where
import Network.Google.DFAReporting.Types
import Network.Google.Prelude
-- | A resource alias for @dfareporting.advertiserGroups.get@ method which the
-- 'AdvertiserGroupsGet' request conforms to.
type AdvertiserGroupsGetResource =
"dfareporting" :>
"v2.7" :>
"userprofiles" :>
Capture "profileId" (Textual Int64) :>
"advertiserGroups" :>
Capture "id" (Textual Int64) :>
QueryParam "alt" AltJSON :>
Get '[JSON] AdvertiserGroup
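-- Read as a concrete HTTP call (an illustrative gloss derived from the
-- type above, not part of the generated module):
--
-- > GET dfareporting/v2.7/userprofiles/{profileId}/advertiserGroups/{id}?alt=json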
-- | Gets one advertiser group by ID.
--
-- /See:/ 'advertiserGroupsGet' smart constructor.
data AdvertiserGroupsGet = AdvertiserGroupsGet'
{ _agggProFileId :: !(Textual Int64)
, _agggId :: !(Textual Int64)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'AdvertiserGroupsGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'agggProFileId'
--
-- * 'agggId'
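--
-- A minimal usage sketch (hypothetical IDs; assumes the lens operators
-- (&) and (.~) are in scope, e.g. from "Control.Lens"):
--
-- > advertiserGroupsGet 1234567 42
-- > advertiserGroupsGet 1234567 42 & agggId .~ 43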
advertiserGroupsGet
:: Int64 -- ^ 'agggProFileId'
-> Int64 -- ^ 'agggId'
-> AdvertiserGroupsGet
advertiserGroupsGet pAgggProFileId_ pAgggId_ =
AdvertiserGroupsGet'
{ _agggProFileId = _Coerce # pAgggProFileId_
, _agggId = _Coerce # pAgggId_
}
-- | User profile ID associated with this request.
agggProFileId :: Lens' AdvertiserGroupsGet Int64
agggProFileId
= lens _agggProFileId
(\ s a -> s{_agggProFileId = a})
. _Coerce
-- | Advertiser group ID.
agggId :: Lens' AdvertiserGroupsGet Int64
agggId
= lens _agggId (\ s a -> s{_agggId = a}) . _Coerce
instance GoogleRequest AdvertiserGroupsGet where
type Rs AdvertiserGroupsGet = AdvertiserGroup
type Scopes AdvertiserGroupsGet =
'["https://www.googleapis.com/auth/dfatrafficking"]
requestClient AdvertiserGroupsGet'{..}
= go _agggProFileId _agggId (Just AltJSON)
dFAReportingService
where go
= buildClient
(Proxy :: Proxy AdvertiserGroupsGetResource)
mempty
| repo_name: rueshyna/gogol | path: gogol-dfareporting/gen/Network/Google/Resource/DFAReporting/AdvertiserGroups/Get.hs | license: mpl-2.0 | size: 3,365 | n_ast_errors: 0 | ast_max_depth: 14 | n_whitespaces: 779 | n_ast_nodes: 421 | n_ast_terminals: 249 | n_ast_nonterminals: 172 | loc: 65 | cycloplexity: 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.Licenses.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Returns the specified License resource. Get a list of available licenses
-- by making a list() request.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.licenses.get@.
module Network.Google.Resource.Compute.Licenses.Get
(
-- * REST Resource
LicensesGetResource
-- * Creating a Request
, licensesGet
, LicensesGet
-- * Request Lenses
, lgProject
, lgLicense
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.licenses.get@ method which the
-- 'LicensesGet' request conforms to.
type LicensesGetResource =
"compute" :>
"v1" :>
"projects" :>
Capture "project" Text :>
"global" :>
"licenses" :>
Capture "license" Text :>
QueryParam "alt" AltJSON :> Get '[JSON] License
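-- Read as a concrete HTTP call (an illustrative gloss derived from the
-- type above, not part of the generated module):
--
-- > GET compute/v1/projects/{project}/global/licenses/{license}?alt=json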
-- | Returns the specified License resource. Get a list of available licenses
-- by making a list() request.
--
-- /See:/ 'licensesGet' smart constructor.
data LicensesGet = LicensesGet'
{ _lgProject :: !Text
, _lgLicense :: !Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'LicensesGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lgProject'
--
-- * 'lgLicense'
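--
-- A minimal usage sketch (hypothetical project and license names):
--
-- > licensesGet "my-project" "debian-8-jessie"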
licensesGet
:: Text -- ^ 'lgProject'
-> Text -- ^ 'lgLicense'
-> LicensesGet
licensesGet pLgProject_ pLgLicense_ =
LicensesGet'
{ _lgProject = pLgProject_
, _lgLicense = pLgLicense_
}
-- | Project ID for this request.
lgProject :: Lens' LicensesGet Text
lgProject
= lens _lgProject (\ s a -> s{_lgProject = a})
-- | Name of the License resource to return.
lgLicense :: Lens' LicensesGet Text
lgLicense
= lens _lgLicense (\ s a -> s{_lgLicense = a})
instance GoogleRequest LicensesGet where
type Rs LicensesGet = License
type Scopes LicensesGet =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/compute",
"https://www.googleapis.com/auth/compute.readonly"]
requestClient LicensesGet'{..}
= go _lgProject _lgLicense (Just AltJSON)
computeService
where go
= buildClient (Proxy :: Proxy LicensesGetResource)
mempty
| repo_name: rueshyna/gogol | path: gogol-compute/gen/Network/Google/Resource/Compute/Licenses/Get.hs | license: mpl-2.0 | size: 3,238 | n_ast_errors: 0 | ast_max_depth: 15 | n_whitespaces: 782 | n_ast_nodes: 393 | n_ast_terminals: 237 | n_ast_nonterminals: 156 | loc: 64 | cycloplexity: 1 |
{-# LANGUAGE InstanceSigs #-}
module MaybeT where
newtype MaybeT m a =
MaybeT { runMaybeT :: m (Maybe a) }
-- instance (Functor f, Functor g) =>
-- Functor (Compose f g) where
-- fmap f (Compose fga) =
-- Compose $ (fmap . fmap) f fga
instance (Functor m) => Functor (MaybeT m) where
fmap f (MaybeT ma) = MaybeT $ (fmap . fmap) f ma
instance (Applicative m) => Applicative (MaybeT m) where
pure x = MaybeT (pure (pure x))
MaybeT fab <*> MaybeT mma =
MaybeT $ (<*>) <$> fab <*> mma
instance (Monad m) => Monad (MaybeT m) where
return = pure
(>>=) :: MaybeT m a
-> (a -> MaybeT m b)
-> MaybeT m b
MaybeT ma >>= f =
MaybeT $ do
v <- ma
case v of
Nothing -> return Nothing
Just y -> runMaybeT (f y)
newtype EitherT e m a =
EitherT { runEitherT :: m (Either e a) }
instance Functor m => Functor (EitherT e m) where
fmap f (EitherT mea) = EitherT $ (fmap . fmap) f mea
instance Applicative m => Applicative (EitherT e m) where
pure x = EitherT (pure (pure x))
EitherT fab <*> EitherT mma =
EitherT $ (<*>) <$> fab <*> mma
instance Monad m => Monad (EitherT e m) where
return = pure
(>>=) :: EitherT e m a
-> (a -> EitherT e m b)
-> EitherT e m b
EitherT ma >>= f =
EitherT $ do
v <- ma
case v of
Left e -> return $ Left e
Right a -> runEitherT (f a)
swapEither :: Either e a -> Either a e
swapEither x =
case x of
Left e -> Right e
Right a -> Left a
swapEitherT :: (Functor m) => EitherT e m a -> EitherT a m e
swapEitherT (EitherT x) = EitherT $ swapEither <$> x
either' :: (a -> c) -> (b -> c) -> Either a b -> c
either' fe _ (Left e) = fe e
either' _ fa (Right a) = fa a
eitherT :: Monad m =>
(a -> m c)
-> (b -> m c)
-> EitherT a m b
-> m c
eitherT fa fb (EitherT x) = x >>= either fa fb
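-- An illustrative GHCi check of the transformers above (not part of the
-- original exercise file):
--
-- > runMaybeT (MaybeT (pure (Just 1)) >>= \x -> MaybeT (pure (Just (x + 1)))) :: IO (Maybe Int)
-- Just 2
-- > eitherT print print (EitherT (pure (Left "oops")) :: EitherT String IO Int)
-- "oops"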
| repo_name: dmvianna/haskellbook | path: src/Ch26-MaybeT.hs | license: unlicense | size: 1,899 | n_ast_errors: 0 | ast_max_depth: 14 | n_whitespaces: 592 | n_ast_nodes: 853 | n_ast_terminals: 425 | n_ast_nonterminals: 428 | loc: 56 | cycloplexity: 2 |
{-
Copyright 2020 The CodeWorld Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
module Some.Other.Name (program) where
program = drawingOf(blank)
| repo_name: google/codeworld | path: codeworld-compiler/test/testcases/differentModuleName/source.hs | license: apache-2.0 | size: 678 | n_ast_errors: 0 | ast_max_depth: 6 | n_whitespaces: 124 | n_ast_nodes: 24 | n_ast_terminals: 15 | n_ast_nonterminals: 9 | loc: 2 | cycloplexity: 1 |
{-# LANGUAGE BangPatterns, DeriveDataTypeable, DeriveGeneric, FlexibleInstances, MultiParamTypeClasses #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module Network.Riak.Protocol.PingRequest (PingRequest(..)) where
import Prelude ((+), (/))
import qualified Prelude as Prelude'
import qualified Data.Typeable as Prelude'
import qualified GHC.Generics as Prelude'
import qualified Data.Data as Prelude'
import qualified Text.ProtocolBuffers.Header as P'
data PingRequest = PingRequest{}
deriving (Prelude'.Show, Prelude'.Eq, Prelude'.Ord, Prelude'.Typeable, Prelude'.Data, Prelude'.Generic)
instance P'.Mergeable PingRequest where
mergeAppend PingRequest PingRequest = PingRequest
instance P'.Default PingRequest where
defaultValue = PingRequest
instance P'.Wire PingRequest where
wireSize ft' self'@(PingRequest)
= case ft' of
10 -> calc'Size
11 -> P'.prependMessageSize calc'Size
_ -> P'.wireSizeErr ft' self'
where
calc'Size = 0
wirePut ft' self'@(PingRequest)
= case ft' of
10 -> put'Fields
11 -> do
P'.putSize (P'.wireSize 10 self')
put'Fields
_ -> P'.wirePutErr ft' self'
where
put'Fields
= do
Prelude'.return ()
wireGet ft'
= case ft' of
10 -> P'.getBareMessageWith update'Self
11 -> P'.getMessageWith update'Self
_ -> P'.wireGetErr ft'
where
update'Self wire'Tag old'Self
= case wire'Tag of
_ -> let (field'Number, wire'Type) = P'.splitWireTag wire'Tag in P'.unknown field'Number wire'Type old'Self
instance P'.MessageAPI msg' (msg' -> PingRequest) PingRequest where
getVal m' f' = f' m'
instance P'.GPB PingRequest
instance P'.ReflectDescriptor PingRequest where
getMessageInfo _ = P'.GetMessageInfo (P'.fromDistinctAscList []) (P'.fromDistinctAscList [])
reflectDescriptorInfo _
= Prelude'.read
"DescriptorInfo {descName = ProtoName {protobufName = FIName \".Protocol.PingRequest\", haskellPrefix = [MName \"Network\",MName \"Riak\"], parentModule = [MName \"Protocol\"], baseName = MName \"PingRequest\"}, descFilePath = [\"Network\",\"Riak\",\"Protocol\",\"PingRequest.hs\"], isGroup = False, fields = fromList [], descOneofs = fromList [], keys = fromList [], extRanges = [], knownKeys = fromList [], storeUnknown = False, lazyFields = False, makeLenses = False}"
instance P'.TextType PingRequest where
tellT = P'.tellSubMessage
getT = P'.getSubMessage
instance P'.TextMsg PingRequest where
textPut msg = Prelude'.return ()
textGet = Prelude'.return P'.defaultValue
where
| repo_name: k-bx/riak-haskell-client | path: protobuf/src/Network/Riak/Protocol/PingRequest.hs | license: apache-2.0 | size: 2,631 | n_ast_errors: 1 | ast_max_depth: 16 | n_whitespaces: 524 | n_ast_nodes: 569 | n_ast_terminals: 301 | n_ast_nonterminals: 268 | loc: 55 | cycloplexity: 0 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TemplateHaskell #-}
module Database.SqlServer.Definition.FullTextCatalog
(
FullTextCatalog
) where
import Database.SqlServer.Definition.Identifier
import Database.SqlServer.Definition.Entity
import Test.QuickCheck
import Text.PrettyPrint
import Data.DeriveTH
data FullTextCatalog = FullTextCatalog
{
catalogName :: RegularIdentifier
, filegroup :: Maybe RegularIdentifier
, accentSensitive :: Maybe Bool
, asDefault :: Bool
-- TODO ignore users
}
derive makeArbitrary ''FullTextCatalog
renderFileGroup :: RegularIdentifier -> Doc
renderFileGroup n = text "ON FILEGROUP" <+> renderRegularIdentifier n
renderOptions :: Bool -> Doc
renderOptions True = text "WITH ACCENT_SENSITIVITY = ON"
renderOptions False = text "WITH ACCENT_SENSITIVITY = OFF"
instance Entity FullTextCatalog where
name = catalogName
toDoc ftc = text "CREATE FULLTEXT CATALOG" <+>
renderName ftc $+$
maybe empty renderFileGroup (filegroup ftc) $+$
maybe empty renderOptions (accentSensitive ftc) $+$
(if asDefault ftc then text "AS DEFAULT" else empty) $+$
text "GO\n"
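-- For illustration (an added note, not in the original source; catalog and
-- filegroup names are hypothetical): with the grouping above, a fully
-- specified catalog renders roughly as
--
--   CREATE FULLTEXT CATALOG someCatalog
--   ON FILEGROUP someFilegroup
--   WITH ACCENT_SENSITIVITY = ON
--   AS DEFAULT
--   GO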
| repo_name: SuperDrew/sql-server-gen | path: src/Database/SqlServer/Definition/FullTextCatalog.hs | license: bsd-2-clause | size: 1,202 | n_ast_errors: 0 | ast_max_depth: 11 | n_whitespaces: 254 | n_ast_nodes: 242 | n_ast_terminals: 129 | n_ast_nonterminals: 113 | loc: 29 | cycloplexity: 1 |
{-# LANGUAGE FlexibleInstances, MultiParamTypeClasses, FlexibleContexts #-}
--------------------------------------------------------------------------------
-- This code is generated by util/genC.hs.
--------------------------------------------------------------------------------
module Graphics.UI.GLFW.C where
--------------------------------------------------------------------------------
import Data.Bits ((.&.))
import Data.Char (chr, ord)
import Foreign.C.Types (CDouble, CFloat, CInt, CUChar, CUInt, CUShort)
import Foreign.Ptr (Ptr)
import Bindings.GLFW
import Graphics.UI.GLFW.Types
--------------------------------------------------------------------------------
class C c h where
fromC :: c -> h
toC :: h -> c
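-- For example (an illustrative note, not part of the generated file), with
-- the instances below:
--
-- > fromC (c'GLFW_PRESS :: CInt) :: KeyState   -- KeyState'Pressed
-- > toC KeyState'Pressed :: CInt               -- c'GLFW_PRESS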
--------------------------------------------------------------------------------
instance (C CInt b) => C CInt (Maybe b) where
fromC i | i == c'GLFW_DONT_CARE = Nothing
| otherwise = Just $ fromC i
toC = maybe c'GLFW_DONT_CARE toC
--------------------------------------------------------------------------------
instance C CInt Char where
fromC = chr . fromIntegral
toC = fromIntegral . ord
instance C CUInt Char where
fromC = chr . fromIntegral
toC = fromIntegral . ord
instance C CDouble Double where
fromC = realToFrac
toC = realToFrac
instance C CInt Int where
fromC = fromIntegral
toC = fromIntegral
instance C CUInt Int where
fromC = fromIntegral
toC = fromIntegral
instance C CUShort Int where
fromC = fromIntegral
toC = fromIntegral
instance C CFloat Double where
fromC = realToFrac
toC = realToFrac
instance C (Ptr C'GLFWmonitor) Monitor where
fromC = Monitor
toC = unMonitor
instance C (Ptr C'GLFWwindow) Window where
fromC = Window
toC = unWindow
instance C CInt ModifierKeys where
fromC v = ModifierKeys
{ modifierKeysShift = (v .&. c'GLFW_MOD_SHIFT) /= 0
, modifierKeysControl = (v .&. c'GLFW_MOD_CONTROL) /= 0
, modifierKeysAlt = (v .&. c'GLFW_MOD_ALT) /= 0
, modifierKeysSuper = (v .&. c'GLFW_MOD_SUPER) /= 0
, modifierKeysCapsLock = (v .&. c'GLFW_MOD_CAPS_LOCK) /= 0
, modifierKeysNumLock = (v .&. c'GLFW_MOD_NUM_LOCK) /= 0
}
toC = undefined
instance C C'GLFWvidmode VideoMode where
fromC gvm = VideoMode
{ videoModeWidth = fromIntegral $ c'GLFWvidmode'width gvm
, videoModeHeight = fromIntegral $ c'GLFWvidmode'height gvm
, videoModeRedBits = fromIntegral $ c'GLFWvidmode'redBits gvm
, videoModeGreenBits = fromIntegral $ c'GLFWvidmode'greenBits gvm
, videoModeBlueBits = fromIntegral $ c'GLFWvidmode'blueBits gvm
, videoModeRefreshRate = fromIntegral $ c'GLFWvidmode'refreshRate gvm
}
toC = undefined
instance C CInt StandardCursorShape where
fromC v
| v == c'GLFW_ARROW_CURSOR = StandardCursorShape'Arrow
| v == c'GLFW_IBEAM_CURSOR = StandardCursorShape'IBeam
| v == c'GLFW_CROSSHAIR_CURSOR = StandardCursorShape'Crosshair
| v == c'GLFW_HAND_CURSOR = StandardCursorShape'Hand
| v == c'GLFW_HRESIZE_CURSOR = StandardCursorShape'HResize
| v == c'GLFW_VRESIZE_CURSOR = StandardCursorShape'VResize
| otherwise = error $ "C CInt StandardCursorShape fromC: " ++ show v
toC StandardCursorShape'Arrow = c'GLFW_ARROW_CURSOR
toC StandardCursorShape'IBeam = c'GLFW_IBEAM_CURSOR
toC StandardCursorShape'Crosshair = c'GLFW_CROSSHAIR_CURSOR
toC StandardCursorShape'Hand = c'GLFW_HAND_CURSOR
toC StandardCursorShape'HResize = c'GLFW_HRESIZE_CURSOR
toC StandardCursorShape'VResize = c'GLFW_VRESIZE_CURSOR
--------------------------------------------------------------------------------
instance C CInt Bool where
fromC v
| v == c'GLFW_FALSE = False
| v == c'GLFW_TRUE = True
| otherwise = error $ "C CInt Bool fromC: " ++ show v
toC False = c'GLFW_FALSE
toC True = c'GLFW_TRUE
instance C CInt Error where
fromC v
| v == c'GLFW_NOT_INITIALIZED = Error'NotInitialized
| v == c'GLFW_NO_CURRENT_CONTEXT = Error'NoCurrentContext
| v == c'GLFW_INVALID_ENUM = Error'InvalidEnum
| v == c'GLFW_INVALID_VALUE = Error'InvalidValue
| v == c'GLFW_OUT_OF_MEMORY = Error'OutOfMemory
| v == c'GLFW_API_UNAVAILABLE = Error'ApiUnavailable
| v == c'GLFW_VERSION_UNAVAILABLE = Error'VersionUnavailable
| v == c'GLFW_PLATFORM_ERROR = Error'PlatformError
| v == c'GLFW_FORMAT_UNAVAILABLE = Error'FormatUnavailable
| otherwise = error $ "C CInt Error fromC: " ++ show v
toC Error'NotInitialized = c'GLFW_NOT_INITIALIZED
toC Error'NoCurrentContext = c'GLFW_NO_CURRENT_CONTEXT
toC Error'InvalidEnum = c'GLFW_INVALID_ENUM
toC Error'InvalidValue = c'GLFW_INVALID_VALUE
toC Error'OutOfMemory = c'GLFW_OUT_OF_MEMORY
toC Error'ApiUnavailable = c'GLFW_API_UNAVAILABLE
toC Error'VersionUnavailable = c'GLFW_VERSION_UNAVAILABLE
toC Error'PlatformError = c'GLFW_PLATFORM_ERROR
toC Error'FormatUnavailable = c'GLFW_FORMAT_UNAVAILABLE
instance C CInt InitHint where
fromC v
| v == c'GLFW_JOYSTICK_HAT_BUTTONS = InitHint'JoystickHatButtons
| v == c'GLFW_COCOA_CHDIR_RESOURCES = InitHint'CocoaChdirResources
| v == c'GLFW_COCOA_MENUBAR = InitHint'CocoaMenubar
| otherwise = error $ "C CInt InitHint fromC: " ++ show v
toC InitHint'JoystickHatButtons = c'GLFW_JOYSTICK_HAT_BUTTONS
toC InitHint'CocoaChdirResources = c'GLFW_COCOA_CHDIR_RESOURCES
toC InitHint'CocoaMenubar = c'GLFW_COCOA_MENUBAR
instance C CInt MonitorState where
fromC v
| v == c'GLFW_CONNECTED = MonitorState'Connected
| v == c'GLFW_DISCONNECTED = MonitorState'Disconnected
| otherwise = error $ "C CInt MonitorState fromC: " ++ show v
toC MonitorState'Connected = c'GLFW_CONNECTED
toC MonitorState'Disconnected = c'GLFW_DISCONNECTED
instance C CInt ContextRobustness where
fromC v
| v == c'GLFW_NO_ROBUSTNESS = ContextRobustness'NoRobustness
| v == c'GLFW_NO_RESET_NOTIFICATION = ContextRobustness'NoResetNotification
| v == c'GLFW_LOSE_CONTEXT_ON_RESET = ContextRobustness'LoseContextOnReset
| otherwise = error $ "C CInt ContextRobustness fromC: " ++ show v
toC ContextRobustness'NoRobustness = c'GLFW_NO_ROBUSTNESS
toC ContextRobustness'NoResetNotification = c'GLFW_NO_RESET_NOTIFICATION
toC ContextRobustness'LoseContextOnReset = c'GLFW_LOSE_CONTEXT_ON_RESET
instance C CInt ContextReleaseBehavior where
fromC v
| v == c'GLFW_ANY_RELEASE_BEHAVIOR = ContextReleaseBehavior'Any
| v == c'GLFW_RELEASE_BEHAVIOR_NONE = ContextReleaseBehavior'None
| v == c'GLFW_RELEASE_BEHAVIOR_FLUSH = ContextReleaseBehavior'Flush
| otherwise = error $ "C CInt ContextReleaseBehavior fromC: " ++ show v
toC ContextReleaseBehavior'Any = c'GLFW_ANY_RELEASE_BEHAVIOR
toC ContextReleaseBehavior'None = c'GLFW_RELEASE_BEHAVIOR_NONE
toC ContextReleaseBehavior'Flush = c'GLFW_RELEASE_BEHAVIOR_FLUSH
instance C CInt OpenGLProfile where
fromC v
| v == c'GLFW_OPENGL_ANY_PROFILE = OpenGLProfile'Any
| v == c'GLFW_OPENGL_COMPAT_PROFILE = OpenGLProfile'Compat
| v == c'GLFW_OPENGL_CORE_PROFILE = OpenGLProfile'Core
| otherwise = error $ "C CInt OpenGLProfile fromC: " ++ show v
toC OpenGLProfile'Any = c'GLFW_OPENGL_ANY_PROFILE
toC OpenGLProfile'Compat = c'GLFW_OPENGL_COMPAT_PROFILE
toC OpenGLProfile'Core = c'GLFW_OPENGL_CORE_PROFILE
instance C CInt ClientAPI where
fromC v
| v == c'GLFW_NO_API = ClientAPI'NoAPI
| v == c'GLFW_OPENGL_API = ClientAPI'OpenGL
| v == c'GLFW_OPENGL_ES_API = ClientAPI'OpenGLES
| otherwise = error $ "C CInt ClientAPI fromC: " ++ show v
toC ClientAPI'NoAPI = c'GLFW_NO_API
toC ClientAPI'OpenGL = c'GLFW_OPENGL_API
toC ClientAPI'OpenGLES = c'GLFW_OPENGL_ES_API
instance C CInt ContextCreationAPI where
fromC v
| v == c'GLFW_NATIVE_CONTEXT_API = ContextCreationAPI'Native
| v == c'GLFW_EGL_CONTEXT_API = ContextCreationAPI'EGL
| v == c'GLFW_OSMESA_CONTEXT_API = ContextCreationAPI'OSMesa
| otherwise = error $ "C CInt ContextCreationAPI fromC: " ++ show v
toC ContextCreationAPI'Native = c'GLFW_NATIVE_CONTEXT_API
toC ContextCreationAPI'EGL = c'GLFW_EGL_CONTEXT_API
toC ContextCreationAPI'OSMesa = c'GLFW_OSMESA_CONTEXT_API
instance C CInt Key where
fromC v
| v == c'GLFW_KEY_UNKNOWN = Key'Unknown
| v == c'GLFW_KEY_SPACE = Key'Space
| v == c'GLFW_KEY_APOSTROPHE = Key'Apostrophe
| v == c'GLFW_KEY_COMMA = Key'Comma
| v == c'GLFW_KEY_MINUS = Key'Minus
| v == c'GLFW_KEY_PERIOD = Key'Period
| v == c'GLFW_KEY_SLASH = Key'Slash
| v == c'GLFW_KEY_0 = Key'0
| v == c'GLFW_KEY_1 = Key'1
| v == c'GLFW_KEY_2 = Key'2
| v == c'GLFW_KEY_3 = Key'3
| v == c'GLFW_KEY_4 = Key'4
| v == c'GLFW_KEY_5 = Key'5
| v == c'GLFW_KEY_6 = Key'6
| v == c'GLFW_KEY_7 = Key'7
| v == c'GLFW_KEY_8 = Key'8
| v == c'GLFW_KEY_9 = Key'9
| v == c'GLFW_KEY_SEMICOLON = Key'Semicolon
| v == c'GLFW_KEY_EQUAL = Key'Equal
| v == c'GLFW_KEY_A = Key'A
| v == c'GLFW_KEY_B = Key'B
| v == c'GLFW_KEY_C = Key'C
| v == c'GLFW_KEY_D = Key'D
| v == c'GLFW_KEY_E = Key'E
| v == c'GLFW_KEY_F = Key'F
| v == c'GLFW_KEY_G = Key'G
| v == c'GLFW_KEY_H = Key'H
| v == c'GLFW_KEY_I = Key'I
| v == c'GLFW_KEY_J = Key'J
| v == c'GLFW_KEY_K = Key'K
| v == c'GLFW_KEY_L = Key'L
| v == c'GLFW_KEY_M = Key'M
| v == c'GLFW_KEY_N = Key'N
| v == c'GLFW_KEY_O = Key'O
| v == c'GLFW_KEY_P = Key'P
| v == c'GLFW_KEY_Q = Key'Q
| v == c'GLFW_KEY_R = Key'R
| v == c'GLFW_KEY_S = Key'S
| v == c'GLFW_KEY_T = Key'T
| v == c'GLFW_KEY_U = Key'U
| v == c'GLFW_KEY_V = Key'V
| v == c'GLFW_KEY_W = Key'W
| v == c'GLFW_KEY_X = Key'X
| v == c'GLFW_KEY_Y = Key'Y
| v == c'GLFW_KEY_Z = Key'Z
| v == c'GLFW_KEY_LEFT_BRACKET = Key'LeftBracket
| v == c'GLFW_KEY_BACKSLASH = Key'Backslash
| v == c'GLFW_KEY_RIGHT_BRACKET = Key'RightBracket
| v == c'GLFW_KEY_GRAVE_ACCENT = Key'GraveAccent
| v == c'GLFW_KEY_WORLD_1 = Key'World1
| v == c'GLFW_KEY_WORLD_2 = Key'World2
| v == c'GLFW_KEY_ESCAPE = Key'Escape
| v == c'GLFW_KEY_ENTER = Key'Enter
| v == c'GLFW_KEY_TAB = Key'Tab
| v == c'GLFW_KEY_BACKSPACE = Key'Backspace
| v == c'GLFW_KEY_INSERT = Key'Insert
| v == c'GLFW_KEY_DELETE = Key'Delete
| v == c'GLFW_KEY_RIGHT = Key'Right
| v == c'GLFW_KEY_LEFT = Key'Left
| v == c'GLFW_KEY_DOWN = Key'Down
| v == c'GLFW_KEY_UP = Key'Up
| v == c'GLFW_KEY_PAGE_UP = Key'PageUp
| v == c'GLFW_KEY_PAGE_DOWN = Key'PageDown
| v == c'GLFW_KEY_HOME = Key'Home
| v == c'GLFW_KEY_END = Key'End
| v == c'GLFW_KEY_CAPS_LOCK = Key'CapsLock
| v == c'GLFW_KEY_SCROLL_LOCK = Key'ScrollLock
| v == c'GLFW_KEY_NUM_LOCK = Key'NumLock
| v == c'GLFW_KEY_PRINT_SCREEN = Key'PrintScreen
| v == c'GLFW_KEY_PAUSE = Key'Pause
| v == c'GLFW_KEY_F1 = Key'F1
| v == c'GLFW_KEY_F2 = Key'F2
| v == c'GLFW_KEY_F3 = Key'F3
| v == c'GLFW_KEY_F4 = Key'F4
| v == c'GLFW_KEY_F5 = Key'F5
| v == c'GLFW_KEY_F6 = Key'F6
| v == c'GLFW_KEY_F7 = Key'F7
| v == c'GLFW_KEY_F8 = Key'F8
| v == c'GLFW_KEY_F9 = Key'F9
| v == c'GLFW_KEY_F10 = Key'F10
| v == c'GLFW_KEY_F11 = Key'F11
| v == c'GLFW_KEY_F12 = Key'F12
| v == c'GLFW_KEY_F13 = Key'F13
| v == c'GLFW_KEY_F14 = Key'F14
| v == c'GLFW_KEY_F15 = Key'F15
| v == c'GLFW_KEY_F16 = Key'F16
| v == c'GLFW_KEY_F17 = Key'F17
| v == c'GLFW_KEY_F18 = Key'F18
| v == c'GLFW_KEY_F19 = Key'F19
| v == c'GLFW_KEY_F20 = Key'F20
| v == c'GLFW_KEY_F21 = Key'F21
| v == c'GLFW_KEY_F22 = Key'F22
| v == c'GLFW_KEY_F23 = Key'F23
| v == c'GLFW_KEY_F24 = Key'F24
| v == c'GLFW_KEY_F25 = Key'F25
| v == c'GLFW_KEY_KP_0 = Key'Pad0
| v == c'GLFW_KEY_KP_1 = Key'Pad1
| v == c'GLFW_KEY_KP_2 = Key'Pad2
| v == c'GLFW_KEY_KP_3 = Key'Pad3
| v == c'GLFW_KEY_KP_4 = Key'Pad4
| v == c'GLFW_KEY_KP_5 = Key'Pad5
| v == c'GLFW_KEY_KP_6 = Key'Pad6
| v == c'GLFW_KEY_KP_7 = Key'Pad7
| v == c'GLFW_KEY_KP_8 = Key'Pad8
| v == c'GLFW_KEY_KP_9 = Key'Pad9
| v == c'GLFW_KEY_KP_DECIMAL = Key'PadDecimal
| v == c'GLFW_KEY_KP_DIVIDE = Key'PadDivide
| v == c'GLFW_KEY_KP_MULTIPLY = Key'PadMultiply
| v == c'GLFW_KEY_KP_SUBTRACT = Key'PadSubtract
| v == c'GLFW_KEY_KP_ADD = Key'PadAdd
| v == c'GLFW_KEY_KP_ENTER = Key'PadEnter
| v == c'GLFW_KEY_KP_EQUAL = Key'PadEqual
| v == c'GLFW_KEY_LEFT_SHIFT = Key'LeftShift
| v == c'GLFW_KEY_LEFT_CONTROL = Key'LeftControl
| v == c'GLFW_KEY_LEFT_ALT = Key'LeftAlt
| v == c'GLFW_KEY_LEFT_SUPER = Key'LeftSuper
| v == c'GLFW_KEY_RIGHT_SHIFT = Key'RightShift
| v == c'GLFW_KEY_RIGHT_CONTROL = Key'RightControl
| v == c'GLFW_KEY_RIGHT_ALT = Key'RightAlt
| v == c'GLFW_KEY_RIGHT_SUPER = Key'RightSuper
| v == c'GLFW_KEY_MENU = Key'Menu
| otherwise = error $ "C CInt Key fromC: " ++ show v
toC Key'Unknown = c'GLFW_KEY_UNKNOWN
toC Key'Space = c'GLFW_KEY_SPACE
toC Key'Apostrophe = c'GLFW_KEY_APOSTROPHE
toC Key'Comma = c'GLFW_KEY_COMMA
toC Key'Minus = c'GLFW_KEY_MINUS
toC Key'Period = c'GLFW_KEY_PERIOD
toC Key'Slash = c'GLFW_KEY_SLASH
toC Key'0 = c'GLFW_KEY_0
toC Key'1 = c'GLFW_KEY_1
toC Key'2 = c'GLFW_KEY_2
toC Key'3 = c'GLFW_KEY_3
toC Key'4 = c'GLFW_KEY_4
toC Key'5 = c'GLFW_KEY_5
toC Key'6 = c'GLFW_KEY_6
toC Key'7 = c'GLFW_KEY_7
toC Key'8 = c'GLFW_KEY_8
toC Key'9 = c'GLFW_KEY_9
toC Key'Semicolon = c'GLFW_KEY_SEMICOLON
toC Key'Equal = c'GLFW_KEY_EQUAL
toC Key'A = c'GLFW_KEY_A
toC Key'B = c'GLFW_KEY_B
toC Key'C = c'GLFW_KEY_C
toC Key'D = c'GLFW_KEY_D
toC Key'E = c'GLFW_KEY_E
toC Key'F = c'GLFW_KEY_F
toC Key'G = c'GLFW_KEY_G
toC Key'H = c'GLFW_KEY_H
toC Key'I = c'GLFW_KEY_I
toC Key'J = c'GLFW_KEY_J
toC Key'K = c'GLFW_KEY_K
toC Key'L = c'GLFW_KEY_L
toC Key'M = c'GLFW_KEY_M
toC Key'N = c'GLFW_KEY_N
toC Key'O = c'GLFW_KEY_O
toC Key'P = c'GLFW_KEY_P
toC Key'Q = c'GLFW_KEY_Q
toC Key'R = c'GLFW_KEY_R
toC Key'S = c'GLFW_KEY_S
toC Key'T = c'GLFW_KEY_T
toC Key'U = c'GLFW_KEY_U
toC Key'V = c'GLFW_KEY_V
toC Key'W = c'GLFW_KEY_W
toC Key'X = c'GLFW_KEY_X
toC Key'Y = c'GLFW_KEY_Y
toC Key'Z = c'GLFW_KEY_Z
toC Key'LeftBracket = c'GLFW_KEY_LEFT_BRACKET
toC Key'Backslash = c'GLFW_KEY_BACKSLASH
toC Key'RightBracket = c'GLFW_KEY_RIGHT_BRACKET
toC Key'GraveAccent = c'GLFW_KEY_GRAVE_ACCENT
toC Key'World1 = c'GLFW_KEY_WORLD_1
toC Key'World2 = c'GLFW_KEY_WORLD_2
toC Key'Escape = c'GLFW_KEY_ESCAPE
toC Key'Enter = c'GLFW_KEY_ENTER
toC Key'Tab = c'GLFW_KEY_TAB
toC Key'Backspace = c'GLFW_KEY_BACKSPACE
toC Key'Insert = c'GLFW_KEY_INSERT
toC Key'Delete = c'GLFW_KEY_DELETE
toC Key'Right = c'GLFW_KEY_RIGHT
toC Key'Left = c'GLFW_KEY_LEFT
toC Key'Down = c'GLFW_KEY_DOWN
toC Key'Up = c'GLFW_KEY_UP
toC Key'PageUp = c'GLFW_KEY_PAGE_UP
toC Key'PageDown = c'GLFW_KEY_PAGE_DOWN
toC Key'Home = c'GLFW_KEY_HOME
toC Key'End = c'GLFW_KEY_END
toC Key'CapsLock = c'GLFW_KEY_CAPS_LOCK
toC Key'ScrollLock = c'GLFW_KEY_SCROLL_LOCK
toC Key'NumLock = c'GLFW_KEY_NUM_LOCK
toC Key'PrintScreen = c'GLFW_KEY_PRINT_SCREEN
toC Key'Pause = c'GLFW_KEY_PAUSE
toC Key'F1 = c'GLFW_KEY_F1
toC Key'F2 = c'GLFW_KEY_F2
toC Key'F3 = c'GLFW_KEY_F3
toC Key'F4 = c'GLFW_KEY_F4
toC Key'F5 = c'GLFW_KEY_F5
toC Key'F6 = c'GLFW_KEY_F6
toC Key'F7 = c'GLFW_KEY_F7
toC Key'F8 = c'GLFW_KEY_F8
toC Key'F9 = c'GLFW_KEY_F9
toC Key'F10 = c'GLFW_KEY_F10
toC Key'F11 = c'GLFW_KEY_F11
toC Key'F12 = c'GLFW_KEY_F12
toC Key'F13 = c'GLFW_KEY_F13
toC Key'F14 = c'GLFW_KEY_F14
toC Key'F15 = c'GLFW_KEY_F15
toC Key'F16 = c'GLFW_KEY_F16
toC Key'F17 = c'GLFW_KEY_F17
toC Key'F18 = c'GLFW_KEY_F18
toC Key'F19 = c'GLFW_KEY_F19
toC Key'F20 = c'GLFW_KEY_F20
toC Key'F21 = c'GLFW_KEY_F21
toC Key'F22 = c'GLFW_KEY_F22
toC Key'F23 = c'GLFW_KEY_F23
toC Key'F24 = c'GLFW_KEY_F24
toC Key'F25 = c'GLFW_KEY_F25
toC Key'Pad0 = c'GLFW_KEY_KP_0
toC Key'Pad1 = c'GLFW_KEY_KP_1
toC Key'Pad2 = c'GLFW_KEY_KP_2
toC Key'Pad3 = c'GLFW_KEY_KP_3
toC Key'Pad4 = c'GLFW_KEY_KP_4
toC Key'Pad5 = c'GLFW_KEY_KP_5
toC Key'Pad6 = c'GLFW_KEY_KP_6
toC Key'Pad7 = c'GLFW_KEY_KP_7
toC Key'Pad8 = c'GLFW_KEY_KP_8
toC Key'Pad9 = c'GLFW_KEY_KP_9
toC Key'PadDecimal = c'GLFW_KEY_KP_DECIMAL
toC Key'PadDivide = c'GLFW_KEY_KP_DIVIDE
toC Key'PadMultiply = c'GLFW_KEY_KP_MULTIPLY
toC Key'PadSubtract = c'GLFW_KEY_KP_SUBTRACT
toC Key'PadAdd = c'GLFW_KEY_KP_ADD
toC Key'PadEnter = c'GLFW_KEY_KP_ENTER
toC Key'PadEqual = c'GLFW_KEY_KP_EQUAL
toC Key'LeftShift = c'GLFW_KEY_LEFT_SHIFT
toC Key'LeftControl = c'GLFW_KEY_LEFT_CONTROL
toC Key'LeftAlt = c'GLFW_KEY_LEFT_ALT
toC Key'LeftSuper = c'GLFW_KEY_LEFT_SUPER
toC Key'RightShift = c'GLFW_KEY_RIGHT_SHIFT
toC Key'RightControl = c'GLFW_KEY_RIGHT_CONTROL
toC Key'RightAlt = c'GLFW_KEY_RIGHT_ALT
toC Key'RightSuper = c'GLFW_KEY_RIGHT_SUPER
toC Key'Menu = c'GLFW_KEY_MENU
instance C CInt KeyState where
fromC v
| v == c'GLFW_PRESS = KeyState'Pressed
| v == c'GLFW_RELEASE = KeyState'Released
| v == c'GLFW_REPEAT = KeyState'Repeating
| otherwise = error $ "C CInt KeyState fromC: " ++ show v
toC KeyState'Pressed = c'GLFW_PRESS
toC KeyState'Released = c'GLFW_RELEASE
toC KeyState'Repeating = c'GLFW_REPEAT
instance C CInt Joystick where
fromC v
| v == c'GLFW_JOYSTICK_1 = Joystick'1
| v == c'GLFW_JOYSTICK_2 = Joystick'2
| v == c'GLFW_JOYSTICK_3 = Joystick'3
| v == c'GLFW_JOYSTICK_4 = Joystick'4
| v == c'GLFW_JOYSTICK_5 = Joystick'5
| v == c'GLFW_JOYSTICK_6 = Joystick'6
| v == c'GLFW_JOYSTICK_7 = Joystick'7
| v == c'GLFW_JOYSTICK_8 = Joystick'8
| v == c'GLFW_JOYSTICK_9 = Joystick'9
| v == c'GLFW_JOYSTICK_10 = Joystick'10
| v == c'GLFW_JOYSTICK_11 = Joystick'11
| v == c'GLFW_JOYSTICK_12 = Joystick'12
| v == c'GLFW_JOYSTICK_13 = Joystick'13
| v == c'GLFW_JOYSTICK_14 = Joystick'14
| v == c'GLFW_JOYSTICK_15 = Joystick'15
| v == c'GLFW_JOYSTICK_16 = Joystick'16
| otherwise = error $ "C CInt Joystick fromC: " ++ show v
toC Joystick'1 = c'GLFW_JOYSTICK_1
toC Joystick'2 = c'GLFW_JOYSTICK_2
toC Joystick'3 = c'GLFW_JOYSTICK_3
toC Joystick'4 = c'GLFW_JOYSTICK_4
toC Joystick'5 = c'GLFW_JOYSTICK_5
toC Joystick'6 = c'GLFW_JOYSTICK_6
toC Joystick'7 = c'GLFW_JOYSTICK_7
toC Joystick'8 = c'GLFW_JOYSTICK_8
toC Joystick'9 = c'GLFW_JOYSTICK_9
toC Joystick'10 = c'GLFW_JOYSTICK_10
toC Joystick'11 = c'GLFW_JOYSTICK_11
toC Joystick'12 = c'GLFW_JOYSTICK_12
toC Joystick'13 = c'GLFW_JOYSTICK_13
toC Joystick'14 = c'GLFW_JOYSTICK_14
toC Joystick'15 = c'GLFW_JOYSTICK_15
toC Joystick'16 = c'GLFW_JOYSTICK_16
instance C CUChar JoystickHatState where
fromC v
| v == c'GLFW_HAT_CENTERED = JoystickHatState'Centered
| v == c'GLFW_HAT_UP = JoystickHatState'Up
| v == c'GLFW_HAT_RIGHT = JoystickHatState'Right
| v == c'GLFW_HAT_DOWN = JoystickHatState'Down
| v == c'GLFW_HAT_LEFT = JoystickHatState'Left
| v == c'GLFW_HAT_RIGHT_UP = JoystickHatState'RightUp
| v == c'GLFW_HAT_RIGHT_DOWN = JoystickHatState'RightDown
| v == c'GLFW_HAT_LEFT_UP = JoystickHatState'LeftUp
| v == c'GLFW_HAT_LEFT_DOWN = JoystickHatState'LeftDown
| otherwise = error $ "C CUChar JoystickHatState fromC: " ++ show v
toC JoystickHatState'Centered = c'GLFW_HAT_CENTERED
toC JoystickHatState'Up = c'GLFW_HAT_UP
toC JoystickHatState'Right = c'GLFW_HAT_RIGHT
toC JoystickHatState'Down = c'GLFW_HAT_DOWN
toC JoystickHatState'Left = c'GLFW_HAT_LEFT
toC JoystickHatState'RightUp = c'GLFW_HAT_RIGHT_UP
toC JoystickHatState'RightDown = c'GLFW_HAT_RIGHT_DOWN
toC JoystickHatState'LeftUp = c'GLFW_HAT_LEFT_UP
toC JoystickHatState'LeftDown = c'GLFW_HAT_LEFT_DOWN
instance C CUChar JoystickButtonState where
fromC v
| v == c'GLFW_PRESS = JoystickButtonState'Pressed
| v == c'GLFW_RELEASE = JoystickButtonState'Released
| otherwise = error $ "C CUChar JoystickButtonState fromC: " ++ show v
toC JoystickButtonState'Pressed = c'GLFW_PRESS
toC JoystickButtonState'Released = c'GLFW_RELEASE
instance C CInt JoystickState where
fromC v
| v == c'GLFW_CONNECTED = JoystickState'Connected
| v == c'GLFW_DISCONNECTED = JoystickState'Disconnected
| otherwise = error $ "C CInt JoystickState fromC: " ++ show v
toC JoystickState'Connected = c'GLFW_CONNECTED
toC JoystickState'Disconnected = c'GLFW_DISCONNECTED
instance C CInt GamepadButton where
fromC v
| v == c'GLFW_GAMEPAD_BUTTON_A = GamepadButton'A
| v == c'GLFW_GAMEPAD_BUTTON_B = GamepadButton'B
| v == c'GLFW_GAMEPAD_BUTTON_X = GamepadButton'X
| v == c'GLFW_GAMEPAD_BUTTON_Y = GamepadButton'Y
| v == c'GLFW_GAMEPAD_BUTTON_LEFT_BUMPER = GamepadButton'LeftBumper
| v == c'GLFW_GAMEPAD_BUTTON_RIGHT_BUMPER = GamepadButton'RightBumper
| v == c'GLFW_GAMEPAD_BUTTON_BACK = GamepadButton'Back
| v == c'GLFW_GAMEPAD_BUTTON_START = GamepadButton'Start
| v == c'GLFW_GAMEPAD_BUTTON_GUIDE = GamepadButton'Guide
| v == c'GLFW_GAMEPAD_BUTTON_LEFT_THUMB = GamepadButton'LeftThumb
| v == c'GLFW_GAMEPAD_BUTTON_RIGHT_THUMB = GamepadButton'RightThumb
| v == c'GLFW_GAMEPAD_BUTTON_DPAD_UP = GamepadButton'DpadUp
| v == c'GLFW_GAMEPAD_BUTTON_DPAD_RIGHT = GamepadButton'DpadRight
| v == c'GLFW_GAMEPAD_BUTTON_DPAD_DOWN = GamepadButton'DpadDown
| v == c'GLFW_GAMEPAD_BUTTON_DPAD_LEFT = GamepadButton'DpadLeft
| v == c'GLFW_GAMEPAD_BUTTON_CROSS = GamepadButton'Cross
| v == c'GLFW_GAMEPAD_BUTTON_CIRCLE = GamepadButton'Circle
| v == c'GLFW_GAMEPAD_BUTTON_SQUARE = GamepadButton'Square
| v == c'GLFW_GAMEPAD_BUTTON_TRIANGLE = GamepadButton'Triangle
| otherwise = error $ "C CInt GamepadButton fromC: " ++ show v
toC GamepadButton'A = c'GLFW_GAMEPAD_BUTTON_A
toC GamepadButton'B = c'GLFW_GAMEPAD_BUTTON_B
toC GamepadButton'X = c'GLFW_GAMEPAD_BUTTON_X
toC GamepadButton'Y = c'GLFW_GAMEPAD_BUTTON_Y
toC GamepadButton'LeftBumper = c'GLFW_GAMEPAD_BUTTON_LEFT_BUMPER
toC GamepadButton'RightBumper = c'GLFW_GAMEPAD_BUTTON_RIGHT_BUMPER
toC GamepadButton'Back = c'GLFW_GAMEPAD_BUTTON_BACK
toC GamepadButton'Start = c'GLFW_GAMEPAD_BUTTON_START
toC GamepadButton'Guide = c'GLFW_GAMEPAD_BUTTON_GUIDE
toC GamepadButton'LeftThumb = c'GLFW_GAMEPAD_BUTTON_LEFT_THUMB
toC GamepadButton'RightThumb = c'GLFW_GAMEPAD_BUTTON_RIGHT_THUMB
toC GamepadButton'DpadUp = c'GLFW_GAMEPAD_BUTTON_DPAD_UP
toC GamepadButton'DpadRight = c'GLFW_GAMEPAD_BUTTON_DPAD_RIGHT
toC GamepadButton'DpadDown = c'GLFW_GAMEPAD_BUTTON_DPAD_DOWN
toC GamepadButton'DpadLeft = c'GLFW_GAMEPAD_BUTTON_DPAD_LEFT
toC GamepadButton'Cross = c'GLFW_GAMEPAD_BUTTON_CROSS
toC GamepadButton'Circle = c'GLFW_GAMEPAD_BUTTON_CIRCLE
toC GamepadButton'Square = c'GLFW_GAMEPAD_BUTTON_SQUARE
toC GamepadButton'Triangle = c'GLFW_GAMEPAD_BUTTON_TRIANGLE
instance C CUChar GamepadButtonState where
fromC v
| v == c'GLFW_PRESS = GamepadButtonState'Pressed
| v == c'GLFW_RELEASE = GamepadButtonState'Released
| otherwise = error $ "C CUChar GamepadButtonState fromC: " ++ show v
toC GamepadButtonState'Pressed = c'GLFW_PRESS
toC GamepadButtonState'Released = c'GLFW_RELEASE
instance C CInt GamepadAxis where
fromC v
| v == c'GLFW_GAMEPAD_AXIS_LEFT_X = GamepadAxis'LeftX
| v == c'GLFW_GAMEPAD_AXIS_RIGHT_X = GamepadAxis'RightX
| v == c'GLFW_GAMEPAD_AXIS_LEFT_Y = GamepadAxis'LeftY
| v == c'GLFW_GAMEPAD_AXIS_RIGHT_Y = GamepadAxis'RightY
| v == c'GLFW_GAMEPAD_AXIS_LEFT_TRIGGER = GamepadAxis'LeftTrigger
| v == c'GLFW_GAMEPAD_AXIS_RIGHT_TRIGGER = GamepadAxis'RightTrigger
| otherwise = error $ "C CInt GamepadAxis fromC: " ++ show v
toC GamepadAxis'LeftX = c'GLFW_GAMEPAD_AXIS_LEFT_X
toC GamepadAxis'RightX = c'GLFW_GAMEPAD_AXIS_RIGHT_X
toC GamepadAxis'LeftY = c'GLFW_GAMEPAD_AXIS_LEFT_Y
toC GamepadAxis'RightY = c'GLFW_GAMEPAD_AXIS_RIGHT_Y
toC GamepadAxis'LeftTrigger = c'GLFW_GAMEPAD_AXIS_LEFT_TRIGGER
toC GamepadAxis'RightTrigger = c'GLFW_GAMEPAD_AXIS_RIGHT_TRIGGER
instance C CInt MouseButton where
fromC v
| v == c'GLFW_MOUSE_BUTTON_1 = MouseButton'1
| v == c'GLFW_MOUSE_BUTTON_2 = MouseButton'2
| v == c'GLFW_MOUSE_BUTTON_3 = MouseButton'3
| v == c'GLFW_MOUSE_BUTTON_4 = MouseButton'4
| v == c'GLFW_MOUSE_BUTTON_5 = MouseButton'5
| v == c'GLFW_MOUSE_BUTTON_6 = MouseButton'6
| v == c'GLFW_MOUSE_BUTTON_7 = MouseButton'7
| v == c'GLFW_MOUSE_BUTTON_8 = MouseButton'8
| otherwise = error $ "C CInt MouseButton fromC: " ++ show v
toC MouseButton'1 = c'GLFW_MOUSE_BUTTON_1
toC MouseButton'2 = c'GLFW_MOUSE_BUTTON_2
toC MouseButton'3 = c'GLFW_MOUSE_BUTTON_3
toC MouseButton'4 = c'GLFW_MOUSE_BUTTON_4
toC MouseButton'5 = c'GLFW_MOUSE_BUTTON_5
toC MouseButton'6 = c'GLFW_MOUSE_BUTTON_6
toC MouseButton'7 = c'GLFW_MOUSE_BUTTON_7
toC MouseButton'8 = c'GLFW_MOUSE_BUTTON_8
instance C CInt MouseButtonState where
fromC v
| v == c'GLFW_PRESS = MouseButtonState'Pressed
| v == c'GLFW_RELEASE = MouseButtonState'Released
| otherwise = error $ "C CInt MouseButtonState fromC: " ++ show v
toC MouseButtonState'Pressed = c'GLFW_PRESS
toC MouseButtonState'Released = c'GLFW_RELEASE
instance C CInt CursorState where
fromC v
| v == c'GLFW_TRUE = CursorState'InWindow
| v == c'GLFW_FALSE = CursorState'NotInWindow
| otherwise = error $ "C CInt CursorState fromC: " ++ show v
toC CursorState'InWindow = c'GLFW_TRUE
toC CursorState'NotInWindow = c'GLFW_FALSE
instance C CInt CursorInputMode where
fromC v
| v == c'GLFW_CURSOR_NORMAL = CursorInputMode'Normal
| v == c'GLFW_CURSOR_HIDDEN = CursorInputMode'Hidden
| v == c'GLFW_CURSOR_DISABLED = CursorInputMode'Disabled
| otherwise = error $ "C CInt CursorInputMode fromC: " ++ show v
toC CursorInputMode'Normal = c'GLFW_CURSOR_NORMAL
toC CursorInputMode'Hidden = c'GLFW_CURSOR_HIDDEN
toC CursorInputMode'Disabled = c'GLFW_CURSOR_DISABLED
instance C CInt StickyKeysInputMode where
fromC v
| v == c'GLFW_TRUE = StickyKeysInputMode'Enabled
| v == c'GLFW_FALSE = StickyKeysInputMode'Disabled
| otherwise = error $ "C CInt StickyKeysInputMode fromC: " ++ show v
toC StickyKeysInputMode'Enabled = c'GLFW_TRUE
toC StickyKeysInputMode'Disabled = c'GLFW_FALSE
instance C CInt StickyMouseButtonsInputMode where
fromC v
| v == c'GLFW_TRUE = StickyMouseButtonsInputMode'Enabled
| v == c'GLFW_FALSE = StickyMouseButtonsInputMode'Disabled
| otherwise = error $ "C CInt StickyMouseButtonsInputMode fromC: " ++ show v
toC StickyMouseButtonsInputMode'Enabled = c'GLFW_TRUE
toC StickyMouseButtonsInputMode'Disabled = c'GLFW_FALSE
instance C CInt WindowAttrib where
fromC v
| v == c'GLFW_DECORATED = WindowAttrib'Decorated
| v == c'GLFW_RESIZABLE = WindowAttrib'Resizable
| v == c'GLFW_FLOATING = WindowAttrib'Floating
| v == c'GLFW_AUTO_ICONIFY = WindowAttrib'AutoIconify
| v == c'GLFW_FOCUS_ON_SHOW = WindowAttrib'FocusOnShow
| v == c'GLFW_HOVERED = WindowAttrib'Hovered
| otherwise = error $ "C CInt WindowAttrib fromC: " ++ show v
toC WindowAttrib'Decorated = c'GLFW_DECORATED
toC WindowAttrib'Resizable = c'GLFW_RESIZABLE
toC WindowAttrib'Floating = c'GLFW_FLOATING
toC WindowAttrib'AutoIconify = c'GLFW_AUTO_ICONIFY
toC WindowAttrib'FocusOnShow = c'GLFW_FOCUS_ON_SHOW
toC WindowAttrib'Hovered = c'GLFW_HOVERED
--------------------------------------------------------------------------------
{-# ANN module "HLint: ignore Use camelCase" #-}
| repo_name: bsl/GLFW-b | path: Graphics/UI/GLFW/C.hs | license: bsd-2-clause | size: 27,656 | n_ast_errors: 0 | ast_max_depth: 10 | n_whitespaces: 5,169 | n_ast_nodes: 6,622 | n_ast_terminals: 3,190 | n_ast_nonterminals: 3,432 | loc: 623 | cycloplexity: 0 |
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE TypeFamilies #-}
module Futhark.Analysis.SymbolTable
( SymbolTable (bindings)
, empty
, fromScope
-- * Entries
, Entry
, deepen
, bindingDepth
, valueRange
, loopVariable
, entryBinding
, entryLetBoundAttr
, entryFParamLore
, entryType
, asScalExp
-- * Lookup
, elem
, lookup
, lookupExp
, lookupType
, lookupSubExp
, lookupScalExp
, lookupValue
, lookupVar
-- * Insertion
, insertBinding
, insertFParams
, insertLParam
, insertArrayLParam
, insertLoopVar
-- * Bounds
, updateBounds
, setUpperBound
, setLowerBound
, isAtLeast
-- * Misc
, enclosingLoopVars
, rangesRep
, typeEnv
)
where
import Control.Applicative hiding (empty)
import Control.Monad
import Control.Monad.Reader
import Data.Ord
import Data.Maybe
import Data.Monoid
import Data.List hiding (elem, insert, lookup)
import qualified Data.Set as S
import qualified Data.HashSet as HS
import qualified Data.HashMap.Lazy as HM
import Prelude hiding (elem, lookup)
import Futhark.Representation.AST hiding (FParam, ParamT (..), paramType, lookupType)
import qualified Futhark.Representation.AST as AST
import Futhark.Analysis.ScalExp
import Futhark.Transform.Substitute
import qualified Futhark.Analysis.AlgSimplify as AS
import Futhark.Representation.AST.Attributes.Ranges
(Range, ScalExpRange, Ranged)
import qualified Futhark.Representation.AST.Attributes.Ranges as Ranges
data SymbolTable lore = SymbolTable {
loopDepth :: Int
, bindings :: HM.HashMap VName (Entry lore)
}
instance Monoid (SymbolTable lore) where
table1 `mappend` table2 =
SymbolTable { loopDepth = max (loopDepth table1) (loopDepth table2)
, bindings = bindings table1 `mappend` bindings table2
}
mempty = empty
empty :: SymbolTable lore
empty = SymbolTable 0 HM.empty
fromScope :: Annotations lore => Scope lore -> SymbolTable lore
fromScope = HM.foldlWithKey' insertFreeVar' empty
where insertFreeVar' m k attr = insertFreeVar k attr m
deepen :: SymbolTable lore -> SymbolTable lore
deepen vtable = vtable { loopDepth = loopDepth vtable + 1 }
data Entry lore = LoopVar (LoopVarEntry lore)
| LetBound (LetBoundEntry lore)
| FParam (FParamEntry lore)
| LParam (LParamEntry lore)
| FreeVar (FreeVarEntry lore)
data LoopVarEntry lore =
LoopVarEntry { loopVarRange :: ScalExpRange
, loopVarBindingDepth :: Int
}
data LetBoundEntry lore =
LetBoundEntry { letBoundRange :: ScalExpRange
, letBoundAttr :: LetAttr lore
, letBoundBinding :: Binding lore
, letBoundBindingDepth :: Int
, letBoundScalExp :: Maybe ScalExp
, letBoundBindage :: Bindage
}
data FParamEntry lore =
FParamEntry { fparamRange :: ScalExpRange
, fparamAttr :: FParamAttr lore
, fparamBindingDepth :: Int
}
data LParamEntry lore =
LParamEntry { lparamRange :: ScalExpRange
, lparamAttr :: LParamAttr lore
, lparamBindingDepth :: Int
}
data FreeVarEntry lore =
FreeVarEntry { freeVarAttr :: NameInfo lore
, freeVarBindingDepth :: Int
, freeVarRange :: ScalExpRange
}
isVarBound :: Entry lore -> Maybe (LetBoundEntry lore)
isVarBound (LetBound entry)
| BindVar <- letBoundBindage entry =
Just entry
isVarBound _ =
Nothing
asScalExp :: Entry lore -> Maybe ScalExp
asScalExp = letBoundScalExp <=< isVarBound
bindingDepth :: Entry lore -> Int
bindingDepth (LetBound entry) = letBoundBindingDepth entry
bindingDepth (FParam entry) = fparamBindingDepth entry
bindingDepth (LParam entry) = lparamBindingDepth entry
bindingDepth (LoopVar entry) = loopVarBindingDepth entry
bindingDepth (FreeVar _) = 0
setBindingDepth :: Int -> Entry lore -> Entry lore
setBindingDepth d (LetBound entry) =
LetBound $ entry { letBoundBindingDepth = d }
setBindingDepth d (FParam entry) =
FParam $ entry { fparamBindingDepth = d }
setBindingDepth d (LParam entry) =
LParam $ entry { lparamBindingDepth = d }
setBindingDepth d (LoopVar entry) =
LoopVar $ entry { loopVarBindingDepth = d }
setBindingDepth d (FreeVar entry) =
FreeVar $ entry { freeVarBindingDepth = d }
valueRange :: Entry lore -> ScalExpRange
valueRange (LetBound entry) = letBoundRange entry
valueRange (FParam entry) = fparamRange entry
valueRange (LParam entry) = lparamRange entry
valueRange (LoopVar entry) = loopVarRange entry
valueRange (FreeVar entry) = freeVarRange entry
setValueRange :: ScalExpRange -> Entry lore -> Entry lore
setValueRange range (LetBound entry) =
LetBound $ entry { letBoundRange = range }
setValueRange range (FParam entry) =
FParam $ entry { fparamRange = range }
setValueRange range (LParam entry) =
LParam $ entry { lparamRange = range }
setValueRange range (LoopVar entry) =
LoopVar $ entry { loopVarRange = range }
setValueRange range (FreeVar entry) =
FreeVar $ entry { freeVarRange = range }
entryBinding :: Entry lore -> Maybe (Binding lore)
entryBinding (LetBound entry) = Just $ letBoundBinding entry
entryBinding _ = Nothing
entryLetBoundAttr :: Entry lore -> Maybe (LetAttr lore)
entryLetBoundAttr (LetBound entry) = Just $ letBoundAttr entry
entryLetBoundAttr _ = Nothing
entryFParamLore :: Entry lore -> Maybe (FParamAttr lore)
entryFParamLore (FParam entry) = Just $ fparamAttr entry
entryFParamLore _ = Nothing
loopVariable :: Entry lore -> Bool
loopVariable (LoopVar _) = True
loopVariable _ = False
asExp :: Entry lore -> Maybe (Exp lore)
asExp = fmap (bindingExp . letBoundBinding) . isVarBound
entryType :: Annotations lore => Entry lore -> Type
entryType (LetBound entry) = typeOf $ letBoundAttr entry
entryType (LParam entry) = typeOf $ lparamAttr entry
entryType (FParam entry) = typeOf $ fparamAttr entry
entryType (LoopVar _) = Prim int32
entryType (FreeVar entry) = typeOf $ freeVarAttr entry
instance Substitutable lore => Substitute (LetBoundEntry lore) where
substituteNames substs entry =
LetBoundEntry {
letBoundRange = substituteNames substs $ letBoundRange entry
, letBoundAttr = substituteNames substs $ letBoundAttr entry
, letBoundBinding = substituteNames substs $ letBoundBinding entry
, letBoundScalExp = substituteNames substs $ letBoundScalExp entry
, letBoundBindingDepth = letBoundBindingDepth entry
, letBoundBindage = substituteNames substs $ letBoundBindage entry
}
instance Substitutable lore => Substitute (FParamEntry lore) where
substituteNames substs entry =
FParamEntry {
fparamRange = substituteNames substs $ fparamRange entry
, fparamAttr = substituteNames substs $ fparamAttr entry
, fparamBindingDepth = fparamBindingDepth entry
}
instance Substitutable lore => Substitute (LParamEntry lore) where
substituteNames substs entry =
LParamEntry {
lparamRange = substituteNames substs $ lparamRange entry
, lparamBindingDepth = lparamBindingDepth entry
, lparamAttr = substituteNames substs $ lparamAttr entry
}
instance Substitutable lore => Substitute (LoopVarEntry lore) where
substituteNames substs entry =
LoopVarEntry {
loopVarRange = substituteNames substs $ loopVarRange entry
, loopVarBindingDepth = loopVarBindingDepth entry
}
instance Substitute (NameInfo lore) => Substitute (FreeVarEntry lore) where
substituteNames substs entry =
FreeVarEntry {
freeVarRange = substituteNames substs $ freeVarRange entry
, freeVarAttr = substituteNames substs $ freeVarAttr entry
, freeVarBindingDepth = freeVarBindingDepth entry
}
instance Substitutable lore =>
Substitute (Entry lore) where
substituteNames substs (LetBound entry) =
LetBound $ substituteNames substs entry
substituteNames substs (FParam entry) =
FParam $ substituteNames substs entry
substituteNames substs (LParam entry) =
LParam $ substituteNames substs entry
substituteNames substs (LoopVar entry) =
LoopVar $ substituteNames substs entry
substituteNames substs (FreeVar entry) =
FreeVar $ substituteNames substs entry
elem :: VName -> SymbolTable lore -> Bool
elem name = isJust . lookup name
lookup :: VName -> SymbolTable lore -> Maybe (Entry lore)
lookup name = HM.lookup name . bindings
lookupExp :: VName -> SymbolTable lore -> Maybe (Exp lore)
lookupExp name vtable = asExp =<< lookup name vtable
lookupType :: Annotations lore => VName -> SymbolTable lore -> Maybe Type
lookupType name vtable = entryType <$> lookup name vtable
lookupSubExp :: VName -> SymbolTable lore -> Maybe SubExp
lookupSubExp name vtable = do
e <- lookupExp name vtable
case e of
PrimOp (SubExp se) -> Just se
_ -> Nothing
lookupScalExp :: VName -> SymbolTable lore -> Maybe ScalExp
lookupScalExp name vtable = asScalExp =<< lookup name vtable
lookupValue :: VName -> SymbolTable lore -> Maybe Value
lookupValue name vtable = case lookupSubExp name vtable of
Just (Constant val) -> Just $ PrimVal val
_ -> Nothing
lookupVar :: VName -> SymbolTable lore -> Maybe VName
lookupVar name vtable = case lookupSubExp name vtable of
Just (Var v) -> Just v
_ -> Nothing
lookupRange :: VName -> SymbolTable lore -> ScalExpRange
lookupRange name vtable =
maybe (Nothing, Nothing) valueRange $ lookup name vtable
enclosingLoopVars :: [VName] -> SymbolTable lore -> [VName]
enclosingLoopVars free vtable =
map fst $
sortBy (flip (comparing (bindingDepth . snd))) $
filter (loopVariable . snd) $ mapMaybe fetch free
where fetch name = do e <- lookup name vtable
return (name, e)
rangesRep :: SymbolTable lore -> AS.RangesRep
rangesRep = HM.filter knownRange . HM.map toRep . bindings
where toRep entry = (bindingDepth entry, lower, upper)
where (lower, upper) = valueRange entry
knownRange (_, lower, upper) = isJust lower || isJust upper
typeEnv :: SymbolTable lore -> Scope lore
typeEnv = HM.map nameType . bindings
where nameType (LetBound entry) = LetInfo $ letBoundAttr entry
nameType (LoopVar _) = IndexInfo
nameType (FParam entry) = FParamInfo $ fparamAttr entry
nameType (LParam entry) = LParamInfo $ lparamAttr entry
nameType (FreeVar entry) = freeVarAttr entry
defBndEntry :: Annotations lore =>
SymbolTable lore
-> PatElem (LetAttr lore)
-> Range
-> Binding lore
-> LetBoundEntry lore
defBndEntry vtable patElem range bnd =
LetBoundEntry {
letBoundRange = simplifyRange $ scalExpRange range
, letBoundAttr = patElemAttr patElem
, letBoundBinding = bnd
, letBoundScalExp =
runReader (toScalExp (`lookupScalExp` vtable) (bindingExp bnd)) types
, letBoundBindingDepth = 0
, letBoundBindage = patElemBindage patElem
}
where ranges :: AS.RangesRep
ranges = rangesRep vtable
types = typeEnv vtable
scalExpRange :: Range -> ScalExpRange
scalExpRange (lower, upper) =
(scalExpBound fst =<< lower,
scalExpBound snd =<< upper)
scalExpBound :: (ScalExpRange -> Maybe ScalExp)
-> Ranges.KnownBound
-> Maybe ScalExp
scalExpBound pick (Ranges.VarBound v) =
pick $ lookupRange v vtable
scalExpBound _ (Ranges.ScalarBound se) =
Just se
scalExpBound _ (Ranges.MinimumBound b1 b2) = do
b1' <- scalExpBound fst b1
b2' <- scalExpBound fst b2
return $ MaxMin True [b1', b2']
scalExpBound _ (Ranges.MaximumBound b1 b2) = do
b1' <- scalExpBound snd b1
b2' <- scalExpBound snd b2
return $ MaxMin False [b1', b2']
simplifyRange :: ScalExpRange -> ScalExpRange
simplifyRange (lower, upper) =
(simplifyBound lower,
simplifyBound upper)
simplifyBound (Just se) =
Just $ AS.simplify se ranges
simplifyBound Nothing =
Nothing
bindingEntries :: Ranged lore =>
Binding lore -> SymbolTable lore
-> [LetBoundEntry lore]
bindingEntries bnd@(Let pat _ _) vtable =
[ defBndEntry vtable pat_elem (Ranges.rangeOf pat_elem) bnd
| pat_elem <- patternElements pat
]
insertEntry :: VName -> Entry lore -> SymbolTable lore
-> SymbolTable lore
insertEntry name entry =
insertEntries [(name,entry)]
insertEntries :: [(VName, Entry lore)] -> SymbolTable lore
-> SymbolTable lore
insertEntries entries vtable =
vtable { bindings = foldl insertWithDepth (bindings vtable) entries
}
where insertWithDepth bnds (name, entry) =
let entry' = setBindingDepth (loopDepth vtable) entry
in HM.insert name entry' bnds
insertBinding :: Ranged lore =>
Binding lore
-> SymbolTable lore
-> SymbolTable lore
insertBinding bnd vtable =
insertEntries (zip names $ map LetBound $ bindingEntries bnd vtable) vtable
where names = patternNames $ bindingPattern bnd
insertFParam :: Annotations lore =>
AST.FParam lore
-> SymbolTable lore
-> SymbolTable lore
insertFParam fparam = insertEntry name entry
where name = AST.paramName fparam
entry = FParam FParamEntry { fparamRange = (Nothing, Nothing)
, fparamAttr = AST.paramAttr fparam
, fparamBindingDepth = 0
}
insertFParams :: Annotations lore =>
[AST.FParam lore] -> SymbolTable lore
-> SymbolTable lore
insertFParams fparams symtable = foldr insertFParam symtable fparams
insertLParamWithRange :: Annotations lore =>
LParam lore -> ScalExpRange -> SymbolTable lore
-> SymbolTable lore
insertLParamWithRange param range vtable =
-- We know that the sizes in the type of param are at least zero,
-- since they are array sizes.
let vtable' = insertEntry name bind vtable
in foldr (`isAtLeast` 0) vtable' sizevars
where bind = LParam LParamEntry { lparamRange = range
, lparamAttr = AST.paramAttr param
, lparamBindingDepth = 0
}
name = AST.paramName param
sizevars = subExpVars $ arrayDims $ AST.paramType param
insertLParam :: Annotations lore =>
LParam lore -> SymbolTable lore -> SymbolTable lore
insertLParam param =
insertLParamWithRange param (Nothing, Nothing)
insertArrayLParam :: Annotations lore =>
LParam lore -> Maybe VName -> SymbolTable lore
-> SymbolTable lore
insertArrayLParam param (Just array) vtable =
-- We now know that the outer size of 'array' is at least one, and
-- that the inner sizes are at least zero, since they are array
-- sizes.
let vtable' = insertLParamWithRange param (lookupRange array vtable) vtable
in case arrayDims <$> lookupType array vtable of
Just (Var v:_) -> (v `isAtLeast` 1) vtable'
_ -> vtable'
insertArrayLParam param Nothing vtable =
-- Well, we still know that it's a param...
insertLParam param vtable
insertLoopVar :: VName -> SubExp -> SymbolTable lore -> SymbolTable lore
insertLoopVar name bound = insertEntry name bind
where bind = LoopVar LoopVarEntry {
loopVarRange = (Just 0,
Just $ subExpToScalExp bound int32 - 1)
, loopVarBindingDepth = 0
}
insertFreeVar :: VName -> NameInfo lore -> SymbolTable lore -> SymbolTable lore
insertFreeVar name attr = insertEntry name entry
where entry = FreeVar FreeVarEntry {
freeVarAttr = attr
, freeVarRange = (Nothing, Nothing)
, freeVarBindingDepth = 0
}
updateBounds :: Annotations lore => Bool -> SubExp -> SymbolTable lore -> SymbolTable lore
updateBounds isTrue cond vtable =
case runReader (toScalExp (`lookupScalExp` vtable) $ PrimOp $ SubExp cond) types of
Nothing -> vtable
Just cond' ->
let cond'' | isTrue = cond'
| otherwise = SNot cond'
in updateBounds' cond'' vtable
where types = typeEnv vtable
-- | Updating the ranges of all symbols whenever we enter a branch is
-- presently too expensive, and disabled here.
noUpdateBounds :: Bool
noUpdateBounds = True
-- | Refines the ranges in the symbol table with
-- ranges extracted from branch conditions.
-- `cond' is the condition of the if-branch.
updateBounds' :: ScalExp -> SymbolTable lore -> SymbolTable lore
updateBounds' _ sym_tab | noUpdateBounds = sym_tab
updateBounds' cond sym_tab =
foldr updateBound sym_tab $ mapMaybe solve_leq0 $
getNotFactorsLEQ0 $ AS.simplify (SNot cond) ranges
where
updateBound (sym,True ,bound) = setUpperBound sym bound
updateBound (sym,False,bound) = setLowerBound sym bound
ranges = HM.filter nonEmptyRange $ HM.map toRep $ bindings sym_tab
toRep entry = (bindingDepth entry, lower, upper)
where (lower, upper) = valueRange entry
nonEmptyRange (_, lower, upper) = isJust lower || isJust upper
-- | Input: a boolean expression in DNF form, named `cond'.
-- It takes the terms of the argument,
-- i.e., cond = c1 || ... || cn,
-- and negates them.
-- Returns [not c1, ..., not cn], i.e., the factors
-- of `not cond' in CNF form: not cond = (not c1) && ... && (not cn)
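--
-- For illustration (an added example, not from the original source): for
-- cond = (i < 0) || (j <= 0), the negation is
-- not cond = (i >= 0) && (j >= 1), and the returned factors are
-- [0 - i, 1 - j]; each must be <= 0 for `not cond' to hold.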
getNotFactorsLEQ0 :: ScalExp -> [ScalExp]
getNotFactorsLEQ0 (RelExp rel e_scal) =
if scalExpType e_scal /= int32 then []
else let leq0_escal = if rel == LTH0
then SMinus 0 e_scal
else SMinus 1 e_scal
in [AS.simplify leq0_escal ranges]
getNotFactorsLEQ0 (SLogOr e1 e2) = getNotFactorsLEQ0 e1 ++ getNotFactorsLEQ0 e2
getNotFactorsLEQ0 _ = []
-- | Argument is scalar expression `e'.
-- Implementation finds the symbol defined at
-- the highest depth in expression `e', call it `i',
-- and decomposes e = a*i + b. If `a' and `b' are
-- free of `i', AND `a == 1 or -1' THEN the upper/lower
-- bound can be improved. Otherwise Nothing.
--
-- Returns: Nothing or
-- Just (i, a == 1, -a*b), i.e., (symbol, isUpperBound, bound)
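--
-- For illustration (an added example, not from the original source): if
-- e = i - n (so a = 1, b = -n), then e <= 0 gives the upper bound
-- i <= n, i.e. Just (i, True, n); if e = 1 - i (so a = -1, b = 1), then
-- e <= 0 gives the lower bound i >= 1, i.e. Just (i, False, 1).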
solve_leq0 :: ScalExp -> Maybe (VName, Bool, ScalExp)
solve_leq0 e_scal = do
sym <- pickRefinedSym S.empty e_scal
(a,b) <- either (const Nothing) id $ AS.linFormScalE sym e_scal ranges
case a of
-1 ->
Just (sym, False, b)
1 ->
let mb = AS.simplify (negate b) ranges
in Just (sym, True, mb)
_ -> Nothing
-- When picking a symbol @sym@ whose bound is to be refined:
-- make sure that @sym@ does not belong to the transitive closure
-- of the symbols appearing in the ranges of all the other symbols
-- in the scalar expression (themselves included).
-- If this does not hold, pick another symbol, rinse and repeat.
pickRefinedSym :: S.Set VName -> ScalExp -> Maybe VName
pickRefinedSym elsyms0 e_scal = do
let candidates = freeIn e_scal
sym0 = AS.pickSymToElim ranges elsyms0 e_scal
case sym0 of
Just sy -> let trclsyms = foldl trClSymsInRange HS.empty $ HS.toList $
candidates `HS.difference` HS.singleton sy
in if HS.member sy trclsyms
then pickRefinedSym (S.insert sy elsyms0) e_scal
else sym0
Nothing -> sym0
-- computes the transitive closure of the symbols appearing
-- in the ranges of a symbol
trClSymsInRange :: HS.HashSet VName -> VName -> HS.HashSet VName
trClSymsInRange cur_syms sym =
if HS.member sym cur_syms then cur_syms
else case HM.lookup sym ranges of
Just (_,lb,ub) -> let sym_bds = concatMap (HS.toList . freeIn) (catMaybes [lb, ub])
in foldl trClSymsInRange
(HS.insert sym cur_syms)
(HS.toList $ HS.fromList sym_bds)
Nothing -> HS.insert sym cur_syms
setUpperBound :: VName -> ScalExp -> SymbolTable lore
-> SymbolTable lore
setUpperBound name bound vtable =
vtable { bindings = HM.adjust setUpperBound' name $ bindings vtable }
where setUpperBound' entry =
let (oldLowerBound, oldUpperBound) = valueRange entry
in setValueRange
(oldLowerBound,
Just $ maybe bound (MaxMin True . (:[bound])) oldUpperBound)
entry
setLowerBound :: VName -> ScalExp -> SymbolTable lore -> SymbolTable lore
setLowerBound name bound vtable =
vtable { bindings = HM.adjust setLowerBound' name $ bindings vtable }
where setLowerBound' entry =
let (oldLowerBound, oldUpperBound) = valueRange entry
in setValueRange
(Just $ maybe bound (MaxMin False . (:[bound])) oldLowerBound,
oldUpperBound)
entry
isAtLeast :: VName -> Int -> SymbolTable lore -> SymbolTable lore
isAtLeast name x =
setLowerBound name $ fromIntegral x
|
mrakgr/futhark
|
src/Futhark/Analysis/SymbolTable.hs
|
bsd-3-clause
| 21,977
| 0
| 20
| 6,090
| 5,744
| 2,944
| 2,800
| 457
| 12
|
module Ntha.Type.Type where
import Ntha.State
import Ntha.Z3.Assertion
import Ntha.Z3.Class
import Ntha.Z3.Logic
import Z3.Monad
import Control.Monad (foldM, liftM)
import System.IO.Unsafe (unsafePerformIO)
import Data.IORef
import Data.List (intercalate)
import Data.Maybe (fromMaybe)
import qualified Data.Map as M
import qualified Data.Set as S
import qualified Text.PrettyPrint as PP
type Id = Int
type TName = String
type TField = String
type Types = [Type]
type TInstance = Maybe Type
type Z3Pred = Pred Term RType Assertion
data Type = TVar Id (IORef TInstance) TName -- type variable
| TOper TName Types -- type operator
| TRecord (M.Map TField Type)
| TCon TName Types Type
| TSig Type
| TRefined String Type Term
-- extract normal type from refined type for type inference
extractType :: Type -> Type
extractType t = case t of
-- just support arrow type for now
TOper "→" args -> TOper "→" (map extractType args)
TRefined _ t' _ -> t'
_ -> t
extractTerm :: Type -> [Term]
extractTerm t = case t of
TOper "→" args -> args >>= extractTerm
TRefined _ _ tm -> [tm]
_ -> []
getPredNames :: Type -> [String]
getPredNames t = case t of
TOper "→" args -> args >>= getPredNames
TRefined n _ _ -> [n]
_ -> []
intT :: Type
intT = TOper "Number" []
boolT :: Type
boolT = TOper "Boolean" []
charT :: Type
charT = TOper "Char" []
listT :: Type -> Type -- list type is not polymorphic
listT t = TOper "List" [t]
productT :: Types -> Type -- tuple type; "product type" is a term from algebraic data types
productT ts = TOper "*" ts
arrowT :: Type -> Type -> Type -- function type with single param
arrowT fromType toType = TOper "→" $ [fromType, toType]
functionT :: Types -> Type -> Type
functionT paramsT rtnT = foldr (\paramT resT -> arrowT paramT resT) rtnT paramsT
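-- For example (illustrative): functionT [intT, boolT] charT builds the
-- curried type arrowT intT (arrowT boolT charT), i.e. Number → Boolean → Char.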
strT :: Type
strT = listT charT
unitT :: Type
unitT = TOper "()" []
prune :: Type -> Infer Type
prune t = case t of
TVar _ inst _ -> do
instV <- readIORef inst
case instV of
Just inst' -> do
newInstance <- prune inst'
writeIORef inst $ Just newInstance
return newInstance
Nothing -> return t
_ -> return t
stringOfType :: M.Map TName TName -> Type -> Infer String
stringOfType subrule (TVar _ inst name) = do
instV <- readIORef inst
case instV of
Just inst' -> stringOfType subrule inst'
Nothing -> return $ fromMaybe "α" $ M.lookup name subrule
stringOfType subrule (TOper name args) = case name of
"*" -> do
argsStr <- (intercalate " * ") <$> mapM (stringOfType subrule) args
return $ "(" ++ argsStr ++ ")"
"List" -> do
argStr <- stringOfType subrule $ args!!0
return $ "[" ++ argStr ++ "]"
"→" -> do
argT <- prune $ args!!0
rtnT <- prune $ args!!1
argStr <- stringOfType subrule argT
rtnStr <- stringOfType subrule rtnT
let adjust t s = case t of
TOper "→" _ -> "(" ++ s ++ ")"
_ -> s
let argStr' = adjust argT argStr
let rtnStr' = adjust rtnT rtnStr
return $ argStr' ++ " → " ++ rtnStr'
_ -> if (length args) == 0
then return name
else do
argsStr <- unwords <$> mapM (stringOfType subrule) args
return $ "(" ++ name ++ " " ++ argsStr ++ ")"
stringOfType subrule (TRecord pairs) = do
pairsStr <- (intercalate ", ") <$> (mapM (\(k, v) -> ((k ++ ": ") ++) <$> stringOfType subrule v) $ M.toList pairs)
return $ "{" ++ pairsStr ++ "}"
stringOfType subrule (TCon name types dataType) = do
dataTypeStr <- stringOfType subrule dataType
case types of
[] -> return dataTypeStr
_ -> do
typesStr <- (intercalate ", ") <$> mapM (stringOfType subrule) types
return $ "(" ++ name ++ " " ++ typesStr ++ " ⇒ " ++ dataTypeStr ++ ")"
stringOfType subrule (TSig t) = liftM ("typesig: " ++) $ stringOfType subrule t
stringOfType subrule (TRefined _ t _) = liftM ("refined: " ++) $ stringOfType subrule t
getFreeVars :: Type -> Infer (S.Set TName)
getFreeVars (TVar _ inst name) = do
instV <- readIORef inst
case instV of
Just inst' -> getFreeVars inst'
Nothing -> return $ S.singleton name
getFreeVars (TOper _ args) =
foldM (\acc arg -> do
freeVars <- getFreeVars arg
return $ S.union freeVars acc)
S.empty args
getFreeVars (TRecord pairs) =
foldM (\acc (_, v) -> do
freeVars <- getFreeVars v
return $ S.union freeVars acc)
S.empty $ M.toList pairs
getFreeVars (TCon _ types dataType) =
foldM (\acc t -> do
freeVars <- getFreeVars t
return $ S.union freeVars acc)
S.empty $ types ++ [dataType]
getFreeVars (TSig t) = getFreeVars t
getFreeVars (TRefined _ t _) = getFreeVars t
{-# NOINLINE normalize #-}
normalize :: Type -> String
normalize t = unsafePerformIO $ do
freeVars <- getFreeVars t
let subrule = M.map (\c -> [c]) $ M.fromList $ zip (S.toList freeVars) ['α'..'ω']
stringOfType subrule t
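-- For example (illustrative, with a and b two distinct variables obtained
-- from 'makeVariable'): normalize (arrowT a b) renders as "α → β" or
-- "β → α", depending on how the variables' names sort in the free-variable set.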
instance Show Type where
showsPrec _ x = shows $ PP.text $ normalize x
instance Eq Type where
TVar id1 inst1 vname1 == TVar id2 inst2 vname2 = id1 == id2 && instV1 == instV2 && vname1 == vname2 where
instV1 = readState inst1
instV2 = readState inst2
TOper name1 args1 == TOper name2 args2 = name1 == name2 && args1 == args2
TRecord pairs1 == TRecord pairs2 = pairs1 == pairs2
TCon name1 types1 dataType1 == TCon name2 types2 dataType2 = name1 == name2 && types1 == types2 && dataType1 == dataType2
TSig t1 == TSig t2 = t1 == t2
TRefined x1 t1 tm1 == TRefined x2 t2 tm2 = x1 == x2 && t1 == t2 && tm1 == tm2
_ == _ = False
instance Ord Type where
TVar id1 inst1 vname1 <= TVar id2 inst2 vname2 = id1 <= id2 && instV1 <= instV2 && vname1 <= vname2 where
instV1 = readState inst1
instV2 = readState inst2
TOper name1 args1 <= TOper name2 args2 = name1 <= name2 && args1 <= args2
TRecord pairs1 <= TRecord pairs2 = pairs1 <= pairs2
TCon name1 types1 dataType1 <= TCon name2 types2 dataType2 = name1 <= name2 && types1 <= types2 && dataType1 <= dataType2
TSig t1 <= TSig t2 = t1 <= t2
TRefined x1 t1 tm1 <= TRefined x2 t2 tm2 = x1 <= x2 && t1 <= t2 && tm1 <= tm2
_ <= _ = False
makeVariable :: Infer Type
makeVariable = do
i <- nextId
name <- nextUniqueName
instRef <- newIORef Nothing
return $ TVar i instRef name
-- for refined type
data Term = TmVar String
| TmNum Int
| TmLT Term Term
| TmGT Term Term
| TmLE Term Term
| TmGE Term Term
| TmSub Term Term
| TmAdd Term Term
| TmMul Term Term
| TmDiv Term Term
| TmEqual Term Term
| TmAnd Term Term
| TmOr Term Term
| TmNot Term
| TmIf Term Term Term
deriving instance Eq Term
deriving instance Ord Term
deriving instance Show Term
-- currently just support integer
data RType = RTInt
deriving instance Eq RType
deriving instance Ord RType
instance Z3Encoded Term where
encode (TmVar x) = do
ctx <- getQualifierCtx
case M.lookup x ctx of
Just (idx, _) -> return idx
Nothing -> smtError $ "Can't find variable " ++ x
encode (TmNum n) = mkIntSort >>= mkInt n
encode (TmLT t1 t2) = encode (Less t1 t2)
encode (TmGT t1 t2) = encode (Greater t1 t2)
encode (TmLE t1 t2) = encode (LessE t1 t2)
encode (TmGE t1 t2) = encode (GreaterE t1 t2)
encode (TmAdd t1 t2) = do
a1 <- encode t1
a2 <- encode t2
mkAdd [a1, a2]
encode (TmSub t1 t2) = do
a1 <- encode t1
a2 <- encode t2
mkSub [a1, a2]
encode (TmMul t1 t2) = do
a1 <- encode t1
a2 <- encode t2
mkMul [a1, a2]
encode (TmDiv t1 t2) = do
a1 <- encode t1
a2 <- encode t2
mkDiv a1 a2
encode (TmEqual t1 t2) = do
a1 <- encode t1
a2 <- encode t2
mkEq a1 a2
encode (TmAnd t1 t2) = do
a1 <- encode t1
a2 <- encode t2
mkAnd [a1, a2]
encode (TmOr t1 t2) = do
a1 <- encode t1
a2 <- encode t2
mkOr [a1, a2]
encode (TmNot t) = encode t >>= mkNot
encode (TmIf p c a) = do
a1 <- encode p
a2 <- encode c
a3 <- encode a
mkIte a1 a2 a3
instance Z3Sorted Term where
sort (TmVar x) = do
ctx <- getQualifierCtx
case M.lookup x ctx of
Just (_, s) -> return s
Nothing -> smtError $ "Can't find variable " ++ x
sort (TmNum _) = mkIntSort
sort (TmLT _ _) = mkBoolSort
sort (TmGT _ _) = mkBoolSort
sort (TmLE _ _) = mkBoolSort
sort (TmGE _ _) = mkBoolSort
sort (TmAdd _ _) = mkIntSort
sort (TmSub _ _) = mkIntSort
sort (TmMul _ _) = mkIntSort
sort (TmDiv _ _) = mkIntSort
sort (TmEqual _ _) = mkBoolSort
sort (TmAnd _ _) = mkBoolSort
sort (TmOr _ _) = mkBoolSort
sort (TmNot _) = mkBoolSort
sort (TmIf _ c _) = sort c
instance Z3Sorted RType where
sort RTInt = mkIntSort
|
zjhmale/Ntha
|
src/Ntha/Type/Type.hs
|
bsd-3-clause
| 9,167
| 0
| 18
| 2,704
| 3,512
| 1,718
| 1,794
| -1
| -1
|
module Main where
import Web.Scotty
import Data.Monoid (mconcat)
import System.Environment (getEnv)
import Data.Time.Clock (getCurrentTime)
import Data.Aeson hiding (json)
import Data.Aeson.Types
import Data.Text (Text)
import Control.Concurrent.MVar
import System.Exit
import Control.Monad.IO.Class
import System.Posix.Process (exitImmediately)
main :: IO ()
main = do
putStrLn "API starting...."
-- env vars
port <- read <$> getEnv "PORT" :: IO Int
-- counter used to crash the application
counter <- newMVar 0 :: IO (MVar Int)
-- scotty handler with ONE route
scotty port $
get "/" $ do
now <- liftIO getCurrentTime
currentCounter <- liftIO $ takeMVar counter
liftIO $ case currentCounter of
-- crash process every 5th request
5 -> do
putStrLn "ERROR: Uh oh.. Haskell needs a break :("
-- this type of exit successfully bypasses WAI trying to keep the
-- process alive
exitImmediately (ExitFailure 1)
a -> putMVar counter (a + 1)
-- return JSON object with current time value
json $ object ["currentTime" .= show now]
|
AKurilin/useless
|
Main.hs
|
bsd-3-clause
| 1,182
| 0
| 18
| 316
| 280
| 147
| 133
| 27
| 2
|
-- !!! Testing static, dynamic and wrapped import of trig function
-- Imports kept minimal to avoid pulling in Storable and other
-- things which use even more ffi.
import System.IO.Unsafe( unsafePerformIO )
import Foreign.Ptr( FunPtr, freeHaskellFunPtr )
tests = do
putStrLn "\nTesting sin==mysin (should return lots of Trues)"
print (testSin sin mysin)
putStrLn "\nTesting sin==dynamic_sin (should return lots of Trues)"
print (testSin sin (dyn_sin sin_addr))
putStrLn "\nTesting sin==IO wrapped_sin (should return lots of Trues)"
sin_addr2 <- wrapIO (return . sin)
print (testSin sin (unsafePerformIO . (dyn_sinIO sin_addr2)))
freeHaskellFunPtr sin_addr2
putStrLn "\nTesting sin==Id wrapped_sin (should return lots of Trues)"
sin_addr3 <- wrapId sin
print (testSin sin (dyn_sin sin_addr3))
freeHaskellFunPtr sin_addr3
testSin f g = [ (f x == g x) | x <- [0,0.01 .. 1] ]
foreign import ccall "math.h sin" mysin :: Double -> Double
foreign import ccall "dynamic" dyn_sin :: FunPtr (Double -> Double) -> (Double -> Double)
foreign import ccall "dynamic" dyn_sinIO :: FunPtr (Double -> IO Double) -> (Double -> IO Double)
foreign import ccall "math.h &sin" sin_addr :: FunPtr (Double -> Double)
foreign import ccall "wrapper" wrapId :: (Double -> Double) -> IO (FunPtr (Double -> Double))
foreign import ccall "wrapper" wrapIO :: (Double -> IO Double) -> IO (FunPtr (Double -> IO Double))
|
FranklinChen/Hugs
|
tests/ffi/Sin.hs
|
bsd-3-clause
| 1,422
| 0
| 13
| 244
| 414
| 208
| 206
| 22
| 1
|
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE RecordWildCards #-}
--
-- GHC plugin to generate srcloc info.
--
-- (c) 2014 Galois, Inc.
--
module Ivory.Tower.SrcLoc.Plugin (plugin) where
import DynamicLoading
import GhcPlugins
import GHC.Plugins.SrcSpan
plugin :: Plugin
plugin = defaultPlugin { installCoreToDos = install }
install :: [CommandLineOption] -> [CoreToDo] -> CoreM [CoreToDo]
install opts todos = do
reinitializeGlobals
hsc_env <- getHscEnv
Just withLocName <- liftIO $ lookupRdrNameInModuleForPlugins hsc_env hANDLER_MONAD_MODULE wITH_LOC
withLocVar <- lookupId withLocName
Just mkLocName <- liftIO $ lookupRdrNameInModuleForPlugins hsc_env hANDLER_MONAD_MODULE mK_LOC
mkLocVar <- lookupId mkLocName
Just handlerName <- liftIO $ lookupRdrNameInModuleForPlugins hsc_env hANDLER_MONAD_MODULE hANDLER
handlerCon <- lookupTyCon handlerName
let annotate loc expr = mkWithLocExpr handlerCon mkLocVar withLocVar loc expr
let locpass = mkPass annotate killForeignStubs
return $ (CoreDoPluginPass "Add Locations" locpass) : todos
where
killForeignStubs = "kill-foreign-stubs" `elem` opts
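-- A usage sketch (illustrative; the source file name is hypothetical, the
-- option string matches 'killForeignStubs' above):
--
-- > ghc -fplugin=Ivory.Tower.SrcLoc.Plugin \
-- >     -fplugin-opt=Ivory.Tower.SrcLoc.Plugin:kill-foreign-stubs Handlers.hs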
-- | Check that the type of the expression is headed by the handler monad type constructor.
isHandlerStmt :: TyCon -> CoreExpr -> Bool
isHandlerStmt handlerM expr@(App _ _)
| Just (tc, _) <- splitTyConApp_maybe $ exprType expr
= tc == handlerM
isHandlerStmt handlerM expr@(Var _)
| Just (tc, _) <- splitTyConApp_maybe $ exprType expr
= tc == handlerM
isHandlerStmt _ _
= False
mkWithLocExpr :: TyCon -> Var -> Var -> SrcSpan -> CoreExpr -> CoreM CoreExpr
mkWithLocExpr handlerTyCon mkLocVar withLocVar (RealSrcSpan ss) expr
| isHandlerStmt handlerTyCon expr = do
loc <- mkLocExpr mkLocVar ss
return $ mkCoreApps (Var withLocVar) (tys' ++ [loc, expr])
where
tys' = map Type tys
(_, tys) = splitAppTys $ exprType expr
mkWithLocExpr _ _ _ _ expr = return expr
mkLocExpr :: Var -> RealSrcSpan -> CoreM CoreExpr
mkLocExpr mkLocVar ss = do
df <- getDynFlags
file <- mkStringExprFS $ srcSpanFile ss
return $ mkCoreApps (Var mkLocVar) [ file
, mkIntExprInt df (srcSpanStartLine ss)
, mkIntExprInt df (srcSpanStartCol ss)
, mkIntExprInt df (srcSpanEndLine ss)
, mkIntExprInt df (srcSpanEndCol ss)
]
hANDLER_MONAD_MODULE :: ModuleName
hANDLER_MONAD_MODULE = mkModuleName "Ivory.Tower.Monad.Handler"
wITH_LOC, mK_LOC, hANDLER :: RdrName
wITH_LOC = mkVarUnqual $ fsLit "withLocation"
mK_LOC = mkVarUnqual $ fsLit "mkLocation"
hANDLER = mkRdrQual hANDLER_MONAD_MODULE $ mkTcOcc "Handler"
|
GaloisInc/tower
|
tower/src/Ivory/Tower/SrcLoc/Plugin.hs
|
bsd-3-clause
| 2,780
| 0
| 12
| 654
| 723
| 362
| 361
| 55
| 1
|
{- arch-tag: Test runner
Copyright (C) 2004-2011 John Goerzen <jgoerzen@complete.org>
License: BSD3
-}
module Main where
import Test.HUnit
import Tests
main = runTestTT tests
|
jgoerzen/hslogger
|
testsrc/runtests.hs
|
bsd-3-clause
| 180
| 0
| 5
| 29
| 21
| 13
| 8
| 4
| 1
|
-- | Unsafe code posted <https://www.reddit.com/r/haskell/comments/3vlb8v/reading_data_problems/ on Reddit> and turned into safe code using NonEmpty.
module NonEmptyListExample where
-- <http://hackage.haskell.org/package/split split> utility library
import qualified Data.List.Split as Split
import qualified Data.List.NonEmpty as NonEmpty
import Data.List.NonEmpty (NonEmpty)
-- Our utility module.
import qualified Sort3
main :: IO ()
main = do
contents <- readFile "input.txt"
case NonEmpty.nonEmpty contents of
Nothing -> return ()
Just contents1 -> putStrLn $ show (totalArea(parseFile contents1))
totalArea :: NonEmpty (Int, Int, Int) -> Int
totalArea xs = foldl (\acc x -> (acc + partialArea x)) 0 xs
partialArea :: (Int, Int, Int) -> Int
partialArea (l, w, h) = 2 * (l*w + w*h + h*l) + slack
where
areas :: NonEmpty Int
areas = NonEmpty.fromList [l, w, h]
-- 'maximum' is safe on 'NonEmpty'
-- But 'smallSides' can be empty because of 'NonEmpty.filter',
-- and 'NonEmpty.fromList' is unsafe!
smallSides :: [Int]
smallSides = NonEmpty.filter (< maximum areas) areas
-- unsafe!
smallSides1 :: NonEmpty Int
smallSides1 = NonEmpty.fromList smallSides
-- 'foldl1' is safe on 'NonEmpty'
slack = foldl1 (*) smallSides1
parseFile :: NonEmpty Char -> NonEmpty (Int, Int, Int)
parseFile xs = NonEmpty.map (splitDimensions) (breakLines xs)
-- | We ended up not needing the fact that the input is nonempty, and
-- converted it to a regular list.
breakLines :: NonEmpty Char -> NonEmpty String
breakLines string1 = ourSplitOn "\n" (NonEmpty.toList string1)
-- | 'read' is unsafe. '(!!)' is unsafe.
splitDimensions :: String -> (Int, Int, Int)
splitDimensions xs = (item 0, item 1, item 2)
where item n = read ((Split.splitOn "x" xs)!!n)
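-- A safer sketch (illustrative, not used above; 'safeSplitDimensions' and the
-- 'Text.Read.readMaybe' import are additions): parse with 'readMaybe' and
-- pattern-match on the split instead of indexing.
--
-- > import Text.Read (readMaybe)
-- >
-- > safeSplitDimensions :: String -> Maybe (Int, Int, Int)
-- > safeSplitDimensions xs =
-- >   case traverse readMaybe (Split.splitOn "x" xs) of
-- >     Just [l, w, h] -> Just (l, w, h)
-- >     _              -> Nothing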
-- | Using unsafe 'NonEmpty.fromList' is safe because we know
-- the result of 'Split.splitOn' is nonempty. Note that the elements
-- themselves can be empty.
ourSplitOn :: Eq a => [a] -> [a] -> NonEmpty [a]
ourSplitOn subList list = NonEmpty.fromList (Split.splitOn subList list)
-- | Don't use lists at all!
bestPartialArea :: (Int, Int, Int) -> Int
bestPartialArea (l, w, h) = 2 * (l*w + w*h + h*l) + slack
where
(side0, side1, _) = Sort3.sort3 (l, w, h)
slack = side0 * side1
|
FranklinChen/twenty-four-days2015-of-hackage
|
src/NonEmptyListExample.hs
|
bsd-3-clause
| 2,329
| 0
| 15
| 459
| 657
| 361
| 296
| 35
| 2
|
{-# LANGUAGE TypeFamilies, OverloadedStrings #-}
module DeepBanana.Layer.CUDA.CuRAND (
splitGenerator
, uniform
, normal
, logNormal
, dropout
) where
import Debug.Trace
import Foreign.Marshal
import System.IO.Unsafe
import Unsafe.Coerce
import DeepBanana.Device
import qualified DeepBanana.Device.Monad as DeviceM
import qualified DeepBanana.Device.CUDA as CUDA
import qualified DeepBanana.Device.CuRAND as CuRAND
import DeepBanana.Exception
import DeepBanana.Layer
import DeepBanana.Layer.CUDA.Monad
import DeepBanana.Prelude
import DeepBanana.Tensor
import DeepBanana.Tensor.Exception
import qualified DeepBanana.Tensor.Mutable as MT
-- Naively splits the generator by reseeding one with a random value from the
-- current generator. Probably not so great statistically, but should do the job
-- for our purposes.
splitGenerator :: forall m d . (MonadCuda m, Device d) => d -> m Generator
splitGenerator d = embedCudaFromST $ embedCuda unsafeIOToPrim $ do
gen <- get
newSeed <- liftIO $ runDeviceM d $ withGenerator gen $ \rawGen -> do
res <- CUDA.mallocArray 1
CuRAND.generateLongLong rawGen res 1
[newSeed] <- CUDA.peekListArray 1 res
CUDA.free res
return newSeed
return $ Generator newSeed 0
uniform :: forall m s d a
. (MonadCuda m, Device d, TensorScalar a, Shape s)
=> d -> s -> m (Tensor d s a)
uniform dev shp = embedCudaFromST $ embedCuda unsafeIOToPrim $ do
gen <- get
let outSize = size shp
res <- MT.emptyTensor dev shp :: CudaT IO (MT.IOTensor d s a)
liftIO $ MT.withDevicePtr res $ \resptr -> do
runDeviceM dev
$ withGenerator gen $ \rawGen -> do
generateUniform rawGen resptr (fromIntegral outSize)
modify (\gen -> gen {offset = offset gen + fromIntegral outSize})
unsafeFreeze res >>= return . unsafeCoerce
normal :: forall m d s a
. (MonadCuda m, Device d, TensorScalar a, Shape s)
=> d -> s -> a -> a -> m (Tensor d s a)
normal dev shp mean std = embedCudaFromST $ embedCuda unsafeIOToPrim $ do
gen <- get
let outSize = size shp
res <- MT.emptyTensor dev shp :: CudaT IO (MT.IOTensor d s a)
liftIO $ MT.withDevicePtr res $ \resptr -> do
runDeviceM dev
$ withGenerator gen $ \rawGen -> do
generateNormal rawGen resptr (fromIntegral outSize) mean std
modify (\gen -> gen {offset = offset gen + fromIntegral outSize})
unsafeFreeze res >>= return . unsafeCoerce
logNormal :: forall m d s a
. (MonadCuda m, Device d, TensorScalar a, Shape s)
=> d -> s -> a -> a -> m (Tensor d s a)
logNormal dev shp mean std = embedCudaFromST $ embedCuda unsafeIOToPrim $ do
gen <- get
let outSize = size shp
res <- MT.emptyTensor dev shp :: CudaT IO (MT.IOTensor d s a)
liftIO $ MT.withDevicePtr res $ \resptr -> do
runDeviceM dev
$ withGenerator gen $ \rawGen -> do
generateLogNormal rawGen resptr (fromIntegral outSize) mean std
modify (\gen -> gen {offset = offset gen + fromIntegral outSize})
unsafeFreeze res >>= return . unsafeCoerce
-- dropout
dropout :: forall m d s a
. (MonadCuda m, Device d, TensorScalar a, Shape s)
=> a
-> Layer m a '[] (Tensor d s a) (Tensor d s a)
dropout drop_proba = Layer $ \_ x -> embedCudaFromST $ do
let outSize = size $ shape x
mask <- embedCuda unsafeIOToPrim $ do
mmask <- MT.emptyTensor (device x) $ shape x :: CudaT IO (MT.IOTensor d s a)
gen <- get
liftIO $ do
MT.withDevicePtr mmask $ \maskptr -> do
runDeviceM (device x) $ do
withGenerator gen $ \rawGen -> do
generateUniform rawGen maskptr (fromIntegral outSize)
MT.threshInplace mmask drop_proba
unsafeFreeze mmask >>= return . unsafeCoerce :: CudaT IO (Tensor d s a)
modify (\gen -> gen {offset = offset gen + fromIntegral outSize})
case toAnyFixed $ shape x of
AnyFixed fshape -> do
fx <- shapeConvert fshape x
fmask <- shapeConvert fshape mask
withValidUniqueDevice (device x) $ \dev' -> do
dfx <- deviceConvert dev' fx
dfmask <- deviceConvert dev' fmask
dy <- shapeConvert (shape x) $ dfx * dfmask
y <- deviceConvert (device x) dy
return (y, broadcast' (shape x) >>> \upgrad -> unsafeRunCudaError $ do
fdy <- shapeConvert fshape upgrad
dfdy <- deviceConvert dev' fdy
ddx <- shapeConvert (shape x) $ dfdy * dfmask
dx <- deviceConvert (device x) ddx
return (W Z, dx))
|
alexisVallet/deep-banana
|
src/DeepBanana/Layer/CUDA/CuRAND.hs
|
bsd-3-clause
| 4,482
| 0
| 31
| 1,087
| 1,611
| 793
| 818
| -1
| -1
|
-- |
-- Module: Data.Aeson
-- Copyright: (c) 2011-2016 Bryan O'Sullivan
-- (c) 2011 MailRank, Inc.
-- License: BSD3
-- Maintainer: Bryan O'Sullivan <bos@serpentine.com>
-- Stability: experimental
-- Portability: portable
--
-- Types and functions for working efficiently with JSON data.
--
-- (A note on naming: in Greek mythology, Aeson was the father of Jason.)
module Data.Aeson
(
-- * How to use this library
-- $use
-- ** Writing instances by hand
-- $manual
-- ** Working with the AST
-- $ast
-- ** Decoding to a Haskell value
-- $haskell
-- ** Decoding a mixed-type object
-- $mixed
-- * Encoding and decoding
-- $encoding_and_decoding
-- ** Direct encoding
-- $encoding
decode
, decode'
, eitherDecode
, eitherDecode'
, encode
-- ** Variants for strict bytestrings
, decodeStrict
, decodeStrict'
, eitherDecodeStrict
, eitherDecodeStrict'
-- * Core JSON types
, Value(..)
, Encoding
, fromEncoding
, Array
, Object
-- * Convenience types
, DotNetTime(..)
-- * Type conversion
, FromJSON(..)
, Result(..)
, fromJSON
, ToJSON(..)
, KeyValue(..)
-- ** Keys for maps
, ToJSONKey(..)
, ToJSONKeyFunction(..)
, FromJSONKey(..)
, FromJSONKeyFunction(..)
-- ** Liftings to unary and binary type constructors
, FromJSON1(..)
, parseJSON1
, FromJSON2(..)
, parseJSON2
, ToJSON1(..)
, toJSON1
, toEncoding1
, ToJSON2(..)
, toJSON2
, toEncoding2
-- ** Generic JSON classes and options
, GFromJSON(..)
, FromArgs(..)
, GToJSON
, GToEncoding
, ToArgs(..)
, Zero
, One
, genericToJSON
, genericLiftToJSON
, genericToEncoding
, genericLiftToEncoding
, genericParseJSON
, genericLiftParseJSON
-- ** Generic and TH encoding configuration
, Options
, defaultOptions
-- *** Options fields
-- $optionsFields
, fieldLabelModifier
, constructorTagModifier
, allNullaryToStringTag
, omitNothingFields
, sumEncoding
, unwrapUnaryRecords
, tagSingleConstructors
-- *** Options utilities
, SumEncoding(..)
, camelTo2
, defaultTaggedObject
-- * Inspecting @'Value's@
, withObject
, withText
, withArray
, withNumber
, withScientific
, withBool
, withEmbeddedJSON
-- * Constructors and accessors
, Series
, pairs
, foldable
, (.:)
, (.:?)
, (.:!)
, (.!=)
, object
-- * Parsing
, json
, json'
) where
import Prelude ()
import Prelude.Compat
import Data.Aeson.Types.FromJSON (ifromJSON)
import Data.Aeson.Encoding (encodingToLazyByteString)
import Data.Aeson.Parser.Internal (decodeWith, decodeStrictWith, eitherDecodeWith, eitherDecodeStrictWith, jsonEOF, json, jsonEOF', json')
import Data.Aeson.Types
import Data.Aeson.Types.Internal (JSONPath, formatError)
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as L
-- | Efficiently serialize a JSON value as a lazy 'L.ByteString'.
--
-- This is implemented in terms of the 'ToJSON' class's 'toEncoding' method.
encode :: (ToJSON a) => a -> L.ByteString
encode = encodingToLazyByteString . toEncoding
-- | Efficiently deserialize a JSON value from a lazy 'L.ByteString'.
-- If this fails due to incomplete or invalid input, 'Nothing' is
-- returned.
--
-- The input must consist solely of a JSON document, with no trailing
-- data except for whitespace.
--
-- This function parses immediately, but defers conversion. See
-- 'json' for details.
decode :: (FromJSON a) => L.ByteString -> Maybe a
decode = decodeWith jsonEOF fromJSON
{-# INLINE decode #-}
-- | Efficiently deserialize a JSON value from a strict 'B.ByteString'.
-- If this fails due to incomplete or invalid input, 'Nothing' is
-- returned.
--
-- The input must consist solely of a JSON document, with no trailing
-- data except for whitespace.
--
-- This function parses immediately, but defers conversion. See
-- 'json' for details.
decodeStrict :: (FromJSON a) => B.ByteString -> Maybe a
decodeStrict = decodeStrictWith jsonEOF fromJSON
{-# INLINE decodeStrict #-}
-- | Efficiently deserialize a JSON value from a lazy 'L.ByteString'.
-- If this fails due to incomplete or invalid input, 'Nothing' is
-- returned.
--
-- The input must consist solely of a JSON document, with no trailing
-- data except for whitespace.
--
-- This function parses and performs conversion immediately. See
-- 'json'' for details.
decode' :: (FromJSON a) => L.ByteString -> Maybe a
decode' = decodeWith jsonEOF' fromJSON
{-# INLINE decode' #-}
-- | Efficiently deserialize a JSON value from a strict 'B.ByteString'.
-- If this fails due to incomplete or invalid input, 'Nothing' is
-- returned.
--
-- The input must consist solely of a JSON document, with no trailing
-- data except for whitespace.
--
-- This function parses and performs conversion immediately. See
-- 'json'' for details.
decodeStrict' :: (FromJSON a) => B.ByteString -> Maybe a
decodeStrict' = decodeStrictWith jsonEOF' fromJSON
{-# INLINE decodeStrict' #-}
eitherFormatError :: Either (JSONPath, String) a -> Either String a
eitherFormatError = either (Left . uncurry formatError) Right
{-# INLINE eitherFormatError #-}
-- | Like 'decode' but returns an error message when decoding fails.
eitherDecode :: (FromJSON a) => L.ByteString -> Either String a
eitherDecode = eitherFormatError . eitherDecodeWith jsonEOF ifromJSON
{-# INLINE eitherDecode #-}
-- | Like 'decodeStrict' but returns an error message when decoding fails.
eitherDecodeStrict :: (FromJSON a) => B.ByteString -> Either String a
eitherDecodeStrict =
eitherFormatError . eitherDecodeStrictWith jsonEOF ifromJSON
{-# INLINE eitherDecodeStrict #-}
-- | Like 'decode'' but returns an error message when decoding fails.
eitherDecode' :: (FromJSON a) => L.ByteString -> Either String a
eitherDecode' = eitherFormatError . eitherDecodeWith jsonEOF' ifromJSON
{-# INLINE eitherDecode' #-}
-- | Like 'decodeStrict'' but returns an error message when decoding fails.
eitherDecodeStrict' :: (FromJSON a) => B.ByteString -> Either String a
eitherDecodeStrict' =
eitherFormatError . eitherDecodeStrictWith jsonEOF' ifromJSON
{-# INLINE eitherDecodeStrict' #-}
-- $use
--
-- This section contains basic information on the different ways to
-- work with data using this library. These range from simple but
-- inflexible, to complex but flexible.
--
-- The most common way to use the library is to define a data type,
-- corresponding to some JSON data you want to work with, and then
-- write either a 'FromJSON' instance, a 'ToJSON' instance, or both
-- for that type.
--
-- For example, given this JSON data:
--
-- > { "name": "Joe", "age": 12 }
--
-- we create a matching data type:
--
-- > {-# LANGUAGE DeriveGeneric #-}
-- >
-- > import GHC.Generics
-- >
-- > data Person = Person {
-- > name :: Text
-- > , age :: Int
-- > } deriving (Generic, Show)
--
-- The @LANGUAGE@ pragma and 'Generic' instance let us write empty
-- 'FromJSON' and 'ToJSON' instances for which the compiler will
-- generate sensible default implementations.
--
-- @
-- instance 'ToJSON' Person where
-- \-- No need to provide a 'toJSON' implementation.
--
-- \-- For efficiency, we write a simple 'toEncoding' implementation, as
-- \-- the default version uses 'toJSON'.
-- 'toEncoding' = 'genericToEncoding' 'defaultOptions'
--
-- instance 'FromJSON' Person
-- \-- No need to provide a 'parseJSON' implementation.
-- @
--
-- We can now encode a value like so:
--
-- > >>> encode (Person {name = "Joe", age = 12})
-- > "{\"name\":\"Joe\",\"age\":12}"
-- $manual
--
-- When necessary, we can write 'ToJSON' and 'FromJSON' instances by
-- hand. This is valuable when the JSON-on-the-wire and Haskell data
-- are different or otherwise need some more carefully managed
-- translation. Let's revisit our JSON data:
--
-- > { "name": "Joe", "age": 12 }
--
-- We once again create a matching data type, without bothering to add
-- a 'Generic' instance this time:
--
-- > data Person = Person {
-- > name :: Text
-- > , age :: Int
-- > } deriving Show
--
-- To decode data, we need to define a 'FromJSON' instance:
--
-- > {-# LANGUAGE OverloadedStrings #-}
-- >
-- > instance FromJSON Person where
-- > parseJSON = withObject "Person" $ \v -> Person
-- > <$> v .: "name"
-- > <*> v .: "age"
--
-- We can now parse the JSON data like so:
--
-- > >>> decode "{\"name\":\"Joe\",\"age\":12}" :: Maybe Person
-- > Just (Person {name = "Joe", age = 12})
--
-- To encode data, we need to define a 'ToJSON' instance. Let's begin
-- with an instance written entirely by hand.
--
-- @
-- instance ToJSON Person where
-- \-- this generates a 'Value'
-- 'toJSON' (Person name age) =
-- 'object' [\"name\" '.=' name, \"age\" '.=' age]
--
-- \-- this encodes directly to a bytestring Builder
-- 'toEncoding' (Person name age) =
-- 'pairs' (\"name\" '.=' 'name' '<>' \"age\" '.=' age)
-- @
--
-- We can now encode a value like so:
--
-- > >>> encode (Person {name = "Joe", age = 12})
-- > "{\"name\":\"Joe\",\"age\":12}"
--
-- There are predefined 'FromJSON' and 'ToJSON' instances for many
-- types. Here's an example using lists and 'Int's:
--
-- > >>> decode "[1,2,3]" :: Maybe [Int]
-- > Just [1,2,3]
--
-- And here's an example using the 'Data.Map.Map' type to get a map of
-- 'Int's.
--
-- > >>> decode "{\"foo\":1,\"bar\":2}" :: Maybe (Map String Int)
-- > Just (fromList [("bar",2),("foo",1)])
-- While the notes below focus on decoding, you can apply almost the
-- same techniques to /encoding/ data. (The main difference is that
-- encoding always succeeds, but decoding has to handle the
-- possibility of failure, where an input doesn't match our
-- expectations.)
--
-- See the documentation of 'FromJSON' and 'ToJSON' for some examples
-- of how you can automatically derive instances in many common
-- circumstances.
-- $ast
--
-- Sometimes you want to work with JSON data directly, without first
-- converting it to a custom data type. This can be useful if you want
-- to e.g. convert JSON data to YAML data, without knowing what the
-- contents of the original JSON data was. The 'Value' type, which is
-- an instance of 'FromJSON', is used to represent an arbitrary JSON
-- AST (abstract syntax tree). Example usage:
--
-- > >>> decode "{\"foo\": 123}" :: Maybe Value
-- > Just (Object (fromList [("foo",Number 123)]))
--
-- > >>> decode "{\"foo\": [\"abc\",\"def\"]}" :: Maybe Value
-- > Just (Object (fromList [("foo",Array (fromList [String "abc",String "def"]))]))
--
-- Once you have a 'Value' you can write functions to traverse it and
-- make arbitrary transformations.
-- $haskell
--
-- We can decode to any instance of 'FromJSON':
--
-- > λ> decode "[1,2,3]" :: Maybe [Int]
-- > Just [1,2,3]
--
-- Alternatively, there are instances for standard data types, so you
-- can use them directly. For example, use the 'Data.Map.Map' type to
-- get a map of 'Int's.
--
-- > λ> import Data.Map
-- > λ> decode "{\"foo\":1,\"bar\":2}" :: Maybe (Map String Int)
-- > Just (fromList [("bar",2),("foo",1)])
-- $mixed
--
-- The above approach with maps of course will not work for mixed-type
-- objects that don't follow a strict schema, but there are a couple
-- of approaches available for these.
--
-- The 'Object' type contains JSON objects:
--
-- > λ> decode "{\"name\":\"Dave\",\"age\":2}" :: Maybe Object
-- > Just (fromList [("name",String "Dave"),("age",Number 2)])
--
-- You can extract values from it with a parser using 'parse',
-- 'parseEither' or, in this example, 'parseMaybe':
--
-- > λ> do result <- decode "{\"name\":\"Dave\",\"age\":2}"
-- > flip parseMaybe result $ \obj -> do
-- > age <- obj .: "age"
-- > name <- obj .: "name"
-- > return (name ++ ": " ++ show (age*2))
-- >
-- > Just "Dave: 4"
--
-- Considering that any type that implements 'FromJSON' can be used
-- here, this is quite a powerful way to parse JSON. See the
-- documentation in 'FromJSON' for how to implement this class for
-- your own data types.
--
-- The downside is that you have to write the parser yourself; the
-- upside is that you have complete control over the way the JSON is
-- parsed.
-- $encoding_and_decoding
--
-- Decoding is a two-step process.
--
-- * When decoding a value, the bytes are first converted to a
-- 'Value', then the 'FromJSON' class is used to convert to the
-- desired type.
--
-- There are two ways to encode a value.
--
-- * Convert to a 'Value' using 'toJSON', then possibly further
-- encode. This was the only method available in aeson 0.9 and
-- earlier.
--
-- * Directly encode (to what will become a 'L.ByteString') using
-- 'toEncoding'. This is much more efficient (about 3x faster, and
-- less memory intensive besides), but is only available in aeson
-- 0.10 and newer.
--
-- For convenience, the 'encode' and 'decode' functions combine both
-- steps.
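--
-- A short sketch (reusing the 'Person' type from the examples above):
--
-- > >>> encode (Person {name = "Joe", age = 12})
-- > "{\"name\":\"Joe\",\"age\":12}"
-- > >>> decode "{\"name\":\"Joe\",\"age\":12}" :: Maybe Person
-- > Just (Person {name = "Joe", age = 12})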
-- $encoding
--
-- In older versions of this library, encoding a Haskell value
-- involved converting to an intermediate 'Value', then encoding that.
--
-- A \"direct\" encoder converts straight from a source Haskell value
-- to a 'BL.ByteString' without constructing an intermediate 'Value'.
-- This approach is faster than 'toJSON', and allocates less memory.
-- The 'toEncoding' method makes it possible to implement direct
-- encoding with low memory overhead.
--
-- To complicate matters, the default implementation of 'toEncoding'
-- uses 'toJSON'. Why? The 'toEncoding' method was added to this
-- library much more recently than 'toJSON'. Using 'toJSON' ensures
-- that packages written against older versions of this library will
-- compile and produce correct output, but they will not see any
-- speedup from direct encoding.
--
-- To write a minimal implementation of direct encoding, your type
-- must implement GHC's 'Generic' class, and your code should look
-- like this:
--
-- @
-- 'toEncoding' = 'genericToEncoding' 'defaultOptions'
-- @
--
-- What if you have more elaborate encoding needs? For example,
-- perhaps you need to change the names of object keys, or omit parts
-- of a value.
--
-- To encode to a JSON \"object\", use the 'pairs' function.
--
-- @
-- 'toEncoding' (Person name age) =
-- 'pairs' (\"name\" '.=' 'name' '<>' \"age\" '.=' age)
-- @
--
-- Any container type that implements 'Foldable' can be encoded to a
-- JSON \"array\" using 'foldable'.
--
-- > > import Data.Sequence as Seq
-- > > encode (Seq.fromList [1,2,3])
-- > "[1,2,3]"
|
sol/aeson
|
Data/Aeson.hs
|
bsd-3-clause
| 14,742
| 0
| 8
| 2,987
| 1,116
| 810
| 306
| 118
| 1
|
{-# LANGUAGE OverlappingInstances, UndecidableInstances #-}
module HJScript.Objects.JQuery where
import HJScript.Lang
import HJScript.DOM.Window
import HJScript.DOM.ElementNode
data JQuery = JQuery deriving Show
instance IsClass JQuery
-- | Constructors for JQuery
instance HasConstructor JQuery JString String
jQuery :: Exp JQuery
jQuery = JConst "jQuery"
selectExpr :: Exp c -> JObject JQuery
selectExpr e = methodCall "jQuery" e window
jSize :: JObject JQuery -> JInt
jSize = methodCallNoArgs "size"
length :: JObject JQuery -> JInt
length = deref "length"
get :: JInt -> JObject JQuery -> Exp ElementNode
get = methodCall "get"
empty :: JObject JQuery -> Exp JQuery
empty = methodCall "empty" ()
jVal :: JObject JQuery -> JString
jVal = methodCall "val" ()
jSetVal :: JString -> JObject JQuery -> JString
jSetVal = methodCall "val"
jText :: JObject JQuery -> JString
jText = methodCall "text" ()
jSetText :: JString -> JObject JQuery -> Exp JQuery
jSetText = methodCall "text"
append :: Exp a -> JObject JQuery -> Exp JQuery
append = methodCall "append"
prepend :: Exp a -> JObject JQuery -> Exp JQuery
prepend = methodCall "prepend"
ready :: HJScript () -> HJScript ()
ready script
= do fn <- procedure $ \() -> script
runExp $ methodCall "jQuery" fn window
change :: HJScript () -> JObject JQuery -> HJScript ()
change script query
= do fn <- procedure $ \() -> script
runExp $ methodCall "change" fn query
submit :: HJScript () -> JObject JQuery -> HJScript ()
submit script query
= do fn <- procedure $ \() -> script
runExp $ methodCall "submit" fn query
select :: HJScript () -> JObject JQuery -> HJScript ()
select script query
= do fn <- procedure $ \() -> script
runExp $ methodCall "select" fn query
runExp :: Exp a -> HJScript ()
runExp = outputStmt . ExpStmt
|
seereason/HJScript
|
src/HJScript/Objects/JQuery.hs
|
bsd-3-clause
| 1,848
| 0
| 10
| 371
| 642
| 314
| 328
| -1
| -1
|
{-# LANGUAGE OverloadedStrings, TemplateHaskell #-}
module StarDict.Format.Ifo(parse,Opt(..)) where
import Data.Text (Text)
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Maybe (fromJust)
import Control.Applicative
import Data.Attoparsec.Text hiding (parse)
import Prelude hiding (unlines)
import StarDict.Format.TH
type IFO = [Opt]
data ContentType = Pure | Locale | Pango
| Phonetic | Xdxf | Kana | PowerWord
| MediaWiki | Html | ResourseList
| WavFile | PicFile | Experimental
deriving (Show,Eq,Ord)
data Opt = BookName Text
| WordCount Integer
| SynWordCount Integer
| IdxFileSize Integer
| IdxOffsetBits Integer
| Author Text
| Email Text
| Website Text
| Description Text
| Date Text
| SameTypeSequence (Set ContentType)
deriving (Show,Eq,Ord)
buildIFOParser ''Opt
header = string "StarDict's dict ifo file" *> skipSpace
version = string "version=" *> (string "2.4.2" <|> string "3.0.0") *> skipSpace
options = some (parseIFO <|> parseSameTypeSequence)
parseSameTypeSequence =
let types =
[('m',Pure),
('l',Locale),
('g',Pango),
('t',Phonetic),
('x',Xdxf),
('y',Kana),
('k',PowerWord),
('w',MediaWiki),
('h',Html),
('r',ResourseList),
('W',WavFile),
('P',PicFile),
('X',Experimental)]
in fmap (SameTypeSequence . Set.fromList)
(string "sametypesequence" *> char '=' *>
some (anyChar >>= maybe (fail "foo") return . flip lookup types)
<* (endOfLine <|> endOfInput))
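-- For example (illustrative): the line "sametypesequence=mt" parses to
-- SameTypeSequence (Set.fromList [Pure,Phonetic]).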
parse :: Text -> Either String [Opt]
parse t = case parseOnly (header *> version *> options) t of
Right x -> return x
Left e -> fail e
|
polachok/hdict
|
src/StarDict/Format/Ifo.hs
|
bsd-3-clause
| 1,824
| 2
| 17
| 506
| 577
| 333
| 244
| 55
| 2
|
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Tree.DeepSeq where
import Tree.Types
import Control.DeepSeq
instance NFData Tree where
rnf Leaf = ()
rnf (Fork a b) = rnf a `seq` rnf b `seq` ()
|
thoughtpolice/binary-serialise-cbor
|
bench/Tree/DeepSeq.hs
|
bsd-3-clause
| 198
| 0
| 8
| 39
| 71
| 39
| 32
| 7
| 0
|
module Main ( main ) where
import Data.Map ( Map )
import Data.Monoid
import Data.Set ( Set )
import System.Environment ( getArgs, withArgs )
import System.FilePath
import Test.HUnit ( assertEqual )
import LLVM.Analysis
import LLVM.Analysis.AccessPath
import LLVM.Analysis.CallGraph
import LLVM.Analysis.CallGraphSCCTraversal
import LLVM.Analysis.Util.Testing
import LLVM.Parse
import Foreign.Inference.Interface
import Foreign.Inference.Preprocessing
import Foreign.Inference.Analysis.Finalize
import Foreign.Inference.Analysis.IndirectCallResolver
import Foreign.Inference.Analysis.SAP
import Foreign.Inference.Analysis.SAPPTRel
import Foreign.Inference.Analysis.Util.CompositeSummary
main :: IO ()
main = do
args <- getArgs
let pattern = case args of
[] -> "tests/sap/return/*.c"
[infile] -> infile
_ -> error "At most one argument allowed"
ds <- loadDependencies [] []
let testDescriptors = [
TestDescriptor { testPattern = pattern
, testExpectedMapping = (<.> "expected")
, testResultBuilder = analyzeSAPs ds
, testResultComparator = assertEqual
}
]
withArgs [] $ testAgainstExpected requiredOptimizations bcParser testDescriptors
where
bcParser = parseLLVMFile defaultParserOptions
type Summary = (Int, String, [AccessType])
analyzeSAPs :: DependencySummary -> Module -> Map String (Set Summary)
analyzeSAPs ds m =
sapReturnResultToTestFormat (_sapSummary res)
where
ics = identifyIndirectCallTargets m
cg = callGraph m ics []
analyses :: [ComposableAnalysis AnalysisSummary FunctionMetadata]
analyses = [ identifyFinalizers ds ics finalizerSummary
, identifySAPPTRels ds sapPTRelSummary
, identifySAPs ds ics sapSummary sapPTRelSummary finalizerSummary
]
analysisFunc = callGraphComposeAnalysis analyses
res = callGraphSCCTraversal cg analysisFunc mempty
|
travitch/foreign-inference
|
tests/SAPTests.hs
|
bsd-3-clause
| 1,989
| 0
| 13
| 424
| 451
| 253
| 198
| 47
| 3
|
module ML.Common where
import Foreign.C
toBool :: CUChar -> Bool
toBool = (/= 0)
|
jxv/ml-hs
|
src/ML/Common.hs
|
bsd-3-clause
| 83
| 0
| 5
| 16
| 29
| 18
| 11
| 4
| 1
|
{-# LANGUAGE CPP #-}
module TcSimplify(
simplifyInfer, InferMode(..),
growThetaTyVars,
simplifyAmbiguityCheck,
simplifyDefault,
simplifyTop, simplifyInteractive, solveEqualities,
simplifyWantedsTcM,
tcCheckSatisfiability,
-- For Rules we need these
solveWanteds, runTcSDeriveds
) where
#include "HsVersions.h"
import Bag
import Class ( Class, classKey, classTyCon )
import DynFlags ( WarningFlag ( Opt_WarnMonomorphism )
, WarnReason ( Reason )
, DynFlags( solverIterations ) )
import Inst
import ListSetOps
import Maybes
import Name
import Outputable
import PrelInfo
import PrelNames
import TcErrors
import TcEvidence
import TcInteract
import TcCanonical ( makeSuperClasses )
import TcMType as TcM
import TcRnMonad as TcM
import TcSMonad as TcS
import TcType
import TrieMap () -- DV: for now
import Type
import TysWiredIn ( ptrRepLiftedTy )
import Unify ( tcMatchTyKi )
import Util
import Var
import VarSet
import UniqFM
import BasicTypes ( IntWithInf, intGtLimit )
import ErrUtils ( emptyMessages )
import qualified GHC.LanguageExtensions as LangExt
import Control.Monad ( when, unless )
import Data.List ( partition )
{-
*********************************************************************************
* *
* External interface *
* *
*********************************************************************************
-}
simplifyTop :: WantedConstraints -> TcM (Bag EvBind)
-- Simplify top-level constraints
-- Usually these will be implications,
-- but when there is nothing to quantify we don't wrap
-- in a degenerate implication, so we do that here instead
simplifyTop wanteds
= do { traceTc "simplifyTop {" $ text "wanted = " <+> ppr wanteds
; ((final_wc, unsafe_ol), binds1) <- runTcS $
do { final_wc <- simpl_top wanteds
; unsafe_ol <- getSafeOverlapFailures
; return (final_wc, unsafe_ol) }
; traceTc "End simplifyTop }" empty
; traceTc "reportUnsolved {" empty
; binds2 <- reportUnsolved final_wc
; traceTc "reportUnsolved }" empty
; traceTc "reportUnsolved (unsafe overlapping) {" empty
; unless (isEmptyCts unsafe_ol) $ do {
-- grab current error messages and clear, warnAllUnsolved will
-- update error messages which we'll grab and then restore saved
-- messages.
; errs_var <- getErrsVar
; saved_msg <- TcM.readTcRef errs_var
; TcM.writeTcRef errs_var emptyMessages
; warnAllUnsolved $ WC { wc_simple = unsafe_ol
, wc_insol = emptyCts
, wc_impl = emptyBag }
; whyUnsafe <- fst <$> TcM.readTcRef errs_var
; TcM.writeTcRef errs_var saved_msg
; recordUnsafeInfer whyUnsafe
}
; traceTc "reportUnsolved (unsafe overlapping) }" empty
; return (evBindMapBinds binds1 `unionBags` binds2) }
-- | Type-check a thing that emits only equality constraints, then
-- solve those constraints. Fails outright if there is trouble.
solveEqualities :: TcM a -> TcM a
solveEqualities thing_inside
= checkNoErrs $ -- See Note [Fail fast on kind errors]
do { (result, wanted) <- captureConstraints thing_inside
; traceTc "solveEqualities {" $ text "wanted = " <+> ppr wanted
; final_wc <- runTcSEqualities $ simpl_top wanted
; traceTc "End solveEqualities }" empty
; traceTc "reportAllUnsolved {" empty
; reportAllUnsolved final_wc
; traceTc "reportAllUnsolved }" empty
; return result }
simpl_top :: WantedConstraints -> TcS WantedConstraints
-- See Note [Top-level Defaulting Plan]
simpl_top wanteds
= do { wc_first_go <- nestTcS (solveWantedsAndDrop wanteds)
-- This is where the main work happens
; try_tyvar_defaulting wc_first_go }
where
try_tyvar_defaulting :: WantedConstraints -> TcS WantedConstraints
try_tyvar_defaulting wc
| isEmptyWC wc
= return wc
| otherwise
= do { free_tvs <- TcS.zonkTyCoVarsAndFVList (tyCoVarsOfWCList wc)
; let meta_tvs = filter (isTyVar <&&> isMetaTyVar) free_tvs
-- zonkTyCoVarsAndFV: the wc_first_go is not yet zonked
-- filter isMetaTyVar: we might have runtime-skolems in GHCi,
-- and we definitely don't want to try to assign to those!
-- the isTyVar needs to weed out coercion variables
; defaulted <- mapM defaultTyVarTcS meta_tvs -- Has unification side effects
; if or defaulted
then do { wc_residual <- nestTcS (solveWanteds wc)
-- See Note [Must simplify after defaulting]
; try_class_defaulting wc_residual }
else try_class_defaulting wc } -- No defaulting took place
try_class_defaulting :: WantedConstraints -> TcS WantedConstraints
try_class_defaulting wc
| isEmptyWC wc
= return wc
| otherwise -- See Note [When to do type-class defaulting]
= do { something_happened <- applyDefaultingRules wc
-- See Note [Top-level Defaulting Plan]
; if something_happened
then do { wc_residual <- nestTcS (solveWantedsAndDrop wc)
; try_class_defaulting wc_residual }
-- See Note [Overview of implicit CallStacks] in TcEvidence
else try_callstack_defaulting wc }
try_callstack_defaulting :: WantedConstraints -> TcS WantedConstraints
try_callstack_defaulting wc
| isEmptyWC wc
= return wc
| otherwise
= defaultCallStacks wc
-- | Default any remaining @CallStack@ constraints to empty @CallStack@s.
defaultCallStacks :: WantedConstraints -> TcS WantedConstraints
-- See Note [Overview of implicit CallStacks] in TcEvidence
defaultCallStacks wanteds
= do simples <- handle_simples (wc_simple wanteds)
implics <- mapBagM handle_implic (wc_impl wanteds)
return (wanteds { wc_simple = simples, wc_impl = implics })
where
handle_simples simples
= catBagMaybes <$> mapBagM defaultCallStack simples
handle_implic implic
= do { wanteds <- setEvBindsTcS (ic_binds implic) $
-- defaultCallStack sets a binding, so
-- we must set the correct binding group
defaultCallStacks (ic_wanted implic)
; return (implic { ic_wanted = wanteds }) }
defaultCallStack ct
| Just _ <- isCallStackPred (ctPred ct)
= do { solveCallStack (cc_ev ct) EvCsEmpty
; return Nothing }
defaultCallStack ct
= return (Just ct)
{- Note [Fail fast on kind errors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
solveEqualities is used to solve kind equalities when kind-checking
user-written types. If solving fails we should fail outright, rather
than just accumulate an error message, for two reasons:
* A kind-bogus type signature may cause a cascade of knock-on
errors if we let it pass
* More seriously, we don't have a convenient term-level place to add
deferred bindings for unsolved kind-equality constraints, so we
  don't build evidence bindings (by using reportAllUnsolved). That
  means that we'll be left with a type that has coercion holes
in it, something like
<type> |> co-hole
where co-hole is not filled in. Eeek! That un-filled-in
hole actually causes GHC to crash with "fvProv falls into a hole"
See Trac #11563, #11520, #11516, #11399
So it's important to use 'checkNoErrs' here!
Note [When to do type-class defaulting]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In GHC 7.6 and 7.8.2, we did type-class defaulting only if insolubleWC
was false, on the grounds that defaulting can't help solve insoluble
constraints. But if we *don't* do defaulting we may report a whole
lot of errors that would be solved by defaulting; these errors are
quite spurious because fixing the single insoluble error means that
defaulting happens again, which makes all the other errors go away.
This is jolly confusing: Trac #9033.
So it seems better to always do type-class defaulting.
However, always doing defaulting does mean that we'll do it in
situations like this (Trac #5934):
run :: (forall s. GenST s) -> Int
run = fromInteger 0
We don't unify the return type of fromInteger with the given function
type, because the latter involves foralls. So we're left with
(Num alpha, alpha ~ (forall s. GenST s) -> Int)
Now we do defaulting, get alpha := Integer, and report that we can't
match Integer with (forall s. GenST s) -> Int. That's not totally
stupid, but perhaps a little strange.
Another potential alternative would be to suppress *all* non-insoluble
errors if there are *any* insoluble errors, anywhere, but that seems
too drastic.
Note [Must simplify after defaulting]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We may have a deeply buried constraint
(t:*) ~ (a:Open)
which we couldn't solve because of the kind incompatibility, and 'a' is free.
Then when we default 'a' we can solve the constraint. And we want to do
that before starting in on type classes. We MUST do it before reporting
errors, because it isn't an error! Trac #7967 was due to this.
Note [Top-level Defaulting Plan]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We have considered two design choices for where/when to apply defaulting.
(i) Do it in SimplCheck mode only /whenever/ you try to solve some
simple constraints, maybe deep inside the context of implications.
This used to be the case in GHC 7.4.1.
(ii) Do it in a tight loop at simplifyTop, once all other constraints have
finished. This is the current story.
Option (i) had many disadvantages:
a) Firstly, it was deep inside the actual solver.
b) Secondly, it was dependent on the context (Infer a type signature,
or Check a type signature, or Interactive) since we did not want
to always start defaulting when inferring (though there is an exception to
this, see Note [Default while Inferring]).
c) It plainly did not work. Consider typecheck/should_compile/DfltProb2.hs:
f :: Int -> Bool
f x = const True (\y -> let w :: a -> a
w a = const a (y+1)
in w y)
We will get an implication constraint (for beta the type of y):
[untch=beta] forall a. 0 => Num beta
which we really cannot default /while solving/ the implication, since beta is
untouchable.
Instead our new defaulting story is to pull defaulting out of the solver loop and
go with option (ii), implemented at SimplifyTop. Namely:
- First, have a go at solving the residual constraint of the whole
program
- Try to approximate it with a simple constraint
- Figure out derived defaulting equations for that simple constraint
- Go round the loop again if you did manage to get some equations
Now, that has to do with class defaulting. However there exists type variable /kind/
defaulting. Again this is done at the top-level and the plan is:
- At the top-level, once you had a go at solving the constraint, do
figure out /all/ the touchable unification variables of the wanted constraints.
- Apply defaulting to their kinds
More details in Note [DefaultTyVar].
Note [Safe Haskell Overlapping Instances]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In Safe Haskell, we apply an extra restriction to overlapping instances. The
motive is to prevent untrusted code provided by a third-party, changing the
behavior of trusted code through type-classes. This is due to the global and
implicit nature of type-classes that can hide the source of the dictionary.
Another way to state this is: if a module M compiles without importing another
module N, changing M to import N shouldn't change the behavior of M.
Overlapping instances with type-classes can violate this principle. However,
overlapping instances aren't always unsafe. They are just unsafe when the most
selected dictionary comes from untrusted code (code compiled with -XSafe) and
overlaps instances provided by other modules.
In particular, in Safe Haskell at a call site with overlapping instances, we
apply the following rule to determine if it is an 'unsafe' overlap:
1) Most specific instance, I1, defined in an `-XSafe` compiled module.
2) I1 is an orphan instance or a MPTC.
3) At least one overlapped instance, Ix, is both:
A) from a different module than I1
B) Ix is not marked `OVERLAPPABLE`
This is a slightly involved heuristic, but captures the situation of an
imported module N changing the behavior of existing code. For example, if
condition (2) isn't violated, then the module author M must depend either on a
type-class or type defined in N.
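For illustration (a hypothetical pair of modules, not from the testsuite):
    module M where                          -- trusted code
      class C a where f :: a -> Int
      instance C [a] where f _ = 0
    {-# LANGUAGE Safe #-}
    module N where                          -- untrusted third-party code
      import M
      instance {-# OVERLAPPING #-} C [Int] where f _ = 1
A call f [1::Int] in a Safe module selects N's instance, which is (1) defined
in an -XSafe module, (2) an orphan, and (3) overlaps M's instance, which comes
from a different module and is not marked OVERLAPPABLE, so the overlap is
flagged as unsafe.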
Secondly, when should these heuristics be enforced? We enforce them when the
type-class method call site is in a module marked `-XSafe` or `-XTrustworthy`.
This allows `-XUnsafe` modules to operate without restriction, and for Safe
Haskell inference to infer modules with unsafe overlaps as unsafe.
One alternative design would be to also consider if an instance was imported as
a `safe` import or not and only apply the restriction to instances imported
safely. However, since instances are global and can be imported through more
than one path, this alternative doesn't work.
Note [Safe Haskell Overlapping Instances Implementation]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
How is this implemented? It's complicated! So we'll step through it all:
1) `InstEnv.lookupInstEnv` -- Performs instance resolution, so this is where
we check if a particular type-class method call is safe or unsafe. We do this
through the return type, `ClsInstLookupResult`, where the last parameter is a
list of instances that are unsafe to overlap. When the method call is safe,
the list is null.
2) `TcInteract.matchClassInst` -- This module drives the instance resolution
/ dictionary generation. The return type is `LookupInstResult`, which either
says no instance matched, or one found, and if it was a safe or unsafe
overlap.
3) `TcInteract.doTopReactDict` -- Takes a dictionary / class constraint and
tries to resolve it by calling (in part) `matchClassInst`. The resolving
mechanism has a work list (of constraints) that it process one at a time. If
the constraint can't be resolved, it's added to an inert set. When compiling
an `-XSafe` or `-XTrustworthy` module, we follow this approach as we know
compilation should fail. These are handled as normal constraint resolution
failures from here-on (see step 6).
Otherwise, we may be inferring safety (or using `-Wunsafe`), and
compilation should succeed, but print warnings and/or mark the compiled module
as `-XUnsafe`. In this case, we call `insertSafeOverlapFailureTcS` which adds
the unsafe (but resolved!) constraint to the `inert_safehask` field of
`InertCans`.
4) `TcSimplify.simplifyTop`:
* Call simpl_top, the top-level function for driving the simplifier for
constraint resolution.
* Once finished, call `getSafeOverlapFailures` to retrieve the
list of overlapping instances that were successfully resolved,
but unsafe. Remember, this is only applicable for generating warnings
(`-Wunsafe`) or inferring a module unsafe. `-XSafe` and `-XTrustworthy`
cause compilation failure by not resolving the unsafe constraint at all.
* For unresolved constraints (all types), call `TcErrors.reportUnsolved`,
while for resolved but unsafe overlapping dictionary constraints, call
`TcErrors.warnAllUnsolved`. Both functions convert constraints into a
warning message for the user.
* In the case of `warnAllUnsolved` for resolved, but unsafe
dictionary constraints, we collect the generated warning
message (pop it) and call `TcRnMonad.recordUnsafeInfer` to
mark the module we are compiling as unsafe, passing the
warning message along as the reason.
5) `TcErrors.*Unsolved` -- Generates error messages for constraints by
actually calling `InstEnv.lookupInstEnv` again! Yes, confusing, but all we
know is the constraint that is unresolved or unsafe. For dictionary, all we
know is that we need a dictionary of type C, but not what instances are
available and how they overlap. So we once again call `lookupInstEnv` to
figure that out so we can generate a helpful error message.
6) `TcRnMonad.recordUnsafeInfer` -- Save the unsafe result and reason in an
IORef called `tcg_safeInfer`.
7) `HscMain.tcRnModule'` -- Reads `tcg_safeInfer` after type-checking, calling
   `HscMain.markUnsafeInfer` (passing the reason along) when safe-inference
failed.
Note [No defaulting in the ambiguity check]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When simplifying constraints for the ambiguity check, we use
solveWantedsAndDrop, not simpl_top, so that we do no defaulting.
Trac #11947 was an example:
f :: Num a => Int -> Int
This is ambiguous of course, but we don't want to default the
(Num alpha) constraint to (Num Int)! Doing so gives a defaulting
warning, but no error.
-}
------------------
simplifyAmbiguityCheck :: Type -> WantedConstraints -> TcM ()
simplifyAmbiguityCheck ty wanteds
= do { traceTc "simplifyAmbiguityCheck {" (text "type = " <+> ppr ty $$ text "wanted = " <+> ppr wanteds)
; (final_wc, _) <- runTcS $ solveWantedsAndDrop wanteds
-- NB: no defaulting! See Note [No defaulting in the ambiguity check]
; traceTc "End simplifyAmbiguityCheck }" empty
-- Normally report all errors; but with -XAllowAmbiguousTypes
-- report only insoluble ones, since they represent genuinely
-- inaccessible code
; allow_ambiguous <- xoptM LangExt.AllowAmbiguousTypes
; traceTc "reportUnsolved(ambig) {" empty
; unless (allow_ambiguous && not (insolubleWC final_wc))
(discardResult (reportUnsolved final_wc))
; traceTc "reportUnsolved(ambig) }" empty
; return () }
------------------
simplifyInteractive :: WantedConstraints -> TcM (Bag EvBind)
simplifyInteractive wanteds
= traceTc "simplifyInteractive" empty >>
simplifyTop wanteds
------------------
simplifyDefault :: ThetaType -- Wanted; has no type variables in it
-> TcM () -- Succeeds if the constraint is soluble
simplifyDefault theta
= do { traceTc "simplifyDefault" empty
; wanteds <- newWanteds DefaultOrigin theta
; unsolved <- runTcSDeriveds (solveWantedsAndDrop (mkSimpleWC wanteds))
; traceTc "reportUnsolved {" empty
; reportAllUnsolved unsolved
; traceTc "reportUnsolved }" empty
; return () }
------------------
tcCheckSatisfiability :: Bag EvVar -> TcM Bool
-- Return True if satisfiable, False if definitely contradictory
tcCheckSatisfiability given_ids
= do { lcl_env <- TcM.getLclEnv
; let given_loc = mkGivenLoc topTcLevel UnkSkol lcl_env
; (res, _ev_binds) <- runTcS $
do { traceTcS "checkSatisfiability {" (ppr given_ids)
; let given_cts = mkGivens given_loc (bagToList given_ids)
-- See Note [Superclasses and satisfiability]
; solveSimpleGivens given_cts
; insols <- getInertInsols
; insols <- try_harder insols
; traceTcS "checkSatisfiability }" (ppr insols)
; return (isEmptyBag insols) }
; return res }
where
try_harder :: Cts -> TcS Cts
-- Maybe we have to search up the superclass chain to find
-- an unsatisfiable constraint. Example: pmcheck/T3927b.
-- At the moment we try just once
try_harder insols
| not (isEmptyBag insols) -- We've found that it's definitely unsatisfiable
= return insols -- Hurrah -- stop now.
| otherwise
= do { pending_given <- getPendingScDicts
; new_given <- makeSuperClasses pending_given
; solveSimpleGivens new_given
; getInertInsols }
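-- An illustrative call (a sketch, not from the original source): if the given
-- Ids carry the constraints {a ~ Int, a ~ Bool}, solving the givens leaves an
-- insoluble (Int ~ Bool) in the inert set, so we return False and the caller
-- (e.g. the pattern-match checker) can treat the branch as inaccessible.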
{- Note [Superclasses and satisfiability]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Expand superclasses before starting, because (Int ~ Bool), has
(Int ~~ Bool) as a superclass, which in turn has (Int ~N# Bool)
as a superclass, and it's the latter that is insoluble. See
Note [The equality types story] in TysPrim.
If we fail to prove unsatisfiability we (arbitrarily) try just once to
find superclasses, using try_harder. Reason: we might have a type
signature
f :: F op (Implements push) => ..
where F is a type function. This happened in Trac #3972.
We could do more than once but we'd have to have /some/ limit: in the
recursive case, we would go on forever in the common case where
the constraints /are/ satisfiable (Trac #10592 comment:12!).
For straightforward situations without type functions the try_harder
step does nothing.
***********************************************************************************
* *
* Inference
* *
***********************************************************************************
Note [Inferring the type of a let-bound variable]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
f x = rhs
To infer f's type we do the following:
* Gather the constraints for the RHS with ambient level *one more than*
the current one. This is done by the call
pushLevelAndCaptureConstraints (tcMonoBinds...)
in TcBinds.tcPolyInfer
* Call simplifyInfer to simplify the constraints and decide what to
quantify over. We pass in the level used for the RHS constraints,
here called rhs_tclvl.
This ensures that the implication constraint we generate, if any,
has a strictly-increased level compared to the ambient level outside
the let binding.
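A small sketch of the level book-keeping (illustrative; the concrete numbers
are invented, not taken from a real trace):
    -- ambient TcLevel = 1
    f x = rhs     -- constraints from 'rhs' are gathered at TcLevel 2
Then simplifyInfer is called with rhs_tclvl = 2, and any residual implication
it emits carries ic_tclvl = 2, strictly greater than the ambient level 1.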
-}
-- | How should we choose which constraints to quantify over?
data InferMode = ApplyMR -- ^ Apply the monomorphism restriction,
-- never quantifying over any constraints
| EagerDefaulting -- ^ See Note [TcRnExprMode] in TcRnDriver,
-- the :type +d case; this mode refuses
-- to quantify over any defaultable constraint
| NoRestrictions -- ^ Quantify over any constraint that
-- satisfies TcType.pickQuantifiablePreds
instance Outputable InferMode where
ppr ApplyMR = text "ApplyMR"
ppr EagerDefaulting = text "EagerDefaulting"
ppr NoRestrictions = text "NoRestrictions"
simplifyInfer :: TcLevel -- Used when generating the constraints
-> InferMode
-> [TcIdSigInst] -- Any signatures (possibly partial)
-> [(Name, TcTauType)] -- Variables to be generalised,
-- and their tau-types
-> WantedConstraints
-> TcM ([TcTyVar], -- Quantify over these type variables
[EvVar], -- ... and these constraints (fully zonked)
TcEvBinds) -- ... binding these evidence variables
simplifyInfer rhs_tclvl infer_mode sigs name_taus wanteds
| isEmptyWC wanteds
= do { gbl_tvs <- tcGetGlobalTyCoVars
; dep_vars <- zonkTcTypesAndSplitDepVars (map snd name_taus)
; qtkvs <- quantifyZonkedTyVars gbl_tvs dep_vars
; traceTc "simplifyInfer: empty WC" (ppr name_taus $$ ppr qtkvs)
; return (qtkvs, [], emptyTcEvBinds) }
| otherwise
= do { traceTc "simplifyInfer {" $ vcat
[ text "sigs =" <+> ppr sigs
, text "binds =" <+> ppr name_taus
, text "rhs_tclvl =" <+> ppr rhs_tclvl
, text "infer_mode =" <+> ppr infer_mode
, text "(unzonked) wanted =" <+> ppr wanteds
]
; let partial_sigs = filter isPartialSig sigs
psig_theta = concatMap sig_inst_theta partial_sigs
-- First do full-blown solving
-- NB: we must gather up all the bindings from doing
-- this solving; hence (runTcSWithEvBinds ev_binds_var).
-- And note that since there are nested implications,
-- calling solveWanteds will side-effect their evidence
-- bindings, so we can't just revert to the input
-- constraint.
; tc_lcl_env <- TcM.getLclEnv
; ev_binds_var <- TcM.newTcEvBinds
; psig_theta_vars <- mapM TcM.newEvVar psig_theta
; wanted_transformed_incl_derivs
<- setTcLevel rhs_tclvl $
runTcSWithEvBinds ev_binds_var $
do { let loc = mkGivenLoc rhs_tclvl UnkSkol tc_lcl_env
psig_givens = mkGivens loc psig_theta_vars
; _ <- solveSimpleGivens psig_givens
-- See Note [Add signature contexts as givens]
; solveWanteds wanteds }
; wanted_transformed_incl_derivs <- TcM.zonkWC wanted_transformed_incl_derivs
-- Find quant_pred_candidates, the predicates that
-- we'll consider quantifying over
-- NB1: wanted_transformed does not include anything provable from
-- the psig_theta; it's just the extra bit
-- NB2: We do not do any defaulting when inferring a type; this can lead
-- to less polymorphic types, see Note [Default while Inferring]
; let wanted_transformed = dropDerivedWC wanted_transformed_incl_derivs
; quant_pred_candidates -- Fully zonked
<- if insolubleWC wanted_transformed_incl_derivs
then return [] -- See Note [Quantification with errors]
-- NB: must include derived errors in this test,
-- hence "incl_derivs"
else do { let quant_cand = approximateWC False wanted_transformed
meta_tvs = filter isMetaTyVar $
tyCoVarsOfCtsList quant_cand
; gbl_tvs <- tcGetGlobalTyCoVars
-- Minimise quant_cand. We are not interested in any evidence
-- produced, because we are going to simplify wanted_transformed
-- again later. All we want here are the predicates over which to
-- quantify.
--
-- If any meta-tyvar unifications take place (unlikely),
-- we'll pick that up later.
-- See Note [Promote _and_ default when inferring]
; let def_tyvar tv
= when (not $ tv `elemVarSet` gbl_tvs) $
defaultTyVar tv
; mapM_ def_tyvar meta_tvs
; mapM_ (promoteTyVar rhs_tclvl) meta_tvs
; clone_wanteds <- mapM cloneWanted (bagToList quant_cand)
; WC { wc_simple = simples }
<- setTcLevel rhs_tclvl $
simplifyWantedsTcM clone_wanteds
-- Discard evidence; result is zonked
; return [ ctEvPred ev | ct <- bagToList simples
, let ev = ctEvidence ct ] }
-- NB: quant_pred_candidates is already fully zonked
-- Decide what type variables and constraints to quantify
-- NB: bound_theta are constraints we want to quantify over,
-- /apart from/ the psig_theta, which we always quantify over
; (qtvs, bound_theta) <- decideQuantification infer_mode name_taus psig_theta
quant_pred_candidates
-- Promote any type variables that are free in the inferred type
-- of the function:
-- f :: forall qtvs. bound_theta => zonked_tau
-- These variables now become free in the envt, and hence will show
-- up whenever 'f' is called. They may currently at rhs_tclvl, but
-- they had better be unifiable at the outer_tclvl!
-- Example: envt mentions alpha[1]
-- tau_ty = beta[2] -> beta[2]
--              constraints = alpha ~ [beta]
-- we don't quantify over beta (since it is fixed by envt)
-- so we must promote it! The inferred type is just
-- f :: beta -> beta
; zonked_taus <- mapM (TcM.zonkTcType . snd) name_taus
-- decideQuantification turned some meta tyvars into
-- quantified skolems, so we have to zonk again
; let phi_tkvs = tyCoVarsOfTypes bound_theta -- Already zonked
`unionVarSet` tyCoVarsOfTypes zonked_taus
promote_tkvs = closeOverKinds phi_tkvs `delVarSetList` qtvs
; MASSERT2( closeOverKinds promote_tkvs `subVarSet` promote_tkvs
, ppr phi_tkvs $$
ppr (closeOverKinds phi_tkvs) $$
ppr promote_tkvs $$
ppr (closeOverKinds promote_tkvs) )
-- we really don't want a type to be promoted when its kind isn't!
-- promoteTyVar ignores coercion variables
; outer_tclvl <- TcM.getTcLevel
; mapM_ (promoteTyVar outer_tclvl) (nonDetEltsUFM promote_tkvs)
-- It's OK to use nonDetEltsUFM here because promoteTyVar is
-- commutative
-- Emit an implication constraint for the
-- remaining constraints from the RHS
-- extra_qtvs: see Note [Quantification and partial signatures]
; bound_theta_vars <- mapM TcM.newEvVar bound_theta
; psig_theta_vars <- mapM zonkId psig_theta_vars
; all_qtvs <- add_psig_tvs qtvs
[ tv | sig <- partial_sigs
, (_,tv) <- sig_inst_skols sig ]
; let full_theta = psig_theta ++ bound_theta
full_theta_vars = psig_theta_vars ++ bound_theta_vars
skol_info = InferSkol [ (name, mkSigmaTy [] full_theta ty)
| (name, ty) <- name_taus ]
-- Don't add the quantified variables here, because
-- they are also bound in ic_skols and we want them
-- to be tidied uniformly
implic = Implic { ic_tclvl = rhs_tclvl
, ic_skols = all_qtvs
, ic_no_eqs = False
, ic_given = full_theta_vars
, ic_wanted = wanted_transformed
, ic_status = IC_Unsolved
, ic_binds = ev_binds_var
, ic_info = skol_info
, ic_env = tc_lcl_env }
; emitImplication implic
-- All done!
; traceTc "} simplifyInfer/produced residual implication for quantification" $
vcat [ text "quant_pred_candidates =" <+> ppr quant_pred_candidates
, text "promote_tvs=" <+> ppr promote_tkvs
, text "psig_theta =" <+> ppr psig_theta
, text "bound_theta =" <+> ppr bound_theta
, text "full_theta =" <+> ppr full_theta
, text "qtvs =" <+> ppr qtvs
, text "implic =" <+> ppr implic ]
; return ( qtvs, full_theta_vars, TcEvBinds ev_binds_var ) }
where
add_psig_tvs qtvs [] = return qtvs
add_psig_tvs qtvs (tv:tvs)
= do { tv <- zonkTcTyVarToTyVar tv
; if tv `elem` qtvs
then add_psig_tvs qtvs tvs
else do { mb_tv <- zonkQuantifiedTyVar False tv
; case mb_tv of
Nothing -> add_psig_tvs qtvs tvs
Just tv -> add_psig_tvs (tv:qtvs) tvs } }
{- Note [Add signature contexts as givens]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this (Trac #11016):
f2 :: (?x :: Int) => _
f2 = ?x
or this
f3 :: a ~ Bool => (a, _)
f3 = (True, False)
or this
f4 :: (Ord a, _) => a -> Bool
f4 x = x==x
We'll use plan InferGen because there are holes in the type. But:
* For f2 we want to have the (?x :: Int) constraint floating around
so that the functional dependencies kick in. Otherwise the
occurrence of ?x on the RHS produces constraint (?x :: alpha), and
we won't unify alpha:=Int.
* For f3 we want the (a ~ Bool) available to solve the wanted (a ~ Bool)
in the RHS
* For f4 we want to use the (Ord a) in the signature to solve the Eq a
constraint.
Solution: in simplifyInfer, just before simplifying the constraints
gathered from the RHS, add Given constraints for the context of any
type signatures.
************************************************************************
* *
Quantification
* *
************************************************************************
Note [Deciding quantification]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If the monomorphism restriction does not apply, then we quantify as follows:
* Take the global tyvars, and "grow" them using the equality constraints
E.g. if x:alpha is in the environment, and alpha ~ [beta] (which can
happen because alpha is untouchable here) then do not quantify over
beta, because alpha fixes beta, and beta is effectively free in
the environment too
These are the mono_tvs
* Take the free vars of the tau-type (zonked_tau_tvs) and "grow" them
using all the constraints. These are tau_tvs_plus
* Use quantifyTyVars to quantify over (tau_tvs_plus - mono_tvs), being
careful to close over kinds, and to skolemise the quantified tyvars.
(This actually unifies each quantified meta-tyvar with a fresh skolem.)
Result is qtvs.
* Filter the constraints using pickQuantifiablePreds and the qtvs.
We have to zonk the constraints first, so they "see" the freshly
created skolems.
If the MR does apply, mono_tvs includes all the constrained tyvars --
including all covars -- and the quantified constraints are empty/insoluble.
-}
decideQuantification
:: InferMode
-> [(Name, TcTauType)] -- Variables to be generalised
-> [PredType] -- All annotated constraints from signatures
-> [PredType] -- Candidate theta
-> TcM ( [TcTyVar] -- Quantify over these (skolems)
, [PredType] ) -- and this context (fully zonked)
-- See Note [Deciding quantification]
decideQuantification infer_mode name_taus psig_theta candidates
= do { gbl_tvs <- tcGetGlobalTyCoVars
; zonked_taus <- mapM TcM.zonkTcType (psig_theta ++ taus)
-- psig_theta: see Note [Quantification and partial signatures]
; ovl_strings <- xoptM LangExt.OverloadedStrings
; let DV {dv_kvs = zkvs, dv_tvs = ztvs} = splitDepVarsOfTypes zonked_taus
(gbl_cand, quant_cand) -- gbl_cand = do not quantify me
= case infer_mode of -- quant_cand = try to quantify me
ApplyMR -> (candidates, [])
NoRestrictions -> ([], candidates)
EagerDefaulting -> partition is_interactive_ct candidates
where
is_interactive_ct ct
| Just (cls, _) <- getClassPredTys_maybe ct
= isInteractiveClass ovl_strings cls
| otherwise
= False
eq_constraints = filter isEqPred quant_cand
constrained_tvs = tyCoVarsOfTypes gbl_cand
mono_tvs = growThetaTyVars eq_constraints $
gbl_tvs `unionVarSet` constrained_tvs
tau_tvs_plus = growThetaTyVarsDSet quant_cand ztvs
dvs_plus = DV { dv_kvs = zkvs, dv_tvs = tau_tvs_plus }
; qtvs <- quantifyZonkedTyVars mono_tvs dvs_plus
-- We don't grow the kvs, as there's no real need to. Recall
-- that quantifyTyVars uses the separation between kvs and tvs
-- only for defaulting, and we don't want (ever) to default a tv
-- to *. So, don't grow the kvs.
; quant_cand <- TcM.zonkTcTypes quant_cand
-- quantifyTyVars turned some meta tyvars into
-- quantified skolems, so we have to zonk again
; let qtv_set = mkVarSet qtvs
theta = pickQuantifiablePreds qtv_set quant_cand
min_theta = mkMinimalBySCs theta
-- See Note [Minimize by Superclasses]
-- Warn about the monomorphism restriction
; warn_mono <- woptM Opt_WarnMonomorphism
; let mr_bites | ApplyMR <- infer_mode
= constrained_tvs `intersectsVarSet` tcDepVarSet dvs_plus
| otherwise
= False
; warnTc (Reason Opt_WarnMonomorphism) (warn_mono && mr_bites) $
hang (text "The Monomorphism Restriction applies to the binding"
<> plural bndrs <+> text "for" <+> pp_bndrs)
2 (text "Consider giving a type signature for"
<+> if isSingleton bndrs then pp_bndrs
else text "these binders")
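       -- A hedged illustration of when the warning fires (this example is not
       -- from the original source): for a simple pattern binding with no
       -- signature, e.g.
       --     plus = (+)
       -- ApplyMR puts the candidate (Num alpha) into gbl_cand, and alpha is
       -- also free in the binding's tau-type, so mr_bites is True and we
       -- suggest writing  plus :: Num a => a -> a -> a.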
; traceTc "decideQuantification"
(vcat [ text "infer_mode:" <+> ppr infer_mode
, text "gbl_cand:" <+> ppr gbl_cand
, text "quant_cand:" <+> ppr quant_cand
, text "gbl_tvs:" <+> ppr gbl_tvs
, text "mono_tvs:" <+> ppr mono_tvs
, text "tau_tvs_plus:" <+> ppr tau_tvs_plus
, text "qtvs:" <+> ppr qtvs
, text "min_theta:" <+> ppr min_theta ])
; return (qtvs, min_theta) }
where
pp_bndrs = pprWithCommas (quotes . ppr) bndrs
(bndrs, taus) = unzip name_taus
------------------
growThetaTyVars :: ThetaType -> TyCoVarSet -> TyVarSet
-- See Note [Growing the tau-tvs using constraints]
-- NB: only returns tyvars, never covars
growThetaTyVars theta tvs
| null theta = tvs_only
| otherwise = filterVarSet isTyVar $
transCloVarSet mk_next seed_tvs
where
tvs_only = filterVarSet isTyVar tvs
seed_tvs = tvs `unionVarSet` tyCoVarsOfTypes ips
(ips, non_ips) = partition isIPPred theta
-- See Note [Inheriting implicit parameters] in TcType
mk_next :: VarSet -> VarSet -- Maps current set to newly-grown ones
mk_next so_far = foldr (grow_one so_far) emptyVarSet non_ips
grow_one so_far pred tvs
| pred_tvs `intersectsVarSet` so_far = tvs `unionVarSet` pred_tvs
| otherwise = tvs
where
pred_tvs = tyCoVarsOfType pred
------------------
growThetaTyVarsDSet :: ThetaType -> DTyCoVarSet -> DTyVarSet
-- See Note [Growing the tau-tvs using constraints]
-- NB: only returns tyvars, never covars
-- It takes a deterministic set of TyCoVars and returns a deterministic set
-- of TyVars.
-- The implementation mirrors growThetaTyVars, the only difference is that
-- it avoids unionDVarSet and uses more efficient extendDVarSetList.
growThetaTyVarsDSet theta tvs
| null theta = tvs_only
| otherwise = filterDVarSet isTyVar $
transCloDVarSet mk_next seed_tvs
where
tvs_only = filterDVarSet isTyVar tvs
seed_tvs = tvs `extendDVarSetList` tyCoVarsOfTypesList ips
(ips, non_ips) = partition isIPPred theta
-- See Note [Inheriting implicit parameters] in TcType
mk_next :: DVarSet -> DVarSet -- Maps current set to newly-grown ones
mk_next so_far = foldr (grow_one so_far) emptyDVarSet non_ips
grow_one so_far pred tvs
| any (`elemDVarSet` so_far) pred_tvs = tvs `extendDVarSetList` pred_tvs
| otherwise = tvs
where
pred_tvs = tyCoVarsOfTypeList pred
{- Note [Quantification and partial signatures]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When choosing type variables to quantify, the basic plan is to
quantify over all type variables that are
* free in the tau_tvs, and
* not forced to be monomorphic (mono_tvs),
for example by being free in the environment.
However, in the case of a partial type signature, we are doing inference
*in the presence of a type signature*. For example:
f :: _ -> a
f x = ...
or
g :: (Eq _a) => _b -> _b
In both cases we use plan InferGen, and hence call simplifyInfer.
But those 'a' variables are skolems, and we should be sure to quantify
over them, for two reasons
* In the case of a type error
f :: _ -> Maybe a
f x = True && x
The inferred type of 'f' is f :: Bool -> Bool, but there's a
left-over error of form (HoleCan (Maybe a ~ Bool)). The error-reporting
machine expects to find a binding site for the skolem 'a', so we
add it to the ic_skols of the residual implication.
Note that we /only/ do this to the residual implication. We don't
complicate the quantified type variables of 'f' for downstream code;
it's just a device to make the error message generator know what to
report.
* Consider the partial type signature
f :: (Eq _) => Int -> Int
f x = x
In normal cases that makes sense; e.g.
g :: Eq _a => _a -> _a
g x = x
where the signature makes the type less general than it could
be. But for 'f' we must therefore quantify over the user-annotated
constraints, to get
f :: forall a. Eq a => Int -> Int
(thereby correctly triggering an ambiguity error later). If we don't
we'll end up with a strange open type
f :: Eq alpha => Int -> Int
which isn't ambiguous but is still very wrong. That's why we include
psig_theta in the variables to quantify over, passed to
decideQuantification.
Note [Quantifying over equality constraints]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Should we quantify over an equality constraint (s ~ t)? In general, we don't.
Doing so may simply postpone a type error from the function definition site to
its call site. (At worst, imagine (Int ~ Bool)).
However, consider this
forall a. (F [a] ~ Int) => blah
Should we quantify over the (F [a] ~ Int)? Perhaps yes, because at the call
site we will know 'a', and perhaps we have instance F [Bool] = Int.
So we *do* quantify over a type-family equality where the arguments mention
the quantified variables.
Note [Growing the tau-tvs using constraints]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
(growThetaTyVars insts tvs) is the result of extending the set
of tyvars, tvs, using all conceivable links from pred
E.g. tvs = {a}, preds = {H [a] b, K (b,Int) c, Eq e}
Then growThetaTyVars preds tvs = {a,b,c}
Notice that growThetaTyVars is conservative: if v might be fixed by vs,
then v `elem` grow(vs,C).
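Unpacking the example above step by step (nothing new, just the definition
at work):
  * start from the seed set {a}
  * (H [a] b) mentions 'a', so 'b' is added: {a,b}
  * (K (b,Int) c) now mentions 'b', so 'c' is added: {a,b,c}
  * (Eq e) never touches the set, so 'e' stays out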
Note [Quantification with errors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we find that the RHS of the definition has some absolutely-insoluble
constraints, we abandon all attempts to find a context to quantify
over, and instead make the function fully-polymorphic in whatever
type we have found. For two reasons
a) Minimise downstream errors
b) Avoid spurious errors from this function
But NB that we must include *derived* errors in the check. Example:
(a::*) ~ Int#
We get an insoluble derived error *~#, and we don't want to discard
it before doing the isInsolubleWC test! (Trac #8262)
Note [Default while Inferring]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Our current plan is that defaulting only happens at simplifyTop and
not simplifyInfer. This may lead to some insoluble deferred constraints.
Example:
instance D g => C g Int b
constraint inferred = (forall b. 0 => C gamma alpha b) /\ Num alpha
type inferred = gamma -> gamma
Now, if we try to default (alpha := Int) we will be able to refine the implication to
(forall b. 0 => C gamma Int b)
which can then be simplified further to
(forall b. 0 => D gamma)
Finally, we /can/ approximate this implication with (D gamma) and infer the quantified
type: forall g. D g => g -> g
Instead what will currently happen is that we will get a quantified type
(forall g. g -> g) and an implication:
forall g. 0 => (forall b. 0 => C g alpha b) /\ Num alpha
Which, even if simplifyTop defaults (alpha := Int), will still leave us with an
unsolvable implication:
forall g. 0 => (forall b. 0 => D g)
The concrete example would be:
h :: C g a s => g -> a -> ST s a
f (x::gamma) = (\_ -> x) (runST (h x (undefined::alpha)) + 1)
But it is quite tedious to do defaulting and resolve the implication constraints, and
we have not observed code breaking because of the lack of defaulting in inference, so
we don't do it for now.
Note [Minimize by Superclasses]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we quantify over a constraint, in simplifyInfer we need to
quantify over a constraint that is minimal in some sense: For
instance, if the final wanted constraint is (Eq alpha, Ord alpha),
we'd like to quantify over Ord alpha, because we can just get Eq alpha
from superclass selection from Ord alpha. This minimization is what
mkMinimalBySCs does. Then, simplifyInfer uses the minimal constraint
to check the original wanted.
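A source-level illustration (a sketch, not part of the original Note):
    f x y = (x == y, x < y)
gathers the wanteds (Eq alpha, Ord alpha); mkMinimalBySCs drops (Eq alpha)
because it is a superclass of (Ord alpha), so we infer
    f :: Ord a => a -> a -> (Bool, Bool)
rather than the needlessly wide context (Eq a, Ord a).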
Note [Avoid unnecessary constraint simplification]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-------- NB NB NB (Jun 12) -------------
This note no longer applies; see the notes with Trac #4361.
But I'm leaving it in here so we remember the issue.
----------------------------------------
When inferring the type of a let-binding, with simplifyInfer,
try to avoid unnecessarily simplifying class constraints.
Doing so aids sharing, but it also helps with delicate
situations like
instance C t => C [t] where ..
f :: C [t] => ....
f x = let g y = ...(constraint C [t])...
in ...
When inferring a type for 'g', we don't want to apply the
instance decl, because then we can't satisfy (C t). So we
just notice that g isn't quantified over 't' and partition
the constraints before simplifying.
This only half-works, but then let-generalisation only half-works.
*********************************************************************************
* *
* Main Simplifier *
* *
***********************************************************************************
-}
simplifyWantedsTcM :: [CtEvidence] -> TcM WantedConstraints
-- Solve the specified Wanted constraints
-- Discard the evidence binds
-- Discards all Derived stuff in result
-- Postcondition: fully zonked and unflattened constraints
simplifyWantedsTcM wanted
= do { traceTc "simplifyWantedsTcM {" (ppr wanted)
; (result, _) <- runTcS (solveWantedsAndDrop (mkSimpleWC wanted))
; result <- TcM.zonkWC result
; traceTc "simplifyWantedsTcM }" (ppr result)
; return result }
solveWantedsAndDrop :: WantedConstraints -> TcS WantedConstraints
-- Since solveWanteds returns the residual WantedConstraints,
-- it should always be called within a runTcS or something similar.
-- Result is not zonked
solveWantedsAndDrop wanted
= do { wc <- solveWanteds wanted
; return (dropDerivedWC wc) }
solveWanteds :: WantedConstraints -> TcS WantedConstraints
-- so that the inert set doesn't mindlessly propagate.
-- NB: wc_simples may be wanted /or/ derived now
solveWanteds wc@(WC { wc_simple = simples, wc_insol = insols, wc_impl = implics })
= do { traceTcS "solveWanteds {" (ppr wc)
; wc1 <- solveSimpleWanteds simples
; let WC { wc_simple = simples1, wc_insol = insols1, wc_impl = implics1 } = wc1
; (floated_eqs, implics2) <- solveNestedImplications (implics `unionBags` implics1)
; (no_new_scs, simples2) <- expandSuperClasses simples1
; traceTcS "solveWanteds middle" $ vcat [ text "simples1 =" <+> ppr simples1
, text "simples2 =" <+> ppr simples2 ]
; dflags <- getDynFlags
; final_wc <- simpl_loop 0 (solverIterations dflags) floated_eqs
no_new_scs
(WC { wc_simple = simples2, wc_impl = implics2
, wc_insol = insols `unionBags` insols1 })
; bb <- TcS.getTcEvBindsMap
; traceTcS "solveWanteds }" $
vcat [ text "final wc =" <+> ppr final_wc
, text "current evbinds =" <+> ppr (evBindMapBinds bb) ]
; return final_wc }
simpl_loop :: Int -> IntWithInf -> Cts -> Bool
-> WantedConstraints
-> TcS WantedConstraints
simpl_loop n limit floated_eqs no_new_deriveds
wc@(WC { wc_simple = simples, wc_insol = insols, wc_impl = implics })
| isEmptyBag floated_eqs && no_new_deriveds
= return wc -- Done!
| n `intGtLimit` limit
= do { -- Add an error (not a warning) if we blow the limit,
-- Typically if we blow the limit we are going to report some other error
-- (an unsolved constraint), and we don't want that error to suppress
-- the iteration limit warning!
addErrTcS (hang (text "solveWanteds: too many iterations"
<+> parens (text "limit =" <+> ppr limit))
2 (vcat [ text "Unsolved:" <+> ppr wc
, ppUnless (isEmptyBag floated_eqs) $
text "Floated equalities:" <+> ppr floated_eqs
, ppUnless no_new_deriveds $
text "New deriveds found"
, text "Set limit with -fconstraint-solver-iterations=n; n=0 for no limit"
]))
; return wc }
| otherwise
= do { let n_floated = lengthBag floated_eqs
; csTraceTcS $
text "simpl_loop iteration=" <> int n
<+> (parens $ hsep [ text "no new deriveds =" <+> ppr no_new_deriveds <> comma
, int n_floated <+> text "floated eqs" <> comma
, int (lengthBag simples) <+> text "simples to solve" ])
-- solveSimples may make progress if either float_eqs hold
; (unifs1, wc1) <- reportUnifications $
solveSimpleWanteds (floated_eqs `unionBags` simples)
-- Put floated_eqs first so they get solved first
-- NB: the floated_eqs may include /derived/ equalities
-- arising from fundeps inside an implication
; let WC { wc_simple = simples1, wc_insol = insols1, wc_impl = implics1 } = wc1
; (no_new_scs, simples2) <- expandSuperClasses simples1
-- We have already tried to solve the nested implications once
-- Try again only if we have unified some meta-variables
-- (which is a bit like adding more givens)
-- See Note [Cutting off simpl_loop]
; (floated_eqs2, implics2) <- if unifs1 == 0 && isEmptyBag implics1
then return (emptyBag, implics)
else solveNestedImplications (implics `unionBags` implics1)
; simpl_loop (n+1) limit floated_eqs2 no_new_scs
(WC { wc_simple = simples2, wc_impl = implics2
, wc_insol = insols `unionBags` insols1 }) }
expandSuperClasses :: Cts -> TcS (Bool, Cts)
-- If there are any unsolved wanteds, expand one step of
-- superclasses for deriveds
-- Returned Bool is True <=> no new superclass constraints added
-- See Note [The superclass story] in TcCanonical
expandSuperClasses unsolved
| not (anyBag superClassesMightHelp unsolved)
= return (True, unsolved)
| otherwise
= do { traceTcS "expandSuperClasses {" empty
; let (pending_wanted, unsolved') = mapAccumBagL get [] unsolved
get acc ct | Just ct' <- isPendingScDict ct
= (ct':acc, ct')
| otherwise
= (acc, ct)
; pending_given <- getPendingScDicts
; if null pending_given && null pending_wanted
then do { traceTcS "End expandSuperClasses no-op }" empty
; return (True, unsolved) }
else
do { new_given <- makeSuperClasses pending_given
; solveSimpleGivens new_given
; new_wanted <- makeSuperClasses pending_wanted
; traceTcS "End expandSuperClasses }"
(vcat [ text "Given:" <+> ppr pending_given
, text "Wanted:" <+> ppr new_wanted ])
; return (False, unsolved' `unionBags` listToBag new_wanted) } }
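-- For instance (an illustrative sketch, not from the original comments): an
-- unsolved wanted (Ord alpha) that is still marked pending-sc has its
-- superclass (Eq alpha) generated here (typically as a Derived constraint),
-- which may let fundeps or other machinery make progress on the next
-- simpl_loop iteration.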
solveNestedImplications :: Bag Implication
-> TcS (Cts, Bag Implication)
-- Precondition: the TcS inerts may contain unsolved simples which have
-- to be converted to givens before we go inside a nested implication.
solveNestedImplications implics
| isEmptyBag implics
= return (emptyBag, emptyBag)
| otherwise
= do { traceTcS "solveNestedImplications starting {" empty
; (floated_eqs_s, unsolved_implics) <- mapAndUnzipBagM solveImplication implics
; let floated_eqs = concatBag floated_eqs_s
-- ... and we are back in the original TcS inerts
-- Notice that the original includes the _insoluble_simples so it was safe to ignore
-- them in the beginning of this function.
; traceTcS "solveNestedImplications end }" $
vcat [ text "all floated_eqs =" <+> ppr floated_eqs
, text "unsolved_implics =" <+> ppr unsolved_implics ]
; return (floated_eqs, catBagMaybes unsolved_implics) }
solveImplication :: Implication -- Wanted
-> TcS (Cts, -- All wanted or derived floated equalities: var = type
Maybe Implication) -- Simplified implication (empty or singleton)
-- Precondition: The TcS monad contains an empty worklist and given-only inerts
-- which after trying to solve this implication we must restore to their original value
solveImplication imp@(Implic { ic_tclvl = tclvl
, ic_binds = ev_binds_var
, ic_skols = skols
, ic_given = given_ids
, ic_wanted = wanteds
, ic_info = info
, ic_status = status
, ic_env = env })
| IC_Solved {} <- status
= return (emptyCts, Just imp) -- Do nothing
| otherwise -- Even for IC_Insoluble it is worth doing more work
-- The insoluble stuff might be in one sub-implication
-- and other unsolved goals in another; and we want to
-- solve the latter as much as possible
= do { inerts <- getTcSInerts
; traceTcS "solveImplication {" (ppr imp $$ text "Inerts" <+> ppr inerts)
-- Solve the nested constraints
; (no_given_eqs, given_insols, residual_wanted)
<- nestImplicTcS ev_binds_var tclvl $
do { let loc = mkGivenLoc tclvl info env
givens = mkGivens loc given_ids
; solveSimpleGivens givens
; residual_wanted <- solveWanteds wanteds
-- solveWanteds, *not* solveWantedsAndDrop, because
-- we want to retain derived equalities so we can float
-- them out in floatEqualities
; (no_eqs, given_insols) <- getNoGivenEqs tclvl skols
-- Call getNoGivenEqs /after/ solveWanteds, because
-- solveWanteds can augment the givens, via expandSuperClasses,
-- to reveal given superclass equalities
; return (no_eqs, given_insols, residual_wanted) }
; (floated_eqs, residual_wanted)
<- floatEqualities skols no_given_eqs residual_wanted
; traceTcS "solveImplication 2"
(ppr given_insols $$ ppr residual_wanted)
; let final_wanted = residual_wanted `addInsols` given_insols
; res_implic <- setImplicationStatus (imp { ic_no_eqs = no_given_eqs
, ic_wanted = final_wanted })
; (evbinds, tcvs) <- TcS.getTcEvBindsAndTCVs ev_binds_var
; traceTcS "solveImplication end }" $ vcat
[ text "no_given_eqs =" <+> ppr no_given_eqs
, text "floated_eqs =" <+> ppr floated_eqs
, text "res_implic =" <+> ppr res_implic
, text "implication evbinds =" <+> ppr (evBindMapBinds evbinds)
, text "implication tvcs =" <+> ppr tcvs ]
; return (floated_eqs, res_implic) }
----------------------
setImplicationStatus :: Implication -> TcS (Maybe Implication)
-- Finalise the implication returned from solveImplication:
-- * Set the ic_status field
-- * Trim the ic_wanted field to remove Derived constraints
-- Return Nothing if we can discard the implication altogether
setImplicationStatus implic@(Implic { ic_binds = ev_binds_var
, ic_info = info
, ic_wanted = wc
, ic_given = givens })
| some_insoluble
= return $ Just $
implic { ic_status = IC_Insoluble
, ic_wanted = wc { wc_simple = pruned_simples
, wc_insol = pruned_insols } }
| some_unsolved
= do { traceTcS "setImplicationStatus" $
vcat [ppr givens $$ ppr simples $$ ppr insols $$ ppr mb_implic_needs]
; return $ Just $
implic { ic_status = IC_Unsolved
, ic_wanted = wc { wc_simple = pruned_simples
, wc_insol = pruned_insols } }
}
| otherwise -- Everything is solved; look at the implications
-- See Note [Tracking redundant constraints]
= do { ev_binds <- TcS.getTcEvBindsAndTCVs ev_binds_var
; let all_needs = neededEvVars ev_binds implic_needs
dead_givens | warnRedundantGivens info
= filterOut (`elemVarSet` all_needs) givens
| otherwise = [] -- None to report
final_needs = all_needs `delVarSetList` givens
discard_entire_implication -- Can we discard the entire implication?
= null dead_givens -- No warning from this implication
&& isEmptyBag pruned_implics -- No live children
&& isEmptyVarSet final_needs -- No needed vars to pass up to parent
final_status = IC_Solved { ics_need = final_needs
, ics_dead = dead_givens }
final_implic = implic { ic_status = final_status
, ic_wanted = wc { wc_simple = pruned_simples
, wc_insol = pruned_insols
, wc_impl = pruned_implics } }
-- We can only prune the child implications (pruned_implics)
-- in the IC_Solved status case, because only then we can
-- accumulate their needed evidence variables into the
-- IC_Solved final_status field of the parent implication.
-- Check that there are no term-level evidence bindings
-- in the cases where we have no place to put them
; MASSERT2( termEvidenceAllowed info || isEmptyEvBindMap (fst ev_binds)
, ppr info $$ ppr ev_binds )
; return $ if discard_entire_implication
then Nothing
else Just final_implic }
where
WC { wc_simple = simples, wc_impl = implics, wc_insol = insols } = wc
some_insoluble = insolubleWC wc
some_unsolved = not (isEmptyBag simples && isEmptyBag insols)
|| isNothing mb_implic_needs
pruned_simples = dropDerivedSimples simples
pruned_insols = dropDerivedInsols insols
pruned_implics = filterBag need_to_keep_implic implics
mb_implic_needs :: Maybe VarSet
-- Just vs => all implics are IC_Solved, with 'vs' needed
-- Nothing => at least one implic is not IC_Solved
mb_implic_needs = foldrBag add_implic (Just emptyVarSet) implics
Just implic_needs = mb_implic_needs
add_implic implic acc
| Just vs_acc <- acc
, IC_Solved { ics_need = vs } <- ic_status implic
= Just (vs `unionVarSet` vs_acc)
| otherwise = Nothing
need_to_keep_implic ic
| IC_Solved { ics_dead = [] } <- ic_status ic
-- Fully solved, and no redundant givens to report
, isEmptyBag (wc_impl (ic_wanted ic))
-- And no children that might have things to report
= False
| otherwise
= True
warnRedundantGivens :: SkolemInfo -> Bool
warnRedundantGivens (SigSkol ctxt _)
= case ctxt of
FunSigCtxt _ warn_redundant -> warn_redundant
ExprSigCtxt -> True
_ -> False
-- To think about: do we want to report redundant givens for
-- pattern synonyms, PatSynSigSkol? c.f Trac #9953, comment:21.
warnRedundantGivens (InstSkol {}) = True
warnRedundantGivens _ = False
neededEvVars :: (EvBindMap, TcTyVarSet) -> VarSet -> VarSet
-- Find all the evidence variables that are "needed",
-- and then delete all those bound by the evidence bindings
-- See Note [Tracking redundant constraints]
neededEvVars (ev_binds, tcvs) initial_seeds
= (needed `unionVarSet` tcvs) `minusVarSet` bndrs
where
seeds = foldEvBindMap add_wanted initial_seeds ev_binds
needed = transCloVarSet also_needs seeds
bndrs = foldEvBindMap add_bndr emptyVarSet ev_binds
add_wanted :: EvBind -> VarSet -> VarSet
add_wanted (EvBind { eb_is_given = is_given, eb_rhs = rhs }) needs
| is_given = needs -- Add the rhs vars of the Wanted bindings only
| otherwise = evVarsOfTerm rhs `unionVarSet` needs
also_needs :: VarSet -> VarSet
also_needs needs
= nonDetFoldUFM add emptyVarSet needs
-- It's OK to use nonDetFoldUFM here because we immediately forget
-- about the ordering by creating a set
where
add v needs
| Just ev_bind <- lookupEvBind ev_binds v
, EvBind { eb_is_given = is_given, eb_rhs = rhs } <- ev_bind
, is_given
= evVarsOfTerm rhs `unionVarSet` needs
| otherwise
= needs
add_bndr :: EvBind -> VarSet -> VarSet
add_bndr (EvBind { eb_lhs = v }) vs = extendVarSet vs v
{-
Note [Tracking redundant constraints]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
With Opt_WarnRedundantConstraints, GHC can report which
constraints of a type signature (or instance declaration) are
redundant, and can be omitted. Here is an overview of how it
works:
----- What is a redundant constraint?
* The things that can be redundant are precisely the Given
constraints of an implication.
* A constraint can be redundant in two different ways:
a) It is implied by other givens. E.g.
f :: (Eq a, Ord a) => blah -- Eq a unnecessary
g :: (Eq a, a~b, Eq b) => blah -- Either Eq a or Eq b unnecessary
b) It is not needed by the Wanted constraints covered by the
implication E.g.
f :: Eq a => a -> Bool
f x = True -- Equality not used
* To find (a), when we have two Given constraints,
we must be careful to drop the one that is a naked variable (if poss).
So if we have
f :: (Eq a, Ord a) => blah
then we may find [G] sc_sel (d1::Ord a) :: Eq a
[G] d2 :: Eq a
We want to discard d2 in favour of the superclass selection from
the Ord dictionary. This is done by TcInteract.solveOneFromTheOther
See Note [Replacement vs keeping].
* To find (b) we need to know which evidence bindings are 'wanted';
hence the eb_is_given field on an EvBind.
----- How tracking works
* When the constraint solver finishes solving all the wanteds in
an implication, it sets its status to IC_Solved
- The ics_dead field, of IC_Solved, records the subset of this
implication's ic_given that are redundant (not needed).
- The ics_need field of IC_Solved then records all the
in-scope (given) evidence variables bound by the context, that
were needed to solve this implication, including all its nested
implications. (We remove the ic_given of this implication from
the set, of course.)
* We compute which evidence variables are needed by an implication
in setImplicationStatus. A variable is needed if
a) it is free in the RHS of a Wanted EvBind,
b) it is free in the RHS of an EvBind whose LHS is needed,
c) it is in the ics_need of a nested implication.
* We need to be careful not to discard an implication
prematurely, even one that is fully solved, because we might
thereby forget which variables it needs, and hence wrongly
report a constraint as redundant. But we can discard it once
its free vars have been incorporated into its parent; or if it
simply has no free vars. This careful discarding is also
handled in setImplicationStatus.
----- Reporting redundant constraints
* TcErrors does the actual warning, in warnRedundantConstraints.
* We don't report redundant givens for *every* implication; only
for those which reply True to TcSimplify.warnRedundantGivens:
- For example, in a class declaration, the default method *can*
use the class constraint, but it certainly doesn't *have* to,
and we don't want to report an error there.
- More subtly, in a function definition
f :: (Ord a, Ord a, Ix a) => a -> a
f x = rhs
we do an ambiguity check on the type (which would find that one
of the Ord a constraints was redundant), and then we check that
the definition has that type (which might find that both are
redundant). We don't want to report the same error twice, so we
disable it for the ambiguity check. Hence using two different
FunSigCtxts, one with the warn-redundant field set True, and the
other set False in
- TcBinds.tcSpecPrag
- TcBinds.tcTySig
This decision is taken in setImplicationStatus, rather than TcErrors
so that we can discard implication constraints that we don't need.
So ics_dead consists only of the *reportable* redundant givens.
----- Shortcomings
Consider (see Trac #9939)
f2 :: (Eq a, Ord a) => a -> a -> Bool
-- Ord a redundant, but Eq a is reported
f2 x y = (x == y)
We report (Eq a) as redundant, whereas actually (Ord a) is. But it's
really not easy to detect that!
Note [Cutting off simpl_loop]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It is very important not to iterate in simpl_loop unless there is a chance
of progress. Trac #8474 is a classic example:
* There's a deeply-nested chain of implication constraints.
?x:alpha => ?y1:beta1 => ... ?yn:betan => [W] ?x:Int
* From the innermost one we get a [D] alpha ~ Int,
but alpha is untouchable until we get out to the outermost one
* We float [D] alpha~Int out (it is in floated_eqs), but since alpha
is untouchable, the solveInteract in simpl_loop makes no progress
* So there is no point in attempting to re-solve
?yn:betan => [W] ?x:Int
via solveNestedImplications, because we'll just get the
same [D] again
* If we *do* re-solve, we'll get an infinite loop. It is cut off by
the fixed bound of 10, but solving the next takes 10*10*...*10 (ie
exponentially many) iterations!
Conclusion: we should call solveNestedImplications only if we did
some unification in solveSimpleWanteds; because that's the only way
we'll get more Givens (a unification is like adding a Given) to
allow the implication to make progress.
-}
promoteTyVar :: TcLevel -> TcTyVar -> TcM ()
-- When we float a constraint out of an implication we must restore
-- invariant (MetaTvInv) in Note [TcLevel and untouchable type variables] in TcType
-- See Note [Promoting unification variables]
promoteTyVar tclvl tv
| isFloatedTouchableMetaTyVar tclvl tv
= do { cloned_tv <- TcM.cloneMetaTyVar tv
; let rhs_tv = setMetaTyVarTcLevel cloned_tv tclvl
; TcM.writeMetaTyVar tv (mkTyVarTy rhs_tv) }
| otherwise
= return ()
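-- Illustrative effect (a sketch): promoting beta[2] when tclvl = 1 allocates a
-- fresh beta'[1] and fills in beta[2] := beta'[1], restoring (MetaTvInv).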
promoteTyVarTcS :: TcLevel -> TcTyVar -> TcS ()
-- When we float a constraint out of an implication we must restore
-- invariant (MetaTvInv) in Note [TcLevel and untouchable type variables] in TcType
-- See Note [Promoting unification variables]
-- We don't just call promoteTyVar because we want to use unifyTyVar,
-- not writeMetaTyVar
promoteTyVarTcS tclvl tv
| isFloatedTouchableMetaTyVar tclvl tv
= do { cloned_tv <- TcS.cloneMetaTyVar tv
; let rhs_tv = setMetaTyVarTcLevel cloned_tv tclvl
; unifyTyVar tv (mkTyVarTy rhs_tv) }
| otherwise
= return ()
-- | If the tyvar is a RuntimeRep var, set it to PtrRepLifted. Returns whether or
-- not this happened.
defaultTyVar :: TcTyVar -> TcM ()
-- Precondition: MetaTyVars only
-- See Note [DefaultTyVar]
defaultTyVar the_tv
| isRuntimeRepVar the_tv
= do { traceTc "defaultTyVar RuntimeRep" (ppr the_tv)
; writeMetaTyVar the_tv ptrRepLiftedTy }
| otherwise = return () -- The common case
-- | Like 'defaultTyVar', but in the TcS monad.
defaultTyVarTcS :: TcTyVar -> TcS Bool
defaultTyVarTcS the_tv
| isRuntimeRepVar the_tv
= do { traceTcS "defaultTyVarTcS RuntimeRep" (ppr the_tv)
; unifyTyVar the_tv ptrRepLiftedTy
; return True }
| otherwise
= return False -- the common case
approximateWC :: Bool -> WantedConstraints -> Cts
-- Postcondition: Wanted or Derived Cts
-- See Note [ApproximateWC]
approximateWC float_past_equalities wc
= float_wc emptyVarSet wc
where
float_wc :: TcTyCoVarSet -> WantedConstraints -> Cts
float_wc trapping_tvs (WC { wc_simple = simples, wc_impl = implics })
= filterBag is_floatable simples `unionBags`
do_bag (float_implic new_trapping_tvs) implics
where
is_floatable ct = tyCoVarsOfCt ct `disjointVarSet` new_trapping_tvs
new_trapping_tvs = transCloVarSet grow trapping_tvs
grow :: VarSet -> VarSet -- Maps current trapped tyvars to newly-trapped ones
grow so_far = foldrBag (grow_one so_far) emptyVarSet simples
grow_one so_far ct tvs
| ct_tvs `intersectsVarSet` so_far = tvs `unionVarSet` ct_tvs
| otherwise = tvs
where
ct_tvs = tyCoVarsOfCt ct
float_implic :: TcTyCoVarSet -> Implication -> Cts
float_implic trapping_tvs imp
| float_past_equalities || ic_no_eqs imp
= float_wc new_trapping_tvs (ic_wanted imp)
| otherwise -- Take care with equalities
= emptyCts -- See (1) under Note [ApproximateWC]
where
new_trapping_tvs = trapping_tvs `extendVarSetList` ic_skols imp
do_bag :: (a -> Bag c) -> Bag a -> Bag c
do_bag f = foldrBag (unionBags.f) emptyBag
{- Note [ApproximateWC]
~~~~~~~~~~~~~~~~~~~~~~~
approximateWC takes a constraint, typically arising from the RHS of a
let-binding whose type we are *inferring*, and extracts from it some
*simple* constraints that we might plausibly abstract over. Of course
the top-level simple constraints are plausible, but we also float constraints
out from inside, if they are not captured by skolems.
The same function is used when doing type-class defaulting (see the call
to applyDefaultingRules) to extract constraints that might be defaulted.
There are two caveats:
1. When inferring most-general types (in simplifyInfer), we do *not*
float anything out if the implication binds equality constraints,
because that defeats the OutsideIn story. Consider
data T a where
TInt :: T Int
MkT :: T a
f TInt = 3::Int
We get the implication (a ~ Int => res ~ Int), where so far we've decided
f :: T a -> res
We don't want to float (res~Int) out because then we'll infer
f :: T a -> Int
which is only one of the possible types. (GHC 7.6 accidentally *did*
float out of such implications, which meant it would happily infer
non-principal types.)
HOWEVER (Trac #12797) in findDefaultableGroups we are not worried about
the most-general type; and we /do/ want to float out of equalities.
Hence the boolean flag to approximateWC.
2. We do not float out an inner constraint that shares a type variable
(transitively) with one that is trapped by a skolem. Eg
forall a. F a ~ beta, Integral beta
We don't want to float out (Integral beta). Doing so would be bad
when defaulting, because then we'll default beta:=Integer, and that
makes the error message much worse; we'd get
Can't solve F a ~ Integer
rather than
Can't solve Integral (F a)
Moreover, floating out these "contaminated" constraints doesn't help
when generalising either. If we generalise over (Integral b), we still
can't solve the retained implication (forall a. F a ~ b). Indeed,
arguably that too would be a harder error to understand.
Note [DefaultTyVar]
~~~~~~~~~~~~~~~~~~~
defaultTyVar is used on any un-instantiated meta type variables to
default any RuntimeRep variables to PtrRepLifted. This is important
to ensure that instance declarations match. For example consider
instance Show (a->b)
foo x = show (\_ -> True)
Then we'll get a constraint (Show (p ->q)) where p has kind (TYPE r),
and that won't match the typeKind (*) in the instance decl. See tests
tc217 and tc175.
We look only at touchable type variables. No further constraints
are going to affect these type variables, so it's time to do it by
hand. However we aren't ready to default them fully to () or
whatever, because the type-class defaulting rules have yet to run.
An alternate implementation would be to emit a derived constraint setting
the RuntimeRep variable to PtrRepLifted, but this seems unnecessarily indirect.
Note [Promote _and_ default when inferring]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we are inferring a type, we simplify the constraint, and then use
approximateWC to produce a list of candidate constraints. Then we MUST
a) Promote any meta-tyvars that have been floated out by
approximateWC, to restore invariant (MetaTvInv) described in
Note [TcLevel and untouchable type variables] in TcType.
b) Default the kind of any meta-tyvars that are not mentioned
in the environment.
To see (b), suppose the constraint is (C ((a :: OpenKind) -> Int)), and we
have an instance (C ((x:*) -> Int)). The instance doesn't match -- but it
should! If we don't solve the constraint, we'll stupidly quantify over
(C (a->Int)) and, worse, in doing so zonkQuantifiedTyVar will quantify over
(b:*) instead of (a:OpenKind), which can lead to disaster; see Trac #7332.
Trac #7641 is a simpler example.
Note [Promoting unification variables]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we float an equality out of an implication we must "promote" free
unification variables of the equality, in order to maintain Invariant
(MetaTvInv) from Note [TcLevel and untouchable type variables] in TcType, for the
leftover implication.
This is absolutely necessary. Consider the following example. We start
with two implications and a class with a functional dependency.
class C x y | x -> y
instance C [a] [a]
(I1) [untch=beta]forall b. 0 => F Int ~ [beta]
(I2) [untch=beta]forall c. 0 => F Int ~ [[alpha]] /\ C beta [c]
We float (F Int ~ [beta]) out of I1, and we float (F Int ~ [[alpha]]) out of I2.
They may react to yield (beta := [alpha]), which can then be pushed inwards
into the leftover of I2 to get (C [alpha] [a]) which, using the FunDep, will mean that
(alpha := a). In the end we will have the skolem 'b' escaping in the untouchable
beta! Concrete example is in indexed_types/should_fail/ExtraTcsUntch.hs:
class C x y | x -> y where
op :: x -> y -> ()
instance C [a] [a]
type family F a :: *
h :: F Int -> ()
h = undefined
data TEx where
TEx :: a -> TEx
f (x::beta) =
let g1 :: forall b. b -> ()
g1 _ = h [x]
g2 z = case z of TEx y -> (h [[undefined]], op x [y])
in (g1 '3', g2 undefined)
*********************************************************************************
* *
* Floating equalities *
* *
*********************************************************************************
Note [Float Equalities out of Implications]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
For ordinary pattern matches (including existentials) we float
equalities out of implications, for instance:
data T where
MkT :: Eq a => a -> T
f x y = case x of MkT _ -> (y::Int)
We get the implication constraint (x::T) (y::alpha):
forall a. [untouchable=alpha] Eq a => alpha ~ Int
We want to float out the equality into a scope where alpha is no
longer untouchable, to solve the implication!
But we cannot float equalities out of implications whose givens may
yield or contain equalities:
data T a where
T1 :: T Int
T2 :: T Bool
T3 :: T a
h :: T a -> a -> Int
f x y = case x of
T1 -> y::Int
T2 -> y::Bool
T3 -> h x y
We generate these constraints, for (x::T alpha) and (y :: beta):
[untouchables = beta] (alpha ~ Int => beta ~ Int) -- From 1st branch
[untouchables = beta] (alpha ~ Bool => beta ~ Bool) -- From 2nd branch
(alpha ~ beta) -- From 3rd branch
If we float the equality (beta ~ Int) outside of the first implication and
the equality (beta ~ Bool) out of the second we get an insoluble constraint.
But if we just leave them inside the implications, we unify alpha := beta and
solve everything.
Principle:
We do not want to float equalities out which may
need the given *evidence* to become soluble.
Consequence: classes with functional dependencies don't matter (since there is
no evidence for a fundep equality), but equality superclasses do matter (since
they carry evidence).
-}
floatEqualities :: [TcTyVar] -> Bool
-> WantedConstraints
-> TcS (Cts, WantedConstraints)
-- Main idea: see Note [Float Equalities out of Implications]
--
-- Precondition: the wc_simple of the incoming WantedConstraints are
-- fully zonked, so that we can see their free variables
--
-- Postcondition: The returned floated constraints (Cts) are only
-- Wanted or Derived
--
-- Also performs some unifications (via promoteTyVar), adding to
-- monadically-carried ty_binds. These will be used when processing
-- floated_eqs later
--
-- Subtleties: Note [Float equalities from under a skolem binding]
-- Note [Skolem escape]
floatEqualities skols no_given_eqs
wanteds@(WC { wc_simple = simples })
| not no_given_eqs -- There are some given equalities, so don't float
= return (emptyBag, wanteds) -- Note [Float Equalities out of Implications]
| otherwise
= do { outer_tclvl <- TcS.getTcLevel
; mapM_ (promoteTyVarTcS outer_tclvl)
(tyCoVarsOfCtsList float_eqs)
-- See Note [Promoting unification variables]
; traceTcS "floatEqualities" (vcat [ text "Skols =" <+> ppr skols
, text "Simples =" <+> ppr simples
, text "Floated eqs =" <+> ppr float_eqs])
; return ( float_eqs
, wanteds { wc_simple = remaining_simples } ) }
where
skol_set = mkVarSet skols
(float_eqs, remaining_simples) = partitionBag (usefulToFloat skol_set) simples
usefulToFloat :: VarSet -> Ct -> Bool
usefulToFloat skol_set ct -- The constraint is un-flattened and de-canonicalised
= is_meta_var_eq pred &&
(tyCoVarsOfType pred `disjointVarSet` skol_set)
where
pred = ctPred ct
-- Float out alpha ~ ty, or ty ~ alpha
-- which might be unified outside
-- See Note [Which equalities to float]
is_meta_var_eq pred
| EqPred NomEq ty1 ty2 <- classifyPredType pred
= case (tcGetTyVar_maybe ty1, tcGetTyVar_maybe ty2) of
(Just tv1, _) -> float_tv_eq tv1 ty2
(_, Just tv2) -> float_tv_eq tv2 ty1
_ -> False
| otherwise
= False
float_tv_eq tv1 ty2 -- See Note [Which equalities to float]
= isMetaTyVar tv1
&& (not (isSigTyVar tv1) || isTyVarTy ty2)
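-- Illustrative decisions (a sketch, not from the original source), with
-- skol_set = {a}:
--   alpha[2] ~ [beta]   floats    (meta tyvar on one side, no skolem mentioned)
--   F a ~ alpha         does not  (the skolem 'a' occurs in the predicate)
--   Int ~ Bool          does not  (no meta tyvar on either side)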
{- Note [Float equalities from under a skolem binding]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Which of the simple equalities can we float out? Obviously, only
ones that don't mention the skolem-bound variables. But that is
over-eager. Consider
[2] forall a. F a beta[1] ~ gamma[2], G beta[1] gamma[2] ~ Int
The second constraint doesn't mention 'a'. But if we float it,
we'll promote gamma[2] to gamma'[1]. Now suppose that we learn that
beta := Bool, and F a Bool = a, and G Bool _ = Int. Then we'll
be left with the constraint
[2] forall a. a ~ gamma'[1]
which is insoluble because gamma became untouchable.
Solution: float only constraints that stand a jolly good chance of
being soluble simply by being floated, namely ones of form
a ~ ty
where 'a' is a currently-untouchable unification variable, but may
become touchable by being floated (perhaps by more than one level).
We had a very complicated rule previously, but this is nice and
simple. (To see the notes, look at this Note in a version of
TcSimplify prior to Oct 2014).
Note [Which equalities to float]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Which equalities should we float? We want to float ones where there
is a decent chance that floating outwards will allow unification to
happen. In particular:
Float out equalities of form (alpha ~ ty) or (ty ~ alpha), where
* alpha is a meta-tyvar.
* And 'alpha' is not a SigTv with 'ty' being a non-tyvar. In that
case, floating out won't help either, and it may affect grouping
of error messages.
Note [Skolem escape]
~~~~~~~~~~~~~~~~~~~~
You might worry about skolem escape with all this floating.
For example, consider
[2] forall a. (a ~ F beta[2] delta,
Maybe beta[2] ~ gamma[1])
The (Maybe beta ~ gamma) doesn't mention 'a', so we float it, and
solve with gamma := beta. But what if later delta:=Int, and
F b Int = b.
Then we'd get a ~ beta[2], and solve to get beta:=a, and now the
skolem has escaped!
But it's ok: when we float (Maybe beta[2] ~ gamma[1]), we promote beta[2]
to beta[1], and that means the (a ~ beta[1]) will be stuck, as it should be.
*********************************************************************************
* *
*                         Defaulting and disambiguation                         *
* *
*********************************************************************************
-}
applyDefaultingRules :: WantedConstraints -> TcS Bool
-- True <=> I did some defaulting, by unifying a meta-tyvar
-- Input WantedConstraints are not necessarily zonked
applyDefaultingRules wanteds
| isEmptyWC wanteds
= return False
| otherwise
= do { info@(default_tys, _) <- getDefaultInfo
; wanteds <- TcS.zonkWC wanteds
; let groups = findDefaultableGroups info wanteds
; traceTcS "applyDefaultingRules {" $
vcat [ text "wanteds =" <+> ppr wanteds
, text "groups =" <+> ppr groups
, text "info =" <+> ppr info ]
; something_happeneds <- mapM (disambigGroup default_tys) groups
; traceTcS "applyDefaultingRules }" (ppr something_happeneds)
; return (or something_happeneds) }
findDefaultableGroups
:: ( [Type]
, (Bool,Bool) ) -- (Overloaded strings, extended default rules)
-> WantedConstraints -- Unsolved (wanted or derived)
-> [(TyVar, [Ct])]
findDefaultableGroups (default_tys, (ovl_strings, extended_defaults)) wanteds
| null default_tys
= []
| otherwise
= [ (tv, map fstOf3 group)
| group@((_,_,tv):_) <- unary_groups
, defaultable_tyvar tv
, defaultable_classes (map sndOf3 group) ]
where
simples = approximateWC True wanteds
(unaries, non_unaries) = partitionWith find_unary (bagToList simples)
unary_groups = equivClasses cmp_tv unaries
unary_groups :: [[(Ct, Class, TcTyVar)]] -- (C tv) constraints
unaries :: [(Ct, Class, TcTyVar)] -- (C tv) constraints
non_unaries :: [Ct] -- and *other* constraints
-- Finds unary type-class constraints
-- But take account of polykinded classes like Typeable,
-- which may look like (Typeable * (a:*)) (Trac #8931)
find_unary cc
| Just (cls,tys) <- getClassPredTys_maybe (ctPred cc)
, [ty] <- filterOutInvisibleTypes (classTyCon cls) tys
-- Ignore invisible arguments for this purpose
, Just tv <- tcGetTyVar_maybe ty
, isMetaTyVar tv -- We might have runtime-skolems in GHCi, and
-- we definitely don't want to try to assign to those!
= Left (cc, cls, tv)
find_unary cc = Right cc -- Non unary or non dictionary
bad_tvs :: TcTyCoVarSet -- TyVars mentioned by non-unaries
bad_tvs = mapUnionVarSet tyCoVarsOfCt non_unaries
cmp_tv (_,_,tv1) (_,_,tv2) = tv1 `compare` tv2
defaultable_tyvar tv
= let b1 = isTyConableTyVar tv -- Note [Avoiding spurious errors]
b2 = not (tv `elemVarSet` bad_tvs)
in b1 && b2
defaultable_classes clss
| extended_defaults = any (isInteractiveClass ovl_strings) clss
| otherwise = all is_std_class clss && (any (isNumClass ovl_strings) clss)
-- is_std_class adds IsString to the standard numeric classes,
-- when -foverloaded-strings is enabled
is_std_class cls = isStandardClass cls ||
(ovl_strings && (cls `hasKey` isStringClassKey))
------------------------------
disambigGroup :: [Type] -- The default types
-> (TcTyVar, [Ct]) -- All classes of the form (C a)
-- sharing same type variable
-> TcS Bool -- True <=> something happened, reflected in ty_binds
disambigGroup [] _
= return False
disambigGroup (default_ty:default_tys) group@(the_tv, wanteds)
= do { traceTcS "disambigGroup {" (vcat [ ppr default_ty, ppr the_tv, ppr wanteds ])
; fake_ev_binds_var <- TcS.newTcEvBinds
; tclvl <- TcS.getTcLevel
; success <- nestImplicTcS fake_ev_binds_var (pushTcLevel tclvl) try_group
; if success then
-- Success: record the type variable binding, and return
do { unifyTyVar the_tv default_ty
; wrapWarnTcS $ warnDefaulting wanteds default_ty
; traceTcS "disambigGroup succeeded }" (ppr default_ty)
; return True }
else
-- Failure: try with the next type
do { traceTcS "disambigGroup failed, will try other default types }"
(ppr default_ty)
; disambigGroup default_tys group } }
where
try_group
| Just subst <- mb_subst
= do { lcl_env <- TcS.getLclEnv
; let loc = CtLoc { ctl_origin = GivenOrigin UnkSkol
, ctl_env = lcl_env
, ctl_t_or_k = Nothing
, ctl_depth = initialSubGoalDepth }
; wanted_evs <- mapM (newWantedEvVarNC loc . substTy subst . ctPred)
wanteds
; fmap isEmptyWC $
solveSimpleWanteds $ listToBag $
map mkNonCanonical wanted_evs }
| otherwise
= return False
the_ty = mkTyVarTy the_tv
mb_subst = tcMatchTyKi the_ty default_ty
-- Make sure the kinds match too; hence this call to tcMatchTyKi
-- E.g. suppose the only constraint was (Typeable k (a::k))
-- With the addition of polykinded defaulting we also want to reject
-- ill-kinded defaulting attempts like (Eq []) or (Foldable Int) here.
-- In interactive mode, or with -XExtendedDefaultRules,
-- we default Show a to Show () to avoid gratuitous errors on "show []"
isInteractiveClass :: Bool -- -XOverloadedStrings?
-> Class -> Bool
isInteractiveClass ovl_strings cls
= isNumClass ovl_strings cls || (classKey cls `elem` interactiveClassKeys)
-- isNumClass adds IsString to the standard numeric classes,
-- when -foverloaded-strings is enabled
isNumClass :: Bool -- -XOverloadedStrings?
-> Class -> Bool
isNumClass ovl_strings cls
= isNumericClass cls || (ovl_strings && (cls `hasKey` isStringClassKey))
{-
Note [Avoiding spurious errors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When doing the unification for defaulting, we check for skolem
type variables, and simply don't default them. For example:
f = (*) -- Monomorphic
g :: Num a => a -> a
g x = f x x
Here, we get a complaint when checking the type signature for g,
that g isn't polymorphic enough; but then we get another one when
dealing with the (Num a) context arising from f's definition;
we try to unify a with Int (to default it), but find that it's
already been unified with the rigid variable from g's type sig.
-}
|
mettekou/ghc
|
compiler/typecheck/TcSimplify.hs
|
bsd-3-clause
| 90,897
| 2
| 20
| 25,733
| 9,527
| 4,968
| 4,559
| 781
| 5
|
import Control.Monad.Trans.Free hiding (Pure)
import Data.Maybe (fromMaybe)
import Data.IORef
import Options.Applicative
import Pipes
import Pipes.Internal
import Data.Time.Clock
import Common (Step(..), search, parameters)
options :: Parser (String, Maybe Double, Int)
options = (,,)
<$> strOption (mconcat
[ long "hostname"
, value "suns.degradolab.org"
, showDefaultWith id
, metavar "STRING"
, help "Search engine address"
] )
<*> optional (option auto (mconcat
[ short 'r'
, long "rmsd"
, value 1.0
, showDefault
, metavar "DOUBLE"
, help "Override RMSD cutoff"
] ))
<*> option auto (mconcat
[ short 'n'
, long "num"
, value 100
, showDefault
, metavar "INT"
, help "Number of results"
] )
parserInfo :: ParserInfo (String, Maybe Double, Int)
parserInfo = info (helper <*> options) $ mconcat
[ fullDesc
, header "suns-cmd: The Suns search command line client"
, progDesc "Send search requests and store results as PDB files"
, footer "Report bugs to suns-search@googlegroups.com"
]
main :: IO ()
main = do
(hostName, rmsd', numResults) <- execParser parserInfo
putStrLn ("Host name: " ++ hostName)
putStrLn ("RMSD Override: " ++ show rmsd')
putStrLn ("Max results: " ++ show numResults)
let params =
map (\(rmsd, filePath, _) ->
(fromMaybe rmsd rmsd', numResults, 0, filePath) )
parameters
search hostName params $ \ps -> do
t0 <- getCurrentTime
tRef <- newIORef t0
iterT (step tRef) ps
where
step tRef (Step filePath p) = do
putStrLn ("Testing: " ++ filePath)
(n, m) <- _length p
t1 <- readIORef tRef
t2 <- getCurrentTime
writeIORef tRef t2
let diff = realToFrac (t2 `diffUTCTime` t1) * 1000 :: Double
putStrLn ("Number of results: " ++ show n)
putStrLn ("Time: " ++ show diff)
m
_length :: Monad m => Producer a m r -> m (Int, r)
_length p = go p 0
where
go (Pure r) n = return (n, r)
go (Respond _ k) n = go (k ()) $! n + 1
go (Request _ k) n = go (k ()) n
go (M m) n = m >>= \p' -> go p' n
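-- A quick sanity check (sketch): in the Identity monad, _length (each [1,2,3])
-- evaluates to (3, ()), i.e. one count per value yielded by the producer.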
|
Gabriel439/suns-cmd
|
Bench.hs
|
bsd-3-clause
| 2,287
| 0
| 15
| 721
| 785
| 398
| 387
| 66
| 4
|
{-# LANGUAGE OverloadedStrings #-}
module ZMotor where
import System.Hardware.Serialport
import Control.Monad (void)
import qualified Data.ByteString.Char8 as BS
newtype ZMotor = ZMotor SerialPort
open :: FilePath -> IO ZMotor
open fname = do
dev <- openSerial fname settings
send dev "rate 1000\n"
return $ ZMotor dev
where
settings = defaultSerialSettings { commSpeed = CS115200 }
move :: ZMotor -> Int -> IO ()
move (ZMotor dev) pos = do
send dev $ BS.pack $ "move "++show pos++"\n"
flush dev
return ()
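-- Example usage (sketch; the device path below is only an illustration):
--
-- > main :: IO ()
-- > main = do
-- >   m <- open "/dev/ttyUSB0"
-- >   move m 1000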
|
bgamari/navigate
|
ZMotor.hs
|
bsd-3-clause
| 547
| 0
| 11
| 121
| 178
| 91
| 87
| 17
| 1
|
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE Trustworthy #-}
module Network.Tox.DHT.KBucketsSpec where
import Test.Hspec
import Test.QuickCheck
import Control.Monad (unless, when)
import Data.List (sortOn)
import qualified Data.Map as Map
import Data.Proxy (Proxy (..))
import Network.Tox.Crypto.Key (PublicKey)
import qualified Network.Tox.DHT.ClientList as ClientList
import qualified Network.Tox.DHT.Distance as Distance
import Network.Tox.DHT.KBuckets (KBuckets)
import qualified Network.Tox.DHT.KBuckets as KBuckets
import qualified Network.Tox.DHT.NodeList as NodeList
import Network.Tox.EncodingSpec
import Network.Tox.NodeInfo.NodeInfo (NodeInfo)
import qualified Network.Tox.NodeInfo.NodeInfo as NodeInfo
makeInputKey :: Int -> Char -> PublicKey
makeInputKey pos digit =
read $ "\"" ++ map (const '0') [0 .. pos - 1] ++ digit : map (const '0') [pos .. 62] ++ "\""
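-- For instance, makeInputKey 0 '8' reads a 64-digit key consisting of '8'
-- followed by 63 '0' characters; makeInputKey 63 '1' puts the '1' last.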
getAllBuckets :: KBuckets -> [[NodeInfo]]
getAllBuckets kBuckets =
map ClientList.nodeInfos (Map.elems (KBuckets.buckets kBuckets))
spec :: Spec
spec = do
readShowSpec (Proxy :: Proxy KBuckets)
it "does not accept adding a NodeInfo with the baseKey as publicKey" $
property $ \kBuckets time nodeInfo ->
KBuckets.addNode time nodeInfo { NodeInfo.publicKey = KBuckets.baseKey kBuckets } kBuckets
`shouldBe`
kBuckets
it "adding a node to an empty k-buckets always succeeds if baseKey <> nodeKey" $
property $ \baseKey time nodeInfo ->
let
empty = KBuckets.empty baseKey
kBuckets = KBuckets.addNode time nodeInfo empty
in
if baseKey == NodeInfo.publicKey nodeInfo
then kBuckets `shouldBe` empty
else kBuckets `shouldNotBe` empty
it "removing a node twice has no effect" $
property $ \baseKey time nodeInfo ->
let
empty = KBuckets.empty baseKey
afterAdd = KBuckets.addNode time nodeInfo empty
afterRemove0 = KBuckets.removeNode (NodeInfo.publicKey nodeInfo) afterAdd
afterRemove1 = KBuckets.removeNode (NodeInfo.publicKey nodeInfo) afterRemove0
in
afterRemove0 `shouldBe` afterRemove1
it "adding a node twice has no effect" $
property $ \baseKey time nodeInfo ->
let
empty = KBuckets.empty baseKey
afterAdd0 = KBuckets.addNode time nodeInfo empty
afterAdd1 = KBuckets.addNode time nodeInfo afterAdd0
in
afterAdd0 `shouldBe` afterAdd1
it "adding a non-viable node has no effect" $
property $ \(kBuckets::KBuckets) time nodeInfo ->
let
viable = KBuckets.viable nodeInfo kBuckets
afterAdd = KBuckets.addNode time nodeInfo kBuckets
in
unless viable $ afterAdd `shouldBe` kBuckets
it "never contains a NodeInfo with the public key equal to the base key" $
property $ \kBuckets ->
notElem (KBuckets.baseKey kBuckets) $ concatMap (map NodeInfo.publicKey) $ getAllBuckets kBuckets
describe "each bucket list" $ do
it "has maximum size bucketSize" $
property $ \kBuckets ->
mapM_
(`shouldSatisfy` (== KBuckets.bucketSize kBuckets) . ClientList.maxSize)
. Map.elems $ KBuckets.buckets kBuckets
it "has base key baseKey" $
property $ \kBuckets ->
mapM_
(`shouldSatisfy` (== KBuckets.baseKey kBuckets) . ClientList.baseKey)
. Map.elems $ KBuckets.buckets kBuckets
describe "bucketIndex" $ do
it "returns an integer between 0 and 255 for any two non-equal keys" $
property $ \k1 k2 ->
when (k1 /= k2) $
-- In our implementation, this is guaranteed by the type system, as
-- we're using Word8, which can only represent values in this range.
KBuckets.bucketIndex k1 k2 `shouldSatisfy` \case
Nothing -> False
Just index -> index >= 0 && index <= 255
it "is undefined for two equal keys" $
property $ \k ->
KBuckets.bucketIndex k k `shouldBe` Nothing
it "returns a larger index for smaller distances and smaller index for larger distances" $
property $ \k1 k2 k3 ->
let
d = Distance.xorDistance k1
i = KBuckets.bucketIndex k1
in
if d k2 <= d k3
then i k2 >= i k3
else i k2 <= i k3
it "produces indices 0..255 for each bit set in the key" $
let
zeroKey = read "\"0000000000000000000000000000000000000000000000000000000000000000\""
inputs = zeroKey : concatMap (\pos -> map (makeInputKey pos) ['8', '4', '2', '1']) [0 .. 63]
outputs = Nothing : map Just [0 .. 255]
in
map (KBuckets.bucketIndex zeroKey) inputs `shouldBe` outputs
describe "foldNodes" $
it "iterates over nodes in order of distance from the base key" $
property $ \kBuckets ->
let
nodes = reverse $ NodeList.foldNodes (flip (:)) [] kBuckets
nodeDistance node = Distance.xorDistance (KBuckets.baseKey kBuckets) (NodeInfo.publicKey node)
in
nodes `shouldBe` sortOn nodeDistance nodes
|
iphydf/hs-toxcore
|
test/Network/Tox/DHT/KBucketsSpec.hs
|
gpl-3.0
| 5,327
| 0
| 21
| 1,501
| 1,295
| 681
| 614
| 108
| 4
|
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="tr-TR">
<title>WebSockets | ZAP Uzantısı</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>İçerikler</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Dizin</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Arama</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Sık Kullanılanlar</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
veggiespam/zap-extensions
|
addOns/websocket/src/main/javahelp/org/zaproxy/zap/extension/websocket/resources/help_tr_TR/helpset_tr_TR.hs
|
apache-2.0
| 985
| 80
| 66
| 160
| 427
| 215
| 212
| -1
| -1
|
-- |
-- Module : Statistics.Matrix.Algorithms
-- Copyright : 2014 Bryan O'Sullivan
-- License : BSD3
--
-- Useful matrix functions.
module Statistics.Matrix.Algorithms
(
qr
) where
import Control.Applicative ((<$>), (<*>))
import Control.Monad.ST (ST, runST)
import Prelude hiding (sum, replicate)
import Statistics.Matrix (Matrix, column, dimension, for, norm)
import qualified Statistics.Matrix.Mutable as M
import Statistics.Sample.Internal (sum)
import qualified Data.Vector.Unboxed as U
-- | /O(r*c)/ Compute the QR decomposition of a matrix.
-- The result returned is the matrices (/q/,/r/).
qr :: Matrix -> (Matrix, Matrix)
qr mat = runST $ do
let (m,n) = dimension mat
r <- M.replicate n n 0
a <- M.thaw mat
for 0 n $ \j -> do
cn <- M.immutably a $ \aa -> norm (column aa j)
M.unsafeWrite r j j cn
for 0 m $ \i -> M.unsafeModify a i j (/ cn)
for (j+1) n $ \jj -> do
p <- innerProduct a j jj
M.unsafeWrite r j jj p
for 0 m $ \i -> do
aij <- M.unsafeRead a i j
M.unsafeModify a i jj $ subtract (p * aij)
(,) <$> M.unsafeFreeze a <*> M.unsafeFreeze r
innerProduct :: M.MMatrix s -> Int -> Int -> ST s Double
innerProduct mmat j k = M.immutably mmat $ \mat ->
sum $ U.zipWith (*) (column mat j) (column mat k)
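-- Usage sketch (assumes 'fromList' and 'multiply' from Statistics.Matrix;
-- check the names against the installed version of the library):
--
-- > let a      = fromList 3 2 [1,2, 3,4, 5,6]
-- >     (q, r) = qr a
-- > -- multiply q r should reproduce 'a' up to rounding error.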
|
fpco/statistics
|
Statistics/Matrix/Algorithms.hs
|
bsd-2-clause
| 1,299
| 0
| 23
| 304
| 502
| 265
| 237
| 29
| 1
|
{-# LANGUAGE CPP #-}
-- -----------------------------------------------------------------------------
--
-- (c) The University of Glasgow 1994-2004
--
-- -----------------------------------------------------------------------------
module PPC.Regs (
-- squeeze functions
virtualRegSqueeze,
realRegSqueeze,
mkVirtualReg,
regDotColor,
-- immediates
Imm(..),
strImmLit,
litToImm,
-- addressing modes
AddrMode(..),
addrOffset,
-- registers
spRel,
argRegs,
allArgRegs,
callClobberedRegs,
allMachRegNos,
classOfRealReg,
showReg,
-- machine specific
allFPArgRegs,
fits16Bits,
makeImmediate,
fReg,
r0, sp, toc, r3, r4, r11, r12, r27, r28, r30,
tmpReg,
f1, f20, f21,
allocatableRegs
)
where
#include "nativeGen/NCG.h"
#include "HsVersions.h"
import Reg
import RegClass
import Format
import Cmm
import CLabel ( CLabel )
import Unique
import CodeGen.Platform
import DynFlags
import Outputable
import Platform
import Data.Word ( Word8, Word16, Word32, Word64 )
import Data.Int ( Int8, Int16, Int32, Int64 )
-- squeeze functions for the graph allocator -----------------------------------
-- | regSqueeze_class reg
--      Calculate the maximum number of register colors that could be
-- denied to a node of this class due to having this reg
-- as a neighbour.
--
{-# INLINE virtualRegSqueeze #-}
virtualRegSqueeze :: RegClass -> VirtualReg -> Int
virtualRegSqueeze cls vr
= case cls of
RcInteger
-> case vr of
VirtualRegI{} -> 1
VirtualRegHi{} -> 1
_other -> 0
RcDouble
-> case vr of
VirtualRegD{} -> 1
VirtualRegF{} -> 0
_other -> 0
_other -> 0
{-# INLINE realRegSqueeze #-}
realRegSqueeze :: RegClass -> RealReg -> Int
realRegSqueeze cls rr
= case cls of
RcInteger
-> case rr of
RealRegSingle regNo
| regNo < 32 -> 1 -- first fp reg is 32
| otherwise -> 0
RealRegPair{} -> 0
RcDouble
-> case rr of
RealRegSingle regNo
| regNo < 32 -> 0
| otherwise -> 1
RealRegPair{} -> 0
_other -> 0
mkVirtualReg :: Unique -> Format -> VirtualReg
mkVirtualReg u format
| not (isFloatFormat format) = VirtualRegI u
| otherwise
= case format of
FF32 -> VirtualRegD u
FF64 -> VirtualRegD u
_ -> panic "mkVirtualReg"
regDotColor :: RealReg -> SDoc
regDotColor reg
= case classOfRealReg reg of
RcInteger -> text "blue"
RcFloat -> text "red"
RcDouble -> text "green"
RcDoubleSSE -> text "yellow"
-- immediates ------------------------------------------------------------------
data Imm
= ImmInt Int
| ImmInteger Integer -- Sigh.
| ImmCLbl CLabel -- AbstractC Label (with baggage)
| ImmLit SDoc -- Simple string
| ImmIndex CLabel Int
| ImmFloat Rational
| ImmDouble Rational
| ImmConstantSum Imm Imm
| ImmConstantDiff Imm Imm
| LO Imm
| HI Imm
| HA Imm {- high halfword adjusted -}
| HIGHERA Imm
| HIGHESTA Imm
strImmLit :: String -> Imm
strImmLit s = ImmLit (text s)
litToImm :: CmmLit -> Imm
litToImm (CmmInt i w) = ImmInteger (narrowS w i)
-- narrow to the width: a CmmInt might be out of
-- range, but we assume that ImmInteger only contains
-- in-range values. A signed value should be fine here.
litToImm (CmmFloat f W32) = ImmFloat f
litToImm (CmmFloat f W64) = ImmDouble f
litToImm (CmmLabel l) = ImmCLbl l
litToImm (CmmLabelOff l off) = ImmIndex l off
litToImm (CmmLabelDiffOff l1 l2 off)
= ImmConstantSum
(ImmConstantDiff (ImmCLbl l1) (ImmCLbl l2))
(ImmInt off)
litToImm _ = panic "PPC.Regs.litToImm: no match"
-- addressing modes ------------------------------------------------------------
data AddrMode
= AddrRegReg Reg Reg
| AddrRegImm Reg Imm
addrOffset :: AddrMode -> Int -> Maybe AddrMode
addrOffset addr off
= case addr of
AddrRegImm r (ImmInt n)
| fits16Bits n2 -> Just (AddrRegImm r (ImmInt n2))
| otherwise -> Nothing
where n2 = n + off
AddrRegImm r (ImmInteger n)
| fits16Bits n2 -> Just (AddrRegImm r (ImmInt (fromInteger n2)))
| otherwise -> Nothing
where n2 = n + toInteger off
_ -> Nothing
-- registers -------------------------------------------------------------------
-- @spRel@ gives us a stack relative addressing mode for volatile
-- temporaries and for excess call arguments. @fpRel@, where
-- applicable, is the same but for the frame pointer.
spRel :: DynFlags
-> Int -- desired stack offset in words, positive or negative
-> AddrMode
spRel dflags n = AddrRegImm sp (ImmInt (n * wORD_SIZE dflags))
-- argRegs is the set of regs which are read for an n-argument call to C.
-- For archs which pass all args on the stack (x86), this list is empty.
-- Sparc passes up to the first 6 args in regs.
argRegs :: RegNo -> [Reg]
argRegs 0 = []
argRegs 1 = map regSingle [3]
argRegs 2 = map regSingle [3,4]
argRegs 3 = map regSingle [3..5]
argRegs 4 = map regSingle [3..6]
argRegs 5 = map regSingle [3..7]
argRegs 6 = map regSingle [3..8]
argRegs 7 = map regSingle [3..9]
argRegs 8 = map regSingle [3..10]
argRegs _ = panic "MachRegs.argRegs(powerpc): don't know about >8 arguments!"
allArgRegs :: [Reg]
allArgRegs = map regSingle [3..10]
-- these are the regs which we cannot assume stay alive over a C call.
callClobberedRegs :: Platform -> [Reg]
callClobberedRegs platform
= case platformOS platform of
OSAIX -> map regSingle (0:[2..12] ++ map fReg [0..13])
OSDarwin -> map regSingle (0:[2..12] ++ map fReg [0..13])
OSLinux -> map regSingle (0:[2..13] ++ map fReg [0..13])
_ -> panic "PPC.Regs.callClobberedRegs: not defined for this architecture"
allMachRegNos :: [RegNo]
allMachRegNos = [0..63]
{-# INLINE classOfRealReg #-}
classOfRealReg :: RealReg -> RegClass
classOfRealReg (RealRegSingle i)
| i < 32 = RcInteger
| otherwise = RcDouble
classOfRealReg (RealRegPair{})
= panic "regClass(ppr): no reg pairs on this architecture"
showReg :: RegNo -> String
showReg n
| n >= 0 && n <= 31 = "%r" ++ show n
| n >= 32 && n <= 63 = "%f" ++ show (n - 32)
| otherwise = "%unknown_powerpc_real_reg_" ++ show n
-- machine specific ------------------------------------------------------------
allFPArgRegs :: Platform -> [Reg]
allFPArgRegs platform
= case platformOS platform of
OSAIX -> map (regSingle . fReg) [1..13]
OSDarwin -> map (regSingle . fReg) [1..13]
OSLinux -> map (regSingle . fReg) [1..8]
_ -> panic "PPC.Regs.allFPArgRegs: not defined for this architecture"
fits16Bits :: Integral a => a -> Bool
fits16Bits x = x >= -32768 && x < 32768
makeImmediate :: Integral a => Width -> Bool -> a -> Maybe Imm
makeImmediate rep signed x = fmap ImmInt (toI16 rep signed)
where
narrow W64 False = fromIntegral (fromIntegral x :: Word64)
narrow W32 False = fromIntegral (fromIntegral x :: Word32)
narrow W16 False = fromIntegral (fromIntegral x :: Word16)
narrow W8 False = fromIntegral (fromIntegral x :: Word8)
narrow W64 True = fromIntegral (fromIntegral x :: Int64)
narrow W32 True = fromIntegral (fromIntegral x :: Int32)
narrow W16 True = fromIntegral (fromIntegral x :: Int16)
narrow W8 True = fromIntegral (fromIntegral x :: Int8)
narrow _ _ = panic "PPC.Regs.narrow: no match"
narrowed = narrow rep signed
toI16 W32 True
| narrowed >= -32768 && narrowed < 32768 = Just narrowed
| otherwise = Nothing
toI16 W32 False
| narrowed >= 0 && narrowed < 65536 = Just narrowed
| otherwise = Nothing
toI16 W64 True
| narrowed >= -32768 && narrowed < 32768 = Just narrowed
| otherwise = Nothing
toI16 W64 False
| narrowed >= 0 && narrowed < 65536 = Just narrowed
| otherwise = Nothing
toI16 _ _ = Just narrowed
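-- For example, following the clauses above:
--   makeImmediate W32 False 40000 == Just (ImmInt 40000)  (fits in unsigned 16 bits)
--   makeImmediate W32 True  40000 == Nothing              (outside the signed 16-bit range)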
{-
The PowerPC has 64 registers of interest; 32 integer registers and 32 floating
point registers.
-}
fReg :: Int -> RegNo
fReg x = (32 + x)
r0, sp, toc, r3, r4, r11, r12, r27, r28, r30, f1, f20, f21 :: Reg
r0 = regSingle 0
sp = regSingle 1
toc = regSingle 2
r3 = regSingle 3
r4 = regSingle 4
r11 = regSingle 11
r12 = regSingle 12
r27 = regSingle 27
r28 = regSingle 28
r30 = regSingle 30
f1 = regSingle $ fReg 1
f20 = regSingle $ fReg 20
f21 = regSingle $ fReg 21
-- allocatableRegs is allMachRegNos with the fixed-use regs removed.
-- i.e., these are the regs for which we are prepared to allow the
-- register allocator to attempt to map VRegs to.
allocatableRegs :: Platform -> [RealReg]
allocatableRegs platform
= let isFree i = freeReg platform i
in map RealRegSingle $ filter isFree allMachRegNos
-- temporary register for compiler use
tmpReg :: Platform -> Reg
tmpReg platform =
case platformArch platform of
ArchPPC -> regSingle 13
ArchPPC_64 _ -> regSingle 30
      _            -> panic "PPC.Regs.tmpReg: unknown arch"
|
vikraman/ghc
|
compiler/nativeGen/PPC/Regs.hs
|
bsd-3-clause
| 10,028
| 0
| 15
| 3,174
| 2,493
| 1,296
| 1,197
| 226
| 13
|
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedStrings #-}
-- | Reading from external processes.
module System.Process.Read
(readProcessStdout
,readProcessStderrStdout
,tryProcessStdout
,tryProcessStderrStdout
,sinkProcessStdout
,sinkProcessStderrStdout
,sinkProcessStderrStdoutHandle
,logProcessStderrStdout
,readProcess
,EnvOverride(..)
,unEnvOverride
,mkEnvOverride
,modifyEnvOverride
,envHelper
,doesExecutableExist
,findExecutable
,getEnvOverride
,envSearchPath
,preProcess
,readProcessNull
,ReadProcessException (..)
,augmentPath
,augmentPathMap
,resetExeCache
)
where
import Control.Applicative
import Control.Arrow ((***), first)
import Control.Concurrent.Async (concurrently)
import Control.Exception hiding (try, catch)
import Control.Monad (join, liftM, unless, void)
import Control.Monad.Catch (MonadThrow, MonadCatch, throwM, try, catch)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Monad.Logger
import Control.Monad.Trans.Control (MonadBaseControl, liftBaseWith)
import qualified Data.ByteString as S
import Data.ByteString.Builder
import qualified Data.ByteString.Lazy as L
import Data.Conduit
import qualified Data.Conduit.Binary as CB
import qualified Data.Conduit.List as CL
import Data.Conduit.Process hiding (callProcess)
import Data.IORef
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe (isJust, maybeToList, fromMaybe)
import Data.Monoid
import Data.Text (Text)
import qualified Data.Text as T
import Data.Text.Encoding.Error (lenientDecode)
import qualified Data.Text.Lazy as LT
import qualified Data.Text.Lazy.Encoding as LT
import Data.Typeable (Typeable)
import Distribution.System (OS (Windows), Platform (Platform))
import Language.Haskell.TH as TH (location)
import Path
import Path.Extra
import Path.IO hiding (findExecutable)
import Prelude -- Fix AMP warning
import qualified System.Directory as D
import System.Environment (getEnvironment)
import System.Exit
import qualified System.FilePath as FP
import System.IO (Handle, hClose)
import System.Process.Log
import Prelude () -- Hide post-AMP warnings
-- | Override the environment received by a child process.
data EnvOverride = EnvOverride
{ eoTextMap :: Map Text Text -- ^ Environment variables as map
, eoStringList :: [(String, String)] -- ^ Environment variables as association list
, eoPath :: [FilePath] -- ^ List of directories searched for executables (@PATH@)
, eoExeCache :: IORef (Map FilePath (Either ReadProcessException (Path Abs File)))
, eoExeExtensions :: [String] -- ^ @[""]@ on non-Windows systems, @["", ".exe", ".bat"]@ on Windows
, eoPlatform :: Platform
}
-- | Get the environment variables from an 'EnvOverride'.
unEnvOverride :: EnvOverride -> Map Text Text
unEnvOverride = eoTextMap
-- | Get the list of directories searched (@PATH@).
envSearchPath :: EnvOverride -> [FilePath]
envSearchPath = eoPath
-- | Modify the environment variables of an 'EnvOverride'.
modifyEnvOverride :: MonadIO m
=> EnvOverride
-> (Map Text Text -> Map Text Text)
-> m EnvOverride
modifyEnvOverride eo f = mkEnvOverride
(eoPlatform eo)
(f $ eoTextMap eo)
-- | Create a new 'EnvOverride'.
mkEnvOverride :: MonadIO m
=> Platform
-> Map Text Text
-> m EnvOverride
mkEnvOverride platform tm' = do
ref <- liftIO $ newIORef Map.empty
return EnvOverride
{ eoTextMap = tm
, eoStringList = map (T.unpack *** T.unpack) $ Map.toList tm
, eoPath =
(if isWindows then (".":) else id)
(maybe [] (FP.splitSearchPath . T.unpack) (Map.lookup "PATH" tm))
, eoExeCache = ref
, eoExeExtensions =
if isWindows
then let pathext = fromMaybe
".COM;.EXE;.BAT;.CMD;.VBS;.VBE;.JS;.JSE;.WSF;.WSH;.MSC"
(Map.lookup "PATHEXT" tm)
in map T.unpack $ "" : T.splitOn ";" pathext
else [""]
, eoPlatform = platform
}
where
-- Fix case insensitivity of the PATH environment variable on Windows.
tm
| isWindows = Map.fromList $ map (first T.toUpper) $ Map.toList tm'
| otherwise = tm'
-- Don't use CPP so that the Windows code path is at least type checked
-- regularly
isWindows =
case platform of
Platform _ Windows -> True
_ -> False
-- | Helper conversion function.
envHelper :: EnvOverride -> Maybe [(String, String)]
envHelper = Just . eoStringList
-- | Read from the process, ignoring any output.
--
-- Throws a 'ReadProcessException' exception if the process fails.
readProcessNull :: (MonadIO m, MonadLogger m, MonadBaseControl IO m, MonadCatch m)
=> Maybe (Path Abs Dir) -- ^ Optional working directory
-> EnvOverride
-> String -- ^ Command
-> [String] -- ^ Command line arguments
-> m ()
readProcessNull wd menv name args =
sinkProcessStdout wd menv name args CL.sinkNull
-- | Try to produce a strict 'S.ByteString' from the stdout of a
-- process.
tryProcessStdout :: (MonadIO m, MonadLogger m, MonadBaseControl IO m, MonadCatch m)
=> Maybe (Path Abs Dir) -- ^ Optional directory to run in
-> EnvOverride
-> String -- ^ Command
-> [String] -- ^ Command line arguments
-> m (Either ReadProcessException S.ByteString)
tryProcessStdout wd menv name args =
try (readProcessStdout wd menv name args)
-- | Try to produce strict 'S.ByteString's from the stderr and stdout of a
-- process.
tryProcessStderrStdout :: (MonadIO m, MonadLogger m, MonadBaseControl IO m, MonadCatch m)
=> Maybe (Path Abs Dir) -- ^ Optional directory to run in
-> EnvOverride
-> String -- ^ Command
-> [String] -- ^ Command line arguments
-> m (Either ReadProcessException (S.ByteString, S.ByteString))
tryProcessStderrStdout wd menv name args =
try (readProcessStderrStdout wd menv name args)
-- | Produce a strict 'S.ByteString' from the stdout of a process.
--
-- Throws a 'ReadProcessException' exception if the process fails.
readProcessStdout :: (MonadIO m, MonadLogger m, MonadBaseControl IO m, MonadCatch m)
=> Maybe (Path Abs Dir) -- ^ Optional directory to run in
-> EnvOverride
-> String -- ^ Command
-> [String] -- ^ Command line arguments
-> m S.ByteString
readProcessStdout wd menv name args =
sinkProcessStdout wd menv name args CL.consume >>=
liftIO . evaluate . S.concat
-- | Produce strict 'S.ByteString's from the stderr and stdout of a process.
--
-- Throws a 'ReadProcessException' exception if the process fails.
readProcessStderrStdout :: (MonadIO m, MonadLogger m, MonadBaseControl IO m)
=> Maybe (Path Abs Dir) -- ^ Optional directory to run in
-> EnvOverride
-> String -- ^ Command
-> [String] -- ^ Command line arguments
-> m (S.ByteString, S.ByteString)
readProcessStderrStdout wd menv name args = do
(e, o) <- sinkProcessStderrStdout wd menv name args CL.consume CL.consume
liftIO $ (,) <$> evaluate (S.concat e) <*> evaluate (S.concat o)
-- | An exception while trying to read from process.
data ReadProcessException
= ProcessFailed CreateProcess ExitCode L.ByteString L.ByteString
-- ^ @'ProcessFailed' createProcess exitCode stdout stderr@
| NoPathFound
| ExecutableNotFound String [FilePath]
| ExecutableNotFoundAt FilePath
deriving Typeable
instance Show ReadProcessException where
show (ProcessFailed cp ec out err) = concat $
[ "Running "
, showSpec $ cmdspec cp] ++
maybe [] (\x -> [" in directory ", x]) (cwd cp) ++
[ " exited with "
, show ec
, "\n\n"
, toStr out
, "\n"
, toStr err
]
where
toStr = LT.unpack . LT.decodeUtf8With lenientDecode
showSpec (ShellCommand str) = str
showSpec (RawCommand cmd args) =
unwords $ cmd : map (T.unpack . showProcessArgDebug) args
show NoPathFound = "PATH not found in EnvOverride"
show (ExecutableNotFound name path) = concat
[ "Executable named "
, name
, " not found on path: "
, show path
]
show (ExecutableNotFoundAt name) =
"Did not find executable at specified path: " ++ name
instance Exception ReadProcessException
-- | Consume the stdout of a process feeding strict 'S.ByteString's to a consumer.
-- If the process fails, spits out stdout and stderr as error log
-- level. Should not be used for long-running processes or ones with
-- lots of output; for that use 'sinkProcessStdoutLogStderr'.
--
-- Throws a 'ReadProcessException' if unsuccessful.
sinkProcessStdout
:: (MonadIO m, MonadLogger m, MonadBaseControl IO m, MonadCatch m)
=> Maybe (Path Abs Dir) -- ^ Optional directory to run in
-> EnvOverride
-> String -- ^ Command
-> [String] -- ^ Command line arguments
-> Sink S.ByteString IO a -- ^ Sink for stdout
-> m a
sinkProcessStdout wd menv name args sinkStdout = do
stderrBuffer <- liftIO (newIORef mempty)
stdoutBuffer <- liftIO (newIORef mempty)
(_,sinkRet) <-
catch
(sinkProcessStderrStdout
wd
menv
name
args
(CL.mapM_ (\bytes -> liftIO (modifyIORef' stderrBuffer (<> byteString bytes))))
(CL.iterM (\bytes -> liftIO (modifyIORef' stdoutBuffer (<> byteString bytes))) $=
sinkStdout))
(\(ProcessExitedUnsuccessfully cp ec) ->
do stderrBuilder <- liftIO (readIORef stderrBuffer)
stdoutBuilder <- liftIO (readIORef stdoutBuffer)
throwM $ ProcessFailed
cp
ec
(toLazyByteString stdoutBuilder)
(toLazyByteString stderrBuilder))
return sinkRet
logProcessStderrStdout
:: (MonadIO m, MonadBaseControl IO m, MonadLogger m)
=> Maybe (Path Abs Dir)
-> String
-> EnvOverride
-> [String]
-> m ()
logProcessStderrStdout mdir name menv args = liftBaseWith $ \restore -> do
let logLines = CB.lines =$ CL.mapM_ (void . restore . monadLoggerLog $(TH.location >>= liftLoc) "" LevelInfo . toLogStr)
void $ restore $ sinkProcessStderrStdout mdir menv name args logLines logLines
-- | Consume the stdout and stderr of a process feeding strict 'S.ByteString's to the consumers.
--
-- Throws a 'ReadProcessException' if unsuccessful in launching, or 'ProcessExitedUnsuccessfully' if the process itself fails.
sinkProcessStderrStdout :: forall m e o. (MonadIO m, MonadLogger m)
=> Maybe (Path Abs Dir) -- ^ Optional directory to run in
-> EnvOverride
-> String -- ^ Command
-> [String] -- ^ Command line arguments
-> Sink S.ByteString IO e -- ^ Sink for stderr
-> Sink S.ByteString IO o -- ^ Sink for stdout
-> m (e,o)
sinkProcessStderrStdout wd menv name args sinkStderr sinkStdout = do
name' <- preProcess wd menv name
$withProcessTimeLog name' args $
liftIO $ withCheckedProcess
(proc name' args) { env = envHelper menv, cwd = fmap toFilePath wd }
(\ClosedStream out err -> f err out)
where
-- There is a bug in streaming-commons or conduit-extra which
-- leads to a file descriptor leak. Ideally, we should be able to
-- simply use the following code. Instead, we're using the code
-- below it, which is explicit in closing Handles. When the
-- upstream bug is fixed, we can consider moving back to the
-- simpler code, though there's really no downside to the more
-- complex version used here.
--
-- f :: Source IO S.ByteString -> Source IO S.ByteString -> IO (e, o)
-- f err out = (err $$ sinkStderr) `concurrently` (out $$ sinkStdout)
f :: Handle -> Handle -> IO (e, o)
f err out = ((CB.sourceHandle err $$ sinkStderr) `concurrently` (CB.sourceHandle out $$ sinkStdout))
`finally` hClose err `finally` hClose out
-- | Like sinkProcessStderrStdout, but receives Handles for stderr and stdout instead of 'Sink's.
--
-- Throws a 'ReadProcessException' if unsuccessful in launching, or 'ProcessExitedUnsuccessfully' if the process itself fails.
sinkProcessStderrStdoutHandle :: (MonadIO m, MonadLogger m)
=> Maybe (Path Abs Dir) -- ^ Optional directory to run in
-> EnvOverride
-> String -- ^ Command
-> [String] -- ^ Command line arguments
-> Handle
-> Handle
-> m ()
sinkProcessStderrStdoutHandle wd menv name args err out = do
name' <- preProcess wd menv name
$withProcessTimeLog name' args $
liftIO $ withCheckedProcess
(proc name' args)
{ env = envHelper menv
, cwd = fmap toFilePath wd
, std_err = UseHandle err
, std_out = UseHandle out
}
(\ClosedStream UseProvidedHandle UseProvidedHandle -> return ())
-- | Perform pre-call-process tasks. Ensure the working directory exists and find the
-- executable path.
--
-- Throws a 'ReadProcessException' if unsuccessful.
preProcess :: (MonadIO m)
=> Maybe (Path Abs Dir) -- ^ Optional directory to create if necessary
-> EnvOverride -- ^ How to override environment
-> String -- ^ Command name
-> m FilePath
preProcess wd menv name = do
name' <- liftIO $ liftM toFilePath $ join $ findExecutable menv name
maybe (return ()) ensureDir wd
return name'
-- | Check if the given executable exists on the given PATH.
doesExecutableExist :: (MonadIO m)
=> EnvOverride -- ^ How to override environment
-> String -- ^ Name of executable
-> m Bool
doesExecutableExist menv name = liftM isJust $ findExecutable menv name
-- | Find the complete path for the executable.
--
-- Throws a 'ReadProcessException' if unsuccessful.
findExecutable :: (MonadIO m, MonadThrow n)
=> EnvOverride -- ^ How to override environment
-> String -- ^ Name of executable
-> m (n (Path Abs File)) -- ^ Full path to that executable on success
findExecutable eo name0 | any FP.isPathSeparator name0 = do
let names0 = map (name0 ++) (eoExeExtensions eo)
testNames [] = return $ throwM $ ExecutableNotFoundAt name0
testNames (name:names) = do
exists <- liftIO $ D.doesFileExist name
if exists
then do
path <- liftIO $ resolveFile' name
return $ return path
else testNames names
testNames names0
findExecutable eo name = liftIO $ do
m <- readIORef $ eoExeCache eo
epath <- case Map.lookup name m of
Just epath -> return epath
Nothing -> do
let loop [] = return $ Left $ ExecutableNotFound name (eoPath eo)
loop (dir:dirs) = do
let fp0 = dir FP.</> name
fps0 = map (fp0 ++) (eoExeExtensions eo)
testFPs [] = loop dirs
testFPs (fp:fps) = do
exists <- D.doesFileExist fp
existsExec <- if exists then liftM D.executable $ D.getPermissions fp else return False
if existsExec
then do
fp' <- D.makeAbsolute fp >>= parseAbsFile
return $ return fp'
else testFPs fps
testFPs fps0
epath <- loop $ eoPath eo
() <- atomicModifyIORef (eoExeCache eo) $ \m' ->
(Map.insert name epath m', ())
return epath
return $ either throwM return epath
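-- Usage sketch (the executable name is illustrative): the result is wrapped in
-- a 'MonadThrow' action, so in IO a caller can unwrap it directly:
--
-- > epath <- findExecutable menv "ghc"
-- > path  <- epath   -- throws ExecutableNotFound if "ghc" is not on the PATH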
-- | Reset the executable cache.
resetExeCache :: MonadIO m => EnvOverride -> m ()
resetExeCache eo = liftIO (atomicModifyIORef (eoExeCache eo) (const mempty))
-- | Load up an 'EnvOverride' from the standard environment.
getEnvOverride :: MonadIO m => Platform -> m EnvOverride
getEnvOverride platform =
liftIO $
getEnvironment >>=
mkEnvOverride platform
. Map.fromList . map (T.pack *** T.pack)
data PathException = PathsInvalidInPath [FilePath]
deriving Typeable
instance Exception PathException
instance Show PathException where
show (PathsInvalidInPath paths) = unlines $
[ "Would need to add some paths to the PATH environment variable \
\to continue, but they would be invalid because they contain a "
++ show FP.searchPathSeparator ++ "."
, "Please fix the following paths and try again:"
] ++ paths
-- | Augment the PATH environment variable with the given extra paths.
augmentPath :: MonadThrow m => [Path Abs Dir] -> Maybe Text -> m Text
augmentPath dirs mpath =
do let illegal = filter (FP.searchPathSeparator `elem`) (map toFilePath dirs)
unless (null illegal) (throwM $ PathsInvalidInPath illegal)
return $ T.intercalate (T.singleton FP.searchPathSeparator)
$ map (T.pack . toFilePathNoTrailingSep) dirs
++ maybeToList mpath
-- | Apply 'augmentPath' on the PATH value in the given Map.
augmentPathMap :: MonadThrow m => [Path Abs Dir] -> Map Text Text
-> m (Map Text Text)
augmentPathMap dirs origEnv =
do path <- augmentPath dirs mpath
return $ Map.insert "PATH" path origEnv
where
mpath = Map.lookup "PATH" origEnv
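-- For example (sketch, POSIX ':' separator): augmenting an environment whose
-- PATH is "/usr/bin" with a single directory dir yields "<dir>:/usr/bin";
-- the new entries are always placed in front of the existing PATH.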
|
AndreasPK/stack
|
src/System/Process/Read.hs
|
bsd-3-clause
| 18,635
| 0
| 30
| 5,523
| 4,037
| 2,150
| 1,887
| 351
| 8
|
{-# LANGUAGE OverloadedStrings #-}
module LogWithSpec ( logWithSpec ) where
import TestInit
import Prelude hiding (FilePath)
import Control.Concurrent (newEmptyMVar, takeMVar, putMVar)
import Data.Text (Text)
default (Text)
logWithSpec :: Spec
logWithSpec =
describe "withOutputWriter" $
it "calls writer function with handler and stdout output" $ do
outputVar <- newEmptyMVar
shelly $ log_stdout_with (putMVar outputVar)
$ run_ "echo" ["single line output"]
result <- takeMVar outputVar
assertEqual "expecting output" "single line output" result
|
adinapoli/Shelly.hs
|
test/src/LogWithSpec.hs
|
bsd-3-clause
| 588
| 0
| 12
| 111
| 133
| 71
| 62
| 16
| 1
|
module Tuura.Formula (Formulae, Input, Output, Equation,
parseFormula, getFormulae, loadFormulae, unloadFormulae
) where
import Foreign.C.String
newtype Input = Input String
newtype Output = Output String
newtype Equation = Equation String
data Formulae = Formulae
{
inputs :: [Input]
, outputs :: [Output]
, equations :: [Equation]
}
parseFormula :: [Input] -> [Output] -> [Equation] -> Formulae
parseFormula = Formulae
getFormulae :: IO Formulae
getFormulae = do
nInputs <- getNumInputs
ins <- getInputs nInputs
nOutputs <- getNumOutputs
outs <- getOutputs nOutputs
nEquations <- getNumEquations
eqs <- getEquations nEquations
let formula = parseFormula ins outs eqs
return formula
loadFormulae :: Formulae -> IO ()
loadFormulae formulae = do
inputCStrings <- convertInputs $ selectInputs formulae
outputCStrings <- convertOutputs $ selectOutputs formulae
equationCStrings <- convertEquations $ selectEquations formulae
loadInputs inputCStrings
loadOutputs outputCStrings
loadEquations equationCStrings
foreign import ccall unsafe "free_formulae"
unloadFormulae :: IO ()
printInput :: Input -> String
printInput (Input s) = s
loadInput :: String -> Input
loadInput = Input
printOutput :: Output -> String
printOutput (Output s) = s
loadOutput :: String -> Output
loadOutput = Output
printEquation :: Equation -> String
printEquation (Equation s) = s
loadEquation :: String -> Equation
loadEquation = Equation
selectInputs :: Formulae -> [Input]
selectInputs = inputs
selectOutputs :: Formulae -> [Output]
selectOutputs = outputs
selectEquations :: Formulae -> [Equation]
selectEquations = equations
getInputs :: Int -> IO [Input]
getInputs nInputs = traverse getInput [0..nInputs-1]
getInput :: Int -> IO Input
getInput inputID = do
inputCString <- get_input inputID
inputString <- peekCString inputCString
let input = loadInput inputString
return input
getOutputs :: Int -> IO [Output]
getOutputs nOutputs = traverse getOutput [0..nOutputs-1]
getOutput :: Int -> IO Output
getOutput outputID = do
outputCString <- get_output outputID
outputString <- peekCString outputCString
let output = loadOutput outputString
return output
getEquations :: Int -> IO [Equation]
getEquations nEquations = traverse getEquation [0..nEquations-1]
getEquation :: Int -> IO Equation
getEquation formulaID = do
equationCString <- get_equation formulaID
equationString <- peekCString equationCString
let equation = loadEquation equationString
return equation
convertInputs :: [Input] -> IO [CString]
convertInputs is = mapM newCString $ (map printInput is)
convertOutputs :: [Output] -> IO [CString]
convertOutputs os = mapM newCString $ (map printOutput os)
convertEquations :: [Equation] -> IO [CString]
convertEquations es = mapM newCString $ (map printEquation es)
loadInputs :: [CString] -> IO ()
loadInputs is = mapM_ pushInput is
loadOutputs :: [CString] -> IO ()
loadOutputs os = mapM_ pushOutput os
loadEquations :: [CString] -> IO ()
loadEquations es = mapM_ pushEquation es
foreign import ccall unsafe "get_input"
get_input :: Int -> IO CString
foreign import ccall unsafe "push_input"
pushInput :: CString -> IO ()
foreign import ccall unsafe "get_num_inputs"
getNumInputs :: IO Int
foreign import ccall unsafe "get_output"
get_output :: Int -> IO CString
foreign import ccall unsafe "push_output"
pushOutput :: CString -> IO ()
foreign import ccall unsafe "get_num_outputs"
getNumOutputs :: IO Int
foreign import ccall unsafe "get_equation"
get_equation :: Int -> IO CString
foreign import ccall unsafe "push_equation"
pushEquation :: CString -> IO ()
foreign import ccall unsafe "get_num_equations"
getNumEquations :: IO Int
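-- Typical round trip (sketch): read the formulae produced on the C side, push
-- them back through the FFI, and release them when done, e.g.
--
-- > getFormulae >>= loadFormulae
-- > unloadFormulae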
|
allegroCoder/scenco-1
|
src/Tuura/Formula.hs
|
bsd-3-clause
| 3,887
| 0
| 10
| 768
| 1,159
| 589
| 570
| 104
| 1
|
{-# LANGUAGE Trustworthy #-}
{-# OPTIONS -fno-warn-unused-imports #-}
#include "HsConfigure.h"
-- #hide
module Data.Time.Clock.Scale
(
-- * Universal Time
-- | Time as measured by the earth.
UniversalTime(..),
-- * Absolute intervals
DiffTime,
secondsToDiffTime, picosecondsToDiffTime
) where
import Control.DeepSeq
import Data.Ratio ((%))
import Data.Fixed
import Data.Typeable
#if LANGUAGE_Rank2Types
import Data.Data
#endif
-- | The Modified Julian Date is the day with the fraction of the day, measured from UT midnight.
-- It's used to represent UT1, which is time as measured by the earth's rotation, adjusted for various wobbles.
newtype UniversalTime = ModJulianDate {getModJulianDate :: Rational} deriving (Eq,Ord
#if LANGUAGE_DeriveDataTypeable
#if LANGUAGE_Rank2Types
,Data, Typeable
#endif
#endif
)
-- necessary because H98 doesn't have "cunning newtype" derivation
instance NFData UniversalTime where
rnf (ModJulianDate a) = rnf a
-- | This is a length of time, as measured by a clock.
-- Conversion functions will treat it as seconds.
-- It has a precision of 10^-12 s.
newtype DiffTime = MkDiffTime Pico deriving (Eq,Ord
#if LANGUAGE_DeriveDataTypeable
#if LANGUAGE_Rank2Types
#if HAS_DataPico
,Data, Typeable
#else
#endif
#endif
#endif
)
-- necessary because H98 doesn't have "cunning newtype" derivation
instance NFData DiffTime -- FIXME: Data.Fixed had no NFData instances yet at time of writing
-- necessary because H98 doesn't have "cunning newtype" derivation
instance Enum DiffTime where
succ (MkDiffTime a) = MkDiffTime (succ a)
pred (MkDiffTime a) = MkDiffTime (pred a)
toEnum = MkDiffTime . toEnum
fromEnum (MkDiffTime a) = fromEnum a
enumFrom (MkDiffTime a) = fmap MkDiffTime (enumFrom a)
enumFromThen (MkDiffTime a) (MkDiffTime b) = fmap MkDiffTime (enumFromThen a b)
enumFromTo (MkDiffTime a) (MkDiffTime b) = fmap MkDiffTime (enumFromTo a b)
enumFromThenTo (MkDiffTime a) (MkDiffTime b) (MkDiffTime c) = fmap MkDiffTime (enumFromThenTo a b c)
instance Show DiffTime where
show (MkDiffTime t) = (showFixed True t) ++ "s"
-- necessary because H98 doesn't have "cunning newtype" derivation
instance Num DiffTime where
(MkDiffTime a) + (MkDiffTime b) = MkDiffTime (a + b)
(MkDiffTime a) - (MkDiffTime b) = MkDiffTime (a - b)
(MkDiffTime a) * (MkDiffTime b) = MkDiffTime (a * b)
negate (MkDiffTime a) = MkDiffTime (negate a)
abs (MkDiffTime a) = MkDiffTime (abs a)
signum (MkDiffTime a) = MkDiffTime (signum a)
fromInteger i = MkDiffTime (fromInteger i)
-- necessary because H98 doesn't have "cunning newtype" derivation
instance Real DiffTime where
toRational (MkDiffTime a) = toRational a
-- necessary because H98 doesn't have "cunning newtype" derivation
instance Fractional DiffTime where
(MkDiffTime a) / (MkDiffTime b) = MkDiffTime (a / b)
recip (MkDiffTime a) = MkDiffTime (recip a)
fromRational r = MkDiffTime (fromRational r)
-- necessary because H98 doesn't have "cunning newtype" derivation
instance RealFrac DiffTime where
properFraction (MkDiffTime a) = let (b',a') = properFraction a in (b',MkDiffTime a')
truncate (MkDiffTime a) = truncate a
round (MkDiffTime a) = round a
ceiling (MkDiffTime a) = ceiling a
floor (MkDiffTime a) = floor a
-- | Create a 'DiffTime' which represents an integral number of seconds.
secondsToDiffTime :: Integer -> DiffTime
secondsToDiffTime = fromInteger
-- | Create a 'DiffTime' from a number of picoseconds.
picosecondsToDiffTime :: Integer -> DiffTime
picosecondsToDiffTime x = fromRational (x % 1000000000000)
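-- For example, picosecondsToDiffTime 1500000000000 represents 1.5 seconds and
-- renders as "1.5s" via the Show instance above.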
{-# RULES
"realToFrac/DiffTime->Pico" realToFrac = \ (MkDiffTime ps) -> ps
"realToFrac/Pico->DiffTime" realToFrac = MkDiffTime
#-}
|
jwiegley/ghc-release
|
libraries/time/Data/Time/Clock/Scale.hs
|
gpl-3.0
| 3,728
| 4
| 10
| 639
| 903
| 473
| 430
| 54
| 1
|
module Core
( ns )
where
import System.IO (hFlush, stdout)
import Control.Exception (catch)
import Control.Monad.Trans (liftIO)
import qualified Data.Map as Map
import Data.Time.Clock.POSIX (getPOSIXTime)
import Data.IORef (IORef, newIORef, readIORef, writeIORef)
import Readline (readline)
import Reader (read_str)
import Types
import Printer (_pr_str, _pr_list)
-- General functions
equal_Q [a, b] = return $ if a == b then MalTrue else MalFalse
equal_Q _ = throwStr "illegal arguments to ="
run_1 :: (MalVal -> MalVal) -> [MalVal] -> IOThrows MalVal
run_1 f (x:[]) = return $ f x
run_1 _ _ = throwStr "function takes a single argument"
run_2 :: (MalVal -> MalVal -> MalVal) -> [MalVal] -> IOThrows MalVal
run_2 f (x:y:[]) = return $ f x y
run_2 _ _        = throwStr "function takes two arguments"
-- Error/Exception functions
throw (mv:[]) = throwMalVal mv
throw _ = throwStr "illegal arguments to throw"
-- Scalar functions
symbol (MalString str:[]) = return $ MalSymbol str
symbol _ = throwStr "symbol called with non-string"
keyword (MalString ('\x029e':str):[]) = return $ MalString $ "\x029e" ++ str
keyword (MalString str:[]) = return $ MalString $ "\x029e" ++ str
keyword _ = throwStr "keyword called with non-string"
-- String functions
pr_str args = do
return $ MalString $ _pr_list True " " args
str args = do
return $ MalString $ _pr_list False "" args
prn args = do
liftIO $ putStrLn $ _pr_list True " " args
liftIO $ hFlush stdout
return Nil
println args = do
liftIO $ putStrLn $ _pr_list False " " args
liftIO $ hFlush stdout
return Nil
slurp ([MalString path]) = do
str <- liftIO $ readFile path
return $ MalString str
slurp _ = throwStr "invalid arguments to slurp"
do_readline ([MalString prompt]) = do
str <- liftIO $ readline prompt
case str of
Nothing -> throwStr "readline failed"
Just str -> return $ MalString str
do_readline _ = throwStr "invalid arguments to readline"
-- Numeric functions
num_op op [MalNumber a, MalNumber b] = do
return $ MalNumber $ op a b
num_op _ _ = throwStr "illegal arguments to number operation"
cmp_op op [MalNumber a, MalNumber b] = do
return $ if op a b then MalTrue else MalFalse
cmp_op _ _ = throwStr "illegal arguments to comparison operation"
time_ms _ = do
t <- liftIO $ getPOSIXTime
return $ MalNumber $ round (t * 1000)
-- List functions
list args = return $ MalList args Nil
-- Vector functions
vector args = return $ MalVector args Nil
-- Hash Map functions
_pairup [x] = throwStr "Odd number of elements to _pairup"
_pairup [] = return []
_pairup (MalString x:y:xs) = do
rest <- _pairup xs
return $ (x,y):rest
hash_map args = do
pairs <- _pairup args
return $ MalHashMap (Map.fromList pairs) Nil
assoc (MalHashMap hm _:kvs) = do
pairs <- _pairup kvs
return $ MalHashMap (Map.union (Map.fromList pairs) hm) Nil
assoc _ = throwStr "invalid call to assoc"
dissoc (MalHashMap hm _:ks) = do
let remover = (\hm (MalString k) -> Map.delete k hm) in
return $ MalHashMap (foldl remover hm ks) Nil
dissoc _ = throwStr "invalid call to dissoc"
get (MalHashMap hm _:MalString k:[]) = do
case Map.lookup k hm of
Just mv -> return mv
Nothing -> return Nil
get (Nil:MalString k:[]) = return Nil
get _ = throwStr "invalid call to get"
contains_Q (MalHashMap hm _:MalString k:[]) = do
if Map.member k hm then return MalTrue
else return MalFalse
contains_Q (Nil:MalString k:[]) = return MalFalse
contains_Q _ = throwStr "invalid call to contains?"
keys (MalHashMap hm _:[]) = do
return $ MalList (map MalString (Map.keys hm)) Nil
keys _ = throwStr "invalid call to keys"
vals (MalHashMap hm _:[]) = do
return $ MalList (Map.elems hm) Nil
vals _ = throwStr "invalid call to vals"
-- Sequence functions
_sequential_Q (MalList _ _) = MalTrue
_sequential_Q (MalVector _ _) = MalTrue
_sequential_Q _ = MalFalse
cons x Nil = MalList [x] Nil
cons x (MalList lst _) = MalList (x:lst) Nil
cons x (MalVector lst _) = MalList (x:lst) Nil
concat1 a (MalList lst _) = a ++ lst
concat1 a (MalVector lst _) = a ++ lst
do_concat args = return $ MalList (foldl concat1 [] args) Nil
nth ((MalList lst _):(MalNumber idx):[]) = do
if idx < length lst then return $ lst !! idx
else throwStr "nth: index out of range"
nth ((MalVector lst _):(MalNumber idx):[]) = do
if idx < length lst then return $ lst !! idx
else throwStr "nth: index out of range"
nth _ = throwStr "invalid call to nth"
first Nil = Nil
first (MalList lst _) = if length lst > 0 then lst !! 0 else Nil
first (MalVector lst _) = if length lst > 0 then lst !! 0 else Nil
rest Nil = MalList [] Nil
rest (MalList lst _) = MalList (drop 1 lst) Nil
rest (MalVector lst _) = MalList (drop 1 lst) Nil
empty_Q Nil = MalTrue
empty_Q (MalList [] _) = MalTrue
empty_Q (MalVector [] _) = MalTrue
empty_Q _ = MalFalse
count (Nil:[]) = return $ MalNumber 0
count (MalList lst _:[]) = return $ MalNumber $ length lst
count (MalVector lst _:[]) = return $ MalNumber $ length lst
count _ = throwStr $ "non-sequence passed to count"
apply args = do
f <- _get_call args
lst <- _to_list (last args)
f $ (init (drop 1 args)) ++ lst
do_map args = do
f <- _get_call args
lst <- _to_list (args !! 1)
do new_lst <- mapM (\x -> f [x]) lst
return $ MalList new_lst Nil
conj ((MalList lst _):args) = return $ MalList ((reverse args) ++ lst) Nil
conj ((MalVector lst _):args) = return $ MalVector (lst ++ args) Nil
conj _ = throwStr $ "illegal arguments to conj"
do_seq (l@(MalList [] _):[]) = return $ Nil
do_seq (l@(MalList lst m):[]) = return $ l
do_seq (MalVector [] _:[]) = return $ Nil
do_seq (MalVector lst _:[]) = return $ MalList lst Nil
do_seq (MalString []:[]) = return $ Nil
do_seq (MalString s:[]) = return $ MalList [MalString [c] | c <- s] Nil
do_seq (Nil:[]) = return $ Nil
do_seq _ = throwStr $ "seq: called on non-sequence"
-- Metadata functions
with_meta ((MalList lst _):m:[]) = return $ MalList lst m
with_meta ((MalVector lst _):m:[]) = return $ MalVector lst m
with_meta ((MalHashMap hm _):m:[]) = return $ MalHashMap hm m
with_meta ((MalAtom atm _):m:[]) = return $ MalAtom atm m
with_meta ((Func f _):m:[]) = return $ Func f m
with_meta ((MalFunc {fn=f, ast=a, env=e, params=p, macro=mc}):m:[]) = do
return $ MalFunc {fn=f, ast=a, env=e, params=p, macro=mc, meta=m}
with_meta _ = throwStr $ "invalid with-meta call"
do_meta ((MalList _ m):[]) = return m
do_meta ((MalVector _ m):[]) = return m
do_meta ((MalHashMap _ m):[]) = return m
do_meta ((MalAtom _ m):[]) = return m
do_meta ((Func _ m):[]) = return m
do_meta ((MalFunc {meta=m}):[]) = return m
do_meta _ = throwStr $ "invalid meta call"
-- Atom functions
atom (val:[]) = do
ref <- liftIO $ newIORef val
return $ MalAtom ref Nil
atom _ = throwStr "invalid atom call"
deref (MalAtom ref _:[]) = do
val <- liftIO $ readIORef ref
return val
deref _ = throwStr "invalid deref call"
reset_BANG (MalAtom ref _:val:[]) = do
liftIO $ writeIORef ref $ val
return val
reset_BANG _ = throwStr "invalid reset! call"
swap_BANG (MalAtom ref _:args) = do
val <- liftIO $ readIORef ref
f <- _get_call args
new_val <- f $ [val] ++ (tail args)
_ <- liftIO $ writeIORef ref $ new_val
return new_val
ns = [
("=", _func equal_Q),
("throw", _func throw),
("nil?", _func $ run_1 $ _nil_Q),
("true?", _func $ run_1 $ _true_Q),
("false?", _func $ run_1 $ _false_Q),
("string?", _func $ run_1 $ _string_Q),
("symbol", _func $ symbol),
("symbol?", _func $ run_1 $ _symbol_Q),
("keyword", _func $ keyword),
("keyword?", _func $ run_1 $ _keyword_Q),
("pr-str", _func pr_str),
("str", _func str),
("prn", _func prn),
("println", _func println),
("readline", _func do_readline),
("read-string", _func (\[(MalString s)] -> read_str s)),
("slurp", _func slurp),
("<", _func $ cmp_op (<)),
("<=", _func $ cmp_op (<=)),
(">", _func $ cmp_op (>)),
(">=", _func $ cmp_op (>=)),
("+", _func $ num_op (+)),
("-", _func $ num_op (-)),
("*", _func $ num_op (*)),
("/", _func $ num_op (div)),
("time-ms", _func $ time_ms),
("list", _func $ list),
("list?", _func $ run_1 _list_Q),
("vector", _func $ vector),
("vector?", _func $ run_1 _vector_Q),
("hash-map", _func $ hash_map),
("map?", _func $ run_1 _hash_map_Q),
("assoc", _func $ assoc),
("dissoc", _func $ dissoc),
("get", _func $ get),
("contains?",_func $ contains_Q),
("keys", _func $ keys),
("vals", _func $ vals),
("sequential?", _func $ run_1 _sequential_Q),
("cons", _func $ run_2 $ cons),
("concat", _func $ do_concat),
("nth", _func nth),
("first", _func $ run_1 $ first),
("rest", _func $ run_1 $ rest),
("empty?", _func $ run_1 $ empty_Q),
("count", _func $ count),
("apply", _func $ apply),
("map", _func $ do_map),
("conj", _func $ conj),
("seq", _func $ do_seq),
("with-meta", _func $ with_meta),
("meta", _func $ do_meta),
("atom", _func $ atom),
("atom?", _func $ run_1 _atom_Q),
("deref", _func $ deref),
("reset!", _func $ reset_BANG),
("swap!", _func $ swap_BANG)]
|
jwalsh/mal
|
haskell/Core.hs
|
mpl-2.0
| 9,477
| 0
| 14
| 2,249
| 4,170
| 2,153
| 2,017
| 234
| 3
|
{-
(c) The University of Glasgow 2006
(c) The AQUA Project, Glasgow University, 1994-1998
Desugaring foreign calls
-}
{-# LANGUAGE CPP #-}
module Language.Haskell.Liquid.Desugar710.DsCCall
( dsCCall
, mkFCall
, unboxArg
, boxResult
, resultWrapper
) where
-- #include "HsVersions.h"
import CoreSyn
import DsMonad
import CoreUtils
import MkCore
import Var
import MkId
import ForeignCall
import DataCon
import TcType
import Type
import Coercion
import PrimOp
import TysPrim
import TyCon
import TysWiredIn
import BasicTypes
import Literal
import PrelNames
import VarSet
import DynFlags
import Outputable
import Util
import Data.Maybe
{-
Desugaring of @ccall@s consists of adding some state manipulation,
unboxing any boxed primitive arguments and boxing the result if
desired.
The state stuff just consists of adding in
@PrimIO (\ s -> case s of { S# s# -> ... })@ in an appropriate place.
The unboxing is straightforward, as all information needed to unbox is
available from the type. For each boxed-primitive argument, we
transform:
\begin{verbatim}
_ccall_ foo [ r, t1, ... tm ] e1 ... em
|
|
V
case e1 of { T1# x1# ->
...
case em of { Tm# xm# -> xm#
ccall# foo [ r, t1#, ... tm# ] x1# ... xm#
} ... }
\end{verbatim}
The reboxing of a @_ccall_@ result is a bit trickier: the types don't
contain information about the state-pairing functions so we have to
keep a list of \tr{(type, s-p-function)} pairs. We transform as
follows:
\begin{verbatim}
ccall# foo [ r, t1#, ... tm# ] e1# ... em#
|
|
V
\ s# -> case (ccall# foo [ r, t1#, ... tm# ] s# e1# ... em#) of
(StateAnd<r># result# state#) -> (R# result#, realWorld#)
\end{verbatim}
-}
dsCCall :: CLabelString -- C routine to invoke
-> [CoreExpr] -- Arguments (desugared)
-> Safety -- Safety of the call
-> Type -- Type of the result: IO t
-> DsM CoreExpr -- Result, of type ???
dsCCall lbl args may_gc result_ty
= do (unboxed_args, arg_wrappers) <- mapAndUnzipM unboxArg args
(ccall_result_ty, res_wrapper) <- boxResult result_ty
uniq <- newUnique
dflags <- getDynFlags
let
target = StaticTarget lbl Nothing True
the_fcall = CCall (CCallSpec target CCallConv may_gc)
the_prim_app = mkFCall dflags uniq the_fcall unboxed_args ccall_result_ty
return (foldr ($) (res_wrapper the_prim_app) arg_wrappers)
mkFCall :: DynFlags -> Unique -> ForeignCall
-> [CoreExpr] -- Args
-> Type -- Result type
-> CoreExpr
-- Construct the ccall. The only tricky bit is that the ccall Id should have
-- no free vars, so if any of the arg tys do we must give it a polymorphic type.
-- [I forget *why* it should have no free vars!]
-- For example:
-- mkCCall ... [s::StablePtr (a->b), x::Addr, c::Char]
--
-- Here we build a ccall thus
-- (ccallid::(forall a b. StablePtr (a -> b) -> Addr -> Char -> IO Addr))
-- a b s x c
mkFCall dflags uniq the_fcall val_args res_ty
= mkApps (mkVarApps (Var the_fcall_id) tyvars) val_args
where
arg_tys = map exprType val_args
body_ty = (mkFunTys arg_tys res_ty)
tyvars = varSetElems (tyVarsOfType body_ty)
ty = mkForAllTys tyvars body_ty
the_fcall_id = mkFCallId dflags uniq the_fcall ty
unboxArg :: CoreExpr -- The supplied argument
-> DsM (CoreExpr, -- To pass as the actual argument
CoreExpr -> CoreExpr -- Wrapper to unbox the arg
)
-- Example: if the arg is e::Int, unboxArg will return
-- (x#::Int#, \W. case x of I# x# -> W)
-- where W is a CoreExpr that probably mentions x#
unboxArg arg
  -- Primitive types: nothing to unbox
| isPrimitiveType arg_ty
= return (arg, \body -> body)
-- Recursive newtypes
| Just(co, _rep_ty) <- topNormaliseNewType_maybe arg_ty
= unboxArg (mkCast arg co)
-- Booleans
| Just tc <- tyConAppTyCon_maybe arg_ty,
tc `hasKey` boolTyConKey
= do dflags <- getDynFlags
prim_arg <- newSysLocalDs intPrimTy
return (Var prim_arg,
\ body -> Case (mkWildCase arg arg_ty intPrimTy
[(DataAlt falseDataCon,[],mkIntLit dflags 0),
(DataAlt trueDataCon, [],mkIntLit dflags 1)])
-- In increasing tag order!
prim_arg
(exprType body)
[(DEFAULT,[],body)])
-- Data types with a single constructor, which has a single, primitive-typed arg
-- This deals with Int, Float etc; also Ptr, ForeignPtr
| is_product_type && data_con_arity == 1
= -- ASSERT2(isUnLiftedType data_con_arg_ty1, pprType arg_ty)
-- Typechecker ensures this
do case_bndr <- newSysLocalDs arg_ty
prim_arg <- newSysLocalDs data_con_arg_ty1
return (Var prim_arg,
\ body -> Case arg case_bndr (exprType body) [(DataAlt data_con,[prim_arg],body)]
)
-- Byte-arrays, both mutable and otherwise; hack warning
-- We're looking for values of type ByteArray, MutableByteArray
-- data ByteArray ix = ByteArray ix ix ByteArray#
-- data MutableByteArray s ix = MutableByteArray ix ix (MutableByteArray# s)
| is_product_type &&
data_con_arity == 3 &&
isJust maybe_arg3_tycon &&
(arg3_tycon == byteArrayPrimTyCon ||
arg3_tycon == mutableByteArrayPrimTyCon)
= do case_bndr <- newSysLocalDs arg_ty
vars@[_l_var, _r_var, arr_cts_var] <- newSysLocalsDs data_con_arg_tys
return (Var arr_cts_var,
\ body -> Case arg case_bndr (exprType body) [(DataAlt data_con,vars,body)]
)
| otherwise
= do l <- getSrcSpanDs
pprPanic "unboxArg: " (ppr l <+> ppr arg_ty)
where
arg_ty = exprType arg
maybe_product_type = splitDataProductType_maybe arg_ty
is_product_type = isJust maybe_product_type
Just (_, _, data_con, data_con_arg_tys) = maybe_product_type
data_con_arity = dataConSourceArity data_con
(data_con_arg_ty1 : _) = data_con_arg_tys
(_ : _ : data_con_arg_ty3 : _) = data_con_arg_tys
maybe_arg3_tycon = tyConAppTyCon_maybe data_con_arg_ty3
Just arg3_tycon = maybe_arg3_tycon
boxResult :: Type
-> DsM (Type, CoreExpr -> CoreExpr)
-- Takes the result of the user-level ccall:
-- either (IO t),
--              or maybe just t for a side-effect-free call
-- Returns a wrapper for the primitive ccall itself, along with the
-- type of the result of the primitive ccall. This result type
-- will be of the form
-- State# RealWorld -> (# State# RealWorld, t' #)
-- where t' is the unwrapped form of t. If t is simply (), then
-- the result type will be
-- State# RealWorld -> (# State# RealWorld #)
boxResult result_ty
| Just (io_tycon, io_res_ty) <- tcSplitIOType_maybe result_ty
        -- tcSplitIOType_maybe handles the case where the type is a
-- simple wrapping of IO. E.g.
-- newtype Wrap a = W (IO a)
        -- No coercion necessary because it's a non-recursive newtype
-- (If we wanted to handle a *recursive* newtype too, we'd need
-- another case, and a coercion.)
-- The result is IO t, so wrap the result in an IO constructor
= do { res <- resultWrapper io_res_ty
; let extra_result_tys
= case res of
(Just ty,_)
| isUnboxedTupleType ty
-> let Just ls = tyConAppArgs_maybe ty in tail ls
_ -> []
return_result state anss
= mkCoreConApps (tupleCon UnboxedTuple (2 + length extra_result_tys))
(map Type (realWorldStatePrimTy : io_res_ty : extra_result_tys)
++ (state : anss))
; (ccall_res_ty, the_alt) <- mk_alt return_result res
; state_id <- newSysLocalDs realWorldStatePrimTy
; let io_data_con = head (tyConDataCons io_tycon)
toIOCon = dataConWrapId io_data_con
wrap the_call =
mkApps (Var toIOCon)
[ Type io_res_ty,
Lam state_id $
mkWildCase (App the_call (Var state_id))
ccall_res_ty
(coreAltType the_alt)
[the_alt]
]
; return (realWorldStatePrimTy `mkFunTy` ccall_res_ty, wrap) }
boxResult result_ty
= do -- It isn't IO, so do unsafePerformIO
-- It's not conveniently available, so we inline it
res <- resultWrapper result_ty
(ccall_res_ty, the_alt) <- mk_alt return_result res
let
wrap = \ the_call -> mkWildCase (App the_call (Var realWorldPrimId))
ccall_res_ty
(coreAltType the_alt)
[the_alt]
return (realWorldStatePrimTy `mkFunTy` ccall_res_ty, wrap)
where
return_result _ [ans] = ans
return_result _ _ = panic "return_result: expected single result"
mk_alt :: (Expr Var -> [Expr Var] -> Expr Var)
-> (Maybe Type, Expr Var -> Expr Var)
-> DsM (Type, (AltCon, [Id], Expr Var))
mk_alt return_result (Nothing, wrap_result)
= do -- The ccall returns ()
state_id <- newSysLocalDs realWorldStatePrimTy
let
the_rhs = return_result (Var state_id)
[wrap_result (panic "boxResult")]
ccall_res_ty = mkTyConApp unboxedSingletonTyCon [realWorldStatePrimTy]
the_alt = (DataAlt unboxedSingletonDataCon, [state_id], the_rhs)
return (ccall_res_ty, the_alt)
mk_alt return_result (Just prim_res_ty, wrap_result)
-- The ccall returns a non-() value
| isUnboxedTupleType prim_res_ty= do
let
Just ls = tyConAppArgs_maybe prim_res_ty
arity = 1 + length ls
args_ids@(result_id:as) <- mapM newSysLocalDs ls
state_id <- newSysLocalDs realWorldStatePrimTy
let
the_rhs = return_result (Var state_id)
(wrap_result (Var result_id) : map Var as)
ccall_res_ty = mkTyConApp (tupleTyCon UnboxedTuple arity)
(realWorldStatePrimTy : ls)
the_alt = ( DataAlt (tupleCon UnboxedTuple arity)
, (state_id : args_ids)
, the_rhs
)
return (ccall_res_ty, the_alt)
| otherwise = do
result_id <- newSysLocalDs prim_res_ty
state_id <- newSysLocalDs realWorldStatePrimTy
let
the_rhs = return_result (Var state_id)
[wrap_result (Var result_id)]
ccall_res_ty = mkTyConApp unboxedPairTyCon [realWorldStatePrimTy, prim_res_ty]
the_alt = (DataAlt unboxedPairDataCon, [state_id, result_id], the_rhs)
return (ccall_res_ty, the_alt)
resultWrapper :: Type
-> DsM (Maybe Type, -- Type of the expected result, if any
CoreExpr -> CoreExpr) -- Wrapper for the result
-- resultWrapper deals with the result *value*
-- E.g. foreign import foo :: Int -> IO T
-- Then resultWrapper deals with marshalling the 'T' part
resultWrapper result_ty
-- Base case 1: primitive types
| isPrimitiveType result_ty
= return (Just result_ty, \e -> e)
-- Base case 2: the unit type ()
| Just (tc,_) <- maybe_tc_app, tc `hasKey` unitTyConKey
= return (Nothing, \_ -> Var unitDataConId)
-- Base case 3: the boolean type
| Just (tc,_) <- maybe_tc_app, tc `hasKey` boolTyConKey
= do
dflags <- getDynFlags
return
(Just intPrimTy, \e -> mkWildCase e intPrimTy
boolTy
[(DEFAULT ,[],Var trueDataConId ),
(LitAlt (mkMachInt dflags 0),[],Var falseDataConId)])
-- Newtypes
| Just (co, rep_ty) <- topNormaliseNewType_maybe result_ty
= do (maybe_ty, wrapper) <- resultWrapper rep_ty
return (maybe_ty, \e -> mkCast (wrapper e) (mkSymCo co))
-- The type might contain foralls (eg. for dummy type arguments,
-- referring to 'Ptr a' is legal).
| Just (tyvar, rest) <- splitForAllTy_maybe result_ty
= do (maybe_ty, wrapper) <- resultWrapper rest
return (maybe_ty, \e -> Lam tyvar (wrapper e))
-- Data types with a single constructor, which has a single arg
-- This includes types like Ptr and ForeignPtr
| Just (tycon, tycon_arg_tys, data_con, data_con_arg_tys) <- splitDataProductType_maybe result_ty,
dataConSourceArity data_con == 1
= do dflags <- getDynFlags
let
(unwrapped_res_ty : _) = data_con_arg_tys
narrow_wrapper = maybeNarrow dflags tycon
(maybe_ty, wrapper) <- resultWrapper unwrapped_res_ty
return
(maybe_ty, \e -> mkApps (Var (dataConWrapId data_con))
(map Type tycon_arg_tys ++ [wrapper (narrow_wrapper e)]))
| otherwise
= pprPanic "resultWrapper" (ppr result_ty)
where
maybe_tc_app = splitTyConApp_maybe result_ty
-- When the result of a foreign call is smaller than the word size, we
-- need to sign- or zero-extend the result up to the word size. The C
-- standard appears to say that this is the responsibility of the
-- caller, not the callee.
maybeNarrow :: DynFlags -> TyCon -> (CoreExpr -> CoreExpr)
maybeNarrow dflags tycon
| tycon `hasKey` int8TyConKey = \e -> App (Var (mkPrimOpId Narrow8IntOp)) e
| tycon `hasKey` int16TyConKey = \e -> App (Var (mkPrimOpId Narrow16IntOp)) e
| tycon `hasKey` int32TyConKey
&& wORD_SIZE dflags > 4 = \e -> App (Var (mkPrimOpId Narrow32IntOp)) e
| tycon `hasKey` word8TyConKey = \e -> App (Var (mkPrimOpId Narrow8WordOp)) e
| tycon `hasKey` word16TyConKey = \e -> App (Var (mkPrimOpId Narrow16WordOp)) e
| tycon `hasKey` word32TyConKey
&& wORD_SIZE dflags > 4 = \e -> App (Var (mkPrimOpId Narrow32WordOp)) e
| otherwise = id
|
spinda/liquidhaskell
|
src/Language/Haskell/Liquid/Desugar710/DsCCall.hs
|
bsd-3-clause
| 14,609
| 0
| 19
| 4,611
| 2,842
| 1,475
| 1,367
| 224
| 3
|
--
-- Copyright (c) 2012 Citrix Systems, Inc.
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, write to the Free Software
-- Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
--
{-# LANGUAGE ScopedTypeVariables #-}
module VhdSync
( SyncProcess
, SyncEvent(..)
, SyncStage(..)
, SyncState(..)
, SyncStateChangeReason(..)
, SyncResultDetails(..)
, SyncResultReason(..)
, SyncAdmctlDetails(..)
, SyncSrvcksumDetails(..)
, SyncError(..)
, SyncProgress(..)
, startDownload, startUpload
, waitSyncEvent
, cleanupSyncProcess
, pauseSyncProcess
, resumeSyncProcess
) where
import Data.IORef
import Data.List
import Control.Concurrent
import Control.Monad
import Control.Applicative
import Control.Monad.Trans
import qualified Control.Exception as E
import System.FilePath
import System.IO
import qualified System.Posix.Process as P
import System.Posix.Types
import System.Posix.Signals
import System.Exit
import System.Timeout
import Tools.Log
import Tools.Misc
import Tools.VhdSyncC
import qualified Network.Socket as N
import Types
data Key = Key { keyPath :: FilePath }
data Mode = Input | Output
data Header = Header String String
data SyncOption = ZeroArgOption String
| OneArgOption String String
data SyncProcess =
SyncProcess {
handle :: ProcessID
, controlSocket :: N.Socket
, recvBuffer :: IORef String
, cleaningup :: IORef Bool
, syncMode :: Mode
, processStatus :: MVar P.ProcessStatus
}
data SyncProgress = SyncProgress {
progressTime :: Double
, progressRemaining :: Double
, progressTotal :: Double
, progressUlSpeed :: Double
, progressDlSpeed :: Double
} deriving ( Eq, Show )
data XferProgress = XferProgress {
xferTime :: Double
, xferDlTotal :: Double
, xferDlNow :: Double
, xferDlSpeed :: Double
, xferUlTotal :: Double
, xferUlNow :: Double
, xferUlSpeed :: Double
} deriving ( Eq, Show )
syncProgressFromXfer :: Mode -> XferProgress -> SyncProgress
syncProgressFromXfer Output xfer = SyncProgress (xferTime xfer) (xferDlTotal xfer - xferDlNow xfer) (xferDlTotal xfer) (xferUlSpeed xfer) (xferDlSpeed xfer)
syncProgressFromXfer Input xfer = SyncProgress (xferTime xfer) (xferUlTotal xfer - xferUlNow xfer) (xferUlTotal xfer) (xferUlSpeed xfer) (xferDlSpeed xfer)
data SyncStateChangeDetails = SyncStateChangeDetails {
scState :: SyncState
, scReason :: SyncStateChangeReason
} deriving ( Eq, Show )
data SyncState = Running
| Paused deriving ( Eq, Show, Enum )
data SyncStateChangeReason = None
| User
| NetworkInterruption
| AdmissionControl
| ServerChecksumming deriving ( Eq, Show, Enum )
type SyncResultCode = Int --0 is OK, anything else is an error
data SyncResultReason = Success --OK
| FileAccessError --Image file could not be opened (filer could have gone down, etc.)
| InvalidChunkParams --Invalid parameters for chunked GET
| InvalidRange --Invalid range specified for ranged GET
| MalformedRangeHeader --Malformed range header for ranged GET
| TransferSizeTooLarge --Transfer size greater than 128GB
| FileNotUnique --Specified filename is not unique in the image repository
| MalformedURI --Malformed request URI
| FileNotExists --File does not exist in the specified image repository
| FileNotTracked --File is not tracked in the backend database
| InsufficientSpace --Not enough disk space in image repository to complete the transfer
| InvalidRpc --Specified RPC does not map to one of our exposed services
| InvalidRpcMethod --Specified RPC method does not map to one of the methods on the specified service
| InvalidImageState --Image file in bad state (e.g., FINAL for upload or PARTIAL for download)
| DigestMalformed --Checksum specification invalid (syntax of checksum line wrong)
| DigestMismatch --Checksums on client/server don't match
| IncompatibleVersion --Client version is not compatible with the server version
| InvalidKey --Key validation of encrypted image failed
| AccessDenied --Cannot grant access to the file (anymore)
| HTTPNotAllowed --HTTP transfer requested but not HTTP not configured
| TooManyXfers --Too many transfers are happening right now
| AdmissionError --Couldn't talk to the admission daemon
| DbAccessNotAllowed --Tried to use the DB when it was not allowed
| InvalidGenNumber --Image client generation number does not match that on the server
| ImageCoalesced --Image has been coalesced on the server
| ImageAlreadyExists --Upload was attempted but image already exists on the server.
| LocalChecksumFailureDvd --Local Checksum failed on DVD
| LocalChecksumFailureUsb --Local Checksum failed on USB
| LocalChecksumNotFoundDvd --Local Checksum not found on DVD
| LocalChecksumNotFoundUsb --Local Checksum not found on USB
| LocalChecksumPartFailDvd --Local Checksum Partial Failure on DVD
| LocalChecksumPartFailUsb --Local Checksum Partial Failure USB
| LocalChecksumMalformedDvd --Local Checksum Malformed on DVD
| LocalChecksumMalformedUsb --Local Checksum Malformed on USB
deriving ( Eq, Show )
instance Enum SyncResultReason where
toEnum 0 = Success
toEnum 101 = FileAccessError
toEnum 102 = InvalidChunkParams
toEnum 103 = InvalidRange
toEnum 104 = MalformedRangeHeader
toEnum 105 = TransferSizeTooLarge
toEnum 106 = FileNotUnique
toEnum 107 = MalformedURI
toEnum 108 = FileNotExists
toEnum 109 = FileNotTracked
toEnum 110 = InsufficientSpace
toEnum 111 = InvalidRpc
toEnum 112 = InvalidRpcMethod
toEnum 113 = InvalidImageState
toEnum 114 = DigestMalformed
toEnum 115 = DigestMismatch
toEnum 116 = IncompatibleVersion
toEnum 117 = InvalidKey
toEnum 118 = AccessDenied
toEnum 119 = HTTPNotAllowed
toEnum 120 = TooManyXfers
toEnum 121 = AdmissionError
toEnum 122 = DbAccessNotAllowed
toEnum 123 = ImageCoalesced
toEnum 124 = InvalidGenNumber
toEnum 125 = ImageAlreadyExists
toEnum (-1) = LocalChecksumFailureDvd
toEnum (-2) = LocalChecksumFailureUsb
toEnum (-3) = LocalChecksumNotFoundDvd
toEnum (-4) = LocalChecksumNotFoundUsb
toEnum (-5) = LocalChecksumPartFailDvd
toEnum (-6) = LocalChecksumPartFailUsb
toEnum (-7) = LocalChecksumMalformedDvd
toEnum (-8) = LocalChecksumMalformedUsb
toEnum x = error ("unknown vhd-sync result reason " ++ show x )
fromEnum Success = 0
fromEnum FileAccessError = 101
fromEnum InvalidChunkParams = 102
fromEnum InvalidRange = 103
fromEnum MalformedRangeHeader = 104
fromEnum TransferSizeTooLarge = 105
fromEnum FileNotUnique = 106
fromEnum MalformedURI = 107
fromEnum FileNotExists = 108
fromEnum FileNotTracked = 109
fromEnum InsufficientSpace = 110
fromEnum InvalidRpc = 111
fromEnum InvalidRpcMethod = 112
fromEnum InvalidImageState = 113
fromEnum DigestMalformed = 114
fromEnum DigestMismatch = 115
fromEnum IncompatibleVersion = 116
fromEnum InvalidKey = 117
fromEnum AccessDenied = 118
fromEnum HTTPNotAllowed = 119
fromEnum TooManyXfers = 120
fromEnum AdmissionError = 121
fromEnum DbAccessNotAllowed = 122
fromEnum ImageCoalesced = 123
fromEnum InvalidGenNumber = 124
fromEnum ImageAlreadyExists = 125
fromEnum LocalChecksumFailureDvd = (-1)
fromEnum LocalChecksumFailureUsb = (-2)
fromEnum LocalChecksumNotFoundDvd = (-3)
fromEnum LocalChecksumNotFoundUsb = (-4)
fromEnum LocalChecksumPartFailDvd = (-5)
fromEnum LocalChecksumPartFailUsb = (-6)
fromEnum LocalChecksumMalformedDvd = (-7)
fromEnum LocalChecksumMalformedUsb = (-8)
data SyncResultDetails = SyncResultDetails {
srCode :: SyncResultCode
, srReason :: SyncResultReason
} deriving ( Eq, Show )
data SyncAdmctlDetails = SyncAdmctlDetails {
saTime :: Double --message time
, saNextContact :: Double --number of seconds until next attempt to contact server
, saQueuePosition :: Int --0-based position in the wait queue on server
} deriving ( Eq, Show )
data SyncSrvcksumDetails = SyncSrvcksumDetails {
ckTime :: Double --message time
, ckNextContact :: Double --number of seconds until next attempt to contact server
} deriving ( Eq, Show )
data SyncEvent = Finished
| Failed SyncError
| OnDemandStop
| StageProgress SyncStage SyncProgress
| StateChange SyncState SyncStateChangeReason
| Result SyncResultDetails
| Admctl SyncAdmctlDetails
| ServerCksum SyncSrvcksumDetails
| ClearThroat
| Unexpected String
deriving ( Eq, Show )
data SyncStage = Chksum | Install | Transfer deriving ( Eq, Show )
data SyncError = FailureExitCode Int
| SocketError String
deriving ( Eq, Show )
type EventParser = String -> Maybe SyncEvent
mode_str Input = "input"
mode_str Output = "output"
-- babysits PID and stores the waitpid result in mvar
-- can be only called once, because waitpid (aka P.getProcessStatus here) can be only called once
babysitProcessStatus :: ProcessID -> MVar P.ProcessStatus -> IO ()
babysitProcessStatus pid status_mv =
do x <- E.try $ P.getProcessStatus True False pid
case x of
Left ex -> do
warn $ "trouble waiting for PID " ++ show pid ++ ": " ++ show (ex :: E.SomeException)
repeat
Right Nothing ->
repeat
Right (Just s) ->
putMVar status_mv s
where
repeat = babysitProcessStatus pid status_mv
getSyncProcessStatusNonBlocking :: SyncProcess -> IO (Maybe P.ProcessStatus)
getSyncProcessStatusNonBlocking process = do
-- non blocking case
s <- tryTakeMVar (processStatus process)
case s of
Nothing -> return Nothing -- don't have status yet
Just s -> putMVar (processStatus process) s >> (return . Just $ s) -- recycle the status for further reads
getSyncProcessStatusBlocking :: SyncProcess -> IO P.ProcessStatus
getSyncProcessStatusBlocking process =
-- blocking case we just try to grab mvar
readMVar (processStatus process)
getSyncProcessStatusTimeoutSecs :: Int -> SyncProcess -> IO (Maybe ExitCode)
getSyncProcessStatusTimeoutSecs t process
| t <= 0 = return Nothing
| otherwise = when_code =<< getSyncProcessStatusNonBlocking process
where
when_code Nothing = threadDelay (10^6) >> getSyncProcessStatusTimeoutSecs (t-1) process
when_code (Just c) = return $ Just (fromProcessStatus c)
terminateProcess :: ProcessID -> IO ()
terminateProcess pid =
signalProcess sigTERM pid
`E.catch`
( \ex -> warn $ "trouble sending sigTERM to sync: " ++ show (ex :: E.SomeException) )
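-- Translate a waitpid status into an ExitCode, following the shell convention
-- of reporting termination (or stop) by signal N as exit code 128+N.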
fromProcessStatus :: P.ProcessStatus -> ExitCode
fromProcessStatus (P.Exited c) = c
fromProcessStatus (P.Terminated s) = ExitFailure $ fromIntegral s + 128
fromProcessStatus (P.Stopped s) = ExitFailure $ fromIntegral s + 128
makeHeaders :: TransferCtxID -> [Header]
makeHeaders (TransferCtxID ctx_id) =
[ Header "X-XCBE-Xfer-Context" ctx_id ]
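-- Build command-line options: makeFlags keeps the flag of every (True, flag)
-- pair as a zero-argument option; makeIntArgs turns (value, flag) pairs into
-- one-argument options carrying the shown value.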
makeFlags :: [(Bool, String)] -> [SyncOption]
makeFlags optsMap = map (ZeroArgOption . snd) (filter fst optsMap)
makeIntArgs :: [(Int, String)] -> [SyncOption]
makeIntArgs optsMap = map (\(x,y) -> OneArgOption y (show x)) optsMap
makeCommonOptions :: CommonTransferOptions -> [SyncOption]
makeCommonOptions CommonTransferOptions {chunkSize=cksize, logLevel=loglevel, verbose=verbose, lowspeedLimit=lowspeed, lowspeedTime=lowtime, connectTimeout=ctimeout} = makeFlags flagsMap ++ makeIntArgs argsMap
where
flagsMap = [(verbose, "--verbose")]
argsMap = [(cksize, "--cksize"),
(lowspeed, "--lowspeed"),
(lowtime, "--lowtime"),
(ctimeout, "--ctimeout"),
(loglevel, "--loglevel")]
makeOptions :: TransferOptions -> [SyncOption]
makeOptions UploadOptions {enableCompaction=compact, commonOptions=opts} = makeFlags flagsMap ++ makeCommonOptions opts
where
flagsMap = [(compact, "--compact")]
makeOptions DownloadOptions {encryptLocalImage=encrypt, enableDvdCache=dvdcache, enableUsbCache=usbcache, forceLocalCheckSum=forcelocalchecksum, commonOptions=opts} = makeFlags flagsMap ++ makeCommonOptions opts
where
flagsMap = [(encrypt, "--encrypt-local-vhd"),
(dvdcache, "--enabledvd"),
(usbcache, "--enableusb"),
(forcelocalchecksum, "--force-local-checksum") ]
startDownload :: TransferOptions -> FilePath -> String -> String -> CryptoSpec -> TransferCtxID -> IO SyncProcess
startDownload dl_opts output_file transfer_url rpc_url crypto id =
do info $ "Downloading " ++ output_file ++ " from " ++ transfer_url
let headers = makeHeaders id
let opts = makeOptions dl_opts
run output_file transfer_url rpc_url Output headers crypto opts
startUpload :: TransferOptions -> FilePath -> String -> String -> CryptoSpec -> TransferCtxID -> IO SyncProcess
startUpload ul_opts input_file transfer_url rpc_url crypto id =
do info $ "Uploading " ++ input_file ++ " to " ++ transfer_url
let headers = makeHeaders id
let opts = makeOptions ul_opts
run input_file transfer_url rpc_url Input headers crypto opts
runVhdSync :: Int -> [String] -> IO ProcessID
runVhdSync monitorFd args =
forkCloseAndExec [0,1,2,monitorFd] ("/usr/bin/vhd-sync.compress" : args)
run :: FilePath -> String -> String -> Mode -> [Header] -> CryptoSpec -> [SyncOption] -> IO SyncProcess
run file transfer_url rpc_url mode headers crypto options =
do ( parent, child ) <- N.socketPair N.AF_UNIX N.Stream N.defaultProtocol
info $ "created socket pair " ++ show parent ++ " " ++ show child
info $ "calling vhd-sync with arguments: " ++ (concat . intersperse " " $ args child)
h <- runVhdSync (fromIntegral $ N.fdSocket child) (args child)
E.try $ N.sClose child :: IO (Either E.SomeException ())
buf <- newIORef ""
cup <- newIORef False
status_mv <- newEmptyMVar
let process = SyncProcess { controlSocket = parent
, handle = h
, recvBuffer = buf
, cleaningup = cup
, syncMode = mode
, processStatus = status_mv }
-- run waitpid in separate thread
forkOS . liftIO $ babysitProcessStatus h status_mv
return process
where
args socket
= [ "--" ++ (mode_str mode), file
, "--sparse"
, "--monitorfd", show (N.fdSocket socket)
, "--progress"
, "--forceurl"
, "--cacert", cryptoServerCertPath crypto
, "--cert", cryptoClientCertPath crypto
, "--key", cryptoClientKeyPath crypto
, "--transferurl", transfer_url ]
++ ( concat . map option_arg $ options )
++ ( concat . map header_arg $ headers )
++ [ rpc_url ]
where
option_arg (ZeroArgOption v) = [v]
option_arg (OneArgOption k v) = [k, v]
header_arg (Header k v) = ["--header", k++":"++v]
-- wait for an event from the currently running sync process
-- if the sync process has exited, the return value will be one of: Finished, OnDemandStop, or Failed
-- and we will have cleaned up the sync process
waitSyncEvent :: SyncProcess -> IO SyncEvent
waitSyncEvent process =
when_code =<< getSyncProcessStatusNonBlocking process
where
when_code Nothing = wait_for_event
when_code (Just c) = exit_event c
exit_event es =
do cup <- readIORef (cleaningup process)
if cup
then do return OnDemandStop
else do cleanupSyncSocket process
case fromProcessStatus es of
ExitSuccess -> return Finished
ExitFailure v -> return $ Failed (FailureExitCode v)
wait_for_event =
do r <- buffered_event
case r of
Nothing -> socket_event
Just e -> return e
sock = controlSocket process
quantum = 10^6 -- 1s
socket_event =
do r <- timeout quantum (threadWaitRead . fromIntegral . N.fdSocket $ sock)
case r of
Nothing -> waitSyncEvent process -- timeout
_ -> read_parse_event
buffered_event :: IO (Maybe SyncEvent)
buffered_event =
cutLine (recvBuffer process) >>= return . parse
where
parse Nothing = Nothing
parse (Just l) = Just $ parse_line l
-- read from control socket then
read_parse_event =
do dat <- E.try (N.recv sock 128)
case dat of
-- if there was a problem reading from socket,
-- try to terminate vhd sync, check the exit code to see whether this
             -- was just a graceful exit because the task finished and vhd-sync closed the socket on its end and then quit
Left ( ex :: E.SomeException ) ->
do warn $ "exception: " ++ show ex
cup <- readIORef (cleaningup process)
if cup
then do
warn $ "but it is in cleanup sequence, waiting for cleanup to finish..."
getSyncProcessStatusBlocking process
warn $ "wait done."
return OnDemandStop
else do
status <- cleanupSyncProcess process
case status of
Exited ExitSuccess -> return Finished
_ -> return $ Failed (SocketError $ show ex)
             -- if the read succeeded, parse the event
Right dat ->
do modifyIORef (recvBuffer process) $ \buf -> buf ++ dat
cmd <- buffered_event
case cmd of
Nothing -> waitSyncEvent process
Just evt -> return evt
parse_line :: String -> SyncEvent
parse_line l =
case find_parser l of
Nothing -> Unexpected l
Just parse -> case parse l of
Nothing -> Unexpected l
Just ev -> ev
find_parser :: String -> Maybe EventParser
find_parser line =
foldl' look Nothing parsers
where
look v@(Just _) _ = v
look Nothing p =
case p of
(prefix,parser) | prefix `isPrefixOf` line -> Just parser
_ -> Nothing
parsers :: [ (String,EventParser) ]
parsers =
[ ("cksum_progress" , chksum_pr)
, ("install_progress" , install_pr)
, ("xfer_progress" , xfer_pr)
, ("state_change" , state_pr)
, ("result" , result_pr)
, ("admctl" , admctl_pr)
, ("server_checksumming", srvcksum_pr)
, ("clear throat" , clearthroat_pr)
]
where
chksum_pr str =
do x <- parseProgress . skipWord $ str
return $ StageProgress Chksum x
install_pr str =
do x <- parseProgress . skipWord $ str
return $ StageProgress Install x
xfer_pr str =
do x <- parseXferProgress . skipWord $ str
return $ StageProgress Transfer (syncProgressFromXfer mode x)
state_pr str =
do x <- parseStateChange . skipWord $ str
return $ StateChange (scState x) (scReason x)
result_pr str =
do x <- parseResult . skipWord $ str
return $ Result x
admctl_pr str =
do x <- parseAdmctl . skipWord $ str
return $ Admctl x
srvcksum_pr str =
do x <- parseServerCksum . skipWord $ str
return $ ServerCksum x
clearthroat_pr str =
return ClearThroat
mode = syncMode process
data SyncExit = Exited ExitCode
| BrutallyKilled
cleanupSyncSocket :: SyncProcess -> IO ()
cleanupSyncSocket process =
do -- set cleanup flag, blow the sockets, ignore errors
writeIORef (cleaningup process) True
info "closing control socket"
N.sClose (controlSocket process)
`E.catch`
( \ex ->
warn $ "trouble closing control socket: " ++ show (ex :: E.SomeException) )
cleanupSyncProcess :: SyncProcess -> IO SyncExit
cleanupSyncProcess process =
do cleanupSyncSocket process
info "waiting for vhd sync to exit gracefully.."
-- give it 5s benefit of doubt
done <- getSyncProcessStatusTimeoutSecs 5 process
case done of
(Just excode) -> do
info $ "vhd sync exited with: " ++ show excode
return $ Exited excode
Nothing -> do
info "killing vhd sync"
terminateProcess (handle process)
done_again <- getSyncProcessStatusTimeoutSecs 5 process
case done_again of
Just _ -> info "killed"
Nothing -> warn "failed to kill vhd sync!"
return BrutallyKilled
-- Send a state change command to vhd-sync
-- log errors, but otherwise happily ignore
-- defer to waitSyncEvent for error handling
syncStateChange :: SyncProcess -> SyncState -> IO ()
syncStateChange process state =
do dat <- E.try (N.send sock cmd)
case dat of
Left ( ex :: E.SomeException ) ->
warn $ "exception: " ++ show ex
Right sent ->
when ( sent /= length cmd ) $ warn msg
where
msg = "only " ++ show sent ++ " bytes sent; tried to send " ++ show (length cmd)
where
sock = controlSocket process
cmd = "state_change " ++ (show $ fromEnum state) ++ "\n"
pauseSyncProcess :: SyncProcess -> IO ()
pauseSyncProcess process = syncStateChange process Paused
resumeSyncProcess :: SyncProcess -> IO ()
resumeSyncProcess process = syncStateChange process Running
parseInt :: String -> Maybe Int
parseInt str =
case reads str of
((v,_):_) -> Just v
_ -> Nothing
parseFloat :: String -> Maybe Double
parseFloat str =
case reads str of
((v,_):_) -> Just v
_ -> Nothing
skipWord :: String -> String
skipWord s =
case words s of
(_:ws) -> concat . intersperse " " $ ws
_ -> s
parseProgress :: String -> Maybe SyncProgress
parseProgress str =
case words str of
[time_str,rem_str,total_str] ->
do time <- parseFloat time_str
rem <- parseFloat rem_str
total <- parseFloat total_str
return $ SyncProgress time rem total 0 0
_ -> Nothing
parseXferProgress :: String -> Maybe XferProgress
parseXferProgress str =
case words str of
[time_str, dl_tot_str, dl_now_str, dl_speed_str, ul_tot_str, ul_now_str, ul_speed_str] ->
do time <- parseFloat time_str
dl_tot <- parseFloat dl_tot_str
dl_now <- parseFloat dl_now_str
dl_speed <- parseFloat dl_speed_str
ul_tot <- parseFloat ul_tot_str
ul_now <- parseFloat ul_now_str
ul_speed <- parseFloat ul_speed_str
return $ XferProgress time dl_tot dl_now dl_speed ul_tot ul_now ul_speed
_ -> Nothing
parseStateChange :: String -> Maybe SyncStateChangeDetails
parseStateChange str =
case words str of
[state_str,reason_str] ->
do state <- parseInt state_str
reason <- parseInt reason_str
return $ SyncStateChangeDetails (toEnum state) (toEnum reason)
_ -> Nothing
-- result messages have the format "result RESULT_CODE ERROR_CODE RESULT_MSG", for example:
-- "result 0 0 Success."
-- "result 4 110 Transfer failed."
parseResult :: String -> Maybe SyncResultDetails
parseResult str =
case (take 2 $ words str) of
[result_str,reason_str] ->
do result <- parseInt result_str
reason <- parseInt reason_str
return $ SyncResultDetails (toEnum result) (toEnum reason)
_ -> Nothing
parseAdmctl :: String -> Maybe SyncAdmctlDetails
parseAdmctl str =
case words str of
[time_str,nextcontact_str,qpos_str] ->
do time <- parseFloat time_str
nextcontact <- parseFloat nextcontact_str
qpos <- parseFloat qpos_str
return $ SyncAdmctlDetails time nextcontact (truncate qpos)
_ -> Nothing
parseServerCksum :: String -> Maybe SyncSrvcksumDetails
parseServerCksum str =
case words str of
[time_str,nextcontact_str] ->
do time <- parseFloat time_str
nextcontact <- parseFloat nextcontact_str
return $ SyncSrvcksumDetails time nextcontact
_ -> Nothing
-- try to cut a line out of a character buffer
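-- e.g. with the buffer holding "abc\ndef" it yields Just "abc" and leaves
-- "def" in the buffer; if no newline is buffered yet it yields Nothing.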
cutLine :: (IORef String) -> IO (Maybe String)
cutLine buf =
do str <- readIORef buf
case cut "" str of
Nothing -> return Nothing
Just (line,rest) -> do writeIORef buf rest
return . Just $ chomp line
where
cut accum [] = Nothing
cut accum ('\n':xs) = Just (reverse accum,xs)
cut accum (x:xs) = cut (x:accum) xs
|
jean-edouard/manager
|
disksync/VhdSync.hs
|
gpl-2.0
| 27,843
| 4
| 25
| 9,030
| 6,033
| 3,141
| 2,892
| -1
| -1
|
{-# LANGUAGE OverloadedStrings #-}
module Yi.Config.Default.Emacs (configureEmacs) where
import Lens.Micro.Platform ((%=), (.=), (.~))
import Yi.Buffer.Misc (identA, directoryContentA)
import Yi.Config.Misc (ScrollStyle (..))
import Yi.Editor (buffersA, newBufferE)
import Yi.Event (Modifier (..), Key (..), Event (..))
import Yi.Interact (mkAutomaton, anyEvent, write, (||>), event, P)
import Yi.Keymap (makeAction)
import Yi.Keymap.Emacs (keymap)
import Yi.Keymap.Keys (printableChar, spec)
import Yi.Config.Lens
import Yi.Config.Simple (ConfigM)
import qualified Yi.Rope as R
import Yi.Types
import Control.Monad (forever, unless, void)
import qualified Data.Map as M
import Lens.Micro.Platform (use, (^.))
configureEmacs :: ConfigM ()
configureEmacs = do
configUIA %= (configScrollStyleA .~ Just SnapToCenter)
defaultKmA .= keymap
startActionsA %= (makeAction openScratchBuffer :)
configInputPreprocessA .= escToMeta
configKillringAccumulateA .= True
-- | Input preprocessor: Transform Esc;Char into Meta-Char
-- Useful for emacs lovers ;)
escToMeta :: P Event Event
escToMeta = mkAutomaton $ forever $ (anyEvent >>= write) ||> do
_ <- event (spec KEsc)
c <- printableChar
write (Event (KASCII c) [MMeta])
-- | Open an emacs-like scratch buffer if no file is open.
openScratchBuffer :: YiM ()
openScratchBuffer = withEditor $ do
fileBufOpen <- any isFileOrDir . M.elems <$> use buffersA
unless fileBufOpen $
void . newBufferE (MemBuffer "scratch") $ R.unlines
[ "This buffer is for notes you don't want to save."
, "If you want to create a file, open that file,"
, "then enter the text in that file's own buffer."
, ""
]
where
isFileOrDir :: FBuffer -> Bool
isFileOrDir attrs = case attrs ^. identA of
MemBuffer _ -> attrs ^. directoryContentA
FileBuffer _ -> True
|
yi-editor/yi
|
yi-keymap-emacs/src/Yi/Config/Default/Emacs.hs
|
gpl-2.0
| 2,096
| 0
| 12
| 573
| 524
| 300
| 224
| 43
| 2
|
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="sq-AL">
<title>JSON View</title>
<maps>
<homeID>jsonview</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
kingthorin/zap-extensions
|
addOns/jsonview/src/main/javahelp/help_sq_AL/helpset_sq_AL.hs
|
apache-2.0
| 959
| 77
| 66
| 156
| 407
| 206
| 201
| -1
| -1
|
{-# OPTIONS -fglasgow-exts #-}
-- This should work, because the type sig and the type
-- in the pattern match exactly
module Foo where
foo :: (forall a. a -> b) -> b
foo (f :: forall a. a -> b) = f undefined :: b
|
hvr/jhc
|
regress/tests/1_typecheck/2_pass/ghc/uncat/tc198.hs
|
mit
| 216
| 0
| 9
| 49
| 58
| 34
| 24
| -1
| -1
|
{-# LANGUAGE RankNTypes, PolyKinds, DataKinds, GADTs #-}
module T15743e where
import Data.Proxy
import Data.Kind
-- NO CUSK.
data T k (a :: k) (b :: Proxy k2) f c :: forall k3. Proxy k3 -> forall (k4 :: k5). Proxy k4 -> Type where
MkT :: f c -> T k a b f c d e
-- Want:
-- T :: forall {k3} {k7} {k6} (k2 :: k3) (k5 :: Type).
-- forall k -> k -> Proxy k2 -> (k7 -> Type) -> k4 ->
-- forall (k3 :: k6). Proxy k3 -> forall (k4 :: k5). Proxy k4 -> Type
--
--
-- CUSK
data T2 (k :: Type) (a :: k) (b :: Proxy k2) (f :: k7 -> Type) (c :: k7) :: forall k3. Proxy k3 -> forall k5 (k4 :: k5). Proxy k4 -> Type where
MkT2 :: f c -> T2 k a b f c d e
|
sdiehl/ghc
|
testsuite/tests/dependent/should_compile/T15743e.hs
|
bsd-3-clause
| 658
| 0
| 9
| 175
| 214
| 129
| 85
| -1
| -1
|
module HsModuleMaps where
import HsModule
import MUtils
import HsIdent(seqHsIdent)
instance Functor (HsImportDeclI m) where
fmap f (HsImportDecl s m q as optspec) =
HsImportDecl s m q as (fmap (apSnd (map (fmap f))) optspec)
instance Functor (HsExportSpecI m) where
fmap f e =
case e of
EntE espec -> EntE (fmap f espec)
ModuleE mn -> ModuleE mn
instance Functor EntSpec where
fmap f e =
case e of
Var i -> Var (f i)
Abs i -> Abs (f i)
AllSubs i -> AllSubs (f i)
ListSubs i is -> ListSubs (f i) (map (fmap f) is)
--------------------------------------------------------------------------------
mapDecls f (HsModule loc name exps imps ds) = HsModule loc name exps imps (f ds)
seqDecls (HsModule loc name exps imps ds) = HsModule loc name exps imps # ds
seqImportDecl (HsImportDecl s m q as optspec) =
HsImportDecl s m q as # seqMaybe (fmap (apSndM (mapM seqEntSpec)) optspec)
seqExportSpec e =
case e of
EntE espec -> EntE # seqEntSpec espec
ModuleE mn -> return (ModuleE mn)
seqEntSpec e =
case e of
Var i -> Var # i
Abs i -> Abs # i
AllSubs i -> AllSubs # i
ListSubs i is -> ListSubs # i <# mapM seqHsIdent is
--------------------------------------------------------------------------------
mapModMN f (HsModule loc name exps imps ds) =
HsModule loc (f name) (mapExpsMN f exps) (mapImpsMN f imps) ds
mapExpsMN f = fmap . map . mapExpMN $ f
mapExpMN f (EntE e) = EntE e
mapExpMN f (ModuleE m) = ModuleE (f m)
mapImpsMN f = map . mapImpMN $ f
mapImpMN f (HsImportDecl loc m q as spec) =
HsImportDecl loc (f m) q (fmap f as) spec
|
forste/haReFork
|
tools/base/AST/HsModuleMaps.hs
|
bsd-3-clause
| 1,631
| 0
| 14
| 379
| 708
| 343
| 365
| 41
| 4
|
-----------------------------------------------------------------------------
-- |
-- Module      :  Haddock.Backends.Xhtml.Names
-- Copyright : (c) Simon Marlow 2003-2006,
-- David Waern 2006-2009,
-- Mark Lentczner 2010
-- License : BSD-like
--
-- Maintainer : haddock@projects.haskell.org
-- Stability : experimental
-- Portability : portable
-----------------------------------------------------------------------------
module Haddock.Backends.Xhtml.Names (
ppName, ppDocName, ppLDocName, ppRdrName, ppUncheckedLink,
ppBinder, ppBinderInfix, ppBinder',
ppModule, ppModuleRef, ppIPName, linkId, Notation(..)
) where
import Haddock.Backends.Xhtml.Utils
import Haddock.GhcUtils
import Haddock.Types
import Haddock.Utils
import Text.XHtml hiding ( name, title, p, quote )
import qualified Data.Map as M
import qualified Data.List as List
import GHC
import Name
import RdrName
import FastString (unpackFS)
-- | Indicator of how to render a 'DocName' into 'Html'
data Notation = Raw -- ^ Render as-is.
| Infix -- ^ Render using infix notation.
| Prefix -- ^ Render using prefix notation.
deriving (Eq, Show)
ppOccName :: OccName -> Html
ppOccName = toHtml . occNameString
ppRdrName :: RdrName -> Html
ppRdrName = ppOccName . rdrNameOcc
ppIPName :: HsIPName -> Html
ppIPName = toHtml . ('?':) . unpackFS . hsIPNameFS
ppUncheckedLink :: Qualification -> (ModuleName, OccName) -> Html
ppUncheckedLink _ (mdl, occ) = linkIdOcc' mdl (Just occ) << ppOccName occ -- TODO: apply ppQualifyName
-- The Bool indicates if it is to be rendered in infix notation
ppLDocName :: Qualification -> Notation -> Located DocName -> Html
ppLDocName qual notation (L _ d) = ppDocName qual notation True d
ppDocName :: Qualification -> Notation -> Bool -> DocName -> Html
ppDocName qual notation insertAnchors docName =
case docName of
Documented name mdl ->
linkIdOcc mdl (Just (nameOccName name)) insertAnchors
<< ppQualifyName qual notation name mdl
Undocumented name
| isExternalName name || isWiredInName name ->
ppQualifyName qual notation name (nameModule name)
| otherwise -> ppName notation name
-- | Render a name depending on the selected qualification mode
ppQualifyName :: Qualification -> Notation -> Name -> Module -> Html
ppQualifyName qual notation name mdl =
case qual of
NoQual -> ppName notation name
FullQual -> ppFullQualName notation mdl name
LocalQual localmdl ->
if moduleString mdl == moduleString localmdl
then ppName notation name
else ppFullQualName notation mdl name
RelativeQual localmdl ->
case List.stripPrefix (moduleString localmdl) (moduleString mdl) of
-- local, A.x -> x
Just [] -> ppName notation name
-- sub-module, A.B.x -> B.x
Just ('.':m) -> toHtml $ m ++ '.' : getOccString name
-- some module with same prefix, ABC.x -> ABC.x
Just _ -> ppFullQualName notation mdl name
-- some other module, D.x -> D.x
Nothing -> ppFullQualName notation mdl name
AliasedQual aliases localmdl ->
case (moduleString mdl == moduleString localmdl,
M.lookup mdl aliases) of
(False, Just alias) -> ppQualName notation alias name
_ -> ppName notation name
ppFullQualName :: Notation -> Module -> Name -> Html
ppFullQualName notation mdl name = wrapInfix notation (getOccName name) qname
where
qname = toHtml $ moduleString mdl ++ '.' : getOccString name
ppQualName :: Notation -> ModuleName -> Name -> Html
ppQualName notation mdlName name = wrapInfix notation (getOccName name) qname
where
qname = toHtml $ moduleNameString mdlName ++ '.' : getOccString name
ppName :: Notation -> Name -> Html
ppName notation name = wrapInfix notation (getOccName name) $ toHtml (getOccString name)
ppBinder :: Bool -> OccName -> Html
-- The Bool indicates whether we are generating the summary, in which case
-- the binder will be a link to the full definition.
ppBinder True n = linkedAnchor (nameAnchorId n) << ppBinder' Prefix n
ppBinder False n = namedAnchor (nameAnchorId n) ! [theclass "def"]
<< ppBinder' Prefix n
ppBinderInfix :: Bool -> OccName -> Html
ppBinderInfix True n = linkedAnchor (nameAnchorId n) << ppBinder' Infix n
ppBinderInfix False n = namedAnchor (nameAnchorId n) ! [theclass "def"]
<< ppBinder' Infix n
ppBinder' :: Notation -> OccName -> Html
ppBinder' notation n = wrapInfix notation n $ ppOccName n
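-- Decorate a name for the requested notation: quote a non-symbolic name when
-- rendered infix, parenthesise a symbolic name when rendered prefix, and
-- leave the kind "*" untouched.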
wrapInfix :: Notation -> OccName -> Html -> Html
wrapInfix notation n = case notation of
Infix | is_star_kind -> id
| not is_sym -> quote
Prefix | is_star_kind -> id
| is_sym -> parens
_ -> id
where
is_sym = isSymOcc n
is_star_kind = isTcOcc n && occNameString n == "*"
linkId :: Module -> Maybe Name -> Html -> Html
linkId mdl mbName = linkIdOcc mdl (fmap nameOccName mbName) True
linkIdOcc :: Module -> Maybe OccName -> Bool -> Html -> Html
linkIdOcc mdl mbName insertAnchors =
if insertAnchors
then anchor ! [href url]
else id
where
url = case mbName of
Nothing -> moduleUrl mdl
Just name -> moduleNameUrl mdl name
linkIdOcc' :: ModuleName -> Maybe OccName -> Html -> Html
linkIdOcc' mdl mbName = anchor ! [href url]
where
url = case mbName of
Nothing -> moduleHtmlFile' mdl
Just name -> moduleNameUrl' mdl name
ppModule :: Module -> Html
ppModule mdl = anchor ! [href (moduleUrl mdl)]
<< toHtml (moduleString mdl)
ppModuleRef :: ModuleName -> String -> Html
ppModuleRef mdl ref = anchor ! [href (moduleHtmlFile' mdl ++ ref)]
<< toHtml (moduleNameString mdl)
-- NB: The ref parameter already includes the '#'.
-- This function is only called from markupModule expanding a
-- DocModule, which doesn't seem to ever be used.
|
jwiegley/ghc-release
|
utils/haddock/src/Haddock/Backends/Xhtml/Names.hs
|
gpl-3.0
| 5,980
| 0
| 13
| 1,387
| 1,559
| 800
| 759
| 107
| 10
|
module T12042a (module T12042a, module T12042) where
import {-# SOURCE #-} T12042
type U = S
|
olsner/ghc
|
testsuite/tests/typecheck/should_fail/T12042a.hs
|
bsd-3-clause
| 93
| 0
| 4
| 16
| 24
| 17
| 7
| 3
| 0
|
module A where
import B
|
forked-upstream-packages-for-ghcjs/ghc
|
testsuite/tests/ghci/prog009/A1.hs
|
bsd-3-clause
| 25
| 0
| 3
| 6
| 7
| 5
| 2
| 2
| 0
|
import Control.Monad
import Data.List
readNumbers :: String -> [Int]
readNumbers = map read . words
-- the task is to find the minimum of all a's and the minimum of all b's and multiply the two
findMins :: [[Int]] -> [Int]
findMins = foldl' (\[min_a, min_b] [a, b] -> [min min_a a, min min_b b]) [maxBound :: Int, maxBound :: Int]
multiply :: [Int] -> Int
multiply [a, b] = a * b
main :: IO ()
main = do
n <- readLn :: IO Int
list <- replicateM n getLine
let inputs = map readNumbers list
-- print inputs
let ans = multiply (findMins inputs)
print ans
|
mgrebenets/hackerrank
|
alg/geometry/rectangular-game.hs
|
mit
| 507
| 0
| 12
| 113
| 193
| 101
| 92
| 13
| 1
|
{- for ch04 DMA
- by Yue Wang
- -}
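-- Successor of a bit string under binary counting (lexicographic order),
-- e.g. nextLexicalBitStirng [1,0,1] == [1,1,0].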
nextLexicalBitStirng :: (Num a, Eq a) => [a] -> [a]
nextLexicalBitStirng [] = []
nextLexicalBitStirng xs = if head rxs==0 then reverse(1:(tail rxs)) else nextLexicalBitStirng(take (length xs - 1) xs) ++ [0]
where rxs = reverse xs
|
Mooophy/DMA
|
ch04/nextLexicalBitStirng.hs
|
mit
| 316
| 0
| 12
| 98
| 121
| 63
| 58
| 4
| 2
|
module Y2016.M09.D15.Solution where
import Control.Monad (guard)
import Data.Array
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Set (Set)
import qualified Data.Set as Set
-- the below import is available from the 1HaskellADay git repository
import Control.List (takeout)
import Data.Matrix
{--
Today's Haskell exercise is neither sudoku nor magic squares. We're just working
with one square, only, and arranging the number 1 through 9 in those squares.
Simple enough.
Oh, there's a few constraints.
So, one way to arrange the numbers 1 through 9 in a 3x3 matrix is as follows:
--}
threeBy :: Matrix Int
threeBy = fromLists (take 3 (counting [1,2,3]))
counting :: [Int] -> [[Int]]
counting = (map . (+) . length) >>= iterate -- via joomy @cattheory
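-- i.e. counting xs = iterate (map (+ length xs)) xs (via the reader monad):
-- each successive row adds the row length (here 3) to every entry.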
{--
*Y2016.M09.D15.Exercise> pprint threeBy
Matrix 3x3
| 1 2 3 |
| 4 5 6 |
| 7 8 9 |
--}
-- But today's problem is a mite harder than threeBy. Here are the constraints:
-- 1. 1 is two squares directly right of 7
-- 2. 2 is two squares directly above 8
-- 3. 3 is two squares directly left of 9
-- 4. 4 is two squares directly below 3
-- 5. 5 is not in the center square
-- create a schema for the constraints so that the constraints hold and the
-- numbers 1 - 9 are arranged in a 3x3 matrix.
-- We see that 6 is unconstrained
-- We see that 3 is 'doubly constrained'
type Val = Int -- the value the cell contains
type Idx = Int -- the index of the Row or the Column
data Col = Col Val Idx
deriving Show
data Row = Row Val Idx
deriving Show
data Constraint = SameRow Val Val Col Col
| SameCol Val Val Row Row
| NotCenter Val
deriving Show
constraints :: [Constraint]
constraints = [SameRow 1 7 (Col 1 3) (Col 7 1),
SameCol 2 8 (Row 2 1) (Row 8 3),
SameRow 3 9 (Col 3 1) (Col 9 3),
SameCol 4 3 (Row 4 3) (Row 3 1),
NotCenter 5]
-- we do the constrain-then-generate approach to solving this puzzle
-- for a constraint, c, we pick indices for the values constrained
type CellIdx = (Idx, Idx)
type ValMap = Map Val CellIdx
type Result = [(ValMap, [(CellIdx, Val)], [CellIdx])]
-- we need to take out an index iff we haven't already assigned the value
takeoutOr :: ValMap -> Val -> [CellIdx] -> [(CellIdx, [CellIdx])]
takeoutOr vm v idxs =
case Map.lookup v vm of
Nothing -> takeout idxs
Just x -> return (x, idxs)
pick :: ValMap -> [CellIdx] -> Constraint -> Result
pick ctx indices (SameRow v1 v2 (Col _ c1) (Col _ c2)) =
takeoutOr ctx v1 indices >>= \((a,b), rest) ->
guard (b == c1) >>
takeoutOr ctx v2 rest >>= \((c,d), rem) ->
guard (a == c) >>
guard (d == c2) >>
let newmap = Map.insert v1 (a,b) (Map.insert v2 (c,d) ctx) in
return (newmap, [((a,b), v1), ((c,d), v2)], rem)
pick ctx indices (NotCenter v) =
takeoutOr ctx v indices >>= \(idx, rest) -> guard (idx /= centre) >>
return (Map.insert v idx ctx, [(idx, v)], rest)
pick ctx indices (SameCol v1 v2 (Row _ r1) (Row _ r2)) =
takeoutOr ctx v1 indices >>= \((a,b), rest) ->
guard (a == r1) >>
takeoutOr ctx v2 rest >>= \((c,d),rem) ->
guard (b == d) >>
guard (c == r2) >>
let newmap = Map.insert v1 (a,b) (Map.insert v2 (c,d) ctx) in
return (newmap, [((a,b),v1),((c,d),v2)], rem)
centre :: (Idx, Idx)
centre = (2,2)
picks :: ValMap -> [CellIdx] -> [Constraint] -> Result
picks ctx idxs [] = [(ctx, [], idxs)]
picks ctx idxs (h:t) = pick ctx idxs h >>= \(newctx, ans, rest) ->
picks newctx rest t >>= \(ctxn, anss, rem) ->
return (ctxn, ans ++ anss, rem)
constrainedMatrix :: [Constraint] -> Set Int -> [Matrix Int]
constrainedMatrix constraints nums =
let i3 = matrix (identity 3) in
picks Map.empty (indices i3) constraints >>= \(_, assigned, rest) ->
return (M (array (bounds i3) (assigned ++
-- Now, the remaining unconstrained values are assigned to the remaining indices
zip rest (Set.toList (foldr removeVals nums constraints)))))
removeVals :: Constraint -> Set Val -> Set Val
removeVals (NotCenter v) = Set.delete v
removeVals (SameRow v1 v2 _ _) = Set.delete v1 . Set.delete v2
removeVals (SameCol v1 v2 _ _) = Set.delete v1 . Set.delete v2
-- so constrainedMatrix constraints (Set.fromList [1..9]) gives a properly arranged matrix
{--
*Y2016.M09.D15.Solution> mapM_ pprint . constrainedMatrix constraints $ Set.fromList [1..9]
Matrix 3x3
| 3 2 9 |
| 7 6 1 |
| 4 8 5 |
--}
|
geophf/1HaskellADay
|
exercises/HAD/Y2016/M09/D15/Solution.hs
|
mit
| 4,409
| 0
| 20
| 992
| 1,472
| 815
| 657
| 74
| 2
|
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.WebKitCSSTransformValue
(js__get, _get, pattern CSS_TRANSLATE, pattern CSS_TRANSLATEX,
pattern CSS_TRANSLATEY, pattern CSS_ROTATE, pattern CSS_SCALE,
pattern CSS_SCALEX, pattern CSS_SCALEY, pattern CSS_SKEW,
pattern CSS_SKEWX, pattern CSS_SKEWY, pattern CSS_MATRIX,
pattern CSS_TRANSLATEZ, pattern CSS_TRANSLATE3D,
pattern CSS_ROTATEX, pattern CSS_ROTATEY, pattern CSS_ROTATEZ,
pattern CSS_ROTATE3D, pattern CSS_SCALEZ, pattern CSS_SCALE3D,
pattern CSS_PERSPECTIVE, pattern CSS_MATRIX3D, js_getOperationType,
getOperationType, WebKitCSSTransformValue,
castToWebKitCSSTransformValue, gTypeWebKitCSSTransformValue)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSVal(..), JSString)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSVal(..), FromJSVal(..))
import GHCJS.Marshal.Pure (PToJSVal(..), PFromJSVal(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.JSFFI.Generated.Enums
foreign import javascript unsafe "$1[\"_get\"]($2)" js__get ::
WebKitCSSTransformValue -> Word -> IO (Nullable CSSValue)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/WebKitCSSTransformValue._get Mozilla WebKitCSSTransformValue._get documentation>
_get ::
(MonadIO m) =>
WebKitCSSTransformValue -> Word -> m (Maybe CSSValue)
_get self index
= liftIO (nullableToMaybe <$> (js__get (self) index))
pattern CSS_TRANSLATE = 1
pattern CSS_TRANSLATEX = 2
pattern CSS_TRANSLATEY = 3
pattern CSS_ROTATE = 4
pattern CSS_SCALE = 5
pattern CSS_SCALEX = 6
pattern CSS_SCALEY = 7
pattern CSS_SKEW = 8
pattern CSS_SKEWX = 9
pattern CSS_SKEWY = 10
pattern CSS_MATRIX = 11
pattern CSS_TRANSLATEZ = 12
pattern CSS_TRANSLATE3D = 13
pattern CSS_ROTATEX = 14
pattern CSS_ROTATEY = 15
pattern CSS_ROTATEZ = 16
pattern CSS_ROTATE3D = 17
pattern CSS_SCALEZ = 18
pattern CSS_SCALE3D = 19
pattern CSS_PERSPECTIVE = 20
pattern CSS_MATRIX3D = 21
foreign import javascript unsafe "$1[\"operationType\"]"
js_getOperationType :: WebKitCSSTransformValue -> IO Word
-- | <https://developer.mozilla.org/en-US/docs/Web/API/WebKitCSSTransformValue.operationType Mozilla WebKitCSSTransformValue.operationType documentation>
getOperationType ::
(MonadIO m) => WebKitCSSTransformValue -> m Word
getOperationType self = liftIO (js_getOperationType (self))
|
manyoo/ghcjs-dom
|
ghcjs-dom-jsffi/src/GHCJS/DOM/JSFFI/Generated/WebKitCSSTransformValue.hs
|
mit
| 2,939
| 14
| 10
| 424
| 726
| 426
| 300
| 59
| 1
|
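-- Greatest common divisor by repeated subtraction (Euclid's original method);
-- "nwd" abbreviates the Polish term for gcd. E.g. nwd 12 18 == 6.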
nwd :: Integer -> Integer -> Integer
nwd 0 y = abs y
nwd x 0 = abs x
nwd x y
| x < 0 = nwd (abs x) y
| y < 0 = nwd x (abs y)
| x == y = x
| x > y = if (x - y) > y then nwd (x - y) y else nwd y (x - y)
| x < y = nwd y x
|
RAFIRAF/HASKELL
|
nwdEu3.hs
|
mit
| 228
| 0
| 9
| 86
| 182
| 87
| 95
| 9
| 2
|
import HUnit
|
chris-wood/ccnx-pktgen
|
testsuite/tests/parser-tests.hs
|
mit
| 15
| 0
| 3
| 4
| 4
| 2
| 2
| 1
| 0
|
-- | Blending stuff.
--
-- <https://www.opengl.org/wiki/Blending>
--
module Graphics.Caramia.Blend
(
-- * Data types
BlendSpec(..)
, BlendEquation(..)
, BlendFunc(..)
-- * Pre-defined blending specs
, preMultipliedAlpha
, nopBlend )
where
--import Graphics.Caramia.Prelude
import Graphics.Caramia.Blend.Internal
import Graphics.Caramia.Color
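-- | Source-over compositing for colors with premultiplied alpha:
-- result = src + dst * (1 - src_alpha), for both the color and alpha channels.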
preMultipliedAlpha :: BlendSpec
preMultipliedAlpha = BlendSpec
{ srcColorFunc = BFOne
, srcAlphaFunc = BFOne
, dstColorFunc = BFOneMinusSrcAlpha
, dstAlphaFunc = BFOneMinusSrcAlpha
, colorEquation = BEAdd
, alphaEquation = BEAdd
, blendColor = rgba 1 1 1 1 }
-- | Blending that doesn't do anything special; just copies the source pixel to
-- destination.
nopBlend :: BlendSpec
nopBlend = BlendSpec
{ srcColorFunc = BFOne
, srcAlphaFunc = BFOne
, dstColorFunc = BFZero
, dstAlphaFunc = BFZero
, colorEquation = BEAdd
, alphaEquation = BEAdd
, blendColor = rgba 1 1 1 1 }
|
Noeda/caramia
|
src/Graphics/Caramia/Blend.hs
|
mit
| 1,011
| 0
| 7
| 230
| 184
| 121
| 63
| 27
| 1
|
-- | Utilities for controlling bluetooth.
--
-- Note that these only work when there is a tmux pane called btctl
-- that is running bluetoothctl. The approach using tmux comes from
-- https://serverfault.com/a/547144
--
-- TODO: Make it so that this starts the pane if it isn't already
-- running.
module Bluetooth where
import qualified Data.Text as T
import Imports
bluetoothConnect :: Utf8Builder -> Lens' Env (Maybe Text) -> Xio ()
bluetoothConnect n l = withUuid n l $ \uuid ->
sendToBluetoothCtl ["connect ", uuid, "Enter"]
bluetoothDisconnect :: Utf8Builder -> Lens' Env (Maybe Text) -> Xio ()
bluetoothDisconnect n l = withUuid n l $ \uuid ->
sendToBluetoothCtl ["disconnect ", uuid, "Enter"]
withUuid :: Utf8Builder -> Lens' Env (Maybe Text) -> (String -> Xio ()) -> Xio ()
withUuid n l f = do
muuid <- view l
case muuid of
Just uuid -> f (T.unpack uuid)
Nothing -> logError $ mconcat
["Can't connect to ", n, ", as ", n, ".uuid doesn't exist"]
sendToBluetoothCtl :: [String] -> Xio ()
sendToBluetoothCtl keypresses =
syncSpawn "tmux" $ ["send-keys", "-t", "bt"] ++ keypresses
|
mgsloan/compconfig
|
env/src/Bluetooth.hs
|
mit
| 1,116
| 0
| 13
| 212
| 320
| 169
| 151
| 19
| 2
|
module OCR (convert) where
convert :: String -> String
convert xs = error "You need to implement this function."
|
exercism/xhaskell
|
exercises/practice/ocr-numbers/src/OCR.hs
|
mit
| 114
| 0
| 5
| 20
| 29
| 16
| 13
| 3
| 1
|
{-# LANGUAGE OverloadedStrings #-}
module CoinApi.Types.Rate where
import CoinApi.Types.Internal
data Rate = Rate { time :: !UTCTime
, asset_id_base :: !Text
, asset_id_quote :: !Text
, rate :: !Scientific }
deriving (Show, Eq)
instance FromJSON Rate where
parseJSON = withObject "Rate" $ \o -> Rate
<$> fmap fromTime (o .: "time")
<*> o .: "asset_id_base"
<*> o .: "asset_id_quote"
<*> o .: "rate"
|
coinapi/coinapi-sdk
|
data-api/haskell-rest/CoinApi/Types/Rate.hs
|
mit
| 639
| 0
| 17
| 303
| 127
| 70
| 57
| 22
| 0
|
{-# LANGUAGE BangPatterns #-}
-- Quasicrystals demo.
--
-- Based on code from:
-- http://mainisusuallyafunction.blogspot.com/2011/10/quasicrystals-as-sums-of-waves-in-plane.html
--
import Graphics.Gloss.Raster.Field
import System.Environment
import System.Exit
import Data.Char
-- Main -----------------------------------------------------------------------
main :: IO ()
main
= do args <- getArgs
config <- parseArgs args defaultConfig
let display
= case configFullScreen config of
True -> FullScreen (configSizeX config, configSizeY config)
False -> InWindow "Crystal"
(configSizeX config, configSizeY config)
(10, 10)
let scale = fromIntegral $ configScale config
animateField display
(configZoom config, configZoom config)
(quasicrystal scale (configDegree config))
-- Config ---------------------------------------------------------------------
data Config
= Config
{ configSizeX :: Int
, configSizeY :: Int
, configFullScreen :: Bool
, configZoom :: Int
, configScale :: Int
, configDegree :: Int }
deriving Show
defaultConfig :: Config
defaultConfig
= Config
{ configSizeX = 800
, configSizeY = 600
, configFullScreen = False
, configZoom = 2
, configScale = 30
, configDegree = 5 }
parseArgs :: [String] -> Config -> IO Config
parseArgs args config
| [] <- args
= return config
| "-fullscreen" : sizeX : sizeY : rest <- args
, all isDigit sizeX
, all isDigit sizeY
= parseArgs rest
$ config { configSizeX = read sizeX
, configSizeY = read sizeY
, configFullScreen = True }
| "-window" : sizeX : sizeY : rest <- args
, all isDigit sizeX
, all isDigit sizeY
= parseArgs rest
$ config { configSizeX = read sizeX
, configSizeY = read sizeY
, configFullScreen = False }
| "-zoom" : zoom : rest <- args
, all isDigit zoom
= parseArgs rest
$ config { configZoom = read zoom }
| "-scale" : scale : rest <- args
, all isDigit scale
= parseArgs rest
$ config { configScale = read scale }
| "-degree" : degree : rest <- args
, all isDigit degree
= parseArgs rest
$ config { configDegree = read degree }
| otherwise
= do printUsage
exitWith $ ExitFailure 1
printUsage :: IO ()
printUsage
= putStr $ unlines
[ "quazicrystal [flags]"
, " -fullscreen sizeX sizeY Run full screen"
, " -window sizeX sizeY Run in a window (default 800, 600)"
, " -zoom <NAT> Pixel replication factor (default 5)"
, " -scale <NAT> Feature size of visualisation (default 30)"
, " -degree <NAT> Number waves to sum for each point (default 5)"
, ""
, " You'll want to run this with +RTS -N to enable threads" ]
-- Types ----------------------------------------------------------------------
-- | Angle in radians.
type Angle = Float
-- | Angle offset used for animation.
type Phi = Float
-- | Number of waves to sum for each pixel.
type Degree = Int
-- | Feature size of visualisation.
type Scale = Float
-- | Time in seconds since the program started.
type Time = Float
-- Point ----------------------------------------------------------------------
-- | Compute a single point of the visualisation.
quasicrystal :: Scale -> Degree -> Time -> Point -> Color
quasicrystal !scale !degree !time !p
= let -- Scale the time to be the phi value of the animation.
-- The action seems to slow down at increasing phi values,
-- so we increase phi faster as time moves on.
phi = 1 + (time ** 1.5) * 0.005
in rampColor
$ waves degree phi
$ point scale p
-- | Sum up all the waves at a particular point.
waves :: Degree -> Phi -> Point -> Float
waves !degree !phi !x = wrap $ waver 0 degree
where
!th = pi / phi
waver :: Float -> Int -> Float
waver !acc !n
| n == 0 = acc
| otherwise = waver (acc + wave (fromIntegral n * th) x)
(n - 1)
wrap n
= let !n_ = truncate n :: Int
!n' = n - fromIntegral n_
in if odd n_ then 1 - n'
else n'
-- | Generate the value for a single wave.
wave :: Angle -> Point -> Float
wave !th = f where
!cth = cos th
!sth = sin th
{-# INLINE f #-}
f (x, y) = (cos (cth*x + sth*y) + 1) / 2
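-- Sanity check (added for illustration; not part of the original program):
-- the (cos .. + 1) / 2 term normalises each wave into [0, 1]; at angle 0
-- and the origin it evaluates to (cos 0 + 1) / 2 = 1.
waveAtOrigin :: Float
waveAtOrigin = wave 0 (0, 0)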
-- | Convert an image point to a point on our wave plane.
point :: Scale -> Point -> Point
point !scale (x, y) = (x * scale, y * scale)
-- | Color ramp from blue to white.
rampColor :: Float -> Color
rampColor v
= rawColor v (0.4 + (v * 0.6)) 1 1
|
gscalzo/HaskellTheHardWay
|
gloss-try/gloss-master/gloss-examples/raster/Crystal/Main.hs
|
mit
| 5,282
| 0
| 15
| 1,924
| 1,176
| 609
| 567
| -1
| -1
|
{-# LANGUAGE OverloadedStrings, NamedFieldPuns, Rank2Types #-}
module Client (clientMain) where
import Types
import Haste.App
-- TODO:
-- * persistence
-- * routing
import Control.Applicative
import Control.Monad
import Prelude hiding (div)
import Haste.Foreign
import Haste.Prim
import Lens.Family2
import React
import System.IO.Unsafe
import Debug.Trace
-- MODEL
data PageState = PageState
{ _todos :: [Todo]
, _typingValue :: JSString
}
-- UTILITY
todos :: Lens' PageState [Todo]
todos f (PageState t v) = (`PageState` v) <$> f t
typingValue :: Lens' PageState JSString
typingValue f (PageState t v) = PageState t <$> f v
trim :: JSString -> JSString
trim = unsafePerformIO . ffi "(function(str) { return str.trim(); })"
-- remove an item from the list by index
iFilter :: Int -> [a] -> [a]
iFilter 0 (_:as) = as
iFilter n (a:as) = a : iFilter (n-1) as
iFilter _ [] = error "can't remove from empty list"
-- CONTROLLER
handleEnter :: PageState -> PageState
handleEnter oldState@PageState{_todos, _typingValue} =
let trimmed = trim (trace "got here" _typingValue)
in if trimmed == ""
then oldState
else PageState (_todos ++ [Todo 0 trimmed Active]) ""
-- TODO exit editing
-- "If escape is pressed during the edit, the edit state should be left and
-- any changes be discarded."
handleEsc :: PageState -> PageState
handleEsc state = state & typingValue .~ ""
handleHeaderKey :: PageState -> KeyboardEvent -> PageState
handleHeaderKey state KeyboardEvent{key="Enter"} = handleEnter state
handleHeaderKey state KeyboardEvent{key="Escape"} = handleEsc state
handleHeaderKey state _ = state
handleTyping :: PageState -> ChangeEvent -> PageState
handleTyping state (ChangeEvent _typingValue) = state{_typingValue}
statusOfToggle :: [Todo] -> Status
statusOfToggle _todos =
let allActive = all (\Todo{_status} -> _status == Active) _todos
in if allActive then Active else Completed
handleToggleAll :: PageState -> MouseEvent -> PageState
handleToggleAll state@PageState{_todos} _ = state{_todos=newTodos} where
_status = toggleStatus $ statusOfToggle _todos
newTodos = map (\todo -> todo{_status}) _todos
handleItemCheck :: Int -> PageState -> MouseEvent -> PageState
handleItemCheck todoNum state _ =
state & todos . ix' todoNum . status %~ toggleStatus
-- TODO
handleLabelDoubleClick :: PageState -> MouseEvent -> PageState
handleLabelDoubleClick = const
handleDestroy :: Int -> PageState -> MouseEvent -> PageState
handleDestroy todoNum state _ = state & todos %~ iFilter todoNum
clearCompleted :: PageState -> MouseEvent -> PageState
clearCompleted state _ = state & todos %~ todosWithStatus Active
-- VIEW
-- "New todos are entered in the input at the top of the app. The input
-- element should be focused when the page is loaded preferably using the
-- autofocus input attribute. Pressing Enter creates the todo, appends it
-- to the todo list and clears the input. Make sure to .trim() the input
-- and then check that it's not empty before creating a new todo."
header :: StatefulReact PageState ()
header = header_ <! id_ "header" $ do
PageState{_typingValue} <- getState
h1_ "todos"
input_ <! id_ "new-todo"
<! placeholder_ "What needs to be done?"
<! autofocus_ True
<! value_ _typingValue
<! onChange handleTyping
<! onKeyDown handleHeaderKey
todoView :: Int -> StatefulReact PageState ()
todoView i = do
PageState{_todos} <- getState
let Todo{_id, _text, _status} = _todos !! i
li_ <! class_ (if _status == Completed then "completed" else "") $ do
div_ <! class_ "view" $ do
input_ <! class_ "toggle"
<! id_ (toJSString $ "toggle-" ++ show _id )
<! type_ "checkbox"
<! checked_ (_status == Completed)
<! onClick (handleItemCheck i)
label_ <! onDoubleClick handleLabelDoubleClick $ text_ _text
button_ <! class_ "destroy"
<! id_ (toJSString $ "destroy-" ++ show _id)
<! onClick (handleDestroy i) $ return ()
input_ <! class_ "edit"
<! value_ _text
todosWithStatus :: Status -> [Todo] -> [Todo]
todosWithStatus stat = filter (\Todo{_status} -> _status == stat)
mainBody :: StatefulReact PageState ()
mainBody = do
PageState{_todos} <- getState
section_ <! id_ "main" $ do
input_ <! id_ "toggle-all" <! type_ "checkbox"
label_ <! for_ "toggle-all"
<! onClick handleToggleAll $
"Mark all as complete"
ul_ <! id_ "todo-list" $ forM_ [0 .. length _todos - 1] todoView
innerFooter :: StatefulReact PageState ()
innerFooter = footer_ <! id_ "footer" $ do
PageState{_todos} <- getState
let activeCount = length (todosWithStatus Active _todos)
let inactiveCount = length (todosWithStatus Completed _todos)
-- "Displays the number of active todos in a pluralized form. Make sure
-- the number is wrapped by a <strong> tag. Also make sure to pluralize
-- the item word correctly: 0 items, 1 item, 2 items. Example: 2 items
-- left"
span_ <! id_ "todo-count" $ do
strong_ (text_ (toJSStr (show activeCount)))
if activeCount == 1 then " item left" else " items left"
button_ <! id_ "clear-completed"
<! class_ (if inactiveCount == 0 then "hidden" else "")
<! onClick clearCompleted $
text_ (toJSStr ("Clear completed (" ++ show inactiveCount ++ ")"))
outerFooter :: StatefulReact PageState ()
outerFooter = footer_ <! id_ "info" $ do
p_ "Double-click to edit a todo"
p_ $ do
"Created by "
a_ <! href_ "http://joelburget.com" $ "Joel Burget"
p_ $ do
"Part of "
a_ <! href_ "http://todomvc.com" $ "TodoMVC"
wholePage :: StatefulReact PageState ()
wholePage = div_ $ do
PageState{_todos} <- getState
section_ <! id_ "todoapp" $ do
header
-- "When there are no todos, #main and #footer should be hidden."
unless (null _todos) $ do
mainBody
innerFooter
outerFooter
clientMain :: API -> Client ()
clientMain api = do
initTodos <- onServer $ apiFetchTodos api
Just inject <- elemById "inject"
liftIO $ render (PageState initTodos "") inject wholePage
forM_ initTodos $ \Todo{_id} -> listenMods _id
withElems ["new-todo", "clear-completed"] $ \ [todo, clear] ->
do todo `onEvent` OnKeyDown $ \k ->
case k of
13 -> do
m <- getProp todo "value"
(_id, todos') <- onServer $ apiAddTodo api <.> Todo 0 (toJSString m) Active
liftIO $ render (PageState todos' "") inject wholePage
listenMods _id
_ -> return ()
clear `onEvent` OnClick $ \ _ _ ->
do todos' <- onServer $ apiClearComplete api
liftIO $ render (PageState todos' "") inject wholePage
where
listenMods _id =
do withElem ("destroy-" ++ show _id ) $ \ todo ->
todo `onEvent` OnClick $ \ _ _ ->
onServer $ apiDeleteTodo api <.> _id
withElem ("toggle-" ++ show _id) $ \ todo ->
todo `onEvent` OnClick $ \ _ _ ->
onServer $ apiToggleTodo api <.> _id
|
jeremyjh/react-haste-app
|
src/Client.hs
|
mit
| 7,352
| 0
| 22
| 1,891
| 2,069
| 1,034
| 1,035
| -1
| -1
|
{-# htermination (splitAt :: MyInt -> (List a) -> Tup2 (List a) (List a)) #-}
import qualified Prelude
data MyBool = MyTrue | MyFalse
data List a = Cons a (List a) | Nil
data Tup2 a b = Tup2 a b ;
data MyInt = Pos Nat | Neg Nat ;
data Nat = Succ Nat | Zero ;
data Ordering = LT | EQ | GT ;
primMinusNat :: Nat -> Nat -> MyInt;
primMinusNat Zero Zero = Pos Zero;
primMinusNat Zero (Succ y) = Neg (Succ y);
primMinusNat (Succ x) Zero = Pos (Succ x);
primMinusNat (Succ x) (Succ y) = primMinusNat x y;
primPlusNat :: Nat -> Nat -> Nat;
primPlusNat Zero Zero = Zero;
primPlusNat Zero (Succ y) = Succ y;
primPlusNat (Succ x) Zero = Succ x;
primPlusNat (Succ x) (Succ y) = Succ (Succ (primPlusNat x y));
primMinusInt :: MyInt -> MyInt -> MyInt;
primMinusInt (Pos x) (Neg y) = Pos (primPlusNat x y);
primMinusInt (Neg x) (Pos y) = Neg (primPlusNat x y);
primMinusInt (Neg x) (Neg y) = primMinusNat y x;
primMinusInt (Pos x) (Pos y) = primMinusNat x y;
msMyInt :: MyInt -> MyInt -> MyInt
msMyInt = primMinusInt;
splitAt0Vu42 wy wz = splitAt (msMyInt wy (Pos (Succ Zero))) wz;
splitAt0Xs'0 wy wz (Tup2 xs' vx) = xs';
splitAt0Xs' wy wz = splitAt0Xs'0 wy wz (splitAt0Vu42 wy wz);
splitAt0Xs''0 wy wz (Tup2 vw xs'') = xs'';
splitAt0Xs'' wy wz = splitAt0Xs''0 wy wz (splitAt0Vu42 wy wz);
splitAt0 n (Cons x xs) = Tup2 (Cons x (splitAt0Xs' n xs)) (splitAt0Xs'' n xs);
splitAt1 vv Nil = Tup2 Nil Nil;
splitAt1 wu wv = splitAt0 wu wv;
primCmpNat :: Nat -> Nat -> Ordering;
primCmpNat Zero Zero = EQ;
primCmpNat Zero (Succ y) = LT;
primCmpNat (Succ x) Zero = GT;
primCmpNat (Succ x) (Succ y) = primCmpNat x y;
primCmpInt :: MyInt -> MyInt -> Ordering;
primCmpInt (Pos Zero) (Pos Zero) = EQ;
primCmpInt (Pos Zero) (Neg Zero) = EQ;
primCmpInt (Neg Zero) (Pos Zero) = EQ;
primCmpInt (Neg Zero) (Neg Zero) = EQ;
primCmpInt (Pos x) (Pos y) = primCmpNat x y;
primCmpInt (Pos x) (Neg y) = GT;
primCmpInt (Neg x) (Pos y) = LT;
primCmpInt (Neg x) (Neg y) = primCmpNat y x;
compareMyInt :: MyInt -> MyInt -> Ordering
compareMyInt = primCmpInt;
esEsOrdering :: Ordering -> Ordering -> MyBool
esEsOrdering LT LT = MyTrue;
esEsOrdering LT EQ = MyFalse;
esEsOrdering LT GT = MyFalse;
esEsOrdering EQ LT = MyFalse;
esEsOrdering EQ EQ = MyTrue;
esEsOrdering EQ GT = MyFalse;
esEsOrdering GT LT = MyFalse;
esEsOrdering GT EQ = MyFalse;
esEsOrdering GT GT = MyTrue;
not :: MyBool -> MyBool;
not MyTrue = MyFalse;
not MyFalse = MyTrue;
fsEsOrdering :: Ordering -> Ordering -> MyBool
fsEsOrdering x y = not (esEsOrdering x y);
ltEsMyInt :: MyInt -> MyInt -> MyBool
ltEsMyInt x y = fsEsOrdering (compareMyInt x y) GT;
splitAt2 n xs MyTrue = Tup2 Nil xs;
splitAt2 n xs MyFalse = splitAt1 n xs;
splitAt3 n xs = splitAt2 n xs (ltEsMyInt n (Pos Zero));
splitAt3 ww wx = splitAt1 ww wx;
splitAt :: MyInt -> (List a) -> Tup2 (List a) (List a);
splitAt n xs = splitAt3 n xs;
splitAt vv Nil = splitAt1 vv Nil;
splitAt n (Cons x xs) = splitAt0 n (Cons x xs);
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/basic_haskell/splitAt_1.hs
|
mit
| 2,995
| 0
| 11
| 635
| 1,384
| 725
| 659
| 73
| 1
|
{-# htermination properFraction :: Float -> (Int,Float) #-}
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/full_haskell/Prelude_properFraction_2.hs
|
mit
| 60
| 0
| 2
| 8
| 3
| 2
| 1
| 1
| 0
|
module ProjectEuler.Problem30
( problem
) where
import Data.Char
import ProjectEuler.Types
problem :: Problem
problem = pureProblem 30 Solved result
testAllow :: Int -> Bool
testAllow x = sum (map ((^(5 :: Int)) . digitToInt) (show x)) == x
result :: Int
result = sum (filter testAllow [10.. (9^(5 :: Int) * 5)])
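-- Worked example (added for illustration; not part of the original module):
-- 4150 = 4^5 + 1^5 + 5^5 + 0^5 = 1024 + 1 + 3125 + 0, so it satisfies the
-- digit fifth-power property checked by testAllow.
exampleTestAllow :: Bool
exampleTestAllow = testAllow 4150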
|
Javran/Project-Euler
|
src/ProjectEuler/Problem30.hs
|
mit
| 323
| 0
| 12
| 62
| 136
| 76
| 60
| 10
| 1
|
{-| This script builds any version of the Elm Platform from source.
Before you use it, make sure you have the Haskell Platform with a recent
version of cabal.
To install a released version of Elm, you will run something like this:
runhaskell BuildFromSource.hs 0.16
Before you do that, in some directory of your choosing, add
wherever/Elm-Platform/0.16/.cabal-sandbox/bin to your PATH.
Then, run the above. You will now actually have a new directory for the
Elm Platform, like this:
Elm-Platform/0.16/
elm-make/ -- git repo for the build tool, ready to edit
elm-repl/ -- git repo for the REPL, ready to edit
...
.cabal-sandbox/ -- various build files
All of the executables you need are in .cabal-sandbox/bin, which is on
your PATH and thus can be used from anywhere.
You can build many versions of the Elm Platform, so it is possible to have
Elm-Platform/0.16/ and Elm-Platform/0.13/ with no problems. It is up to you
to manage your PATH variable or symlinks though.
To get set up with the master branch of all Elm Platform projects, run this:
runhaskell BuildFromSource.hs master
From there you can start developing on any of the projects, switching branches
and testing interactions between projects.
-}
module Main where
import qualified Data.List as List
import qualified Data.Map as Map
import System.Directory (createDirectoryIfMissing,
getCurrentDirectory, setCurrentDirectory)
import System.Environment (getArgs)
import System.Exit (ExitCode, exitFailure)
import System.FilePath ((</>))
import System.IO (hPutStrLn, stderr)
import System.Process (rawSystem)
(=:) = (,)
configs :: Map.Map String [(String, String)]
configs =
Map.fromList
[
"master" =:
[ "elm-compiler" =: "master"
, "elm-package" =: "master"
, "elm-make" =: "master"
, "elm-reactor" =: "master"
, "elm-repl" =: "master"
]
,
"0.16" =:
[ "elm-compiler" =: "0.16"
, "elm-package" =: "0.16"
, "elm-make" =: "0.16"
, "elm-reactor" =: "0.16"
, "elm-repl" =: "0.16"
]
,
"0.15.1" =:
[ "elm-compiler" =: "0.15.1"
, "elm-package" =: "0.5.1"
, "elm-make" =: "0.2"
, "elm-reactor" =: "0.3.2"
, "elm-repl" =: "0.4.2"
]
,
"0.15" =:
[ "elm-compiler" =: "0.15"
, "elm-package" =: "0.5"
, "elm-make" =: "0.1.2"
, "elm-reactor" =: "0.3.1"
, "elm-repl" =: "0.4.1"
]
,
"0.14.1" =:
[ "elm-compiler" =: "0.14.1"
, "elm-package" =: "0.4"
, "elm-make" =: "0.1.1"
, "elm-reactor" =: "0.3"
, "elm-repl" =: "0.4"
]
,
"0.14" =:
[ "elm-compiler" =: "0.14"
, "elm-package" =: "0.2"
, "elm-make" =: "0.1"
, "elm-reactor" =: "0.2"
, "elm-repl" =: "0.4"
]
,
"0.13" =:
[ "Elm" =: "0.13"
, "elm-reactor" =: "0.1"
, "elm-repl" =: "0.3"
, "elm-get" =: "0.1.3"
]
,
"0.12.3" =:
[ "Elm" =: "0.12.3"
, "elm-server" =: "0.11.0.1"
, "elm-repl" =: "0.2.2.1"
, "elm-get" =: "0.1.2"
]
]
main :: IO ()
main =
do args <- getArgs
case args of
[version] | Map.member version configs ->
let artifactDirectory = "Elm-Platform" </> version
repos = configs Map.! version
in
makeRepos artifactDirectory version repos
_ ->
do hPutStrLn stderr $
"Expecting one of the following values as an argument:\n" ++
" " ++ List.intercalate ", " (Map.keys configs)
exitFailure
makeRepos :: FilePath -> String -> [(String, String)] -> IO ()
makeRepos artifactDirectory version repos =
do createDirectoryIfMissing True artifactDirectory
setCurrentDirectory artifactDirectory
root <- getCurrentDirectory
mapM_ (uncurry (makeRepo root)) repos
cabal [ "update" ]
-- create a sandbox for installation
cabal [ "sandbox", "init" ]
-- add each of the sub-directories as a sandbox source
cabal ([ "sandbox", "add-source" ] ++ map fst repos)
-- install all of the packages together in order to resolve transitive dependencies robustly
-- (install the dependencies a bit more quietly than the elm packages)
cabal ([ "install", "-j", "--only-dependencies", "--ghc-options=\"-w\"" ] ++ (if version <= "0.15.1" then [ "--constraint=fsnotify<0.2" ] else []) ++ map fst repos)
cabal ([ "install", "-j", "--ghc-options=\"-XFlexibleContexts\"" ] ++ filter (/= "elm-reactor") (map fst repos))
-- elm-reactor needs to be installed last because of a post-build dependency on elm-make
cabal [ "install", "-j", "elm-reactor" ]
return ()
makeRepo :: FilePath -> String -> String -> IO ()
makeRepo root projectName version =
do -- get the right version of the repo
git [ "clone", "https://github.com/elm-lang/" ++ projectName ++ ".git" ]
setCurrentDirectory projectName
git [ "checkout", version, "--quiet" ]
-- move back into the root
setCurrentDirectory root
-- HELPER FUNCTIONS
cabal :: [String] -> IO ExitCode
cabal = rawSystem "cabal"
git :: [String] -> IO ExitCode
git = rawSystem "git"
|
ryansb/elm-dev-containers
|
BuildFromSource.hs
|
mit
| 5,548
| 0
| 16
| 1,682
| 964
| 524
| 440
| 98
| 2
|
module GhostLang.ParserProps
( ghostModuleDefP
, moduleDeclP
, importDeclP
, valueRefP
, timeUnitRefP
, payloadRefP
, paceRefP
, methodP
, contentP
, intrinsicCommandP
, patternP
, procedureP
, operationP
) where
import GhostLang.Compiler.Grammar ( ghostModuleDef
, moduleDecl
, importDecl
, valueRef
, timeUnitRef
, payloadRef
, paceRef
, method
, content
, intrinsicCommand
, pattern
, procedure
, operation
)
import GhostLang.Interpreter (IntrinsicSet)
import GhostLang.Types ( GhostModule
, ModuleDecl
, ImportDecl
, Value
, TimeUnit
, Payload
, Pace
, Method
, Content
, Pattern
, Procedure
, Operation
)
import GhostLang.Stringify (Stringify (..))
import Text.Parsec (parse)
import Text.Parsec.String (Parser)
-- | Property to test the top-level ghostModuleDef parser.
ghostModuleDefP :: GhostModule IntrinsicSet -> Bool
ghostModuleDefP = prop ghostModuleDef
-- | Property to test the moduleDecl parser.
moduleDeclP :: ModuleDecl -> Bool
moduleDeclP = prop moduleDecl
-- | Property to test the importDecl parser.
importDeclP :: ImportDecl -> Bool
importDeclP = prop importDecl
-- | Property to test the valueRef parser.
valueRefP :: Value -> Bool
valueRefP = prop valueRef
-- | Property to test the timeUnitRef parser.
timeUnitRefP :: TimeUnit -> Bool
timeUnitRefP = prop timeUnitRef
-- | Property to test the payloadRef parser.
payloadRefP :: Payload -> Bool
payloadRefP = prop payloadRef
-- | Property to test the paceRef parser.
paceRefP :: Pace -> Bool
paceRefP = prop paceRef
-- | Property to test the method parser.
methodP :: Method -> Bool
methodP = prop method
-- | Property to test the content parser.
contentP :: Content -> Bool
contentP = prop content
-- | Property to test the intrinsicCommand parser.
intrinsicCommandP :: IntrinsicSet -> Bool
intrinsicCommandP = prop intrinsicCommand
-- | Property to test the pattern parser.
patternP :: Pattern IntrinsicSet -> Bool
patternP = prop pattern
-- | Property to test the procedure parser.
procedureP :: Procedure IntrinsicSet -> Bool
procedureP = prop procedure
-- | Property to test the operation parser.
operationP :: Operation IntrinsicSet -> Bool
operationP = prop operation
prop :: (Eq a, Stringify a) => Parser a -> a -> Bool
prop p x =
case parse p "" (stringify x) of
Right x' -> x == x'
_ -> False
|
kosmoskatten/ghost-lang
|
ghost-lang/test/GhostLang/ParserProps.hs
|
mit
| 3,088
| 2
| 8
| 1,172
| 506
| 290
| 216
| 74
| 2
|
module Handler.ListMeals where
import Import
import Handler.Common
import Handler.Cache
getListMealsR :: Handler Html
getListMealsR = do
-- ip <- getIp
-- putStrLn $ pack "Client list request from IP: " ++ ip
menuList <- lift $ getData
canVoteList <- filterM (\menuItem -> do
canVote <- validateVoterIp $ Handler.Cache.id menuItem
return canVote
) menuList
let canVoteIdList = fmap (\k -> Handler.Cache.id k) canVoteList
defaultLayout $ do
$(widgetFile "list")
|
sramekj/lunchvote
|
Handler/ListMeals.hs
|
mit
| 611
| 0
| 16
| 217
| 133
| 67
| 66
| -1
| -1
|
module Chess.Internal.Notation (parseMove, parseCoordinateNotation, parseCoordinateStringWithPromotion) where
import Chess.Internal.Move
import Chess.Internal.Board
import Chess.Internal.Piece
import Data.List
import Data.Char
import Data.Attoparsec.Text
import qualified Data.Text as T
import Control.Applicative
parseMove :: GameState -> String -> Maybe Move
parseMove = parseCoordinateNotation
parseCoordinateNotation :: GameState -> String -> Maybe Move
parseCoordinateNotation game moveString = case parseOnly parseCoordinateStringWithPromotion (T.pack moveString) of
Left _ -> Nothing
Right (start, end, promotion) -> findMoveForCoordinates game start end promotion
findMoveForCoordinates :: GameState -> Coordinates -> Coordinates -> Maybe PieceType -> Maybe Move
findMoveForCoordinates game start end Nothing | length moves == 1 = Just $ head moves
where moves = findMovesMatchingCoordinates game start end
findMoveForCoordinates game start end (Just promotion) = findPromotionMove start end promotion moves
where moves = findMovesMatchingCoordinates game start end
findMoveForCoordinates _ _ _ _ = Nothing
findPromotionMove :: Coordinates -> Coordinates -> PieceType -> [Move] -> Maybe Move
findPromotionMove start end promotion = find matchPromotionMove
where matchPromotionMove (Promotion _ s e p) = s == start && e == end && p == promotion
matchPromotionMove _ = False
findMovesMatchingCoordinates :: GameState -> Coordinates -> Coordinates -> [Move]
findMovesMatchingCoordinates game start end = filter (coordinatesMatch start end) allMoves
where allMoves = generateAllMoves game
coordinatesMatch :: Coordinates -> Coordinates -> Move -> Bool
coordinatesMatch start end (Movement _ from to) = from == start && to == end
coordinatesMatch start end (Capture _ from to) = from == start && to == end
coordinatesMatch start end (EnPassant _ from to) = from == start && to == end
coordinatesMatch start end (PawnDoubleMove _ from to) = from == start && to == end
coordinatesMatch start end (Promotion _ from to _) = from == start && to == end
coordinatesMatch start end (Castling White Short) = start == (7, 4) && end == (7, 6)
coordinatesMatch start end (Castling White Long) = start == (7, 4) && end == (7, 2)
coordinatesMatch start end (Castling Black Short) = start == (0, 4) && end == (0, 6)
coordinatesMatch start end (Castling Black Long) = start == (0, 4) && end == (0, 2)
parseCoordinateStringWithPromotion :: Parser (Coordinates, Coordinates, Maybe PieceType)
parseCoordinateStringWithPromotion = do (coord1, coord2) <- parseCoordinateString
promotion <- parsePromotion
endOfInput
return (coord1, coord2, promotion)
parseCoordinateString :: Parser (Coordinates, Coordinates)
parseCoordinateString = do coord1 <- parseCoordinates
_ <- char '-'
coord2 <- parseCoordinates
return (coord1, coord2)
parseCoordinates :: Parser Coordinates
parseCoordinates = do column <- letter
row <- digit
case parseCoordinate [toLower column, row] of
Just coordinates -> return coordinates
Nothing -> fail "Could not parse coordinate"
parsePromotion :: Parser (Maybe PieceType)
parsePromotion = (Just <$> parsePromotionEqualSign) <|> (Just <$> parsePromotionParenthesis) <|> return Nothing
parsePromotionEqualSign :: Parser PieceType
parsePromotionEqualSign = do _ <- char '='
promotionChar <- satisfy (`elem` "NBRQ")
case parsePieceType promotionChar of
Just piece -> return piece
Nothing -> fail "Invalid promotion piecetype"
parsePromotionParenthesis :: Parser PieceType
parsePromotionParenthesis = do _ <- char '('
promotionChar <- satisfy (`elem` "NBRQ")
_ <- char ')'
case parsePieceType promotionChar of
Just piece -> return piece
Nothing -> fail "Invalid promotion piecetype"
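-- Usage sketch (added for illustration; not part of the original module):
-- the promotion suffix parser accepts both the "=Q" and the "(Q)" styles
-- and yields Nothing when no promotion suffix is present.
examplePromotionParses :: [Either String (Maybe PieceType)]
examplePromotionParses =
    [ parseOnly parsePromotion (T.pack "=Q")
    , parseOnly parsePromotion (T.pack "(Q)")
    , parseOnly parsePromotion T.empty
    ]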
|
nablaa/hchesslib
|
src/Chess/Internal/Notation.hs
|
gpl-2.0
| 4,440
| 0
| 11
| 1,260
| 1,188
| 605
| 583
| 69
| 2
|
{- |
Module : $Header$
Description : abstract syntax of CASL architectural specifications
Copyright : (c) Klaus Luettich, Uni Bremen 2002-2006
License : GPLv2 or higher, see LICENSE.txt
Maintainer : till@informatik.uni-bremen.de
Stability : provisional
Portability : non-portable(imports Syntax.AS_Structured)
Abstract syntax of (Het)CASL architectural specifications
Follows Sect. II:2.2.4 of the CASL Reference Manual.
-}
module Syntax.AS_Architecture where
-- DrIFT command:
{-! global: GetRange !-}
import Common.Id
import Common.IRI
import Common.AS_Annotation
import Syntax.AS_Structured
-- for arch-spec-defn and unit-spec-defn see AS_Library
data ARCH_SPEC = Basic_arch_spec [Annoted UNIT_DECL_DEFN]
(Annoted UNIT_EXPRESSION) Range
-- pos: "unit","result"
| Arch_spec_name ARCH_SPEC_NAME
| Group_arch_spec (Annoted ARCH_SPEC) Range
-- pos: "{","}"
deriving (Show)
data UNIT_DECL_DEFN = Unit_decl UNIT_NAME REF_SPEC [Annoted UNIT_TERM] Range
-- pos: ":", opt ("given"; Annoted holds pos of commas)
| Unit_defn UNIT_NAME UNIT_EXPRESSION Range
-- pos: "="
deriving (Show)
data UNIT_SPEC = Unit_type [Annoted SPEC] (Annoted SPEC) Range
-- pos: opt "*"s , "->"
| Spec_name SPEC_NAME
| Closed_unit_spec UNIT_SPEC Range
-- pos: "closed"
deriving (Show)
data REF_SPEC = Unit_spec UNIT_SPEC
| Refinement Bool UNIT_SPEC [G_mapping] REF_SPEC Range
-- false means "behaviourally"
| Arch_unit_spec (Annoted ARCH_SPEC) Range
-- pos: "arch","spec"
-- The ARCH_SPEC has to be surrounded with braces and
-- after the opening brace is a [Annotation] allowed
| Compose_ref [REF_SPEC] Range
-- pos: "then"
| Component_ref [UNIT_REF] Range
-- pos "{", commas and "}"
deriving (Show)
data UNIT_REF = Unit_ref UNIT_NAME REF_SPEC Range
-- pos: ":"
deriving (Show)
data UNIT_EXPRESSION = Unit_expression [UNIT_BINDING] (Annoted UNIT_TERM) Range
-- pos: opt "lambda",semi colons, "."
deriving (Show)
data UNIT_BINDING = Unit_binding UNIT_NAME UNIT_SPEC Range
-- pos: ":"
deriving (Show)
data UNIT_TERM = Unit_reduction (Annoted UNIT_TERM) RESTRICTION
| Unit_translation (Annoted UNIT_TERM) RENAMING
| Amalgamation [Annoted UNIT_TERM] Range
-- pos: "and"s
| Local_unit [Annoted UNIT_DECL_DEFN] (Annoted UNIT_TERM) Range
-- pos: "local", "within"
| Unit_appl UNIT_NAME [FIT_ARG_UNIT] Range
-- pos: many of "[","]"
| Group_unit_term (Annoted UNIT_TERM) Range
-- pos: "{","}"
deriving (Show)
data FIT_ARG_UNIT = Fit_arg_unit (Annoted UNIT_TERM) [G_mapping] Range
-- pos: opt "fit"
deriving (Show)
type ARCH_SPEC_NAME = IRI
type UNIT_NAME = IRI
-- Generated by DrIFT, look but don't touch!
instance GetRange ARCH_SPEC where
getRange x = case x of
Basic_arch_spec _ _ p -> p
Arch_spec_name _ -> nullRange
Group_arch_spec _ p -> p
rangeSpan x = case x of
Basic_arch_spec a b c -> joinRanges [rangeSpan a, rangeSpan b,
rangeSpan c]
Arch_spec_name a -> joinRanges [rangeSpan a]
Group_arch_spec a b -> joinRanges [rangeSpan a, rangeSpan b]
instance GetRange UNIT_DECL_DEFN where
getRange x = case x of
Unit_decl _ _ _ p -> p
Unit_defn _ _ p -> p
rangeSpan x = case x of
Unit_decl a b c d -> joinRanges [rangeSpan a, rangeSpan b,
rangeSpan c, rangeSpan d]
Unit_defn a b c -> joinRanges [rangeSpan a, rangeSpan b,
rangeSpan c]
instance GetRange UNIT_SPEC where
getRange x = case x of
Unit_type _ _ p -> p
Spec_name _ -> nullRange
Closed_unit_spec _ p -> p
rangeSpan x = case x of
Unit_type a b c -> joinRanges [rangeSpan a, rangeSpan b,
rangeSpan c]
Spec_name a -> joinRanges [rangeSpan a]
Closed_unit_spec a b -> joinRanges [rangeSpan a, rangeSpan b]
instance GetRange REF_SPEC where
getRange x = case x of
Unit_spec _ -> nullRange
Refinement _ _ _ _ p -> p
Arch_unit_spec _ p -> p
Compose_ref _ p -> p
Component_ref _ p -> p
rangeSpan x = case x of
Unit_spec a -> joinRanges [rangeSpan a]
Refinement a b c d e -> joinRanges [rangeSpan a, rangeSpan b,
rangeSpan c, rangeSpan d, rangeSpan e]
Arch_unit_spec a b -> joinRanges [rangeSpan a, rangeSpan b]
Compose_ref a b -> joinRanges [rangeSpan a, rangeSpan b]
Component_ref a b -> joinRanges [rangeSpan a, rangeSpan b]
instance GetRange UNIT_REF where
getRange x = case x of
Unit_ref _ _ p -> p
rangeSpan x = case x of
Unit_ref a b c -> joinRanges [rangeSpan a, rangeSpan b,
rangeSpan c]
instance GetRange UNIT_EXPRESSION where
getRange x = case x of
Unit_expression _ _ p -> p
rangeSpan x = case x of
Unit_expression a b c -> joinRanges [rangeSpan a, rangeSpan b,
rangeSpan c]
instance GetRange UNIT_BINDING where
getRange x = case x of
Unit_binding _ _ p -> p
rangeSpan x = case x of
Unit_binding a b c -> joinRanges [rangeSpan a, rangeSpan b,
rangeSpan c]
instance GetRange UNIT_TERM where
getRange x = case x of
Unit_reduction _ _ -> nullRange
Unit_translation _ _ -> nullRange
Amalgamation _ p -> p
Local_unit _ _ p -> p
Unit_appl _ _ p -> p
Group_unit_term _ p -> p
rangeSpan x = case x of
Unit_reduction a b -> joinRanges [rangeSpan a, rangeSpan b]
Unit_translation a b -> joinRanges [rangeSpan a, rangeSpan b]
Amalgamation a b -> joinRanges [rangeSpan a, rangeSpan b]
Local_unit a b c -> joinRanges [rangeSpan a, rangeSpan b,
rangeSpan c]
Unit_appl a b c -> joinRanges [rangeSpan a, rangeSpan b,
rangeSpan c]
Group_unit_term a b -> joinRanges [rangeSpan a, rangeSpan b]
instance GetRange FIT_ARG_UNIT where
getRange x = case x of
Fit_arg_unit _ _ p -> p
rangeSpan x = case x of
Fit_arg_unit a b c -> joinRanges [rangeSpan a, rangeSpan b,
rangeSpan c]
|
nevrenato/Hets_Fork
|
Syntax/AS_Architecture.hs
|
gpl-2.0
| 6,833
| 0
| 11
| 2,307
| 1,702
| 853
| 849
| 124
| 0
|
{-# LANGUAGE MultiParamTypeClasses #-}
module RSA.Quiz
( make
, Param (..)
, Config (..)
)
where
-- $Id$
import RSA.Param
import RSA.Break hiding ( make )
import RSA.Break.Data
import Faktor.Prim
import Faktor.Certify ( powmod )
import Autolib.Util.Zufall
-- import Autolib.Util.Wort
import Autolib.Util.Seed
import Util.Datei
import Inter.Types
import Inter.Quiz hiding ( make )
import Data.List (nub )
roll p = do
let ps = dropWhile ( < fromIntegral ( von p ) )
$ takeWhile ( < fromIntegral ( bis p ) )
$ primes ( 100 :: Integer )
[p, q] <- someDifferentIO ps 2
let n = p * q
let phi = pred p * pred q
d <- coprime phi
x <- coprime n
return $ Config
{ public_key = ( d, n )
, message = powmod x d n
}
-- | don't use for large n (stupid implementation)
someDifferentIO :: Eq a => [a] -> Int -> IO [a]
someDifferentIO xs n = someIO xs n
`repeat_until` all_different
all_different :: Eq a => [a] -> Bool
all_different xs = length xs == length (nub xs)
coprime :: Integer -> IO Integer
coprime n = randomRIO (1, n-1)
`repeat_until` \ x -> 1 == gcd x n
instance Generator RSA_Code_Break Param Config where
generator _ p key = roll p
instance Project RSA_Code_Break Config Config where
project _ = id
make :: Make
make = quiz RSA_Code_Break RSA.Param.example
|
Erdwolf/autotool-bonn
|
src/RSA/Quiz.hs
|
gpl-2.0
| 1,347
| 6
| 14
| 325
| 497
| 264
| 233
| 42
| 1
|
{-# LANGUAGE ExtendedDefaultRules, QuasiQuotes #-}
module Nirum.Cli (main, writeFiles) where
import Control.Concurrent (threadDelay)
import Control.Monad (forM_, forever, when)
import GHC.Exts (IsList (toList))
import System.IO
import qualified Data.ByteString as B
import qualified Data.Map.Strict as M
import qualified Data.Set as S
import qualified Data.Text as T
import Control.Concurrent.STM
import Data.Monoid ((<>))
import qualified Options.Applicative as OPT
import System.Directory (createDirectoryIfMissing)
import System.Exit (die)
import System.FilePath (takeDirectory, takeExtension, (</>))
import System.FSNotify
import Text.InterpolatedString.Perl6 (qq)
import Text.Megaparsec.Error (errorPos, parseErrorPretty)
import Text.Megaparsec.Pos (SourcePos (sourceLine, sourceColumn), unPos)
import Nirum.Constructs (Construct (toCode))
import Nirum.Constructs.Identifier (toText)
import Nirum.Constructs.ModulePath (ModulePath)
import Nirum.Package ( PackageError ( ImportError
, MetadataError
, ParseError
, ScanError
)
, ParseError
, scanModules
)
import Nirum.Package.ModuleSet ( ImportError ( CircularImportError
, MissingImportError
, MissingModulePathError
)
)
import Nirum.Targets ( BuildError (CompileError, PackageError, TargetNameError)
, BuildResult
, buildPackage
, targetNames
)
import Nirum.Version (versionString)
type TFlag = TVar Bool
type Nanosecond = Int
data Opts = Opts { outDirectory :: !String
, targetOption :: !String
, watch :: !Bool
, packageDirectory :: !String
}
data AppOptions = AppOptions { outputPath :: FilePath
, packagePath :: FilePath
, targetLanguage :: T.Text
, watching :: Bool
, building :: TFlag
, changed :: TFlag
}
debounceDelay :: Nanosecond
debounceDelay = 1 * 1000 * 1000
parseErrortoPrettyMessage :: ParseError -> FilePath -> IO String
parseErrortoPrettyMessage parseError' filePath' = do
sourceCode <- readFile filePath'
let sourceLines = lines sourceCode
sl = if length sourceLines < errorLine then ""
else sourceLines !! (errorLine - 1)
return [qq|
{parseErrorPretty $ parseError'}
$sl
{arrow}
|]
where
error' :: SourcePos
error' = head $ toList $ errorPos parseError'
errorLine :: Int
errorLine = fromEnum $ unPos $ sourceLine error'
errorColumn :: Int
errorColumn = fromEnum $ unPos $ sourceColumn error'
arrow :: T.Text
arrow = T.snoc (T.concat (replicate (errorColumn - 1) (T.pack " "))) '^'
toModuleNameText :: T.Text -> T.Text
toModuleNameText t = [qq|'{t}'|]
modulePathToRepr :: ModulePath -> T.Text
modulePathToRepr = toModuleNameText . toCode
importErrorToPrettyMessage :: ImportError -> T.Text
importErrorToPrettyMessage (CircularImportError modulePaths) =
[qq|Circular import detected in following orders: $order|]
where
circularModulesText :: [ModulePath] -> [T.Text]
circularModulesText = map modulePathToRepr
order :: T.Text
order = T.intercalate " > " $ circularModulesText modulePaths
importErrorToPrettyMessage (MissingModulePathError path path') =
[qq|No module named $dataName in $moduleName|]
where
moduleName :: T.Text
moduleName = modulePathToRepr path
dataName :: T.Text
dataName = modulePathToRepr path'
importErrorToPrettyMessage (MissingImportError path path' identifier) =
[qq|Cannot import $importText from $attrText in $foundText|]
where
importText :: T.Text
importText = (toModuleNameText . toText) identifier
foundText :: T.Text
foundText = modulePathToRepr path
attrText :: T.Text
attrText = modulePathToRepr path'
importErrorsToMessageList :: S.Set ImportError -> [T.Text]
importErrorsToMessageList importErrors =
S.toList $ S.map importErrorToPrettyMessage importErrors
importErrorsToPrettyMessage :: S.Set ImportError -> String
importErrorsToPrettyMessage importErrors =
T.unpack $ T.intercalate "\n" withListStyleText
where
withListStyleText :: [T.Text]
withListStyleText =
map (T.append "- ") (importErrorsToMessageList importErrors)
targetNamesText :: T.Text
targetNamesText = T.intercalate ", " $ S.toAscList targetNames
build :: AppOptions -> IO ()
build options@AppOptions { packagePath = src
, outputPath = outDir
, targetLanguage = target
} = do
result <- buildPackage target src
case result of
Left (TargetNameError targetName') ->
tryDie' [qq|Couldn't find "$targetName'" target.
Available targets: $targetNamesText|]
Left (PackageError (ParseError modulePath error')) -> do
{- FIXME: find more efficient way to determine filename from
the given module path -}
filePaths <- scanModules src
case M.lookup modulePath filePaths of
Just filePath' -> do
m <- parseErrortoPrettyMessage error' filePath'
tryDie' m
Nothing -> tryDie' [qq|$modulePath not found|]
Left (PackageError (ImportError importErrors)) ->
tryDie' [qq|Import error:
{importErrorsToPrettyMessage importErrors}
|]
Left (PackageError (ScanError _ error')) ->
tryDie' [qq|Scan error: $error'|]
Left (PackageError (MetadataError error')) ->
tryDie' [qq|Metadata error: $error'|]
Left (CompileError errors) ->
forM_ (M.toList errors) $ \ (filePath, compileError) ->
tryDie' [qq|error: $filePath: $compileError|]
Right buildResult -> writeFiles outDir buildResult
where
tryDie' = tryDie options
writeFiles :: FilePath -> BuildResult -> IO ()
writeFiles outDir files =
forM_ (M.toAscList files) $ \ (filePath, code) -> do
let outPath = outDir </> filePath
createDirectoryIfMissing True $ takeDirectory outPath
putStrLn outPath
B.writeFile outPath code
onFileChanged :: AppOptions -> Event -> IO ()
onFileChanged
options@AppOptions { building = building'
, changed = changed'
}
event
| takeExtension path == ".nrm" = do
atomically $ writeTVar changed' True
buildable <- atomically $ do
b <- readTVar building'
writeTVar building' True
return $ not b
when buildable $ do
threadDelay debounceDelay
reactiveBuild options
| otherwise = return ()
where
path :: FilePath
path = eventPath event
reactiveBuild :: AppOptions -> IO ()
reactiveBuild options@AppOptions { building = building'
, changed = changed'
} = do
changed'' <- readTVarIO changed'
when changed'' $ do
atomically $ writeTVar changed' False
build options
atomically $ writeTVar building' False
changedDuringBuild <- readTVarIO changed'
when changedDuringBuild $ reactiveBuild options
tryDie :: AppOptions -> String -> IO ()
tryDie AppOptions { watching = watching' } errorMessage
| watching' = hPutStrLn stderr errorMessage
| otherwise = die errorMessage
main :: IO ()
main =
withManager $ \ mgr -> do
opts <- OPT.execParser optsParser
building' <- atomically $ newTVar False
changed' <- atomically $ newTVar True
let watch' = watch opts
packagePath' = packageDirectory opts
options = AppOptions
{ outputPath = outDirectory opts
, packagePath = packagePath'
, targetLanguage = T.pack $ targetOption opts
, watching = watch'
, building = building'
, changed = changed'
}
when watch' $ do
_ <- watchDir mgr packagePath' (const True) (onFileChanged options)
return ()
reactiveBuild options
-- sleep forever (until interrupted)
when watch' $ forever $ threadDelay 1000000
where
    -- CHECK: When the CLI options change, update the CLI examples in the docs
    --        and in the README.md file.
optsParser :: OPT.ParserInfo Opts
optsParser =
OPT.info
(OPT.helper <*> versionOption <*> programOptions)
(OPT.fullDesc <>
OPT.progDesc ("Nirum compiler " ++ versionString) <>
OPT.header header)
header :: String
header = "Nirum: The IDL compiler and RPC/distributed object framework"
versionOption :: OPT.Parser (Opts -> Opts)
versionOption = OPT.infoOption
versionString (OPT.long "version" <>
OPT.short 'v' <> OPT.help "Show version")
programOptions :: OPT.Parser Opts
programOptions =
Opts <$> OPT.strOption
(OPT.long "output-dir" <> OPT.short 'o' <> OPT.metavar "DIR" <>
OPT.help "Output directory") <*>
OPT.strOption
(OPT.long "target" <> OPT.short 't' <> OPT.metavar "TARGET" <>
OPT.help [qq|Target language name.
Available: $targetNamesText|]) <*>
OPT.switch
(OPT.long "watch" <> OPT.short 'w' <>
OPT.help "Watch files for change and rebuild") <*>
OPT.strArgument
(OPT.metavar "DIR" <> OPT.help "Package directory")
|
spoqa/nirum
|
src/Nirum/Cli.hs
|
gpl-3.0
| 9,901
| 0
| 18
| 3,022
| 2,310
| 1,222
| 1,088
| 223
| 8
|
{-# LANGUAGE MultiParamTypeClasses #-}
module Message (
MessagePayload(..), RoutedMessage(..),
MessageSource, MessageSink, MessageIO,
Message(..), Routable(..),
-- * Message IDs
MessageId, MessageIdGen, mkMessageIdGen, nextMessageId
) where
import Control.Applicative ( (<$>), (<*>) )
import Control.Concurrent ( forkIO )
import Control.Concurrent.STM
import Control.Monad ( void )
import qualified Crypto.Random.AESCtr as AESRNG
import Data.Binary
import Data.Conduit
import System.Random ( random )
import qualified Freenet.Chk as FN
import qualified Freenet.Ssk as FN
import Types
-- |
-- A source of messages, which usually would be another node
-- talking to us.
type MessageSource a = Source IO (Message a)
-- |
-- A sink for outgoing messages to another node.
type MessageSink a = Sink (Message a) IO ()
-- |
-- A (source, sink) pair of messages, suitable for talking to a node.
type MessageIO a = (MessageSource a, MessageSink a)
-------------------------------------------------------------------------------------
-- Message IDs
-------------------------------------------------------------------------------------
type MessageId = Word64
newtype MessageIdGen = MessageIdGen { unMessageIdGen :: TBQueue Word64 }
mkMessageIdGen :: IO MessageIdGen
mkMessageIdGen = do
q <- newTBQueueIO 64
rng <- AESRNG.makeSystem
let makeId r = let (next, r') = random r
in do
atomically $ writeTBQueue q next
makeId r'
void $ forkIO $ makeId rng
return $ MessageIdGen q
nextMessageId :: MessageIdGen -> STM MessageId
nextMessageId = readTBQueue . unMessageIdGen
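-- Usage sketch (added for illustration; not part of the original module):
-- draw one fresh id for an outgoing message from a newly created generator.
exampleFreshId :: IO MessageId
exampleFreshId = do
  gen <- mkMessageIdGen
  atomically $ nextMessageId gen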
-------------------------------------------------------------------------------------
-- Message Payload
-------------------------------------------------------------------------------------
-- |
-- Messages are parametrised over the type of Peer addresses used, which could
-- be either hostnames or message queues for simulations.
data MessagePayload a
= Hello (NodeInfo a)
| Ping
| GetPeerList -- ^ request for getting some peers which we might connect to
| PeerList [NodeInfo a] -- ^ response to @GetPeers@ request
| FreenetChkRequest FN.ChkRequest
| FreenetChkBlock FN.ChkBlock
| FreenetSskRequest FN.SskRequest
| FreenetSskBlock FN.SskBlock
| Bye String
    | Failed (Maybe String)      -- ^ failure, with an optional reason
deriving ( Show )
instance (Binary a) => Binary (MessagePayload a) where
put (Hello peer) = putHeader 1 >> put peer
put Ping = putHeader 2
put GetPeerList = putHeader 3
put (PeerList ps) = putHeader 4 >> put ps
put (FreenetChkRequest dr) = putHeader 5 >> put dr
put (FreenetChkBlock blk) = putHeader 6 >> put blk
put (FreenetSskRequest dr) = putHeader 7 >> put dr
put (FreenetSskBlock blk) = putHeader 8 >> put blk
put (Bye msg) = putHeader 9 >> put msg
put (Failed reason) = putHeader 10 >> put reason
get = do
t <- getWord8
case t of
1 -> Hello <$> get
2 -> return Ping
3 -> return GetPeerList
4 -> PeerList <$> get
5 -> FreenetChkRequest <$> get
6 -> FreenetChkBlock <$> get
7 -> FreenetSskRequest <$> get
8 -> FreenetSskBlock <$> get
9 -> Bye <$> get
10 -> Failed <$> get
_ -> fail $ "unknown message type " ++ show t
-- |
-- A message exchanged with another node: either routed onwards, a response
-- to an earlier message, or sent directly to that node.
data Message a = Routed Bool (RoutedMessage a) -- ^ is this a backtrack step? and the routed message
| Response MessageId (MessagePayload a)
| Direct (MessagePayload a)
deriving (Show)
instance Binary a => Binary (Message a) where
put (Routed False msg) = putHeader 1 >> put msg
put (Routed True msg) = putHeader 2 >> put msg
put (Response mid msg) = putHeader 3 >> put mid >> put msg
put (Direct msg) = putHeader 4 >> put msg
get = do
t <- getWord8
case t of
1 -> Routed False <$> get
2 -> Routed True <$> get
3 -> Response <$> get <*> get
4 -> Direct <$> get
x -> fail $ "unknown message type " ++ show x
data RoutedMessage a = RoutedMessage
{ rmPayload :: MessagePayload a
, rmId :: MessageId
, rmMarked :: [Id]
, rmTarget :: Id
}
deriving ( Show )
instance Binary a => Binary (RoutedMessage a) where
put (RoutedMessage p mid ms tgt) = put p >> put mid >> put ms >> put tgt
get = RoutedMessage <$> get <*> get <*> get <*> get
class HasLocation l => Routable m l where
routeMarked :: m -> l -> Bool -- ^ is the location already marked?
routeMark :: m -> l -> m -- ^ mark the specified location
routeTarget :: m -> l -- ^ where should the message be routed
instance Routable (RoutedMessage a) Id where
routeTarget = rmTarget
routeMarked rm l = l `elem` (rmMarked rm)
routeMark rm l
| l `elem` (rmMarked rm) = rm
| otherwise = rm { rmMarked = (l : (rmMarked rm)) }
putHeader :: Word8 -> Put
putHeader t = put t
|
waldheinz/ads
|
src/lib/Message.hs
|
gpl-3.0
| 5,238
| 0
| 15
| 1,455
| 1,409
| 742
| 667
| 109
| 1
|
import System.Exit
import Test.HUnit
import StackMachine.Emulator
main = do
Counts _ _ errors failures <- runTestTT tests
if errors > 0 || failures > 0 then exitWith $ ExitFailure 1 else exitWith ExitSuccess
testMultibyteInstructions = TestCase (assertEqual "multi byte instructions initialization"
([Adr, Lit, Dsp, Brn, Bze])
(multiByteInstructions))
tests = TestList [TestLabel "testMultibyteInstructions" testMultibyteInstructions]
|
zebbo/stack-machine
|
test/EmulatorTests.hs
|
gpl-3.0
| 548
| 0
| 10
| 166
| 128
| 67
| 61
| 10
| 2
|
{-# LANGUAGE QuasiQuotes, OverloadedStrings #-}
module RunnerSpec (spec) where
import Test.Hspec
import Language.Mulang.Parsers.JavaScript
import Language.Mulang.Parsers.Python
import Language.Mulang.Interpreter.Runner
import Data.Text (unpack)
import NeatInterpolation (text)
parse language = language . unpack
run language code testSuite = runTests (parse language code) (parse language testSuite)
runjs = run js
runpy = run py
spec :: Spec
spec = do
describe "runTests" $ do
context "javascript" $ do
it "runs tests" $ do
let code = [text||]
let suite = [text|
it("is true", function() {
assert(true)
})
|]
runjs code suite `shouldReturn` [TestResult ["is true"] Success]
context "assert.equals" $ do
it "passes if values are equal" $ do
let code = [text||]
let suite = [text|
it("passes", function() {
assert.equals(1, 1)
})
|]
runjs code suite `shouldReturn` [TestResult ["passes"] Success]
it "fails if values are not equal" $ do
let code = [text||]
let suite = [text|
it("fails", function() {
assert.equals(1, 2)
})
|]
runjs code suite `shouldReturn` [TestResult ["fails"] (Failure "MuString \"Expected MuNumber 1.0 but got: MuNumber 2.0\"")]
it "can handle failed tests" $ do
let code = [text||]
let suite = [text|
it("fails", function() {
assert(false)
})
|]
runjs code suite `shouldReturn` [TestResult ["fails"] (Failure "MuString \"Expected True but got: False\"")]
it "can handle errored tests" $ do
let code = [text||]
let suite = [text|
it("errors", function() {
assert.equals(succ(3), 4)
})
|]
runjs code suite `shouldReturn` [TestResult ["errors"] (Failure "MuString \"Reference not found for name 'succ'\"")]
it "can reference functions defined in code" $ do
let code = [text|
function succ(n) {
return n + 1;
}
|]
let suite = [text|
it("succ increments a given number by 1", function() {
assert.equals(succ(3), 4)
})
|]
runjs code suite `shouldReturn` [TestResult ["succ increments a given number by 1"] Success]
it "accepts describes" $ do
let code = [text|
function succ(n) {
return n + 1;
}
|]
let suite = [text|
describe("succ", function () {
it("increments a given number by 1", function() {
assert.equals(succ(3), 4)
})
})
|]
runjs code suite `shouldReturn` [TestResult ["succ", "increments a given number by 1"] Success]
it "accepts multiple test cases" $ do
let code = [text|
function succ(n) {
return n + 1;
}
|]
let suite = [text|
it("if I pass a 3 to succ it returns 4", function() {
assert.equals(succ(3), 4)
})
it("if I pass a 10 to succ it returns 11", function() {
assert.equals(succ(10), 11)
})
|]
runjs code suite `shouldReturn` [(TestResult ["if I pass a 3 to succ it returns 4"] Success),
(TestResult ["if I pass a 10 to succ it returns 11"] Success)]
context "python" $ do
it "runs tests" $ do
let code = [text||]
let suite = [text|
class TestPython(unittest.TestCase):
def test_is_true():
self.assertTrue(True)
|]
runpy code suite `shouldReturn` [TestResult ["TestPython", "test_is_true"] Success]
context "assert.equals" $ do
it "passes if values are equal" $ do
let code = [text||]
let suite = [text|
class TestPython(unittest.TestCase):
def test_passes():
self.assertEqual(1, 1)
|]
runpy code suite `shouldReturn` [(TestResult ["TestPython", "test_passes"] Success)]
it "fails if values are not equal" $ do
let code = [text||]
let suite = [text|
class TestPython(unittest.TestCase):
def test_fails():
self.assertEqual(1, 2)
|]
runpy code suite `shouldReturn` [TestResult ["TestPython", "test_fails"] (Failure "MuString \"Expected MuNumber 1.0 but got: MuNumber 2.0\"")]
it "can handle failed tests" $ do
let code = [text||]
let suite = [text|
class TestPython(unittest.TestCase):
def test_fails():
self.assertTrue(False)
|]
runpy code suite `shouldReturn` [TestResult ["TestPython", "test_fails"] (Failure "MuString \"Expected True but got: False\"")]
it "can handle errored tests" $ do
let code = [text||]
let suite = [text|
class TestPython(unittest.TestCase):
def test_errors():
self.assertEqual(succ(3), 4)
|]
runpy code suite `shouldReturn` [TestResult ["TestPython", "test_errors"] (Failure "MuString \"Reference not found for name 'succ'\"")]
it "can reference functions defined in code" $ do
let code = [text|
def succ(n):
return n + 1
|]
let suite = [text|
class TestPython(unittest.TestCase):
def test_succ_increments_a_given_numer_by_1():
self.assertEqual(succ(3), 4)
|]
runpy code suite `shouldReturn` [TestResult ["TestPython", "test_succ_increments_a_given_numer_by_1"] Success]
it "accepts multiple test cases" $ do
let code = [text|
def succ(n):
return n + 1
|]
let suite = [text|
class TestSucc(unittest.TestCase):
def test_if_I_pass_a_3_to_succ_it_returns_4():
self.assertEqual(succ(3), 4)
def test_if_I_pass_a_10_to_succ_it_returns_11():
self.assertEqual(succ(10), 11)
|]
runpy code suite `shouldReturn` [(TestResult ["TestSucc", "test_if_I_pass_a_3_to_succ_it_returns_4"] Success),
(TestResult ["TestSucc", "test_if_I_pass_a_10_to_succ_it_returns_11"] Success)]
|
mumuki/mulang
|
spec/RunnerSpec.hs
|
gpl-3.0
| 6,516
| 0
| 23
| 2,275
| 1,190
| 645
| 545
| 81
| 1
|
-- | Returns a pair of consecutive Fibonacci numbers (a, b) such that
-- (a*b) is equal to the input, paired with True; otherwise returns the
-- first pair of consecutive Fibonacci numbers whose product exceeds
-- the input, paired with False.
productFib :: Integer -> (Integer, Integer, Bool)
productFib n = locationProductFibN n 2
-- i must >= 2
locationProductFibN :: Integer -> Integer -> (Integer, Integer, Bool)
locationProductFibN n i
| fib (i-1) * fib (i) == n = ( fib (i-1) , fib (i), True)
| fib (i) * fib (i+1) > n = ( fib (i) , fib (i+1) , False)
| otherwise = locationProductFibN n (i+1)
fib :: Integer -> Integer
fib n
| n == 0 = 0
| n == 1 = 1
| otherwise = fib (n - 1) + fib (n - 2)
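-- Worked examples (added for illustration; not part of the original file):
-- 714 = 21 * 34, a product of consecutive Fibonacci numbers, whereas 800 is
-- not, so the first consecutive pair whose product exceeds it is returned.
exampleProductFib :: [(Integer, Integer, Bool)]
exampleProductFib = [productFib 714, productFib 800]
-- expected: [(21, 34, True), (34, 55, False)]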
|
yannxia-self/code-war-hasekell-training
|
product_of_consecutive_fib_numbers.hs
|
gpl-3.0
| 659
| 0
| 12
| 156
| 276
| 143
| 133
| 12
| 1
|
{-# LANGUAGE OverloadedLists #-}
{-# LANGUAGE TemplateHaskell #-}
module Toy.Backend.Classify where
import AI.NN
import Data.Aeson
import Data.Aeson.TH
import Data.Text (pack)
import qualified Data.Vector as V
import Yesod.Core
data Classify = Classify PredictModelParam deriving Show
deriveJSON defaultOptions ''Classify
classify :: Classify -> Int -> Bool -> Bool -> IO Bool
classify (Classify pmp) age smile gender = do
print (age,smile,gender)
let a = fromIntegral age / 100
s = if smile then 1 else 0
g = if gender then 1 else 0
faces = [[a,g,s]]
rt <- runPredict pmp faces
print rt
return $ not $ head rt == 0
|
Qinka/reimagined-pancake
|
toy-backend/toy-backend-classify/src/nn/Toy/Backend/Classify.hs
|
gpl-3.0
| 704
| 0
| 11
| 188
| 224
| 122
| 102
| 21
| 3
|
{-# LANGUAGE TypeSynonymInstances #-}
module Database.Design.Ampersand.ADL1.Expression (
subst
,foldlMapExpression,foldrMapExpression
,primitives,isMp1, isEEps
,isPos,isNeg, deMorganERad, deMorganECps, deMorganEUni, deMorganEIsc, notCpl, isCpl
,exprIsc2list, exprUni2list, exprCps2list, exprRad2list, exprPrd2list
,insParentheses)
where
import Database.Design.Ampersand.Basics (uni)
import Database.Design.Ampersand.Core.AbstractSyntaxTree
--import Debug.Trace
-- | subst is used to replace each occurrence of a relation
--   with an expression. Every sub-expression of the form EDcD decl that
--   refers to the given declaration is replaced by the given expression.
subst :: (Declaration,Expression) -> Expression -> Expression
subst (decl,expr) = subs
where
subs (EEqu (l,r)) = EEqu (subs l,subs r)
subs (EInc (l,r)) = EInc (subs l,subs r)
subs (EIsc (l,r)) = EIsc (subs l,subs r)
subs (EUni (l,r)) = EUni (subs l,subs r)
subs (EDif (l,r)) = EDif (subs l,subs r)
subs (ELrs (l,r)) = ELrs (subs l,subs r)
subs (ERrs (l,r)) = ERrs (subs l,subs r)
subs (EDia (l,r)) = EDia (subs l,subs r)
subs (ECps (l,r)) = ECps (subs l,subs r)
subs (ERad (l,r)) = ERad (subs l,subs r)
subs (EPrd (l,r)) = EPrd (subs l,subs r)
subs (EKl0 e ) = EKl0 (subs e)
subs (EKl1 e ) = EKl1 (subs e)
subs (EFlp e ) = EFlp (subs e)
subs (ECpl e ) = ECpl (subs e)
subs (EBrk e) = EBrk (subs e)
subs e@(EDcD d ) | d==decl = expr
| otherwise = e
subs e@EDcI{} = e
subs e@EEps{} = e
subs e@EDcV{} = e
subs e@EMp1{} = e
foldlMapExpression :: (a -> r -> a) -> (Declaration->r) -> a -> Expression -> a
foldlMapExpression f = foldrMapExpression f' where f' x y = f y x
foldrMapExpression :: (r -> a -> a) -> (Declaration->r) -> a -> Expression -> a
foldrMapExpression f g a (EEqu (l,r)) = foldrMapExpression f g (foldrMapExpression f g a l) r
foldrMapExpression f g a (EInc (l,r)) = foldrMapExpression f g (foldrMapExpression f g a l) r
foldrMapExpression f g a (EIsc (l,r)) = foldrMapExpression f g (foldrMapExpression f g a l) r
foldrMapExpression f g a (EUni (l,r)) = foldrMapExpression f g (foldrMapExpression f g a l) r
foldrMapExpression f g a (EDif (l,r)) = foldrMapExpression f g (foldrMapExpression f g a l) r
foldrMapExpression f g a (ELrs (l,r)) = foldrMapExpression f g (foldrMapExpression f g a l) r
foldrMapExpression f g a (ERrs (l,r)) = foldrMapExpression f g (foldrMapExpression f g a l) r
foldrMapExpression f g a (EDia (l,r)) = foldrMapExpression f g (foldrMapExpression f g a l) r
foldrMapExpression f g a (ECps (l,r)) = foldrMapExpression f g (foldrMapExpression f g a l) r
foldrMapExpression f g a (ERad (l,r)) = foldrMapExpression f g (foldrMapExpression f g a l) r
foldrMapExpression f g a (EPrd (l,r)) = foldrMapExpression f g (foldrMapExpression f g a l) r
foldrMapExpression f g a (EKl0 e) = foldrMapExpression f g a e
foldrMapExpression f g a (EKl1 e) = foldrMapExpression f g a e
foldrMapExpression f g a (EFlp e) = foldrMapExpression f g a e
foldrMapExpression f g a (ECpl e) = foldrMapExpression f g a e
foldrMapExpression f g a (EBrk e) = foldrMapExpression f g a e
foldrMapExpression f g a (EDcD d) = f (g d) a
foldrMapExpression _ _ a EDcI{} = a
foldrMapExpression _ _ a EEps{} = a
foldrMapExpression _ _ a EDcV{} = a
foldrMapExpression _ _ a EMp1{} = a
primitives :: Expression -> [Expression]
primitives expr =
case expr of
(EEqu (l,r)) -> primitives l `uni` primitives r
(EInc (l,r)) -> primitives l `uni` primitives r
(EIsc (l,r)) -> primitives l `uni` primitives r
(EUni (l,r)) -> primitives l `uni` primitives r
(EDif (l,r)) -> primitives l `uni` primitives r
(ELrs (l,r)) -> primitives l `uni` primitives r
(ERrs (l,r)) -> primitives l `uni` primitives r
(EDia (l,r)) -> primitives l `uni` primitives r
(ECps (l,r)) -> primitives l `uni` primitives r
(ERad (l,r)) -> primitives l `uni` primitives r
(EPrd (l,r)) -> primitives l `uni` primitives r
(EKl0 e) -> primitives e
(EKl1 e) -> primitives e
(EFlp e) -> primitives e
(ECpl e) -> primitives e
(EBrk e) -> primitives e
EDcD{} -> [expr]
EDcI{} -> [expr]
EEps{} -> [] -- Since EEps is inserted for typing reasons only, we do not consider it a primitive..
EDcV{} -> [expr]
EMp1{} -> [expr]
-- | De Morgan's laws require care with respect to the complement.
-- The following functions apply De Morgan's laws to an expression correctly.
deMorganERad :: Expression -> Expression
deMorganERad (ECpl (ERad (l,r)))
= notCpl (deMorganERad l) .:. notCpl (deMorganERad r)
deMorganERad (ERad (l,r))
= notCpl (notCpl (deMorganERad l) .:. notCpl (deMorganERad r))
deMorganERad e = e
deMorganECps :: Expression -> Expression
deMorganECps (ECpl (ECps (l,r)))
= notCpl (deMorganECps l) .!. notCpl (deMorganECps r)
deMorganECps (ECps (l,r))
= notCpl (notCpl (deMorganECps l) .!. notCpl (deMorganECps r))
deMorganECps e = e
deMorganEUni :: Expression -> Expression
deMorganEUni (ECpl (EUni (l,r)))
= notCpl (deMorganEUni l) ./\. notCpl (deMorganEUni r)
deMorganEUni (EUni (l,r))
= notCpl (notCpl (deMorganEUni l) ./\. notCpl (deMorganEUni r))
deMorganEUni e = e
deMorganEIsc :: Expression -> Expression
deMorganEIsc (ECpl (EIsc (l,r)))
= notCpl (deMorganEIsc l) .\/. notCpl (deMorganEIsc r)
deMorganEIsc (EIsc (l,r))
= notCpl (notCpl (deMorganEIsc l) .\/. notCpl (deMorganEIsc r))
deMorganEIsc e = e
notCpl :: Expression -> Expression
notCpl (ECpl e) = e
notCpl e = ECpl e
isCpl :: Expression -> Bool
isCpl (ECpl{}) = True
isCpl _ = False
isPos :: Expression -> Bool
isPos = not . isNeg
isNeg :: Expression -> Bool
isNeg = isCpl
isMp1 :: Expression -> Bool
isMp1 EMp1{} = True
isMp1 _ = False
isEEps :: Expression -> Bool
isEEps EEps{} = True
isEEps _ = False
exprIsc2list, exprUni2list, exprCps2list, exprRad2list, exprPrd2list :: Expression -> [Expression]
exprIsc2list (EIsc (l,r)) = exprIsc2list l++exprIsc2list r
exprIsc2list r = [r]
exprUni2list (EUni (l,r)) = exprUni2list l++exprUni2list r
exprUni2list r = [r]
exprCps2list (ECps (l,r)) = exprCps2list l++exprCps2list r
exprCps2list r = [r]
exprRad2list (ERad (l,r)) = exprRad2list l++exprRad2list r
exprRad2list r = [r]
exprPrd2list (EPrd (l,r)) = exprPrd2list l++exprPrd2list r
exprPrd2list r = [r]
insParentheses :: Expression -> Expression
insParentheses = insPar 0
where
wrap :: Integer -> Integer -> Expression -> Expression
wrap i j e' = if i<=j then e' else EBrk (insPar 0 e')
insPar :: Integer -> Expression -> Expression
insPar i (EEqu (l,r)) = wrap i 0 (insPar 1 l .==. insPar 1 r)
insPar i (EInc (l,r)) = wrap i 0 (insPar 1 l .|-. insPar 1 r)
insPar i x@EIsc{} = wrap i 2 (foldr1 (./\.) [insPar 3 e | e<-exprIsc2list x ])
insPar i x@EUni{} = wrap i 2 (foldr1 (.\/.) [insPar 3 e | e<-exprUni2list x ])
insPar i (EDif (l,r)) = wrap i 4 (insPar 5 l .-. insPar 5 r)
insPar i (ELrs (l,r)) = wrap i 6 (insPar 7 l ./. insPar 7 r)
insPar i (ERrs (l,r)) = wrap i 6 (insPar 7 l .\. insPar 7 r)
insPar i (EDia (l,r)) = wrap i 6 (insPar 7 l .<>. insPar 7 r)
insPar i x@ECps{} = wrap i 8 (foldr1 (.:.) [insPar 9 e | e<-exprCps2list x ])
insPar i x@ERad{} = wrap i 8 (foldr1 (.!.) [insPar 9 e | e<-exprRad2list x ])
insPar i x@EPrd{} = wrap i 8 (foldr1 (.*.) [insPar 9 e | e<-exprPrd2list x ])
insPar _ (EKl0 e) = EKl0 (insPar 10 e)
insPar _ (EKl1 e) = EKl1 (insPar 10 e)
insPar _ (EFlp e) = EFlp (insPar 10 e)
insPar _ (ECpl e) = ECpl (insPar 10 e)
insPar i (EBrk e) = insPar i e
insPar _ x = x
{-
insPar 0 (r/\s/\t/\x/\y |- p)
=
wrap 0 0 (insPar 1 (r/\s/\t/\x/\y) |- insPar 1 p)
=
insPar 1 (r/\s/\t/\x/\y) |- insPar 1 p
=
wrap 1 2 (foldr1 f [insPar 3 e | e<-exprIsc2list (r/\s/\t/\x/\y) ]) |- p where f x y = EIsc (x,y)
=
foldr1 f [insPar 3 e | e<-exprIsc2list (r/\s/\t/\x/\y) ] |- p where f x y = EIsc (x,y)
=
foldr1 f [insPar 3 e | e<-[r,s,t,x,y] ] |- p where f x y = EIsc (x,y)
=
foldr1 f [insPar 3 r,insPar 3 s,insPar 3 t,insPar 3 x,insPar 3 y] |- p where f x y = EIsc (x,y)
=
foldr1 f [r,s,t,x,y] |- p where f x y = EIsc (x,y)
=
r/\s/\t/\x/\y |- p
insPar 0 (r;s;t;x;y |- p)
=
wrap 0 0 (insPar 1 (r;s;t;x;y) |- insPar 1 p)
=
insPar 1 (r;s;t;x;y) |- insPar 1 p
=
wrap 1 8 (insPar 8 r ; insPar 8 (s;t;x;y)) |- p
=
r; insPar 8 (s;t;x;y) |- p
=
r; wrap 8 8 (insPar 8 s; insPar 8 (t;x;y)) |- p
=
r; insPar 8 s; insPar 8 (t;x;y) |- p
=
r; s; insPar 8 (t;x;y) |- p
-}
|
4ZP6Capstone2015/ampersand
|
src/Database/Design/Ampersand/ADL1/Expression.hs
|
gpl-3.0
| 9,113
| 0
| 13
| 2,432
| 3,692
| 1,905
| 1,787
| 155
| 21
|
-- Copyright 2016, 2017 Robin Raymond
--
-- This file is part of Purple Muon
--
-- Purple Muon is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- Purple Muon is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with Purple Muon. If not, see <http://www.gnu.org/licenses/>.
{-|
Module : PurpleMuon.Physics.Constants
Description : Constants used by the Physics module of PurpleMuon
Copyright : (c) Robin Raymond, 2016-2017
License : GPL-3
Maintainer : robin@robinraymond.de
Portability : POSIX
-}
module PurpleMuon.Physics.Constants
( g
, physicalSize
, physicsStep
, minimumDistance
) where
import Protolude
import qualified Linear.V2 as LV2
import qualified PurpleMuon.Physics.Types as PPT
import qualified PurpleMuon.Types as PPY
g :: PPT.GravitationalConstant
g = PPT.GravitationalConstant 0.01
physicalSize :: PPT.PhysicalSize
physicalSize = PPT.PhysicalSize (LV2.V2 1 1)
-- |Do physics at 30 Hz
physicsStep :: PPT.DeltaTime
physicsStep = PPT.DeltaTime (1 / 30)
-- | The minimum distance two objects are allowed to have
-- This makes the integration numerically much more stable
minimumDistance :: PPY.FlType
minimumDistance = 0.1
|
r-raymond/purple-muon
|
src/PurpleMuon/Physics/Constants.hs
|
gpl-3.0
| 1,670
| 0
| 8
| 331
| 149
| 98
| 51
| 17
| 1
|
module Main (main) where
import LambdaFeed.Main (start)
main :: IO ()
main = start
|
markus1189/lambda-feed
|
main/Main.hs
|
gpl-3.0
| 85
| 0
| 6
| 16
| 34
| 20
| 14
| 4
| 1
|
module Gui where
import Graphics.UI.Gtk
import SimpleListView
import Web
import Cache
import Data.IORef
data GuiData = GuiData [IO CacheEntry] (UpdateTreeView) Image Label Label | EmptyGuiData
guidata_get_cache_list (GuiData cache_list _ _ _ _) = cache_list
guidata_get_update_list (GuiData _ update_list _ _ _) = update_list
guidata_get_image (GuiData _ _ image _ _) = image
guidata_get_title_label (GuiData _ _ _ title _) = title
guidata_get_description_label (GuiData _ _ _ _ description) = description
guidata_ref_update_cache_list state cache_list = do
guidata <- readIORef state
writeIORef state (GuiData cache_list (guidata_get_update_list guidata) (guidata_get_image guidata) (guidata_get_title_label guidata) (guidata_get_description_label guidata))
guidata_ref_update_list state update_list = do
guidata <- readIORef state
writeIORef state (GuiData (guidata_get_cache_list guidata) update_list (guidata_get_image guidata) (guidata_get_title_label guidata) (guidata_get_description_label guidata))
selected_item :: IORef GuiData -> String -> IO()
selected_item state s = do
guidata <- readIORef state
list <- return (guidata_get_cache_list guidata)
let id = read s :: Int
cacheentry <- list !! (id - 1)
putStrLn $ show (get_cache_entry_id cacheentry)
imageSetFromFile (guidata_get_image guidata) (cache_entry_file_image_path id)
let titletext = cache_entry_get_title cacheentry
let desctext = cache_entry_get_description cacheentry
let title = guidata_get_title_label guidata
let desc = guidata_get_description_label guidata
set_title_label_text title titletext
labelSetText desc desctext
prepare_comics_buffer_widget :: IORef GuiData -> IO()
prepare_comics_buffer_widget state = do
guidata <- readIORef state
UpdateTreeView updatelist <- return $ guidata_get_update_list guidata
let showids = map (\i -> show i) [1..cache_comics_num]
new_ul <- updatelist showids (selected_item state)
guidata_ref_update_list state new_ul
set_title_label_text label text = do
labelSetMarkup label ("<big><b>" ++ text ++ "</b></big>")
gui :: IO()
gui = do
guidataref <- newIORef EmptyGuiData
initGUI
window <- windowNew
mainhbox <- hBoxNew False 10
imagevbox <- vBoxNew False 10
labelsvbox <- vBoxNew False 10
titlelabel <- labelNew (Just "")
set_title_label_text titlelabel ("Tu znajduje się tytuł komiksu") -- Polish: "The comic title goes here"
descriptionlabel <- labelNew (Just "Tu znajduje się opis komiksu") -- Polish: "The comic description goes here"
imageview <- imageNew
listscrolledwindow <- scrolledWindowNew Nothing Nothing
scrolledWindowSetPolicy listscrolledwindow PolicyNever PolicyAutomatic
(listview, UpdateTreeView updateList) <- simpleListView "Bufor komiksów" [] (selected_item guidataref) -- Polish: "Comic buffer"
scrolledWindowAddWithViewport listscrolledwindow listview
writeIORef guidataref (GuiData init_cache_list (UpdateTreeView updateList) imageview titlelabel descriptionlabel)
imagescrolledwindow <- scrolledWindowNew Nothing Nothing
scrolledWindowSetPolicy imagescrolledwindow PolicyAutomatic PolicyAutomatic
scrolledWindowAddWithViewport imagescrolledwindow imageview
set window [containerBorderWidth := 10, containerChild := mainhbox]
-- list
boxPackStart mainhbox listscrolledwindow PackNatural 0
-- image box
boxPackStart mainhbox imagevbox PackGrow 0
-- labels
boxPackStart imagevbox labelsvbox PackNatural 0
boxPackStart labelsvbox titlelabel PackNatural 0
boxPackStart labelsvbox descriptionlabel PackNatural 0
-- image
boxPackStart imagevbox imagescrolledwindow PackGrow 0
onDestroy window mainQuit
widgetShowAll window
prepare_comics_buffer_widget guidataref
mainGUI
|
chyla/haskell-xkcd
|
src/Gui.hs
|
gpl-3.0
| 3,638
| 0
| 13
| 528
| 983
| 454
| 529
| 72
| 1
|
module Main where
import System.Arte.Tracker (runTracker)
main :: IO ()
main = runTracker
|
imalsogreg/arte-ephys
|
exec/tracker.hs
|
gpl-3.0
| 92
| 0
| 6
| 15
| 30
| 18
| 12
| 4
| 1
|
module Nary_tree(make_tree, make_tree_dot) where
import Data.List
data NTree a =
NTreeFork a [(NTree a)]
| NTreeEmpty
deriving(Show,Eq)
make_tree a = let a' = reverse a in (build_dumb_tree NTreeEmpty a')
build_dumb_tree acc [] = acc
build_dumb_tree acc (a:as) =
let acc' = (NTreeFork a [acc]) in
build_dumb_tree acc' as
-- Boolean function used to check whether a tree has any contents
has_contents (NTreeEmpty) = False
has_contents (NTreeFork a _) = True
-- Function used to extract a tree's immediate contents (on that level)
get_contents (NTreeFork a _) = a
get_contents (NTreeEmpty) = error "An empty tree has no contents"
-- Function used to extract a tree's immediate children
get_children (NTreeFork a cs) = cs
get_children (NTreeEmpty) = error "An empty tree has no children"
-- Function used to calculate the height of a tree
height (NTreeEmpty) = 0
height (NTreeFork a cs) =
let hcs = map height cs in
1 + (foldr f 0 hcs)
where f a b = if a > b then a else b
-- Function used to extract all "nodes" that are between the root (level 0) and level i (inclusive)
get_level i t =
let h = (height t) in
if (h > i) then
accum_levels [] 0 i t
else
error "Requested tree does not have enough levels"
-- Helper function for get_level
accum_levels acc cur_lev des_lev (NTreeEmpty) = acc
accum_levels acc cur_lev des_lev (NTreeFork a cs) =
if (cur_lev > des_lev) then
acc
else
-- Grab the contents of the next levels for each child tree
let res = map (accum_levels (acc) (cur_lev + 1) (des_lev)) cs in
-- Accumulate the results and add on the current tree node
a : (foldr (++) [] res)
-- Function used to get nodes at each level in a tree
level_map t =
let ids = extract_all t in
let rev = rev_tuples [] ids in
create_map 0 (height t) rev
rev_tuples acc [] = acc
rev_tuples acc ((a,b):cs) =
let acc' = (b,a):acc in
rev_tuples acc' cs
create_map curh maxh l =
-- Stop if the height of the tree has been reached
if (curh == maxh) then
[]
else
-- Get all nodes on current level and add to the map
let cur_nodes = find_all [] curh l in
((cur_nodes) : (create_map (curh + 1) (maxh) l))
find_all acc lev [] = (lev, acc)
find_all acc lev ((l,n):ls) =
let acc' = if (lev == l) then (n:acc) else acc in
(find_all acc' lev ls)
-- Function used to extract all "nodes" and their associated levels
extract_all t =
identify_levels [] 0 (height t) t
-- Helper function to get a list of annotated (node,level) pairs
identify_levels acc cur_lev des_lev (NTreeEmpty) = acc
identify_levels acc cur_lev des_lev (NTreeFork a cs) =
if (cur_lev == des_lev) then
acc
else
-- Grab the contents of the next levels for each child tree
let res = map (identify_levels (acc) (cur_lev + 1) (des_lev)) cs in
-- Combine the results and add on the current tree node
((a,cur_lev) : (foldr (++) [] res))
-- Function used to search for a given "node" in the tree (earliest first)
search_for_item des t =
let res = find_item des t in
if (res == []) then
error "Item not found!"
else
(res)
-- Function used to find a given "node" in a tree (returns a list)
find_item des t = find_item_helper [] des t
find_item_helper acc des (NTreeEmpty) = acc
find_item_helper acc des (NTreeFork a cs) =
let acc' = if (a == des) then (a:acc) else acc
accs = foldr (++) [] (map (find_item_helper acc des) cs)
in
acc'++accs
-- Function used to "dotify" a tree
make_tree_dot t =
unlines [
"digraph G {",
dotify_tree t,
"}"
]
dotify_tree (NTreeEmpty) = []
dotify_tree (NTreeFork a cs) =
let dc = unlines $ gen_dc a cs in
unlines [
(show a),
dc,
concatMap dotify_tree cs
]
gen_dc a cs = map (gen_direct_children a) cs
gen_direct_children :: (Show a, Show t) => a -> NTree t -> String
gen_direct_children a (NTreeFork b _) = (show a)++" ->"++(show b)
gen_direct_children a (NTreeEmpty) = ""
-- Function used to turn a tree map back into a tree
map_to_tree m = (map_to_tree_helper [NTreeEmpty] (reverse (sort m)))!!0
map_to_tree_helper c_accs [] = c_accs
map_to_tree_helper c_accs ((l,cs):ls) =
let cur_lev = map (create_node c_accs) cs in
map_to_tree_helper cur_lev ls
where
create_node cs n = (NTreeFork n cs)
-- Function used to replace a "level" in a tree map
replace_level lev new_lev lmap =
case (lookup lev lmap) of
(Nothing) -> error "Level not found in map!"
(Just old_lev) -> let lmap' = delete (lev,old_lev) lmap in
let lmap'' = (new_lev : lmap') in
lmap''
-- Function used to insert a new node at a given level
insert_item lev new_tree t =
let lmap = level_map t in
let (b,new_level) = add_node_to_level new_tree lev lmap in
if (b == True) then
let new_map = replace_level lev new_level lmap in
map_to_tree new_map
else
map_to_tree (new_level:lmap)
add_node_to_level node lev lmap =
case (lookup lev lmap) of
(Nothing) -> (False,(lev,[node]))-- error "Level not found in tree!"
(Just old_lev) -> (True,(lev,(node:old_lev)))
-- **********************************
-- Test Cases
-- **********************************
empty_tree = NTreeEmpty
node0= (NTreeFork "c = a + b" [empty_tree])
node1= (NTreeFork "d = a * 2" [empty_tree])
node2= (NTreeFork "c = 0" [empty_tree])
node3= (NTreeFork "d = 1" [empty_tree])
tree0 = node0
subtree1 = (NTreeFork "a = 0" [subsubtree1])
subsubtree1 = (NTreeFork "c = a + 20" [empty_tree])
subtree2 = (NTreeFork "b = 10" [subsubtree2])
subsubtree2 = (NTreeFork "d = b + a" [empty_tree])
tree1 = (NTreeFork "root" [subtree1, subtree2])
simple_tree = (NTreeFork "abc" [empty_tree])
test1 = insert_item 1 "xxx" simple_tree
test2 = insert_item 1 "yzy" test1
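-- Illustrative evaluations over the sample trees above (a sketch; the results
-- are worked out by hand from the definitions in this module):
--
-- > height tree1 == 3
-- > get_level 1 tree1 == ["root","a = 0","b = 10"]
--
-- i.e. asking for everything between the root and level 1 (inclusive) yields
-- the root together with its immediate children.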
|
luebbers/reconos
|
tools/fsmLanguage/fpga_scripts/generate_fsm/src/Nary_tree.hs
|
gpl-3.0
| 5,855
| 74
| 15
| 1,370
| 1,946
| 1,024
| 922
| 122
| 2
|
module Main where
import Brainfuck (runMachine, makeMachine, makeProgram, compileProgram)
helloworld :: String
helloworld="+++++++++[>++++++++>+++++++++++>+++++<<<-]>.>++.+++++++..+++.>-.------------.<++++++++.--------.+++.------.--------.>+."
echo :: String
echo = "+[>,.<]"
main::IO()
main = do prg <- return $ makeProgram $ compileProgram echo
m <- return $ makeMachine prg
_ <- runMachine m
putStrLn ""
return ()
|
ledyba/Brainfucks
|
haskell/src/Main.hs
|
gpl-3.0
| 461
| 0
| 9
| 89
| 112
| 57
| 55
| 12
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.MapsEngine.Rasters.Upload
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Create a skeleton raster asset for upload.
--
-- /See:/ <https://developers.google.com/maps-engine/ Google Maps Engine API Reference> for @mapsengine.rasters.upload@.
module Network.Google.Resource.MapsEngine.Rasters.Upload
(
-- * REST Resource
RastersUploadResource
-- * Creating a Request
, rastersUpload
, RastersUpload
-- * Request Lenses
, ruPayload
) where
import Network.Google.MapsEngine.Types
import Network.Google.Prelude
-- | A resource alias for @mapsengine.rasters.upload@ method which the
-- 'RastersUpload' request conforms to.
type RastersUploadResource =
"mapsengine" :>
"v1" :>
"rasters" :>
"upload" :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] Raster :> Post '[JSON] Raster
-- | Create a skeleton raster asset for upload.
--
-- /See:/ 'rastersUpload' smart constructor.
newtype RastersUpload = RastersUpload'
{ _ruPayload :: Raster
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'RastersUpload' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ruPayload'
rastersUpload
:: Raster -- ^ 'ruPayload'
-> RastersUpload
rastersUpload pRuPayload_ =
RastersUpload'
{ _ruPayload = pRuPayload_
}
-- | Multipart request metadata.
ruPayload :: Lens' RastersUpload Raster
ruPayload
= lens _ruPayload (\ s a -> s{_ruPayload = a})
instance GoogleRequest RastersUpload where
type Rs RastersUpload = Raster
type Scopes RastersUpload =
'["https://www.googleapis.com/auth/mapsengine"]
requestClient RastersUpload'{..}
= go (Just AltJSON) _ruPayload mapsEngineService
where go
= buildClient (Proxy :: Proxy RastersUploadResource)
mempty
|
rueshyna/gogol
|
gogol-maps-engine/gen/Network/Google/Resource/MapsEngine/Rasters/Upload.hs
|
mpl-2.0
| 2,669
| 0
| 13
| 608
| 308
| 189
| 119
| 48
| 1
|
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TupleSections #-}
{-# OPTIONS_GHC -Wall #-}
-- The first step of compiling is to build all reactimated 'Event's into a
-- graph. The graph structure keeps track of the current heads.
module Reactive.Impulse.Internal.Graph (
initialRunningDynGraph
, compileHeadMap
, dynUpdateGraph
)
where
import Reactive.Impulse.Core
import Reactive.Impulse.Internal.RWST hiding ((<>))
import Reactive.Impulse.Internal.Types
import Reactive.Impulse.Internal.Chain
import Reactive.Impulse.Internal.Weak
import Reactive.Impulse.STM.Fence
import Control.Applicative
import Control.Concurrent.STM hiding (mkWeakTVar)
import Control.Lens
import Control.Monad.Identity
import Data.IntMap (IntMap)
import qualified Data.IntMap as IM
import Data.IntSet (IntSet)
import qualified Data.IntSet as IntSet
import Data.IntSet.Lens
import Data.Semigroup
import System.Mem.Weak
import GHC.Conc.Sync (unsafeIOToSTM)
import System.IO.Unsafe (unsafePerformIO)
import Unsafe.Coerce
initialRunningDynGraph :: IO RunningDynGraph
initialRunningDynGraph = do
tv1 <- newTVarIO mempty
tv2 <- newTVarIO mempty
return $ DynGraph tv1 tv2 mempty IM.empty mempty
compileHeadMap :: SGState -> IO NetHeadMap
compileHeadMap sg = do
mapvar <- newTVarIO IM.empty
addToHeadMap mapvar $ sg^.inputs.to IM.elems
return mapvar
addToHeadMap :: NetHeadMap -> [SGInput] -> IO ()
addToHeadMap mapvar sgInputs = do
currentHeads <- readTVarIO mapvar
mTrace $ "curHeads" ++ show (IM.keys currentHeads)
currentHeads2 <- foldM folder currentHeads sgInputs
atomically $ writeTVar mapvar currentHeads2
mTrace $ "curHeads(added)" ++ show (IM.keys currentHeads2)
where
folder !acc i = (\(l,e) -> IM.insert l e acc) <$> mkDynInput i
mkDynInput :: SGInput -> IO (Label, EInput)
mkDynInput (SGInput t e) =
let !l = e^.label
finishIt = Just . atomically $
modifyTVar' mapvar (IM.delete l)
in (l,) . EInput <$> mkWeakTVar t finishIt
addToHeadMap' :: NetHeadMap -> [SGInput] -> STM ()
addToHeadMap' mapvar sgInputs = do
currentHeads <- readTVar mapvar
mTrace $ "curHeads" ++ show (IM.keys currentHeads)
unsafeIOToSTM (foldM folder currentHeads sgInputs) >>= writeTVar mapvar
currentHeads2 <- readTVar mapvar
mTrace $ "curHeads(added)" ++ show (IM.keys currentHeads2)
where
folder !acc i = (\(l,e) -> IM.insert l e acc) <$> mkDynInput i
mkDynInput :: SGInput -> IO (Label, EInput)
mkDynInput (SGInput t e) =
let !l = e^.label
finishIt = Just . atomically $
modifyTVar' mapvar (IM.delete l)
in (l,) . EInput <$> mkWeakTVar t finishIt
runFireOnce :: Network -> FireOnce -> IO ()
runFireOnce net (FireOnce l a) = do
nIn <- net^!nInputs.act readTVarIO
oneRan <- IM.lookup l nIn^!_Just.act (\(EInput wk) -> deRefWeak wk >>= \case
Just tv -> Any True <$ (readTVarIO tv >>= ($ unsafeCoerce a))
Nothing -> return mempty)
when (not $ getAny oneRan) $ do
rg <- net^!nDynGraph.dgHeads.act readTVarIO
IM.lookup l rg^!act (\(Just wk) -> deRefWeak wk >>= \case
Just (EChain _ c) -> (runUpdates net $ compileChain c id (unsafeCoerce a))
_ -> error $ "impulse <runFireOnce>: chain expired: " ++ show l )
-- dynamically update a network with the given ChainM building action
-- we convert the ChainM into a ModGraphM to update a frozen graph,
-- then freeze the network and run the ModGraphM against the now-frozen network.
-- next we merge the results of the build step, recompile everything that's been
-- marked dirty, and finally unfreeze the network, returning any 'onBuild'-type
-- actions.
dynUpdateGraph :: Network -> ChainM () -> STM (IO ())
dynUpdateGraph net builder = do
let rg = net^.nDynGraph
runDyn :: ModGraphM DirtyLog
runDyn = do
baseGraph <- view frozenMutGraph
baseBuilder <- get -- this should always be empty I think
(_,output,dirtyLog) <- lift
$ runRWST builder (boundSet baseGraph) baseBuilder
put output
return dirtyLog
doMergePrep = do
dl <- runDyn
prepareForMerge $ dl^.dlRemSet
s <- get
return (dl,s)
(dirties2,final,(dirtyLog,finalGraph)) <- replacingRunningGraph rg doMergePrep
let pushEvents = appEndo (dirtyLog^.dlEvents) []
addNewHeads = addToHeadMap' (net^.nInputs)
$ appEndo (dirtyLog^.dlAddInp) []
dirties = dirties2 <> dirtyLog^.dlChains
addNewHeads
knownInputs <- net^!nInputs.act readTVar
let recompile :: Label -> EChain -> STM ()
recompile lbl (EChain _ c) = IM.lookup lbl knownInputs^!_Just.act (
\(EInput wk) -> wk^!act (unsafeIOToSTM .deRefWeak)._Just.act (
\pushVar -> let cc = compileChain (unsafeCoerce c) id
in do mTrace $ showChainTree c
writeTVar pushVar $ runUpdates net . cc ))
-- we only want to run the action for each node once. For some reason
-- I think we may have multiple occurrences of them.
-- TODO: see if this step is really necessary.
checkFireOnce :: IntSet -> FireOnce -> IO IntSet
checkFireOnce acc fo
| IntSet.member (fo^.label) acc = return acc
| otherwise = IntSet.insert (fo^.label) acc <$ runFireOnce net fo
mTrace $ "final graph chains:\n" ++ unlines (finalGraph^.dgHeads._Wrapped.traverse._Wrapped.to (\(EChain _ x) -> [showChainTree x]))
mTrace "*** end ***"
mapMOf_ (from dirtyChains.members)
(\lbl -> finalGraph ^! dgHeads._Wrapped.to (IM.lookup lbl)._Just
._Wrapped.act (recompile lbl))
dirties
let curChains = atomically $ net^!nDynGraph.dgHeads.act readTVar.traverse.act (unsafeIOToSTM.deRefWeak)._Just.to (\(EChain _ x) -> [showChainTree x])
return $ final >> void (foldM checkFireOnce mempty pushEvents) >> mTrace ("Current heads\n" ++ (unlines $ unsafePerformIO curChains))
-- perform an operation on a 'RunningDynGraph', and re-write it when
-- finished.
-- return the dirty heads so we know which to rebuild,
-- and which pushers to update.
replacingRunningGraph :: RunningDynGraph -> ModGraphM a -> STM (DirtyChains,IO (), a)
replacingRunningGraph g m = do
f <- freezeDynGraph
(a,newG,dirtyLog) <- runRWST m f startBuildingGraph
final <- thawFrozen dirtyLog newG
return (dirtyLog^.dlChains,final,a)
where
freezeDynGraph :: STM FrozenDynGraph
freezeDynGraph = do
let freezeMap :: Lens' (DynGraph TVar Weak) (TVar (IntMap (Weak a)))
-> STM (IntMap (Weak a), IntMap (Maybe a))
freezeMap l = do
w' <- g ^! l.act readTVar
m' <- traverse (unsafeIOToSTM.deRefWeak) w'
return (w',m')
(heads'w,heads'm) <- freezeMap dgHeads
(behs'w,behs'm) <- freezeMap dgBehaviors
let noMaybes = IM.mapMaybe (fmap Identity)
sourcegraph = startBuildingGraph
& dgHeads._Wrapped .~ heads'w
& dgBehaviors._Wrapped .~ behs'w
mutgraph = startBuildingGraph
& dgHeads._Wrapped .~ noMaybes heads'm
& dgBehaviors._Wrapped .~ noMaybes behs'm
return $ emptyFrozenGraph & frozenMutGraph .~ mutgraph
& frozenSource .~ sourcegraph
thawFrozen :: DirtyLog -> BuildingDynGraph -> STM (IO ())
thawFrozen dirtyLog newg = do
-- run this as a finalizer action, because we won't need heads for
-- anything else and we can't trust mkWeak inside STM.
-- Get all the dirty heads out of the mutgraph, make new weak refs,
-- and update the map. Have the STM action be a union so we don't need
-- to worry about existing elements. `union` is left-biased, so we
-- want to merge our new map (which might have updated dirties) as the
-- left.
--
-- this is a little harder than the generic reconstitutor because we
-- only want to change things that have been dirtied, whereas in other
-- cases we can add everything.
let dirties = dirtyLog^.dlChains
mkAWeakRef t =
let !lbl = t^.label
mkw = newg^.dgMkWeaks.to (IM.lookup lbl)
evictor = Just $ evictHead g lbl
in maybe (error $ "impulse <replacingRunningGraph>: missing MkWeak for " ++ show lbl)
(\w -> unMkWeak w t evictor)
mkw
folder map' dirtyLbl = do
h' <- traverse (mkAWeakRef.runIdentity)
$ newg^.dgHeads._Wrapped.to (IM.lookup dirtyLbl)
return $! maybe id (IM.insert dirtyLbl) h' map'
mkWeakHeads = do
mg1 <- foldlMOf (from dirtyChains.members) folder mempty dirties
atomically $ g^!dgHeads.act (flip modifyTVar' (IM.union mg1))
-- for behaviors et al, do the same thing, except we don't need to
-- worry about dirties (just add everything)
let mkAWeakB t = let !lbl = t^.label
eviction = Just $ evictBehavior g lbl
in weakEB eviction t
mkWeakBs = reconstituter dgBehaviors mkAWeakB
reconstituter :: (forall f w. Lens' (DynGraph f w) (f (IntMap (w t)))) -> (t -> IO (Weak t)) -> IO ()
reconstituter fieldLens weakor = do
mb1 <- traverse (weakor.runIdentity)
$ newg^.fieldLens._Wrapped
atomically $ g^!fieldLens.act (flip modifyTVar' (IM.union mb1))
return $ mkWeakHeads >> mkWeakBs
-- This function takes the constructed BuildingDynGraph in ModGraphM's state
-- and merges it with the frozenMutGraph, putting the merged graph back into
-- the state. This should only be called immediately before unfreezing the
-- state.
prepareForMerge :: ChainEdgeMap -> ModGraphM ()
prepareForMerge cem = do
-- the initial, frozen graph.
baseg <- view $ frozenMutGraph.dgHeads._Wrapped
-- we need to prune the pre-existing graph. The new chains should
-- already be pruned though, so we can leave them be.
let remSet = cem^.from chainEdgeMap.to (IM.keysSet)
pruneEChain e@(EChain p c) = if not . IntSet.null . IntSet.intersection remSet $ c^.cPushSet'
then (DirtyChains $ IntSet.singleton (c^.label), EChain p $ removeEdges cem c)
else (mempty, e)
let doAcc :: DirtyChains -> EChain -> (DirtyChains,EChain)
doAcc !s e = pruneEChain e & _1 <>~ s
(rmDirties, baseg') = mapAccumLOf (traverse._Wrapped) doAcc mempty baseg
scribe dlChains rmDirties
-- the built sub-graph to add
newg <- get
newg' <- foldlMOf (dgHeads._Wrapped.traverse._Wrapped)
(procNewHead $ newg^.dgBoundMap ) baseg' newg
dgHeads._Wrapped.=newg'
where
procNewHead :: BoundaryMap -> IntMap (Identity EChain) -> EChain
-> ModGraphM (IntMap (Identity EChain))
procNewHead boundMap runningGraph newHead = do
-- 1. for each head in BuildingDynGraph
-- *. if the label isn't known, copy over the head and mark it dirty
-- *. if the label is known (it exists in the BoundaryMap),
-- push the head into the graph, and mark all parents dirty.
let lbl = newHead^.label
parentSet = boundMap^.from boundaryMap.to (IM.lookup lbl)._Just
f' (EChain p ec) = EChain p $ IntSet.foldl'
(\c l' -> insertAt l' newHead c) ec parentSet
if IntSet.null parentSet
then IM.insert lbl (Identity newHead) runningGraph
<$ markDirty lbl
else IM.map (over _Wrapped f') runningGraph
<$ markDirties parentSet
boundSet :: BuildingDynGraph -> BoundarySet
boundSet g = g^.dgHeads._Wrapped.traverse._Wrapped.cBoundarySet
data Stepper =
AllDone [IO ()]
| NeedsIO UpdateStep UpdateBuilder
needsIO :: Stepper -> Bool
needsIO (NeedsIO _ _) = True
needsIO _ = False
runUpdates :: Network -> STM UpdateBuilder -> IO ()
runUpdates network doSteps = do
let tm = network^.nTManager
let stepBuilderIO :: UpdateBuilder -> IO ()
stepBuilderIO ub = atomically (stepBuilder network ub) >>= \case
AllDone finalSteps -> sequence_ finalSteps
NeedsIO (DynMod akt) ub' -> do
(chn,dynUb) <- akt
updateFinalizers <- atomically $ dynUpdateGraph network chn
ub'2 <- atomically dynUb
stepBuilderIO $ ub' <> ub'2 & ubOutputs %~ (updateFinalizers:)
NeedsIO _ _ -> error "<impulse>: stepBuilderIO: didn't get a DynMod!"
maybeExclusive tm needsIO (doSteps >>= stepBuilder network) >>= \case
(AllDone finalSteps, Nothing) -> sequence_ finalSteps
(NeedsIO ioStep rest, Just ticket) -> do
stepBuilderIO $ rest & modSteps %~ (ioStep:)
commitExclusive tm ticket
(AllDone _, Just _) -> error "<impulse> Got a ticket?"
(NeedsIO _ _, Nothing) -> error "<impulse> needs a ticket!"
stepBuilder :: Network -> UpdateBuilder -> STM Stepper
stepBuilder network = go
where
go ub = case ub^.modSteps of
(Mod m:steps) -> do
cleanupActs <- dynUpdateGraph network m
go $ ub & modSteps .~ steps & ubOutputs %~ (cleanupActs :)
(step@(DynMod _):steps) -> return $ NeedsIO step $ ub & modSteps .~ steps
[] -> case ub^.readSteps of
(step:steps) -> do
ub' <- step
let outsF = case ub^.ubOutputs of
[] -> id
[x] -> ubOutputs %~ (x:)
xs -> ubOutputs %~ (xs ++)
go $ ub' & readSteps %~ (steps <>) & outsF
[] -> return . AllDone $ ub^.ubOutputs
------------------------------------------------------------------
-- helpers for handling weak refs.
-- make a weak reference for an EBehavior
weakEB :: Maybe (IO ()) -> EBehavior -> IO (Weak EBehavior)
weakEB finalizer e@(EBehavior _ cb) = case cb of
ReadCB a -> mkWeak a e finalizer
PushCB tv -> mkWeakTVarKey tv e finalizer
SwchCB (CBSwitch tv) -> mkWeakTVarKey tv e finalizer
-- eviction function for behaviors
evictBehavior :: RunningDynGraph -> Label -> IO ()
evictBehavior rg lbl = atomically $
rg ^! dgBehaviors.act (flip modifyTVar' (IM.delete lbl))
evictHead :: RunningDynGraph -> Label -> IO ()
evictHead rg lbl = atomically $
rg ^! dgHeads.act (flip modifyTVar' (IM.delete lbl))
|
JohnLato/impulse
|
src/Reactive/Impulse/Internal/Graph.hs
|
lgpl-3.0
| 14,841
| 2
| 24
| 4,121
| 4,117
| 2,054
| 2,063
| -1
| -1
|
-- ex0
-- halve xs = (take n xs, drop n xs)
-- where n = (length xs) `div` 2
-- halve xs = splitAt (length xs `div` 2) xs
-- ex1
safetail [] = []
safetail xs = tail xs
-- ex4
mult :: Num a => a -> a -> a -> a
mult = \x -> (\y -> (\z -> x * y * z))
|
rranelli/rrfuncprog
|
week2/hw.hs
|
unlicense
| 254
| 0
| 11
| 74
| 88
| 49
| 39
| 4
| 1
|
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeSynonymInstances #-}
module Gitomail.Opts
( Opts(..)
, Command(..)
, GitRef
, opts
, runCommand
, configPaths
, noImplicitConfigs
, repositoryPath
, repositoryName
, outputPath
, noAutoMaintainers
, version
, verbose
, extraCC
, extraTo
, dryRun
, gitRef
) where
------------------------------------------------------------------------------------
import Control.Lens (makeLenses)
import Data.Text (Text)
import qualified Data.Text as T
import Options.Applicative (many, short, long, Mod, OptionFields,
Parser, strOption, switch, help,
optional, subparser, command, info,
argument, str, metavar, progDesc,
ParserInfo, helper, idm, (<**>))
import Data.Monoid ((<>))
------------------------------------------------------------------------------------
type RepPath = FilePath
type GitRef = Text
type EmailAddress = Text
type Subject = Text
type ReplyToId = Text
type DryRun = Bool
data Command
= WhoMaintainsCmnd
| ShowIneffectiveDefinitions
| SendOne (Maybe Subject) (Maybe ReplyToId)
| ShowOne
| AutoMailer
| ShowAutoMailerRefs
| ForgetHash
| SeenHash String
| BranchesContaining String
| ParseMaintainerFile FilePath
| ParseConfigFile FilePath
| JiraCCByIssue String
| EvalConfigs
| Misc
deriving (Show)
textOption :: Mod OptionFields String -> Parser Text
textOption = (fmap T.pack) . strOption
data Opts = Opts
{ _verbose :: Bool
, _version :: Bool
, _dryRun :: DryRun
, _outputPath :: Maybe FilePath
, _configPaths :: [FilePath]
, _noImplicitConfigs :: Bool
, _noAutoMaintainers :: Bool
, _extraCC :: [EmailAddress]
, _extraTo :: [EmailAddress]
, _repositoryPath :: Maybe RepPath
, _repositoryName :: Maybe Text
, _gitRef :: Maybe GitRef
, _runCommand :: Maybe Command
} deriving (Show)
makeLenses ''Opts
optsParse :: Parser Opts
optsParse = Opts
<$> switch ( long "debug" <> short 'd' <> help "Enable debug prints" )
<*> switch ( long "version" <> short 'v' <> help "Just print version and exit" )
<*> switch ( long "dry-run" <> short 'n' <> help "Don't actually send or output emails, nor update DB" )
<*> ( optional . strOption)
( long "output-path" <> short 'o' <> help "Local directory in which to place emails instead of sending by SMTP" )
<*> ( many . strOption)
( long "config" <> short 'c' <> help "Configuration files" )
<*> switch ( long "no-implicit-configs" <> help "Don't read configs paths such as ~/.gitomailconf.yaml or $GIT_DIR/gitomailconf.yaml" )
<*> switch ( long "no-auto-maintainers" <> help "Don't use Maintainers data from repository")
<*> ( many . textOption) ( long "cc" <> help "Extra people for 'Cc:' in this invocation" )
<*> ( many . textOption) ( long "to" <> help "Extra people for 'To:' in this invocation" )
<*> ( optional . strOption)
( long "repo" <> short 'r' <> help "Repository pathname" )
<*> ( optional . textOption)
( long "repo-name" <> help "Repository name" )
<*> ( optional . textOption)
( long "g" <> short 'g' <> help "Git revision" )
<*> optional (subparser
(
command "who-maintains" showWhoMaintains
<> command "show-ineffectives" showIneffectiveDefinitions
<> command "show-one" showOneRef
<> command "send-one" sendOneRef
<> command "auto-mailer" autoMailer
<> command "debug" debugCommands
))
where
oneArg ctr = info (ctr <$> (argument str (metavar "ARG")))
debugCommands = info (subparser (
command "parse-maintainers-file" (oneArg ParseMaintainerFile (progDesc ""))
<> command "parse-config-file" (oneArg ParseConfigFile (progDesc ""))
<> command "eval-configs" (info (pure EvalConfigs) (progDesc ""))
<> command "show-auto-mailer-refs" (info (pure ShowAutoMailerRefs) (progDesc ""))
<> command "forget-hash" (info (pure ForgetHash) (progDesc ""))
<> command "seen-hash" (oneArg SeenHash (progDesc ""))
<> command "jira-cc-by-issue" (oneArg JiraCCByIssue (progDesc ""))
<> command "branches-containing" (oneArg BranchesContaining (progDesc ""))
<> command "misc" (info (pure Misc) (progDesc ""))
)) (progDesc "Various debugging commands")
showWhoMaintains = info (pure WhoMaintainsCmnd)
(progDesc "Show current state of maintainership")
showIneffectiveDefinitions = info (pure ShowIneffectiveDefinitions)
(progDesc "Show ineffective statements in the tree")
sendOneRef = info (SendOne <$> ( optional . textOption) ( long "subject" <> help "Force email subject" )
<*> ( optional . textOption) ( long "in-reply-to" <> help "Message ID to reply to" ))
(progDesc "Send a single commit email for a specified git revision")
showOneRef = info (pure ShowOne)
(progDesc "Console print of a single commit, in ANSI")
autoMailer = info (pure AutoMailer)
(progDesc "Automatically send mail for new commits (read the docs first!)")
opts :: ParserInfo Opts
opts = info (optsParse <**> helper) idm
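-- An illustrative sketch of how the options above might be combined on the
-- command line (the executable name @gitomail@ is an assumption here; the
-- global switches go before the sub-command):
--
-- > gitomail -r /path/to/repo -g HEAD send-one --subject "Fix build"
-- > gitomail -n -c myconf.yaml auto-mailer
-- > gitomail debug eval-configs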
|
kernelim/gitomail
|
src/Gitomail/Opts.hs
|
apache-2.0
| 6,011
| 0
| 23
| 1,921
| 1,349
| 714
| 635
| 123
| 1
|
module Lib
(
-- Types
Error
, UTF8String
, UTF8Char
-- Base64
, base64ToBytes
, bytesToBase64
-- Base16
, hexToBytes
, bytesToHex
, isHex
-- ByteString helper functions
, bitwiseCombine
, hammingDistance
, allPairsHammingDistance
, bytesToString
, buildFreqTable
, buildDelta
, freqTableDelta
, word8ToChar
, charToWord8
, stringToBytes
, splitBytes
, plusNL
-- Crypto functions
, decryptAES128ECB
, encryptAES128ECB
, encryptAES128CBC
, decryptAES128CBC
, pkcs7Pad
, breakIntoBlocksPkcs7
-- Data
, englishFreqTable
, asciiFreqTable
, asciiFreqTableNoNL
) where
import Control.Arrow
import Data.Bits
import qualified Data.ByteString.Base16.Lazy as B16
import qualified Data.ByteString.Base64.Lazy as B64
import qualified Data.ByteString.Lazy as B
import Data.List
import qualified Data.Map as Map
import qualified Data.Text.Lazy as Txt
import qualified Data.Text.Lazy.Encoding as TxtEnc
import Data.Word
import qualified Debug.Trace as Debug
import qualified Test.QuickCheck as QC
-- import qualified Codec.Crypto.AES as AES2
--
-- import qualified Crypto.Cipher.AES as AES
import qualified Codec.Crypto.SimpleAES as AES
-- Error
-- (error message, arguments for return_help, and whether to show usage)
type Error = (String, [B.ByteString], Bool)
type UTF8String = String
type UTF8Char = Char
-- Base64 functions
base64ToBytes :: B.ByteString -> B.ByteString
base64ToBytes = B64.decodeLenient
bytesToBase64 :: B.ByteString -> B.ByteString
bytesToBase64 = B64.encode
-- Base16 (hex) functions
hexToBytes :: B.ByteString -> B.ByteString
hexToBytes = fst . B16.decode
bytesToHex :: B.ByteString -> B.ByteString
bytesToHex = B16.encode
isHex :: B.ByteString -> Bool
isHex x = all (`elem` map charToWord8 (['A'..'F'] ++ ['a'..'f'] ++ ['0'..'9'])) $ B.unpack x
-- ByteString helper functions
bitwiseCombine :: (Word8 -> Word8 -> Word8) -> B.ByteString -> B.ByteString -> B.ByteString
bitwiseCombine f x y = B.pack $ B.zipWith f x y
-- NB: if the ByteStrings are not of equal length, this truncates the longer one
hammingDistance :: B.ByteString -> B.ByteString -> Int
hammingDistance x y = B.foldl (\a b -> a + popCount b) 0 $ bitwiseCombine xor x y
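-- A quick sanity check of the definition above, using 'stringToBytes' from
-- this module (the two classic cryptopals test strings differ in 37 bits):
--
-- > hammingDistance (stringToBytes "this is a test") (stringToBytes "wokka wokka!!!") == 37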
-- Takes a list of ByteStrings of the same length
allPairsHammingDistance :: [B.ByteString] -> Int
allPairsHammingDistance input = sum [sum [hammingDistance (head xs) x | x <- tail xs] | xs <- [drop n input | n <- [0..(length input - 1)]]]
buildDelta :: Int -> Map.Map Word8 Double -> B.ByteString -> Double
buildDelta totalCount startingMap haystack = Map.fold (\x y -> abs x + y) 0 $ B.foldl (flip (Map.adjust (\a -> a - (1/realToFrac totalCount)))) startingMap haystack
-- buildFreqTable startingValue haystack = (realToFrac (totalCount - inCount) / realToFrac totalCount, freqMap) -- {-# SCC "build-normalize-map" #-} Map.map (/realToFrac inCount) freqMap)
-- where (inCount, totalCount, freqMap) = {-# SCC "build-perform-fold" #-} B.foldl' buidFreqTableFold startingValue haystack
-- buildDelta startingMap haystack = Map.fold (+) 0 $ B.foldl (\x -> Map.adjust (-1/realToFrac totalCount) x) startingMap haystack
-- buildDelta totalCount startingMap haystack = Map.fold (\x y -> {-# SCC "buildDelta-fold-lambda" #-}x + y) 0 $ B.foldl' (flip (Map.adjust (\a -> {-# SCC "buildDelta-adjust-lambda" #-} abs $ a - (1/realToFrac totalCount)))) startingMap haystack
-- where totalCount = B.length haystack
-- The map that is returned is (k, v) with one k for each needle (first [Word8])
-- so v will be 0 for those not found. Otherwise v is the proportion of k occurring
-- in the subset of haystack which contains only the needles.
-- The Double that is returned is the proportion of the total length of the haystack
-- which is Word8s not in the needles.
buildFreqTable :: (Int, Int, Map.Map Word8 Double) -> B.ByteString -> (Double, Map.Map Word8 Double)
buildFreqTable startingValue haystack = (realToFrac (totalCount - inCount) / realToFrac totalCount, freqMap)
where (inCount, totalCount, freqMap) = B.foldl' buidFreqTableFold startingValue haystack
buidFreqTableFold :: (Int, Int, Map.Map Word8 Double) -> Word8 -> (Int, Int, Map.Map Word8 Double)
buidFreqTableFold (inCount, totalCount, accumulatorMap) newByte
| isIn = (inCount + 1, totalCount, Map.adjust (+(1/realToFrac totalCount)) newByte accumulatorMap)
| otherwise = (inCount, totalCount, accumulatorMap)
where isIn = Map.member newByte accumulatorMap
-- buidFreqTableRecursive :: Map.Map Word8 Double -> Double -> Double -> B.ByteString -> (Double, Map.Map Word8 Double)
-- buidFreqTableRecursive accumulatorMap inCount totalCount bytesToAdd
-- | isNull = (0, accumulatorMap)
-- | isEmpty = {-# SCC "build-branch-empty" #-} ((totalCount - inCount) / totalCount, {-# SCC "build-normalize-map" #-} Map.map (/inCount) accumulatorMap)
-- | isIn = {-# SCC "build-branch-in" #-} let newmap = {-# SCC "build-adjust-map" #-} Map.adjust (+1) hd accumulatorMap
-- in buidFreqTableRecursive newmap (inCount + 1) (totalCount + 1) tl
-- | otherwise = buidFreqTableRecursive accumulatorMap inCount (totalCount + 1) tl
-- where hd = B.head bytesToAdd
-- tl = B.tail bytesToAdd
-- isIn = {-# SCC "build-isin?" #-} Map.member hd accumulatorMap -- fix with memoization
-- isEmpty = B.null bytesToAdd
-- isNull = isEmpty && (totalCount == 0)
charToWord8 :: UTF8Char -> Word8
charToWord8 = B.head . TxtEnc.encodeUtf8 . Txt.singleton
word8ToChar :: Word8 -> UTF8Char
word8ToChar = head . Txt.unpack . TxtEnc.decodeUtf8 . B.singleton
bytesToString :: B.ByteString -> UTF8String
bytesToString = Txt.unpack . TxtEnc.decodeUtf8With (\_ _ -> Just '�')
stringToBytes :: UTF8String -> B.ByteString
stringToBytes = TxtEnc.encodeUtf8 . Txt.pack
-- Splits an input byte string into an array of byte strings, each of which is <= n bytes in length
splitBytes :: Int -> B.ByteString -> [B.ByteString]
splitBytes n bytes
| B.null bytes = []
| otherwise = fst split : splitBytes n (snd split)
where split = B.splitAt (fromIntegral n) bytes
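-- For example, splitting into chunks of at most three bytes (a sketch):
--
-- > splitBytes 3 (stringToBytes "abcdefgh")
-- >     == [stringToBytes "abc", stringToBytes "def", stringToBytes "gh"]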
freqTableDelta :: Map.Map Word8 Double -> Map.Map Word8 Double -> Double
freqTableDelta x y = sum [abs (snd (Map.elemAt i x) - snd (Map.elemAt i y)) | i <- [0..Map.size x - 1]]
plusNL :: B.ByteString -> B.ByteString
plusNL x = B.append x $ B.singleton (charToWord8 '\n')
-- Crypto functions
pkcs7Pad :: Int -> B.ByteString -> B.ByteString
pkcs7Pad len input
| padlen > 0 = B.append input $ B.replicate padlen (fromIntegral padlen::Word8)
| otherwise = input
where padlen = fromIntegral len - B.length input
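-- For example, padding the 16-byte block "YELLOW SUBMARINE" out to 20 bytes
-- appends four bytes of value 4 (a sketch using 'stringToBytes'):
--
-- > pkcs7Pad 20 (stringToBytes "YELLOW SUBMARINE")
-- >     == stringToBytes "YELLOW SUBMARINE\EOT\EOT\EOT\EOT"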
breakIntoBlocksPkcs7 :: Int -> B.ByteString -> [B.ByteString]
breakIntoBlocksPkcs7 blocksize str = init split ++ [pkcs7Pad blocksize (last split)]
where split = splitBytes blocksize str
decryptAES128ECB :: B.ByteString -> B.ByteString -> B.ByteString
-- decryptAES128ECB k = AES.crypt AES.ECB (B.toStrict k) (B.toStrict $ B.replicate 16 (0::Word8)) AES.Decrypt
decryptAES128ECB k = AES.crypt AES.ECB (B.toStrict k) (B.toStrict $ B.replicate 16 (0::Word8)) AES.Decrypt
encryptAES128ECB :: B.ByteString -> B.ByteString -> B.ByteString
encryptAES128ECB k input = B.concat $ map (AES.crypt AES.ECB (B.toStrict k) (B.toStrict $ B.replicate 16 (0::Word8)) AES.Encrypt) blocks
where blocks = breakIntoBlocksPkcs7 16 input
encryptAES128CBC :: B.ByteString -> B.ByteString -> B.ByteString -> B.ByteString
-- XOR each plaintext block with the previous ciphertext block (the last 16
-- bytes of the accumulator, which starts out as the IV), then ECB-encrypt it.
encryptAES128CBC iv k input = foldl (\a b -> B.append a $ encryptAES128CBC' k (B.drop (B.length a - 16) a) b) iv blocks
where blocks = breakIntoBlocksPkcs7 16 input
decryptAES128CBC :: B.ByteString -> B.ByteString -> B.ByteString -> B.ByteString
decryptAES128CBC iv k input = B.concat . snd $ recursiveDecrypt (iv:blocks, [])
where
recursiveDecrypt (c, p)
| length c < 2 = ([], p)
| otherwise = recursiveDecrypt(init c, decryptAES128CBC' k (last $ init c) (last c):p)
blocks = splitBytes 16 input
encryptAES128CBC' :: B.ByteString -> B.ByteString -> B.ByteString -> B.ByteString
encryptAES128CBC' k prev cur = encryptAES128ECB k (bitwiseCombine xor prev cur)
decryptAES128CBC' :: B.ByteString -> B.ByteString -> B.ByteString -> B.ByteString
decryptAES128CBC' k prev cur = bitwiseCombine xor prev $ decryptAES128ECB k cur
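-- With the chaining above, a round trip recovers the (still PKCS#7-padded)
-- plaintext once the prepended IV is stripped from the ciphertext; a sketch:
--
-- > decryptAES128CBC iv k (B.drop 16 (encryptAES128CBC iv k msg))
-- >     -- == msg, followed by its PKCS#7 padding when the last block was short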
-- Data
englishFreqTable :: Map.Map Word8 Double
englishFreqTable = Map.fromList $ map (first charToWord8)
[ ('A', 0.0651738)
, ('B', 0.0124248)
, ('C', 0.0217339)
, ('D', 0.0349835)
, ('E', 0.1041442)
, ('F', 0.0197881)
, ('G', 0.0158610)
, ('H', 0.0492888)
, ('I', 0.0558094)
, ('J', 0.0009033)
, ('K', 0.0050529)
, ('L', 0.0331490)
, ('M', 0.0202124)
, ('N', 0.0564513)
, ('O', 0.0596302)
, ('P', 0.0137645)
, ('Q', 0.0008606)
, ('R', 0.0497563)
, ('S', 0.0515760)
, ('T', 0.0729357)
, ('U', 0.0225134)
, ('V', 0.0082903)
, ('W', 0.0171272)
, ('X', 0.0013692)
, ('Y', 0.0145984)
, ('Z', 0.0007836)
, (' ', 0.1918182)
]
asciiFreqTableNoNL :: Map.Map Word8 Double
asciiFreqTableNoNL = freqTableRemove 10 asciiFreqTable
-- Remove an element of a frequency table and re-normalize it
freqTableRemove :: (Ord a) => a -> Map.Map a Double -> Map.Map a Double
freqTableRemove = freqTableSetFreq 0.0
-- Set an element of a frequency table to a new value (<1) and re-normalize
-- The new value will be the new *normalized* frequency
freqTableSetFreq :: (Ord a) => Double -> a -> Map.Map a Double -> Map.Map a Double
freqTableSetFreq vNew k oldMap = Map.map normalizer newMap
where newMap = Map.update (\_ -> Just vNew) k oldMap
normalizer = case Map.lookup k oldMap
of Just 1.0 -> id -- Avoid division by zero
Just vOld -> \x -> (x / (1.0 - vOld)) * (1.0 - vNew)
_ -> id
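-- A small worked example of the renormalisation above: 'asciiFreqTableNoNL'
-- is built by removing the newline byte (10), whose frequency is roughly
-- 0.01666, so every remaining entry of 'asciiFreqTable' is scaled by
-- 1 / (1 - 0.01666), about 1.017, and the surviving frequencies again sum to
-- (approximately) 1.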
asciiFreqTable :: Map.Map Word8 Double
asciiFreqTable = Map.fromList -- This comes from IMDb biographies (~154 MB)
[ ( 10, 0.0166623841) -- New lines are common!
-- NB: this uses \n line endings; you should strip \r endings!
, ( 32, 0.1493452336)
, ( 33, 0.0000877131)
, ( 34, 0.0039434812)
, ( 35, 0.0000242677)
, ( 36, 0.0000232542)
, ( 37, 0.0000050115)
, ( 38, 0.0002297750)
, ( 39, 0.0036935298)
, ( 40, 0.0033630778)
, ( 41, 0.0033603918)
, ( 42, 0.0000068084)
, ( 43, 0.0000100105)
, ( 44, 0.0109746709)
, ( 45, 0.0018378580)
, ( 46, 0.0083998790)
, ( 47, 0.0003024661)
, ( 48, 0.0029106658)
, ( 49, 0.0031379039)
, ( 50, 0.0020484266)
, ( 51, 0.0006482944)
, ( 52, 0.0006231376)
, ( 53, 0.0006974888)
, ( 54, 0.0006034835)
, ( 55, 0.0006037695)
, ( 56, 0.0007166642)
, ( 57, 0.0024395819)
, ( 58, 0.0002724035)
, ( 59, 0.0002067820)
, ( 60, 0.0000000249)
, ( 61, 0.0000020270)
, ( 62, 0.0000000311)
, ( 63, 0.0000311134)
, ( 64, 0.0000022322)
, ( 65, 0.0044852354)
, ( 66, 0.0029482766)
, ( 67, 0.0037505026)
, ( 68, 0.0022110691)
, ( 69, 0.0013691369)
, ( 70, 0.0022042669)
, ( 71, 0.0014638200)
, ( 72, 0.0030768151)
, ( 73, 0.0021547430)
, ( 74, 0.0013806024)
, ( 75, 0.0008927370)
, ( 76, 0.0020431788)
, ( 77, 0.0031132383)
, ( 78, 0.0015841325)
, ( 79, 0.0010364217)
, ( 80, 0.0021186990)
, ( 81, 0.0000892488)
, ( 82, 0.0016801151)
, ( 83, 0.0047621654)
, ( 84, 0.0038583547)
, ( 85, 0.0008843866)
, ( 86, 0.0010261563)
, ( 87, 0.0015732453)
, ( 88, 0.0000846415)
, ( 89, 0.0006201096)
, ( 90, 0.0001401904)
, ( 91, 0.0000163028)
, ( 92, 0.0000004352)
, ( 93, 0.0000162966)
, ( 94, 0.0000001741)
, ( 95, 0.0013607182)
, ( 96, 0.0000021513)
, ( 97, 0.0664147438)
, ( 98, 0.0082465257)
, ( 99, 0.0227508279)
, (100, 0.0313071255)
, (101, 0.0879005501)
, (102, 0.0150727676)
, (103, 0.0151489158)
, (104, 0.0336337629)
, (105, 0.0583783203)
, (106, 0.0008706828)
, (107, 0.0053374890)
, (108, 0.0310655990)
, (109, 0.0179510951)
, (110, 0.0589311109)
, (111, 0.0548135280)
, (112, 0.0122177814)
, (113, 0.0017306401)
, (114, 0.0527059338)
, (115, 0.0472023266)
, (116, 0.0553994775)
, (117, 0.0181021976)
, (118, 0.0090335307)
, (119, 0.0126625084)
, (120, 0.0011941461)
, (121, 0.0115988780)
, (122, 0.0009457739)
, (123, 0.0000044208)
, (124, 0.0000000062)
, (125, 0.0000044332)
, (126, 0.0000008953)
, (128, 0.0000000062)
, (129, 0.0000000062)
, (130, 0.0000000062)
, (131, 0.0000000124)
, (132, 0.0000000062)
, (139, 0.0000000062)
, (157, 0.0000000062)
, (159, 0.0000000062)
, (160, 0.0000017720)
, (161, 0.0000004415)
, (162, 0.0000000746)
, (163, 0.0000009575)
, (165, 0.0000000062)
, (166, 0.0000000062)
, (167, 0.0000000249)
, (168, 0.0000008580)
, (169, 0.0000005409)
, (170, 0.0000000808)
, (171, 0.0000015420)
, (172, 0.0000000497)
, (173, 0.0000005036)
, (174, 0.0000048685)
, (175, 0.0000000124)
, (176, 0.0000004042)
, (177, 0.0000000435)
, (178, 0.0000000684)
, (179, 0.0000000870)
, (182, 0.0000000062)
, (183, 0.0000006529)
, (184, 0.0000000311)
, (185, 0.0000000870)
, (186, 0.0000001990)
, (187, 0.0000015544)
, (188, 0.0000000622)
, (189, 0.0000005409)
, (190, 0.0000000249)
, (191, 0.0000002301)
, (192, 0.0000002238)
, (193, 0.0000011130)
, (194, 0.0000000560)
, (195, 0.0000001803)
, (196, 0.0000002674)
, (197, 0.0000005596)
, (198, 0.0000000187)
, (199, 0.0000002425)
, (200, 0.0000001057)
, (201, 0.0000021700)
, (202, 0.0000000187)
, (203, 0.0000000249)
, (205, 0.0000001057)
, (206, 0.0000000435)
, (208, 0.0000000062)
, (209, 0.0000000373)
, (210, 0.0000000311)
, (211, 0.0000003731)
, (212, 0.0000001554)
, (213, 0.0000000435)
, (214, 0.0000006777)
, (215, 0.0000000622)
, (216, 0.0000002549)
, (218, 0.0000002487)
, (219, 0.0000000373)
, (220, 0.0000002984)
, (221, 0.0000000124)
, (222, 0.0000000311)
, (223, 0.0000007026)
, (224, 0.0000033700)
, (225, 0.0000179878)
, (226, 0.0000017161)
, (227, 0.0000048622)
, (228, 0.0000064913)
, (229, 0.0000027669)
, (230, 0.0000009575)
, (231, 0.0000048498)
, (232, 0.0000083566)
, (233, 0.0000646641)
, (234, 0.0000015917)
, (235, 0.0000024871)
, (236, 0.0000002674)
, (237, 0.0000133618)
, (238, 0.0000006839)
, (239, 0.0000011316)
, (240, 0.0000002549)
, (241, 0.0000076105)
, (242, 0.0000005409)
, (243, 0.0000133867)
, (244, 0.0000032705)
, (245, 0.0000006529)
, (246, 0.0000101162)
, (248, 0.0000032705)
, (249, 0.0000003917)
, (250, 0.0000034570)
, (251, 0.0000007461)
, (252, 0.0000090219)
, (253, 0.0000003668)
, (254, 0.0000000497)
, (255, 0.0000000249)
]
|
mjec/cryptopals-haskell
|
src/Lib.hs
|
bsd-3-clause
| 15,425
| 0
| 16
| 3,641
| 4,300
| 2,569
| 1,731
| 351
| 3
|
module Main where
import Control.Monad.IO.Class
import Network.HTTP.Media ((//), (/:))
import Network.Wai
import Network.Wai.Handler.Warp
import Servant.Server
import Servant.API
import Data.Proxy
import System.Posix.User
import API
fromUserEnt :: UserEntry -> User
fromUserEnt (UserEntry name pass uid gid gecos homedir sh) = User name (fromIntegral uid) (fromIntegral gid) sh
userdb :: IO [User]
userdb = getAllUserEntries >>= \ents -> return . map fromUserEnt . filter (\ent -> ((fromIntegral . userID) ent) >= 100) $ ents
server1 :: [User] -> Server API
server1 users = listUsers users
:<|> queryUser users
listUsers users = return users
queryUser users uid_ gid_ = return $! case (uid_, gid_) of
(Nothing, Nothing) -> []
(Just uid, Just gid) -> filter (\x -> userid x == uid && groupid x == gid) users
(Just uid, Nothing) -> filter (\x -> userid x == uid) users
(Nothing, Just gid) -> filter (\x -> groupid x == gid) users
app1 :: [User] -> Application
app1 db = serve userAPI (server1 db)
main :: IO ()
main = userdb >>= \db -> run 8000 (app1 db)
|
cl04/rest1
|
hask-rest1/app/Server.hs
|
bsd-3-clause
| 1,085
| 0
| 16
| 205
| 458
| 245
| 213
| -1
| -1
|
{-#LANGUAGE RecordWildCards #-}
{-# LANGUAGE LambdaCase #-}
module FileServer where
import Network hiding (accept, sClose)
import Network.Socket hiding (send, recv, sendTo, recvFrom, Broadcast)
import Network.Socket.ByteString
import Data.ByteString.Char8 (pack, unpack)
import System.Environment
import System.IO
import Control.Concurrent
import Control.Concurrent.STM
import Control.Exception
import Control.Monad (forever, when, join)
import Data.List.Split
import Data.Word
import Text.Printf (printf)
import System.Directory
--Server data type allows me to pass address and port details easily
data FileServer = FileServer { address :: String, port :: String }
--Constructor
newFileServer :: String -> String -> IO FileServer
newFileServer address port = atomically $ do FileServer <$> return address <*> return port
--4 is easy for testing the pooling
maxnumThreads = 4
serverport :: String
serverport = "7007"
serverhost :: String
serverhost = "localhost"
run:: IO ()
run = withSocketsDo $ do
--Command line arguments for port and address
--args <- getArgs
createDirectoryIfMissing True ("distserver" ++ serverhost ++ ":" ++ serverport ++ "/")
setCurrentDirectory ("distserver" ++ serverhost ++ ":" ++ serverport ++ "/")
addrInfo <- getAddrInfo Nothing (Just serverhost) (Just $ show (serverport))
let serverAddr = head addrInfo
clsock <- socket (addrFamily serverAddr) Stream defaultProtocol
connect clsock (addrAddress serverAddr)
send clsock $ pack $ "JOIN:" ++ "\\n" ++
"ADDRESS:" ++ serverhost ++ "\n" ++
"PORT:" ++ serverport ++ "\n"
resp <- recv clsock 1024
let msg = unpack resp
printf msg
server <- newFileServer serverhost serverport
--sock <- listenOn (PortNumber (fromIntegral serverport))
addrinfos <- getAddrInfo
(Just (defaultHints {addrFlags = [AI_PASSIVE]}))
Nothing (Just serverport)
let serveraddr = head addrinfos
sock <- socket (addrFamily serveraddr) Stream defaultProtocol
bindSocket sock (addrAddress serveraddr)
listen sock 5
_ <- printf "Listening on port %s\n" serverport
--Listen on port from command line argument
--New Abstract FIFO Channel
chan <- newChan
--TVars are mutable variables stored in shared memory; this way we can access numThreads from any function
numThreads <- atomically $ newTVar 0
--Spawns a new thread to handle the clientconnectHandler method, passes socket, channel, numThreads and server
forkIO $ clientconnectHandler sock chan numThreads server
--Calls the mainHandler which will monitor the FIFO channel
mainHandler sock chan
mainHandler :: Socket -> Chan String -> IO ()
mainHandler sock chan = do
--Read current message on the FIFO channel
chanMsg <- readChan chan
--If KILL_SERVICE, stop mainHandler; if anything else, call mainHandler again, keeping the service running
case (chanMsg) of
("KILL_SERVICE") -> putStrLn "Terminating the Service!"
_ -> mainHandler sock chan
clientconnectHandler :: Socket -> Chan String -> TVar Int -> FileServer -> IO ()
clientconnectHandler sock chan numThreads server = do
--Accept a connection, which returns a new socket and the client address
--(handle, host, port) <- accept sock
(s,a) <- accept sock
--handle <- socketToHandle s ReadWriteMode
--Read numThreads from memory and print it on server console
count <- atomically $ readTVar numThreads
putStrLn $ "numThreads = " ++ show count
--If there are still threads remaining, create a new thread and increment the count (decrementing again when the thread finishes or dies); otherwise tell the user capacity has been reached
if (count < maxnumThreads) then do
forkFinally (clientHandler s chan server) (\_ -> atomically $ decrementTVar numThreads)
atomically $ incrementTVar numThreads
else do
send s (pack ("Maximum number of threads in use. try again soon"++"\n\n"))
sClose s
clientconnectHandler sock chan numThreads server
clientHandler :: Socket -> Chan String -> FileServer -> IO ()
clientHandler sock chan server@FileServer{..} =
forever $ do
message <- recv sock 1024
let msg = unpack message
print $ msg ++ "!ENDLINE!"
let cmd = head $ words $ head $ splitOn ":" msg
print cmd
case cmd of
("HELO") -> heloCommand sock server $ (words msg) !! 1
("KILL_SERVICE") -> killCommand chan sock
("DOWNLOAD") -> downloadCommand sock server msg
("UPLOAD") -> uploadCommand sock server msg
("UPDATE") -> updateCommand sock server msg
_ -> do send sock (pack ("Unknown Command - " ++ msg ++ "\n\n")) ; return ()
--Function called when HELO text command recieved
heloCommand :: Socket -> FileServer -> String -> IO ()
heloCommand sock FileServer{..} msg = do
send sock $ pack $ "HELO " ++ msg ++ "\n" ++
"IP:" ++ "192.168.6.129" ++ "\n" ++
"Port:" ++ port ++ "\n" ++
"StudentID:12306421\n\n"
return ()
killCommand :: Chan String -> Socket -> IO ()
killCommand chan sock = do
send sock $ pack $ "Service is now terminating!"
writeChan chan "KILL_SERVICE"
downloadCommand :: Socket -> FileServer -> String -> IO ()
downloadCommand sock server@FileServer{..} command = do
let clines = splitOn "\\n" command
filename = (splitOn ":" $ clines !! 1) !! 1
doesFileExist filename >>= \case
True -> do fdata <- readFile filename
send sock $ pack $ "DOWNLOAD: " ++ filename ++ "\n" ++
"DATA: " ++ fdata ++ "\n\n"
False -> send sock $ pack $ "DOWNLOAD: " ++ filename ++ "\n" ++
"DATA: " ++ "File not Found!!" ++ "\n\n"
return ()
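-- An illustrative sketch of the message shape this handler expects; the
-- separator is the literal two-character sequence backslash-'n' (matching the
-- splitOn "\\n" above), and the "FILENAME" key name is an assumption, since
-- only the text after the ':' in the second segment is actually used:
--
-- > "DOWNLOAD:\\nFILENAME:notes.txt\\n"
--
-- downloadCommand splits this on "\\n", takes the second segment, splits it
-- on ":" and recovers "notes.txt".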
uploadCommand :: Socket -> FileServer -> String -> IO ()
uploadCommand sock server@FileServer{..} command = do
let clines = splitOn "\\n" command
filename = (splitOn ":" $ clines !! 1) !! 1
fdata = (splitOn ":" $ clines !! 2) !! 1
doesFileExist filename >>= \case
True -> send sock $ pack $ "UPLOAD: " ++ filename ++ "\n" ++
"Failed: " ++ "File Already Exists!" ++ "\n\n"
False -> do file <- writeFile filename fdata
send sock $ pack $ "UPLOAD: " ++ filename ++ "\n" ++
"STATUS: " ++ "Success" ++ "\n\n"
return ()
updateCommand :: Socket -> FileServer -> String -> IO ()
updateCommand sock server@FileServer{..} command = do
let clines = splitOn "\\n" command
filename = (splitOn ":" $ clines !! 1) !! 1
fdata = (splitOn ":" $ clines !! 2) !! 1
doesFileExist filename >>= \case
True -> do file <- appendFile filename fdata
send sock $ pack $ "UPDATE: " ++ filename ++ "\n" ++
"STATUS: " ++ "Success" ++ "\n\n"
False -> send sock $ pack $ "UPLOAD: " ++ filename ++ "\n" ++
"STATUS " ++ "Failed; File doesn't exist!" ++ "\n\n"
return ()
--Increment Tvar stored in memory i.e. numThreads
incrementTVar :: TVar Int -> STM ()
incrementTVar tv = modifyTVar tv ((+) 1)
--Decrement Tvar stored in memory i.e. numThreads
decrementTVar :: TVar Int -> STM ()
decrementTVar tv = modifyTVar tv (subtract 1)
|
Garygunn94/DFS
|
.stack-work/intero/intero18234Jfb.hs
|
bsd-3-clause
| 7,344
| 194
| 17
| 1,861
| 1,911
| 984
| 927
| 135
| 6
|
module Main where
import Acme.Dont
main :: IO ()
main = don't $ do
return ()
return ()
|
bennofs/ghc-server
|
tests/db-reloading/Main.hs
|
bsd-3-clause
| 94
| 0
| 9
| 25
| 44
| 22
| 22
| 6
| 1
|
module Doukaku.BusTest (tests) where
import Distribution.TestSuite
import Doukaku.TestHelper
import qualified Doukaku.Bus as Bus
tests :: IO [Test]
tests = createTests $ newDoukakuTest {
tsvPath = "test/Doukaku/bus.tsv"
, solve = Bus.solve
}
|
hiratara/doukaku-past-questions-advent-2013
|
test/Doukaku/BusTest.hs
|
bsd-3-clause
| 250
| 0
| 8
| 39
| 65
| 40
| 25
| 8
| 1
|
{- | = External Sort
When you don't have enough memory to sort a large number of values an
<https://en.wikipedia.org/wiki/External_sorting external sort>
can be used.
This library implements an /external sort/ algorithm using Gabriel Gonzales'
<https://hackage.haskell.org/package/pipes pipes> library and Ben Gamari's
<https://hackage.haskell.org/package/pipes-interleave pipes-interleave> extension.
== Usage
The library exports two functions, 'externalSortFile' and 'externalSortHandle'. The former is
simply a wrapper around the latter. The first argument to both functions is a configuration
value with the polymorphic type @'ExternalSortCfg' a@. The configuration value contains:
* 'readVal': a function to read a value of type @a@ from a 'Handle'
* 'writeVal': a function to write a value of type @a@ to a 'Handle'
* 'chunkSize': the number of values that will be read into memory at any one time
* 'sorter': a sort algorithm to use on each chunk. (Rather than choose one for you, you are
free to use your own)
* 'comparer': a function to compare two values
It is up to the user to work out how much memory @chunkSize@ values will use at it will depend
on the type being read/written.
== Algorithm design
The algorithm proceeds in two stages. In the first phase @chunkSize@ values are repeatedly read
into memory, sorted, and then written to temporary intermediate files. The number of intermediate
files should be @ceiling (n div chunkSize)@ where @n@ is the number of values in the file.
In the second phase all of the temporary intermediate files are opened for reading and a k-way
merge is performed on the values. The results are streamed to the output file/handle (depending
on which algorithm you use). It is important that your operating system allow this many open files
otherwise you will receive a @openFile: resource exhausted (Too many open files)@ exception.
-}
module Data.ExternalSort (
-- | = Types
ExternalSortCfg(..)
-- | = Functions
-- $externalSortFile
, externalSortFile
-- $externalSortHandle
, externalSortHandle
) where
import Data.ExternalSort.Internal
-- $externalSortFile
-- @externalSortFile cfg inFile outFile@ externally sorts @inFile@ and writes its output to @outFile@.
-- It is simply a wrapper around 'externalSortHandle'.
--
-- $externalSortHandle
-- @externalSortHandle cfg inH outH@ externally sorts records from handle @inH@ and writes its
-- output to handle @outH@
--
|
sseefried/external-sort
|
src/Data/ExternalSort.hs
|
bsd-3-clause
| 2,451
| 0
| 5
| 401
| 40
| 31
| 9
| 5
| 0
|
module Yawn.Logger(
Level (LOG_INFO, LOG_DEBUG, LOG_ERROR),
doLog,
system,
trace
) where
import System.IO (IOMode (AppendMode), hPutStr, withFile)
import System.IO.Error (try)
import Yawn.Util.Time (getCalendarTime)
import Yawn.Configuration (Configuration, logRoot)
data Level = LOG_INFO | LOG_DEBUG | LOG_ERROR | LOG_TRACE deriving (Eq)
instance Show Level where
show LOG_INFO = "info"
show LOG_DEBUG = "debug"
show LOG_ERROR = "error"
show LOG_TRACE = "trace"
doLog :: Show a => Configuration -> Level -> a -> IO ()
doLog c l s = formatMessage s l >>= writeOut c l
system :: Show a => a -> IO ()
system s = formatMessage s LOG_INFO >>= putStrLn
trace :: Show a => a -> IO ()
trace s = formatMessage s LOG_TRACE >>= putStrLn
writeOut :: Configuration -> Level -> String -> IO ()
writeOut c l s = do
let logFile = logRoot c ++ show l ++ ".log"
try (withFile logFile AppendMode (\h -> hPutStr h (s ++ "\n"))) >>= \st -> case st of
Left e -> error ("Unable to open logfile. " ++ show e) >> return ()
Right _ok -> putStrLn s
formatMessage :: Show a => a -> Level -> IO (String)
formatMessage s l = getCalendarTime >>= \t -> return $ "[" ++ t ++ "] [" ++ show l ++ "] " ++ show s
|
ameingast/yawn
|
src/Yawn/Logger.hs
|
bsd-3-clause
| 1,214
| 0
| 16
| 254
| 501
| 259
| 242
| 31
| 2
|
--------------------------------------------------------------------------------
-- |
-- Module : System.Information.StreamInfo
-- Copyright : (c) José A. Romero L.
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : José A. Romero L. <escherdragon@gmail.com>
-- Stability : unstable
-- Portability : unportable
--
-- Generic code to poll any of the many data files maintained by the kernel in
-- POSIX systems. Provides methods for applying a custom parsing function to the
-- contents of the file and to calculate differentials across one or more values
-- provided via the file.
--
--------------------------------------------------------------------------------
module System.Information.StreamInfo
( getParsedInfo
, getLoad
, getAccLoad
, getTransfer
) where
import Control.Concurrent ( threadDelay )
import Data.IORef
import Data.Maybe ( fromMaybe )
-- | Apply the given parser function to the file under the given path to produce
-- a lookup map, then use the given selector as key to extract from it the
-- desired value.
getParsedInfo :: FilePath -> (String -> [(String, [a])]) -> String -> IO [a]
getParsedInfo path parser selector = do
file <- readFile path
(length file) `seq` return ()
return (fromMaybe [] $ lookup selector $ parser file)
truncVal :: (RealFloat a) => a -> a
truncVal v
| isNaN v || v < 0.0 = 0.0
| otherwise = v
-- | Convert the given list of Integer to a list of the ratios of each of its
-- elements against their sum.
toRatioList :: (Integral a, RealFloat b) => [a] -> [b]
toRatioList deltas = map truncVal ratios
where total = fromIntegral $ foldr (+) 0 deltas
ratios = map ((/total) . fromIntegral) deltas
-- | Execute the given action twice with the given delay in-between and return
-- the difference between the two samples.
probe :: (Num a, RealFrac b) => IO [a] -> b -> IO [a]
probe action delay = do
a <- action
threadDelay $ round (delay * 1e6)
b <- action
return $ zipWith (-) b a
-- | Execute the given action once and return the difference between the
-- obtained sample and the one contained in the given IORef.
accProbe :: (Num a) => IO [a] -> IORef [a] -> IO [a]
accProbe action sample = do
a <- readIORef sample
b <- action
writeIORef sample b
return $ zipWith (-) b a
-- | Probe the given action and, interpreting the result as a variation in time,
-- return the speed of change of its values.
getTransfer :: (Integral a, RealFloat b) => b -> IO [a] -> IO [b]
getTransfer interval action = do
deltas <- probe action interval
return $ map (truncVal . (/interval) . fromIntegral) deltas
-- | Probe the given action and return the relative variation of each of the
-- obtained values against the whole, where the whole is calculated as the sum
-- of all the values in the probe.
getLoad :: (Integral a, RealFloat b) => b -> IO [a] -> IO [b]
getLoad interval action = do
deltas <- probe action interval
return $ toRatioList deltas
-- | Similar to getLoad, but execute the given action only once and use the
-- given IORef to calculate the result and to save the current value, so it
-- can be reused in the next call.
getAccLoad :: (Integral a, RealFloat b) => IORef [a] -> IO [a] -> IO [b]
getAccLoad sample action = do
deltas <- accProbe action sample
return $ toRatioList deltas
|
Undeterminant/taffybar
|
src/System/Information/StreamInfo.hs
|
bsd-3-clause
| 3,369
| 0
| 12
| 701
| 754
| 399
| 355
| 45
| 1
|
module Maven.Types.Pom (
Pom (Pom)
, DependencyManagement(DepMan)
, Dependency(..)
, Parent(..)
, groupId
, artifactId
, version
, dependencies
, dependencyManagement
) where
import Data.Map as Map
import Data.Text as T
data Pom = Pom
{ _groupId :: Maybe T.Text
, _artifactId :: T.Text
, _version :: Maybe T.Text
, _parent :: Maybe Parent
, _properties :: Map.Map T.Text T.Text
, _dependencyManagement :: Maybe DependencyManagement
, _dependencies :: Maybe [Dependency]
, _modules :: Maybe [T.Text]
} deriving (Eq, Show)
newtype Parent = Parent Dependency deriving (Eq, Show)
newtype DependencyManagement = DepMan [Dependency] deriving (Eq, Show)
-- It would be better to express version as a Maybe as it can be missing,
-- currently it will just appear as an empty String
data Dependency = Dependency
-- | groupId
( Maybe T.Text )
-- | artifactId
T.Text
-- | version
( Maybe T.Text ) deriving (Eq, Show)
class HasPackageInfo a where
groupId :: a -> Maybe T.Text
artifactId :: a -> T.Text
version :: a -> Maybe T.Text
instance HasPackageInfo Dependency where
groupId (Dependency gid _ _) = gid
artifactId (Dependency _ aid _) = aid
version (Dependency _ _ v) = v
instance HasPackageInfo Pom where
groupId = _groupId
artifactId = _artifactId
version = _version
properties = _properties
dependencyManagement= _dependencyManagement
dependencies = _dependencies
modules = _modules
|
wayofthepie/pom-analyzer
|
src/Maven/Types/Pom.hs
|
bsd-3-clause
| 1,661
| 0
| 11
| 500
| 418
| 239
| 179
| 48
| 1
|
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE GADTs #-}
{-# LANGUAGE NoRebindableSyntax #-}
module Duckling.Rules
( allRules
, rulesFor
) where
import Data.HashSet (HashSet)
import Prelude
import qualified Data.HashSet as HashSet
import Duckling.Dimensions
import Duckling.Dimensions.Types
import Duckling.Locale
import Duckling.Types
import qualified Duckling.Rules.AF as AFRules
import qualified Duckling.Rules.AR as ARRules
import qualified Duckling.Rules.Common as CommonRules
import qualified Duckling.Rules.BG as BGRules
import qualified Duckling.Rules.BN as BNRules
import qualified Duckling.Rules.CA as CARules
import qualified Duckling.Rules.CS as CSRules
import qualified Duckling.Rules.DA as DARules
import qualified Duckling.Rules.DE as DERules
import qualified Duckling.Rules.EL as ELRules
import qualified Duckling.Rules.EN as ENRules
import qualified Duckling.Rules.ES as ESRules
import qualified Duckling.Rules.ET as ETRules
import qualified Duckling.Rules.FI as FIRules
import qualified Duckling.Rules.FA as FARules
import qualified Duckling.Rules.FR as FRRules
import qualified Duckling.Rules.GA as GARules
import qualified Duckling.Rules.HE as HERules
import qualified Duckling.Rules.HI as HIRules
import qualified Duckling.Rules.HR as HRRules
import qualified Duckling.Rules.HU as HURules
import qualified Duckling.Rules.ID as IDRules
import qualified Duckling.Rules.IS as ISRules
import qualified Duckling.Rules.IT as ITRules
import qualified Duckling.Rules.JA as JARules
import qualified Duckling.Rules.KA as KARules
import qualified Duckling.Rules.KM as KMRules
import qualified Duckling.Rules.KN as KNRules
import qualified Duckling.Rules.KO as KORules
import qualified Duckling.Rules.LO as LORules
import qualified Duckling.Rules.ML as MLRules
import qualified Duckling.Rules.MN as MNRules
import qualified Duckling.Rules.MY as MYRules
import qualified Duckling.Rules.NB as NBRules
import qualified Duckling.Rules.NE as NERules
import qualified Duckling.Rules.NL as NLRules
import qualified Duckling.Rules.PL as PLRules
import qualified Duckling.Rules.PT as PTRules
import qualified Duckling.Rules.RO as RORules
import qualified Duckling.Rules.RU as RURules
import qualified Duckling.Rules.SK as SKRules
import qualified Duckling.Rules.SV as SVRules
import qualified Duckling.Rules.SW as SWRules
import qualified Duckling.Rules.TA as TARules
import qualified Duckling.Rules.TE as TERules
import qualified Duckling.Rules.TH as THRules
import qualified Duckling.Rules.TR as TRRules
import qualified Duckling.Rules.UK as UKRules
import qualified Duckling.Rules.VI as VIRules
import qualified Duckling.Rules.ZH as ZHRules
-- | Returns the minimal set of rules required for `targets`.
rulesFor :: Locale -> HashSet (Seal Dimension) -> [Rule]
rulesFor locale targets
| HashSet.null targets = allRules locale
| otherwise = [ rules | dims <- HashSet.toList $ explicitDimensions targets
, rules <- rulesFor' locale dims ]
-- | Returns all the rules for the provided locale.
-- We can't really use `allDimensions` as-is, since `TimeGrain` is not present.
allRules :: Locale -> [Rule]
allRules locale@(Locale lang _) = concatMap (rulesFor' locale) . HashSet.toList
. explicitDimensions . HashSet.fromList $ allDimensions lang
rulesFor' :: Locale -> Seal Dimension -> [Rule]
rulesFor' (Locale lang (Just region)) dim =
CommonRules.rules dim ++ langRules lang dim ++ localeRules lang region dim
rulesFor' (Locale lang Nothing) dim =
CommonRules.rules dim ++ defaultRules lang dim
-- | Default rules when no locale, for backward compatibility.
defaultRules :: Lang -> Seal Dimension -> [Rule]
defaultRules AF = AFRules.defaultRules
defaultRules AR = ARRules.defaultRules
defaultRules BG = BGRules.defaultRules
defaultRules BN = BNRules.defaultRules
defaultRules CA = CARules.defaultRules
defaultRules CS = CSRules.defaultRules
defaultRules DA = DARules.defaultRules
defaultRules DE = DERules.defaultRules
defaultRules EL = ELRules.defaultRules
defaultRules EN = ENRules.defaultRules
defaultRules ES = ESRules.defaultRules
defaultRules ET = ETRules.defaultRules
defaultRules FI = FIRules.defaultRules
defaultRules FA = FARules.defaultRules
defaultRules FR = FRRules.defaultRules
defaultRules GA = GARules.defaultRules
defaultRules HE = HERules.defaultRules
defaultRules HI = HIRules.defaultRules
defaultRules HR = HRRules.defaultRules
defaultRules HU = HURules.defaultRules
defaultRules ID = IDRules.defaultRules
defaultRules IS = ISRules.defaultRules
defaultRules IT = ITRules.defaultRules
defaultRules JA = JARules.defaultRules
defaultRules KA = KARules.defaultRules
defaultRules KM = KMRules.defaultRules
defaultRules KN = KNRules.defaultRules
defaultRules KO = KORules.defaultRules
defaultRules LO = LORules.defaultRules
defaultRules ML = MLRules.defaultRules
defaultRules MN = MNRules.defaultRules
defaultRules MY = MYRules.defaultRules
defaultRules NB = NBRules.defaultRules
defaultRules NE = NERules.defaultRules
defaultRules NL = NLRules.defaultRules
defaultRules PL = PLRules.defaultRules
defaultRules PT = PTRules.defaultRules
defaultRules RO = RORules.defaultRules
defaultRules RU = RURules.defaultRules
defaultRules SK = SKRules.defaultRules
defaultRules SV = SVRules.defaultRules
defaultRules SW = SWRules.defaultRules
defaultRules TA = TARules.defaultRules
defaultRules TE = TERules.defaultRules
defaultRules TH = THRules.defaultRules
defaultRules TR = TRRules.defaultRules
defaultRules UK = UKRules.defaultRules
defaultRules VI = VIRules.defaultRules
defaultRules ZH = ZHRules.defaultRules
localeRules :: Lang -> Region -> Seal Dimension -> [Rule]
localeRules AF = AFRules.localeRules
localeRules AR = ARRules.localeRules
localeRules BG = BGRules.localeRules
localeRules BN = BNRules.localeRules
localeRules CA = CARules.localeRules
localeRules CS = CSRules.localeRules
localeRules DA = DARules.localeRules
localeRules DE = DERules.localeRules
localeRules EL = ELRules.localeRules
localeRules EN = ENRules.localeRules
localeRules ES = ESRules.localeRules
localeRules ET = ETRules.localeRules
localeRules FI = FIRules.localeRules
localeRules FA = FARules.localeRules
localeRules FR = FRRules.localeRules
localeRules GA = GARules.localeRules
localeRules HE = HERules.localeRules
localeRules HI = HIRules.localeRules
localeRules HR = HRRules.localeRules
localeRules HU = HURules.localeRules
localeRules ID = IDRules.localeRules
localeRules IS = ISRules.localeRules
localeRules IT = ITRules.localeRules
localeRules JA = JARules.localeRules
localeRules KA = KARules.localeRules
localeRules KM = KMRules.localeRules
localeRules KN = KNRules.localeRules
localeRules KO = KORules.localeRules
localeRules LO = LORules.localeRules
localeRules ML = MLRules.localeRules
localeRules MN = MNRules.localeRules
localeRules MY = MYRules.localeRules
localeRules NB = NBRules.localeRules
localeRules NE = NERules.localeRules
localeRules NL = NLRules.localeRules
localeRules PL = PLRules.localeRules
localeRules PT = PTRules.localeRules
localeRules RO = RORules.localeRules
localeRules RU = RURules.localeRules
localeRules SK = SKRules.localeRules
localeRules SV = SVRules.localeRules
localeRules SW = SWRules.localeRules
localeRules TA = TARules.localeRules
localeRules TE = TERules.localeRules
localeRules TH = THRules.localeRules
localeRules TR = TRRules.localeRules
localeRules UK = UKRules.localeRules
localeRules VI = VIRules.localeRules
localeRules ZH = ZHRules.localeRules
langRules :: Lang -> Seal Dimension -> [Rule]
langRules AF = AFRules.langRules
langRules AR = ARRules.langRules
langRules BG = BGRules.langRules
langRules BN = BNRules.langRules
langRules CA = CARules.langRules
langRules CS = CSRules.langRules
langRules DA = DARules.langRules
langRules DE = DERules.langRules
langRules EL = ELRules.langRules
langRules EN = ENRules.langRules
langRules ES = ESRules.langRules
langRules ET = ETRules.langRules
langRules FI = FIRules.langRules
langRules FA = FARules.langRules
langRules FR = FRRules.langRules
langRules GA = GARules.langRules
langRules HE = HERules.langRules
langRules HI = HIRules.langRules
langRules KN = KNRules.langRules
langRules HR = HRRules.langRules
langRules HU = HURules.langRules
langRules ID = IDRules.langRules
langRules IS = ISRules.langRules
langRules IT = ITRules.langRules
langRules JA = JARules.langRules
langRules KA = KARules.langRules
langRules KM = KMRules.langRules
langRules KO = KORules.langRules
langRules LO = LORules.langRules
langRules ML = MLRules.langRules
langRules MN = MNRules.langRules
langRules MY = MYRules.langRules
langRules NB = NBRules.langRules
langRules NE = NERules.langRules
langRules NL = NLRules.langRules
langRules PL = PLRules.langRules
langRules PT = PTRules.langRules
langRules RO = RORules.langRules
langRules RU = RURules.langRules
langRules SK = SKRules.langRules
langRules SV = SVRules.langRules
langRules SW = SWRules.langRules
langRules TA = TARules.langRules
langRules TE = TERules.langRules
langRules TH = THRules.langRules
langRules TR = TRRules.langRules
langRules UK = UKRules.langRules
langRules VI = VIRules.langRules
langRules ZH = ZHRules.langRules
|
facebookincubator/duckling
|
Duckling/Rules.hs
|
bsd-3-clause
| 9,272
| 0
| 11
| 1,124
| 2,302
| 1,287
| 1,015
| 225
| 1
|
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1998
\section[Literal]{@Literal@: Machine literals (unboxed, of course)}
-}
{-# LANGUAGE CPP, DeriveDataTypeable #-}
module ETA.BasicTypes.Literal
(
-- * Main data type
Literal(..) -- Exported to ParseIface
-- ** Creating Literals
, mkMachInt, mkMachWord
, mkMachInt64, mkMachWord64
, mkMachFloat, mkMachDouble
, mkMachChar, mkMachString
, mkLitInteger
-- ** Operations on Literals
, literalType
, hashLiteral
, absentLiteralOf
, pprLiteral
-- ** Predicates on Literals and their contents
, litIsDupable, litIsTrivial, litIsLifted
, inIntRange, inWordRange, inCharRange
, maxInt, minInt, maxWord, maxChar
, isZeroLit
, litFitsInChar
-- ** Coercions
, word2IntLit, int2WordLit
, narrow8IntLit, narrow16IntLit, narrow32IntLit
, narrow8WordLit, narrow16WordLit, narrow32WordLit
, char2IntLit, int2CharLit
, float2IntLit, int2FloatLit, double2IntLit, int2DoubleLit
, nullAddrLit, float2DoubleLit, double2FloatLit
) where
#include "HsVersions.h"
import ETA.Prelude.TysPrim
import ETA.Prelude.TysWiredIn (jstringTy)
import ETA.Prelude.PrelNames
import ETA.Types.Type
import ETA.Types.TyCon
import ETA.Utils.Outputable
import ETA.Utils.FastTypes
import ETA.Utils.FastString
import ETA.BasicTypes.BasicTypes
import ETA.Utils.Binary
import ETA.Main.Constants
import ETA.Main.DynFlags
import ETA.Utils.UniqFM
import ETA.Utils.Util
import Data.ByteString (ByteString)
import Data.Int
import Data.Ratio
import Data.Word
import Data.Char
import Data.Data ( Data, Typeable )
import Numeric ( fromRat )
{-
************************************************************************
* *
\subsection{Literals}
* *
************************************************************************
-}
-- | So-called 'Literal's are one of:
--
-- * An unboxed (/machine/) literal ('MachInt', 'MachFloat', etc.),
-- which is presumed to be surrounded by appropriate constructors
-- (@Int#@, etc.), so that the overall thing makes sense.
--
-- * The literal derived from the label mentioned in a \"foreign label\"
-- declaration ('MachLabel')
data Literal
= ------------------
-- First the primitive guys
MachChar Char -- ^ @Char#@ - at least 31 bits. Create with 'mkMachChar'
| MachStr ByteString -- ^ A string-literal: stored and emitted
-- UTF-8 encoded, we'll arrange to decode it
-- at runtime. Also emitted with a @'\0'@
-- terminator. Create with 'mkMachString'
| MachNullAddr -- ^ The @NULL@ pointer, the only pointer value
-- that can be represented as a Literal. Create
-- with 'nullAddrLit'
| MachInt Integer -- ^ @Int#@ - at least @WORD_SIZE_IN_BITS@ bits. Create with 'mkMachInt'
| MachInt64 Integer -- ^ @Int64#@ - at least 64 bits. Create with 'mkMachInt64'
| MachWord Integer -- ^ @Word#@ - at least @WORD_SIZE_IN_BITS@ bits. Create with 'mkMachWord'
| MachWord64 Integer -- ^ @Word64#@ - at least 64 bits. Create with 'mkMachWord64'
| MachFloat Rational -- ^ @Float#@. Create with 'mkMachFloat'
| MachDouble Rational -- ^ @Double#@. Create with 'mkMachDouble'
| MachLabel FastString
(Maybe Int)
FunctionOrData
-- ^ A label literal. Parameters:
--
-- 1) The name of the symbol mentioned in the declaration
--
-- 2) The size (in bytes) of the arguments
-- the label expects. Only applicable with
-- @stdcall@ labels. @Just x@ => @\<x\>@ will
-- be appended to label name when emitting assembly.
| LitInteger Integer Type -- ^ Integer literals
-- See Note [Integer literals]
deriving (Data, Typeable)
{-
Note [Integer literals]
~~~~~~~~~~~~~~~~~~~~~~~
An Integer literal is represented using, well, an Integer, to make it
easier to write RULEs for them. They also contain the Integer type, so
that e.g. literalType can return the right Type for them.
They only get converted into real Core,
mkInteger [c1, c2, .., cn]
during the CorePrep phase, although TidyPgm looks ahead at what the
core will be, so that it can see whether it involves CAFs.
When we initally build an Integer literal, notably when
deserialising it from an interface file (see the Binary instance
below), we don't have convenient access to the mkInteger Id. So we
just use an error thunk, and fill in the real Id when we do tcIfaceLit
in TcIface.
Binary instance
-}
instance Binary Literal where
put_ bh (MachChar aa) = do putByte bh 0; put_ bh aa
put_ bh (MachStr ab) = do putByte bh 1; put_ bh ab
put_ bh (MachNullAddr) = do putByte bh 2
put_ bh (MachInt ad) = do putByte bh 3; put_ bh ad
put_ bh (MachInt64 ae) = do putByte bh 4; put_ bh ae
put_ bh (MachWord af) = do putByte bh 5; put_ bh af
put_ bh (MachWord64 ag) = do putByte bh 6; put_ bh ag
put_ bh (MachFloat ah) = do putByte bh 7; put_ bh ah
put_ bh (MachDouble ai) = do putByte bh 8; put_ bh ai
put_ bh (MachLabel aj mb fod)
= do putByte bh 9
put_ bh aj
put_ bh mb
put_ bh fod
put_ bh (LitInteger i _) = do putByte bh 10; put_ bh i
get bh = do
h <- getByte bh
case h of
0 -> do
aa <- get bh
return (MachChar aa)
1 -> do
ab <- get bh
return (MachStr ab)
2 -> do
return (MachNullAddr)
3 -> do
ad <- get bh
return (MachInt ad)
4 -> do
ae <- get bh
return (MachInt64 ae)
5 -> do
af <- get bh
return (MachWord af)
6 -> do
ag <- get bh
return (MachWord64 ag)
7 -> do
ah <- get bh
return (MachFloat ah)
8 -> do
ai <- get bh
return (MachDouble ai)
9 -> do
aj <- get bh
mb <- get bh
fod <- get bh
return (MachLabel aj mb fod)
_ -> do
i <- get bh
-- See Note [Integer literals]
return $ mkLitInteger i (panic "Evaluated the place holder for mkInteger")
instance Outputable Literal where
ppr lit = pprLiteral (\d -> d) lit
instance Eq Literal where
a == b = case (a `compare` b) of { EQ -> True; _ -> False }
a /= b = case (a `compare` b) of { EQ -> False; _ -> True }
instance Ord Literal where
a <= b = case (a `compare` b) of { LT -> True; EQ -> True; GT -> False }
a < b = case (a `compare` b) of { LT -> True; EQ -> False; GT -> False }
a >= b = case (a `compare` b) of { LT -> False; EQ -> True; GT -> True }
a > b = case (a `compare` b) of { LT -> False; EQ -> False; GT -> True }
compare a b = cmpLit a b
{-
Construction
~~~~~~~~~~~~
-}
-- | Creates a 'Literal' of type @Int#@
mkMachInt :: DynFlags -> Integer -> Literal
mkMachInt dflags x = ASSERT2( inIntRange dflags x, integer x )
MachInt x
-- | Creates a 'Literal' of type @Word#@
mkMachWord :: DynFlags -> Integer -> Literal
mkMachWord dflags x = ASSERT2( inWordRange dflags x, integer x )
MachWord x
-- | Creates a 'Literal' of type @Int64#@
mkMachInt64 :: Integer -> Literal
mkMachInt64 x = MachInt64 x
-- | Creates a 'Literal' of type @Word64#@
mkMachWord64 :: Integer -> Literal
mkMachWord64 x = MachWord64 x
-- | Creates a 'Literal' of type @Float#@
mkMachFloat :: Rational -> Literal
mkMachFloat = MachFloat
-- | Creates a 'Literal' of type @Double#@
mkMachDouble :: Rational -> Literal
mkMachDouble = MachDouble
-- | Creates a 'Literal' of type @Char#@
mkMachChar :: Char -> Literal
mkMachChar = MachChar
-- | Creates a 'Literal' of type @Addr#@, which is appropriate for passing to
-- e.g. some of the \"error\" functions in GHC.Err such as @GHC.Err.runtimeError@
mkMachString :: String -> Literal
-- stored UTF-8 encoded
mkMachString s = MachStr (fastStringToByteString $ mkFastString s)
mkLitInteger :: Integer -> Type -> Literal
mkLitInteger = LitInteger
inIntRange, inWordRange :: DynFlags -> Integer -> Bool
inIntRange _ x = x >= minInt
&& x <= maxInt
inWordRange _ x = x >= 0 && x <= maxWord
inCharRange :: Char -> Bool
inCharRange c = c >= '\0' && c <= chr tARGET_MAX_CHAR
-- | Tests whether the literal represents a zero of whatever type it is
isZeroLit :: Literal -> Bool
isZeroLit (MachInt 0) = True
isZeroLit (MachInt64 0) = True
isZeroLit (MachWord 0) = True
isZeroLit (MachWord64 0) = True
isZeroLit (MachFloat 0) = True
isZeroLit (MachDouble 0) = True
isZeroLit _ = False
{-
Coercions
~~~~~~~~~
-}
narrow8IntLit, narrow16IntLit, narrow32IntLit,
narrow8WordLit, narrow16WordLit, narrow32WordLit,
char2IntLit, int2CharLit,
float2IntLit, int2FloatLit, double2IntLit, int2DoubleLit,
float2DoubleLit, double2FloatLit
:: Literal -> Literal
word2IntLit, int2WordLit :: DynFlags -> Literal -> Literal
word2IntLit _ (MachWord w)
| w > maxInt = MachInt (w - maxWord - 1)
| otherwise = MachInt w
word2IntLit _ l = pprPanic "word2IntLit" (ppr l)
int2WordLit _ (MachInt i)
| i < 0 = MachWord (1 + maxWord + i) -- (-1) ---> tARGET_MAX_WORD
| otherwise = MachWord i
int2WordLit _ l = pprPanic "int2WordLit" (ppr l)
narrow8IntLit (MachInt i) = MachInt (toInteger (fromInteger i :: Int8))
narrow8IntLit l = pprPanic "narrow8IntLit" (ppr l)
narrow16IntLit (MachInt i) = MachInt (toInteger (fromInteger i :: Int16))
narrow16IntLit l = pprPanic "narrow16IntLit" (ppr l)
narrow32IntLit (MachInt i) = MachInt (toInteger (fromInteger i :: Int32))
narrow32IntLit l = pprPanic "narrow32IntLit" (ppr l)
narrow8WordLit (MachWord w) = MachWord (toInteger (fromInteger w :: Word8))
narrow8WordLit l = pprPanic "narrow8WordLit" (ppr l)
narrow16WordLit (MachWord w) = MachWord (toInteger (fromInteger w :: Word16))
narrow16WordLit l = pprPanic "narrow16WordLit" (ppr l)
narrow32WordLit (MachWord w) = MachWord (toInteger (fromInteger w :: Word32))
narrow32WordLit l = pprPanic "narrow32WordLit" (ppr l)
char2IntLit (MachChar c) = MachInt (toInteger (ord c))
char2IntLit l = pprPanic "char2IntLit" (ppr l)
int2CharLit (MachInt i) = MachChar (chr (fromInteger i))
int2CharLit l = pprPanic "int2CharLit" (ppr l)
float2IntLit (MachFloat f) = MachInt (truncate f)
float2IntLit l = pprPanic "float2IntLit" (ppr l)
int2FloatLit (MachInt i) = MachFloat (fromInteger i)
int2FloatLit l = pprPanic "int2FloatLit" (ppr l)
double2IntLit (MachDouble f) = MachInt (truncate f)
double2IntLit l = pprPanic "double2IntLit" (ppr l)
int2DoubleLit (MachInt i) = MachDouble (fromInteger i)
int2DoubleLit l = pprPanic "int2DoubleLit" (ppr l)
float2DoubleLit (MachFloat f) = MachDouble f
float2DoubleLit l = pprPanic "float2DoubleLit" (ppr l)
double2FloatLit (MachDouble d) = MachFloat d
double2FloatLit l = pprPanic "double2FloatLit" (ppr l)
nullAddrLit :: Literal
nullAddrLit = MachNullAddr
{-
Predicates
~~~~~~~~~~
-}
-- | True if there is absolutely no penalty to duplicating the literal.
-- False principally of strings
litIsTrivial :: Literal -> Bool
-- c.f. CoreUtils.exprIsTrivial
litIsTrivial (MachStr _) = False
litIsTrivial (LitInteger {}) = False
litIsTrivial _ = True
-- | True if code space does not go bad if we duplicate this literal
-- Currently we treat it just like 'litIsTrivial'
litIsDupable :: DynFlags -> Literal -> Bool
-- c.f. CoreUtils.exprIsDupable
litIsDupable _ (MachStr _) = False
litIsDupable dflags (LitInteger i _) = inIntRange dflags i
litIsDupable _ _ = True
litFitsInChar :: Literal -> Bool
litFitsInChar (MachInt i) = i >= toInteger (ord minBound)
&& i <= toInteger (ord maxBound)
litFitsInChar _ = False
litIsLifted :: Literal -> Bool
litIsLifted (LitInteger {}) = True
litIsLifted _ = False
{-
Types
~~~~~
-}
-- | Find the Haskell 'Type' the literal occupies
literalType :: Literal -> Type
literalType MachNullAddr = addrPrimTy
literalType (MachChar _) = charPrimTy
literalType (MachStr _) = mkObjectPrimTy jstringTy
literalType (MachInt _) = intPrimTy
literalType (MachWord _) = wordPrimTy
literalType (MachInt64 _) = int64PrimTy
literalType (MachWord64 _) = word64PrimTy
literalType (MachFloat _) = floatPrimTy
literalType (MachDouble _) = doublePrimTy
literalType (MachLabel _ _ _) = addrPrimTy
literalType (LitInteger _ t) = t
absentLiteralOf :: TyCon -> Maybe Literal
-- Return a literal of the appropriate primtive
-- TyCon, to use as a placeholder when it doesn't matter
absentLiteralOf tc = lookupUFM absent_lits (tyConName tc)
absent_lits :: UniqFM Literal
absent_lits = listToUFM [ (addrPrimTyConKey, MachNullAddr)
, (charPrimTyConKey, MachChar 'x')
, (intPrimTyConKey, MachInt 0)
, (int64PrimTyConKey, MachInt64 0)
, (floatPrimTyConKey, MachFloat 0)
, (doublePrimTyConKey, MachDouble 0)
, (wordPrimTyConKey, MachWord 0)
, (word64PrimTyConKey, MachWord64 0) ]
{-
Comparison
~~~~~~~~~~
-}
cmpLit :: Literal -> Literal -> Ordering
cmpLit (MachChar a) (MachChar b) = a `compare` b
cmpLit (MachStr a) (MachStr b) = a `compare` b
cmpLit (MachNullAddr) (MachNullAddr) = EQ
cmpLit (MachInt a) (MachInt b) = a `compare` b
cmpLit (MachWord a) (MachWord b) = a `compare` b
cmpLit (MachInt64 a) (MachInt64 b) = a `compare` b
cmpLit (MachWord64 a) (MachWord64 b) = a `compare` b
cmpLit (MachFloat a) (MachFloat b) = a `compare` b
cmpLit (MachDouble a) (MachDouble b) = a `compare` b
cmpLit (MachLabel a _ _) (MachLabel b _ _) = a `compare` b
cmpLit (LitInteger a _) (LitInteger b _) = a `compare` b
cmpLit lit1 lit2 | litTag lit1 <# litTag lit2 = LT
| otherwise = GT
litTag :: Literal -> FastInt
litTag (MachChar _) = _ILIT(1)
litTag (MachStr _) = _ILIT(2)
litTag (MachNullAddr) = _ILIT(3)
litTag (MachInt _) = _ILIT(4)
litTag (MachWord _) = _ILIT(5)
litTag (MachInt64 _) = _ILIT(6)
litTag (MachWord64 _) = _ILIT(7)
litTag (MachFloat _) = _ILIT(8)
litTag (MachDouble _) = _ILIT(9)
litTag (MachLabel _ _ _) = _ILIT(10)
litTag (LitInteger {}) = _ILIT(11)
{-
Printing
~~~~~~~~
* MachX (i.e. unboxed) things are printed unadornded (e.g. 3, 'a', "foo")
exceptions: MachFloat gets an initial keyword prefix.
-}
pprLiteral :: (SDoc -> SDoc) -> Literal -> SDoc
-- The function is used on non-atomic literals
-- to wrap parens around literals that occur in
-- a context requiring an atomic thing
pprLiteral _ (MachChar ch) = pprHsChar ch
pprLiteral _ (MachStr s) = pprHsBytes s
pprLiteral _ (MachInt i) = pprIntVal i
pprLiteral _ (MachDouble d) = double (fromRat d)
pprLiteral _ (MachNullAddr) = ptext (sLit "__NULL")
pprLiteral add_par (LitInteger i _) = add_par (ptext (sLit "__integer") <+> integer i)
pprLiteral add_par (MachInt64 i) = add_par (ptext (sLit "__int64") <+> integer i)
pprLiteral add_par (MachWord w) = add_par (ptext (sLit "__word") <+> integer w)
pprLiteral add_par (MachWord64 w) = add_par (ptext (sLit "__word64") <+> integer w)
pprLiteral add_par (MachFloat f) = add_par (ptext (sLit "__float") <+> float (fromRat f))
pprLiteral add_par (MachLabel l mb fod) = add_par (ptext (sLit "__label") <+> b <+> ppr fod)
where b = case mb of
Nothing -> pprHsString l
Just x -> doubleQuotes (text (unpackFS l ++ '@':show x))
pprIntVal :: Integer -> SDoc
-- ^ Print negative integers with parens to be sure it's unambiguous
pprIntVal i | i < 0 = parens (integer i)
| otherwise = integer i
{-
************************************************************************
* *
\subsection{Hashing}
* *
************************************************************************
Hash values should be zero or a positive integer. No negatives please.
(They mess up the UniqFM for some reason.)
-}
hashLiteral :: Literal -> Int
hashLiteral (MachChar c) = ord c + 1000 -- Keep it out of range of common ints
hashLiteral (MachStr s) = hashByteString s
hashLiteral (MachNullAddr) = 0
hashLiteral (MachInt i) = hashInteger i
hashLiteral (MachInt64 i) = hashInteger i
hashLiteral (MachWord i) = hashInteger i
hashLiteral (MachWord64 i) = hashInteger i
hashLiteral (MachFloat r) = hashRational r
hashLiteral (MachDouble r) = hashRational r
hashLiteral (MachLabel s _ _) = hashFS s
hashLiteral (LitInteger i _) = hashInteger i
hashRational :: Rational -> Int
hashRational r = hashInteger (numerator r)
hashInteger :: Integer -> Int
hashInteger i = 1 + abs (fromInteger (i `rem` 10000))
-- The 1+ is to avoid zero, which is a Bad Number
-- since we use * to combine hash values
hashFS :: FastString -> Int
hashFS s = uniqueOfFS s
maxInt, minInt, maxWord :: Integer
minInt = toInteger (minBound :: Int32)
maxInt = toInteger (maxBound :: Int32)
maxWord = toInteger (maxBound :: Word32)
maxChar :: Int
maxChar = fromIntegral (maxBound :: Word32)
|
AlexeyRaga/eta
|
compiler/ETA/BasicTypes/Literal.hs
|
bsd-3-clause
| 18,943
| 0
| 16
| 5,777
| 4,673
| 2,422
| 2,251
| 309
| 2
|
module GraphReduction.Machine where
import Data.List (mapAccumL)
import Core.AST
import Core.Prelude
import Util.Heap
data GmState = GmState
{ code :: GmCode
, stack :: GmStack
, dump :: GmDump
, heap :: GmHeap
, globals :: GmGlobals
, stats :: GmStats
} deriving Show
type GmCode = [Instruction]
type GmStack = [Addr]
type GmDump = [(GmCode, GmStack)]
type GmHeap = Heap Node
type GmGlobals = [(Name, Addr)]
type GmStats = Int
incStats :: GmState -> GmState
incStats s = s { stats = stats s + 1 }
data Node = NNum Int
| NApp Addr Addr
| NGlobal Int GmCode
| NInd Addr
deriving Eq
instance Show Node where
show (NNum i) = show i
show (NApp a1 a2) = "@" ++ show a1 ++ " " ++ show a2
show (NGlobal a c) = "g" ++ show a ++ " " ++ show c
show (NInd a) = "p" ++ show a
data Instruction
= Unwind
| Mkap
| PushGlobal Name
| PushInt Int
| Push Int
| Pop Int
| Slide Int
| Update Int
| Alloc Int
| Eval
| Add | Sub | Mul | Div | Neg
| Eq | Ne | Lt | Le | Gt | Ge
| Cond GmCode GmCode
deriving (Show, Eq)
putCode :: GmCode -> GmState -> GmState
putCode i s = s { code = i }
putStack :: GmStack -> GmState -> GmState
putStack st s = s { stack = st }
putDump :: GmDump -> GmState -> GmState
putDump d s = s { dump = d }
eval :: GmState -> [GmState]
eval s = s : rest
where
rest | gmFinal s = []
| otherwise = eval next
next = incStats (step s)
gmFinal :: GmState -> Bool
gmFinal = null . code
step :: GmState -> GmState
step s = dispatch i (putCode is s)
where i:is = code s
dispatch :: Instruction -> GmState -> GmState
dispatch Unwind = unwind
dispatch Mkap = mkap
dispatch (PushGlobal n) = pushGlobal n
dispatch (PushInt n) = pushInt n
dispatch (Push n) = push n
dispatch (Pop n) = pop n
dispatch (Slide n) = slide n
dispatch (Update n) = update n
dispatch (Alloc n) = allocNodes n
dispatch Eval = newstate
where newstate s = s { code = [Unwind], stack = [a], dump = (code s,st):dump s }
where a:st = stack s
dispatch Add = binaryArithmetic (+)
dispatch Sub = binaryArithmetic (-)
dispatch Mul = binaryArithmetic (*)
dispatch Div = binaryArithmetic div
dispatch Neg = unaryArithmetic negate
dispatch Eq = comparison (==)
dispatch Ne = comparison (/=)
dispatch Lt = comparison (<)
dispatch Le = comparison (<=)
dispatch Gt = comparison (>)
dispatch Ge = comparison (>=)
dispatch (Cond i1 i2) = cond i1 i2
boxInt :: Int -> GmState -> GmState
boxInt n s = s { stack = a:stack s, heap = h }
where (h, a) = alloc (heap s) (NNum n)
unboxInt :: Addr -> GmState -> Int
unboxInt a s = ub (hLookup (heap s) a)
where
ub (NNum i) = i
ub _ = error "Cannot unbox non-integer."
boxBoolean b s = s { stack = a:stack s, heap = h }
where
(h, a) = alloc (heap s) (NNum b')
b' | b = 1 | otherwise = 0
unaryPrimitive :: (b -> GmState -> GmState) -- boxing function
-> (Addr -> GmState -> a) -- unboxing function
-> (a -> b) -- operator
-> (GmState -> GmState) -- state transition
unaryPrimitive box unbox op s = box (op (unbox a s)) (putStack as s)
where a:as = stack s
binaryPrimitive :: (b -> GmState -> GmState) -- boxing function
-> (Addr -> GmState -> a) -- unboxing function
-> (a -> a -> b) -- operator
-> (GmState -> GmState) -- state transition
binaryPrimitive box unbox op s = box (op (unbox a0 s) (unbox a1 s)) (putStack as s)
where a0:a1:as = stack s
unaryArithmetic :: (Int -> Int) -> (GmState -> GmState)
unaryArithmetic = unaryPrimitive boxInt unboxInt
binaryArithmetic :: (Int -> Int -> Int) -> (GmState -> GmState)
binaryArithmetic = binaryPrimitive boxInt unboxInt
comparison :: (Int -> Int -> Bool) -> GmState -> GmState
comparison = binaryPrimitive boxBoolean unboxInt
unwind :: GmState -> GmState
unwind s = newState (hLookup h a)
where
h = heap s
st = stack s
(a:as) = st
newState (NNum _)
| null (dump s) = s
| otherwise = s { code = i', stack = a:s', dump = d }
where
(i', s'):d = dump s
newState (NApp a1 _) = putCode [Unwind] (putStack (a1:a:as) s)
newState (NInd a1) = putCode [Unwind] (putStack (a1:as) s)
newState (NGlobal n c)
| length as < n = error "Unwinding stack with too few arguments."
| otherwise = putCode c (putStack rearranged s)
where
rearranged = take n as' ++ drop n st
as' = map (getArg . hLookup h) as
getArg (NApp _ a2) = a2
mkap :: GmState -> GmState
mkap s = s { heap = h', stack = a:as' }
where
(h', a) = alloc (heap s) (NApp a1 a2)
(a1:a2:as') = stack s
pushGlobal :: Name -> GmState -> GmState
pushGlobal n s = putStack (a : stack s) s
where
a = findWithDefault err n (globals s)
err = error $ "Undeclared global " ++ n
pushInt :: Int -> GmState -> GmState
pushInt n s = case lookup sn (globals s) of
Nothing -> s { globals = (sn, a) : globals s, heap = h, stack = a : stack s }
where (h, a) = alloc (heap s) (NNum n)
Just a -> putStack (a : stack s) s
where
sn = show n
push :: Int -> GmState -> GmState
push n s = putStack (a:as) s
where
as = stack s
a = as !! n
pop :: Int -> GmState -> GmState
pop n s = putStack (drop n (stack s)) s
slide :: Int -> GmState -> GmState
slide n s = putStack (a : drop n as) s
where (a:as) = stack s
update :: Int -> GmState -> GmState
update n s = s { stack = newStack , heap = h }
where
(a:as) = stack s
(h, an') = alloc (heap s) (NInd a)
newStack = case splitAt n as of
(xs,_:ys) -> xs ++ an' : ys
(xs,[]) -> xs ++ [an']
allocNodes :: Int -> GmState -> GmState
allocNodes n s = s { stack = newaddrs ++ stack s, heap = hp }
where
(hp, newaddrs) = allocNodes' n (heap s)
allocNodes' 0 h = (h, [])
allocNodes' n h = (h'', a:as)
where
(h', as) = allocNodes' (n - 1) h
(h'', a) = alloc h' (NInd hNull)
cond :: GmCode -> GmCode -> GmState -> GmState
cond i1 i2 s = s { code = branchCode ++ code s, stack = st }
where
a:st = stack s
NNum n = hLookup (heap s) a
branchCode
| n == 1 = i1
| n == 0 = i2
|
WraithM/CoreCompiler
|
src/GraphReduction/Machine.hs
|
bsd-3-clause
| 6,297
| 0
| 12
| 1,828
| 2,804
| 1,473
| 1,331
| 176
| 4
|
module Data.Array.Accelerate.BLAS.Internal.Axpy where
import Data.Array.Accelerate.BLAS.Internal.Common
import Data.Array.Accelerate
import Data.Array.Accelerate.CUDA.Foreign
import qualified Foreign.CUDA.BLAS as BL
import Prelude hiding (zipWith, map)
cudaAxpyF :: (Scalar Float, Vector Float, Vector Float)
-> CIO (Vector Float)
cudaAxpyF (alpha, x, y) = do
let n = arraySize (arrayShape y)
y' <- allocateArray (arrayShape y)
copyArray y y'
aptr <- devSF alpha
xptr <- devVF x
y'ptr <- devVF y'
liftIO $ BL.withCublas $ \handle -> execute handle n aptr xptr y'ptr
return y'
where
execute h n a xp yp =
BL.saxpy h n a xp 1 yp 1
cudaAxpyD :: (Scalar Double, Vector Double, Vector Double)
-> CIO (Vector Double)
cudaAxpyD (alpha, x, y) = do
let n = arraySize (arrayShape y)
y' <- allocateArray (arrayShape y)
copyArray y y'
aptr <- devSD alpha
xptr <- devVD x
y'ptr <- devVD y'
liftIO $ BL.withCublas $ \handle -> execute handle n aptr xptr y'ptr
return y'
where
execute h n a xp yp =
BL.daxpy h n a xp 1 yp 1
-- | Execute /alpha.x + y/ where /x, y/ are /vectors/ and /alpha/ is /scalar/, using
-- CUBLAS in the CUDA backend if available, fallback to a "pure"
-- implementation otherwise:
--
-- >>> zipWith (+) y $ map (*alpha) x
saxpy :: Acc (Scalar Float) -> Acc (Vector Float) -> Acc (Vector Float) -> Acc (Vector Float)
saxpy alpha x y = foreignAcc foreignSaxpy pureSaxpy $ lift (alpha, x, y)
where foreignSaxpy = CUDAForeignAcc "cudaAxpyF" cudaAxpyF
pureSaxpy :: Acc (Scalar Float, Vector Float, Vector Float) -> Acc (Vector Float)
pureSaxpy vs = let (a, u, v) = unlift vs
in zipWith (+) v $ map (*(the a)) u
-- | Execute /alpha.x + y/ using
-- CUBLAS in the CUDA backend if available, fallback to a "pure"
-- implementation otherwise:
--
-- >>> zipWith (+) y $ map (*alpha) x
daxpy :: Acc (Scalar Double) -> Acc (Vector Double) -> Acc (Vector Double) -> Acc (Vector Double)
daxpy alpha x y = foreignAcc foreignDaxpy pureDaxpy $ lift (alpha, x, y)
where foreignDaxpy = CUDAForeignAcc "cudaAxpyD" cudaAxpyD
pureDaxpy :: Acc (Scalar Double, Vector Double, Vector Double) -> Acc (Vector Double)
pureDaxpy vs = let (a, u, v) = unlift vs
in zipWith (+) v $ map (* (the a)) u
|
alpmestan/accelerate-blas
|
src/Data/Array/Accelerate/BLAS/Internal/Axpy.hs
|
bsd-3-clause
| 2,439
| 0
| 13
| 642
| 844
| 426
| 418
| 44
| 1
|
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE RecursiveDo #-}
module Reflex.Dom.Lists where
import GHCJS.Foreign ()
import Data.JSString()
import Reflex.Dom
import qualified Data.Map.Strict as M
import Control.Lens
addListWorkflow :: MonadWidget t m
=> a -- ^ default value
-> M.Map Int a -- ^ Initial value
-> (forall b. m b -> m b) -- ^ header for the list
-> (a -> m (Event t a)) -- ^ handle a single list item
-> m (Event t ()) -- ^ Add button
-> Event t x -- ^ Refresh event
-> Workflow t m (M.Map Int a)
addListWorkflow defValue initList hdr handlemodification addbutton refresh = Workflow $ do
modificationEvents <- mergeMap <$> hdr (traverse handlemodification initList)
listD <- foldDyn M.union initList modificationEvents
addEvent <- addbutton
let refreshListE = tagDyn listD refresh
addListE = attachDynWith (\d _ -> addElement d) listD addEvent
addElement mp = let mx = if M.null mp then 0 else fst (M.findMax mp) + 1
in mp & at mx ?~ defValue
changeEvents = leftmost [refreshListE, addListE]
return (initList, fmap (\mp -> addListWorkflow defValue mp hdr handlemodification addbutton refresh) changeEvents)
addList :: MonadWidget t m
=> a -- ^ default value
-> [a] -- ^ Initial value
-> (forall b. m b -> m b) -- ^ header for the list
-> (a -> m (Event t a)) -- ^ handle a single list item
-> m (Event t ()) -- ^ Add button
-> Event t x -- ^ Refresh event
-> m (Dynamic t [a])
addList defValue initList hdr handlemodification addbutton refresh = workflow (addListWorkflow defValue initmap hdr handlemodification addbutton refresh) >>= mapDyn M.elems
where
initmap = M.fromList $ zip [0..] initList
data DefaultFilter a = FilterFunc (a -> Bool)
| RefreshFilter
instance Monoid (DefaultFilter a) where
mempty = FilterFunc (const True)
mappend _ RefreshFilter = mempty
mappend RefreshFilter a = a
mappend (FilterFunc a) (FilterFunc b) = FilterFunc (\x -> a x && b x)
filterList :: (MonadWidget t m, Monoid f)
=> [a] -- ^ Initial list
-> f -- ^ Initial filter
-> ([a] -> f -> m (Event t f)) -- ^ list elements display, including a filtering signal
-> (f -> a -> Bool) -- ^ filtering function
-> m (Event t [a]) -- ^ the list in its current form
filterList initlist initFilter dispElems filterElem = do
rec let initMap = M.fromList $ zip ([0..] :: [Int]) initlist
filtermap f = M.filter (filterElem f) initMap
filterD <- foldDyn (flip mappend) initFilter filterE
elemMapD <- mapDyn filtermap filterD
foo <- forDyn filterD $ \f -> dispElems (M.elems (filtermap f)) f
filterE <- dyn foo >>= switchPromptly never
return (M.elems <$> updated elemMapD)
|
bartavelle/reflex-dom-bootstrap-components
|
src/Reflex/Dom/Lists.hs
|
bsd-3-clause
| 2,952
| 0
| 20
| 842
| 946
| 482
| 464
| 57
| 2
|
{-# LANGUAGE NoMonomorphismRestriction, BangPatterns #-}
module Data.Iteratee.Parallel (
psequence_
-- ,psequence
,parE
,parI
,liftParI
,mapReduce
)
where
import Control.Monad.IO.Class
import Control.Monad.Trans.Class
import Data.Iteratee as I hiding (mapM_, zip, filter)
import qualified Data.ListLike as LL
import Data.Monoid
import Control.Concurrent
import Control.Parallel
import Control.Monad
-- | Transform usual Iteratee into parallel composable one, introducing
-- one step extra delay.
--
-- Ex - time spent in Enumerator working on x'th packet
-- Ix - time spent in Iteratee working on x'th packet
-- z - last packet, y = (z-1)'th packet
--
-- regular Iteratee: E0 - I0, E1 - I1, E2 - I2 .. Ez -> Iz
-- parallel Iteratee: E0, E1, E2, .. Ez
-- \_ I0\_ I1\_ .. Iy\__ Iz
--
parI :: (Nullable s, Monoid s) => Iteratee s IO a -> Iteratee s IO a
parI = liftI . firstStep
where
-- first step, here we fork separete thread for the next chain and at the
-- same time ask for more date from the previous chain
firstStep iter chunk = do
var <- liftIO newEmptyMVar
_ <- sideStep var chunk iter
liftI $ go var
-- somewhere in the middle, we are getting iteratee from previous step,
-- feeding it with some new data, asking for more data and starting
-- more processing in separete thread
go var chunk@(Chunk _) = do
iter <- liftIO $ takeMVar var
_ <- sideStep var chunk iter
liftI $ go var
-- final step - no more data, so we need to inform our consumer about it
go var e = do
iter <- liftIO $ takeMVar var
join . lift $ enumChunk e iter
-- forks away from the main computation, return results via MVar
sideStep var chunk iter = liftIO . forkIO $ runIter iter onDone onCont
where
onDone a s = putMVar var $ idone a s
onCont k _ = runIter (k chunk) onDone onFina
onFina k e = putMVar var $ icont k e
-- | Transform an Enumeratee into a parallel composable one, introducing
-- one step extra delay, see 'parI'.
parE ::
(Nullable s1, Nullable s2, Monoid s1)
=> Enumeratee s1 s2 IO r
-> Enumeratee s1 s2 IO r
parE outer inner = parI (outer inner)
-- | Enumerate a list of iteratees over a single stream simultaneously
-- and discard the results. Each iteratee runs in a separate forkIO thread,
-- passes all errors from iteratees up.
psequence_ ::
(LL.ListLike s el, Nullable s)
=> [Iteratee s IO a]
-> Iteratee s IO ()
psequence_ = I.sequence_ . map parI
{-
-- | Enumerate a list of iteratees over a single stream simultaneously
-- and keeps the results. Each iteratee runs in a separete forkIO thread, passes all
-- errors from iteratees up.
psequence = I.sequence . map parI
-}
-- | A variant of 'parI' with the parallelized iteratee lifted into an
-- arbitrary MonadIO.
liftParI ::
(Nullable s, Monoid s, MonadIO m)
=> Iteratee s IO a
-> Iteratee s m a
liftParI = ilift liftIO . parI
-- | Perform a parallel map/reduce. The `bufsize` parameter controls
-- the maximum number of chunks to read at one time. A larger bufsize
-- allows for greater parallelism, but will require more memory.
--
-- Implementation of `sum`
--
-- > sum :: (Monad m, LL.ListLike s, Nullable s) => Iteratee s m Int64
-- > sum = getSum <$> mapReduce 4 (Sum . LL.sum)
mapReduce ::
(Monad m, Nullable s, Monoid b)
=> Int -- ^ maximum number of chunks to read
-> (s -> b) -- ^ map function
-> Iteratee s m b
mapReduce bufsize f = liftI (step (0, []))
where
step a@(!buf,acc) (Chunk xs)
| nullC xs = liftI (step a)
| buf >= bufsize =
let acc' = mconcat acc
b' = f xs
in b' `par` acc' `pseq` liftI (step (0,[b' `mappend` acc']))
| otherwise =
let b' = f xs
in b' `par` liftI (step (succ buf,b':acc))
step (_,acc) s@(EOF Nothing) =
idone (mconcat acc) s
step acc (EOF (Just err)) =
throwRecoverableErr err (step acc)
|
iteloo/tsuru-sample
|
iteratee-0.8.9.6/src/Data/Iteratee/Parallel.hs
|
bsd-3-clause
| 4,112
| 0
| 16
| 1,114
| 905
| 479
| 426
| 66
| 3
|
{-# LANGUAGE CPP, ScopedTypeVariables, BangPatterns #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
-- |
-- Copyright : (c) 2011 Simon Meier
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : Simon Meier <iridcode@gmail.com>
-- Stability : experimental
-- Portability : tested on GHC only
--
-- Testing composition of 'Builders'.
module Data.ByteString.Builder.Tests (tests) where
import Control.Applicative
import Control.Monad.State
import Control.Monad.Writer
import Foreign (Word, Word8, Word64, minusPtr)
import System.IO.Unsafe (unsafePerformIO)
import Data.Char (ord, chr)
import qualified Data.DList as D
import Data.Foldable (asum, foldMap)
import qualified Data.ByteString as S
import qualified Data.ByteString.Internal as S
import qualified Data.ByteString.Lazy as L
import Data.ByteString.Builder
import Data.ByteString.Builder.Extra
import Data.ByteString.Builder.ASCII
import Data.ByteString.Builder.Internal (Put, putBuilder, fromPut)
import qualified Data.ByteString.Builder.Internal as BI
import qualified Data.ByteString.Builder.Prim as BP
import qualified Data.ByteString.Builder.Prim.Extra as BP
import Data.ByteString.Builder.Prim.TestUtils
import Numeric (readHex)
import Control.Exception (evaluate)
import System.IO (openTempFile, hPutStr, hClose, hSetBinaryMode)
#if MIN_VERSION_base(4,2,0)
import System.IO (hSetEncoding, utf8)
#endif
import System.Directory
import Foreign (ForeignPtr, withForeignPtr, castPtr)
import TestFramework
import Test.QuickCheck
( Arbitrary(..), oneof, choose, listOf, elements )
import Test.QuickCheck.Property
( printTestCase, morallyDubiousIOProperty )
tests :: [Test]
tests =
[ testBuilderRecipe
#if MIN_VERSION_base(4,2,0)
, testHandlePutBuilder
#endif
, testHandlePutBuilderChar8
, testPut
, testRunBuilder
] ++
testsEncodingToBuilder ++
testsBinary ++
testsASCII ++
testsChar8 ++
testsUtf8
------------------------------------------------------------------------------
-- Testing 'Builder' execution
------------------------------------------------------------------------------
testBuilderRecipe :: Test
testBuilderRecipe =
testProperty "toLazyByteStringWith" $ testRecipe <$> arbitrary
where
testRecipe r =
printTestCase msg $ x1 == x2
where
x1 = renderRecipe r
x2 = buildRecipe r
toString = map (chr . fromIntegral)
msg = unlines
[ "recipe: " ++ show r
, "render: " ++ toString x1
, "build : " ++ toString x2
, "diff : " ++ show (dropWhile (uncurry (==)) $ zip x1 x2)
]
#if MIN_VERSION_base(4,2,0)
testHandlePutBuilder :: Test
testHandlePutBuilder =
testProperty "hPutBuilder" testRecipe
where
testRecipe :: (String, String, String, Recipe) -> Bool
testRecipe args@(before, between, after, recipe) = unsafePerformIO $ do
tempDir <- getTemporaryDirectory
(tempFile, tempH) <- openTempFile tempDir "TestBuilder"
-- switch to UTF-8 encoding
hSetEncoding tempH utf8
-- output recipe with intermediate direct writing to handle
let b = fst $ recipeComponents recipe
hPutStr tempH before
hPutBuilder tempH b
hPutStr tempH between
hPutBuilder tempH b
hPutStr tempH after
hClose tempH
-- read file
lbs <- L.readFile tempFile
_ <- evaluate (L.length $ lbs)
removeFile tempFile
-- compare to pure builder implementation
let lbsRef = toLazyByteString $ mconcat
[stringUtf8 before, b, stringUtf8 between, b, stringUtf8 after]
-- report
let msg = unlines
[ "task: " ++ show args
, "via file: " ++ show lbs
, "direct : " ++ show lbsRef
-- , "diff : " ++ show (dropWhile (uncurry (==)) $ zip x1 x2)
]
success = lbs == lbsRef
unless success (error msg)
return success
#endif
testHandlePutBuilderChar8 :: Test
testHandlePutBuilderChar8 =
testProperty "char8 hPutBuilder" testRecipe
where
testRecipe :: (String, String, String, Recipe) -> Bool
testRecipe args@(before, between, after, recipe) = unsafePerformIO $ do
tempDir <- getTemporaryDirectory
(tempFile, tempH) <- openTempFile tempDir "TestBuilder"
-- switch to binary / latin1 encoding
hSetBinaryMode tempH True
-- output recipe with intermediate direct writing to handle
let b = fst $ recipeComponents recipe
hPutStr tempH before
hPutBuilder tempH b
hPutStr tempH between
hPutBuilder tempH b
hPutStr tempH after
hClose tempH
-- read file
lbs <- L.readFile tempFile
_ <- evaluate (L.length $ lbs)
removeFile tempFile
-- compare to pure builder implementation
let lbsRef = toLazyByteString $ mconcat
[string8 before, b, string8 between, b, string8 after]
-- report
let msg = unlines
[ "task: " ++ show args
, "via file: " ++ show lbs
, "direct : " ++ show lbsRef
-- , "diff : " ++ show (dropWhile (uncurry (==)) $ zip x1 x2)
]
success = lbs == lbsRef
unless success (error msg)
return success
-- Recipes with which to test the builder functions
---------------------------------------------------
data Mode =
Threshold Int
| Insert
| Copy
| Smart
| Hex
deriving( Eq, Ord, Show )
data Action =
SBS Mode S.ByteString
| LBS Mode L.ByteString
| W8 Word8
| W8S [Word8]
| String String
| FDec Float
| DDec Double
| Flush
| EnsureFree Word
| ModState Int
deriving( Eq, Ord, Show )
data Strategy = Safe | Untrimmed
deriving( Eq, Ord, Show )
data Recipe = Recipe Strategy Int Int L.ByteString [Action]
deriving( Eq, Ord, Show )
renderRecipe :: Recipe -> [Word8]
renderRecipe (Recipe _ firstSize _ cont as) =
D.toList $ execWriter (evalStateT (mapM_ renderAction as) firstSize)
`mappend` renderLBS cont
where
renderAction (SBS Hex bs) = tell $ foldMap hexWord8 $ S.unpack bs
renderAction (SBS _ bs) = tell $ D.fromList $ S.unpack bs
renderAction (LBS Hex lbs) = tell $ foldMap hexWord8 $ L.unpack lbs
renderAction (LBS _ lbs) = tell $ renderLBS lbs
renderAction (W8 w) = tell $ return w
renderAction (W8S ws) = tell $ D.fromList ws
renderAction (String cs) = tell $ foldMap (D.fromList . charUtf8_list) cs
renderAction Flush = tell $ mempty
renderAction (EnsureFree _) = tell $ mempty
renderAction (FDec f) = tell $ D.fromList $ encodeASCII $ show f
renderAction (DDec d) = tell $ D.fromList $ encodeASCII $ show d
renderAction (ModState i) = do
s <- get
tell (D.fromList $ encodeASCII $ show s)
put (s - i)
renderLBS = D.fromList . L.unpack
hexWord8 = D.fromList . wordHexFixed_list
buildAction :: Action -> StateT Int Put ()
buildAction (SBS Hex bs) = lift $ putBuilder $ byteStringHex bs
buildAction (SBS Smart bs) = lift $ putBuilder $ byteString bs
buildAction (SBS Copy bs) = lift $ putBuilder $ byteStringCopy bs
buildAction (SBS Insert bs) = lift $ putBuilder $ byteStringInsert bs
buildAction (SBS (Threshold i) bs) = lift $ putBuilder $ byteStringThreshold i bs
buildAction (LBS Hex lbs) = lift $ putBuilder $ lazyByteStringHex lbs
buildAction (LBS Smart lbs) = lift $ putBuilder $ lazyByteString lbs
buildAction (LBS Copy lbs) = lift $ putBuilder $ lazyByteStringCopy lbs
buildAction (LBS Insert lbs) = lift $ putBuilder $ lazyByteStringInsert lbs
buildAction (LBS (Threshold i) lbs) = lift $ putBuilder $ lazyByteStringThreshold i lbs
buildAction (W8 w) = lift $ putBuilder $ word8 w
buildAction (W8S ws) = lift $ putBuilder $ BP.primMapListFixed BP.word8 ws
buildAction (String cs) = lift $ putBuilder $ stringUtf8 cs
buildAction (FDec f) = lift $ putBuilder $ floatDec f
buildAction (DDec d) = lift $ putBuilder $ doubleDec d
buildAction Flush = lift $ putBuilder $ flush
buildAction (EnsureFree minFree) = lift $ putBuilder $ ensureFree $ fromIntegral minFree
buildAction (ModState i) = do
s <- get
lift $ putBuilder $ intDec s
put (s - i)
buildRecipe :: Recipe -> [Word8]
buildRecipe recipe =
L.unpack $ toLBS b
where
(b, toLBS) = recipeComponents recipe
recipeComponents :: Recipe -> (Builder, Builder -> L.ByteString)
recipeComponents (Recipe how firstSize otherSize cont as) =
(b, toLBS)
where
toLBS = toLazyByteStringWith (strategy how firstSize otherSize) cont
where
strategy Safe = safeStrategy
strategy Untrimmed = untrimmedStrategy
b = fromPut $ evalStateT (mapM_ buildAction as) firstSize
-- 'Arbitary' instances
-----------------------
instance Arbitrary L.ByteString where
arbitrary = L.fromChunks <$> listOf arbitrary
shrink lbs
| L.null lbs = []
| otherwise = pure $ L.take (L.length lbs `div` 2) lbs
instance Arbitrary S.ByteString where
arbitrary =
trim S.drop =<< trim S.take =<< S.pack <$> listOf arbitrary
where
trim f bs = oneof [pure bs, f <$> choose (0, S.length bs) <*> pure bs]
shrink bs
| S.null bs = []
| otherwise = pure $ S.take (S.length bs `div` 2) bs
instance Arbitrary Mode where
arbitrary = oneof
[Threshold <$> arbitrary, pure Smart, pure Insert, pure Copy, pure Hex]
shrink (Threshold i) = Threshold <$> shrink i
shrink _ = []
instance Arbitrary Action where
arbitrary = oneof
[ SBS <$> arbitrary <*> arbitrary
, LBS <$> arbitrary <*> arbitrary
, W8 <$> arbitrary
, W8S <$> listOf arbitrary
-- ensure that larger character codes are also tested
, String <$> listOf ((\c -> chr (ord c * ord c)) <$> arbitrary)
, pure Flush
-- never request more than 64kb free space
, (EnsureFree . (`mod` 0xffff)) <$> arbitrary
, FDec <$> arbitrary
, DDec <$> arbitrary
, ModState <$> arbitrary
]
  shrink (SBS m bs) =
      (SBS <$> shrink m <*> pure bs) <|>
      (SBS <$> pure m <*> shrink bs)
  shrink (LBS m lbs) =
      (LBS <$> shrink m <*> pure lbs) <|>
      (LBS <$> pure m <*> shrink lbs)
  shrink (W8 w)         = W8 <$> shrink w
  shrink (W8S ws)       = W8S <$> shrink ws
  shrink (String cs)    = String <$> shrink cs
  shrink Flush          = []
  shrink (EnsureFree i) = EnsureFree <$> shrink i
  shrink (FDec f)       = FDec <$> shrink f
  shrink (DDec d)       = DDec <$> shrink d
  shrink (ModState i)   = ModState <$> shrink i
instance Arbitrary Strategy where
arbitrary = elements [Safe, Untrimmed]
shrink _ = []
instance Arbitrary Recipe where
arbitrary =
Recipe <$> arbitrary
<*> ((`mod` 33333) <$> arbitrary) -- bound max chunk-sizes
<*> ((`mod` 33337) <$> arbitrary)
<*> arbitrary
<*> listOf arbitrary
-- shrinking the actions first is desirable
shrink (Recipe a b c d e) = asum
[ (\x -> Recipe a b c d x) <$> shrink e
, (\x -> Recipe a b c x e) <$> shrink d
, (\x -> Recipe a b x d e) <$> shrink c
, (\x -> Recipe a x c d e) <$> shrink b
, (\x -> Recipe x b c d e) <$> shrink a
]
------------------------------------------------------------------------------
-- Creating Builders from basic encodings
------------------------------------------------------------------------------
testsEncodingToBuilder :: [Test]
testsEncodingToBuilder =
[ test_encodeUnfoldrF
, test_encodeUnfoldrB
, compareImpls "encodeSize/Chunked/Size/Chunked (recipe)"
(testBuilder id)
(
parseChunks parseHexLen .
parseSizePrefix parseHexLen .
parseChunks parseVar .
parseSizePrefix parseHexLen .
testBuilder (
prefixHexSize .
encodeVar .
prefixHexSize .
encodeHex
)
)
]
-- Unfoldr fused with encoding
------------------------------
test_encodeUnfoldrF :: Test
test_encodeUnfoldrF =
compareImpls "encodeUnfoldrF word8" id encode
where
toLBS = toLazyByteStringWith (safeStrategy 23 101) L.empty
encode =
L.unpack . toLBS . BP.primUnfoldrFixed BP.word8 go
where
go [] = Nothing
go (w:ws) = Just (w, ws)
test_encodeUnfoldrB :: Test
test_encodeUnfoldrB =
compareImpls "encodeUnfoldrB charUtf8" (concatMap charUtf8_list) encode
where
toLBS = toLazyByteStringWith (safeStrategy 23 101) L.empty
encode =
L.unpack . toLBS . BP.primUnfoldrBounded BP.charUtf8 go
where
go [] = Nothing
go (c:cs) = Just (c, cs)
-- Chunked encoding and size prefix
-----------------------------------
testBuilder :: (Builder -> Builder) -> Recipe -> L.ByteString
testBuilder f recipe =
toLBS (f b)
where
(b, toLBS) = recipeComponents $ clearTail recipe
-- need to remove tail of recipe to have a tighter
-- check on encodeWithSize
clearTail (Recipe how firstSize otherSize _ as) =
Recipe how firstSize otherSize L.empty as
-- | Chunked encoding using base-128, variable-length encoding for the
-- chunk-size.
encodeVar :: Builder -> Builder
encodeVar =
(`mappend` BP.primFixed BP.word8 0)
. (BP.encodeChunked 5 BP.word64VarFixedBound BP.emptyB)
-- | Chunked encoding using 0-padded, space-terminated hexadecimal numbers
-- for encoding the chunk-size.
encodeHex :: Builder -> Builder
encodeHex =
(`mappend` BP.primFixed (hexLen 0) 0)
. (BP.encodeChunked 7 hexLen BP.emptyB)
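-- | Fixed-width length field used by 'encodeHex' and 'prefixHexSize': the
-- length is written as a zero-padded hexadecimal number (width determined by
-- 'bound') followed by a single terminating space, which is the format that
-- 'parseHexLen' expects.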
hexLen :: Word64 -> BP.FixedPrim Word64
hexLen bound =
(\x -> (x, ' ')) BP.>$< (BP.word64HexFixedBound '0' bound BP.>*< BP.char8)
parseHexLen :: [Word8] -> (Int, [Word8])
parseHexLen ws = case span (/= 32) ws of
(lenWS, 32:ws') -> case readHex (map (chr . fromIntegral) lenWS) of
[(len, [])] -> (len, ws')
_ -> error $ "hex parse failed: " ++ show ws
(_, _) -> error $ "unterminated hex-length:" ++ show ws
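-- | Undo a chunked encoding, given a parser for the length fields: repeatedly
-- parse a chunk length, copy that many bytes, and stop at a zero-length
-- chunk, returning any bytes that follow the terminator unchanged.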
parseChunks :: ([Word8] -> (Int, [Word8])) -> L.ByteString -> L.ByteString
parseChunks parseLen =
L.pack . go . L.unpack
where
go ws
| chunkLen == 0 = rest
| chunkLen <= length ws' = chunk ++ go rest
| otherwise = error $ "too few bytes: " ++ show ws
where
(chunkLen, ws') = parseLen ws
(chunk, rest) = splitAt chunkLen ws'
-- | Prefix with size. We use an inner buffer size of 77 (almost primes are good) to
-- get several buffer full signals.
prefixHexSize :: Builder -> Builder
prefixHexSize = BP.encodeWithSize 77 hexLen
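-- | Undo a size prefix, given a parser for the length field: parse the
-- leading length and return exactly that many of the following bytes.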
parseSizePrefix :: ([Word8] -> (Int, [Word8])) -> L.ByteString -> L.ByteString
parseSizePrefix parseLen =
L.pack . go . L.unpack
where
go ws
| len <= length ws' = take len ws'
| otherwise = error $ "too few bytes: " ++ show (len, ws, ws')
where
(len, ws') = parseLen ws
------------------------------------------------------------------------------
-- Testing the Put monad
------------------------------------------------------------------------------
testPut :: Test
testPut = testGroup "Put monad"
[ testLaw "identity" (\v -> (pure id <*> putInt v) `eqPut` (putInt v))
, testLaw "composition" $ \(u, v, w) ->
(pure (.) <*> minusInt u <*> minusInt v <*> putInt w) `eqPut`
(minusInt u <*> (minusInt v <*> putInt w))
, testLaw "homomorphism" $ \(f, x) ->
(pure (f -) <*> pure x) `eqPut` (pure (f - x))
, testLaw "interchange" $ \(u, y) ->
(minusInt u <*> pure y) `eqPut` (pure ($ y) <*> minusInt u)
, testLaw "ignore left value" $ \(u, v) ->
(putInt u *> putInt v) `eqPut` (pure (const id) <*> putInt u <*> putInt v)
, testLaw "ignore right value" $ \(u, v) ->
(putInt u <* putInt v) `eqPut` (pure const <*> putInt u <*> putInt v)
, testLaw "functor" $ \(f, x) ->
(fmap (f -) (putInt x)) `eqPut` (pure (f -) <*> putInt x)
]
where
putInt i = putBuilder (integerDec i) >> return i
minusInt i = (-) <$> putInt i
run p = toLazyByteString $ fromPut (do i <- p; _ <- putInt i; return ())
eqPut p1 p2 = (run p1, run p2)
testLaw name f = compareImpls name (fst . f) (snd . f)
------------------------------------------------------------------------------
-- Testing the Driver <-> Builder protocol
------------------------------------------------------------------------------
-- | Ensure that there are at least 'n' free bytes for the following 'Builder'.
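--
-- A usage sketch (illustrative only): demand at least 100 free bytes of
-- buffer space before writing a single byte.
--
-- > toLazyByteString (ensureFree 100 `mappend` word8 1)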
{-# INLINE ensureFree #-}
ensureFree :: Int -> Builder
ensureFree minFree =
BI.builder step
where
step k br@(BI.BufferRange op ope)
| ope `minusPtr` op < minFree = return $ BI.bufferFull minFree op next
| otherwise = k br
where
next br'@(BI.BufferRange op' ope')
| freeSpace < minFree =
error $ "ensureFree: requested " ++ show minFree ++ " bytes, " ++
"but got only " ++ show freeSpace ++ " bytes"
| otherwise = k br'
where
freeSpace = ope' `minusPtr` op'
------------------------------------------------------------------------------
-- Testing the Builder runner
------------------------------------------------------------------------------
testRunBuilder :: Test
testRunBuilder =
testProperty "runBuilder" prop
where
prop actions =
morallyDubiousIOProperty $ do
let (builder, _) = recipeComponents recipe
expected = renderRecipe recipe
actual <- bufferWriterOutput (runBuilder builder)
return (S.unpack actual == expected)
where
recipe = Recipe Safe 0 0 mempty actions
bufferWriterOutput :: BufferWriter -> IO S.ByteString
bufferWriterOutput bwrite0 = do
let len0 = 8
buf <- S.mallocByteString len0
bss <- go [] buf len0 bwrite0
return (S.concat (reverse bss))
where
go :: [S.ByteString] -> ForeignPtr Word8 -> Int -> BufferWriter -> IO [S.ByteString]
go bss !buf !len bwrite = do
(wc, next) <- withForeignPtr buf $ \ptr -> bwrite ptr len
bs <- getBuffer buf wc
case next of
Done -> return (bs:bss)
More m bwrite' | m <= len -> go (bs:bss) buf len bwrite'
| otherwise -> do let len' = m
buf' <- S.mallocByteString len'
go (bs:bss) buf' len' bwrite'
Chunk c bwrite' -> go (c:bs:bss) buf len bwrite'
getBuffer :: ForeignPtr Word8 -> Int -> IO S.ByteString
getBuffer buf len = withForeignPtr buf $ \ptr ->
S.packCStringLen (castPtr ptr, len)
------------------------------------------------------------------------------
-- Testing the pre-defined builders
------------------------------------------------------------------------------
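-- | Compare a builder constructor against a pure reference implementation.
-- The builder is appended to itself before rendering, presumably so that the
-- append path and chunk boundaries are exercised in addition to a single
-- write.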
testBuilderConstr :: (Arbitrary a, Show a)
=> TestName -> (a -> [Word8]) -> (a -> Builder) -> Test
testBuilderConstr name ref mkBuilder =
testProperty name check
where
check x =
(ws ++ ws) ==
(L.unpack $ toLazyByteString $ mkBuilder x `mappend` mkBuilder x)
where
ws = ref x
testsBinary :: [Test]
testsBinary =
[ testBuilderConstr "word8" bigEndian_list word8
, testBuilderConstr "int8" bigEndian_list int8
-- big-endian
, testBuilderConstr "int16BE" bigEndian_list int16BE
, testBuilderConstr "int32BE" bigEndian_list int32BE
, testBuilderConstr "int64BE" bigEndian_list int64BE
, testBuilderConstr "word16BE" bigEndian_list word16BE
, testBuilderConstr "word32BE" bigEndian_list word32BE
, testBuilderConstr "word64BE" bigEndian_list word64BE
, testBuilderConstr "floatLE" (float_list littleEndian_list) floatLE
, testBuilderConstr "doubleLE" (double_list littleEndian_list) doubleLE
-- little-endian
, testBuilderConstr "int16LE" littleEndian_list int16LE
, testBuilderConstr "int32LE" littleEndian_list int32LE
, testBuilderConstr "int64LE" littleEndian_list int64LE
, testBuilderConstr "word16LE" littleEndian_list word16LE
, testBuilderConstr "word32LE" littleEndian_list word32LE
, testBuilderConstr "word64LE" littleEndian_list word64LE
, testBuilderConstr "floatBE" (float_list bigEndian_list) floatBE
, testBuilderConstr "doubleBE" (double_list bigEndian_list) doubleBE
-- host dependent
, testBuilderConstr "int16Host" hostEndian_list int16Host
, testBuilderConstr "int32Host" hostEndian_list int32Host
, testBuilderConstr "int64Host" hostEndian_list int64Host
, testBuilderConstr "intHost" hostEndian_list intHost
, testBuilderConstr "word16Host" hostEndian_list word16Host
, testBuilderConstr "word32Host" hostEndian_list word32Host
, testBuilderConstr "word64Host" hostEndian_list word64Host
, testBuilderConstr "wordHost" hostEndian_list wordHost
, testBuilderConstr "floatHost" (float_list hostEndian_list) floatHost
, testBuilderConstr "doubleHost" (double_list hostEndian_list) doubleHost
]
testsASCII :: [Test]
testsASCII =
[ testBuilderConstr "char7" char7_list char7
, testBuilderConstr "string7" (concatMap char7_list) string7
, testBuilderConstr "int8Dec" dec_list int8Dec
, testBuilderConstr "int16Dec" dec_list int16Dec
, testBuilderConstr "int32Dec" dec_list int32Dec
, testBuilderConstr "int64Dec" dec_list int64Dec
, testBuilderConstr "intDec" dec_list intDec
, testBuilderConstr "word8Dec" dec_list word8Dec
, testBuilderConstr "word16Dec" dec_list word16Dec
, testBuilderConstr "word32Dec" dec_list word32Dec
, testBuilderConstr "word64Dec" dec_list word64Dec
, testBuilderConstr "wordDec" dec_list wordDec
, testBuilderConstr "integerDec" dec_list integerDec
, testBuilderConstr "floatDec" dec_list floatDec
, testBuilderConstr "doubleDec" dec_list doubleDec
, testBuilderConstr "word8Hex" hex_list word8Hex
, testBuilderConstr "word16Hex" hex_list word16Hex
, testBuilderConstr "word32Hex" hex_list word32Hex
, testBuilderConstr "word64Hex" hex_list word64Hex
, testBuilderConstr "wordHex" hex_list wordHex
, testBuilderConstr "word8HexFixed" wordHexFixed_list word8HexFixed
, testBuilderConstr "word16HexFixed" wordHexFixed_list word16HexFixed
, testBuilderConstr "word32HexFixed" wordHexFixed_list word32HexFixed
, testBuilderConstr "word64HexFixed" wordHexFixed_list word64HexFixed
, testBuilderConstr "int8HexFixed" int8HexFixed_list int8HexFixed
, testBuilderConstr "int16HexFixed" int16HexFixed_list int16HexFixed
, testBuilderConstr "int32HexFixed" int32HexFixed_list int32HexFixed
, testBuilderConstr "int64HexFixed" int64HexFixed_list int64HexFixed
, testBuilderConstr "floatHexFixed" floatHexFixed_list floatHexFixed
, testBuilderConstr "doubleHexFixed" doubleHexFixed_list doubleHexFixed
]
testsChar8 :: [Test]
testsChar8 =
[ testBuilderConstr "charChar8" char8_list char8
, testBuilderConstr "stringChar8" (concatMap char8_list) string8
]
testsUtf8 :: [Test]
testsUtf8 =
[ testBuilderConstr "charUtf8" charUtf8_list charUtf8
, testBuilderConstr "stringUtf8" (concatMap charUtf8_list) stringUtf8
]
|
markflorisson/hpack
|
testrepo/bytestring-0.10.2.0/tests/builder/Data/ByteString/Builder/Tests.hs
|
bsd-3-clause
| 23,982
| 0
| 17
| 6,257
| 6,542
| 3,386
| 3,156
| -1
| -1
|
-- |
-- Module: Crypto.HKDF
-- Maintainer: Jiri Marsicek <jiri.marsicek@gmail.com>
--
-- This module provides an implementation of the HKDF function defined in
-- RFC-5869 (<http://www.ietf.org/rfc/rfc5869.txt>).
-- It uses 'HashAlgorithm' from "cryptohash" as the underlying implementation.
--
module Crypto.HKDF
( hkdfExtract
, hkdfExpand
, hkdf
) where
import Crypto.Hash (HashAlgorithm)
import Crypto.MAC (HMAC, hmacAlg)
import Data.Byteable (toBytes)
import Data.ByteString (ByteString)
import qualified Data.ByteString as BS (concat, empty, length, take)
import qualified Data.ByteString.Char8 as C8 (singleton)
import Data.Char (chr)
-- | Extract function.
--
-- Synonym to 'hmacAlg'
hkdfExtract :: (HashAlgorithm a) => a -- ^ hash algorithm
-> ByteString -- ^ optional salt value (a non-secret random value)
-> ByteString -- ^ input keying material
-> HMAC a -- ^ a pseudorandom key
hkdfExtract = hmacAlg
-- | Expand function.
--
-- 'Nothing' is returned if the requested output length exceeds 255 * hash length (in octets).
hkdfExpand :: (HashAlgorithm a) => a -- ^ hash algorithm
-> ByteString -- ^ pseudorandom key
-> ByteString -- ^ info
-> Int -- ^ length of output keying material in octets
-> Maybe ByteString -- ^ output keying material
hkdfExpand alg prk info l
| l <= 255 * chunkSize = Just $ BS.take l $ BS.concat $ take (l `div` chunkSize + 2) hkdfChunks
| otherwise = Nothing
where hkdfChunks = map fst $ iterate (hkdfSingle alg prk info) (BS.empty, 1)
chunkSize = BS.length $ hkdfChunks !! 1
type HKDFIteration = (ByteString, Int)
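-- | One expansion step of HKDF (RFC-5869, section 2.3):
-- T(i) = HMAC-Hash(PRK, T(i-1) | info | i), where the counter i is encoded as
-- a single octet.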
hkdfSingle :: (HashAlgorithm a) => a -- ^ hash algorithm
-> ByteString -- ^ pseudorandom key
-> ByteString -- ^ info
-> HKDFIteration -- ^ output of previous iteration
-> HKDFIteration -- ^ output of current iteration
hkdfSingle alg prk info (prev, n) = (toBytes $ hmacAlg alg prk $ BS.concat [prev, info, C8.singleton $ chr n], n + 1)
-- | Function combining extract and expand functions.
hkdf :: (HashAlgorithm a) => a -- ^ hash algorithm
-> ByteString -- ^ optional salt value (a non-secret random value)
-> ByteString -- ^ input keying material
-> ByteString -- ^ info
-> Int -- ^ length of output keying material in octets
-> Maybe ByteString -- ^ output keying material
hkdf alg salt ikm = hkdfExpand alg (toBytes $ hkdfExtract alg salt ikm)
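-- A minimal usage sketch (not part of the original module): derive 32 octets
-- of output keying material with SHA-256. It assumes the 'SHA256' constructor
-- exported by cryptohash's "Crypto.Hash" is in scope; the salt, input keying
-- material, and info values below are placeholders.
--
-- > import Crypto.Hash (SHA256 (..))
-- > import qualified Data.ByteString.Char8 as B8
-- >
-- > exampleOKM :: Maybe ByteString
-- > exampleOKM =
-- >   hkdf SHA256 (B8.pack "salt") (B8.pack "input key material")
-- >        (B8.pack "application info") 32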
|
j1r1k/hkdf
|
src/Crypto/HKDF.hs
|
bsd-3-clause
| 2,660
| 0
| 11
| 761
| 510
| 294
| 216
| 40
| 1
|
{-# LANGUAGE FlexibleInstances #-}
module LibSpec (main, spec) where
import Test.Hspec
import Test.QuickCheck
import Generic.Random.Generic
import Control.Lens hiding (elements)
import Lib hiding (main)
instance Arbitrary Location where
arbitrary = genericArbitrary' Z uniform
instance Arbitrary Target where
arbitrary = genericArbitrary' Z uniform
instance Arbitrary CardModType where
arbitrary = genericArbitrary' Z uniform
instance Arbitrary CardCappingType where
arbitrary = genericArbitrary' Z uniform
instance Arbitrary RangeType where
arbitrary = genericArbitrary' Z uniform
instance Arbitrary CardProps where
arbitrary = genericArbitrary' Z uniform
instance Arbitrary CardFilter where
arbitrary = genericArbitrary' Z uniform
instance Arbitrary (BoardCheckType Bool) where
arbitrary = oneof [return HasHighestBoardCard
, return HasDifferentBoardCards
]
instance Arbitrary Card where
arbitrary = genericArbitrary' Z uniform
instance Arbitrary PlayerState where
arbitrary = genericArbitrary' Z uniform
instance Arbitrary CheckTransform where
arbitrary = elements [ CheckTransform AvgCardVal SetAll
]
testPlayer_EmptyDeckHand = PlayerState {_hand = [], _deck = [], _board = [NumberCard 0, NumberCard 0, NumberCard 0], _winner = False}
testGS_All0 = GameState {_playerState = [testPlayer_EmptyDeckHand, testPlayer_EmptyDeckHand], _playerTurn = 0, _turnCount = 0}
testPlayer_Board123 = PlayerState {_hand = [], _deck = [], _board = [NumberCard 1, NumberCard 2, NumberCard 3], _winner = False}
testGS_Board123 = GameState {_playerState = [testPlayer_Board123, testPlayer_Board123], _playerTurn = 0, _turnCount = 0}
testPlayer_HandDeck0 = PlayerState {_hand = replicate 3 (NumberCard 0), _deck = replicate 3 (NumberCard 0), _board = replicate 3 (NumberCard 0), _winner = False }
testGS_HandDeck0 = GameState {_playerState = [testPlayer_HandDeck0, testPlayer_HandDeck0], _playerTurn = 0, _turnCount = 0}
testPlayer_WithBoard b = PlayerState {_hand = [], _deck = [], _board = b, _winner = False}
testGS_B1B2 b1 b2 = GameState {_playerState = [testPlayer_WithBoard b1, testPlayer_WithBoard b2], _playerTurn = 0, _turnCount = 0}
testPlayer_WithDeck d = PlayerState {_hand = [], _deck = d, _board = [NumberCard 1], _winner = False}
testGS_D1D2 d1 d2 = GameState {_playerState = [testPlayer_WithDeck d1, testPlayer_WithDeck d2], _playerTurn = 0, _turnCount = 0}
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "locAsLens" $ do
it "should work in board case" $ property $ do
\ps -> _board ps == ps ^. locAsLens Board
{- it "should work in deck case" $ property $ do
\ps -> _deck ps == ps ^. locAsLens Deck
it "should work in hand case" $ property $ do
\ps -> _hand ps == ps ^. locAsLens Hand
describe "discardCard" $ do
it "should decrease hand size" $ property $ do
\ps -> length (discardCard ps ^. hand) <= length (ps ^. hand)
describe "card1" $ do
it "should work in basic case" $ do
newBoard <- playCard 0 card1 testGS_Board123
newBoard `shouldBe` (testGS_Board123 & (playerState . element 0 . board . element 0) .~ NumberCard 10)
describe "card2" $ do
it "should work in basic case" $ do
newBoard <- playCard 0 card2 testGS_Board123
newBoard `shouldBe` (testGS_Board123 & (playerState . element 0 . board . element 0) .~ NumberCard 6
& (playerState . element 1 . board . element 0) .~ NumberCard 6
)
describe "card3" $ do
it "should work in basic case" $ do
newBoard <- playCard 0 card3 testGS_All0
newBoard `shouldBe` (testGS_All0 & (playerState . element 0 . board) .~ replicate 3 (NumberCard 2))
describe "card4" $ do
it "should work in basic case" $ do
let gs = testGS_B1B2 (map NumberCard [0, 1, 0]) (map NumberCard [0, 1, 0])
newBoard <- playCard 0 card4 gs
newBoard `shouldBe` (gs & (playerState . element 0 . board) .~ replicate 3 (NumberCard 1))
it "should work in highest is rightmost case" $ do
let gs = testGS_B1B2 (map NumberCard [0, 0, 1]) (map NumberCard [0, 1, 0])
newBoard <- playCard 0 card4 gs
newBoard `shouldBe` (gs & (playerState . element 0 . board) .~ map NumberCard [0, 1, 1])
it "should work in highest is leftmost case" $ do
let gs = testGS_B1B2 (map NumberCard [1, 0, 0]) (map NumberCard [0, 1, 0])
newBoard <- playCard 0 card4 gs
newBoard `shouldBe` (gs & (playerState . element 0 . board) .~ map NumberCard [1, 1, 0])
describe "card5" $ do
it "should work in basic case" $ do
newBoard <- playCard 0 card5 testGS_Board123
newBoard `shouldBe` (testGS_Board123 & (playerState . element 0 . board) .~ map NumberCard [0, 2, 4])
describe "card6" $ do
it "should work in basic case " $ do
newBoard <- playCard 0 card6 testGS_Board123
newBoard `shouldBe` (testGS_Board123 & (playerState . element 0 . board) .~ map NumberCard [2, 2, 2]
& (playerState . element 1 . board) .~ map NumberCard [2, 2, 2]
)
describe "card7" $ do
it "should work in basic case" $ do
newBoard <- playCard 0 card7 testGS_HandDeck0
newBoard `shouldBe` (testGS_HandDeck0 & (playerState . element 0 . deck) .~ replicate 3 (NumberCard 2))
describe "card8" $ do
it "should work in basic case" $ do
newBoard <- playCard 0 card8 testGS_Board123
newBoard `shouldBe` testGS_All0
describe "card9" $ do
it "should work in basic case" $ do
let gs = testGS_D1D2 (map NumberCard [1, 2]) (map NumberCard [1,2])
newBoard <- playCard 0 card9 gs
let newDeck = map NumberCard [0, 1]
newBoard ^.. (playerState . element 1 . deck . traverse) `shouldBe` newDeck
newBoard `shouldBe` (gs & (playerState . element 1 . deck) .~ newDeck)
describe "card10" $ do
it "should work in basic case" $ do
let gs = testGS_D1D2 (map NumberCard [1,2,3,4]) (map NumberCard [1,2,3,4])
newBoard <- playCard 0 (card10 (NumberCard 3) (NumberCard 4)) gs
let newDeck = map NumberCard [3,4,1,2]
newBoard ^.. (playerState . element 0 . deck . traverse) `shouldBe` newDeck
newBoard `shouldBe` (gs & (playerState . element 0 . deck) .~ newDeck)
describe "card12" $ do
it "should work in basic case" $ do
newBoard <- playCard 0 card12 testGS_Board123
newBoard `shouldBe` (testGS_Board123 & (playerState . element 0 . board . element 2) .~ NumberCard 8)
describe "card13" $ do
it "should work in basic case" $ do
let gs = testGS_B1B2 (map NumberCard [1,2,2,2]) (map NumberCard [1,1,2,2])
newBoard <- playCard 0 card13 gs
newBoard ^.. playerState . element 0 . board . traverse `shouldBe` map NumberCard [3,2,2,2]
newBoard ^.. playerState . element 1 . board . traverse `shouldBe` map NumberCard [0,0,2,2]
describe "card14" $ do
it "should work in basic case" $ do
let gs = testGS_D1D2 (map NumberCard [2,3,4]) []
newBoard <- playCard 0 card14 gs
newBoard ^.. playerState . element 0 . board . traverse `shouldBe` [NumberCard 3]
newBoard ^.. playerState . element 0 . deck . traverse `shouldBe` [NumberCard 4]
describe "card15" $ do
it "should work in basic case" $ do
let gs = testGS_B1B2 (map NumberCard [1,2]) (map NumberCard [1,1])
newBoard <- playCard 0 (card15 (NumberCard 5)) gs
newBoard ^.. playerState . element 0 . board . traverse `shouldBe` map NumberCard [5,2]
it "should work when condition doesn't hold" $ do
let gs = testGS_B1B2 (map NumberCard [1,1]) (map NumberCard [1,2])
newBoard <- playCard 0 (card15 (NumberCard 5)) gs
newBoard ^.. playerState . element 0 . board . traverse `shouldBe` map NumberCard [1,1]
-}
|
rubenpieters/gre-project
|
shared/test/LibSpec.hs
|
bsd-3-clause
| 7,867
| 0
| 16
| 1,819
| 771
| 431
| 340
| 47
| 1
|
{-# LANGUAGE OverloadedStrings #-}
{- |
Module : Text.Pandoc.Filters.ShortcutLinks
License : BSD3
Maintainer : Artyom <yom@artyom.me>
This filter turns links that look like “[something](\@w)” into <https://en.wikipedia.org/wiki/Something>. For details, see <https://github.com/aelve/shortcut-links>.
-}
module Text.Pandoc.Filters.ShortcutLinks
(
shortcutLinks,
)
where
-- General
import Data.Maybe
import Control.Applicative
-- Parsing
import Text.Parsec hiding (optional, (<|>))
-- Text
import Text.Printf
import qualified Data.Text as T
import Data.Text (Text)
-- Pandoc
import Text.Pandoc.Definition
import Text.Pandoc.Walk
-- shortcut-links
import ShortcutLinks
-- local
import Text.Pandoc.Filters.Utils
shortcutLinks :: Inline -> IO Inline
shortcutLinks i@(Link attr is (url, title)) | '@':_ <- url = do
  -- %20s are introduced by Pandoc and need to be converted back to spaces
let urlOriginalT = T.replace "%20" " " (T.pack url)
urlOriginalS = T.unpack urlOriginalT
case parseLink urlOriginalT of
Left err -> do
printf "'%s' is not a proper shortcut link: %s\n" urlOriginalS err
return i
Right (shortcut, option, text) -> do
let shortcut' = shortcut
option' = option
text' = fromMaybe (T.pack (stringify is)) text
case useShortcut shortcut' option' text' of
Success link -> return (Link attr is (T.unpack link, title))
Warning warnings link -> do
printf "Warnings when processing a shortcut link (%s):\n"
urlOriginalS
mapM_ putStrLn warnings
return (Link attr is (T.unpack link, title))
Failure err -> do
error $ printf "Error when processing a shortcut link (%s): %s\n"
urlOriginalS err
shortcutLinks other = return other
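-- A possible way to run the filter over a whole document (a sketch, not part
-- of the original module); 'walkM' is provided by "Text.Pandoc.Walk", which
-- is already imported above:
--
-- > applyShortcutLinks :: Pandoc -> IO Pandoc
-- > applyShortcutLinks = walkM shortcutLinks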
-- | Parse a shortcut link. Allowed formats:
--
-- @
-- \@name
-- \@name:text
-- \@name(option)
-- \@name(option):text
-- @
parseLink :: Text -> Either String (Text, Maybe Text, Maybe Text)
parseLink = either (Left . show) Right . parse p ""
where
shortcut = some (alphaNum <|> char '-')
option = char '(' *> some (noneOf ")") <* char ')'
text = char ':' *> some anyChar
p = do
char '@'
(,,) <$> T.pack <$> shortcut
<*> optional (T.pack <$> option)
<*> optional (T.pack <$> text)
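-- Illustrative expected results for 'parseLink', matching the formats
-- documented above (example inputs are assumptions, not from the original
-- module):
--
-- > parseLink "@w"                   == Right ("w", Nothing, Nothing)
-- > parseLink "@w(mirror):Something" == Right ("w", Just "mirror", Just "Something")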
|
aelve/pandoc-contrib
|
lib/Text/Pandoc/Filters/ShortcutLinks.hs
|
bsd-3-clause
| 2,367
| 0
| 23
| 567
| 595
| 310
| 285
| 47
| 4
|