| code (stringlengths 2–1.05M) | repo_name (stringlengths 5–101) | path (stringlengths 4–991) | language (stringclasses 3 values) | license (stringclasses 5 values) | size (int64 2–1.05M) |
|---|---|---|---|---|---|
{-# LANGUAGE FlexibleContexts #-}
module TypeCheck where
import Control.Monad.Except
import Control.Monad.Reader
import Data.List
import Text.Printf
type TypeVar = String
data Type = Type :->: Type
| TVar TypeVar
| ForAll TypeVar Type
deriving (Show)
infixr :->:
instance Eq Type where
(==) = go []
where
go :: [(TypeVar, TypeVar)] -> Type -> Type -> Bool
go ctx (ll :->: lr) (rl :->: rr) = (go ctx ll rl) && (go ctx lr rr)
go ctx (ForAll vl l) (ForAll vr r) = go ((vl, vr):ctx) l r
go ctx (TVar vl) (TVar vr) = let res = find (\(name, _) -> name == vl) ctx in
case res of Nothing -> vl == vr
Just (_, n) -> n == vr
go _ _ _ = False
type ExprVar = String
data Expr = EVar ExprVar
| Abs (ExprVar, Type) Expr
| TAbs TypeVar Expr
| Expr :@: Expr
| Expr :$: Type
deriving (Show, Eq)
infixl :@: -- Application term to term
infixl :$: -- Application type to term
data Assumption = EAs ExprVar Type
| TAs TypeVar
newtype Context = Context [Assumption]
getTypeFromContext :: ExprVar -> Context -> Maybe Type
getTypeFromContext varName (Context ctx) = go ctx
where
go :: [Assumption] -> Maybe Type
go [] = Nothing
go ((EAs varName' t):xs) | varName' == varName = Just t
go (_:xs) = go xs
addEAs :: ExprVar -> Type -> Context -> Context
addEAs varName varType (Context ctx) = Context $ (EAs varName varType):ctx
addTAs :: TypeVar -> Context -> Context
addTAs typeVarName (Context ctx) = Context $ (TAs typeVarName):ctx
emptyContext :: Context
emptyContext = Context []
fromListContext :: [Assumption] -> Context
fromListContext = Context
typeSubst :: TypeVar -> Type -> Type -> Type
typeSubst typeVarName concreteType = go
where
go (tl :->: tr) = go tl :->: go tr
go t@(TVar name) = if name == typeVarName then concreteType
else t
go t@(ForAll name st) = if name /= typeVarName then ForAll name $ go st
else t
typeCheck :: ( Monad m
, MonadReader Context m
, MonadError String m
) => Expr -> m Type
typeCheck (EVar varName) = do
t <- reader $ getTypeFromContext varName
case t of Nothing -> throwError $ printf "Unknown variable: %s" varName
Just t -> return t
typeCheck (Abs (varName, varType) e) = do
t <- local (addEAs varName varType) (typeCheck e)
return $ varType :->: t
typeCheck (TAbs typeVarName e) = do
t <- local (addTAs typeVarName) (typeCheck e)
return $ ForAll typeVarName t
typeCheck (el :@: er) = do
tl <- typeCheck el
tr <- typeCheck er
case tl of (argType :->: resultType) | argType == tr -> return resultType
_ -> throwError errorMsg
where
errorMsg = printf ("Term (%s) :: (%s)\n" ++
"is not applicable to\n" ++
"term (%s) :: (%s)")
(show er) (show tr) (show el) (show tl)
typeCheck (e :$: concreteType) = do
exprType <- typeCheck e
case exprType of (ForAll typeVarName t) -> return $ typeSubst typeVarName concreteType t
_ -> throwError errorMsg
where
errorMsg = printf ("Type (%s)\n" ++
"is not applicable to\n" ++
"term (%s) :: (%s)\n")
(show concreteType) (show e) (show exprType)
runTypeCheckInContext :: Context -> Expr -> Either String Type
runTypeCheckInContext ctx expr = runExcept $ runReaderT (typeCheck expr) ctx
runTypeCheck :: Expr -> Either String Type
runTypeCheck expr = runTypeCheckInContext emptyContext expr
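-- A small usage sketch (illustrative, not part of the original module): the
-- polymorphic identity /\X. \(x : X). x should check to forall X. X -> X.
--
-- > runTypeCheck (TAbs "X" (Abs ("x", TVar "X") (EVar "x")))
-- >   == Right (ForAll "X" (TVar "X" :->: TVar "X"))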
|
SergeevPavel/system-f-typecheck
|
src/TypeCheck.hs
|
Haskell
|
bsd-3-clause
| 4,261
|
{-# language CPP #-}
-- No documentation found for Chapter "BufferView"
module Vulkan.Core10.BufferView ( createBufferView
, withBufferView
, destroyBufferView
, BufferViewCreateInfo(..)
, BufferView(..)
, BufferViewCreateFlags(..)
) where
import Vulkan.Internal.Utils (traceAroundEvent)
import Control.Exception.Base (bracket)
import Control.Monad (unless)
import Control.Monad.IO.Class (liftIO)
import Foreign.Marshal.Alloc (allocaBytes)
import Foreign.Marshal.Alloc (callocBytes)
import Foreign.Marshal.Alloc (free)
import GHC.Base (when)
import GHC.IO (throwIO)
import GHC.Ptr (nullFunPtr)
import Foreign.Ptr (nullPtr)
import Foreign.Ptr (plusPtr)
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.Cont (evalContT)
import Vulkan.CStruct (FromCStruct)
import Vulkan.CStruct (FromCStruct(..))
import Vulkan.CStruct (ToCStruct)
import Vulkan.CStruct (ToCStruct(..))
import Vulkan.Zero (Zero(..))
import Control.Monad.IO.Class (MonadIO)
import Data.Typeable (Typeable)
import Foreign.Storable (Storable)
import Foreign.Storable (Storable(peek))
import Foreign.Storable (Storable(poke))
import qualified Foreign.Storable (Storable(..))
import GHC.Generics (Generic)
import GHC.IO.Exception (IOErrorType(..))
import GHC.IO.Exception (IOException(..))
import Foreign.Ptr (FunPtr)
import Foreign.Ptr (Ptr)
import Data.Kind (Type)
import Control.Monad.Trans.Cont (ContT(..))
import Vulkan.NamedType ((:::))
import Vulkan.Core10.AllocationCallbacks (AllocationCallbacks)
import Vulkan.Core10.Handles (Buffer)
import Vulkan.Core10.Handles (BufferView)
import Vulkan.Core10.Handles (BufferView(..))
import Vulkan.Core10.Enums.BufferViewCreateFlags (BufferViewCreateFlags)
import Vulkan.Core10.Handles (Device)
import Vulkan.Core10.Handles (Device(..))
import Vulkan.Core10.Handles (Device(Device))
import Vulkan.Dynamic (DeviceCmds(pVkCreateBufferView))
import Vulkan.Dynamic (DeviceCmds(pVkDestroyBufferView))
import Vulkan.Core10.FundamentalTypes (DeviceSize)
import Vulkan.Core10.Handles (Device_T)
import Vulkan.Core10.Enums.Format (Format)
import Vulkan.Core10.Enums.Result (Result)
import Vulkan.Core10.Enums.Result (Result(..))
import Vulkan.Core10.Enums.StructureType (StructureType)
import Vulkan.Exception (VulkanException(..))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO))
import Vulkan.Core10.Enums.Result (Result(SUCCESS))
import Vulkan.Core10.Handles (BufferView(..))
import Vulkan.Core10.Enums.BufferViewCreateFlags (BufferViewCreateFlags(..))
foreign import ccall
#if !defined(SAFE_FOREIGN_CALLS)
unsafe
#endif
"dynamic" mkVkCreateBufferView
:: FunPtr (Ptr Device_T -> Ptr BufferViewCreateInfo -> Ptr AllocationCallbacks -> Ptr BufferView -> IO Result) -> Ptr Device_T -> Ptr BufferViewCreateInfo -> Ptr AllocationCallbacks -> Ptr BufferView -> IO Result
-- | vkCreateBufferView - Create a new buffer view object
--
-- == Valid Usage (Implicit)
--
-- - #VUID-vkCreateBufferView-device-parameter# @device@ /must/ be a
-- valid 'Vulkan.Core10.Handles.Device' handle
--
-- - #VUID-vkCreateBufferView-pCreateInfo-parameter# @pCreateInfo@ /must/
-- be a valid pointer to a valid 'BufferViewCreateInfo' structure
--
-- - #VUID-vkCreateBufferView-pAllocator-parameter# If @pAllocator@ is
-- not @NULL@, @pAllocator@ /must/ be a valid pointer to a valid
-- 'Vulkan.Core10.AllocationCallbacks.AllocationCallbacks' structure
--
-- - #VUID-vkCreateBufferView-pView-parameter# @pView@ /must/ be a valid
-- pointer to a 'Vulkan.Core10.Handles.BufferView' handle
--
-- == Return Codes
--
-- [<https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#fundamentals-successcodes Success>]
--
-- - 'Vulkan.Core10.Enums.Result.SUCCESS'
--
-- [<https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#fundamentals-errorcodes Failure>]
--
-- - 'Vulkan.Core10.Enums.Result.ERROR_OUT_OF_HOST_MEMORY'
--
-- - 'Vulkan.Core10.Enums.Result.ERROR_OUT_OF_DEVICE_MEMORY'
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_VERSION_1_0 VK_VERSION_1_0>,
-- 'Vulkan.Core10.AllocationCallbacks.AllocationCallbacks',
-- 'Vulkan.Core10.Handles.BufferView', 'BufferViewCreateInfo',
-- 'Vulkan.Core10.Handles.Device'
createBufferView :: forall io
. (MonadIO io)
=> -- | @device@ is the logical device that creates the buffer view.
Device
-> -- | @pCreateInfo@ is a pointer to a 'BufferViewCreateInfo' structure
-- containing parameters to be used to create the buffer view.
BufferViewCreateInfo
-> -- | @pAllocator@ controls host memory allocation as described in the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#memory-allocation Memory Allocation>
-- chapter.
("allocator" ::: Maybe AllocationCallbacks)
-> io (BufferView)
createBufferView device createInfo allocator = liftIO . evalContT $ do
let vkCreateBufferViewPtr = pVkCreateBufferView (case device of Device{deviceCmds} -> deviceCmds)
lift $ unless (vkCreateBufferViewPtr /= nullFunPtr) $
throwIO $ IOError Nothing InvalidArgument "" "The function pointer for vkCreateBufferView is null" Nothing Nothing
let vkCreateBufferView' = mkVkCreateBufferView vkCreateBufferViewPtr
pCreateInfo <- ContT $ withCStruct (createInfo)
pAllocator <- case (allocator) of
Nothing -> pure nullPtr
Just j -> ContT $ withCStruct (j)
pPView <- ContT $ bracket (callocBytes @BufferView 8) free
r <- lift $ traceAroundEvent "vkCreateBufferView" (vkCreateBufferView' (deviceHandle (device)) pCreateInfo pAllocator (pPView))
lift $ when (r < SUCCESS) (throwIO (VulkanException r))
pView <- lift $ peek @BufferView pPView
pure $ (pView)
-- | A convenience wrapper to make a compatible pair of calls to
-- 'createBufferView' and 'destroyBufferView'
--
-- To ensure that 'destroyBufferView' is always called: pass
-- 'Control.Exception.bracket' (or the allocate function from your
-- favourite resource management library) as the last argument.
-- To just extract the pair pass '(,)' as the last argument.
--
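-- A typical call shape (sketch, assuming @io ~ IO@ and using
-- 'Control.Exception.bracket'; the body is illustrative):
--
-- > withBufferView device createInfo Nothing bracket $ \bufferView ->
-- >   {- ... use bufferView ... -} pure ()
--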
withBufferView :: forall io r . MonadIO io => Device -> BufferViewCreateInfo -> Maybe AllocationCallbacks -> (io BufferView -> (BufferView -> io ()) -> r) -> r
withBufferView device pCreateInfo pAllocator b =
b (createBufferView device pCreateInfo pAllocator)
(\(o0) -> destroyBufferView device o0 pAllocator)
foreign import ccall
#if !defined(SAFE_FOREIGN_CALLS)
unsafe
#endif
"dynamic" mkVkDestroyBufferView
:: FunPtr (Ptr Device_T -> BufferView -> Ptr AllocationCallbacks -> IO ()) -> Ptr Device_T -> BufferView -> Ptr AllocationCallbacks -> IO ()
-- | vkDestroyBufferView - Destroy a buffer view object
--
-- == Valid Usage
--
-- - #VUID-vkDestroyBufferView-bufferView-00936# All submitted commands
-- that refer to @bufferView@ /must/ have completed execution
--
-- - #VUID-vkDestroyBufferView-bufferView-00937# If
-- 'Vulkan.Core10.AllocationCallbacks.AllocationCallbacks' were
-- provided when @bufferView@ was created, a compatible set of
-- callbacks /must/ be provided here
--
-- - #VUID-vkDestroyBufferView-bufferView-00938# If no
-- 'Vulkan.Core10.AllocationCallbacks.AllocationCallbacks' were
-- provided when @bufferView@ was created, @pAllocator@ /must/ be
-- @NULL@
--
-- == Valid Usage (Implicit)
--
-- - #VUID-vkDestroyBufferView-device-parameter# @device@ /must/ be a
-- valid 'Vulkan.Core10.Handles.Device' handle
--
-- - #VUID-vkDestroyBufferView-bufferView-parameter# If @bufferView@ is
-- not 'Vulkan.Core10.APIConstants.NULL_HANDLE', @bufferView@ /must/ be
-- a valid 'Vulkan.Core10.Handles.BufferView' handle
--
-- - #VUID-vkDestroyBufferView-pAllocator-parameter# If @pAllocator@ is
-- not @NULL@, @pAllocator@ /must/ be a valid pointer to a valid
-- 'Vulkan.Core10.AllocationCallbacks.AllocationCallbacks' structure
--
-- - #VUID-vkDestroyBufferView-bufferView-parent# If @bufferView@ is a
-- valid handle, it /must/ have been created, allocated, or retrieved
-- from @device@
--
-- == Host Synchronization
--
-- - Host access to @bufferView@ /must/ be externally synchronized
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_VERSION_1_0 VK_VERSION_1_0>,
-- 'Vulkan.Core10.AllocationCallbacks.AllocationCallbacks',
-- 'Vulkan.Core10.Handles.BufferView', 'Vulkan.Core10.Handles.Device'
destroyBufferView :: forall io
. (MonadIO io)
=> -- | @device@ is the logical device that destroys the buffer view.
Device
-> -- | @bufferView@ is the buffer view to destroy.
BufferView
-> -- | @pAllocator@ controls host memory allocation as described in the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#memory-allocation Memory Allocation>
-- chapter.
("allocator" ::: Maybe AllocationCallbacks)
-> io ()
destroyBufferView device bufferView allocator = liftIO . evalContT $ do
let vkDestroyBufferViewPtr = pVkDestroyBufferView (case device of Device{deviceCmds} -> deviceCmds)
lift $ unless (vkDestroyBufferViewPtr /= nullFunPtr) $
throwIO $ IOError Nothing InvalidArgument "" "The function pointer for vkDestroyBufferView is null" Nothing Nothing
let vkDestroyBufferView' = mkVkDestroyBufferView vkDestroyBufferViewPtr
pAllocator <- case (allocator) of
Nothing -> pure nullPtr
Just j -> ContT $ withCStruct (j)
lift $ traceAroundEvent "vkDestroyBufferView" (vkDestroyBufferView' (deviceHandle (device)) (bufferView) pAllocator)
pure $ ()
-- | VkBufferViewCreateInfo - Structure specifying parameters of a newly
-- created buffer view
--
-- == Valid Usage
--
-- - #VUID-VkBufferViewCreateInfo-offset-00925# @offset@ /must/ be less
-- than the size of @buffer@
--
-- - #VUID-VkBufferViewCreateInfo-range-00928# If @range@ is not equal to
-- 'Vulkan.Core10.APIConstants.WHOLE_SIZE', @range@ /must/ be greater
-- than @0@
--
-- - #VUID-VkBufferViewCreateInfo-range-00929# If @range@ is not equal to
-- 'Vulkan.Core10.APIConstants.WHOLE_SIZE', @range@ /must/ be an
-- integer multiple of the texel block size of @format@
--
-- - #VUID-VkBufferViewCreateInfo-range-00930# If @range@ is not equal to
-- 'Vulkan.Core10.APIConstants.WHOLE_SIZE', the number of texel buffer
-- elements given by (⌊@range@ \/ (texel block size)⌋ × (texels per
-- block)) where texel block size and texels per block are as defined
-- in the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#formats-compatibility Compatible Formats>
-- table for @format@, /must/ be less than or equal to
-- 'Vulkan.Core10.DeviceInitialization.PhysicalDeviceLimits'::@maxTexelBufferElements@
--
-- - #VUID-VkBufferViewCreateInfo-offset-00931# If @range@ is not equal
-- to 'Vulkan.Core10.APIConstants.WHOLE_SIZE', the sum of @offset@ and
-- @range@ /must/ be less than or equal to the size of @buffer@
--
-- - #VUID-VkBufferViewCreateInfo-range-04059# If @range@ is equal to
-- 'Vulkan.Core10.APIConstants.WHOLE_SIZE', the number of texel buffer
-- elements given by (⌊(size - @offset@) \/ (texel block size)⌋ ×
-- (texels per block)) where size is the size of @buffer@, and texel
-- block size and texels per block are as defined in the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#formats-compatibility Compatible Formats>
-- table for @format@, /must/ be less than or equal to
-- 'Vulkan.Core10.DeviceInitialization.PhysicalDeviceLimits'::@maxTexelBufferElements@
--
-- - #VUID-VkBufferViewCreateInfo-buffer-00932# @buffer@ /must/ have been
-- created with a @usage@ value containing at least one of
-- 'Vulkan.Core10.Enums.BufferUsageFlagBits.BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT'
-- or
-- 'Vulkan.Core10.Enums.BufferUsageFlagBits.BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT'
--
-- - #VUID-VkBufferViewCreateInfo-buffer-00933# If @buffer@ was created
-- with @usage@ containing
-- 'Vulkan.Core10.Enums.BufferUsageFlagBits.BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT',
-- @format@ /must/ be supported for uniform texel buffers, as specified
-- by the
-- 'Vulkan.Core10.Enums.FormatFeatureFlagBits.FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT'
-- flag in
-- 'Vulkan.Core10.DeviceInitialization.FormatProperties'::@bufferFeatures@
-- returned by
-- 'Vulkan.Core10.DeviceInitialization.getPhysicalDeviceFormatProperties'
--
-- - #VUID-VkBufferViewCreateInfo-buffer-00934# If @buffer@ was created
-- with @usage@ containing
-- 'Vulkan.Core10.Enums.BufferUsageFlagBits.BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT',
-- @format@ /must/ be supported for storage texel buffers, as specified
-- by the
-- 'Vulkan.Core10.Enums.FormatFeatureFlagBits.FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT'
-- flag in
-- 'Vulkan.Core10.DeviceInitialization.FormatProperties'::@bufferFeatures@
-- returned by
-- 'Vulkan.Core10.DeviceInitialization.getPhysicalDeviceFormatProperties'
--
-- - #VUID-VkBufferViewCreateInfo-buffer-00935# If @buffer@ is non-sparse
-- then it /must/ be bound completely and contiguously to a single
-- 'Vulkan.Core10.Handles.DeviceMemory' object
--
-- - #VUID-VkBufferViewCreateInfo-offset-02749# If the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#features-texelBufferAlignment texelBufferAlignment>
-- feature is not enabled, @offset@ /must/ be a multiple of
-- 'Vulkan.Core10.DeviceInitialization.PhysicalDeviceLimits'::@minTexelBufferOffsetAlignment@
--
-- - #VUID-VkBufferViewCreateInfo-buffer-02750# If the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#features-texelBufferAlignment texelBufferAlignment>
-- feature is enabled and if @buffer@ was created with @usage@
-- containing
-- 'Vulkan.Core10.Enums.BufferUsageFlagBits.BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT',
-- @offset@ /must/ be a multiple of the lesser of
-- 'Vulkan.Core13.Promoted_From_VK_EXT_texel_buffer_alignment.PhysicalDeviceTexelBufferAlignmentProperties'::@storageTexelBufferOffsetAlignmentBytes@
-- or, if
-- 'Vulkan.Core13.Promoted_From_VK_EXT_texel_buffer_alignment.PhysicalDeviceTexelBufferAlignmentProperties'::@storageTexelBufferOffsetSingleTexelAlignment@
-- is 'Vulkan.Core10.FundamentalTypes.TRUE', the size of a texel of the
-- requested @format@. If the size of a texel is a multiple of three
-- bytes, then the size of a single component of @format@ is used
-- instead
--
-- - #VUID-VkBufferViewCreateInfo-buffer-02751# If the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#features-texelBufferAlignment texelBufferAlignment>
-- feature is enabled and if @buffer@ was created with @usage@
-- containing
-- 'Vulkan.Core10.Enums.BufferUsageFlagBits.BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT',
-- @offset@ /must/ be a multiple of the lesser of
-- 'Vulkan.Core13.Promoted_From_VK_EXT_texel_buffer_alignment.PhysicalDeviceTexelBufferAlignmentProperties'::@uniformTexelBufferOffsetAlignmentBytes@
-- or, if
-- 'Vulkan.Core13.Promoted_From_VK_EXT_texel_buffer_alignment.PhysicalDeviceTexelBufferAlignmentProperties'::@uniformTexelBufferOffsetSingleTexelAlignment@
-- is 'Vulkan.Core10.FundamentalTypes.TRUE', the size of a texel of the
-- requested @format@. If the size of a texel is a multiple of three
-- bytes, then the size of a single component of @format@ is used
-- instead
--
-- == Valid Usage (Implicit)
--
-- - #VUID-VkBufferViewCreateInfo-sType-sType# @sType@ /must/ be
-- 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO'
--
-- - #VUID-VkBufferViewCreateInfo-pNext-pNext# @pNext@ /must/ be @NULL@
--
-- - #VUID-VkBufferViewCreateInfo-flags-zerobitmask# @flags@ /must/ be
-- @0@
--
-- - #VUID-VkBufferViewCreateInfo-buffer-parameter# @buffer@ /must/ be a
-- valid 'Vulkan.Core10.Handles.Buffer' handle
--
-- - #VUID-VkBufferViewCreateInfo-format-parameter# @format@ /must/ be a
-- valid 'Vulkan.Core10.Enums.Format.Format' value
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_VERSION_1_0 VK_VERSION_1_0>,
-- 'Vulkan.Core10.Handles.Buffer',
-- 'Vulkan.Core10.Enums.BufferViewCreateFlags.BufferViewCreateFlags',
-- 'Vulkan.Core10.FundamentalTypes.DeviceSize',
-- 'Vulkan.Core10.Enums.Format.Format',
-- 'Vulkan.Core10.Enums.StructureType.StructureType', 'createBufferView'
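--
-- A construction sketch using the 'Vulkan.Zero.Zero' instance (field values
-- are illustrative; 'FORMAT_R32_SFLOAT' and 'WHOLE_SIZE' live in
-- 'Vulkan.Core10.Enums.Format' and 'Vulkan.Core10.APIConstants', which this
-- module does not re-export):
--
-- > zero { buffer = someBuffer
-- >      , format = FORMAT_R32_SFLOAT
-- >      , offset = 0
-- >      , range  = WHOLE_SIZE
-- >      }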
data BufferViewCreateInfo = BufferViewCreateInfo
{ -- | @flags@ is reserved for future use.
flags :: BufferViewCreateFlags
, -- | @buffer@ is a 'Vulkan.Core10.Handles.Buffer' on which the view will be
-- created.
buffer :: Buffer
, -- | @format@ is a 'Vulkan.Core10.Enums.Format.Format' describing the format
-- of the data elements in the buffer.
format :: Format
, -- | @offset@ is an offset in bytes from the base address of the buffer.
-- Accesses to the buffer view from shaders use addressing that is relative
-- to this starting offset.
offset :: DeviceSize
, -- | @range@ is a size in bytes of the buffer view. If @range@ is equal to
-- 'Vulkan.Core10.APIConstants.WHOLE_SIZE', the range from @offset@ to the
-- end of the buffer is used. If 'Vulkan.Core10.APIConstants.WHOLE_SIZE' is
-- used and the remaining size of the buffer is not a multiple of the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#texel-block-size texel block size>
-- of @format@, the nearest smaller multiple is used.
range :: DeviceSize
}
deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (BufferViewCreateInfo)
#endif
deriving instance Show BufferViewCreateInfo
instance ToCStruct BufferViewCreateInfo where
withCStruct x f = allocaBytes 56 $ \p -> pokeCStruct p x (f p)
pokeCStruct p BufferViewCreateInfo{..} f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr BufferViewCreateFlags)) (flags)
poke ((p `plusPtr` 24 :: Ptr Buffer)) (buffer)
poke ((p `plusPtr` 32 :: Ptr Format)) (format)
poke ((p `plusPtr` 40 :: Ptr DeviceSize)) (offset)
poke ((p `plusPtr` 48 :: Ptr DeviceSize)) (range)
f
cStructSize = 56
cStructAlignment = 8
pokeZeroCStruct p f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 24 :: Ptr Buffer)) (zero)
poke ((p `plusPtr` 32 :: Ptr Format)) (zero)
poke ((p `plusPtr` 40 :: Ptr DeviceSize)) (zero)
poke ((p `plusPtr` 48 :: Ptr DeviceSize)) (zero)
f
instance FromCStruct BufferViewCreateInfo where
peekCStruct p = do
flags <- peek @BufferViewCreateFlags ((p `plusPtr` 16 :: Ptr BufferViewCreateFlags))
buffer <- peek @Buffer ((p `plusPtr` 24 :: Ptr Buffer))
format <- peek @Format ((p `plusPtr` 32 :: Ptr Format))
offset <- peek @DeviceSize ((p `plusPtr` 40 :: Ptr DeviceSize))
range <- peek @DeviceSize ((p `plusPtr` 48 :: Ptr DeviceSize))
pure $ BufferViewCreateInfo
flags buffer format offset range
instance Storable BufferViewCreateInfo where
sizeOf ~_ = 56
alignment ~_ = 8
peek = peekCStruct
poke ptr poked = pokeCStruct ptr poked (pure ())
instance Zero BufferViewCreateInfo where
zero = BufferViewCreateInfo
zero
zero
zero
zero
zero
|
expipiplus1/vulkan
|
src/Vulkan/Core10/BufferView.hs
|
Haskell
|
bsd-3-clause
| 20,307
|
module BowlingKata.Day6Spec (spec) where
import Test.Hspec
import BowlingKata.Day6 (score)
spec :: Spec
spec = do
it "is a gutter game"
((score . replicate 20 $ 0) == 0)
it "rolls all ones"
((score . replicate 20 $ 1) == 20)
it "rolls one spare"
((score $ 5:5:3:(replicate 17 $ 0)) == 16)
it "rolls one strike"
((score $ 10:4:3:(replicate 16 $ 0)) == 24)
it "is a perfect game"
((score . replicate 12 $ 10) == 300)
|
Alex-Diez/haskell-tdd-kata
|
old-katas/test/BowlingKata/Day6Spec.hs
|
Haskell
|
bsd-3-clause
| 537
|
{-# LANGUAGE BangPatterns #-}
module BinaryHeapSTMSpec where
import Control.Concurrent.STM
import Data.IORef (readIORef)
import Data.List (group, sort)
import Test.Hspec
import qualified BinaryHeapSTM as P
spec :: Spec
spec = do
describe "base priority queue" $ do
it "queues entries based on weight" $ do
q <- atomically $ P.new 100
e1 <- atomically $ P.newEntry 1 201
atomically $ P.enqueue e1 q
e2 <- atomically $ P.newEntry 3 101
atomically $ P.enqueue e2 q
e3 <- atomically $ P.newEntry 5 1
atomically $ P.enqueue e3 q
xs <- enqdeq q 1000
map length (group (sort xs)) `shouldBe` [664,333,3]
it "deletes properly" $ do
q <- atomically $ P.new 100
e1 <- atomically $ P.newEntry 1 201
e3 <- atomically $ P.newEntry 3 50
e5 <- atomically $ P.newEntry 5 5
e7 <- atomically $ P.newEntry 7 1
atomically $ P.enqueue e1 q
atomically $ P.enqueue e3 q
atomically $ P.enqueue e5 q
atomically $ P.enqueue e7 q
i1 <- atomically $ P.dequeue q
atomically (readTVar (P.item i1)) `shouldReturn` 1
atomically $ P.delete e5 q
i3 <- atomically $ P.dequeue q
atomically (readTVar (P.item i3)) `shouldReturn` 3
i7 <- atomically $ P.dequeue q
atomically (readTVar (P.item i7)) `shouldReturn` 7
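-- Repeatedly dequeue the head entry, re-enqueue it, and record its item;
-- after 'num' rounds the grouped counts of the recorded items reflect the
-- relative weights of the queued entries.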
enqdeq :: P.PriorityQueue Int -> Int -> IO [Int]
enqdeq pq num = loop pq num []
where
loop _ 0 vs = return vs
loop !q !n vs = do
ent <- atomically $ P.dequeue q
atomically $ P.enqueue ent q
v <- atomically . readTVar $ P.item ent
loop q (n - 1) (v:vs)
|
kazu-yamamoto/http2
|
bench-priority/test/BinaryHeapSTMSpec.hs
|
Haskell
|
bsd-3-clause
| 1,801
|
{-- snippet all --}
module PodParser where
import PodTypes
import Text.XML.HaXml
import Text.XML.HaXml.Parse
import Text.XML.HaXml.Html.Generate(showattr)
import Data.Char
import Data.List
data Item = Item {itemtitle :: String,
enclosureurl :: String
}
deriving (Eq, Show, Read)
data Feed = Feed {channeltitle :: String,
items :: [Item]}
deriving (Eq, Show, Read)
{- | Given a podcast and an Item, produce an Episode -}
item2ep :: Podcast -> Item -> Episode
item2ep pc item =
Episode {epId = 0,
epCast = pc,
epURL = enclosureurl item,
epDone = False}
{- | Parse the data from a given string, with the given name to use
in error messages. -}
parse :: String -> String -> Feed
parse content name =
Feed {channeltitle = getTitle doc,
items = getEnclosures doc}
where parseResult = xmlParse name (stripUnicodeBOM content)
doc = getContent parseResult
getContent :: Document -> Content
getContent (Document _ _ e _) = CElem e
{- | Some Unicode documents begin with a binary sequence;
strip it off before processing. -}
stripUnicodeBOM :: String -> String
stripUnicodeBOM ('\xef':'\xbb':'\xbf':x) = x
stripUnicodeBOM x = x
{- | Pull out the channel part of the document.
Note that HaXml defines CFilter as:
> type CFilter = Content -> [Content]
-}
channel :: CFilter
channel = tag "rss" /> tag "channel"
getTitle :: Content -> String
getTitle doc =
contentToStringDefault "Untitled Podcast"
(channel /> tag "title" /> txt $ doc)
getEnclosures :: Content -> [Item]
getEnclosures doc =
concatMap procItem $ getItems doc
where procItem :: Content -> [Item]
procItem item = concatMap (procEnclosure title) enclosure
where title = contentToStringDefault "Untitled Episode"
(keep /> tag "title" /> txt $ item)
enclosure = (keep /> tag "enclosure") item
getItems :: CFilter
getItems = channel /> tag "item"
procEnclosure :: String -> Content -> [Item]
procEnclosure title enclosure =
map makeItem (showattr "url" enclosure)
where makeItem :: Content -> Item
makeItem x = Item {itemtitle = title,
enclosureurl = contentToString [x]}
{- | Convert [Content] to a printable String, with a default if the
passed-in [Content] is [], signifying a lack of a match. -}
contentToStringDefault :: String -> [Content] -> String
contentToStringDefault msg [] = msg
contentToStringDefault _ x = contentToString x
{- | Convert [Content] to a printable string, taking care to unescape it.
An implementation without unescaping would simply be:
> contentToString = concatMap (show . content)
Because HaXml's unescaping only works on Elements, we must make sure that
whatever Content we have is wrapped in an Element, then use txt to
pull the insides back out. -}
contentToString :: [Content] -> String
contentToString =
concatMap procContent
where procContent x =
verbatim $ keep /> txt $ CElem (unesc (fakeElem x))
fakeElem :: Content -> Element
fakeElem x = Elem "fake" [] [x]
unesc :: Element -> Element
unesc = xmlUnEscape stdXmlEscaper
{-- /snippet all --}
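{- A usage sketch (illustrative; 'rssText' is a hypothetical String holding
   the raw feed XML):

   > let feed = parse rssText "feed.xml"
   > channeltitle feed             -- channel <title>, or "Untitled Podcast"
   > map enclosureurl (items feed) -- one URL per <enclosure> under <item>
-}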
|
binesiyu/ifl
|
examples/ch23/PodParser.hs
|
Haskell
|
mit
| 3,472
|
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
-- Note: this file may contain spoilers
-- (although I would be really surprised if it did, I haven't seen the films)
module StarWars where
import GHC.Generics
import Data.Generics.Product
data Episode = NEWHOPE | EMPIRE | JEDI
deriving (Generic, Show, Eq)
data Character = Character
{ name :: String
, friends :: [Character]
, appearsIn :: [Episode]
} deriving (Generic, Show, Eq)
data Human = Human
{ name :: String
, friends :: [Character]
, appearsIn :: [Episode]
, homePlanet :: String
} deriving (Generic, Show)
data Droid = Droid
{ friends :: [Character]
, appearsIn :: [Episode]
, name :: String
, primaryFunction :: String
} deriving (Generic, Show)
luke :: Human
luke = Human
{ name = "Luke Skywalker"
, friends = []
, appearsIn = [NEWHOPE, EMPIRE, JEDI]
, homePlanet = "Saturn (?)"
}
r2d2 :: Droid
r2d2 = Droid
{ name = "R2-D2"
, friends = [upcast luke]
, appearsIn = [NEWHOPE, EMPIRE, JEDI]
, primaryFunction = "repair ships"
}
c3po :: Droid
c3po = Droid
{ name = "C3PO"
, friends = [upcast r2d2, upcast luke]
, appearsIn = [NEWHOPE, EMPIRE, JEDI]
, primaryFunction = "protocol and human relations"
}
getName :: HasField' "name" r a => r -> a
getName = getField @"name"
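-- getName works uniformly over any record with a "name" field, e.g.
-- getName luke == "Luke Skywalker" and getName r2d2 == "R2-D2".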
-- upcast :: Subtype a b => a -> b
characters :: [Character]
characters = [upcast r2d2, upcast luke, upcast c3po]
names :: [String]
names = map getName characters
-- => ["R2-D2","Luke Skywalker","C3PO"]
|
kcsongor/generic-lens
|
generic-optics/examples/StarWars.hs
|
Haskell
|
bsd-3-clause
| 1,786
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE TypeOperators #-}
module CommonServer where
import Data.Aeson
import GHC.Generics
------------------------------
-- File Structure
------------------------------
data File = File {
fileName :: FilePath,
fileContent :: String
} deriving (Eq, Show, Generic)
instance ToJSON File
instance FromJSON File
------------------------------
-- Server Identity
------------------------------
data Identity = Identity {
address :: String,
port :: String,
serverType :: ServerType
} deriving (Eq, Show, Generic)
instance ToJSON Identity
instance FromJSON Identity
------------------------------
-- Registered Server Types
------------------------------
data ServerType =
FileServer |
DirectoryServer |
ProxyServer |
SecurityServer |
TransactionServer |
IdentityServer |
ReplicationServer
deriving(Eq, Show, Generic)
instance ToJSON ServerType
instance FromJSON ServerType
------------------------------
-- Resources Directory
------------------------------
data Resources = Resources {
path :: String
} deriving (Eq, Show, Generic)
instance ToJSON Resources
instance FromJSON Resources
------------------------------
-- Client Data
------------------------------
data Client = Client {
username :: String,
password :: String
} deriving (Eq, Show, Generic)
instance ToJSON Client
instance FromJSON Client
------------------------------
-- Security Token
------------------------------
data Token = Token {
sessionId :: String,
sessionKey :: String,
ticket :: String,
client :: Identity
} deriving (Eq, Show, Generic)
instance ToJSON Token
instance FromJSON Token
------------------------------
-- Response Packet
------------------------------
data Response = Response {
code :: ResponseCode,
server :: Identity
} deriving (Eq, Show, Generic)
instance ToJSON Response
instance FromJSON Response
------------------------------
-- Response Codes
------------------------------
data ResponseCode =
FileUploadComplete |
FileUploadError |
HandshakeSuccessful |
HandshakeError |
IdentityFound |
IdentityNotFound |
IdentityReceived
deriving(Eq, Show, Generic)
instance ToJSON ResponseCode
instance FromJSON ResponseCode
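-- A small sketch of the derived JSON round trip (generic Aeson instances use
-- the record field names as object keys):
--
-- > encode (File "notes.txt" "hello")
-- >   -- an object with keys "fileName" and "fileContent"
-- > decode "{\"fileName\":\"notes.txt\",\"fileContent\":\"hello\"}" :: Maybe File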
|
Coggroach/Gluon
|
.stack-work/intero/intero8374TU0.hs
|
Haskell
|
bsd-3-clause
| 2,366
|
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# OPTIONS_GHC -fno-warn-deprecations #-}
module Network.Wai.Handler.Warp.Request (
recvRequest
, headerLines
, pauseTimeoutKey
, getFileInfoKey
, getClientCertificateKey
, NoKeepAliveRequest (..)
) where
import qualified Control.Concurrent as Conc (yield)
import UnliftIO (throwIO, Exception)
import Data.Array ((!))
import qualified Data.ByteString as S
import qualified Data.ByteString.Unsafe as SU
import qualified Data.CaseInsensitive as CI
import qualified Data.IORef as I
import Data.Typeable (Typeable)
import qualified Data.Vault.Lazy as Vault
import Data.X509
import qualified Network.HTTP.Types as H
import Network.Socket (SockAddr)
import Network.Wai
import Network.Wai.Handler.Warp.Types
import Network.Wai.Internal
import Prelude hiding (lines)
import System.IO.Unsafe (unsafePerformIO)
import qualified System.TimeManager as Timeout
import Network.Wai.Handler.Warp.Conduit
import Network.Wai.Handler.Warp.FileInfoCache
import Network.Wai.Handler.Warp.Header
import Network.Wai.Handler.Warp.Imports hiding (readInt, lines)
import Network.Wai.Handler.Warp.ReadInt
import Network.Wai.Handler.Warp.RequestHeader
import Network.Wai.Handler.Warp.Settings (Settings, settingsNoParsePath, settingsMaxTotalHeaderLength)
----------------------------------------------------------------
-- | Receiving an HTTP request from 'Connection' and parsing its header
-- to create 'Request'.
recvRequest :: Bool -- ^ first request on this connection?
-> Settings
-> Connection
-> InternalInfo
-> Timeout.Handle
-> SockAddr -- ^ Peer's address.
-> Source -- ^ Where HTTP request comes from.
-> Transport
-> IO (Request
,Maybe (I.IORef Int)
,IndexedHeader
,IO ByteString) -- ^
-- 'Request' passed to 'Application',
-- how many bytes remain to be consumed, if known
-- 'IndexedHeader' of HTTP request for internal use,
-- Body producing action used for flushing the request body
recvRequest firstRequest settings conn ii th addr src transport = do
hdrlines <- headerLines (settingsMaxTotalHeaderLength settings) firstRequest src
(method, unparsedPath, path, query, httpversion, hdr) <- parseHeaderLines hdrlines
let idxhdr = indexRequestHeader hdr
expect = idxhdr ! fromEnum ReqExpect
cl = idxhdr ! fromEnum ReqContentLength
te = idxhdr ! fromEnum ReqTransferEncoding
handle100Continue = handleExpect conn httpversion expect
rawPath = if settingsNoParsePath settings then unparsedPath else path
vaultValue = Vault.insert pauseTimeoutKey (Timeout.pause th)
$ Vault.insert getFileInfoKey (getFileInfo ii)
$ Vault.insert getClientCertificateKey (getTransportClientCertificate transport)
Vault.empty
(rbody, remainingRef, bodyLength) <- bodyAndSource src cl te
-- body producing function which will produce '100-continue', if needed
rbody' <- timeoutBody remainingRef th rbody handle100Continue
-- body producing function which will never produce 100-continue
rbodyFlush <- timeoutBody remainingRef th rbody (return ())
let req = Request {
requestMethod = method
, httpVersion = httpversion
, pathInfo = H.decodePathSegments path
, rawPathInfo = rawPath
, rawQueryString = query
, queryString = H.parseQuery query
, requestHeaders = hdr
, isSecure = isTransportSecure transport
, remoteHost = addr
, requestBody = rbody'
, vault = vaultValue
, requestBodyLength = bodyLength
, requestHeaderHost = idxhdr ! fromEnum ReqHost
, requestHeaderRange = idxhdr ! fromEnum ReqRange
, requestHeaderReferer = idxhdr ! fromEnum ReqReferer
, requestHeaderUserAgent = idxhdr ! fromEnum ReqUserAgent
}
return (req, remainingRef, idxhdr, rbodyFlush)
----------------------------------------------------------------
headerLines :: Int -> Bool -> Source -> IO [ByteString]
headerLines maxTotalHeaderLength firstRequest src = do
bs <- readSource src
if S.null bs
-- When we're working on a keep-alive connection and trying to
-- get the second or later request, we don't want to treat the
-- lack of data as a real exception. See the http1 function in
-- the Run module for more details.
then if firstRequest then throwIO ConnectionClosedByPeer else throwIO NoKeepAliveRequest
else push maxTotalHeaderLength src (THStatus 0 0 id id) bs
data NoKeepAliveRequest = NoKeepAliveRequest
deriving (Show, Typeable)
instance Exception NoKeepAliveRequest
----------------------------------------------------------------
handleExpect :: Connection
-> H.HttpVersion
-> Maybe HeaderValue
-> IO ()
handleExpect conn ver (Just "100-continue") = do
connSendAll conn continue
Conc.yield
where
continue
| ver == H.http11 = "HTTP/1.1 100 Continue\r\n\r\n"
| otherwise = "HTTP/1.0 100 Continue\r\n\r\n"
handleExpect _ _ _ = return ()
----------------------------------------------------------------
bodyAndSource :: Source
-> Maybe HeaderValue -- ^ content length
-> Maybe HeaderValue -- ^ transfer-encoding
-> IO (IO ByteString
,Maybe (I.IORef Int)
,RequestBodyLength
)
bodyAndSource src cl te
| chunked = do
csrc <- mkCSource src
return (readCSource csrc, Nothing, ChunkedBody)
| otherwise = do
isrc@(ISource _ remaining) <- mkISource src len
return (readISource isrc, Just remaining, bodyLen)
where
len = toLength cl
bodyLen = KnownLength $ fromIntegral len
chunked = isChunked te
toLength :: Maybe HeaderValue -> Int
toLength Nothing = 0
toLength (Just bs) = readInt bs
isChunked :: Maybe HeaderValue -> Bool
isChunked (Just bs) = CI.foldCase bs == "chunked"
isChunked _ = False
----------------------------------------------------------------
timeoutBody :: Maybe (I.IORef Int) -- ^ remaining
-> Timeout.Handle
-> IO ByteString
-> IO ()
-> IO (IO ByteString)
timeoutBody remainingRef timeoutHandle rbody handle100Continue = do
isFirstRef <- I.newIORef True
let checkEmpty =
case remainingRef of
Nothing -> return . S.null
Just ref -> \bs -> if S.null bs
then return True
else do
x <- I.readIORef ref
return $! x <= 0
return $ do
isFirst <- I.readIORef isFirstRef
when isFirst $ do
-- Only check if we need to produce the 100 Continue status
-- when asking for the first chunk of the body
handle100Continue
-- Timeout handling was paused after receiving the full request
-- headers. Now we need to resume it to avoid a slowloris
-- attack during request body sending.
Timeout.resume timeoutHandle
I.writeIORef isFirstRef False
bs <- rbody
-- As soon as we finish receiving the request body, whether
-- because the application is not interested in more bytes, or
-- because there is no more data available, pause the timeout
-- handler again.
isEmpty <- checkEmpty bs
when isEmpty (Timeout.pause timeoutHandle)
return bs
----------------------------------------------------------------
type BSEndo = ByteString -> ByteString
type BSEndoList = [ByteString] -> [ByteString]
data THStatus = THStatus
!Int -- running total byte count (excluding current header chunk)
!Int -- current header chunk byte count
BSEndoList -- previously parsed lines
BSEndo -- bytestrings to be prepended
----------------------------------------------------------------
{- FIXME
close :: Sink ByteString IO a
close = throwIO IncompleteHeaders
-}
push :: Int -> Source -> THStatus -> ByteString -> IO [ByteString]
push maxTotalHeaderLength src (THStatus totalLen chunkLen lines prepend) bs'
-- Too many bytes
| currentTotal > maxTotalHeaderLength = throwIO OverLargeHeader
| otherwise = push' mNL
where
currentTotal = totalLen + chunkLen
-- bs: current header chunk, plus maybe (parts of) next header
bs = prepend bs'
bsLen = S.length bs
-- Maybe newline
-- Returns: Maybe
-- ( length of this chunk up to newline
-- , position of newline in relation to entire current header
-- , is this part of a multiline header
-- )
mNL = do
-- 10 is the code point for newline (\n)
chunkNL <- S.elemIndex 10 bs'
let headerNL = chunkNL + S.length (prepend "")
chunkNLlen = chunkNL + 1
-- check if there are two more bytes in the bs
-- if so, see if the second of those is a horizontal space
if bsLen > headerNL + 1 then
let c = S.index bs (headerNL + 1)
b = case headerNL of
0 -> True
1 -> S.index bs 0 == 13
_ -> False
isMultiline = not b && (c == 32 || c == 9)
in Just (chunkNLlen, headerNL, isMultiline)
else
Just (chunkNLlen, headerNL, False)
{-# INLINE push' #-}
push' :: Maybe (Int, Int, Bool) -> IO [ByteString]
-- No newline found in this chunk. Add it to the prepend,
-- update the length, and continue processing.
push' Nothing = do
bst <- readSource' src
when (S.null bst) $ throwIO IncompleteHeaders
push maxTotalHeaderLength src status bst
where
prepend' = S.append bs
thisChunkLen = S.length bs'
newChunkLen = chunkLen + thisChunkLen
status = THStatus totalLen newChunkLen lines prepend'
-- Found a newline, but next line continues as a multiline header
push' (Just (chunkNLlen, end, True)) =
push maxTotalHeaderLength src status rest
where
rest = S.drop (end + 1) bs
prepend' = S.append (SU.unsafeTake (checkCR bs end) bs)
-- If we'd just update the entire current chunk up to newline
-- we wouldn't count all the dropped newlines in between.
-- So update 'chunkLen' with current chunk up to newline
-- and use 'chunkLen' later on to add to 'totalLen'.
newChunkLen = chunkLen + chunkNLlen
status = THStatus totalLen newChunkLen lines prepend'
-- Found a newline at position end.
push' (Just (chunkNLlen, end, False))
-- leftover
| S.null line = do
when (start < bsLen) $ leftoverSource src (SU.unsafeDrop start bs)
return (lines [])
-- more headers
| otherwise = let lines' = lines . (line:)
newTotalLength = totalLen + chunkLen + chunkNLlen
status = THStatus newTotalLength 0 lines' id
in if start < bsLen then
-- more bytes in this chunk, push again
let bs'' = SU.unsafeDrop start bs
in push maxTotalHeaderLength src status bs''
else do
-- no more bytes in this chunk, ask for more
bst <- readSource' src
when (S.null bst) $ throwIO IncompleteHeaders
push maxTotalHeaderLength src status bst
where
start = end + 1 -- start of next chunk
line = SU.unsafeTake (checkCR bs end) bs
{-# INLINE checkCR #-}
checkCR :: ByteString -> Int -> Int
checkCR bs pos = if pos > 0 && 13 == S.index bs p then p else pos -- 13 is CR (\r)
where
!p = pos - 1
pauseTimeoutKey :: Vault.Key (IO ())
pauseTimeoutKey = unsafePerformIO Vault.newKey
{-# NOINLINE pauseTimeoutKey #-}
getFileInfoKey :: Vault.Key (FilePath -> IO FileInfo)
getFileInfoKey = unsafePerformIO Vault.newKey
{-# NOINLINE getFileInfoKey #-}
getClientCertificateKey :: Vault.Key (Maybe CertificateChain)
getClientCertificateKey = unsafePerformIO Vault.newKey
{-# NOINLINE getClientCertificateKey #-}
|
kazu-yamamoto/wai
|
warp/Network/Wai/Handler/Warp/Request.hs
|
Haskell
|
mit
| 12,688
|
{-
(c) The GRASP/AQUA Project, Glasgow University, 1993-1998
\section[WwLib]{A library for the ``worker\/wrapper'' back-end to the strictness analyser}
-}
{-# LANGUAGE CPP #-}
module WwLib ( mkWwBodies, mkWWstr, mkWorkerArgs
, deepSplitProductType_maybe, findTypeShape
) where
#include "HsVersions.h"
import CoreSyn
import CoreUtils ( exprType, mkCast )
import Id
import IdInfo ( vanillaIdInfo )
import DataCon
import Demand
import MkCore ( mkRuntimeErrorApp, aBSENT_ERROR_ID, mkCoreUbxTup )
import MkId ( voidArgId, voidPrimId )
import TysPrim ( voidPrimTy )
import TysWiredIn ( tupleDataCon )
import Type
import Coercion
import FamInstEnv
import BasicTypes ( Boxity(..), OneShotInfo(..), worstOneShot )
import Literal ( absentLiteralOf )
import TyCon
import UniqSupply
import Unique
import Maybes
import Util
import Outputable
import DynFlags
import FastString
import ListSetOps
{-
************************************************************************
* *
\subsection[mkWrapperAndWorker]{@mkWrapperAndWorker@}
* *
************************************************************************
Here's an example. The original function is:
\begin{verbatim}
g :: forall a . Int -> [a] -> a
g = \/\ a -> \ x ys ->
case x of
0 -> head ys
_ -> head (tail ys)
\end{verbatim}
From this, we want to produce:
\begin{verbatim}
-- wrapper (an unfolding)
g :: forall a . Int -> [a] -> a
g = \/\ a -> \ x ys ->
case x of
I# x# -> $wg a x# ys
-- call the worker; don't forget the type args!
-- worker
$wg :: forall a . Int# -> [a] -> a
$wg = \/\ a -> \ x# ys ->
let
x = I# x#
in
case x of -- note: body of g moved intact
0 -> head ys
_ -> head (tail ys)
\end{verbatim}
Something we have to be careful about: Here's an example:
\begin{verbatim}
-- "f" strictness: U(P)U(P)
f (I# a) (I# b) = a +# b
g = f -- "g" strictness same as "f"
\end{verbatim}
\tr{f} will get a worker all nice and friendly-like; that's good.
{\em But we don't want a worker for \tr{g}}, even though it has the
same strictness as \tr{f}. Doing so could break laziness, at best.
Consequently, we insist that the number of strictness-info items is
exactly the same as the number of lambda-bound arguments. (This is
probably slightly paranoid, but OK in practice.) If it isn't the
same, we ``revise'' the strictness info, so that we won't propagate
the unusable strictness-info into the interfaces.
************************************************************************
* *
\subsection{The worker wrapper core}
* *
************************************************************************
@mkWwBodies@ is called when doing the worker\/wrapper split inside a module.
-}
mkWwBodies :: DynFlags
-> FamInstEnvs
-> Type -- Type of original function
-> [Demand] -- Strictness of original function
-> DmdResult -- Info about function result
-> [OneShotInfo] -- One-shot-ness of the function, value args only
-> UniqSM (Maybe ([Demand], -- Demands for worker (value) args
Id -> CoreExpr, -- Wrapper body, lacking only the worker Id
CoreExpr -> CoreExpr)) -- Worker body, lacking the original function rhs
-- wrap_fn_args E = \x y -> E
-- work_fn_args E = E x y
-- wrap_fn_str E = case x of { (a,b) ->
-- case a of { (a1,a2) ->
-- E a1 a2 b y }}
-- work_fn_str E = \a1 a2 b y ->
-- let a = (a1,a2) in
-- let x = (a,b) in
-- E
mkWwBodies dflags fam_envs fun_ty demands res_info one_shots
= do { let arg_info = demands `zip` (one_shots ++ repeat NoOneShotInfo)
all_one_shots = foldr (worstOneShot . snd) OneShotLam arg_info
; (wrap_args, wrap_fn_args, work_fn_args, res_ty) <- mkWWargs emptyTCvSubst fun_ty arg_info
; (useful1, work_args, wrap_fn_str, work_fn_str) <- mkWWstr dflags fam_envs wrap_args
-- Do CPR w/w. See Note [Always do CPR w/w]
; (useful2, wrap_fn_cpr, work_fn_cpr, cpr_res_ty)
<- mkWWcpr (gopt Opt_CprAnal dflags) fam_envs res_ty res_info
; let (work_lam_args, work_call_args) = mkWorkerArgs dflags work_args all_one_shots cpr_res_ty
worker_args_dmds = [idDemandInfo v | v <- work_call_args, isId v]
wrapper_body = wrap_fn_args . wrap_fn_cpr . wrap_fn_str . applyToVars work_call_args . Var
worker_body = mkLams work_lam_args. work_fn_str . work_fn_cpr . work_fn_args
; if useful1 && not (only_one_void_argument) || useful2
then return (Just (worker_args_dmds, wrapper_body, worker_body))
else return Nothing
}
-- We use an INLINE unconditionally, even if the wrapper turns out to be
-- something trivial like
-- fw = ...
-- f = __inline__ (coerce T fw)
-- The point is to propagate the coerce to f's call sites, so even though
-- f's RHS is now trivial (size 1) we still want the __inline__ to prevent
-- fw from being inlined into f's RHS
where
-- Note [Do not split void functions]
only_one_void_argument
| [d] <- demands
, Just (arg_ty1, _) <- splitFunTy_maybe fun_ty
, isAbsDmd d && isVoidTy arg_ty1
= True
| otherwise
= False
{-
Note [Always do CPR w/w]
~~~~~~~~~~~~~~~~~~~~~~~~
At one time we refrained from doing CPR w/w for thunks, on the grounds that
we might duplicate work. But that is already handled by the demand analyser,
which doesn't give the CPR property if w/w might waste work: see
Note [CPR for thunks] in DmdAnal.
And if something *has* been given the CPR property and we don't w/w, it's
a disaster, because then the enclosing function might say it has the CPR
property, but now doesn't, and there is a cascade of disasters. A good example
is Trac #5920.
************************************************************************
* *
\subsection{Making wrapper args}
* *
************************************************************************
During worker-wrapper stuff we may end up with an unlifted thing
which we want to let-bind without losing laziness. So we
add a void argument. E.g.
f = /\a -> \x y z -> E::Int# -- E does not mention x,y,z
==>
fw = /\ a -> \void -> E
f = /\ a -> \x y z -> fw realworld
We use the state-token type which generates no code.
-}
mkWorkerArgs :: DynFlags -> [Var]
-> OneShotInfo -- Whether all arguments are one-shot
-> Type -- Type of body
-> ([Var], -- Lambda bound args
[Var]) -- Args at call site
mkWorkerArgs dflags args all_one_shot res_ty
| any isId args || not needsAValueLambda
= (args, args)
| otherwise
= (args ++ [newArg], args ++ [voidPrimId])
where
needsAValueLambda =
isUnliftedType res_ty
|| not (gopt Opt_FunToThunk dflags)
-- see Note [Protecting the last value argument]
-- see Note [All One-Shot Arguments of a Worker]
newArg = setIdOneShotInfo voidArgId all_one_shot
{-
Note [Protecting the last value argument]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If the user writes (\_ -> E), they might be intentionally disallowing
the sharing of E. Since absence analysis and worker-wrapper are keen
to remove such unused arguments, we add in a void argument to prevent
the function from becoming a thunk.
The user can avoid adding the void argument with the -ffun-to-thunk
flag. However, this can create sharing, which may be bad in two ways. 1) It can
create a space leak. 2) It can prevent inlining *under a lambda*. If w/w
removes the last argument from a function f, then f now looks like a thunk, and
so f can't be inlined *under a lambda*.
Note [All One-Shot Arguments of a Worker]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Sometimes, derived join-points are just lambda-lifted thunks, whose
only argument is of the unit type and is never used. This might
interfere with the absence analysis, basing on which results these
never-used arguments are eliminated in the worker. The additional
argument `all_one_shot` of `mkWorkerArgs` is to prevent this.
Example. Suppose we have
foo = \p(one-shot) q(one-shot). y + 3
Then we drop the unused args to give
foo = \pq. $wfoo void#
$wfoo = \void(one-shot). y + 3
But suppose foo didn't have all one-shot args:
foo = \p(not-one-shot) q(one-shot). expensive y + 3
Then we drop the unused args to give
foo = \pq. $wfoo void#
$wfoo = \void(not-one-shot). y + 3
If we made the void-arg one-shot we might inline an expensive
computation for y, which would be terrible!
************************************************************************
* *
\subsection{Coercion stuff}
* *
************************************************************************
We really want to "look through" coerces.
Reason: I've seen this situation:
let f = coerce T (\s -> E)
in \x -> case x of
p -> coerce T' f
q -> \s -> E2
r -> coerce T' f
If only we w/w'd f, we'd get
let f = coerce T (\s -> fw s)
fw = \s -> E
in ...
Now we'll inline f to get
let fw = \s -> E
in \x -> case x of
p -> fw
q -> \s -> E2
r -> fw
Now we'll see that fw has arity 1, and will arity expand
the \x to get what we want.
-}
-- mkWWargs just does eta expansion
-- is driven off the function type and arity.
-- It chomps bites off foralls, arrows, newtypes
-- and keeps repeating that until it's satisfied the supplied arity
mkWWargs :: TCvSubst -- Freshening substitution to apply to the type
-- See Note [Freshen type variables]
-> Type -- The type of the function
-> [(Demand,OneShotInfo)] -- Demands and one-shot info for value arguments
-> UniqSM ([Var], -- Wrapper args
CoreExpr -> CoreExpr, -- Wrapper fn
CoreExpr -> CoreExpr, -- Worker fn
Type) -- Type of wrapper body
mkWWargs subst fun_ty arg_info
| null arg_info
= return ([], id, id, substTy subst fun_ty)
| ((dmd,one_shot):arg_info') <- arg_info
, Just (arg_ty, fun_ty') <- splitFunTy_maybe fun_ty
= do { uniq <- getUniqueM
; let arg_ty' = substTy subst arg_ty
id = mk_wrap_arg uniq arg_ty' dmd one_shot
; (wrap_args, wrap_fn_args, work_fn_args, res_ty)
<- mkWWargs subst fun_ty' arg_info'
; return (id : wrap_args,
Lam id . wrap_fn_args,
work_fn_args . (`App` varToCoreExpr id),
res_ty) }
| Just (tv, fun_ty') <- splitForAllTy_maybe fun_ty
= do { let (subst', tv') = substTyVarBndr subst tv
-- This substTyVarBndr clones the type variable when necessary
-- See Note [Freshen type variables]
; (wrap_args, wrap_fn_args, work_fn_args, res_ty)
<- mkWWargs subst' fun_ty' arg_info
; return (tv' : wrap_args,
Lam tv' . wrap_fn_args,
work_fn_args . (`mkTyApps` [mkTyVarTy tv']),
res_ty) }
| Just (co, rep_ty) <- topNormaliseNewType_maybe fun_ty
-- The newtype case is for when the function has
-- a newtype after the arrow (rare)
--
-- It's also important when we have a function returning (say) a pair
-- wrapped in a newtype, at least if CPR analysis can look
-- through such newtypes, which it probably can since they are
-- simply coerces.
= do { (wrap_args, wrap_fn_args, work_fn_args, res_ty)
<- mkWWargs subst rep_ty arg_info
; return (wrap_args,
\e -> Cast (wrap_fn_args e) (mkSymCo co),
\e -> work_fn_args (Cast e co),
res_ty) }
| otherwise
= WARN( True, ppr fun_ty ) -- Should not happen: if there is a demand
return ([], id, id, substTy subst fun_ty) -- then there should be a function arrow
applyToVars :: [Var] -> CoreExpr -> CoreExpr
applyToVars vars fn = mkVarApps fn vars
mk_wrap_arg :: Unique -> Type -> Demand -> OneShotInfo -> Id
mk_wrap_arg uniq ty dmd one_shot
= mkSysLocalOrCoVar (fsLit "w") uniq ty
`setIdDemandInfo` dmd
`setIdOneShotInfo` one_shot
{-
Note [Freshen type variables]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we do a worker/wrapper split, we must not use shadowed names,
else we'll get
f = /\ a /\a. fw a a
which is obviously wrong. Type variables can in principle shadow,
within a type (e.g. forall a. a -> forall a. a->a). But type
variables *are* mentioned in <blah>, so we must substitute.
That's why we carry the TCvSubst through mkWWargs
************************************************************************
* *
\subsection{Strictness stuff}
* *
************************************************************************
-}
mkWWstr :: DynFlags
-> FamInstEnvs
-> [Var] -- Wrapper args; have their demand info on them
-- *Includes type variables*
-> UniqSM (Bool, -- Is this useful
[Var], -- Worker args
CoreExpr -> CoreExpr, -- Wrapper body, lacking the worker call
-- and without its lambdas
-- This fn adds the unboxing
CoreExpr -> CoreExpr) -- Worker body, lacking the original body of the function,
-- and lacking its lambdas.
-- This fn does the reboxing
mkWWstr _ _ []
= return (False, [], nop_fn, nop_fn)
mkWWstr dflags fam_envs (arg : args) = do
(useful1, args1, wrap_fn1, work_fn1) <- mkWWstr_one dflags fam_envs arg
(useful2, args2, wrap_fn2, work_fn2) <- mkWWstr dflags fam_envs args
return (useful1 || useful2, args1 ++ args2, wrap_fn1 . wrap_fn2, work_fn1 . work_fn2)
{-
Note [Unpacking arguments with product and polymorphic demands]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The argument is unpacked in a case if it has a product type and has a
strict *and* used demand put on it. I.e., arguments, with demands such
as the following ones:
<S,U(U, L)>
<S(L,S),U>
will be unpacked, but
<S,U> or <B,U>
will not, because the pieces aren't used. This is quite important otherwise
we end up unpacking massive tuples passed to the bottoming function. Example:
f :: ((Int,Int) -> String) -> (Int,Int) -> a
f g pr = error (g pr)
main = print (f fst (1, error "no"))
Does 'main' print "error 1" or "error no"? We don't really want 'f'
to unbox its second argument. This actually happened in GHC's own
source code, in Packages.applyPackageFlag, which ended up un-boxing
the enormous DynFlags tuple, and being strict in the
as-yet-un-filled-in pkgState files.
-}
----------------------
-- mkWWstr_one wrap_arg = (useful, work_args, wrap_fn, work_fn)
-- * wrap_fn assumes wrap_arg is in scope,
-- brings into scope work_args (via cases)
-- * work_fn assumes work_args are in scope, and
-- brings into scope wrap_arg (via lets)
mkWWstr_one :: DynFlags -> FamInstEnvs -> Var
-> UniqSM (Bool, [Var], CoreExpr -> CoreExpr, CoreExpr -> CoreExpr)
mkWWstr_one dflags fam_envs arg
| isTyVar arg
= return (False, [arg], nop_fn, nop_fn)
-- See Note [Worker-wrapper for bottoming functions]
| isAbsDmd dmd
, Just work_fn <- mk_absent_let dflags arg
-- Absent case. We can't always handle absence for arbitrary
-- unlifted types, so we need to choose just the cases we can
--- (that's what mk_absent_let does)
= return (True, [], nop_fn, work_fn)
-- See Note [Worthy functions for Worker-Wrapper split]
| isSeqDmd dmd -- `seq` demand; evaluate in wrapper in the hope
-- of dropping seqs in the worker
= let arg_w_unf = arg `setIdUnfolding` evaldUnfolding
-- Tell the worker arg that it's sure to be evaluated
-- so that internal seqs can be dropped
in return (True, [arg_w_unf], mk_seq_case arg, nop_fn)
-- Pass the arg, anyway, even if it is in theory discarded
-- Consider
-- f x y = x `seq` y
-- x gets a (Eval (Poly Abs)) demand, but if we fail to pass it to the worker
-- we ABSOLUTELY MUST record that x is evaluated in the wrapper.
-- Something like:
-- f x y = x `seq` fw y
-- fw y = let x{Evald} = error "oops" in (x `seq` y)
-- If we don't pin on the "Evald" flag, the seq doesn't disappear, and
-- we end up evaluating the absent thunk.
-- But the Evald flag is pretty weird, and I worry that it might disappear
-- during simplification, so for now I've just nuked this whole case
| isStrictDmd dmd
, Just cs <- splitProdDmd_maybe dmd
-- See Note [Unpacking arguments with product and polymorphic demands]
, Just (data_con, inst_tys, inst_con_arg_tys, co)
<- deepSplitProductType_maybe fam_envs (idType arg)
, cs `equalLength` inst_con_arg_tys
-- See Note [mkWWstr and unsafeCoerce]
= do { (uniq1:uniqs) <- getUniquesM
; let unpk_args = zipWith mk_ww_local uniqs inst_con_arg_tys
unpk_args_w_ds = zipWithEqual "mkWWstr" set_worker_arg_info unpk_args cs
unbox_fn = mkUnpackCase (Var arg) co uniq1
data_con unpk_args
rebox_fn = Let (NonRec arg con_app)
con_app = mkConApp2 data_con inst_tys unpk_args `mkCast` mkSymCo co
; (_, worker_args, wrap_fn, work_fn) <- mkWWstr dflags fam_envs unpk_args_w_ds
; return (True, worker_args, unbox_fn . wrap_fn, work_fn . rebox_fn) }
-- Don't pass the arg, rebox instead
| otherwise -- Other cases
= return (False, [arg], nop_fn, nop_fn)
where
dmd = idDemandInfo arg
one_shot = idOneShotInfo arg
-- If the wrapper argument is a one-shot lambda, then
-- so should (all) the corresponding worker arguments be
-- This bites when we do w/w on a case join point
set_worker_arg_info worker_arg demand
= worker_arg `setIdDemandInfo` demand
`setIdOneShotInfo` one_shot
----------------------
nop_fn :: CoreExpr -> CoreExpr
nop_fn body = body
{-
Note [mkWWstr and unsafeCoerce]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
By using unsafeCoerce, it is possible to make the number of demands fail to
match the number of constructor arguments; this happened in Trac #8037.
If so, the worker/wrapper split doesn't work right and we get a Core Lint
bug. The fix here is simply to decline to do w/w if that happens.
************************************************************************
* *
             Type scrutiny that is specific to demand analysis
* *
************************************************************************
Note [Do not unpack class dictionaries]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we have
f :: Ord a => [a] -> Int -> a
{-# INLINABLE f #-}
and we worker/wrapper f, we'll get a worker with an INLINABLE pragma
(see Note [Worker-wrapper for INLINABLE functions] in WorkWrap), which
can still be specialised by the type-class specialiser, something like
fw :: Ord a => [a] -> Int# -> a
BUT if f is strict in the Ord dictionary, we might unpack it, to get
fw :: (a->a->Bool) -> [a] -> Int# -> a
and the type-class specialiser can't specialise that. An example is
Trac #6056.
Moreover, dictionaries can have a lot of fields, so unpacking them can
increase closure sizes.
Conclusion: don't unpack dictionaries.
-}
deepSplitProductType_maybe :: FamInstEnvs -> Type -> Maybe (DataCon, [Type], [Type], Coercion)
-- If deepSplitProductType_maybe ty = Just (dc, tys, arg_tys, co)
-- then dc @ tys (args::arg_tys) :: rep_ty
-- co :: ty ~ rep_ty
deepSplitProductType_maybe fam_envs ty
| let (co, ty1) = topNormaliseType_maybe fam_envs ty
`orElse` (mkRepReflCo ty, ty)
, Just (tc, tc_args) <- splitTyConApp_maybe ty1
, Just con <- isDataProductTyCon_maybe tc
, not (isClassTyCon tc) -- See Note [Do not unpack class dictionaries]
= Just (con, tc_args, dataConInstArgTys con tc_args, co)
deepSplitProductType_maybe _ _ = Nothing
deepSplitCprType_maybe :: FamInstEnvs -> ConTag -> Type -> Maybe (DataCon, [Type], [Type], Coercion)
-- If deepSplitCprType_maybe n ty = Just (dc, tys, arg_tys, co)
-- then dc @ tys (args::arg_tys) :: rep_ty
-- co :: ty ~ rep_ty
deepSplitCprType_maybe fam_envs con_tag ty
| let (co, ty1) = topNormaliseType_maybe fam_envs ty
`orElse` (mkRepReflCo ty, ty)
, Just (tc, tc_args) <- splitTyConApp_maybe ty1
, isDataTyCon tc
, let cons = tyConDataCons tc
, cons `lengthAtLeast` con_tag -- This might not be true if we import the
                                 -- type constructor via a .hs-boot file (#8743)
, let con = cons `getNth` (con_tag - fIRST_TAG)
= Just (con, tc_args, dataConInstArgTys con tc_args, co)
deepSplitCprType_maybe _ _ _ = Nothing
findTypeShape :: FamInstEnvs -> Type -> TypeShape
-- Uncover the arrow and product shape of a type
-- The data type TypeShape is defined in Demand
-- See Note [Trimming a demand to a type] in Demand
findTypeShape fam_envs ty
| Just (tc, tc_args) <- splitTyConApp_maybe ty
, Just con <- isDataProductTyCon_maybe tc
= TsProd (map (findTypeShape fam_envs) $ dataConInstArgTys con tc_args)
| Just (_, res) <- splitFunTy_maybe ty
= TsFun (findTypeShape fam_envs res)
| Just (_, ty') <- splitForAllTy_maybe ty
= findTypeShape fam_envs ty'
| Just (_, ty') <- topNormaliseType_maybe fam_envs ty
= findTypeShape fam_envs ty'
| otherwise
= TsUnk
{-
************************************************************************
* *
\subsection{CPR stuff}
* *
************************************************************************
@mkWWcpr@ takes the worker/wrapper pair produced from the strictness
info and adds in the CPR transformation. The worker returns an
unboxed tuple containing non-CPR components. The wrapper takes this
tuple and re-produces the correct structured output.
The non-CPR results appear ordered in the unboxed tuple as if by a
left-to-right traversal of the result structure.
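
As a rough sketch only (hand-written, with made-up names rather than the
Core GHC actually generates), for a function whose result is known to be
a pair, say f :: Int -> (Int, Int):

    f x   = case $wf x of (# a, b #) -> (a, b)                  -- wrapper reboxes
    $wf x = case <original body of f> of (a, b) -> (# a, b #)   -- worker unboxes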
-}
mkWWcpr :: Bool
-> FamInstEnvs
-> Type -- function body type
-> DmdResult -- CPR analysis results
-> UniqSM (Bool, -- Is w/w'ing useful?
CoreExpr -> CoreExpr, -- New wrapper
CoreExpr -> CoreExpr, -- New worker
Type) -- Type of worker's body
mkWWcpr opt_CprAnal fam_envs body_ty res
-- CPR explicitly turned off (or in -O0)
| not opt_CprAnal = return (False, id, id, body_ty)
  -- CPR is turned on by default for -O and -O2
| otherwise
= case returnsCPR_maybe res of
Nothing -> return (False, id, id, body_ty) -- No CPR info
Just con_tag | Just stuff <- deepSplitCprType_maybe fam_envs con_tag body_ty
-> mkWWcpr_help stuff
| otherwise
-- See Note [non-algebraic or open body type warning]
-> WARN( True, text "mkWWcpr: non-algebraic or open body type" <+> ppr body_ty )
return (False, id, id, body_ty)
mkWWcpr_help :: (DataCon, [Type], [Type], Coercion)
-> UniqSM (Bool, CoreExpr -> CoreExpr, CoreExpr -> CoreExpr, Type)
mkWWcpr_help (data_con, inst_tys, arg_tys, co)
| [arg_ty1] <- arg_tys
, isUnliftedType arg_ty1
-- Special case when there is a single result of unlifted type
--
-- Wrapper: case (..call worker..) of x -> C x
-- Worker: case ( ..body.. ) of C x -> x
= do { (work_uniq : arg_uniq : _) <- getUniquesM
; let arg = mk_ww_local arg_uniq arg_ty1
con_app = mkConApp2 data_con inst_tys [arg] `mkCast` mkSymCo co
; return ( True
, \ wkr_call -> Case wkr_call arg (exprType con_app) [(DEFAULT, [], con_app)]
, \ body -> mkUnpackCase body co work_uniq data_con [arg] (varToCoreExpr arg)
-- varToCoreExpr important here: arg can be a coercion
-- Lacking this caused Trac #10658
, arg_ty1 ) }
| otherwise -- The general case
-- Wrapper: case (..call worker..) of (# a, b #) -> C a b
-- Worker: case ( ...body... ) of C a b -> (# a, b #)
= do { (work_uniq : uniqs) <- getUniquesM
; let (wrap_wild : args) = zipWith mk_ww_local uniqs (ubx_tup_ty : arg_tys)
ubx_tup_ty = exprType ubx_tup_app
ubx_tup_app = mkCoreUbxTup arg_tys (map varToCoreExpr args)
con_app = mkConApp2 data_con inst_tys args `mkCast` mkSymCo co
; return (True
, \ wkr_call -> Case wkr_call wrap_wild (exprType con_app) [(DataAlt (tupleDataCon Unboxed (length arg_tys)), args, con_app)]
, \ body -> mkUnpackCase body co work_uniq data_con args ubx_tup_app
, ubx_tup_ty ) }
mkUnpackCase :: CoreExpr -> Coercion -> Unique -> DataCon -> [Id] -> CoreExpr -> CoreExpr
-- (mkUnpackCase e co uniq Con args body)
-- returns
-- case e |> co of bndr { Con args -> body }
mkUnpackCase (Tick tickish e) co uniq con args body -- See Note [Profiling and unpacking]
= Tick tickish (mkUnpackCase e co uniq con args body)
mkUnpackCase scrut co uniq boxing_con unpk_args body
= Case casted_scrut bndr (exprType body)
[(DataAlt boxing_con, unpk_args, body)]
where
casted_scrut = scrut `mkCast` co
bndr = mk_ww_local uniq (exprType casted_scrut)
{-
Note [non-algebraic or open body type warning]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
There are a few cases where the W/W transformation is told that something
returns a constructor, but the type at hand doesn't really match this. One
real-world example involves unsafeCoerce:
  foo :: IO a
foo = unsafeCoerce c_exit
foreign import ccall "c_exit" c_exit :: IO ()
Here CPR will tell you that `foo` returns a () constructor for sure, but trying
to create a worker/wrapper for type `a` obviously fails.
(This was a real example until ee8e792 in libraries/base.)
It does not seem feasible to avoid all such cases already in the analyser (and
after all, the analysis is not really wrong), so we simply do nothing here in
mkWWcpr. But we still want to emit a warning with -DDEBUG, to hopefully catch
other cases where something went avoidably wrong.
Note [Profiling and unpacking]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If the original function looked like
f = \ x -> {-# SCC "foo" #-} E
then we want the CPR'd worker to look like
\ x -> {-# SCC "foo" #-} (case E of I# x -> x)
and definitely not
\ x -> case ({-# SCC "foo" #-} E) of I# x -> x)
This transform doesn't move work or allocation
from one cost centre to another.
Later [SDM]: presumably this is because we want the simplifier to
eliminate the case, and the scc would get in the way? I'm ok with
including the case itself in the cost centre, since it is morally
part of the function (post transformation) anyway.
************************************************************************
* *
\subsection{Utilities}
* *
************************************************************************
Note [Absent errors]
~~~~~~~~~~~~~~~~~~~~
We make a new binding for Ids that are marked absent, thus
let x = absentError "x :: Int"
The idea is that this binding will never be used; but if it
buggily is used we'll get a runtime error message.
Coping with absence for *unlifted* types is important; see, for
example, Trac #4306. For these we find a suitable literal,
using Literal.absentLiteralOf. We don't have literals for
every primitive type, so the function is partial.
[I did try the experiment of using an error thunk for unlifted
things too, relying on the simplifier to drop it as dead code,
by making absentError
(a) *not* be a bottoming Id,
(b) be "ok for speculation"
But that relies on the simplifier finding that it really
is dead code, which is fragile, and indeed failed when
profiling is on, which disables various optimisations. So
using a literal will do.]
-}
mk_absent_let :: DynFlags -> Id -> Maybe (CoreExpr -> CoreExpr)
mk_absent_let dflags arg
| not (isUnliftedType arg_ty)
= Just (Let (NonRec arg abs_rhs))
| Just tc <- tyConAppTyCon_maybe arg_ty
, Just lit <- absentLiteralOf tc
= Just (Let (NonRec arg (Lit lit)))
| arg_ty `eqType` voidPrimTy
= Just (Let (NonRec arg (Var voidPrimId)))
| otherwise
= WARN( True, text "No absent value for" <+> ppr arg_ty )
Nothing
where
arg_ty = idType arg
abs_rhs = mkRuntimeErrorApp aBSENT_ERROR_ID arg_ty msg
msg = showSDoc dflags (ppr arg <+> ppr (idType arg))
mk_seq_case :: Id -> CoreExpr -> CoreExpr
mk_seq_case arg body = Case (Var arg) (sanitiseCaseBndr arg) (exprType body) [(DEFAULT, [], body)]
sanitiseCaseBndr :: Id -> Id
-- The argument we are scrutinising has the right type to be
-- a case binder, so it's convenient to re-use it for that purpose.
-- But we *must* throw away all its IdInfo. In particular, the argument
-- will have demand info on it, and that demand info may be incorrect for
-- the case binder. e.g. case ww_arg of ww_arg { I# x -> ... }
-- Quite likely ww_arg isn't used in '...'. The case may get discarded
-- if the case binder says "I'm demanded". This happened in a situation
-- like (x+y) `seq` ....
sanitiseCaseBndr id = id `setIdInfo` vanillaIdInfo
mk_ww_local :: Unique -> Type -> Id
mk_ww_local uniq ty = mkSysLocalOrCoVar (fsLit "ww") uniq ty
|
nushio3/ghc
|
compiler/stranal/WwLib.hs
|
Haskell
|
bsd-3-clause
| 31,859
|
-- |Auxiliary functions to vectorise type abstractions.
module Vectorise.Utils.Poly
( polyAbstract
, polyApply
, polyVApply
, polyArity
)
where
import GhcPrelude
import Vectorise.Vect
import Vectorise.Monad
import Vectorise.Utils.PADict
import CoreSyn
import Type
import FastString
import Control.Monad
-- Vectorisation of type arguments -------------------------------------------------------------
-- |Vectorise under the 'PA' dictionary variables corresponding to a set of type arguments.
--
-- The dictionary variables are new local variables that are entered into the local vectorisation
-- map.
--
-- The purpose of this function is to introduce the additional 'PA' dictionary arguments that are
-- needed when vectorising type abstractions.
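--
-- As a rough sketch (the name @dPA_a@ is made up; the real dictionary
-- variables get fresh uniques): a type abstraction @/\a. e@ is vectorised
-- under 'polyAbstract' to something of the shape @/\a. \(dPA_a :: PA a). e'@,
-- with @a@ mapped to @dPA_a@ in the local environment while @e'@ is built.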
--
polyAbstract :: [TyVar] -> ([Var] -> VM a) -> VM a
polyAbstract tvs p
= localV
$ do { mdicts <- mapM mk_dict_var tvs
; zipWithM_ (\tv -> maybe (defLocalTyVar tv)
(defLocalTyVarWithPA tv . Var)) tvs mdicts
; p (mk_args mdicts)
}
where
mk_dict_var tv
= do { r <- paDictArgType tv
; case r of
Just ty -> liftM Just (newLocalVar (fsLit "dPA") ty)
Nothing -> return Nothing
}
mk_args mdicts = [dict | Just dict <- mdicts]
-- |Determine the number of 'PA' dictionary arguments required for a set of type variables (depends
-- on their kinds).
--
polyArity :: [TyVar] -> VM Int
polyArity tvs
= do { tys <- mapM paDictArgType tvs
; return $ length [() | Just _ <- tys]
}
-- |Apply an expression to its type arguments as well as 'PA' dictionaries for these type arguments.
--
polyApply :: CoreExpr -> [Type] -> VM CoreExpr
polyApply expr tys
= do { dicts <- mapM paDictOfType tys
; return $ expr `mkTyApps` tys `mkApps` dicts
}
-- |Apply a vectorised expression to a set of type arguments together with 'PA' dictionaries for
-- these type arguments.
--
polyVApply :: VExpr -> [Type] -> VM VExpr
polyVApply expr tys
= do { dicts <- mapM paDictOfType tys
; return $ mapVect (\e -> e `mkTyApps` tys `mkApps` dicts) expr
}
|
ezyang/ghc
|
compiler/vectorise/Vectorise/Utils/Poly.hs
|
Haskell
|
bsd-3-clause
| 2,148
|
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE RecordWildCards #-}
-----------------------------------------------------------------------------
-- |
-- Module : Hadron
-- Copyright : Soostone Inc
-- License : BSD3
--
-- Maintainer : Ozgun Ataman
-- Stability : experimental
--
-- Low level building blocks for working with Hadoop streaming.
--
-- We define all the base types for MapReduce and export map/reduce
-- maker functions that know how to deal with ByteString input and
-- output.
----------------------------------------------------------------------------
module Hadron.Basic
(
-- * Types
Key
, CompositeKey
, Mapper
, Reducer
-- * Hadoop Utilities
, emitCounter
, hsEmitCounter
, emitStatus
, getFileName
-- * MapReduce Construction
, mapReduceMain
, mapReduce
, MROptions (..)
, PartitionStrategy (..)
-- * Low-level Utilities
, mapper
, mapperWith
, combiner
, reducer
, setLineBuffering
-- * Data Serialization Utilities
, module Hadron.Protocol
) where
-------------------------------------------------------------------------------
import Blaze.ByteString.Builder
import Control.Applicative
import Control.Category
import Control.Lens
import Control.Monad
import Control.Monad.Base
import Control.Monad.Primitive
import Control.Monad.Trans
import Control.Monad.Trans.Resource
import qualified Data.ByteString.Char8 as B
import qualified Data.ByteString.Lazy.Char8 as LB
import Data.Conduit
import Data.Conduit.Binary (sinkHandle, sourceHandle)
import Data.Conduit.Blaze
import qualified Data.Conduit.List as C
import Data.List
import Data.Monoid
import Options.Applicative
import Prelude hiding (id, (.))
import System.Environment
import System.IO
-------------------------------------------------------------------------------
import Hadron.Protocol
import Hadron.Run.Hadoop
import Hadron.Types
-------------------------------------------------------------------------------
showBS :: Show a => a -> B.ByteString
showBS = B.pack . show
-- | Emit a counter to be captured, added up and reported by Hadoop.
emitCounter
:: B.ByteString
-- ^ Group name
-> B.ByteString
-- ^ Counter name
-> Integer
-- ^ Increment
-> IO ()
emitCounter grp counter inc = LB.hPutStrLn stderr $ toLazyByteString txt
where
txt = mconcat $ map fromByteString
["reporter:counter:", grp, ",", counter, ",", showBS inc]
-- | Emit counter from this library's group
hsEmitCounter :: B.ByteString -> Integer -> IO ()
hsEmitCounter = emitCounter "Hadron"
-- | Emit a status line.
emitStatus :: B.ByteString -> IO ()
emitStatus msg = LB.hPutStrLn stderr $ toLazyByteString txt
where
txt = fromByteString "reporter:status:" <>
fromByteString msg
-- | Get the current filename from the Hadoop environment. Useful when
-- writing 'Mapper's, when you would like to know what file you're
-- currently dealing with.
getFileName :: MonadIO m => m FilePath
getFileName = liftIO $ getEnv "mapreduce_map_input_file"
-------------------------------------------------------------------------------
mapper
:: Mapper B.ByteString CompositeKey B.ByteString
-- ^ A key/value producer - don't worry about putting any newline
-- endings yourself, we do that for you.
-> IO ()
mapper f = mapperWith id f
-- | Construct a mapper program using given serialization Prism.
mapperWith
:: Prism' B.ByteString t
-> Mapper B.ByteString CompositeKey t
-> IO ()
mapperWith p f = runResourceT $ do
setLineBuffering
sourceHandle stdin $=
f $=
encodeMapOutput p $$
sinkHandle stdout
-- -------------------------------------------------------------------------------
-- -- | Drop the key and simply output the value stream.
-- mapOnly
-- :: (InputStream B.ByteString -> OutputStream B.ByteString -> IO ())
-- -> IO ()
-- mapOnly f = do
-- setLineBuffering
-- f S.stdin S.stdout
-------------------------------------------------------------------------------
combiner
:: MROptions
-> Prism' B.ByteString b
-> Reducer CompositeKey b (CompositeKey, b)
-> IO ()
combiner mro mrInPrism f = runResourceT $ do
setLineBuffering
sourceHandle stdin =$=
decodeReducerInput mro mrInPrism =$=
f =$=
encodeMapOutput mrInPrism $$
sinkHandle stdout
-------------------------------------------------------------------------------
setLineBuffering :: MonadIO m => m ()
setLineBuffering = do
liftIO $ hSetBuffering stderr LineBuffering
liftIO $ hSetBuffering stdout LineBuffering
liftIO $ hSetBuffering stdin LineBuffering
-------------------------------------------------------------------------------
-- | Appropriately produce lines of mapper output in a way compliant
-- with Hadoop and 'decodeReducerInput'.
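--
-- For example (hypothetical key and value), a composite key @["2014","05"]@
-- whose value serializes to @"x"@ comes out as the single line
-- @"2014\t05\tx\n"@: key segments are tab-separated and the value follows a
-- final tab.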
encodeMapOutput
:: (PrimMonad base, MonadBase base m)
=> Prism' B.ByteString b
-> Conduit (CompositeKey, b) m B.ByteString
encodeMapOutput mrInPrism = C.map conv $= builderToByteString
where
conv (k,v) = mconcat
[ mconcat (intersperse tab (map fromByteString k))
, tab
, fromByteString (review mrInPrism v)
, nl ]
tab = fromByteString "\t"
nl = fromByteString "\n"
-------------------------------------------------------------------------------
-- | Chunk 'stdin' into lines and try to decode the value using given 'Prism'.
decodeReducerInput
:: (MonadIO m, MonadThrow m)
=> MROptions
-> Prism' B.ByteString b
-> ConduitM a (CompositeKey, b) m ()
decodeReducerInput mro mrInPrism =
sourceHandle stdin =$=
lineC (numSegs (_mroPart mro)) =$=
C.mapMaybe (_2 (firstOf mrInPrism))
-------------------------------------------------------------------------------
reducerMain
:: MROptions
-> Prism' B.ByteString a
-> Reducer CompositeKey a B.ByteString
-> IO ()
reducerMain mro p f = do
setLineBuffering
runResourceT $ reducer mro p f $$ sinkHandle stdout
-- | Create a reducer program.
reducer
:: MROptions
-> Prism' B.ByteString a
-- ^ Input conversion function
-> Reducer CompositeKey a b
-- ^ A step function for any given key. Will be rerun from scratch
-- for each unique key based on MROptions.
-> Producer (ResourceT IO) b
reducer mro@MROptions{..} mrInPrism f = do
sourceHandle stdin =$=
decodeReducerInput mro mrInPrism =$=
go2
where
go2 = do
next <- await
case next of
Nothing -> return ()
Just x -> do
leftover x
block
go2
block = sameKey Nothing =$= f
sameKey cur = do
next <- await
case next of
Nothing -> return ()
Just x@(k,_) ->
case cur of
Just curKey -> do
let n = eqSegs _mroPart
case take n curKey == take n k of
True -> yield x >> sameKey cur
False -> leftover x
Nothing -> do
yield x
sameKey (Just k)
------------------
-- Main Program --
------------------
-------------------------------------------------------------------------------
mapReduce
:: MROptions
-> Prism' B.ByteString a
-- ^ Serialization for data between map and reduce stages
-> Mapper B.ByteString CompositeKey a
-> Reducer CompositeKey a B.ByteString
-> (IO (), IO ())
mapReduce mro mrInPrism f g = (mp, rd)
where
mp = mapperWith mrInPrism f
rd = reducerMain mro mrInPrism g
-- | A default main that will respond to 'map' and 'reduce' commands
-- to run the right phase appropriately.
--
-- This is the recommended 'main' entry point to a map-reduce program.
-- The resulting program will respond as:
--
-- > ./myProgram map
-- > ./myProgram reduce
mapReduceMain
:: MROptions
-> Prism' B.ByteString a
-- ^ Serialization function for the in-between data 'a'.
-> Mapper B.ByteString CompositeKey a
-> Reducer CompositeKey a B.ByteString
-- ^ Reducer for a stream of values belonging to the same key.
-> IO ()
mapReduceMain mro mrInPrism f g = liftIO (execParser opts) >>= run
where
(mp,rd) = mapReduce mro mrInPrism f g
run Map = mp
run Reduce = rd
opts = info (helper <*> commandParse)
( fullDesc
<> progDesc "This is a Hadron Map/Reduce binary. "
<> header "hadron - use Haskell for Hadron."
)
data Command = Map | Reduce
-------------------------------------------------------------------------------
commandParse :: Parser Command
commandParse = subparser
( command "map" (info (pure Map)
( progDesc "Run mapper." ))
<> command "reduce" (info (pure Reduce)
( progDesc "Run reducer" ))
)
|
fpinsight/hadron
|
src/Hadron/Basic.hs
|
Haskell
|
bsd-3-clause
| 9,521
|
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="si-LK">
<title>Directory List v2.3</title>
<maps>
<homeID>directorylistv2_3</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
thc202/zap-extensions
|
addOns/directorylistv2_3/src/main/javahelp/help_si_LK/helpset_si_LK.hs
|
Haskell
|
apache-2.0
| 978
|
{-# LANGUAGE FlexibleContexts, BangPatterns #-}
import Data.Array.Repa
import Data.Array.Repa.IO.DevIL
import System.Environment
import Data.Array.Repa.Repr.ForeignPtr
import Data.Word
-- <<main
main :: IO ()
main = do
[n, f1,f2] <- getArgs
runIL $ do
(RGB v) <- readImage f1 -- <1>
rotated <- computeP $ rotate (read n) v :: IL (Array F DIM3 Word8) -- <2>
writeImage f2 (RGB rotated) -- <3>
-- >>
-- <<rotate
rotate :: Double -> Array F DIM3 Word8 -> Array D DIM3 Word8
rotate deg g = fromFunction (Z :. y :. x :. k) f -- <1>
where
sh@(Z :. y :. x :. k) = extent g
!theta = pi/180 * deg -- <2>
!st = sin theta -- <3>
!ct = cos theta
!cy = fromIntegral y / 2 :: Double -- <4>
!cx = fromIntegral x / 2 :: Double
f (Z :. i :. j :. k) -- <5>
| inShape sh old = g ! old -- <6>
| otherwise = 0 -- <7>
where
fi = fromIntegral i - cy -- <8>
fj = fromIntegral j - cx
i' = round (st * fj + ct * fi + cy) -- <9>
j' = round (ct * fj - st * fi + cx)
old = Z :. i' :. j' :. k -- <10>
-- >>
|
lywaterman/parconc-examples
|
rotateimage.hs
|
Haskell
|
bsd-3-clause
| 1,427
|
module Way.Type where
import Data.IntSet (IntSet)
import qualified Data.IntSet as Set
import Data.List
import Data.Maybe
import Development.Shake.Classes
import Hadrian.Utilities
-- Note: order of constructors is important for compatibility with the old build
-- system, e.g. we want "thr_p", not "p_thr" (see instance Show Way).
-- | A 'WayUnit' is a single way of building source code, for example with
-- profiling enabled, or dynamically linked.
data WayUnit = Threaded
| Debug
| Profiling
| Logging
| Dynamic
deriving (Bounded, Enum, Eq, Ord)
-- TODO: get rid of non-derived Show instances
instance Show WayUnit where
show unit = case unit of
Threaded -> "thr"
Debug -> "debug"
Profiling -> "p"
Logging -> "l"
Dynamic -> "dyn"
instance Read WayUnit where
readsPrec _ s = [(unit, "") | unit <- [minBound ..], show unit == s]
-- | Collection of 'WayUnit's that stands for the different ways source code
-- is to be built.
newtype Way = Way IntSet
instance Binary Way where
put = put . show
get = fmap read get
instance Hashable Way where
hashWithSalt salt = hashWithSalt salt . show
instance NFData Way where
rnf (Way s) = s `seq` ()
-- | Construct a 'Way' from multiple 'WayUnit's. Inverse of 'wayToUnits'.
wayFromUnits :: [WayUnit] -> Way
wayFromUnits = Way . Set.fromList . map fromEnum
-- | Split a 'Way' into its 'WayUnit' building blocks.
-- Inverse of 'wayFromUnits'.
wayToUnits :: Way -> [WayUnit]
wayToUnits (Way set) = map toEnum . Set.elems $ set
-- | Check whether a 'Way' contains a certain 'WayUnit'.
wayUnit :: WayUnit -> Way -> Bool
wayUnit unit (Way set) = fromEnum unit `Set.member` set
-- | Add a 'WayUnit' to a 'Way'
addWayUnit :: WayUnit -> Way -> Way
addWayUnit unit (Way set) = Way . Set.insert (fromEnum unit) $ set
-- | Remove a 'WayUnit' from 'Way'.
removeWayUnit :: WayUnit -> Way -> Way
removeWayUnit unit (Way set) = Way . Set.delete (fromEnum unit) $ set
instance Show Way where
show way = if null tag then "v" else tag
where
tag = intercalate "_" . map show . wayToUnits $ way
instance Read Way where
readsPrec _ s = if s == "v" then [(wayFromUnits [], "")] else result
where
uniqueReads token = case reads token of
[(unit, "")] -> Just unit
_ -> Nothing
units = map uniqueReads . words . replaceEq '_' ' ' $ s
result = if Nothing `elem` units
then []
else [(wayFromUnits . map fromJust $ units, "")]
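-- A few hand-worked examples of the Show/Read instances above (not taken
-- from a test suite):
--
--   show (wayFromUnits [Threaded, Profiling])  ==  "thr_p"
--   show (wayFromUnits [])                     ==  "v"
--   (read "thr_p" :: Way)                      ==  wayFromUnits [Threaded, Profiling]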
instance Eq Way where
Way a == Way b = a == b
instance Ord Way where
compare (Way a) (Way b) = compare a b
|
bgamari/shaking-up-ghc
|
src/Way/Type.hs
|
Haskell
|
bsd-3-clause
| 2,727
|
import System.IO
import System.Cmd
import System.FilePath
import Text.Printf
import System.Directory
import Control.Monad
testdir = "openFile008_testdir"
-- Test repeated opening/closing of 1000 files. This is useful for gauging
-- the performance of open/close and file locking.
main = do
system ("rm -rf " ++ testdir)
createDirectory testdir
let filenames = [testdir </> printf "file%03d" (n::Int) | n <- [1..1000]]
forM_ [1..50] $ \_ -> do
hs <- mapM (\f -> openFile f WriteMode) filenames
mapM_ hClose hs
mapM_ removeFile filenames
removeDirectory testdir
|
urbanslug/ghc
|
libraries/base/tests/IO/openFile008.hs
|
Haskell
|
bsd-3-clause
| 585
|
-- file ch03/ex05.hs
-- Write a function that determines whether its input list is a palindrome.
isPali :: (Eq a) => [a] -> Bool
isPali [] = True
isPali [_] = True
isPali (x:xs) = x == last xs && isPali (init xs)
-- init is defined as: init xs = take (length xs - 1) xs
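-- A couple of hand-checked examples:
--   isPali "racecar" == True
--   isPali [1,2,3]   == False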
|
imrehg/rwhaskell
|
ch03/ex05.hs
|
Haskell
|
mit
| 274
|
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE RecordWildCards, NamedFieldPuns #-}
{-# LANGUAGE FlexibleContexts #-}
--{-# LANGUAGE NoMonomorphismRestriction #-}
module Sudoku where
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Map (Map)
import qualified Data.Map as Map
import "mtl" Control.Monad.State
n :: Int
n = 3
type Index = (Int, Int)
data Square = Square {
x :: (Int, Int),
y :: (Int, Int)
} deriving (Eq, Ord, Show)
xy :: Square -> Index
xy Square {..} = (fst x, fst y)
type Number = Int
type Map' = Map Index (Set Number)
data BoardState = BoardState {
grid :: Map',
rows :: Map',
cols :: Map',
rest :: Set Square,
board :: Map Square Number
}
indices = [(x, y) | x <- [1..n], y <- [1..n]]
squares = [Square {x, y} | x <- indices, y <- indices]
numbers = [1..n*n]
showNum :: Maybe Number -> String
showNum Nothing = " "
showNum (Just x) = show x
instance Show BoardState where
show BoardState {board} =
unlines $ map showRow indices
where showRow y = concat [showNum $ getSquare y x | x <- indices]
getSquare y x = Map.lookup Square {x, y} board
emptyMap' = Map.fromList [(i, Set.empty) | i <- indices]
initial :: BoardState
initial = BoardState {
grid = emptyMap',
rows = emptyMap',
cols = emptyMap',
rest = Set.fromList squares,
board = Map.empty
}
--solve :: (MonadTrans t, MonadState BoardState (t [])) => t [] ()
solve :: StateT BoardState [] ()
solve = do
boardState@BoardState{..} <- get
let next = Set.findMin rest
rest' = Set.delete next rest
maps = [grid, rows, cols]
ixs = map ($ next) [xy, y, x]
sets = zipWith (Map.!) maps ixs
num <- lift numbers
lift $ guard $ all (Set.notMember num) sets
let sets' = [Set.insert num s | s <- sets]
maps' = zipWith3 Map.insert ixs sets' maps
[grid', rows', cols'] = maps'
put $ boardState {
grid = grid',
rows = rows',
cols = cols',
rest = rest',
board = Map.insert next num board
}
when (not $ Set.null rest') solve
allSolutions = map snd $ runStateT solve initial
|
vladfi1/hs-misc
|
Sudoku.hs
|
Haskell
|
mit
| 2,098
|
{-# LANGUAGE DeriveGeneric #-}
import GHC.Generics (Generic)
import Data.Hashable
data Color = Red | Green | Blue deriving (Generic, Show)
instance Hashable Color where
example1 :: Int
example1 = hash Red
-- 839657738087498284
example2 :: Int
example2 = hashWithSalt 0xDEADBEEF Red
-- 62679985974121021
|
riwsky/wiwinwlh
|
src/hashable.hs
|
Haskell
|
mit
| 308
|
module PuzzleIO where
import Data.Char
import Data.Matrix
type Puzzle = Matrix (Maybe Int)
showPuzzle = unlines . (map showRow) . toLists
showRow = unwords . map showNumber
showNumber :: Maybe Int -> String
showNumber (Just n) = show n
showNumber Nothing = "-"
readPuzzle :: IO Puzzle
readPuzzle = do
lines <- sequence $ take 9 $ repeat getLine
return $ puzzleFromLines lines
puzzleFromLines lines =
let chars = filter (not . isSpace) $ unwords lines
in
fromList 9 9 $ map toCell chars
toCell :: Char -> Maybe Int
toCell n = if isHexDigit n then Just (digitToInt n) else Nothing
|
matthayter/haskell_sudoku
|
PuzzleIO.hs
|
Haskell
|
mit
| 600
|
module ARD.World where
import ARD.Camera
import ARD.Color
import ARD.Light
import ARD.Randomize
import ARD.Ray
import ARD.Shape
import ARD.ViewPlane
data World
= World
{ camera :: Camera
, viewPlane :: ViewPlane
, sceneObjects :: [Shape]
, lights :: [Light]
, ambientLight :: Light
, backgroundColor :: Color
, randomState :: Random
}
|
crazymaik/ard-haskell
|
lib/ARD/World.hs
|
Haskell
|
mit
| 356
|
{-# LANGUAGE FlexibleContexts, UndecidableInstances #-}
-----------------------------------------------------------------------------
--
-- Module : RunID3Weka
-- Copyright :
-- License : MIT
--
-- Maintainer :
-- Stability :
-- Portability :
--
-- |
--
-----------------------------------------------------------------------------
module RunID3Weka (
run
, runIterative
, drawDecisionTree
, FinishedSplittingThreshold(..)
) where
import DecisionTrees
import DecisionTrees.Definitions
import DecisionTrees.TreeBranching
import DecisionTrees.ID3
import DecisionTrees.Utils
import WekaData
import WekaData.Show.Name
import Control.Arrow
import Data.Typeable
import Data.Map.Strict (Map)
import Data.List ((\\))
import Data.Maybe (isJust)
import qualified Data.Map.Strict as Map
import qualified Data.Set as Set
import qualified Data.Tree as Tree
import GHC.Float
import Data.Random
import Data.Random.Sample
import qualified Data.Random.Extras as RE
-----------------------------------------------------------------------------
buildAttr attr = uncurry WVal . (const attr &&& id)
instance Attribute WekaVal where
-- possibleDiscreteDomains :: attr -> [PossibleDiscreteDomain attr]
possibleDiscreteDomains (WVal attr@(WekaAttrNom _ domain) _) =
flists (buildAttr attr) fsubs
where fsubs = fullSubsets . Set.fromList $ domain
fl f = map f . Set.toList
flists f = fl (fl (fl f))
-- attributeName :: attr -> AttributeName
attributeName (WVal attr _) = AttrName $ wekaAttributeName attr
separateClass :: (?clazz :: ClassDescriptor) => WekaEntry -> (WekaVal, [WekaVal])
separateClass e@(WEntry set) = maybe err f $ lookupWValInSet c set
where Class c = ?clazz
f = id &&& (Set.toList . (`Set.delete` set))
err = error $ "Class attribute '" ++ c ++ "' not found in " ++ show e
instance Entry WekaEntry where
-- listAttributes :: entry -> [AttributeContainer]
listAttributes = map Attr . snd . separateClass
-- getClass :: entry -> AttributeContainer
getClass = Attr . fst . separateClass
-- classDomain :: entry -> Set AttributeContainer
classDomain = Set.fromList . f . fst . separateClass
where f (WVal attr@(WekaAttrNom _ domain) _) =
map (Attr . WVal attr) domain
-- attrByName :: AttributeName -> entry -> AttributeContainer
attrByName (AttrName name) e@(WEntry dta) =
maybe err Attr $ lookupWValInSet name dta
where err = error $ "Attribute '" ++ name
++ "' not found in " ++ show e
hasAttribute (AttrName name) (WEntry set) = isJust $ lookupWValInSet name set
-----------------------------------------------------------------------------
type Filename = String
run :: Filename -- ^ file name
-> String -- ^ class name
-> FinishedSplittingThreshold
-> IO (Decision WekaEntry AttributeContainer) -- ^ the decision tree
run filename classname fsThreshold =
do entries <- getEntries filename
let ?clazz = Class classname
let ?config = fsThreshold
buildDecisionTree entries
getEntries filename = do
raw@(RawWekaData name attrs dta) <- readWekaData filename
return $ wekaData2Sparse raw
-----------------------------------------------------------------------------
runIterative :: Filename -- ^ file name
-> String -- ^ class name
-> FinishedSplittingThreshold
-> Float -- ^ the percent of /test/ entries
-> IO (Decision WekaEntry AttributeContainer) -- ^ the decision tree
runIterative filename classname fsThreshold testPercent = do
entries <- getEntries filename
let testEntriesCount = float2Int $ int2Float (length entries) * testPercent
testEntries <- runRVar (RE.sample testEntriesCount entries) StdRandom
let learnEntries = entries \\ testEntries
let ?clazz = Class classname
let ?config = fsThreshold
buildDecisionTreeIterative learnEntries testEntries
undefined
-----------------------------------------------------------------------------
drawDecisionTree :: Decision WekaEntry AttributeContainer -> IO()
drawDecisionTree res = do let tr = decision2Tree show res
putStrLn $ Tree.drawTree tr
|
fehu/min-dat--decision-trees
|
src/RunID3Weka.hs
|
Haskell
|
mit
| 4,305
|
module JSON where
import Text.JSON
import DB0
import DB.Get
instance JSON Event where
showJSON (EvNewMessage mid) = makeObj [("newmessage",JSRational False $ fromIntegral mid)]
showJSON _ = JSNull
instance JSON Exposed where
showJSON (Exposed mid mdate mpid mtext mvote fs (Interface cv cp ci cr cc co cx)) = makeObj $ [
("id",JSRational False . fromIntegral $ mid),
("date",JSString $ toJSString $ mdate),
("parent",maybe JSNull (JSRational False . fromIntegral) mpid),
("text",JSString $ toJSString mtext),
("vote",JSRational False $ fromIntegral mvote),
("alter", JSArray $ map showJSON fs),
("canVote",JSBool cv),
("canPropose",JSBool cp),
("canIntervein",JSBool ci),
("canRespond",JSBool cr),
("canClose",JSBool cc),
("canOpen",JSBool co),
("canRetract",JSBool cx)
]
|
paolino/mootzoo
|
JSON.hs
|
Haskell
|
mit
| 1,031
|
module Parser where
import Monad
import Types
import Data.Maybe (fromJust)
import Options.Applicative
import Options.Applicative.Types (ReadM (..), readerAsk, readerError)
fullParser :: Parser Args
fullParser = argParser
uriReadM :: ReadM URI
uriReadM = do
s <- readerAsk
case parseURI s of
Nothing -> readerError "Not a valid seed URI"
Just uri -> return uri
argParser :: Parser Args
argParser = Args
<$> argument uriReadM
( metavar "SEED"
<> help "The url to start the spider from" )
<*> option auto
( value 1
<> short 'f'
<> long "fetchers"
<> metavar "INT"
<> help "The number of fetchers to use concurrently")
<*> flag False True
( short 's'
<> long "fetcher-status-log"
<> help "Whether to log fetcher's status")
<*> strOption
( value "/tmp/fetcher.txt"
<> long "queue-log-file"
<> metavar "FILE"
<> help "Where to log fetcher's status")
<*> strOption
( value "mail.txt"
<> short 'o'
<> long "output"
<> metavar "FILE"
<> help "Where to output the email addresses")
<*> flag False True
( short 'u'
<> long "queue-log"
<> help "Whether to log queued URL's")
<*> strOption
( value "/tmp/url.txt"
<> long "queue-log-file"
<> metavar "FILE"
<> help "Where to log queued URL's")
<*> switch
( short 'd'
<> long "stay-within-domain"
<> help "Stay within the seed URL's domain")
parseArgs :: IO Args
parseArgs = execParser opts
where
opts = info (helper <*> fullParser)
( fullDesc
<> progDesc "Web spider with the objective to find email addresses."
<> header "HEAS: Haskell Email Address Spider" )
|
NorfairKing/hess
|
src/Parser.hs
|
Haskell
|
mit
| 1,882
|
-- |
-- Module : Text.XML.Mapping.Schema.Mixed
-- Copyright : (c) Joseph Abrahamson 2013
-- License : MIT
-- .
-- Maintainer : me@jspha.com
-- Stability : experimental
-- Portability : non-portable
-- .
-- XML \"mixed\" content.
module Text.XML.Mapping.Schema.Mixed (
Mixed (Mixed, unMixed), textOnly, elementsOnly
) where
import Data.Either
import qualified Data.Text as T
newtype Mixed a = Mixed { unMixed :: [Either T.Text a] }
deriving ( Show, Eq, Ord )
textOnly :: Mixed a -> [T.Text]
textOnly = lefts . unMixed
elementsOnly :: Mixed a -> [a]
elementsOnly = rights . unMixed
|
tel/xml-mapping
|
src/Text/XML/Mapping/Schema/Mixed.hs
|
Haskell
|
mit
| 633
|
module Main where
import qualified Data.ByteString.Lazy as Bs
import Data.List
import Data.Word (Word8)
import Data.Bits
import Data.Ord (comparing)
import System.Exit
import System.Environment
import System.Console.GetOpt
import Control.Monad.Error
data RleCode = EqRle {len::Int, byte::Word8}
| IncRle {len::Int, start::Word8}
| Raw {bytes::[Word8]}
deriving (Show)
----------------------------------------Decode----------------------------------
--decodes one RleCode to corresponding byte array
decode :: RleCode -> [Word8]
decode (EqRle l b) = replicate l b
decode (IncRle l s) = take l [s..]
decode (Raw xs) = xs
--reads rleCodes from input bytestream
readRleCode :: [Word8] -> [RleCode]
readRleCode [] = []
readRleCode (count:datas@(dataByte:ds))
| count == 0xFF = [] --FF is endOfStream
| count == 0x7F = error "0x7F:New PPU address command is not implemented"
| not (testBit count 7) = EqRle (fromIntegral count) dataByte : readRleCode ds
| testBit count 6 = IncRle count' dataByte : readRleCode ds
| otherwise = Raw (take count' datas) : readRleCode (drop count' datas)
where count' = fromIntegral $ count .&. 0x3F--clear two hi bits
----------------------------------------Encode----------------------------------
-- takes only ascending (+1) init part of list (e.g. [5,6,7,2,1]->[5,6,7])
takeAsc :: (Eq a, Num a) => [a] -> [a]
takeAsc [] = []
takeAsc xss@(x:xs) = (x:) $ map fst $ takeWhile (uncurry (==)) $ zip xs $ map (+1) xss
encodeEqRle:: [Word8] -> RleCode
encodeEqRle xs = EqRle (length eqGroup) (head eqGroup)
where eqGroup = head $ group xs
encodeIncRle :: [Word8] -> RleCode
encodeIncRle xs = IncRle (length asc) (head asc)
where asc = takeAsc xs
mergeRaws :: [RleCode] -> [RleCode]
mergeRaws [] = []
mergeRaws (Raw x: Raw y :xs) = mergeRaws $ Raw (x++y): xs
mergeRaws (x:xs) = x:mergeRaws xs
--encode inits of given list to the best RleCode comparing length
encode :: [Word8] -> [RleCode]--first we get single RleCodes and then merge all raw values
encode = mergeRaws.encode'
where
encode' [] = []
encode' xs@(x:xss)
| len maxCode <= 2 = Raw [x] : encode' xss --optimization: don't break raw chains with 2-bytes Rle
| otherwise = maxCode : encode' (drop (len maxCode) xs)
where maxCode = maximumBy (comparing len) [encodeIncRle xs, encodeEqRle xs]
--serialize list of RleCodes to game's format
writeRleCode:: [RleCode] -> [Word8]
writeRleCode [] = [0xFF] --End of stream sign
writeRleCode (EqRle l b:xs)
| l > 0x7E = 0x7E: b : writeRleCode (EqRle (l-0x7E) b: xs) --0x7E is max EqRle length
| otherwise = fromIntegral l: b: writeRleCode xs
writeRleCode (IncRle l s:xs)
  | l > 0x3E = 0xFE: s : writeRleCode (IncRle (l-0x3E) (s+0x3E): xs) --0x3E is max IncRle length: 0x3F would give count byte 0xFF, the end-of-stream marker
| otherwise = (fromIntegral l .|. 0xC0) : s : writeRleCode xs --two high bits are set at incremental Rle
writeRleCode (Raw xs: xss)
  | length xs > 0x3F = 0xBF: take 0x3F xs ++ writeRleCode (Raw (drop 0x3F xs) : xss)
| otherwise = (fromIntegral (length xs) .|. 0x80): xs ++ writeRleCode xss -- high bit is set for raw
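-- A hand-checked round trip (not from a test suite):
--   writeRleCode (encode [5,5,5,5,5])  ==  [0x05, 0x05, 0xFF]
-- i.e. an EqRle of length 5 with byte 5, then the 0xFF end-of-stream marker.
-- Reading the same bytes back needs at least one byte after the final count
-- byte (readRleCode pattern-matches two bytes at a time), e.g.
--   readRleCode [0x05, 0x05, 0xFF, 0x00]  gives  [EqRle 5 5]
-- whose decode is [5,5,5,5,5] again.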
----------------------------------------Command line parse part----------------------------------
data Action = Decode | Encode | NoAction deriving (Show, Eq)
data Options = Options
{optHelp :: Bool
,optVersion :: Bool
,optAction :: Action
}
deriving (Show)
defaultOptions :: Options
defaultOptions = Options
{optHelp = False
,optVersion = False
,optAction = NoAction
}
usage :: String
usage = usageInfo "Usage: eintourname [-d | -e] file_name [offset]" options
options :: [OptDescr (Options -> Options)]
options =
[ Option "d" ["decode"] (NoArg (\opts -> opts {optAction = Decode})) "decode from ROM. -d <file_name offset>"
, Option "e" ["encode"] (NoArg (\opts -> opts {optAction = Encode})) "encode from raw binary. -e <file_name>"
, Option "h?" ["help"] (NoArg (\ opts -> opts { optHelp = True })) "show help."
, Option "v" ["version"] (NoArg (\ opts -> opts { optVersion = True })) "show version number."
]
toolOpts :: [String] -> IO (Options, [String])
toolOpts argv =
case getOpt Permute options argv of
(o,n,[] ) -> return (foldl (flip id) defaultOptions o, n)
(_,_,errs) -> ioError (userError (concat errs ++ usage))
----------------------------------------------Main------------------------------------------------------
main :: IO()
main = do
argv <- getArgs
(opts, nonOpts) <- toolOpts argv
when (optVersion opts) $ do
putStrLn "Eintourname. NES Teenage Mutant Ninja Turtles - Tournament Fighters RLE utility. Version 0.1"
exitSuccess
when (optHelp opts) $ do
putStrLn usage
exitSuccess
let action = optAction opts
when (action == NoAction) $ do
putStrLn "Supply action flag"
putStrLn usage
exitFailure
if action == Decode
then do
when (length nonOpts /= 2) $ do
putStrLn "Supply exactly one file name and one offset for decoding"
putStrLn usage
exitFailure
let [fileName, sOffset] = nonOpts
input <- Bs.readFile fileName
let inputU8 = drop (read sOffset) $ Bs.unpack input
decoded = concatMap decode . readRleCode $ inputU8
Bs.writeFile "decoded.bin" (Bs.pack decoded)
else do --encoding
when (length nonOpts /= 1) $ do
putStrLn "Supply exactly one file name for encoding"
putStrLn usage
exitFailure
let [fileName] = nonOpts
input <- Bs.readFile fileName
let inputU8 = Bs.unpack input
encoded = writeRleCode . encode $ inputU8
Bs.writeFile "encoded.bin" (Bs.pack encoded)
|
romhack/eintourname
|
eintourname.hs
|
Haskell
|
mit
| 5,824
|
module HConsole where
import Block
import Mino
import Graphics.Gloss
-- constants
scorePaneBackgroundColor, scorePaneBorderColor, scorePaneTextColor :: Color
scorePaneWidth, scorePaneInnerWidth, scorePaneHeight, scorePaneInnerHeight, scoreTextScale :: Float
scorePaneTextLocation, scorePaneBoxLocation, scorePaneLabelLocation :: (Float,Float)
scorePaneBackgroundColor = black
scorePaneBorderColor = makeColorI 0xFF 0xFF 0xFF 0xFF
scorePaneTextColor = makeColorI 0xFF 0xFF 0xFF 0xFF
scorePaneWidth = 200.0
scorePaneInnerWidth = 195.0
scorePaneHeight = 50.0
scorePaneInnerHeight = 45.0
scorePaneTextLocation = (-60,230)
scorePaneBoxLocation = (-10,240)
scorePaneLabelLocation = (-60,270)
scoreTextScale = 0.2
nextBoxBackgroundColor, nextBoxBorderColor, nextTextColor :: Color
nextBoxWidth, nextBoxInnerWidth, nextBoxHeight, nextBoxInnerHeight, nextTextScale :: Float
nextBlockLocation, nextBoxLocation, nextTextLocation :: (Float,Float)
nextBoxBackgroundColor = black
nextBoxBorderColor = white
nextTextColor = white
nextBoxWidth = 120.0
nextBoxInnerWidth = 115.0
nextBoxHeight = 80.0
nextBoxInnerHeight = 75.0
nextBlockLocation = (270,180)
nextBoxLocation = (160,150)
nextTextLocation = (150,200)
nextTextScale = 0.15
holdBoxBackgroundColor, holdBoxBorderColor, holdBoxTextColor :: Color
holdBoxWidth, holdBoxInnerWidth, holdBoxHeight, holdBoxInnerHeight, holdTextScale :: Float
holdBlockLocation, holdBoxLocation, holdTextLocation :: (Float,Float)
holdBoxBackgroundColor = black
holdBoxBorderColor = white
holdBoxTextColor = white
holdBoxWidth = 120.0
holdBoxInnerWidth = 115.0
holdBoxHeight = 80.0
holdBoxInnerHeight = 75.0
holdBlockLocation = (-70,180)
holdBoxLocation = (-180,150)
holdTextLocation = (-190,200)
holdTextScale = 0.15
levelBoxBackgroundColor, levelBoxBorderColor, levelBoxTextColor :: Color
levelBoxWidth, levelBoxInnerWidth, levelBoxHeight, levelBoxInnerHeight, levelTextScale :: Float
levelBlockLocation, levelBoxLocation, levelTextLocation, levelLabelLocation :: (Float,Float)
levelBoxBackgroundColor = black
levelBoxBorderColor = white
levelBoxTextColor = white
levelBoxWidth = 100.0
levelBoxInnerWidth = 95.0
levelBoxHeight = 50.0
levelBoxInnerHeight = 45.0
levelBlockLocation = (-60,50)
levelBoxLocation = (-170,20)
levelTextLocation = (-170,15)
levelLabelLocation = (-190,70)
levelTextScale = 0.15
renderScorePane :: Int -> Picture
renderScorePane score = pictures [scoreBorderBox, scoreBox, scoreText, scoreLabel]
where (tX,tY) = scorePaneTextLocation
(bX,bY) = scorePaneBoxLocation
(lX,lY) = scorePaneLabelLocation
scoreText = color scorePaneTextColor $
translate tX tY $
scale scoreTextScale scoreTextScale $
text $ show score
scoreLabel = color black $
translate lX lY $
scale 0.15 0.15 $
text "Score"
scoreBorderBox = color scorePaneBorderColor $
translate bX bY $
rectangleSolid scorePaneWidth scorePaneHeight
scoreBox = color scorePaneBackgroundColor $
translate bX bY $
rectangleSolid scorePaneInnerWidth scorePaneInnerHeight
renderNextMino :: Mino -> Picture
renderNextMino m = pictures [nextBorderBox, nextBox, nextBlocks, nextLabel]
where (tX,tY) = nextBlockLocation
(bX,bY) = nextBoxLocation
(lX,lY) = nextTextLocation
nextBlocks = pictures $ (renderBlock tX tY) <$> (minoBlocks m)
nextBorderBox = color nextBoxBorderColor $
translate bX bY $
rectangleSolid nextBoxWidth nextBoxHeight
nextBox = color nextBoxBackgroundColor $
translate bX bY $
rectangleSolid nextBoxInnerWidth nextBoxInnerHeight
nextLabel = color black $
translate lX lY $
scale nextTextScale nextTextScale $
text $ "Next"
renderHoldMino :: Maybe Mino -> Picture
renderHoldMino m = pictures [holdBorderBox, holdBox, holdBlocks, holdLabel]
where (tX,tY) = holdBlockLocation
(bX,bY) = holdBoxLocation
(lX,lY) = holdTextLocation
holdBlocks = case m of
Nothing -> pictures []
Just mino -> pictures $ (renderBlock tX tY) <$> (minoBlocks mino)
holdBorderBox = color holdBoxBorderColor $
translate bX bY $
rectangleSolid holdBoxWidth holdBoxHeight
holdBox = color holdBoxBackgroundColor $
translate bX bY $
rectangleSolid holdBoxInnerWidth holdBoxInnerHeight
holdLabel = color black $
translate lX lY $
scale holdTextScale holdTextScale $
text $ "Hold"
renderLevelBox :: Int -> Picture
renderLevelBox l = pictures [levelBorderBox, levelBox, levelText, levelLabel]
where (tX,tY) = levelTextLocation
(bX,bY) = levelBoxLocation
(lX,lY) = levelLabelLocation
levelText = color levelBoxTextColor $
translate tX tY $
scale levelTextScale levelTextScale $
text $ show l
levelBorderBox = color levelBoxBorderColor $
translate bX bY $
rectangleSolid levelBoxWidth levelBoxHeight
levelBox = color levelBoxBackgroundColor $
translate bX bY $
rectangleSolid levelBoxInnerWidth levelBoxInnerHeight
levelLabel = color black $
translate lX lY $
scale levelTextScale levelTextScale $
text $ "Level"
--data AltPicture = AltText String
-- | Nada
-- deriving (Show)
--renderAlt :: AltPicture -> IO()
--renderAlt a = case a of
-- AltText str -> do
-- GL.blend $= GL.Disabled
-- GL.preservingMatrix $ GLUT.renderString GLUT.Roman str
-- GL.blend $= GL.Enabled
-- Nada -> pure
|
maple-shaft/HaskellTetris
|
src/HConsole.hs
|
Haskell
|
mit
| 6,272
|
{-# LANGUAGE ScopedTypeVariables #-}
import Gauge.Main
import qualified Data.ByteString.Char8 as S
import qualified Data.ByteString.Builder as BB
import Data.Monoid
import qualified Data.Streaming.ByteString.Builder as BB
main :: IO ()
main = defaultMain [ bgroup "Data.Streaming.ByteString.Builder.toByteStringIO"
(benchmarks bIO b100_10000 b10000_100 b10000_10000)
, bgroup "Data.ByteString.Builder.toLazyByteString"
(benchmarks bLazy b100_10000 b10000_100 b10000_10000)
]
where
bIO = whnfIO . BB.toByteStringIO (const (return ()))
bLazy = nf BB.toLazyByteString
benchmarks run bld100_10000 bld10000_100 bld10000_10000 =
[ bench' run bld100_10000 100 10000
, bench' run bld10000_100 10000 100
, bench' run bld10000_10000 10000 10000
]
bench' :: (b -> Benchmarkable) -> b -> Int -> Int -> Benchmark
bench' run bld' len reps = bench (show len ++ "/" ++ show reps) (run bld')
b100_10000 = bld BB.byteString 100 10000
b10000_100 = bld BB.byteString 10000 100
b10000_10000 = bld BB.byteString 10000 10000
bld :: Data.Monoid.Monoid a => (S.ByteString -> a) -> Int -> Int -> a
bld f len reps = mconcat (replicate reps (f (S.replicate len 'x')))
|
fpco/streaming-commons
|
bench/builder-to-bytestring-io.hs
|
Haskell
|
mit
| 1,310
|
--
-- Manage asynchronous evaluation tasks
--
module Codex.Tasks (
TaskGroup, Queue,
createTaskGroup, forkTask,
createQueue, addQueue, cancelAll
) where
import Control.Monad.IO.Class
import Control.Concurrent
import Control.Exception (bracket_)
-- | A task group: a quantity semaphore used to throttle the number of
-- concurrent threads.
newtype TaskGroup = TaskGroup QSem
-- Create a task group with n concurrent slots
createTaskGroup :: MonadIO m => Int -> m TaskGroup
createTaskGroup n = liftIO (TaskGroup <$> newQSem n)
-- Fork a new IO action under a task group
forkTask :: MonadIO m => TaskGroup -> IO () -> m ThreadId
forkTask (TaskGroup qsem) action
= liftIO $ forkIO (bracket_ (waitQSem qsem) (signalQSem qsem) action)
-- | a queue for pending evaluations
newtype Queue = Queue (MVar [ThreadId])
createQueue :: MonadIO m => m Queue
createQueue
= liftIO (Queue <$> newMVar [])
addQueue :: MonadIO m => ThreadId -> Queue -> m ()
addQueue threadId (Queue v)
= liftIO $ modifyMVar_ v (\list -> return (threadId:list))
cancelAll :: MonadIO m => Queue -> m ()
cancelAll (Queue v)
= liftIO $ modifyMVar_ v (\list -> mapM_ killThread list >> return [])
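-- A usage sketch (the 4-slot limit and @runEvaluation submission@ are
-- made-up placeholders, not part of this module):
--
--   group <- createTaskGroup 4            -- at most 4 evaluations at once
--   queue <- createQueue
--   tid   <- forkTask group (runEvaluation submission)
--   addQueue tid queue
--   ...
--   cancelAll queue                       -- kill every queued thread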
|
pbv/codex
|
src/Codex/Tasks.hs
|
Haskell
|
mit
| 1,210
|
module Nagari where
import Control.Monad
import Data.Char
import qualified Data.List as L
import Data.Monoid
import Prelude hiding (filter, iterate, take, takeWhile, map)
import qualified Prelude as P
----------------
-- Data types --
----------------
-- | Parser combinator type.
newtype Parser a = Parser { runParser :: String -> [(a, String)] }
---------------
-- Instances --
---------------
instance Monoid (Parser a) where
-- | The identity function for another parser when combined with `mappend`.
mempty = Parser $ const []
-- | Allows forking of parsing logic, concatenating the results of several
-- parsers into one parser result.
(Parser f) `mappend` (Parser g) = Parser $ \xs ->
let fResult = f xs
gResult = g xs
in fResult ++ gResult
instance MonadPlus Parser where
mzero = mempty
mplus = mappend
instance Functor Parser where
-- | Allows for mapping over parser results with a function `f`.
fmap f p = Parser $ \xs ->
[(f y, ys) | (y, ys) <- runParser p xs]
instance Monad Parser where
-- | Always succeeds at parsing a value `x`.
return x = Parser $ \xs -> [(x, xs)]
-- | Allows for combination of parsers.
Parser p >>= f = Parser $ \xs ->
concat [runParser (f y) ys | (y, ys) <- p xs]
-- | Always fails at parsing a value.
fail _ = Parser $ const []
----------------------
-- Parsers builders --
----------------------
err :: String -> Parser a
err xs = Parser $ \ys -> error $ xs ++ " near '" ++ ys ++ "'\n"
-- | Alias for `mplus`.
and :: Parser a -> Parser a -> Parser a
and = mplus
-- | Builds a parser that first attempts to parse with a parser `p` and falls
-- back to parsing with a parser `q` on failure.
or :: Parser a -> Parser a -> Parser a
p `or` q = Parser $ \xs -> case runParser p xs of
[] -> runParser q xs
r -> r
-- | Builds a parser that first attempts to parse with a parser `p` and falls
-- back to parsing with a parser `q` on failure. Parser result type uses
-- `Either`.
or' :: Parser a -> Parser b -> Parser (Either b a)
p `or'` q = Parser $ \xs ->
case runParser p xs of
[] -> case runParser q xs of
[] -> []
r2 -> [(Left y, ys) | (y, ys) <- r2]
r1 -> [(Right y, ys) | (y, ys) <- r1]
-- | Alias for `fmap`.
map :: (a -> b) -> Parser a -> Parser b
map = fmap
-- | Succeeds at parsing a single character if the given predicate is true for
-- the parser result.
takeOneIf :: (Char -> Bool) -> Parser Char
takeOneIf p = Parser $ \xs -> case xs of
[] -> []
y:ys -> [(y, ys) | p y]
takeOneIf' :: (Char -> Bool) -> Parser Char
takeOneIf' p = do
x <- char
if p x then return x else fail ""
-- | Builds a parser which will apply itself to a string the given number of
-- times.
take :: Int -> Parser a -> Parser [a]
take = replicateM
-- | Used as helper function by `takeAll`.
takeAll' :: Parser a -> Parser a
takeAll' p = Parser $ \xs ->
let rs = runParser p xs
in rs ++ concat [runParser (takeAll' p) ys | (_, ys) <- rs]
-- | Builds a parser which will apply itself to a string until further
-- applications yield no results.
takeAll :: Parser a -> Parser [a]
takeAll p = Parser $ \xs -> case runParser (takeAll' p) xs of
[] -> []
rs -> let unParsed = snd . last $ rs
results = P.map fst rs
in [(results, unParsed)]
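-- A hand-checked example:
--   runParser (takeAll digit) "123abc" == [("123", "abc")]
-- and when the parser cannot apply even once (e.g. input "abc"), the
-- result is [].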
-- | Builds a parser that will succeed as long as the predicate `p` is true for
-- characters in the input stream.
takeWhile :: (Char -> Bool) -> Parser String
takeWhile p = Parser $ \xs -> case xs of
[] -> []
_ -> let (xsInit, xsTail) = span p xs
in [(xsInit, xsTail) | not . null $ xsInit]
-- | Finds the index of the first occurrence of a list `xs` in a list `ys`.
findIn :: (Eq a) => [a] -> [a] -> Maybe Int
findIn _ [] = Nothing
findIn [] _ = Nothing
findIn xs ys = L.elemIndex True $ L.map (L.isPrefixOf xs) (L.tails ys)
-- | Builds a parser which parses a string until an occurrence of string `s` is
-- found. Fails if nothing is found.
takeUntil :: String -> Parser String
takeUntil s = Parser $ \xs -> case findIn s xs of
Nothing -> []
Just i -> [splitAt i xs]
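-- For example (hand-checked):
--   runParser (takeUntil "->") "ab->cd" == [("ab", "->cd")]
-- and the result is [] when the delimiter never occurs in the input.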
-- | Builds a parser which performs its action and then consumes any whitespace
-- after the parsed content.
token :: Parser a -> Parser a
token p = do
x <- p
takeWhile isSpace
return x
-- | Parses a sequence of letters.
letters :: Parser String
letters = takeWhile isAlpha
-- | Parses a tokenized sequence of letters.
word :: Parser String
word = token letters
-- | Parses a sequence of digits and returns its integer value.
number :: Parser Int
number = map read $ takeWhile isDigit
-- | Parses a specific string from the input.
accept :: String -> Parser String
accept s = do
t <- take (length s) char
if s == t then return t else fail ""
------------------
-- Core parsers --
------------------
-- | Parses a single character.
char :: Parser Char
char = Parser $ \xs -> case xs of
[] -> []
y:ys -> [(y, ys)]
-- | Parses a single whitespace character.
space :: Parser Char
space = takeOneIf isSpace
-- | Parses a single alphabetical character.
alpha :: Parser Char
alpha = takeOneIf isAlpha
-- | Parses a single digit character.
digit :: Parser Char
digit = takeOneIf isDigit
-- | Parses a single alpha-numerical character.
alphaNum :: Parser Char
alphaNum = takeOneIf isAlphaNum
-- | Parses one of a given character `x`.
lit :: Char -> Parser Char
lit x = takeOneIf (==x)
-- | Succeeds at parsing a character which is not the given character `x`.
unLit :: Char -> Parser Char
unLit x = takeOneIf (/=x)
|
davesque/nagari
|
Nagari.hs
|
Haskell
|
mit
| 5,631
|
module Alder.Unique
( -- * Unique values
Unique
-- * Supplying 'Unique's
, MonadSupply(..)
-- * Tagging values
, Tagged(..)
, tag
, untag
) where
import Control.Monad
type Unique = Int
class Monad m => MonadSupply m where
getUnique :: m Unique
data Tagged a = !Unique :< a
tag :: MonadSupply m => a -> m (Tagged a)
tag a = liftM (:< a) getUnique
untag :: Tagged a -> a
untag (_ :< a) = a
|
ghcjs/ghcjs-sodium
|
src/Alder/Unique.hs
|
Haskell
|
mit
| 440
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-trigger-action.html
module Stratosphere.ResourceProperties.GlueTriggerAction where
import Stratosphere.ResourceImports
-- | Full data type definition for GlueTriggerAction. See 'glueTriggerAction'
-- for a more convenient constructor.
data GlueTriggerAction =
GlueTriggerAction
{ _glueTriggerActionArguments :: Maybe Object
, _glueTriggerActionJobName :: Maybe (Val Text)
} deriving (Show, Eq)
instance ToJSON GlueTriggerAction where
toJSON GlueTriggerAction{..} =
object $
catMaybes
[ fmap (("Arguments",) . toJSON) _glueTriggerActionArguments
, fmap (("JobName",) . toJSON) _glueTriggerActionJobName
]
-- | Constructor for 'GlueTriggerAction' containing required fields as
-- arguments.
glueTriggerAction
:: GlueTriggerAction
glueTriggerAction =
GlueTriggerAction
{ _glueTriggerActionArguments = Nothing
, _glueTriggerActionJobName = Nothing
}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-trigger-action.html#cfn-glue-trigger-action-arguments
gtaArguments :: Lens' GlueTriggerAction (Maybe Object)
gtaArguments = lens _glueTriggerActionArguments (\s a -> s { _glueTriggerActionArguments = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-trigger-action.html#cfn-glue-trigger-action-jobname
gtaJobName :: Lens' GlueTriggerAction (Maybe (Val Text))
gtaJobName = lens _glueTriggerActionJobName (\s a -> s { _glueTriggerActionJobName = a })
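-- Usage sketch (editor's addition, hypothetical job name): the record
-- default plus the generated lenses is the usual way to build one of these,
-- e.g. @glueTriggerAction & gtaJobName ?~ Literal "my-glue-job"@, assuming
-- the lens operators and the 'Literal' constructor re-exported via
-- "Stratosphere.ResourceImports".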
|
frontrowed/stratosphere
|
library-gen/Stratosphere/ResourceProperties/GlueTriggerAction.hs
|
Haskell
|
mit
| 1,688
|
{-# htermination (inRangeChar :: Tup2 Char Char -> Char -> MyBool) #-}
import qualified Prelude
data MyBool = MyTrue | MyFalse
data List a = Cons a (List a) | Nil
data Tup2 a b = Tup2 a b ;
data Char = Char MyInt ;
data MyInt = Pos Nat | Neg Nat ;
data Nat = Succ Nat | Zero ;
data Ordering = LT | EQ | GT ;
asAs :: MyBool -> MyBool -> MyBool;
asAs MyFalse x = MyFalse;
asAs MyTrue x = x;
primCharToInt :: Char -> MyInt;
primCharToInt (Char x) = x;
fromEnumChar :: Char -> MyInt
fromEnumChar = primCharToInt;
inRangeI vv = fromEnumChar vv;
primCmpNat :: Nat -> Nat -> Ordering;
primCmpNat Zero Zero = EQ;
primCmpNat Zero (Succ y) = LT;
primCmpNat (Succ x) Zero = GT;
primCmpNat (Succ x) (Succ y) = primCmpNat x y;
primCmpInt :: MyInt -> MyInt -> Ordering;
primCmpInt (Pos Zero) (Pos Zero) = EQ;
primCmpInt (Pos Zero) (Neg Zero) = EQ;
primCmpInt (Neg Zero) (Pos Zero) = EQ;
primCmpInt (Neg Zero) (Neg Zero) = EQ;
primCmpInt (Pos x) (Pos y) = primCmpNat x y;
primCmpInt (Pos x) (Neg y) = GT;
primCmpInt (Neg x) (Pos y) = LT;
primCmpInt (Neg x) (Neg y) = primCmpNat y x;
compareMyInt :: MyInt -> MyInt -> Ordering
compareMyInt = primCmpInt;
esEsOrdering :: Ordering -> Ordering -> MyBool
esEsOrdering LT LT = MyTrue;
esEsOrdering LT EQ = MyFalse;
esEsOrdering LT GT = MyFalse;
esEsOrdering EQ LT = MyFalse;
esEsOrdering EQ EQ = MyTrue;
esEsOrdering EQ GT = MyFalse;
esEsOrdering GT LT = MyFalse;
esEsOrdering GT EQ = MyFalse;
esEsOrdering GT GT = MyTrue;
not :: MyBool -> MyBool;
not MyTrue = MyFalse;
not MyFalse = MyTrue;
fsEsOrdering :: Ordering -> Ordering -> MyBool
fsEsOrdering x y = not (esEsOrdering x y);
ltEsMyInt :: MyInt -> MyInt -> MyBool
ltEsMyInt x y = fsEsOrdering (compareMyInt x y) GT;
inRangeChar :: Tup2 Char Char -> Char -> MyBool
inRangeChar (Tup2 c c') ci = asAs (ltEsMyInt (fromEnumChar c) (inRangeI ci)) (ltEsMyInt (inRangeI ci) (fromEnumChar c'));
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/basic_haskell/inRange_2.hs
|
Haskell
|
mit
| 1,938
|
{-# LANGUAGE OverloadedStrings, Arrows #-}
module Main where
import Text.XML.HXT.Core
import qualified Data.ByteString.Lazy.Char8 as BL
import Data.List
import Text.XML.HXT.Arrow.XmlState.RunIOStateArrow
import Text.XML.HXT.Arrow.XmlState.TypeDefs
type ImgState = [Image]
type ImgArrow = IOSLA (XIOState ImgState) XmlTree XmlTree
data Image = Image {
mimeType :: String
, base64Data :: BL.ByteString
} deriving (Show)
extractInlineImages :: ImgArrow
extractInlineImages = processTopDown ( extractImg `when` isInlineImg)
isInlineImg = isElem
>>> hasName "img"
>>> hasAttr "src"
>>> getAttrValue "src"
>>> isA isDataURI
where isDataURI = isPrefixOf "data:image/"
extractImg =
processAttrl (
(changeAttrValue . const $< createImage)
`when` hasName "src"
)
-- We are on the src attribute node at this point, so its children hold the src value
createImage :: IOSLA (XIOState ImgState) XmlTree String
createImage =
(saveImg $< xshow getChildren)
-- >>> changeUserState (\x imgs -> (Image x ""):imgs )
saveImg :: String -> IOSLA (XIOState ImgState) XmlTree String
saveImg string =
  -- in a real app, process the data URI string and save the image to the DB
arrIO0 (do
putStrLn "Process this data and return a new URL"
putStrLn string
return "THIS URL IS RETURN AFTER IMAGE IS CREATED IN DB")
main = do
html <- getContents
let doc = readString [withParseHTML yes, withWarnings no] html
(s, xs) <- runIOSLA
( doc
>>> extractInlineImages
>>> writeDocument [withIndent yes ,withOutputEncoding utf8 ] "-"
) (initialState []) undefined
-- print $ xioUserState s
return ()
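-- Usage sketch (editor's addition): the program reads HTML on stdin and
-- writes the rewritten document to stdout, so a run might look like
-- @cat page.html | runhaskell Main.hs@ (file name hypothetical).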
|
danchoi/datauri
|
Main.hs
|
Haskell
|
mit
| 1,775
|
module Main where
factors :: Int -> [Int]
factors n = [x | x <- [1..n], n `mod` x == 0]
prime :: Int -> Bool
prime n = factors n == [1, n]
primesTo :: Int -> [Int]
primesTo n = [x | x <- [2..n], prime x]
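-- The module is declared as Main but had no entry point; a minimal one
-- (editor's addition) exercising the definitions above:
main :: IO ()
main = print (primesTo 20)  -- [2,3,5,7,11,13,17,19]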
|
Crossroadsman/ProgrammingInHaskell
|
05/primes.hs
|
Haskell
|
apache-2.0
| 231
|
module Main ( main ) where
import Control.Eff.Exception
import Control.Eff.Lift
import System.Environment ( getArgs )
import Codec.FFMpeg
import Codec.FFMpeg.Format
probe :: String -> IO ()
probe fn = do
putStrLn fn
me <- runLift $ runExc (openInput fn (return (1 :: Int)))
case me of
Left e -> print (e :: IOError)
Right _ -> return ()
return ()
main :: IO ()
main = initFFMpeg >> getArgs >>= mapM_ probe
|
waldheinz/ffmpeg-effects
|
demos/Probe.hs
|
Haskell
|
apache-2.0
| 447
|
{-# LANGUAGE FlexibleContexts #-}
import qualified Data.Text as T
import qualified Data.ByteString as B
import JtagRW ( UsbBlasterState
, withUSBBlaster
, toBits, fromBits
, virWrite, vdrWrite, vdrWriteRead
, virAddrOff, virAddrRead, virAddrWrite
, flush
, printState
)
import Protolude
--
-- FOR: DE0_Nano_project_JTAG.qar
-- Derived from https://github.com/GeezerGeek/open_sld/blob/master/sld_interface.py,
-- And: http://sourceforge.net/p/ixo-jtag/code/HEAD/tree/usb_jtag/
-- Load the initialTest.sof from open_sld on the board and run... ;-)
-- The initial test wires the DE0-Nano LED bank up to the SLD and prints the last VDR
outLed :: Word8 -> (StateT UsbBlasterState) IO (Maybe ByteString)
outLed v = do
_ <- virWrite 1
_ <- vdrWrite $ toBits 5 v
_ <- virWrite 0
_ <- flush
return $ Just "todo"
doStuff :: (StateT UsbBlasterState) IO (Maybe B.ByteString)
doStuff = do
_ <- mapM outLed [0..127]
_ <- mapM outLed $ join $ replicate 16 [1, 2, 4, 8, 16, 32, 64, 32, 16, 8, 4, 2, 1]
_ <- mapM outLed [127,126..0]
return Nothing
main :: IO ()
main = do
dh <- withUSBBlaster 0x10 10 5 doStuff
case dh of
Just err -> putStrLn $ T.pack $ "Error:" ++ show err
Nothing -> pure ()
|
tau-tao/FPGAIPFilter
|
test/FtdiTest1.hs
|
Haskell
|
bsd-3-clause
| 1,437
|
{-# LANGUAGE DeriveDataTypeable #-}
module Text.RE.Internal.QQ where
import Control.Exception
import Data.Typeable
import Language.Haskell.TH.Quote
data QQFailure =
QQFailure
{ _qqf_context :: String
, _qqf_component :: String
}
deriving (Show,Typeable)
instance Exception QQFailure where
qq0 :: String -> QuasiQuoter
qq0 ctx =
QuasiQuoter
{ quoteExp = const $ throw $ QQFailure ctx "expression"
, quotePat = const $ throw $ QQFailure ctx "pattern"
, quoteType = const $ throw $ QQFailure ctx "type"
, quoteDec = const $ throw $ QQFailure ctx "declaration"
}
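-- Usage sketch (editor's addition, hypothetical): a library would normally
-- refine this stub for the positions it supports, e.g.
-- @(qq0 "re") { quoteExp = myCompileRegex }@, so that using the quoter in
-- any other position throws a 'QQFailure' naming the context.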
|
cdornan/idiot
|
Text/RE/Internal/QQ.hs
|
Haskell
|
bsd-3-clause
| 646
|
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Linear.Extra.Random where
import Control.Monad.Random
import Linear
import Control.Lens
instance Random r => Random (V2 r) where
randomR (a,b) g = flip runRand g $ do
x <- getRandomR (a^._x,b^._x)
y <- getRandomR (a^._y,b^._y)
return (V2 x y)
random g = flip runRand g $ do
x <- getRandom
y <- getRandom
return (V2 x y)
instance Random r => Random (V3 r) where
randomR (a,b) g = flip runRand g $ do
x <- getRandomR (a^._x,b^._x)
y <- getRandomR (a^._y,b^._y)
z <- getRandomR (a^._z,b^._z)
return (V3 x y z)
random g = flip runRand g $ do
x <- getRandom
y <- getRandom
z <- getRandom
return (V3 x y z)
instance Random r => Random (V4 r) where
randomR (a,b) g = flip runRand g $ do
x <- getRandomR (a^._x,b^._x)
y <- getRandomR (a^._y,b^._y)
z <- getRandomR (a^._z,b^._z)
w <- getRandomR (a^._w,b^._w)
return (V4 x y z w)
random g = flip runRand g $ do
x <- getRandom
y <- getRandom
z <- getRandom
w <- getRandom
return (V4 x y z w)
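-- Usage sketch (editor's addition): with these orphan instances in scope, a
-- point in the unit square can be drawn through the usual MonadRandom API,
-- e.g. @evalRand (getRandomR (V2 0 0, V2 1 1)) gen :: V2 Double@ for some
-- generator @gen@.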
|
lukexi/linear-extra
|
src/Linear/Extra/Random.hs
|
Haskell
|
bsd-3-clause
| 1,196
|
{-# LANGUAGE TemplateHaskell #-}
module Render.MEdgeT where
import Data.Word (Word16)
import Control.Lens (makeLenses)
import qualified Data.ByteString.Lazy as BL
import Types
import Util.Binary
import qualified Constants
mEdgeDiskSize :: Int
mEdgeDiskSize = 2 * Constants.sizeOfShort
makeLenses ''MEdgeT
newMEdgeT :: BL.ByteString -> MEdgeT
newMEdgeT = runGet getMEdgeT
getMEdgeT :: Get MEdgeT
getMEdgeT = do
v <- getWord162
return MEdgeT { _meV = v
, _meCachedEdgeOffset = 0
}
|
ksaveljev/hake-2
|
src/Render/MEdgeT.hs
|
Haskell
|
bsd-3-clause
| 529
|
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
-- | Interface between OpenCV and inline-c(pp) (Haskell)
module OpenCV.Internal.C.Inline ( openCvCtx ) where
import "base" Foreign.Ptr ( FunPtr )
import "base" Data.Monoid ( (<>) )
import qualified "containers" Data.Map as M
import qualified "inline-c" Language.C.Inline as C
import qualified "inline-c" Language.C.Types as C
import qualified "inline-c" Language.C.Inline.Context as C
import qualified "inline-c-cpp" Language.C.Inline.Cpp as C
import "this" OpenCV.Internal.C.Types
-- | Context useful to work with the OpenCV library
--
-- Based on 'C.cppCtx', 'C.bsCtx' and 'C.vecCtx'.
--
-- 'C.ctxTypesTable': converts OpenCV basic types to their counterparts in
-- "OpenCV.Internal.C.Inline".
--
-- No 'C.ctxAntiQuoters'.
openCvCtx :: C.Context
openCvCtx = C.cppCtx <> C.bsCtx <> C.vecCtx <> ctx
where
ctx = mempty
{ C.ctxTypesTable = openCvTypesTable
}
openCvTypesTable :: C.TypesTable
openCvTypesTable = M.fromList
[ ( C.TypeName "bool" , [t| C.CInt |] )
, ( C.TypeName "Exception" , [t| C'CvCppException |] )
, ( C.TypeName "Matx12f" , [t| C'Matx12f |] )
, ( C.TypeName "Matx12d" , [t| C'Matx12d |] )
, ( C.TypeName "Matx13f" , [t| C'Matx13f |] )
, ( C.TypeName "Matx13d" , [t| C'Matx13d |] )
, ( C.TypeName "Matx14f" , [t| C'Matx14f |] )
, ( C.TypeName "Matx14d" , [t| C'Matx14d |] )
, ( C.TypeName "Matx16f" , [t| C'Matx16f |] )
, ( C.TypeName "Matx16d" , [t| C'Matx16d |] )
, ( C.TypeName "Matx21f" , [t| C'Matx21f |] )
, ( C.TypeName "Matx21d" , [t| C'Matx21d |] )
, ( C.TypeName "Matx22f" , [t| C'Matx22f |] )
, ( C.TypeName "Matx22d" , [t| C'Matx22d |] )
, ( C.TypeName "Matx23f" , [t| C'Matx23f |] )
, ( C.TypeName "Matx23d" , [t| C'Matx23d |] )
, ( C.TypeName "Matx31f" , [t| C'Matx31f |] )
, ( C.TypeName "Matx31d" , [t| C'Matx31d |] )
, ( C.TypeName "Matx32f" , [t| C'Matx32f |] )
, ( C.TypeName "Matx32d" , [t| C'Matx32d |] )
, ( C.TypeName "Matx33f" , [t| C'Matx33f |] )
, ( C.TypeName "Matx33d" , [t| C'Matx33d |] )
, ( C.TypeName "Matx34f" , [t| C'Matx34f |] )
, ( C.TypeName "Matx34d" , [t| C'Matx34d |] )
, ( C.TypeName "Matx41f" , [t| C'Matx41f |] )
, ( C.TypeName "Matx41d" , [t| C'Matx41d |] )
, ( C.TypeName "Matx43f" , [t| C'Matx43f |] )
, ( C.TypeName "Matx43d" , [t| C'Matx43d |] )
, ( C.TypeName "Matx44f" , [t| C'Matx44f |] )
, ( C.TypeName "Matx44d" , [t| C'Matx44d |] )
, ( C.TypeName "Matx51f" , [t| C'Matx51f |] )
, ( C.TypeName "Matx51d" , [t| C'Matx51d |] )
, ( C.TypeName "Matx61f" , [t| C'Matx61f |] )
, ( C.TypeName "Matx61d" , [t| C'Matx61d |] )
, ( C.TypeName "Matx66f" , [t| C'Matx66f |] )
, ( C.TypeName "Matx66d" , [t| C'Matx66d |] )
, ( C.TypeName "Vec2i" , [t| C'Vec2i |] )
, ( C.TypeName "Vec2f" , [t| C'Vec2f |] )
, ( C.TypeName "Vec2d" , [t| C'Vec2d |] )
, ( C.TypeName "Vec3i" , [t| C'Vec3i |] )
, ( C.TypeName "Vec3f" , [t| C'Vec3f |] )
, ( C.TypeName "Vec3d" , [t| C'Vec3d |] )
, ( C.TypeName "Vec4i" , [t| C'Vec4i |] )
, ( C.TypeName "Vec4f" , [t| C'Vec4f |] )
, ( C.TypeName "Vec4d" , [t| C'Vec4d |] )
, ( C.TypeName "Point2i" , [t| C'Point2i |] )
, ( C.TypeName "Point2f" , [t| C'Point2f |] )
, ( C.TypeName "Point2d" , [t| C'Point2d |] )
, ( C.TypeName "Point3i" , [t| C'Point3i |] )
, ( C.TypeName "Point3f" , [t| C'Point3f |] )
, ( C.TypeName "Point3d" , [t| C'Point3d |] )
, ( C.TypeName "Size2i" , [t| C'Size2i |] )
, ( C.TypeName "Size2f" , [t| C'Size2f |] )
, ( C.TypeName "Size2d" , [t| C'Size2d |] )
, ( C.TypeName "Rect2i" , [t| C'Rect2i |] )
, ( C.TypeName "Rect2f" , [t| C'Rect2f |] )
, ( C.TypeName "Rect2d" , [t| C'Rect2d |] )
, ( C.TypeName "RotatedRect" , [t| C'RotatedRect |] )
, ( C.TypeName "TermCriteria", [t| C'TermCriteria|] )
, ( C.TypeName "Scalar" , [t| C'Scalar |] )
, ( C.TypeName "Mat" , [t| C'Mat |] )
, ( C.TypeName "Range" , [t| C'Range |] )
, ( C.TypeName "KeyPoint" , [t| C'KeyPoint |] )
, ( C.TypeName "DMatch" , [t| C'DMatch |] )
--, ( C.TypeName "MSER" , [t| C'MSER |] )
, ( C.TypeName "Ptr_ORB" , [t| C'Ptr_ORB |] )
--, ( C.TypeName "BRISK" , [t| C'BRISK |] )
--, ( C.TypeName "KAZE" , [t| C'KAZE |] )
--, ( C.TypeName "AKAZE" , [t| C'AKAZE |] )
, ( C.TypeName "BFMatcher" , [t| C'BFMatcher |] )
, ( C.TypeName "Ptr_BackgroundSubtractorKNN" , [t| C'Ptr_BackgroundSubtractorKNN |] )
, ( C.TypeName "Ptr_BackgroundSubtractorMOG2", [t| C'Ptr_BackgroundSubtractorMOG2 |] )
, ( C.TypeName "VideoCapture", [t| C'VideoCapture |] )
, ( C.TypeName "MouseCallback" , [t| FunPtr C'MouseCallback |] )
, ( C.TypeName "TrackbarCallback", [t| FunPtr C'TrackbarCallback |] )
]
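-- Usage sketch (editor's addition): a module using this context would
-- typically start with @C.context openCvCtx@ and @C.include "..."@ Template
-- Haskell splices before any inline C++ quasi-quotes, so that the C type
-- names in the table above resolve to their Haskell counterparts.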
|
Cortlandd/haskell-opencv
|
src/OpenCV/Internal/C/Inline.hs
|
Haskell
|
bsd-3-clause
| 5,328
|
{-# LANGUAGE CPP #-}
---------------------------------------------------------------------------
-- |
-- Copyright : (C) 2015 Edward Kmett
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : Edward Kmett <ekmett@gmail.com>
-- Stability : experimental
-- Portability : non-portable
--
-- Common projection matrices: e.g. perspective/orthographic transformation
-- matrices.
--
-- Analytically derived inverses are also supplied, because they can be
-- much more accurate in practice than computing them through general
-- purpose means
---------------------------------------------------------------------------
module Linear.Projection
( lookAt
, perspective, inversePerspective
, infinitePerspective, inverseInfinitePerspective
, frustum, inverseFrustum
, ortho, inverseOrtho
) where
import Control.Lens hiding (index)
import Linear.V3
import Linear.V4
import Linear.Matrix
import Linear.Epsilon
import Linear.Metric
#ifdef HLINT
{-# ANN module "HLint: ignore Reduce duplication" #-}
#endif
-- | Build a look at view matrix
lookAt
:: (Epsilon a, Floating a)
=> V3 a -- ^ Eye
-> V3 a -- ^ Center
-> V3 a -- ^ Up
-> M44 a
lookAt eye center up =
V4 (V4 (xa^._x) (xa^._y) (xa^._z) xd)
(V4 (ya^._x) (ya^._y) (ya^._z) yd)
(V4 (-za^._x) (-za^._y) (-za^._z) zd)
(V4 0 0 0 1)
where za = normalize $ center - eye
xa = normalize $ cross za up
ya = cross xa za
xd = -dot xa eye
yd = -dot ya eye
zd = dot za eye
-- | Build a matrix for a symmetric perspective-view frustum
perspective
:: Floating a
=> a -- ^ FOV (y direction, in radians)
-> a -- ^ Aspect ratio
-> a -- ^ Near plane
-> a -- ^ Far plane
-> M44 a
perspective fovy aspect near far =
V4 (V4 x 0 0 0)
(V4 0 y 0 0)
(V4 0 0 z w)
(V4 0 0 (-1) 0)
where tanHalfFovy = tan $ fovy / 2
x = 1 / (aspect * tanHalfFovy)
y = 1 / tanHalfFovy
z = -(far + near) / (far - near)
w = -(2 * far * near) / (far - near)
-- | Build an inverse perspective matrix
inversePerspective
:: Floating a
=> a -- ^ FOV (y direction, in radians)
-> a -- ^ Aspect ratio
-> a -- ^ Near plane
-> a -- ^ Far plane
-> M44 a
inversePerspective fovy aspect near far =
V4 (V4 a 0 0 0 )
(V4 0 b 0 0 )
(V4 0 0 0 (-1))
(V4 0 0 c d )
where tanHalfFovy = tan $ fovy / 2
a = aspect * tanHalfFovy
b = tanHalfFovy
c = -(far - near) / (2 * far * near)
d = (far + near) / (2 * far * near)
-- | Build a perspective matrix per the classic @glFrustum@ arguments.
frustum
:: Floating a
=> a -- ^ Left
-> a -- ^ Right
-> a -- ^ Bottom
-> a -- ^ Top
-> a -- ^ Near
-> a -- ^ Far
-> M44 a
frustum l r b t n f =
V4 (V4 x 0 a 0)
(V4 0 y e 0)
(V4 0 0 c d)
(V4 0 0 (-1) 0)
where
rml = r-l
tmb = t-b
fmn = f-n
x = 2*n/rml
y = 2*n/tmb
a = (r+l)/rml
e = (t+b)/tmb
c = negate (f+n)/fmn
d = (-2*f*n)/fmn
inverseFrustum
:: Floating a
=> a -- ^ Left
-> a -- ^ Right
-> a -- ^ Bottom
-> a -- ^ Top
-> a -- ^ Near
-> a -- ^ Far
-> M44 a
inverseFrustum l r b t n f =
V4 (V4 rx 0 0 ax)
(V4 0 ry 0 by)
(V4 0 0 0 (-1))
(V4 0 0 rd cd)
where
hrn = 0.5/n
hrnf = 0.5/(n*f)
rx = (r-l)*hrn
ry = (t-b)*hrn
ax = (r+l)*hrn
by = (t+b)*hrn
cd = (f+n)*hrnf
rd = (n-f)*hrnf
-- | Build a matrix for a symmetric perspective-view frustum with a far plane at infinite
infinitePerspective
:: Floating a
=> a -- ^ FOV (y direction, in radians)
-> a -- ^ Aspect Ratio
-> a -- ^ Near plane
-> M44 a
infinitePerspective fovy a n =
V4 (V4 x 0 0 0)
(V4 0 y 0 0)
(V4 0 0 (-1) w)
(V4 0 0 (-1) 0)
where
t = n*tan(fovy/2)
b = -t
l = b*a
r = t*a
x = (2*n)/(r-l)
y = (2*n)/(t-b)
w = -2*n
inverseInfinitePerspective
:: Floating a
=> a -- ^ FOV (y direction, in radians)
-> a -- ^ Aspect Ratio
-> a -- ^ Near plane
-> M44 a
inverseInfinitePerspective fovy a n =
V4 (V4 rx 0 0 0)
(V4 0 ry 0 0)
(V4 0 0 0 (-1))
(V4 0 0 rw (-rw))
where
t = n*tan(fovy/2)
b = -t
l = b*a
r = t*a
hrn = 0.5/n
rx = (r-l)*hrn
ry = (t-b)*hrn
rw = -hrn
-- | Build an orthographic perspective matrix from 6 clipping planes.
-- This matrix takes the region delimited by these planes and maps it
-- to normalized device coordinates between [-1,1]
--
-- This call is designed to mimic the parameters to the OpenGL @glOrtho@
-- call, so it has a slightly strange convention: notably, the near and
-- far planes are negated.
--
-- Consequently:
--
-- @
-- 'ortho' l r b t n f !* 'V4' l b (-n) 1 = 'V4' (-1) (-1) (-1) 1
-- 'ortho' l r b t n f !* 'V4' r t (-f) 1 = 'V4' 1 1 1 1
-- @
--
-- Examples:
--
-- >>> ortho 1 2 3 4 5 6 !* V4 1 3 (-5) 1
-- V4 (-1.0) (-1.0) (-1.0) 1.0
--
-- >>> ortho 1 2 3 4 5 6 !* V4 2 4 (-6) 1
-- V4 1.0 1.0 1.0 1.0
ortho
:: Fractional a
=> a -- ^ Left
-> a -- ^ Right
-> a -- ^ Bottom
-> a -- ^ Top
-> a -- ^ Near
-> a -- ^ Far
-> M44 a
ortho l r b t n f =
V4 (V4 (-2*x) 0 0 ((r+l)*x))
(V4 0 (-2*y) 0 ((t+b)*y))
(V4 0 0 (2*z) ((f+n)*z))
(V4 0 0 0 1)
where x = recip(l-r)
y = recip(b-t)
z = recip(n-f)
-- | Build an inverse orthographic perspective matrix from 6 clipping planes
inverseOrtho
:: Fractional a
=> a -- ^ Left
-> a -- ^ Right
-> a -- ^ Bottom
-> a -- ^ Top
-> a -- ^ Near
-> a -- ^ Far
-> M44 a
inverseOrtho l r b t n f =
V4 (V4 x 0 0 c)
(V4 0 y 0 d)
(V4 0 0 z e)
(V4 0 0 0 1)
where x = 0.5*(r-l)
y = 0.5*(t-b)
z = 0.5*(n-f)
c = 0.5*(l+r)
d = 0.5*(b+t)
e = -0.5*(n+f)
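-- Usage sketch (editor's addition): a typical model-view-projection matrix
-- combines these helpers with matrix multiplication from "Linear.Matrix",
-- e.g. @perspective (pi/4) (16/9) 0.1 100 !*! lookAt (V3 0 1 5) (V3 0 0 0) (V3 0 1 0)@.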
|
phaazon/linear
|
src/Linear/Projection.hs
|
Haskell
|
bsd-3-clause
| 5,852
|
module DepthFirstSearchTest where
import qualified DepthFirstSearch as DFS
import qualified Data.Set as Set
import Test.Hspec
import Test.QuickCheck
import Debug.Trace
makeGraph :: DFS.Graph
makeGraph = DFS.AdjList [ [1, 2] -- 0
, [0, 3] -- 1
, [0] -- 2
, [1, 4] -- 3
, [3] -- 4
, [] -- 5
]
test = hspec $ do
describe "Dfs traversal" $ do
it "should have right start/endtime for dfs traversal" $ do
let results = DFS.depthFirstSearch makeGraph
-- DFS.DFSResult nodeId startTime endTime
([ DFS.DFSResult 0 0 9
, DFS.DFSResult 1 1 6
, DFS.DFSResult 3 2 5
, DFS.DFSResult 4 3 4
, DFS.DFSResult 2 7 8
, DFS.DFSResult 5 10 11
]
, 12 -- final time
, Set.empty) -- no node should be left.
== results
|
ashishnegi/hsalgos
|
test/DepthFirstSearchTest.hs
|
Haskell
|
bsd-3-clause
| 895
|
{-# LANGUAGE CPP, MagicHash, NondecreasingIndentation, UnboxedTuples #-}
-- -----------------------------------------------------------------------------
--
-- (c) The University of Glasgow, 2005-2007
--
-- Running statements interactively
--
-- -----------------------------------------------------------------------------
module ETA.Main.InteractiveEval (
#ifdef GHCI
RunResult(..), Status(..), Resume(..), History(..),
runStmt, runStmtWithLocation, runDecls, runDeclsWithLocation,
parseImportDecl, SingleStep(..),
resume,
abandon, abandonAll,
getResumeContext,
getHistorySpan,
getModBreaks,
getHistoryModule,
back, forward,
setContext, getContext,
availsToGlobalRdrEnv,
getNamesInScope,
getRdrNamesInScope,
moduleIsInterpreted,
getInfo,
exprType,
typeKind,
parseName,
showModule,
isModuleInterpreted,
compileExpr, dynCompileExpr,
Term(..), obtainTermFromId, obtainTermFromVal, reconstructType
#endif
) where
#ifdef GHCI
#include "HsVersions.h"
import ETA.Main.InteractiveEvalTypes
import ETA.Main.GhcMonad
import ETA.Main.HscMain
import ETA.HsSyn.HsSyn
import ETA.Main.HscTypes
import ETA.BasicTypes.BasicTypes ( HValue )
import ETA.Types.InstEnv
import ETA.Types.FamInstEnv ( FamInst, orphNamesOfFamInst )
import ETA.Types.TyCon
import ETA.Types.Type hiding( typeKind )
import ETA.TypeCheck.TcType hiding( typeKind )
import ETA.BasicTypes.Var
import ETA.BasicTypes.Id
import ETA.BasicTypes.Name hiding ( varName )
import ETA.BasicTypes.NameSet
import ETA.BasicTypes.Avail
import ETA.BasicTypes.RdrName
import ETA.BasicTypes.VarSet
import ETA.BasicTypes.VarEnv
import ETA.Interactive.ByteCodeInstr
import ETA.Interactive.Linker
import ETA.Main.DynFlags
import ETA.BasicTypes.Unique
import ETA.BasicTypes.UniqSupply
import ETA.BasicTypes.Module
import ETA.Utils.Panic
import ETA.Utils.UniqFM
import ETA.Utils.Maybes
import ETA.Main.ErrUtils
import ETA.BasicTypes.SrcLoc
import ETA.Main.BreakArray
import ETA.Interactive.RtClosureInspect
import ETA.Utils.Outputable
import ETA.Utils.FastString
import ETA.Utils.MonadUtils
import System.Mem.Weak
import System.Directory
import Data.Dynamic
import Data.Either
import Data.List (find)
import Control.Monad
import Foreign
import Foreign.C
import GHC.Exts
import Data.Array
import ETA.Utils.Exception
import Control.Concurrent
import System.IO.Unsafe
-- -----------------------------------------------------------------------------
-- running a statement interactively
getResumeContext :: GhcMonad m => m [Resume]
getResumeContext = withSession (return . ic_resume . hsc_IC)
data SingleStep
= RunToCompletion
| SingleStep
| RunAndLogSteps
isStep :: SingleStep -> Bool
isStep RunToCompletion = False
isStep _ = True
mkHistory :: HscEnv -> HValue -> BreakInfo -> History
mkHistory hsc_env hval bi = let
decls = findEnclosingDecls hsc_env bi
in History hval bi decls
getHistoryModule :: History -> Module
getHistoryModule = breakInfo_module . historyBreakInfo
getHistorySpan :: HscEnv -> History -> SrcSpan
getHistorySpan hsc_env hist =
let inf = historyBreakInfo hist
num = breakInfo_number inf
in case lookupUFM (hsc_HPT hsc_env) (moduleName (breakInfo_module inf)) of
Just hmi -> modBreaks_locs (getModBreaks hmi) ! num
_ -> panic "getHistorySpan"
getModBreaks :: HomeModInfo -> ModBreaks
getModBreaks hmi
| Just linkable <- hm_linkable hmi,
[BCOs _ modBreaks] <- linkableUnlinked linkable
= modBreaks
| otherwise
= emptyModBreaks -- probably object code
{- | Finds the enclosing top level function name -}
-- ToDo: a better way to do this would be to keep hold of the decl_path computed
-- by the coverage pass, which gives the list of lexically-enclosing bindings
-- for each tick.
findEnclosingDecls :: HscEnv -> BreakInfo -> [String]
findEnclosingDecls hsc_env inf =
let hmi = expectJust "findEnclosingDecls" $
lookupUFM (hsc_HPT hsc_env) (moduleName $ breakInfo_module inf)
mb = getModBreaks hmi
in modBreaks_decls mb ! breakInfo_number inf
-- | Update fixity environment in the current interactive context.
updateFixityEnv :: GhcMonad m => FixityEnv -> m ()
updateFixityEnv fix_env = do
hsc_env <- getSession
let ic = hsc_IC hsc_env
setSession $ hsc_env { hsc_IC = ic { ic_fix_env = fix_env } }
-- | Run a statement in the current interactive context. Statement
-- may bind multiple values.
runStmt :: GhcMonad m => String -> SingleStep -> m RunResult
runStmt = runStmtWithLocation "<interactive>" 1
-- | Run a statement in the current interactive context. Passing debug information
-- Statement may bind multiple values.
runStmtWithLocation :: GhcMonad m => String -> Int ->
String -> SingleStep -> m RunResult
runStmtWithLocation source linenumber expr step =
do
hsc_env <- getSession
breakMVar <- liftIO $ newEmptyMVar -- wait on this when we hit a breakpoint
statusMVar <- liftIO $ newEmptyMVar -- wait on this when a computation is running
-- Turn off -fwarn-unused-bindings when running a statement, to hide
-- warnings about the implicit bindings we introduce.
let ic = hsc_IC hsc_env -- use the interactive dflags
idflags' = ic_dflags ic `wopt_unset` Opt_WarnUnusedBinds
hsc_env' = hsc_env{ hsc_IC = ic{ ic_dflags = idflags' } }
-- compile to value (IO [HValue]), don't run
r <- liftIO $ hscStmtWithLocation hsc_env' expr source linenumber
case r of
-- empty statement / comment
Nothing -> return (RunOk [])
Just (tyThings, hval, fix_env) -> do
updateFixityEnv fix_env
status <-
withVirtualCWD $
withBreakAction (isStep step) idflags' breakMVar statusMVar $ do
liftIO $ sandboxIO idflags' statusMVar hval
let ic = hsc_IC hsc_env
bindings = (ic_tythings ic, ic_rn_gbl_env ic)
size = ghciHistSize idflags'
handleRunStatus step expr bindings tyThings
breakMVar statusMVar status (emptyHistory size)
runDecls :: GhcMonad m => String -> m [Name]
runDecls = runDeclsWithLocation "<interactive>" 1
runDeclsWithLocation :: GhcMonad m => String -> Int -> String -> m [Name]
runDeclsWithLocation source linenumber expr =
do
hsc_env <- getSession
(tyThings, ic) <- liftIO $ hscDeclsWithLocation hsc_env expr source linenumber
setSession $ hsc_env { hsc_IC = ic }
hsc_env <- getSession
hsc_env' <- liftIO $ rttiEnvironment hsc_env
modifySession (\_ -> hsc_env')
return (map getName tyThings)
withVirtualCWD :: GhcMonad m => m a -> m a
withVirtualCWD m = do
hsc_env <- getSession
let ic = hsc_IC hsc_env
let set_cwd = do
dir <- liftIO $ getCurrentDirectory
case ic_cwd ic of
Just dir -> liftIO $ setCurrentDirectory dir
Nothing -> return ()
return dir
reset_cwd orig_dir = do
virt_dir <- liftIO $ getCurrentDirectory
hsc_env <- getSession
let old_IC = hsc_IC hsc_env
setSession hsc_env{ hsc_IC = old_IC{ ic_cwd = Just virt_dir } }
liftIO $ setCurrentDirectory orig_dir
gbracket set_cwd reset_cwd $ \_ -> m
parseImportDecl :: GhcMonad m => String -> m (ImportDecl RdrName)
parseImportDecl expr = withSession $ \hsc_env -> liftIO $ hscImport hsc_env expr
emptyHistory :: Int -> BoundedList History
emptyHistory size = nilBL size
handleRunStatus :: GhcMonad m
=> SingleStep -> String-> ([TyThing],GlobalRdrEnv) -> [Id]
-> MVar () -> MVar Status -> Status -> BoundedList History
-> m RunResult
handleRunStatus step expr bindings final_ids
breakMVar statusMVar status history
| RunAndLogSteps <- step = tracing
| otherwise = not_tracing
where
tracing
| Break is_exception apStack info tid <- status
, not is_exception
= do
hsc_env <- getSession
b <- liftIO $ isBreakEnabled hsc_env info
if b
then not_tracing
-- This breakpoint is explicitly enabled; we want to stop
-- instead of just logging it.
else do
let history' = mkHistory hsc_env apStack info `consBL` history
-- probably better make history strict here, otherwise
-- our BoundedList will be pointless.
_ <- liftIO $ evaluate history'
status <- withBreakAction True (hsc_dflags hsc_env)
breakMVar statusMVar $ do
liftIO $ mask_ $ do
putMVar breakMVar () -- awaken the stopped thread
redirectInterrupts tid $
takeMVar statusMVar -- and wait for the result
handleRunStatus RunAndLogSteps expr bindings final_ids
breakMVar statusMVar status history'
| otherwise
= not_tracing
not_tracing
-- Hit a breakpoint
| Break is_exception apStack info tid <- status
= do
hsc_env <- getSession
let mb_info | is_exception = Nothing
| otherwise = Just info
(hsc_env1, names, span) <- liftIO $
bindLocalsAtBreakpoint hsc_env apStack mb_info
let
resume = Resume
{ resumeStmt = expr, resumeThreadId = tid
, resumeBreakMVar = breakMVar, resumeStatMVar = statusMVar
, resumeBindings = bindings, resumeFinalIds = final_ids
, resumeApStack = apStack, resumeBreakInfo = mb_info
, resumeSpan = span, resumeHistory = toListBL history
, resumeHistoryIx = 0 }
hsc_env2 = pushResume hsc_env1 resume
modifySession (\_ -> hsc_env2)
return (RunBreak tid names mb_info)
-- Completed with an exception
| Complete (Left e) <- status
= return (RunException e)
-- Completed successfully
| Complete (Right hvals) <- status
= do hsc_env <- getSession
let final_ic = extendInteractiveContextWithIds (hsc_IC hsc_env) final_ids
final_names = map getName final_ids
liftIO $ Linker.extendLinkEnv (zip final_names hvals)
hsc_env' <- liftIO $ rttiEnvironment hsc_env{hsc_IC=final_ic}
modifySession (\_ -> hsc_env')
return (RunOk final_names)
| otherwise
= panic "handleRunStatus" -- The above cases are in fact exhaustive
isBreakEnabled :: HscEnv -> BreakInfo -> IO Bool
isBreakEnabled hsc_env inf =
case lookupUFM (hsc_HPT hsc_env) (moduleName (breakInfo_module inf)) of
Just hmi -> do
w <- getBreak (hsc_dflags hsc_env)
(modBreaks_flags (getModBreaks hmi))
(breakInfo_number inf)
case w of Just n -> return (n /= 0); _other -> return False
_ ->
return False
foreign import ccall "&rts_stop_next_breakpoint" stepFlag :: Ptr CInt
foreign import ccall "&rts_stop_on_exception" exceptionFlag :: Ptr CInt
setStepFlag :: IO ()
setStepFlag = poke stepFlag 1
resetStepFlag :: IO ()
resetStepFlag = poke stepFlag 0
-- this points to the IO action that is executed when a breakpoint is hit
foreign import ccall "&rts_breakpoint_io_action"
breakPointIOAction :: Ptr (StablePtr (Bool -> BreakInfo -> HValue -> IO ()))
-- When running a computation, we redirect ^C exceptions to the running
-- thread. ToDo: we might want a way to continue even if the target
-- thread doesn't die when it receives the exception... "this thread
-- is not responding".
--
-- Careful here: there may be ^C exceptions flying around, so we start the new
-- thread blocked (forkIO inherits mask from the parent, #1048), and unblock
-- only while we execute the user's code. We can't afford to lose the final
-- putMVar, otherwise deadlock ensues. (#1583, #1922, #1946)
sandboxIO :: DynFlags -> MVar Status -> IO [HValue] -> IO Status
sandboxIO dflags statusMVar thing =
mask $ \restore -> -- fork starts blocked
let runIt = liftM Complete $ try (restore $ rethrow dflags thing)
in if gopt Opt_GhciSandbox dflags
then do tid <- forkIO $ do res <- runIt
putMVar statusMVar res -- empty: can't block
redirectInterrupts tid $
takeMVar statusMVar
else -- GLUT on OS X needs to run on the main thread. If you
-- try to use it from another thread then you just get a
-- white rectangle rendered. For this, or anything else
-- with such restrictions, you can turn the GHCi sandbox off
-- and things will be run in the main thread.
--
-- BUT, note that the debugging features (breakpoints,
-- tracing, etc.) need the expression to be running in a
-- separate thread, so debugging is only enabled when
-- using the sandbox.
runIt
--
-- While we're waiting for the sandbox thread to return a result, if
-- the current thread receives an asynchronous exception we re-throw
-- it at the sandbox thread and continue to wait.
--
-- This is for two reasons:
--
-- * So that ^C interrupts runStmt (e.g. in GHCi), allowing the
-- computation to run its exception handlers before returning the
-- exception result to the caller of runStmt.
--
-- * clients of the GHC API can terminate a runStmt in progress
-- without knowing the ThreadId of the sandbox thread (#1381)
--
-- NB. use a weak pointer to the thread, so that the thread can still
-- be considered deadlocked by the RTS and sent a BlockedIndefinitely
-- exception. A symptom of getting this wrong is that conc033(ghci)
-- will hang.
--
redirectInterrupts :: ThreadId -> IO a -> IO a
redirectInterrupts target wait
= do wtid <- mkWeakThreadId target
wait `catch` \e -> do
m <- deRefWeak wtid
case m of
Nothing -> wait
Just target -> do throwTo target (e :: SomeException); wait
-- We want to turn ^C into a break when -fbreak-on-exception is on,
-- but it's an async exception and we only break for sync exceptions.
-- Idea: if we catch and re-throw it, then the re-throw will trigger
-- a break. Great - but we don't want to re-throw all exceptions, because
-- then we'll get a double break for ordinary sync exceptions (you'd have
-- to :continue twice, which looks strange). So if the exception is
-- not "Interrupted", we unset the exception flag before throwing.
--
rethrow :: DynFlags -> IO a -> IO a
rethrow dflags io = Exception.catch io $ \se -> do
-- If -fbreak-on-error, we break unconditionally,
-- but with care of not breaking twice
if gopt Opt_BreakOnError dflags &&
not (gopt Opt_BreakOnException dflags)
then poke exceptionFlag 1
else case fromException se of
-- If it is a "UserInterrupt" exception, we allow
-- a possible break by way of -fbreak-on-exception
Just UserInterrupt -> return ()
-- In any other case, we don't want to break
_ -> poke exceptionFlag 0
Exception.throwIO se
-- This function sets up the interpreter for catching breakpoints, and
-- resets everything when the computation has stopped running. This
-- is a not-very-good way to ensure that only the interactive
-- evaluation should generate breakpoints.
withBreakAction :: (ExceptionMonad m, MonadIO m) =>
Bool -> DynFlags -> MVar () -> MVar Status -> m a -> m a
withBreakAction step dflags breakMVar statusMVar act
= gbracket (liftIO setBreakAction) (liftIO . resetBreakAction) (\_ -> act)
where
setBreakAction = do
stablePtr <- newStablePtr onBreak
poke breakPointIOAction stablePtr
when (gopt Opt_BreakOnException dflags) $ poke exceptionFlag 1
when step $ setStepFlag
return stablePtr
-- Breaking on exceptions is not enabled by default, since it
-- might be a bit surprising. The exception flag is turned off
-- as soon as it is hit, or in resetBreakAction below.
onBreak is_exception info apStack = do
tid <- myThreadId
putMVar statusMVar (Break is_exception apStack info tid)
takeMVar breakMVar
resetBreakAction stablePtr = do
poke breakPointIOAction noBreakStablePtr
poke exceptionFlag 0
resetStepFlag
freeStablePtr stablePtr
noBreakStablePtr :: StablePtr (Bool -> BreakInfo -> HValue -> IO ())
noBreakStablePtr = unsafePerformIO $ newStablePtr noBreakAction
noBreakAction :: Bool -> BreakInfo -> HValue -> IO ()
noBreakAction False _ _ = putStrLn "*** Ignoring breakpoint"
noBreakAction True _ _ = return () -- exception: just continue
resume :: GhcMonad m => (SrcSpan->Bool) -> SingleStep -> m RunResult
resume canLogSpan step
= do
hsc_env <- getSession
let ic = hsc_IC hsc_env
resume = ic_resume ic
case resume of
[] -> liftIO $
throwGhcExceptionIO (ProgramError "not stopped at a breakpoint")
(r:rs) -> do
-- unbind the temporary locals by restoring the TypeEnv from
-- before the breakpoint, and drop this Resume from the
-- InteractiveContext.
let (resume_tmp_te,resume_rdr_env) = resumeBindings r
ic' = ic { ic_tythings = resume_tmp_te,
ic_rn_gbl_env = resume_rdr_env,
ic_resume = rs }
modifySession (\_ -> hsc_env{ hsc_IC = ic' })
-- remove any bindings created since the breakpoint from the
-- linker's environment
let new_names = map getName (filter (`notElem` resume_tmp_te)
(ic_tythings ic))
liftIO $ Linker.deleteFromLinkEnv new_names
when (isStep step) $ liftIO setStepFlag
case r of
Resume { resumeStmt = expr, resumeThreadId = tid
, resumeBreakMVar = breakMVar, resumeStatMVar = statusMVar
, resumeBindings = bindings, resumeFinalIds = final_ids
, resumeApStack = apStack, resumeBreakInfo = info, resumeSpan = span
, resumeHistory = hist } -> do
withVirtualCWD $ do
withBreakAction (isStep step) (hsc_dflags hsc_env)
breakMVar statusMVar $ do
status <- liftIO $ mask_ $ do
putMVar breakMVar ()
-- this awakens the stopped thread...
redirectInterrupts tid $
takeMVar statusMVar
-- and wait for the result
let prevHistoryLst = fromListBL 50 hist
hist' = case info of
Nothing -> prevHistoryLst
Just i
| not $canLogSpan span -> prevHistoryLst
| otherwise -> mkHistory hsc_env apStack i `consBL`
fromListBL 50 hist
handleRunStatus step expr bindings final_ids
breakMVar statusMVar status hist'
back :: GhcMonad m => m ([Name], Int, SrcSpan)
back = moveHist (+1)
forward :: GhcMonad m => m ([Name], Int, SrcSpan)
forward = moveHist (subtract 1)
moveHist :: GhcMonad m => (Int -> Int) -> m ([Name], Int, SrcSpan)
moveHist fn = do
hsc_env <- getSession
case ic_resume (hsc_IC hsc_env) of
[] -> liftIO $
throwGhcExceptionIO (ProgramError "not stopped at a breakpoint")
(r:rs) -> do
let ix = resumeHistoryIx r
history = resumeHistory r
new_ix = fn ix
--
when (new_ix > length history) $ liftIO $
throwGhcExceptionIO (ProgramError "no more logged breakpoints")
when (new_ix < 0) $ liftIO $
throwGhcExceptionIO (ProgramError "already at the beginning of the history")
let
update_ic apStack mb_info = do
(hsc_env1, names, span) <- liftIO $ bindLocalsAtBreakpoint hsc_env
apStack mb_info
let ic = hsc_IC hsc_env1
r' = r { resumeHistoryIx = new_ix }
ic' = ic { ic_resume = r':rs }
modifySession (\_ -> hsc_env1{ hsc_IC = ic' })
return (names, new_ix, span)
-- careful: we want apStack to be the AP_STACK itself, not a thunk
-- around it, hence the cases are carefully constructed below to
-- make this the case. ToDo: this is v. fragile, do something better.
if new_ix == 0
then case r of
Resume { resumeApStack = apStack,
resumeBreakInfo = mb_info } ->
update_ic apStack mb_info
else case history !! (new_ix - 1) of
History apStack info _ ->
update_ic apStack (Just info)
-- -----------------------------------------------------------------------------
-- After stopping at a breakpoint, add free variables to the environment
result_fs :: FastString
result_fs = fsLit "_result"
bindLocalsAtBreakpoint
:: HscEnv
-> HValue
-> Maybe BreakInfo
-> IO (HscEnv, [Name], SrcSpan)
-- Nothing case: we stopped when an exception was raised, not at a
-- breakpoint. We have no location information or local variables to
-- bind, all we can do is bind a local variable to the exception
-- value.
bindLocalsAtBreakpoint hsc_env apStack Nothing = do
let exn_fs = fsLit "_exception"
exn_name = mkInternalName (getUnique exn_fs) (mkVarOccFS exn_fs) span
e_fs = fsLit "e"
e_name = mkInternalName (getUnique e_fs) (mkTyVarOccFS e_fs) span
e_tyvar = mkRuntimeUnkTyVar e_name liftedTypeKind
exn_id = Id.mkVanillaGlobal exn_name (mkTyVarTy e_tyvar)
ictxt0 = hsc_IC hsc_env
ictxt1 = extendInteractiveContextWithIds ictxt0 [exn_id]
span = mkGeneralSrcSpan (fsLit "<exception thrown>")
--
Linker.extendLinkEnv [(exn_name, unsafeCoerce# apStack)]
return (hsc_env{ hsc_IC = ictxt1 }, [exn_name], span)
-- Just case: we stopped at a breakpoint, we have information about the location
-- of the breakpoint and the free variables of the expression.
bindLocalsAtBreakpoint hsc_env apStack (Just info) = do
let
mod_name = moduleName (breakInfo_module info)
hmi = expectJust "bindLocalsAtBreakpoint" $
lookupUFM (hsc_HPT hsc_env) mod_name
breaks = getModBreaks hmi
index = breakInfo_number info
vars = breakInfo_vars info
result_ty = breakInfo_resty info
occs = modBreaks_vars breaks ! index
span = modBreaks_locs breaks ! index
-- Filter out any unboxed ids;
-- we can't bind these at the prompt
pointers = filter (\(id,_) -> isPointer id) vars
isPointer id | UnaryRep ty <- repType (idType id)
, PtrRep <- typePrimRep ty = True
| otherwise = False
(ids, offsets) = unzip pointers
free_tvs = mapUnionVarSet (tyVarsOfType . idType) ids
`unionVarSet` tyVarsOfType result_ty
-- It might be that getIdValFromApStack fails, because the AP_STACK
-- has been accidentally evaluated, or something else has gone wrong.
-- So that we don't fall over in a heap when this happens, just don't
-- bind any free variables instead, and we emit a warning.
mb_hValues <- mapM (getIdValFromApStack apStack) (map fromIntegral offsets)
let filtered_ids = [ id | (id, Just _hv) <- zip ids mb_hValues ]
when (any isNothing mb_hValues) $
debugTraceMsg (hsc_dflags hsc_env) 1 $
text "Warning: _result has been evaluated, some bindings have been lost"
us <- mkSplitUniqSupply 'I'
let (us1, us2) = splitUniqSupply us
tv_subst = newTyVars us1 free_tvs
new_ids = zipWith3 (mkNewId tv_subst) occs filtered_ids (uniqsFromSupply us2)
names = map idName new_ids
-- make an Id for _result. We use the Unique of the FastString "_result";
-- we don't care about uniqueness here, because there will only be one
-- _result in scope at any time.
let result_name = mkInternalName (getUnique result_fs)
(mkVarOccFS result_fs) span
result_id = Id.mkVanillaGlobal result_name (substTy tv_subst result_ty)
-- for each Id we're about to bind in the local envt:
-- - tidy the type variables
-- - globalise the Id (Ids are supposed to be Global, apparently).
--
let result_ok = isPointer result_id
all_ids | result_ok = result_id : new_ids
| otherwise = new_ids
id_tys = map idType all_ids
(_,tidy_tys) = tidyOpenTypes emptyTidyEnv id_tys
final_ids = zipWith setIdType all_ids tidy_tys
ictxt0 = hsc_IC hsc_env
ictxt1 = extendInteractiveContextWithIds ictxt0 final_ids
Linker.extendLinkEnv [ (name,hval) | (name, Just hval) <- zip names mb_hValues ]
when result_ok $ Linker.extendLinkEnv [(result_name, unsafeCoerce# apStack)]
hsc_env1 <- rttiEnvironment hsc_env{ hsc_IC = ictxt1 }
return (hsc_env1, if result_ok then result_name:names else names, span)
where
-- We need a fresh Unique for each Id we bind, because the linker
-- state is single-threaded and otherwise we'd spam old bindings
-- whenever we stop at a breakpoint. The InteractiveContext is properly
-- saved/restored, but not the linker state. See #1743, test break026.
mkNewId :: TvSubst -> OccName -> Id -> Unique -> Id
mkNewId tv_subst occ id uniq
= Id.mkVanillaGlobalWithInfo name ty (idInfo id)
where
loc = nameSrcSpan (idName id)
name = mkInternalName uniq occ loc
ty = substTy tv_subst (idType id)
newTyVars :: UniqSupply -> TcTyVarSet -> TvSubst
-- Similarly, clone the type variables mentioned in the types
-- we have here, *and* make them all RuntimeUnk tyvars
newTyVars us tvs
= mkTopTvSubst [ (tv, mkTyVarTy (mkRuntimeUnkTyVar name (tyVarKind tv)))
| (tv, uniq) <- varSetElems tvs `zip` uniqsFromSupply us
, let name = setNameUnique (tyVarName tv) uniq ]
rttiEnvironment :: HscEnv -> IO HscEnv
rttiEnvironment hsc_env@HscEnv{hsc_IC=ic} = do
let tmp_ids = [id | AnId id <- ic_tythings ic]
incompletelyTypedIds =
[id | id <- tmp_ids
, not $ noSkolems id
, (occNameFS.nameOccName.idName) id /= result_fs]
hsc_env' <- foldM improveTypes hsc_env (map idName incompletelyTypedIds)
return hsc_env'
where
noSkolems = isEmptyVarSet . tyVarsOfType . idType
improveTypes hsc_env@HscEnv{hsc_IC=ic} name = do
let tmp_ids = [id | AnId id <- ic_tythings ic]
Just id = find (\i -> idName i == name) tmp_ids
if noSkolems id
then return hsc_env
else do
mb_new_ty <- reconstructType hsc_env 10 id
let old_ty = idType id
case mb_new_ty of
Nothing -> return hsc_env
Just new_ty -> do
case improveRTTIType hsc_env old_ty new_ty of
Nothing -> return $
WARN(True, text (":print failed to calculate the "
++ "improvement for a type")) hsc_env
Just subst -> do
let dflags = hsc_dflags hsc_env
when (dopt Opt_D_dump_rtti dflags) $
printInfoForUser dflags alwaysQualify $
fsep [text "RTTI Improvement for", ppr id, equals, ppr subst]
let ic' = substInteractiveContext ic subst
return hsc_env{hsc_IC=ic'}
getIdValFromApStack :: HValue -> Int -> IO (Maybe HValue)
getIdValFromApStack apStack (I# stackDepth) = do
case getApStackVal# apStack (stackDepth +# 1#) of
-- The +1 is magic! I don't know where it comes
-- from, but this makes things line up. --SDM
(# ok, result #) ->
case ok of
0# -> return Nothing -- AP_STACK not found
_ -> return (Just (unsafeCoerce# result))
pushResume :: HscEnv -> Resume -> HscEnv
pushResume hsc_env resume = hsc_env { hsc_IC = ictxt1 }
where
ictxt0 = hsc_IC hsc_env
ictxt1 = ictxt0 { ic_resume = resume : ic_resume ictxt0 }
-- -----------------------------------------------------------------------------
-- Abandoning a resume context
abandon :: GhcMonad m => m Bool
abandon = do
hsc_env <- getSession
let ic = hsc_IC hsc_env
resume = ic_resume ic
case resume of
[] -> return False
r:rs -> do
modifySession $ \_ -> hsc_env{ hsc_IC = ic { ic_resume = rs } }
liftIO $ abandon_ r
return True
abandonAll :: GhcMonad m => m Bool
abandonAll = do
hsc_env <- getSession
let ic = hsc_IC hsc_env
resume = ic_resume ic
case resume of
[] -> return False
rs -> do
modifySession $ \_ -> hsc_env{ hsc_IC = ic { ic_resume = [] } }
liftIO $ mapM_ abandon_ rs
return True
-- when abandoning a computation we have to
-- (a) kill the thread with an async exception, so that the
-- computation itself is stopped, and
-- (b) fill in the MVar. This step is necessary because any
-- thunks that were under evaluation will now be updated
-- with the partial computation, which still ends in takeMVar,
-- so any attempt to evaluate one of these thunks will block
-- unless we fill in the MVar.
-- (c) wait for the thread to terminate by taking its status MVar. This
-- step is necessary to prevent race conditions with
-- -fbreak-on-exception (see #5975).
-- See test break010.
abandon_ :: Resume -> IO ()
abandon_ r = do
killThread (resumeThreadId r)
putMVar (resumeBreakMVar r) ()
_ <- takeMVar (resumeStatMVar r)
return ()
-- -----------------------------------------------------------------------------
-- Bounded list, optimised for repeated cons
data BoundedList a = BL
{-# UNPACK #-} !Int -- length
{-# UNPACK #-} !Int -- bound
[a] -- left
[a] -- right, list is (left ++ reverse right)
nilBL :: Int -> BoundedList a
nilBL bound = BL 0 bound [] []
consBL :: a -> BoundedList a -> BoundedList a
consBL a (BL len bound left right)
| len < bound = BL (len+1) bound (a:left) right
| null right = BL len bound [a] $! tail (reverse left)
| otherwise = BL len bound (a:left) $! tail right
toListBL :: BoundedList a -> [a]
toListBL (BL _ _ left right) = left ++ reverse right
fromListBL :: Int -> [a] -> BoundedList a
fromListBL bound l = BL (length l) bound l []
-- lenBL (BL len _ _ _) = len
-- -----------------------------------------------------------------------------
-- | Set the interactive evaluation context.
--
-- (setContext imports) sets the ic_imports field (which in turn
-- determines what is in scope at the prompt) to 'imports', and
-- constructs the ic_rn_glb_env environment to reflect it.
--
-- We retain in scope all the things defined at the prompt, and kept
-- in ic_tythings. (Indeed, they shadow stuff from ic_imports.)
setContext :: GhcMonad m => [InteractiveImport] -> m ()
setContext imports
= do { hsc_env <- getSession
; let dflags = hsc_dflags hsc_env
; all_env_err <- liftIO $ findGlobalRdrEnv hsc_env imports
; case all_env_err of
Left (mod, err) ->
liftIO $ throwGhcExceptionIO (formatError dflags mod err)
Right all_env -> do {
; let old_ic = hsc_IC hsc_env
final_rdr_env = all_env `icExtendGblRdrEnv` ic_tythings old_ic
; modifySession $ \_ ->
hsc_env{ hsc_IC = old_ic { ic_imports = imports
, ic_rn_gbl_env = final_rdr_env }}}}
where
formatError dflags mod err = ProgramError . showSDoc dflags $
text "Cannot add module" <+> ppr mod <+>
text "to context:" <+> text err
findGlobalRdrEnv :: HscEnv -> [InteractiveImport]
-> IO (Either (ModuleName, String) GlobalRdrEnv)
-- Compute the GlobalRdrEnv for the interactive context
findGlobalRdrEnv hsc_env imports
= do { idecls_env <- hscRnImportDecls hsc_env idecls
-- This call also loads any orphan modules
; return $ case partitionEithers (map mkEnv imods) of
([], imods_env) -> Right (foldr plusGlobalRdrEnv idecls_env imods_env)
(err : _, _) -> Left err }
where
idecls :: [LImportDecl RdrName]
idecls = [noLoc d | IIDecl d <- imports]
imods :: [ModuleName]
imods = [m | IIModule m <- imports]
mkEnv mod = case mkTopLevEnv (hsc_HPT hsc_env) mod of
Left err -> Left (mod, err)
Right env -> Right env
availsToGlobalRdrEnv :: ModuleName -> [AvailInfo] -> GlobalRdrEnv
availsToGlobalRdrEnv mod_name avails
= mkGlobalRdrEnv (gresFromAvails imp_prov avails)
where
-- We're building a GlobalRdrEnv as if the user imported
-- all the specified modules into the global interactive module
imp_prov = Imported [ImpSpec { is_decl = decl, is_item = ImpAll}]
decl = ImpDeclSpec { is_mod = mod_name, is_as = mod_name,
is_qual = False,
is_dloc = srcLocSpan interactiveSrcLoc }
mkTopLevEnv :: HomePackageTable -> ModuleName -> Either String GlobalRdrEnv
mkTopLevEnv hpt modl
= case lookupUFM hpt modl of
Nothing -> Left "not a home module"
Just details ->
case mi_globals (hm_iface details) of
Nothing -> Left "not interpreted"
Just env -> Right env
-- | Get the interactive evaluation context, consisting of a pair of the
-- set of modules from which we take the full top-level scope, and the set
-- of modules from which we take just the exports respectively.
getContext :: GhcMonad m => m [InteractiveImport]
getContext = withSession $ \HscEnv{ hsc_IC=ic } ->
return (ic_imports ic)
-- | Returns @True@ if the specified module is interpreted, and hence has
-- its full top-level scope available.
moduleIsInterpreted :: GhcMonad m => Module -> m Bool
moduleIsInterpreted modl = withSession $ \h ->
if modulePackageKey modl /= thisPackage (hsc_dflags h)
then return False
else case lookupUFM (hsc_HPT h) (moduleName modl) of
Just details -> return (isJust (mi_globals (hm_iface details)))
_not_a_home_module -> return False
-- | Looks up an identifier in the current interactive context (for :info)
-- Filter the instances by the ones whose tycons (or classes resp.)
-- are in scope (qualified or otherwise). Otherwise we list a whole lot too many!
-- The exact choice of which ones to show, and which to hide, is a judgement call.
-- (see Trac #1581)
getInfo :: GhcMonad m => Bool -> Name -> m (Maybe (TyThing,Fixity,[ClsInst],[FamInst]))
getInfo allInfo name
= withSession $ \hsc_env ->
do mb_stuff <- liftIO $ hscTcRnGetInfo hsc_env name
case mb_stuff of
Nothing -> return Nothing
Just (thing, fixity, cls_insts, fam_insts) -> do
let rdr_env = ic_rn_gbl_env (hsc_IC hsc_env)
-- Filter the instances based on whether the constituent names of their
-- instance heads are all in scope.
let cls_insts' = filter (plausible rdr_env . orphNamesOfClsInst) cls_insts
fam_insts' = filter (plausible rdr_env . orphNamesOfFamInst) fam_insts
return (Just (thing, fixity, cls_insts', fam_insts'))
where
plausible rdr_env names
-- Dfun involving only names that are in ic_rn_glb_env
= allInfo
|| all ok (nameSetElems names)
where -- A name is ok if it's in the rdr_env,
-- whether qualified or not
ok n | n == name = True -- The one we looked for in the first place!
| isBuiltInSyntax n = True
| isExternalName n = any ((== n) . gre_name)
(lookupGRE_Name rdr_env n)
| otherwise = True
-- | Returns all names in scope in the current interactive context
getNamesInScope :: GhcMonad m => m [Name]
getNamesInScope = withSession $ \hsc_env -> do
return (map gre_name (globalRdrEnvElts (ic_rn_gbl_env (hsc_IC hsc_env))))
getRdrNamesInScope :: GhcMonad m => m [RdrName]
getRdrNamesInScope = withSession $ \hsc_env -> do
let
ic = hsc_IC hsc_env
gbl_rdrenv = ic_rn_gbl_env ic
gbl_names = concatMap greToRdrNames $ globalRdrEnvElts gbl_rdrenv
return gbl_names
-- ToDo: move to RdrName
greToRdrNames :: GlobalRdrElt -> [RdrName]
greToRdrNames GRE{ gre_name = name, gre_prov = prov }
= case prov of
LocalDef -> [unqual]
Imported specs -> concat (map do_spec (map is_decl specs))
where
occ = nameOccName name
unqual = Unqual occ
do_spec decl_spec
| is_qual decl_spec = [qual]
| otherwise = [unqual,qual]
where qual = Qual (is_as decl_spec) occ
-- | Parses a string as an identifier, and returns the list of 'Name's that
-- the identifier can refer to in the current interactive context.
parseName :: GhcMonad m => String -> m [Name]
parseName str = withSession $ \hsc_env -> liftIO $
do { lrdr_name <- hscParseIdentifier hsc_env str
; hscTcRnLookupRdrName hsc_env lrdr_name }
-- -----------------------------------------------------------------------------
-- Getting the type of an expression
-- | Get the type of an expression
-- Returns its most general type
exprType :: GhcMonad m => String -> m Type
exprType expr = withSession $ \hsc_env -> do
ty <- liftIO $ hscTcExpr hsc_env expr
return $ tidyType emptyTidyEnv ty
-- -----------------------------------------------------------------------------
-- Getting the kind of a type
-- | Get the kind of a type
typeKind :: GhcMonad m => Bool -> String -> m (Type, Kind)
typeKind normalise str = withSession $ \hsc_env -> do
liftIO $ hscKcType hsc_env normalise str
-----------------------------------------------------------------------------
-- Compile an expression, run it and deliver the resulting HValue
compileExpr :: GhcMonad m => String -> m HValue
compileExpr expr = withSession $ \hsc_env -> do
Just (ids, hval, fix_env) <- liftIO $ hscStmt hsc_env ("let __cmCompileExpr = "++expr)
updateFixityEnv fix_env
hvals <- liftIO hval
case (ids,hvals) of
([_],[hv]) -> return hv
_ -> panic "compileExpr"
-- -----------------------------------------------------------------------------
-- Compile an expression, run it and return the result as a dynamic
dynCompileExpr :: GhcMonad m => String -> m Dynamic
dynCompileExpr expr = do
iis <- getContext
let importDecl = ImportDecl {
ideclSourceSrc = Nothing,
ideclName = noLoc (mkModuleName "Data.Dynamic"),
ideclPkgQual = Nothing,
ideclSource = False,
ideclSafe = False,
ideclQualified = True,
ideclImplicit = False,
ideclAs = Nothing,
ideclHiding = Nothing
}
setContext (IIDecl importDecl : iis)
let stmt = "let __dynCompileExpr = Data.Dynamic.toDyn (" ++ expr ++ ")"
Just (ids, hvals, fix_env) <- withSession $ \hsc_env ->
liftIO $ hscStmt hsc_env stmt
setContext iis
updateFixityEnv fix_env
vals <- liftIO (unsafeCoerce# hvals :: IO [Dynamic])
case (ids,vals) of
(_:[], v:[]) -> return v
_ -> panic "dynCompileExpr"
-----------------------------------------------------------------------------
-- show a module and its source/object filenames
showModule :: GhcMonad m => ModSummary -> m String
showModule mod_summary =
withSession $ \hsc_env -> do
interpreted <- isModuleInterpreted mod_summary
let dflags = hsc_dflags hsc_env
return (showModMsg dflags (hscTarget dflags) interpreted mod_summary)
isModuleInterpreted :: GhcMonad m => ModSummary -> m Bool
isModuleInterpreted mod_summary = withSession $ \hsc_env ->
case lookupUFM (hsc_HPT hsc_env) (ms_mod_name mod_summary) of
Nothing -> panic "missing linkable"
Just mod_info -> return (not obj_linkable)
where
obj_linkable = isObjectLinkable (expectJust "showModule" (hm_linkable mod_info))
----------------------------------------------------------------------------
-- RTTI primitives
obtainTermFromVal :: HscEnv -> Int -> Bool -> Type -> a -> IO Term
obtainTermFromVal hsc_env bound force ty x =
cvObtainTerm hsc_env bound force ty (unsafeCoerce# x)
obtainTermFromId :: HscEnv -> Int -> Bool -> Id -> IO Term
obtainTermFromId hsc_env bound force id = do
hv <- Linker.getHValue hsc_env (varName id)
cvObtainTerm hsc_env bound force (idType id) hv
-- Uses RTTI to reconstruct the type of an Id, making it less polymorphic
reconstructType :: HscEnv -> Int -> Id -> IO (Maybe Type)
reconstructType hsc_env bound id = do
hv <- Linker.getHValue hsc_env (varName id)
cvReconstructType hsc_env bound (idType id) hv
mkRuntimeUnkTyVar :: Name -> Kind -> TyVar
mkRuntimeUnkTyVar name kind = mkTcTyVar name kind RuntimeUnk
#endif /* GHCI */
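-- Usage sketch (editor's addition): through the GHC API these entry points
-- are run inside a 'GhcMonad' session, e.g. something along the lines of
-- @runStmt "x <- return (1 + 1)" RunToCompletion@ followed by inspecting the
-- returned 'RunResult'; the exact session setup is omitted here.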
|
alexander-at-github/eta
|
compiler/ETA/Main/InteractiveEval.hs
|
Haskell
|
bsd-3-clause
| 42,109
|
{-# LANGUAGE CPP, BangPatterns #-}
--------------------------------------------------------------------------------
-- |
-- Module : GalFld.Algorithmen.Berlekamp
-- Note : Implements a Berlekamp factorization
--
-- Only works on square-free polynomials
--
-- Contains the Berlekamp algorithm, developed by Elwyn Berlekamp in 1967,
-- for factoring polynomials over finite fields.
--
--------------------------------------------------------------------------------
module GalFld.Algorithmen.Berlekamp
( appBerlekamp, sffAndBerlekamp
, findIrred, findIrreds, findTrivialsB
-- Algorithm
, berlekampBasis
, berlekampFactor
)where
import Data.Maybe
import Data.List
import Control.Monad
import Control.Parallel
import Control.Parallel.Strategies
import GalFld.Core
import GalFld.Algorithmen.SFreeFactorization
--------------------------------------------------------------------------------
-- Wrapper
appBerlekamp :: (Show a, FiniteField a, Num a, Fractional a) =>
[(Int,Polynom a)] -> [(Int,Polynom a)]
appBerlekamp = appFact berlekampFactor
-- |Factors a polynomial f over a finite field
-- Uses square-free factorization followed by Berlekamp
sffAndBerlekamp :: (Show a, Fractional a, Num a, FiniteField a)
=> Polynom a -> [(Int,Polynom a)]
sffAndBerlekamp f = appBerlekamp $ sff f
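-- A hedged usage sketch for 'sffAndBerlekamp' (illustrative only: 'F' stands
-- for whatever FiniteField element type is in scope, and 'pList' builds a
-- polynomial from its coefficient list, as used elsewhere in this module):
--
-- > let f = pList [c0, c1, c2] :: Polynom F
-- > sffAndBerlekamp f   -- [(multiplicity, irreducible factor), ...]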
-- |Picks the first irreducible polynomial out of a list of polynomials
findIrred :: (Show a, Fractional a, Num a, FiniteField a) =>
[Polynom a] -> Polynom a
findIrred = head . findIrreds
-- |Filters the irreducible polynomials out of a list
-- using SFF and Berlekamp
#if 0
-- Is lazy.
findIrreds :: (Show a, Fractional a, Num a, FiniteField a) => [Polynom a] -> [Polynom a]
findIrreds (f:fs) = findIrreds' (f:fs)
where findIrreds' [] = []
findIrreds' (f:fs)
| (not (hasNs f es) || uDegP f < 2)
&& isTrivialFact fSff
&& isTrivialFact fB = f : findIrreds' fs
| otherwise = findIrreds' fs
where fSff = appSff $ toFact f
fB = appBerlekamp fSff
es = elems $ getReprP f
#else
-- with backtracking
findIrreds fs = do
f <- fs
let fSff = appSff $ toFact f
guard (isTrivialFact fSff)
let fB = appBerlekamp fSff
guard (isTrivialFact fB)
return f
#endif
-- |Returns all factorizations that are still trivial after Berlekamp
-- Applies (the obvious factorization and) SFF beforehand
--
-- Parallelized using the rpar strategy.
findTrivialsB :: (Show a, Fractional a, Num a, FiniteField a) =>
[Polynom a] -> [[(Int,Polynom a)]]
findTrivialsB ps = [fs | fs <- parMap rpar appBerlekamp (findTrivialsSff ps)
, isTrivialFact fs]
--------------------------------------------------------------------------------
-- Algorithmus
-- |Computes a basis of the Berlekamp space of f,
-- i.e. returns a matrix whose rows span the Berlekamp space
-- with respect to the canonical basis { 1, x, x², x³, ... }
berlekampBasis :: (Show a, Fractional a, Num a, FiniteField a)
=> Polynom a -> Matrix a
berlekampBasis f = transposeM $ kernelM $ transposeM $!
fromListsM [red i | i <- [0..(n-1)]] - genDiagM 1 n
where !n = fromJust $ degP f
!q = elemCount a
!a = getReprP f
{-# INLINE red #-}
red i = takeFill 0 n $ p2List $ modByP (pTupUnsave [(i*q,1)]) f
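-- For reference (standard Berlekamp construction, stated as an assumption
-- about the code above): the Berlekamp space of f over F_q is
--   { h  |  h^q ≡ h (mod f) }.
-- Row i of the matrix built above holds the coefficients of x^(i*q) mod f
-- ('red i'), so subtracting the identity and taking the kernel of the
-- transpose yields exactly the vectors h with h^q ≡ h (mod f).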
-- |Factors a polynomial f over a finite field
-- Preconditions: f is square-free
-- Output: list of irreducible, pairwise coprime polynomials
berlekampFactor :: (Show a, Fractional a, Num a, FiniteField a)
=> Polynom a -> [(Int,Polynom a)]
berlekampFactor f | isNullP f = []
| uDegP f < 2 = [(1,f)]
| otherwise = berlekampFactor' f m
where !m = berlekampBasis f
{-# INLINE berlekampFactor' #-}
berlekampFactor' :: (Show a, Num a, Fractional a, FiniteField a)
=> Polynom a -> Matrix a -> [(Int,Polynom a)]
berlekampFactor' f m | uDegP f <= 1 = [(1,f)]
| getNumRowsM m == 1 = [(1,f)]
| otherwise =
berlekampFactor' g n ++ berlekampFactor' g' n'
where {-# INLINE g #-}
g = head [x | x <- [ggTP f (h - pKonst s)
| s <- elems (getReprP f)] , x /= 1]
{-# INLINE g' #-}
g' = f @/ g
{-# INLINE h #-}
h = pList $ getRowM m 2
{-# INLINE n #-}
n = newKer m g
{-# INLINE n' #-}
n' = newKer m g'
{-# INLINE newKer #-}
newKer m g = fromListsM $! take r m'
where !(k,l) = boundsM m
!m' = toListsM $ echelonM $ fromListsM
[takeFill 0 l $ p2List $
modByP (pList (getRowM m i)) g | i <- [1..k]]
!r = k-1- fromMaybe (-1) (findIndex (all (==0))
$ reverse m')
{-# INLINE takeFill #-}
takeFill :: Num a => a -> Int -> [a] -> [a]
takeFill a n [] = replicate n a
takeFill a n (x:xs) = x : takeFill a (n-1) xs
#if 0
-- |Factors a polynomial f over a finite field
-- Preconditions: f is square-free
-- Output: list of irreducible, pairwise coprime polynomials
--
-- WARNING: this algorithm is NOT YET FULLY implemented.
-- It only gives the right result MOST OF THE TIME and should not
-- be used.
--
berlekampFactor2 :: (Show a, Fractional a, Num a, FiniteField a)
=> Polynom a -> [(Int,Polynom a)]
berlekampFactor2 f | isNullP f = []
| uDegP f < 2 = [(1,f)]
| otherwise = berlekampFactor' f m
where !m = berlekampBasis f
{-# INLINE berlekampFactor' #-}
berlekampFactor' :: (Show a, Num a, Fractional a, FiniteField a)
=> Polynom a -> Matrix a -> [(Int,Polynom a)]
berlekampFactor' f m | uDegP f <= 1 = [(1,f)]
| getNumRowsM m == 1 = [(1,f)]
| otherwise =
concat [berlekampFactor' g (newKer m g) | g <- gs]
where {-# INLINE gs #-}
gs = [x | x <- [ggTP f (h - pKonst s)
| s <- elems (getReprP f)] , x /= 1]
{-# INLINE h #-}
h = pList $ getRowM m 2
{-# INLINE newKer #-}
newKer m g = fromListsM $! take r m'
where !(k,l) = boundsM m
!m' = toListsM $ echelonM $ fromListsM
[takeFill 0 l $ p2List $
modByP (pList (getRowM m i)) g | i <- [1..k]]
!r = k-1- fromMaybe (-1) (findIndex (all (==0))
$ reverse m')
#endif
|
maximilianhuber/softwareProjekt
|
src/GalFld/Algorithmen/Berlekamp.hs
|
Haskell
|
bsd-3-clause
| 7,557
|
{-# OPTIONS_HADDOCK hide #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Network.DHT.Kademlia.Workers.Persistence (persistRoutingTable) where
import Control.Concurrent
import Control.Concurrent.STM
import Control.Concurrent.Timer
import Control.Monad
import Data.Binary
import Data.Conduit
import Data.Conduit.Network
import Data.Time.Clock
import Data.Vector ((!), (//))
import Network.DHT.Kademlia.Bucket
import Network.DHT.Kademlia.Def
import Network.DHT.Kademlia.Util
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as BC
import qualified Data.ByteString.Lazy as BL
import qualified Data.Conduit.Binary as CB
import qualified Data.Conduit.List as CL
import qualified Data.HashTable.IO as H
import qualified Data.Text as T
import qualified Data.Vector as V
-- | Periodically persist the routing table in case this node goes offline
persistRoutingTable :: KademliaEnv -> Config -> IO ()
persistRoutingTable KademliaEnv{..} config = forkIO_ $ forever $ do
threadDelay $ secToMicro 10
writeRoutingTable fp routingTable
where
fp :: FilePath
fp = T.unpack $ cfgRoutingTablePath config
|
phylake/kademlia
|
Network/DHT/Kademlia/Workers/Persistence.hs
|
Haskell
|
bsd-3-clause
| 1,315
|
{-# LANGUAGE OverloadedStrings #-}
module Bot.Action.Maven
( maven
, version
, parentVersion
, changeDependencyVersion
, updateDependencyVersions
, properties
, snapshots
) where
import Bot.Action.Action
import Bot.Action.XML
import Bot.Types
import Bot.Util
import Control.Arrow
import Control.Monad
import Control.Monad.IO.Class
import Data.List (inits)
import Data.Monoid ((<>))
import qualified Data.Text as T
import qualified Data.Text.IO as T
import qualified Text.XML.Light as X
import System.Directory
maven :: Text -> [(Text, [Text])] -> Project -> Action
maven cmd projectProfiles project = do
let path = projectPath project
isMavenProject <- liftIO $ doesFileExist $ path ++ "/pom.xml"
unless isMavenProject $ do
throwA $ "Not a project (pom.xml not found): {}" % pack path
let profiles = case lookup (projectName project) projectProfiles of
Nothing -> ""
Just ps -> "-P" <> (T.intercalate "," ps)
output <- silentProjectCommand ("mvn {} {}" %% (profiles, cmd)) project
liftIO $
if "BUILD FAILURE" `T.isInfixOf` output
then do
T.putStrLn $ red xMarkChar <> ". Command implicitly failed with 'BUILD FAILURE' log"
showOutput output
else T.putStrLn $ green checkMarkChar
version :: Project -> Action
version project = projectVersion project >>= (liftIO . T.putStrLn)
projectVersion :: Project -> ActionM Text
projectVersion project = do
pom <- readPOM project
readSingleValue pom ["project", "version"]
parentVersion :: Project -> Action
parentVersion project = do
pom <- readPOM project
name <- readSingleValue pom ["project", "parent", "artifactId"]
version <- readSingleValue pom ["project", "parent", "version"]
liftIO $ printf "{}: {}" (name, version)
changeDependencyVersion :: Project -> Text -> Text -> Action
changeDependencyVersion project depName newVersion = do
let pomPath = projectPath project ++ "/pom.xml"
openTag = "<touch.{}.version>" % depName
closeTag = "</touch.{}.version>" % depName
void $ bash ("sed 's|\\(.*{}\\).*\\({}.*\\)|\\1{}\\2|g' -i {}" %% (openTag, closeTag, newVersion, pomPath))
updateDependencyVersions :: [Project] -> Action
updateDependencyVersions projects = do
let steps = tail $ inits projects
forM_ (zip [0..] steps) $ \(i, s) -> do
changeVersionsAction s (projects !! i)
where
changeVersionsAction :: [Project] -> Project -> Action
changeVersionsAction ps t = forM_ ps $ \p -> do
let pName = projectName p
pVersion <- projectVersion p
changeDependencyVersion t pName pVersion
properties :: Project -> Maybe ((Text, Text) -> Bool) -> Action
properties project mf = do
pom <- readPOM project
ps <- readProperties pom
let f = maybe (const True) id mf
vs = filter f ps
liftIO $ do
putStrLn ""
mapM_ (\(n,v) -> T.putStrLn . indent 1 $ "{}: {}" %% (n, v)) vs
snapshots :: Project -> Action
snapshots project = do
pom <- readPOM project
props <- readProperties pom
parentName <- readSingleValue pom ["project", "parent", "artifactId"]
parentVersion <- readSingleValue pom ["project", "parent", "version"]
let ps = ((parentName, parentVersion):props)
let f (_, v) = "SNAPSHOT" `T.isInfixOf` v
vs = filter f ps
liftIO $ do
putStrLn ""
mapM_ (\(n,v) -> T.putStrLn . indent 1 $ "{}: {}" %% (n, v)) vs
readProperties :: [X.Content] -> ActionM [(Text, Text)]
readProperties rs = do
let path = ["project", "properties"]
prop = pack . X.qName . X.elName &&& value
ps = mapElementsAt path (map prop . X.elChildren) rs
case ps of
[] -> throwA $ "Couldn't find /{}" % T.intercalate "/" path
[p] -> return p
_ -> throwA $ "Found multiple /{}" % T.intercalate "/" path
readSingleValue :: [X.Content] -> Path -> ActionM Text
readSingleValue rs path = do
let vs = mapElementsAt path value rs
case vs of
[] -> throwA $ "Couldn't find /{}" % T.intercalate "/" path
[v] -> return v
_ -> throwA $ "Found multiple /{}" % T.intercalate "/" path
readPOM :: Project -> ActionM [X.Content]
readPOM project = do
let pom = projectPath project ++ "/pom.xml"
pomExists <- liftIO $ doesFileExist pom
unless pomExists $
throwA $ "pom doesn't exist at '{}'" % pack pom
readXML pom
|
andregr/bot
|
lib/Bot/Action/Maven.hs
|
Haskell
|
bsd-3-clause
| 4,303
|
-- This afternoon
-- 1. How do we constrain fully crossed?
-- 2. Set up minimal example
-- a. Minimal block rewrite
-- b. Does it desugar correctly?
-- c. Write tests & update the error handling
-- 3. Deal with blocks for real
-- 4. Deal with exact constraint syntax/wording & wtf is going on with transitions
-- 1. Import that fixes all the errors
-------- Experiment ---------
main :: IO ()
main = experiment (Block (fully-crossed design) theConstraints)
where
----------- Streams --------------
color = Stream "color" ["red", "blue"]
shape = Stream "shape" ["circle", "square"]
--------- Transitions --------------
---------- Constraints -------------
theConstraints = Constraints (count (None, 3) color) -- this count syntax doesn't fly
-- syntax/constructor for "no constraints"
noConstraints = []
---------- Design ------------
design = cross [color, shape]
----------- Blocks ------------ # fully crossed constraints not good enough!!!
|
anniecherk/pyschocnf
|
notes/parser_interface/minimal.hs
|
Haskell
|
bsd-3-clause
| 1,067
|
{-# LANGUAGE PatternSynonyms #-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.GL.AMD.FramebufferSamplePositions
-- Copyright : (c) Sven Panne 2019
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
--------------------------------------------------------------------------------
module Graphics.GL.AMD.FramebufferSamplePositions (
-- * Extension Support
glGetAMDFramebufferSamplePositions,
gl_AMD_framebuffer_sample_positions,
-- * Enums
pattern GL_ALL_PIXELS_AMD,
pattern GL_PIXELS_PER_SAMPLE_PATTERN_X_AMD,
pattern GL_PIXELS_PER_SAMPLE_PATTERN_Y_AMD,
pattern GL_SUBSAMPLE_DISTANCE_AMD,
-- * Functions
glFramebufferSamplePositionsfvAMD,
glGetFramebufferParameterfvAMD,
glGetNamedFramebufferParameterfvAMD,
glNamedFramebufferSamplePositionsfvAMD
) where
import Graphics.GL.ExtensionPredicates
import Graphics.GL.Tokens
import Graphics.GL.Functions
|
haskell-opengl/OpenGLRaw
|
src/Graphics/GL/AMD/FramebufferSamplePositions.hs
|
Haskell
|
bsd-3-clause
| 1,028
|
{-# LANGUAGE TypeOperators #-}
{-# Language RebindableSyntax #-}
{-# Language ScopedTypeVariables #-}
{-# Language FlexibleContexts #-}
module Main where
import Prelude hiding ((>>=), (>>), fail, return, id, print, mod)
import Symmetry.Language
import Symmetry.Verify
import Symmetry.SymbEx
import SrcHelper
type Msg = (Pid RSing) :+: -- Poke ProcessId
Int -- Ans Int
poke_msg :: SieveSem repr => repr (Pid RSing -> Msg)
poke_msg = lam $ \pid -> inl pid
ans_msg :: SieveSem repr => repr (Int -> Msg)
ans_msg = lam $ \n -> inr n
recv_poke :: SieveSem repr => repr (Process repr (Pid RSing))
recv_poke = do msg :: repr Msg <- recv
match msg id reject
recv_ans :: SieveSem repr => repr (Process repr Int)
recv_ans = do msg :: repr Msg <- recv
match msg reject id
class ( HelperSym repr
) => SieveSem repr
instance SieveSem SymbEx
sieve_main :: SieveSem repr => repr (Process repr ())
sieve_main = do me <- self
r_gen <- newRSing
gen <- spawn r_gen (app counter (int 2))
r_s <- newRSing
spawn r_s (app2 sieve gen me)
dump
dump :: SieveSem repr => repr (Process repr ())
dump = do let f_dump = lam $ \dump -> lam $ \_ ->
do x :: repr Int <- recv
app print x
app dump tt
app (fixM f_dump) tt
counter :: SieveSem repr => repr (Int -> Process repr ())
counter = lam $ \n ->
do let f_counter = lam $ \counter -> lam $ \n ->
do poke_from <- recv_poke
send poke_from (app ans_msg n)
app counter (plus n (int 1))
app (fixM f_counter) n
ret tt
sieve :: SieveSem repr => repr (Pid RSing -> Pid RSing -> Process repr ())
sieve = lam $ \input -> lam $ \output ->
do let f_sieve = lam $ \sieve -> lam $ \arg ->
do let input = proj1 arg
output = proj2 arg
me <- self
send input (app poke_msg me)
x <- recv_ans
send output x
r <- newRSing
f <- spawn r (app2 filter2 x input)
app sieve $ pair f output
app (fixM f_sieve) $ pair input output
ret tt
type T_ar3 = () :+: Pid RSing
type T_f3 = (Int,(Pid RSing,T_ar3))
f_filter :: SieveSem repr
=> repr ((T_f3 -> Process repr T_f3) -> T_f3 -> Process repr T_f3)
f_filter = lam $ \filter -> lam $ \arg ->
do let test_n = proj1 arg
input = proj1 $ proj2 arg
m_output = proj2 $ proj2 arg
match m_output
(lam $ \_ ->
do from <- recv_poke -- filter2
app filter $ pair3 test_n input (inr from))
(lam $ \output ->
do me <- self -- filter3
send input (app poke_msg me)
y <- recv_ans
test_v <- app2 divisible_by test_n y
ifte test_v
(app filter $ pair3 test_n input (inr output))
(do send output (app ans_msg y)
app filter $ pair3 test_n input (inl tt)))
filter2 :: SieveSem repr
=> repr (Int -> Pid RSing -> Process repr ())
filter2 = lam $ \test_n -> lam $ \input ->
do app (fixM f_filter) $ pair3 test_n input (inl tt)
ret tt
divisible_by :: SieveSem repr
=> repr (Int -> Int -> Process repr Boolean)
divisible_by = lam $ \x -> lam $ \y ->
do r <- app2 mod y x
ifte (eq r (int 0))
(ret $ inl tt)
(ret $ inr tt)
main :: IO ()
main = checkerMain $ exec sieve_main
|
abakst/symmetry
|
checker/tests/todo/SrcSieve.hs
|
Haskell
|
mit
| 4,162
|
module SymBoilerPlate where
{-@ nonDet :: a -> x:Int -> {v:Int | 0 <= v && v < x } @-}
nonDet :: a -> Int -> Int
nonDet = undefined
{-@ nonDetRange :: x:Int -> y:Int -> {v:Int | x <= v && v < y} @-}
nonDetRange :: Int -> Int -> Int
nonDetRange = undefined
{-@
data Val p = VUnit {}
| VUnInit {}
| VInt { vInt :: Int }
| VString { vString :: String }
| VSet { vSetName :: String }
| VPid { vPid :: p }
| VInR { vInR :: Val p }
| VInL { vInL :: Val p }
| VPair { vLeft :: Val p, vRight :: Val p }
@-}
data Val p = VUnit {}
| VUnInit {}
| VInt { vInt :: Int }
| VString { vString :: String }
| VSet { vSetName :: String }
| VPid { vPid :: p }
| VInR { vInR :: Val p }
| VInL { vInL :: Val p }
| VPair { vLeft :: Val p, vRight :: Val p }
deriving (Show)
isVUnit, isVUnInit, isVInt, isVString, isVPid, isVInR, isVInL, isVPair, isVSet :: Val p -> Bool
isVUnit VUnit{} = True
isVUnit _ = False
isVUnInit VUnInit{} = True
isVUnInit _ = False
isVInt VInt{} = True
isVInt _ = False
isVString VString{} = True
isVString _ = False
isVSet VSet{} = True
isVSet _ = False
isVPid VPid{} = True
isVPid _ = False
isVInR VInR{} = True
isVInR _ = False
isVInL VInL{} = True
isVInL _ = False
isVPair VPair{} = True
isVPair _ = False
{-@ measure isVUnit @-}
{-@ measure isVUnInit @-}
{-@ measure isVInt @-}
{-@ measure isVString @-}
{-@ measure isVPid @-}
{-@ measure isVInL @-}
{-@ measure isVInR @-}
{-@ measure isVPair @-}
{-@ measure isVSet @-}
|
abakst/symmetry
|
checker/include/SymBoilerPlate.hs
|
Haskell
|
mit
| 1,749
|
{-# LANGUAGE GADTs, PolyKinds, RankNTypes #-}
module GADTVars where
import Data.Kind
import Data.Proxy
data T (k1 :: Type) (k2 :: Type) (a :: k2) (b :: k2) where
MkT :: T x1 Type (Proxy (y :: x1), z) z
|
sdiehl/ghc
|
testsuite/tests/dependent/should_compile/mkGADTVars.hs
|
Haskell
|
bsd-3-clause
| 207
|
module MyHttp (RequestType (..), Request(..), Response(..), Context(..), ServerPart) where
data RequestType = Get | Post
data Request = Request
{ route :: String
, reqtype :: RequestType
}
data Response = Response
{ content :: String
, statusCode :: Int
}
instance Show Response where
show (Response cntnt stts) =
"Status Code: " ++ show stts ++ "\n" ++ "Content: " ++ cntnt
data Context = Context
{ request :: Request
, response :: Response
}
type ServerPart = Context -> Maybe Context
|
nicolocodev/learnhappstack
|
3_Composition/MyHttp.hs
|
Haskell
|
mit
| 527
|
{-
(c) The AQUA Project, Glasgow University, 1993-1998
\section{Common subexpression}
-}
{-# LANGUAGE CPP #-}
module ETA.SimplCore.CSE (cseProgram) where
#include "HsVersions.h"
import ETA.Core.CoreSubst
import ETA.BasicTypes.Var ( Var )
import ETA.BasicTypes.Id ( Id, idType, idInlineActivation, zapIdOccInfo, zapIdUsageInfo )
import ETA.Core.CoreUtils ( mkAltExpr
, exprIsTrivial
, stripTicksE, stripTicksT, stripTicksTopE, mkTick, mkTicks )
import ETA.Types.Type ( tyConAppArgs )
import ETA.Core.CoreSyn
import ETA.Utils.Outputable
import ETA.BasicTypes.BasicTypes ( isAlwaysActive )
import ETA.Core.TrieMap
import Data.List
{-
Simple common sub-expression
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we see
x1 = C a b
x2 = C x1 b
we build up a reverse mapping: C a b -> x1
C x1 b -> x2
and apply that to the rest of the program.
When we then see
y1 = C a b
y2 = C y1 b
we replace the C a b with x1. But then we *don't* want to
add x1 -> y1 to the mapping. Rather, we want the reverse, y1 -> x1
so that a subsequent binding
y2 = C y1 b
will get transformed to C x1 b, and then to x2.
So we carry an extra var->var substitution which we apply *before* looking up in the
reverse mapping.
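
A minimal sketch of this reverse-mapping idea on a toy expression type
(illustrative only; the names below are hypothetical and not part of this
module):

    import qualified Data.Map as M

    data E = V String | C String [E] deriving (Eq, Ord, Show)

    -- Walk the bindings carrying a reverse map (expr -> var) and a
    -- var -> var substitution that is applied *before* the lookup.
    cseToy :: [(String, E)] -> [(String, E)]
    cseToy = go M.empty M.empty
      where
        go _ _ [] = []
        go rev sub ((x, rhs) : bs) =
          let rhs' = subst sub rhs
          in case M.lookup rhs' rev of
               Just y  -> (x, V y)  : go rev (M.insert x y sub) bs
               Nothing -> (x, rhs') : go (M.insert rhs' x rev) sub bs
        subst sub (V v)    = V (M.findWithDefault v v sub)
        subst sub (C c es) = C c (map (subst sub) es)

On the example above this rewrites y1 = C a b to y1 = x1, and then
y2 = C y1 b becomes C x1 b under the substitution and is recognised as x2.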
Note [Shadowing]
~~~~~~~~~~~~~~~~
We have to be careful about shadowing.
For example, consider
f = \x -> let y = x+x in
h = \x -> x+x
in ...
Here we must *not* do CSE on the inner x+x! The simplifier used to guarantee no
shadowing, but it doesn't any more (it proved too hard), so we clone as we go.
We can simply add clones to the substitution already described.
Note [Case binders 1]
~~~~~~~~~~~~~~~~~~~~~~
Consider
f = \x -> case x of wild {
(a:as) -> case a of wild1 {
(p,q) -> ...(wild1:as)...
Here, (wild1:as) is morally the same as (a:as) and hence equal to wild.
But that's not quite obvious. In general we want to keep it as (wild1:as),
but for CSE purpose that's a bad idea.
So we add the binding (wild1 -> a) to the extra var->var mapping.
Notice this is exactly backwards to what the simplifier does, which is
to try to replaces uses of 'a' with uses of 'wild1'
Note [Case binders 2]
~~~~~~~~~~~~~~~~~~~~~~
Consider
case (h x) of y -> ...(h x)...
We'd like to replace (h x) in the alternative, by y. But because of
the preceding [Note: case binders 1], we only want to add the mapping
scrutinee -> case binder
to the reverse CSE mapping if the scrutinee is a non-trivial expression.
(If the scrutinee is a simple variable we want to add the mapping
case binder -> scrutinee
to the substitution.)
Note [CSE for INLINE and NOINLINE]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
There are some subtle interactions of CSE with functions that the user
has marked as INLINE or NOINLINE. (Examples from Roman Leshchinskiy.)
Consider
yes :: Int {-# NOINLINE yes #-}
yes = undefined
no :: Int {-# NOINLINE no #-}
no = undefined
foo :: Int -> Int -> Int {-# NOINLINE foo #-}
foo m n = n
{-# RULES "foo/no" foo no = id #-}
bar :: Int -> Int
bar = foo yes
We do not expect the rule to fire. But if we do CSE, then we risk
getting yes=no, and the rule does fire. Actually, it won't because
NOINLINE means that 'yes' will never be inlined, not even if we have
yes=no. So that's fine (now; perhaps in the olden days, yes=no would
have substituted even if 'yes' was NOINLINE.)
But we do need to take care. Consider
{-# NOINLINE bar #-}
bar = <rhs> -- Same rhs as foo
foo = <rhs>
If CSE produces
foo = bar
then foo will never be inlined to <rhs> (when it should be, if <rhs>
is small). The conclusion here is this:
We should not add
<rhs> :-> bar
to the CSEnv if 'bar' has any constraints on when it can inline;
that is, if its 'activation' not always active. Otherwise we
might replace <rhs> by 'bar', and then later be unable to see that it
really was <rhs>.
Note that we do not (currently) do CSE on the unfolding stored inside
an Id, even if is a 'stable' unfolding. That means that when an
unfolding happens, it is always faithful to what the stable unfolding
originally was.
Note [CSE for case expressions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
case f x of y { pat -> ...let y = f x in ... }
Then we can CSE the inner (f x) to y. In fact 'case' is like a strict
let-binding, and we can use cseRhs for dealing with the scrutinee.
************************************************************************
* *
\section{Common subexpression}
* *
************************************************************************
-}
cseProgram :: CoreProgram -> CoreProgram
cseProgram binds = snd (mapAccumL cseBind emptyCSEnv binds)
cseBind :: CSEnv -> CoreBind -> (CSEnv, CoreBind)
cseBind env (NonRec b e)
= (env2, NonRec b'' e')
where
(env1, b') = addBinder env b
(env2, (b'', e')) = cseRhs env1 (b',e)
cseBind env (Rec pairs)
= (env2, Rec pairs')
where
(bs,es) = unzip pairs
(env1, bs') = addRecBinders env bs
(env2, pairs') = mapAccumL cseRhs env1 (bs' `zip` es)
cseRhs :: CSEnv -> (OutBndr, InExpr) -> (CSEnv, (OutBndr, OutExpr))
cseRhs env (id',rhs)
= case lookupCSEnv env rhs'' of
Nothing
| always_active -> (extendCSEnv env rhs' id', (zapped_id, rhs'))
| otherwise -> (env, (id', rhs'))
Just id
| always_active -> (extendCSSubst env id' id, (id', mkTicks ticks $ Var id))
| otherwise -> (env, (id', mkTicks ticks $ Var id))
-- In the Just case, we have
-- x = rhs
-- ...
-- x' = rhs
-- We are replacing the second binding with x'=x
-- and so must record that in the substitution so
-- that subsequent uses of x' are replaced with x,
-- See Trac #5996
where
zapped_id = zapIdUsageInfo id'
-- Putting the Id into the environment makes it possible that
-- it'll become shared more than it is now, which would
-- invalidate (the usage part of) its demand info. This caused
-- Trac #10218.
-- Easiest thing is to zap the usage info; subsequently
-- performing late demand-analysis will restore it. Don't zap
-- the strictness info; it's not necessary to do so, and losing
-- it is bad for performance if you don't do late demand
-- analysis
rhs' = cseExpr env rhs
ticks = stripTicksT tickishFloatable rhs'
rhs'' = stripTicksE tickishFloatable rhs'
-- We don't want to lose the source notes when a common sub
-- expression gets eliminated. Hence we push all (!) of them on
-- top of the replaced sub-expression. This is probably not too
-- useful in practice, but upholds our semantics.
always_active = isAlwaysActive (idInlineActivation id')
-- See Note [CSE for INLINE and NOINLINE]
tryForCSE :: CSEnv -> InExpr -> OutExpr
tryForCSE env expr
| exprIsTrivial expr' = expr' -- No point
| Just smaller <- lookupCSEnv env expr'' = foldr mkTick (Var smaller) ticks
| otherwise = expr'
where
expr' = cseExpr env expr
expr'' = stripTicksE tickishFloatable expr'
ticks = stripTicksT tickishFloatable expr'
cseExpr :: CSEnv -> InExpr -> OutExpr
cseExpr env (Type t) = Type (substTy (csEnvSubst env) t)
cseExpr env (Coercion c) = Coercion (substCo (csEnvSubst env) c)
cseExpr _ (Lit lit) = Lit lit
cseExpr env (Var v) = lookupSubst env v
cseExpr env (App f a) = App (cseExpr env f) (tryForCSE env a)
cseExpr env (Tick t e) = Tick t (cseExpr env e)
cseExpr env (Cast e co) = Cast (cseExpr env e) (substCo (csEnvSubst env) co)
cseExpr env (Lam b e) = let (env', b') = addBinder env b
in Lam b' (cseExpr env' e)
cseExpr env (Let bind e) = let (env', bind') = cseBind env bind
in Let bind' (cseExpr env' e)
cseExpr env (Case scrut bndr ty alts) = Case scrut' bndr''' ty alts'
where
alts' = cseAlts env2 scrut' bndr bndr'' alts
(env1, bndr') = addBinder env bndr
bndr'' = zapIdOccInfo bndr'
-- The swizzling from Note [Case binders 2] may
-- cause a dead case binder to be alive, so we
-- play safe here and bring them all to life
(env2, (bndr''', scrut')) = cseRhs env1 (bndr'', scrut)
-- Note [CSE for case expressions]
cseAlts :: CSEnv -> OutExpr -> InBndr -> InBndr -> [InAlt] -> [OutAlt]
cseAlts env scrut' bndr bndr' alts
= map cse_alt alts
where
scrut'' = stripTicksTopE tickishFloatable scrut'
(con_target, alt_env)
= case scrut'' of
Var v' -> (v', extendCSSubst env bndr v') -- See Note [Case binders 1]
-- map: bndr -> v'
_ -> (bndr', extendCSEnv env scrut' bndr') -- See Note [Case binders 2]
-- map: scrut' -> bndr'
arg_tys = tyConAppArgs (idType bndr)
cse_alt (DataAlt con, args, rhs)
| not (null args)
-- Don't try CSE if there are no args; it just increases the number
-- of live vars. E.g.
-- case x of { True -> ....True.... }
-- Don't replace True by x!
-- Hence the 'null args', which also deal with literals and DEFAULT
= (DataAlt con, args', tryForCSE new_env rhs)
where
(env', args') = addBinders alt_env args
new_env = extendCSEnv env' con_expr con_target
con_expr = mkAltExpr (DataAlt con) args' arg_tys
cse_alt (con, args, rhs)
= (con, args', tryForCSE env' rhs)
where
(env', args') = addBinders alt_env args
{-
************************************************************************
* *
\section{The CSE envt}
* *
************************************************************************
-}
type InExpr = CoreExpr -- Pre-cloning
type InBndr = CoreBndr
type InAlt = CoreAlt
type OutExpr = CoreExpr -- Post-cloning
type OutBndr = CoreBndr
type OutAlt = CoreAlt
data CSEnv = CS { cs_map :: CoreMap (OutExpr, Id) -- Key, value
, cs_subst :: Subst }
emptyCSEnv :: CSEnv
emptyCSEnv = CS { cs_map = emptyCoreMap, cs_subst = emptySubst }
lookupCSEnv :: CSEnv -> OutExpr -> Maybe Id
lookupCSEnv (CS { cs_map = csmap }) expr
= case lookupCoreMap csmap expr of
Just (_,e) -> Just e
Nothing -> Nothing
extendCSEnv :: CSEnv -> OutExpr -> Id -> CSEnv
extendCSEnv cse expr id
= cse { cs_map = extendCoreMap (cs_map cse) sexpr (sexpr,id) }
where sexpr = stripTicksE tickishFloatable expr
csEnvSubst :: CSEnv -> Subst
csEnvSubst = cs_subst
lookupSubst :: CSEnv -> Id -> OutExpr
lookupSubst (CS { cs_subst = sub}) x = lookupIdSubst (text "CSE.lookupSubst") sub x
extendCSSubst :: CSEnv -> Id -> Id -> CSEnv
extendCSSubst cse x y = cse { cs_subst = extendIdSubst (cs_subst cse) x (Var y) }
addBinder :: CSEnv -> Var -> (CSEnv, Var)
addBinder cse v = (cse { cs_subst = sub' }, v')
where
(sub', v') = substBndr (cs_subst cse) v
addBinders :: CSEnv -> [Var] -> (CSEnv, [Var])
addBinders cse vs = (cse { cs_subst = sub' }, vs')
where
(sub', vs') = substBndrs (cs_subst cse) vs
addRecBinders :: CSEnv -> [Id] -> (CSEnv, [Id])
addRecBinders cse vs = (cse { cs_subst = sub' }, vs')
where
(sub', vs') = substRecBndrs (cs_subst cse) vs
|
alexander-at-github/eta
|
compiler/ETA/SimplCore/CSE.hs
|
Haskell
|
bsd-3-clause
| 12,560
|
{-
Copyright (C) 2009 John Goerzen <jgoerzen@complete.org>
All rights reserved.
For license and copyright information, see the file COPYRIGHT
-}
module TestInfrastructure where
import qualified Test.QuickCheck as QC
import qualified Test.HUnit as HU
import Test.HUnit.Tools
q :: QC.Testable a => String -> a -> HU.Test
q = qc2hu 250
qverbose :: QC.Testable a => String -> a -> HU.Test
qverbose = qc2hu 250
|
cabrera/hdbc
|
testsrc/TestInfrastructure.hs
|
Haskell
|
bsd-3-clause
| 410
|
{-# LANGUAGE OverloadedStrings #-}
module OperationalTests where
import Type
import Operational
base_send1 :: Test
base_send1 = Test "Base" "send1" [
Transactional [NewStream StringT "s_"] [(StreamSinkT StringT, "s_", s)],
NewList StringT out,
Transactional [Listen "l_" s (StringT, "a") [AppendList out (V "a")]] [(ListenerT, "l_", l)],
Transactional [Send s "a"] [],
Transactional [Send s "b"] [],
Unlisten l,
AssertEqual (lit ["a", "b" :: String]) out
]
where
s = "s"
out = "out"
l = "l"
operationalTests :: [Test]
operationalTests =
[base_send1]
|
kevintvh/sodium
|
common-tests/OperationalTests.hs
|
Haskell
|
bsd-3-clause
| 604
|
-------------------------------------------------------------------------------
-- |
-- Module : CCO.Tree
-- Copyright : (c) 2008 Utrecht University
-- License : All rights reserved
--
-- Maintainer : stefan@cs.uu.nl
-- Stability : provisional
-- Portability : portable
--
-- A straightforward implementation of the ATerm format for exchanging
-- tree-structured data; see
--
-- * Mark van den Brand, Hayco de Jong, Paul Klint, and Pieter A. Olivier.
-- Efficient annotated terms.
-- /Software - Practice and Experience (SPE)/, 30(3):259-291, 2000.
--
-------------------------------------------------------------------------------
module CCO.Tree (
-- * The @ATerm@ type
ATerm (..) -- instances: Eq, Read, Show, Printable
, Con -- = String
-- * The @Tree@ class
, Tree (..)
-- * Parser
, parser -- :: Component String ATerm
) where
import CCO.Tree.ATerm (Con, ATerm (..))
import CCO.Tree.ATerm.Parser (parser)
import CCO.Tree.Base (Tree (fromTree, toTree))
import CCO.Tree.Instances ()
|
UU-ComputerScience/uu-cco
|
uu-cco/src/CCO/Tree.hs
|
Haskell
|
bsd-3-clause
| 1,078
|
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.GHC
-- Copyright : Isaac Jones 2003-2007
-- License : BSD3
--
-- Maintainer : cabal-devel@haskell.org
-- Portability : portable
--
-- This is a fairly large module. It contains most of the GHC-specific code for
-- configuring, building and installing packages. It also exports a function
-- for finding out what packages are already installed. Configuring involves
-- finding the @ghc@ and @ghc-pkg@ programs, finding what language extensions
-- this version of ghc supports and returning a 'Compiler' value.
--
-- 'getInstalledPackages' involves calling the @ghc-pkg@ program to find out
-- what packages are installed.
--
-- Building is somewhat complex as there is quite a bit of information to take
-- into account. We have to build libs and programs, possibly for profiling and
-- shared libs. We have to support building libraries that will be usable by
-- GHCi and also ghc's @-split-objs@ feature. We have to compile any C files
-- using ghc. Linking, especially for @split-objs@ is remarkably complex,
-- partly because there tend to be 1,000's of @.o@ files and this can often be
-- more than we can pass to the @ld@ or @ar@ programs in one go.
--
-- Installing for libs and exes involves finding the right files and copying
-- them to the right places. One of the more tricky things about this module is
-- remembering the layout of files in the build directory (which is not
-- explicitly documented) and thus what search dirs are used for various kinds
-- of files.
module Distribution.Simple.GHC (
getGhcInfo,
configure, getInstalledPackages, getPackageDBContents,
buildLib, buildExe,
replLib, replExe,
startInterpreter,
installLib, installExe,
libAbiHash,
hcPkgInfo,
registerPackage,
componentGhcOptions,
componentCcGhcOptions,
getLibDir,
isDynamic,
getGlobalPackageDB,
pkgRoot
) where
import qualified Distribution.Simple.GHC.IPI641 as IPI641
import qualified Distribution.Simple.GHC.IPI642 as IPI642
import qualified Distribution.Simple.GHC.Internal as Internal
import Distribution.Simple.GHC.ImplInfo
import Distribution.PackageDescription as PD
( PackageDescription(..), BuildInfo(..), Executable(..), Library(..)
, allExtensions, libModules, exeModules
, hcOptions, hcSharedOptions, hcProfOptions )
import Distribution.InstalledPackageInfo
( InstalledPackageInfo )
import qualified Distribution.InstalledPackageInfo as InstalledPackageInfo
( InstalledPackageInfo(..) )
import Distribution.Simple.PackageIndex (InstalledPackageIndex)
import qualified Distribution.Simple.PackageIndex as PackageIndex
import Distribution.Simple.LocalBuildInfo
( LocalBuildInfo(..), ComponentLocalBuildInfo(..)
, absoluteInstallDirs, depLibraryPaths )
import qualified Distribution.Simple.Hpc as Hpc
import Distribution.Simple.InstallDirs hiding ( absoluteInstallDirs )
import Distribution.Simple.BuildPaths
import Distribution.Simple.Utils
import Distribution.Package
( PackageName(..) )
import qualified Distribution.ModuleName as ModuleName
import Distribution.Simple.Program
( Program(..), ConfiguredProgram(..), ProgramConfiguration
, ProgramSearchPath
, rawSystemProgramStdout, rawSystemProgramStdoutConf
, getProgramInvocationOutput, requireProgramVersion, requireProgram
, userMaybeSpecifyPath, programPath, lookupProgram, addKnownProgram
, ghcProgram, ghcPkgProgram, haddockProgram, hsc2hsProgram, ldProgram )
import qualified Distribution.Simple.Program.HcPkg as HcPkg
import qualified Distribution.Simple.Program.Ar as Ar
import qualified Distribution.Simple.Program.Ld as Ld
import qualified Distribution.Simple.Program.Strip as Strip
import Distribution.Simple.Program.GHC
import Distribution.Simple.Setup
( toFlag, fromFlag, fromFlagOrDefault, configCoverage, configDistPref )
import qualified Distribution.Simple.Setup as Cabal
( Flag(..) )
import Distribution.Simple.Compiler
( CompilerFlavor(..), CompilerId(..), Compiler(..), compilerVersion
, PackageDB(..), PackageDBStack, AbiTag(..) )
import Distribution.Version
( Version(..), anyVersion, orLaterVersion )
import Distribution.System
( Platform(..), OS(..) )
import Distribution.Verbosity
import Distribution.Text
( display )
import Distribution.Utils.NubList
( NubListR, overNubListR, toNubListR )
import Language.Haskell.Extension (Extension(..), KnownExtension(..))
import Control.Monad ( unless, when )
import Data.Char ( isDigit, isSpace )
import Data.List
import qualified Data.Map as M ( fromList )
import Data.Maybe ( catMaybes )
#if __GLASGOW_HASKELL__ < 710
import Data.Monoid ( Monoid(..) )
#endif
import Data.Version ( showVersion )
import System.Directory
( doesFileExist, getAppUserDataDirectory, createDirectoryIfMissing )
import System.FilePath ( (</>), (<.>), takeExtension,
takeDirectory, replaceExtension,
splitExtension, isRelative )
import qualified System.Info
-- -----------------------------------------------------------------------------
-- Configuring
configure :: Verbosity -> Maybe FilePath -> Maybe FilePath
-> ProgramConfiguration
-> IO (Compiler, Maybe Platform, ProgramConfiguration)
configure verbosity hcPath hcPkgPath conf0 = do
(ghcProg, ghcVersion, conf1) <-
requireProgramVersion verbosity ghcProgram
(orLaterVersion (Version [6,4] []))
(userMaybeSpecifyPath "ghc" hcPath conf0)
let implInfo = ghcVersionImplInfo ghcVersion
-- This is slightly tricky, we have to configure ghc first, then we use the
-- location of ghc to help find ghc-pkg in the case that the user did not
-- specify the location of ghc-pkg directly:
(ghcPkgProg, ghcPkgVersion, conf2) <-
requireProgramVersion verbosity ghcPkgProgram {
programFindLocation = guessGhcPkgFromGhcPath ghcProg
}
anyVersion (userMaybeSpecifyPath "ghc-pkg" hcPkgPath conf1)
when (ghcVersion /= ghcPkgVersion) $ die $
"Version mismatch between ghc and ghc-pkg: "
++ programPath ghcProg ++ " is version " ++ display ghcVersion ++ " "
++ programPath ghcPkgProg ++ " is version " ++ display ghcPkgVersion
-- Likewise we try to find the matching hsc2hs and haddock programs.
let hsc2hsProgram' = hsc2hsProgram {
programFindLocation = guessHsc2hsFromGhcPath ghcProg
}
haddockProgram' = haddockProgram {
programFindLocation = guessHaddockFromGhcPath ghcProg
}
conf3 = addKnownProgram haddockProgram' $
addKnownProgram hsc2hsProgram' conf2
languages <- Internal.getLanguages verbosity implInfo ghcProg
extensions <- Internal.getExtensions verbosity implInfo ghcProg
ghcInfo <- Internal.getGhcInfo verbosity implInfo ghcProg
let ghcInfoMap = M.fromList ghcInfo
let comp = Compiler {
compilerId = CompilerId GHC ghcVersion,
compilerAbiTag = NoAbiTag,
compilerCompat = [],
compilerLanguages = languages,
compilerExtensions = extensions,
compilerProperties = ghcInfoMap
}
compPlatform = Internal.targetPlatform ghcInfo
-- configure gcc and ld
conf4 = Internal.configureToolchain implInfo ghcProg ghcInfoMap conf3
return (comp, compPlatform, conf4)
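-- A hedged usage sketch for 'configure' above (assumes
-- 'defaultProgramConfiguration' from Distribution.Simple.Program and
-- 'normal' from Distribution.Verbosity; passing Nothing twice lets it
-- search the PATH for ghc and ghc-pkg):
--
-- > (comp, mbPlatform, progConf) <-
-- >   configure normal Nothing Nothing defaultProgramConfiguration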
-- | Given something like /usr/local/bin/ghc-6.6.1(.exe) we try and find
-- the corresponding tool; e.g. if the tool is ghc-pkg, we try looking
-- for a versioned or unversioned ghc-pkg in the same dir, that is:
--
-- > /usr/local/bin/ghc-pkg-ghc-6.6.1(.exe)
-- > /usr/local/bin/ghc-pkg-6.6.1(.exe)
-- > /usr/local/bin/ghc-pkg(.exe)
--
guessToolFromGhcPath :: Program -> ConfiguredProgram
-> Verbosity -> ProgramSearchPath
-> IO (Maybe FilePath)
guessToolFromGhcPath tool ghcProg verbosity searchpath
= do let toolname = programName tool
path = programPath ghcProg
dir = takeDirectory path
versionSuffix = takeVersionSuffix (dropExeExtension path)
guessNormal = dir </> toolname <.> exeExtension
guessGhcVersioned = dir </> (toolname ++ "-ghc" ++ versionSuffix)
<.> exeExtension
guessVersioned = dir </> (toolname ++ versionSuffix)
<.> exeExtension
guesses | null versionSuffix = [guessNormal]
| otherwise = [guessGhcVersioned,
guessVersioned,
guessNormal]
info verbosity $ "looking for tool " ++ toolname
++ " near compiler in " ++ dir
exists <- mapM doesFileExist guesses
case [ file | (file, True) <- zip guesses exists ] of
-- If we can't find it near ghc, fall back to the usual
-- method.
[] -> programFindLocation tool verbosity searchpath
(fp:_) -> do info verbosity $ "found " ++ toolname ++ " in " ++ fp
return (Just fp)
where takeVersionSuffix :: FilePath -> String
takeVersionSuffix = takeWhileEndLE isSuffixChar
isSuffixChar :: Char -> Bool
isSuffixChar c = isDigit c || c == '.' || c == '-'
dropExeExtension :: FilePath -> FilePath
dropExeExtension filepath =
case splitExtension filepath of
(filepath', extension) | extension == exeExtension -> filepath'
| otherwise -> filepath
-- | Given something like /usr/local/bin/ghc-6.6.1(.exe) we try and find a
-- corresponding ghc-pkg, we try looking for both a versioned and unversioned
-- ghc-pkg in the same dir, that is:
--
-- > /usr/local/bin/ghc-pkg-ghc-6.6.1(.exe)
-- > /usr/local/bin/ghc-pkg-6.6.1(.exe)
-- > /usr/local/bin/ghc-pkg(.exe)
--
guessGhcPkgFromGhcPath :: ConfiguredProgram
-> Verbosity -> ProgramSearchPath -> IO (Maybe FilePath)
guessGhcPkgFromGhcPath = guessToolFromGhcPath ghcPkgProgram
-- | Given something like /usr/local/bin/ghc-6.6.1(.exe) we try and find a
-- corresponding hsc2hs, we try looking for both a versioned and unversioned
-- hsc2hs in the same dir, that is:
--
-- > /usr/local/bin/hsc2hs-ghc-6.6.1(.exe)
-- > /usr/local/bin/hsc2hs-6.6.1(.exe)
-- > /usr/local/bin/hsc2hs(.exe)
--
guessHsc2hsFromGhcPath :: ConfiguredProgram
-> Verbosity -> ProgramSearchPath -> IO (Maybe FilePath)
guessHsc2hsFromGhcPath = guessToolFromGhcPath hsc2hsProgram
-- | Given something like /usr/local/bin/ghc-6.6.1(.exe) we try and find a
-- corresponding haddock, we try looking for both a versioned and unversioned
-- haddock in the same dir, that is:
--
-- > /usr/local/bin/haddock-ghc-6.6.1(.exe)
-- > /usr/local/bin/haddock-6.6.1(.exe)
-- > /usr/local/bin/haddock(.exe)
--
guessHaddockFromGhcPath :: ConfiguredProgram
-> Verbosity -> ProgramSearchPath -> IO (Maybe FilePath)
guessHaddockFromGhcPath = guessToolFromGhcPath haddockProgram
getGhcInfo :: Verbosity -> ConfiguredProgram -> IO [(String, String)]
getGhcInfo verbosity ghcProg = Internal.getGhcInfo verbosity implInfo ghcProg
where
Just version = programVersion ghcProg
implInfo = ghcVersionImplInfo version
-- | Given a single package DB, return all installed packages.
getPackageDBContents :: Verbosity -> PackageDB -> ProgramConfiguration
-> IO InstalledPackageIndex
getPackageDBContents verbosity packagedb conf = do
pkgss <- getInstalledPackages' verbosity [packagedb] conf
toPackageIndex verbosity pkgss conf
-- | Given a package DB stack, return all installed packages.
getInstalledPackages :: Verbosity -> Compiler -> PackageDBStack -> ProgramConfiguration
-> IO InstalledPackageIndex
getInstalledPackages verbosity comp packagedbs conf = do
checkPackageDbEnvVar
checkPackageDbStack comp packagedbs
pkgss <- getInstalledPackages' verbosity packagedbs conf
index <- toPackageIndex verbosity pkgss conf
return $! hackRtsPackage index
where
hackRtsPackage index =
case PackageIndex.lookupPackageName index (PackageName "rts") of
[(_,[rts])]
-> PackageIndex.insert (removeMingwIncludeDir rts) index
_ -> index -- No (or multiple) ghc rts package is registered!!
-- Feh, whatever, the ghc test suite does some crazy stuff.
-- | Given a list of @(PackageDB, InstalledPackageInfo)@ pairs, produce a
-- @PackageIndex@. Helper function used by 'getPackageDBContents' and
-- 'getInstalledPackages'.
toPackageIndex :: Verbosity
-> [(PackageDB, [InstalledPackageInfo])]
-> ProgramConfiguration
-> IO InstalledPackageIndex
toPackageIndex verbosity pkgss conf = do
-- On Windows, various fields have $topdir/foo rather than full
-- paths. We need to substitute the right value in so that when
-- we, for example, call gcc, we have proper paths to give it.
topDir <- getLibDir' verbosity ghcProg
let indices = [ PackageIndex.fromList (map (Internal.substTopDir topDir) pkgs)
| (_, pkgs) <- pkgss ]
return $! (mconcat indices)
where
Just ghcProg = lookupProgram ghcProgram conf
getLibDir :: Verbosity -> LocalBuildInfo -> IO FilePath
getLibDir verbosity lbi =
dropWhileEndLE isSpace `fmap`
rawSystemProgramStdoutConf verbosity ghcProgram
(withPrograms lbi) ["--print-libdir"]
getLibDir' :: Verbosity -> ConfiguredProgram -> IO FilePath
getLibDir' verbosity ghcProg =
dropWhileEndLE isSpace `fmap`
rawSystemProgramStdout verbosity ghcProg ["--print-libdir"]
-- | Return the 'FilePath' to the global GHC package database.
getGlobalPackageDB :: Verbosity -> ConfiguredProgram -> IO FilePath
getGlobalPackageDB verbosity ghcProg =
dropWhileEndLE isSpace `fmap`
rawSystemProgramStdout verbosity ghcProg ["--print-global-package-db"]
checkPackageDbEnvVar :: IO ()
checkPackageDbEnvVar =
Internal.checkPackageDbEnvVar "GHC" "GHC_PACKAGE_PATH"
checkPackageDbStack :: Compiler -> PackageDBStack -> IO ()
checkPackageDbStack comp = if flagPackageConf implInfo
then checkPackageDbStackPre76
else checkPackageDbStackPost76
where implInfo = ghcVersionImplInfo (compilerVersion comp)
checkPackageDbStackPost76 :: PackageDBStack -> IO ()
checkPackageDbStackPost76 (GlobalPackageDB:rest)
| GlobalPackageDB `notElem` rest = return ()
checkPackageDbStackPost76 rest
| GlobalPackageDB `elem` rest =
die $ "If the global package db is specified, it must be "
++ "specified first and cannot be specified multiple times"
checkPackageDbStackPost76 _ = return ()
checkPackageDbStackPre76 :: PackageDBStack -> IO ()
checkPackageDbStackPre76 (GlobalPackageDB:rest)
| GlobalPackageDB `notElem` rest = return ()
checkPackageDbStackPre76 rest
| GlobalPackageDB `notElem` rest =
die $ "With current ghc versions the global package db is always used "
++ "and must be listed first. This ghc limitation is lifted in GHC 7.6,"
++ "see http://hackage.haskell.org/trac/ghc/ticket/5977"
checkPackageDbStackPre76 _ =
die $ "If the global package db is specified, it must be "
++ "specified first and cannot be specified multiple times"
-- GHC < 6.10 put "$topdir/include/mingw" in rts's installDirs. This
-- breaks when you want to use a different gcc, so we need to filter
-- it out.
removeMingwIncludeDir :: InstalledPackageInfo -> InstalledPackageInfo
removeMingwIncludeDir pkg =
let ids = InstalledPackageInfo.includeDirs pkg
ids' = filter (not . ("mingw" `isSuffixOf`)) ids
in pkg { InstalledPackageInfo.includeDirs = ids' }
-- | Get the packages from specific PackageDBs, not cumulative.
--
getInstalledPackages' :: Verbosity -> [PackageDB] -> ProgramConfiguration
-> IO [(PackageDB, [InstalledPackageInfo])]
getInstalledPackages' verbosity packagedbs conf
| ghcVersion >= Version [6,9] [] =
sequence
[ do pkgs <- HcPkg.dump (hcPkgInfo conf) verbosity packagedb
return (packagedb, pkgs)
| packagedb <- packagedbs ]
where
Just ghcProg = lookupProgram ghcProgram conf
Just ghcVersion = programVersion ghcProg
getInstalledPackages' verbosity packagedbs conf = do
str <- rawSystemProgramStdoutConf verbosity ghcPkgProgram conf ["list"]
let pkgFiles = [ init line | line <- lines str, last line == ':' ]
dbFile packagedb = case (packagedb, pkgFiles) of
(GlobalPackageDB, global:_) -> return $ Just global
(UserPackageDB, _global:user:_) -> return $ Just user
(UserPackageDB, _global:_) -> return $ Nothing
(SpecificPackageDB specific, _) -> return $ Just specific
_ -> die "cannot read ghc-pkg package listing"
pkgFiles' <- mapM dbFile packagedbs
sequence [ withFileContents file $ \content -> do
pkgs <- readPackages file content
return (db, pkgs)
| (db , Just file) <- zip packagedbs pkgFiles' ]
where
-- Depending on the version of ghc we use a different type's Read
-- instance to parse the package file and then convert.
-- It's a bit yuck. But that's what we get for using Read/Show.
readPackages
| ghcVersion >= Version [6,4,2] []
= \file content -> case reads content of
[(pkgs, _)] -> return (map IPI642.toCurrent pkgs)
_ -> failToRead file
| otherwise
= \file content -> case reads content of
[(pkgs, _)] -> return (map IPI641.toCurrent pkgs)
_ -> failToRead file
Just ghcProg = lookupProgram ghcProgram conf
Just ghcVersion = programVersion ghcProg
failToRead file = die $ "cannot read ghc package database " ++ file
-- -----------------------------------------------------------------------------
-- Building
-- | Build a library with GHC.
--
buildLib, replLib :: Verbosity -> Cabal.Flag (Maybe Int)
-> PackageDescription -> LocalBuildInfo
-> Library -> ComponentLocalBuildInfo -> IO ()
buildLib = buildOrReplLib False
replLib = buildOrReplLib True
buildOrReplLib :: Bool -> Verbosity -> Cabal.Flag (Maybe Int)
-> PackageDescription -> LocalBuildInfo
-> Library -> ComponentLocalBuildInfo -> IO ()
buildOrReplLib forRepl verbosity numJobs pkg_descr lbi lib clbi = do
let libName = componentId clbi
libTargetDir = buildDir lbi
whenVanillaLib forceVanilla =
when (forceVanilla || withVanillaLib lbi)
whenProfLib = when (withProfLib lbi)
whenSharedLib forceShared =
when (forceShared || withSharedLib lbi)
whenGHCiLib = when (withGHCiLib lbi && withVanillaLib lbi)
ifReplLib = when forRepl
comp = compiler lbi
ghcVersion = compilerVersion comp
implInfo = getImplInfo comp
(Platform _hostArch hostOS) = hostPlatform lbi
(ghcProg, _) <- requireProgram verbosity ghcProgram (withPrograms lbi)
let runGhcProg = runGHC verbosity ghcProg comp
libBi <- hackThreadedFlag verbosity
comp (withProfLib lbi) (libBuildInfo lib)
let isGhcDynamic = isDynamic comp
dynamicTooSupported = supportsDynamicToo comp
doingTH = EnableExtension TemplateHaskell `elem` allExtensions libBi
forceVanillaLib = doingTH && not isGhcDynamic
forceSharedLib = doingTH && isGhcDynamic
-- TH always needs default libs, even when building for profiling
-- Determine if program coverage should be enabled and if so, what
-- '-hpcdir' should be.
let isCoverageEnabled = fromFlag $ configCoverage $ configFlags lbi
-- Component name. Not 'libName' because that has the "HS" prefix
-- that GHC gives Haskell libraries.
cname = display $ PD.package $ localPkgDescr lbi
distPref = fromFlag $ configDistPref $ configFlags lbi
hpcdir way
| forRepl = mempty -- HPC is not supported in ghci
| isCoverageEnabled = toFlag $ Hpc.mixDir distPref way cname
| otherwise = mempty
createDirectoryIfMissingVerbose verbosity True libTargetDir
-- TODO: do we need to put hs-boot files into place for mutually recursive
-- modules?
let cObjs = map (`replaceExtension` objExtension) (cSources libBi)
baseOpts = componentGhcOptions verbosity lbi libBi clbi libTargetDir
vanillaOpts = baseOpts `mappend` mempty {
ghcOptMode = toFlag GhcModeMake,
ghcOptNumJobs = numJobs,
ghcOptInputModules = toNubListR $ libModules lib,
ghcOptHPCDir = hpcdir Hpc.Vanilla
}
profOpts = vanillaOpts `mappend` mempty {
ghcOptProfilingMode = toFlag True,
ghcOptProfilingAuto = Internal.profDetailLevelFlag True
(withProfLibDetail lbi),
ghcOptHiSuffix = toFlag "p_hi",
ghcOptObjSuffix = toFlag "p_o",
ghcOptExtra = toNubListR $ hcProfOptions GHC libBi,
ghcOptHPCDir = hpcdir Hpc.Prof
}
sharedOpts = vanillaOpts `mappend` mempty {
ghcOptDynLinkMode = toFlag GhcDynamicOnly,
ghcOptFPic = toFlag True,
ghcOptHiSuffix = toFlag "dyn_hi",
ghcOptObjSuffix = toFlag "dyn_o",
ghcOptExtra = toNubListR $ hcSharedOptions GHC libBi,
ghcOptHPCDir = hpcdir Hpc.Dyn
}
linkerOpts = mempty {
ghcOptLinkOptions = toNubListR $ PD.ldOptions libBi,
ghcOptLinkLibs = toNubListR $ extraLibs libBi,
ghcOptLinkLibPath = toNubListR $ extraLibDirs libBi,
ghcOptLinkFrameworks = toNubListR $ PD.frameworks libBi,
ghcOptInputFiles = toNubListR
[libTargetDir </> x | x <- cObjs]
}
replOpts = vanillaOpts {
ghcOptExtra = overNubListR
Internal.filterGhciFlags $
(ghcOptExtra vanillaOpts),
ghcOptNumJobs = mempty
}
`mappend` linkerOpts
`mappend` mempty {
ghcOptMode = toFlag GhcModeInteractive,
ghcOptOptimisation = toFlag GhcNoOptimisation
}
vanillaSharedOpts = vanillaOpts `mappend` mempty {
ghcOptDynLinkMode = toFlag GhcStaticAndDynamic,
ghcOptDynHiSuffix = toFlag "dyn_hi",
ghcOptDynObjSuffix = toFlag "dyn_o",
ghcOptHPCDir = hpcdir Hpc.Dyn
}
unless (forRepl || null (libModules lib)) $
do let vanilla = whenVanillaLib forceVanillaLib (runGhcProg vanillaOpts)
shared = whenSharedLib forceSharedLib (runGhcProg sharedOpts)
useDynToo = dynamicTooSupported &&
(forceVanillaLib || withVanillaLib lbi) &&
(forceSharedLib || withSharedLib lbi) &&
null (hcSharedOptions GHC libBi)
if useDynToo
then do
runGhcProg vanillaSharedOpts
case (hpcdir Hpc.Dyn, hpcdir Hpc.Vanilla) of
(Cabal.Flag dynDir, Cabal.Flag vanillaDir) -> do
-- When the vanilla and shared library builds are done
-- in one pass, only one set of HPC module interfaces
-- are generated. This set should suffice for both
-- static and dynamically linked executables. We copy
-- the modules interfaces so they are available under
-- both ways.
copyDirectoryRecursive verbosity dynDir vanillaDir
_ -> return ()
else if isGhcDynamic
then do shared; vanilla
else do vanilla; shared
whenProfLib (runGhcProg profOpts)
-- build any C sources
unless (null (cSources libBi)) $ do
info verbosity "Building C Sources..."
sequence_
[ do let baseCcOpts = Internal.componentCcGhcOptions verbosity implInfo
lbi libBi clbi libTargetDir filename
vanillaCcOpts = if isGhcDynamic
-- Dynamic GHC requires C sources to be built
-- with -fPIC for REPL to work. See #2207.
then baseCcOpts { ghcOptFPic = toFlag True }
else baseCcOpts
profCcOpts = vanillaCcOpts `mappend` mempty {
ghcOptProfilingMode = toFlag True,
ghcOptObjSuffix = toFlag "p_o"
}
sharedCcOpts = vanillaCcOpts `mappend` mempty {
ghcOptFPic = toFlag True,
ghcOptDynLinkMode = toFlag GhcDynamicOnly,
ghcOptObjSuffix = toFlag "dyn_o"
}
odir = fromFlag (ghcOptObjDir vanillaCcOpts)
createDirectoryIfMissingVerbose verbosity True odir
needsRecomp <- checkNeedsRecompilation filename vanillaCcOpts
when needsRecomp $ do
runGhcProg vanillaCcOpts
unless forRepl $
whenSharedLib forceSharedLib (runGhcProg sharedCcOpts)
unless forRepl $ whenProfLib (runGhcProg profCcOpts)
| filename <- cSources libBi]
-- TODO: problem here is we need the .c files built first, so we can load them
-- with ghci, but .c files can depend on .h files generated by ghc by ffi
-- exports.
ifReplLib $ do
when (null (libModules lib)) $ warn verbosity "No exposed modules"
ifReplLib (runGhcProg replOpts)
-- link:
unless forRepl $ do
info verbosity "Linking..."
let cProfObjs = map (`replaceExtension` ("p_" ++ objExtension))
(cSources libBi)
cSharedObjs = map (`replaceExtension` ("dyn_" ++ objExtension))
(cSources libBi)
cid = compilerId (compiler lbi)
vanillaLibFilePath = libTargetDir </> mkLibName libName
profileLibFilePath = libTargetDir </> mkProfLibName libName
sharedLibFilePath = libTargetDir </> mkSharedLibName cid libName
ghciLibFilePath = libTargetDir </> Internal.mkGHCiLibName libName
libInstallPath = libdir $ absoluteInstallDirs pkg_descr lbi NoCopyDest
sharedLibInstallPath = libInstallPath </> mkSharedLibName cid libName
stubObjs <- fmap catMaybes $ sequence
[ findFileWithExtension [objExtension] [libTargetDir]
(ModuleName.toFilePath x ++"_stub")
| ghcVersion < Version [7,2] [] -- ghc-7.2+ does not make _stub.o files
, x <- libModules lib ]
stubProfObjs <- fmap catMaybes $ sequence
[ findFileWithExtension ["p_" ++ objExtension] [libTargetDir]
(ModuleName.toFilePath x ++"_stub")
| ghcVersion < Version [7,2] [] -- ghc-7.2+ does not make _stub.o files
, x <- libModules lib ]
stubSharedObjs <- fmap catMaybes $ sequence
[ findFileWithExtension ["dyn_" ++ objExtension] [libTargetDir]
(ModuleName.toFilePath x ++"_stub")
| ghcVersion < Version [7,2] [] -- ghc-7.2+ does not make _stub.o files
, x <- libModules lib ]
hObjs <- Internal.getHaskellObjects implInfo lib lbi
libTargetDir objExtension True
hProfObjs <-
if (withProfLib lbi)
then Internal.getHaskellObjects implInfo lib lbi
libTargetDir ("p_" ++ objExtension) True
else return []
hSharedObjs <-
if (withSharedLib lbi)
then Internal.getHaskellObjects implInfo lib lbi
libTargetDir ("dyn_" ++ objExtension) False
else return []
unless (null hObjs && null cObjs && null stubObjs) $ do
rpaths <- getRPaths lbi clbi
let staticObjectFiles =
hObjs
++ map (libTargetDir </>) cObjs
++ stubObjs
profObjectFiles =
hProfObjs
++ map (libTargetDir </>) cProfObjs
++ stubProfObjs
ghciObjFiles =
hObjs
++ map (libTargetDir </>) cObjs
++ stubObjs
dynamicObjectFiles =
hSharedObjs
++ map (libTargetDir </>) cSharedObjs
++ stubSharedObjs
-- After the relocation lib is created we invoke ghc -shared
-- with the dependencies spelled out as -package arguments
-- and ghc invokes the linker with the proper library paths
ghcSharedLinkArgs =
mempty {
ghcOptShared = toFlag True,
ghcOptDynLinkMode = toFlag GhcDynamicOnly,
ghcOptInputFiles = toNubListR dynamicObjectFiles,
ghcOptOutputFile = toFlag sharedLibFilePath,
-- For dynamic libs, Mac OS/X needs to know the install location
-- at build time. This only applies to GHC < 7.8 - see the
-- discussion in #1660.
ghcOptDylibName = if (hostOS == OSX
&& ghcVersion < Version [7,8] [])
then toFlag sharedLibInstallPath
else mempty,
ghcOptNoAutoLinkPackages = toFlag True,
ghcOptPackageDBs = withPackageDB lbi,
ghcOptPackages = toNubListR $
Internal.mkGhcOptPackages clbi ,
ghcOptLinkLibs = toNubListR $ extraLibs libBi,
ghcOptLinkLibPath = toNubListR $ extraLibDirs libBi,
ghcOptLinkFrameworks = toNubListR $ PD.frameworks libBi,
ghcOptRPaths = rpaths
}
info verbosity (show (ghcOptPackages ghcSharedLinkArgs))
whenVanillaLib False $ do
Ar.createArLibArchive verbosity lbi vanillaLibFilePath staticObjectFiles
whenProfLib $ do
Ar.createArLibArchive verbosity lbi profileLibFilePath profObjectFiles
whenGHCiLib $ do
(ldProg, _) <- requireProgram verbosity ldProgram (withPrograms lbi)
Ld.combineObjectFiles verbosity ldProg
ghciLibFilePath ghciObjFiles
whenSharedLib False $
runGhcProg ghcSharedLinkArgs
-- | Start a REPL without loading any source files.
startInterpreter :: Verbosity -> ProgramConfiguration -> Compiler
-> PackageDBStack -> IO ()
startInterpreter verbosity conf comp packageDBs = do
let replOpts = mempty {
ghcOptMode = toFlag GhcModeInteractive,
ghcOptPackageDBs = packageDBs
}
checkPackageDbStack comp packageDBs
(ghcProg, _) <- requireProgram verbosity ghcProgram conf
runGHC verbosity ghcProg comp replOpts
-- | Build an executable with GHC.
--
buildExe, replExe :: Verbosity -> Cabal.Flag (Maybe Int)
-> PackageDescription -> LocalBuildInfo
-> Executable -> ComponentLocalBuildInfo -> IO ()
buildExe = buildOrReplExe False
replExe = buildOrReplExe True
buildOrReplExe :: Bool -> Verbosity -> Cabal.Flag (Maybe Int)
-> PackageDescription -> LocalBuildInfo
-> Executable -> ComponentLocalBuildInfo -> IO ()
buildOrReplExe forRepl verbosity numJobs _pkg_descr lbi
exe@Executable { exeName = exeName', modulePath = modPath } clbi = do
(ghcProg, _) <- requireProgram verbosity ghcProgram (withPrograms lbi)
let comp = compiler lbi
implInfo = getImplInfo comp
runGhcProg = runGHC verbosity ghcProg comp
exeBi <- hackThreadedFlag verbosity
comp (withProfExe lbi) (buildInfo exe)
-- exeNameReal, the name that GHC really uses (with .exe on Windows)
let exeNameReal = exeName' <.>
(if takeExtension exeName' /= ('.':exeExtension)
then exeExtension
else "")
let targetDir = (buildDir lbi) </> exeName'
let exeDir = targetDir </> (exeName' ++ "-tmp")
createDirectoryIfMissingVerbose verbosity True targetDir
createDirectoryIfMissingVerbose verbosity True exeDir
-- TODO: do we need to put hs-boot files into place for mutually recursive
-- modules? FIX: what about exeName.hi-boot?
-- Determine if program coverage should be enabled and if so, what
-- '-hpcdir' should be.
let isCoverageEnabled = fromFlag $ configCoverage $ configFlags lbi
distPref = fromFlag $ configDistPref $ configFlags lbi
hpcdir way
| forRepl = mempty -- HPC is not supported in ghci
| isCoverageEnabled = toFlag $ Hpc.mixDir distPref way exeName'
| otherwise = mempty
-- build executables
srcMainFile <- findFile (exeDir : hsSourceDirs exeBi) modPath
rpaths <- getRPaths lbi clbi
let isGhcDynamic = isDynamic comp
dynamicTooSupported = supportsDynamicToo comp
isHaskellMain = elem (takeExtension srcMainFile) [".hs", ".lhs"]
cSrcs = cSources exeBi ++ [srcMainFile | not isHaskellMain]
cObjs = map (`replaceExtension` objExtension) cSrcs
baseOpts = (componentGhcOptions verbosity lbi exeBi clbi exeDir)
`mappend` mempty {
ghcOptMode = toFlag GhcModeMake,
ghcOptInputFiles = toNubListR
[ srcMainFile | isHaskellMain],
ghcOptInputModules = toNubListR
[ m | not isHaskellMain, m <- exeModules exe]
}
staticOpts = baseOpts `mappend` mempty {
ghcOptDynLinkMode = toFlag GhcStaticOnly,
ghcOptHPCDir = hpcdir Hpc.Vanilla
}
profOpts = baseOpts `mappend` mempty {
ghcOptProfilingMode = toFlag True,
ghcOptProfilingAuto = Internal.profDetailLevelFlag False
(withProfExeDetail lbi),
ghcOptHiSuffix = toFlag "p_hi",
ghcOptObjSuffix = toFlag "p_o",
ghcOptExtra = toNubListR (hcProfOptions GHC exeBi),
ghcOptHPCDir = hpcdir Hpc.Prof
}
dynOpts = baseOpts `mappend` mempty {
ghcOptDynLinkMode = toFlag GhcDynamicOnly,
ghcOptHiSuffix = toFlag "dyn_hi",
ghcOptObjSuffix = toFlag "dyn_o",
ghcOptExtra = toNubListR $
hcSharedOptions GHC exeBi,
ghcOptHPCDir = hpcdir Hpc.Dyn
}
dynTooOpts = staticOpts `mappend` mempty {
ghcOptDynLinkMode = toFlag GhcStaticAndDynamic,
ghcOptDynHiSuffix = toFlag "dyn_hi",
ghcOptDynObjSuffix = toFlag "dyn_o",
ghcOptHPCDir = hpcdir Hpc.Dyn
}
linkerOpts = mempty {
ghcOptLinkOptions = toNubListR $ PD.ldOptions exeBi,
ghcOptLinkLibs = toNubListR $ extraLibs exeBi,
ghcOptLinkLibPath = toNubListR $ extraLibDirs exeBi,
ghcOptLinkFrameworks = toNubListR $ PD.frameworks exeBi,
ghcOptInputFiles = toNubListR
[exeDir </> x | x <- cObjs]
}
dynLinkerOpts = mempty {
ghcOptRPaths = rpaths
}
replOpts = baseOpts {
ghcOptExtra = overNubListR
Internal.filterGhciFlags
(ghcOptExtra baseOpts)
}
-- For a normal compile we do separate invocations of ghc:
-- one for compiling and one for linking. But for repl we
-- have to do just the one invocation, so that one has to
-- include all the linker stuff too, like -l flags and any
-- .o files from C files etc.
`mappend` linkerOpts
`mappend` mempty {
ghcOptMode = toFlag GhcModeInteractive,
ghcOptOptimisation = toFlag GhcNoOptimisation
}
commonOpts | withProfExe lbi = profOpts
| withDynExe lbi = dynOpts
| otherwise = staticOpts
compileOpts | useDynToo = dynTooOpts
| otherwise = commonOpts
withStaticExe = (not $ withProfExe lbi) && (not $ withDynExe lbi)
-- For building exe's that use TH with -prof or -dynamic we actually have
-- to build twice, once without -prof/-dynamic and then again with
-- -prof/-dynamic. This is because the code that TH needs to run at
-- compile time needs to be the vanilla ABI so it can be loaded up and run
-- by the compiler.
-- With dynamic-by-default GHC the TH object files loaded at compile-time
-- need to be .dyn_o instead of .o.
doingTH = EnableExtension TemplateHaskell `elem` allExtensions exeBi
-- Should we use -dynamic-too instead of compiling twice?
useDynToo = dynamicTooSupported && isGhcDynamic
&& doingTH && withStaticExe
&& null (hcSharedOptions GHC exeBi)
compileTHOpts | isGhcDynamic = dynOpts
| otherwise = staticOpts
compileForTH
| forRepl = False
| useDynToo = False
| isGhcDynamic = doingTH && (withProfExe lbi || withStaticExe)
| otherwise = doingTH && (withProfExe lbi || withDynExe lbi)
linkOpts = commonOpts `mappend`
linkerOpts `mappend`
mempty { ghcOptLinkNoHsMain = toFlag (not isHaskellMain) } `mappend`
(if withDynExe lbi then dynLinkerOpts else mempty)
-- Build static/dynamic object files for TH, if needed.
when compileForTH $
runGhcProg compileTHOpts { ghcOptNoLink = toFlag True
, ghcOptNumJobs = numJobs }
unless forRepl $
runGhcProg compileOpts { ghcOptNoLink = toFlag True
, ghcOptNumJobs = numJobs }
-- build any C sources
unless (null cSrcs) $ do
info verbosity "Building C Sources..."
sequence_
[ do let opts = (Internal.componentCcGhcOptions verbosity implInfo lbi exeBi
clbi exeDir filename) `mappend` mempty {
ghcOptDynLinkMode = toFlag (if withDynExe lbi
then GhcDynamicOnly
else GhcStaticOnly),
ghcOptProfilingMode = toFlag (withProfExe lbi)
}
odir = fromFlag (ghcOptObjDir opts)
createDirectoryIfMissingVerbose verbosity True odir
needsRecomp <- checkNeedsRecompilation filename opts
when needsRecomp $
runGhcProg opts
| filename <- cSrcs ]
-- TODO: problem here is we need the .c files built first, so we can load them
-- with ghci, but .c files can depend on .h files generated by ghc by ffi
-- exports.
when forRepl $ runGhcProg replOpts
-- link:
unless forRepl $ do
info verbosity "Linking..."
runGhcProg linkOpts { ghcOptOutputFile = toFlag (targetDir </> exeNameReal) }
-- | Returns True if the modification date of the given source file is newer than
-- the object file we last compiled for it, or if no object file exists yet.
checkNeedsRecompilation :: FilePath -> GhcOptions -> IO Bool
checkNeedsRecompilation filename opts = filename `moreRecentFile` oname
where oname = getObjectFileName filename opts
-- | Finds the object file name of the given source file
getObjectFileName :: FilePath -> GhcOptions -> FilePath
getObjectFileName filename opts = oname
where odir = fromFlag (ghcOptObjDir opts)
oext = fromFlagOrDefault "o" (ghcOptObjSuffix opts)
oname = odir </> replaceExtension filename oext
-- | Calculate the RPATHs for the component we are building.
--
-- Calculates relative RPATHs when 'relocatable' is set.
getRPaths :: LocalBuildInfo
-> ComponentLocalBuildInfo -- ^ Component we are building
-> IO (NubListR FilePath)
getRPaths lbi clbi | supportRPaths hostOS = do
libraryPaths <- depLibraryPaths False (relocatable lbi) lbi clbi
let hostPref = case hostOS of
OSX -> "@loader_path"
_ -> "$ORIGIN"
relPath p = if isRelative p then hostPref </> p else p
rpaths = toNubListR (map relPath libraryPaths)
return rpaths
where
(Platform _ hostOS) = hostPlatform lbi
-- The list of RPath-supported operating systems below reflects the
-- platforms on which Cabal's RPATH handling is tested. It does _NOT_
-- reflect whether the OS supports RPATH.
-- E.g. when this comment was written, the *BSD operating systems were
-- untested with regards to Cabal RPATH handling, and were hence set to
-- 'False', while those operating systems themselves do support RPATH.
supportRPaths Linux = True
supportRPaths Windows = False
supportRPaths OSX = True
supportRPaths FreeBSD = False
supportRPaths OpenBSD = False
supportRPaths NetBSD = False
supportRPaths DragonFly = False
supportRPaths Solaris = False
supportRPaths AIX = False
supportRPaths HPUX = False
supportRPaths IRIX = False
supportRPaths HaLVM = False
supportRPaths IOS = False
supportRPaths Android = False
supportRPaths Ghcjs = False
supportRPaths Hurd = False
supportRPaths (OtherOS _) = False
-- Do _not_ add a default case so that we get a warning here when a new OS
-- is added.
getRPaths _ _ = return mempty
-- | Filter the "-threaded" flag when profiling as it does not
-- work with ghc-6.8 and older.
hackThreadedFlag :: Verbosity -> Compiler -> Bool -> BuildInfo -> IO BuildInfo
hackThreadedFlag verbosity comp prof bi
| not mustFilterThreaded = return bi
| otherwise = do
warn verbosity $ "The ghc flag '-threaded' is not compatible with "
++ "profiling in ghc-6.8 and older. It will be disabled."
return bi { options = filterHcOptions (/= "-threaded") (options bi) }
where
mustFilterThreaded = prof && compilerVersion comp < Version [6, 10] []
&& "-threaded" `elem` hcOptions GHC bi
filterHcOptions p hcoptss =
[ (hc, if hc == GHC then filter p opts else opts)
| (hc, opts) <- hcoptss ]
-- | Extracts a String representing a hash of the ABI of a built
-- library. It can fail if the library has not yet been built.
--
libAbiHash :: Verbosity -> PackageDescription -> LocalBuildInfo
-> Library -> ComponentLocalBuildInfo -> IO String
libAbiHash verbosity _pkg_descr lbi lib clbi = do
libBi <- hackThreadedFlag verbosity
(compiler lbi) (withProfLib lbi) (libBuildInfo lib)
let
comp = compiler lbi
vanillaArgs =
(componentGhcOptions verbosity lbi libBi clbi (buildDir lbi))
`mappend` mempty {
ghcOptMode = toFlag GhcModeAbiHash,
ghcOptInputModules = toNubListR $ exposedModules lib
}
sharedArgs = vanillaArgs `mappend` mempty {
ghcOptDynLinkMode = toFlag GhcDynamicOnly,
ghcOptFPic = toFlag True,
ghcOptHiSuffix = toFlag "dyn_hi",
ghcOptObjSuffix = toFlag "dyn_o",
ghcOptExtra = toNubListR $ hcSharedOptions GHC libBi
}
profArgs = vanillaArgs `mappend` mempty {
ghcOptProfilingMode = toFlag True,
ghcOptProfilingAuto = Internal.profDetailLevelFlag True
(withProfLibDetail lbi),
ghcOptHiSuffix = toFlag "p_hi",
ghcOptObjSuffix = toFlag "p_o",
ghcOptExtra = toNubListR $ hcProfOptions GHC libBi
}
ghcArgs = if withVanillaLib lbi then vanillaArgs
else if withSharedLib lbi then sharedArgs
else if withProfLib lbi then profArgs
else error "libAbiHash: Can't find an enabled library way"
--
(ghcProg, _) <- requireProgram verbosity ghcProgram (withPrograms lbi)
hash <- getProgramInvocationOutput verbosity
(ghcInvocation ghcProg comp ghcArgs)
return (takeWhile (not . isSpace) hash)
componentGhcOptions :: Verbosity -> LocalBuildInfo
-> BuildInfo -> ComponentLocalBuildInfo -> FilePath
-> GhcOptions
componentGhcOptions = Internal.componentGhcOptions
componentCcGhcOptions :: Verbosity -> LocalBuildInfo
-> BuildInfo -> ComponentLocalBuildInfo
-> FilePath -> FilePath
-> GhcOptions
componentCcGhcOptions verbosity lbi =
Internal.componentCcGhcOptions verbosity implInfo lbi
where
comp = compiler lbi
implInfo = getImplInfo comp
-- -----------------------------------------------------------------------------
-- Installing
-- |Install executables for GHC.
installExe :: Verbosity
-> LocalBuildInfo
-> InstallDirs FilePath -- ^Where to copy the files to
-> FilePath -- ^Build location
-> (FilePath, FilePath) -- ^Executable (prefix,suffix)
-> PackageDescription
-> Executable
-> IO ()
installExe verbosity lbi installDirs buildPref
(progprefix, progsuffix) _pkg exe = do
let binDir = bindir installDirs
createDirectoryIfMissingVerbose verbosity True binDir
let exeFileName = exeName exe <.> exeExtension
fixedExeBaseName = progprefix ++ exeName exe ++ progsuffix
installBinary dest = do
installExecutableFile verbosity
(buildPref </> exeName exe </> exeFileName)
(dest <.> exeExtension)
when (stripExes lbi) $
Strip.stripExe verbosity (hostPlatform lbi) (withPrograms lbi)
(dest <.> exeExtension)
installBinary (binDir </> fixedExeBaseName)
-- |Install for ghc, .hi, .a and, if --with-ghci given, .o
installLib :: Verbosity
-> LocalBuildInfo
-> FilePath -- ^install location
-> FilePath -- ^install location for dynamic libraries
-> FilePath -- ^Build location
-> PackageDescription
-> Library
-> ComponentLocalBuildInfo
-> IO ()
installLib verbosity lbi targetDir dynlibTargetDir builtDir _pkg lib clbi = do
-- copy .hi files over:
whenVanilla $ copyModuleFiles "hi"
whenProf $ copyModuleFiles "p_hi"
whenShared $ copyModuleFiles "dyn_hi"
-- copy the built library files over:
whenVanilla $ installOrdinary builtDir targetDir vanillaLibName
whenProf $ installOrdinary builtDir targetDir profileLibName
whenGHCi $ installOrdinary builtDir targetDir ghciLibName
whenShared $ installShared builtDir dynlibTargetDir sharedLibName
where
install isShared srcDir dstDir name = do
let src = srcDir </> name
dst = dstDir </> name
createDirectoryIfMissingVerbose verbosity True dstDir
if isShared
then installExecutableFile verbosity src dst
else installOrdinaryFile verbosity src dst
when (stripLibs lbi) $ Strip.stripLib verbosity
(hostPlatform lbi) (withPrograms lbi) dst
installOrdinary = install False
installShared = install True
copyModuleFiles ext =
findModuleFiles [builtDir] [ext] (libModules lib)
>>= installOrdinaryFiles verbosity targetDir
cid = compilerId (compiler lbi)
libName = componentId clbi
vanillaLibName = mkLibName libName
profileLibName = mkProfLibName libName
ghciLibName = Internal.mkGHCiLibName libName
sharedLibName = (mkSharedLibName cid) libName
hasLib = not $ null (libModules lib)
&& null (cSources (libBuildInfo lib))
whenVanilla = when (hasLib && withVanillaLib lbi)
whenProf = when (hasLib && withProfLib lbi)
whenGHCi = when (hasLib && withGHCiLib lbi)
whenShared = when (hasLib && withSharedLib lbi)
-- -----------------------------------------------------------------------------
-- Registering
hcPkgInfo :: ProgramConfiguration -> HcPkg.HcPkgInfo
hcPkgInfo conf = HcPkg.HcPkgInfo { HcPkg.hcPkgProgram = ghcPkgProg
, HcPkg.noPkgDbStack = v < [6,9]
, HcPkg.noVerboseFlag = v < [6,11]
, HcPkg.flagPackageConf = v < [7,5]
, HcPkg.useSingleFileDb = v < [7,9]
}
where
v = versionBranch ver
Just ghcPkgProg = lookupProgram ghcPkgProgram conf
Just ver = programVersion ghcPkgProg
registerPackage
:: Verbosity
-> InstalledPackageInfo
-> PackageDescription
-> LocalBuildInfo
-> Bool
-> PackageDBStack
-> IO ()
registerPackage verbosity installedPkgInfo _pkg lbi _inplace packageDbs =
HcPkg.reregister (hcPkgInfo $ withPrograms lbi) verbosity
packageDbs (Right installedPkgInfo)
pkgRoot :: Verbosity -> LocalBuildInfo -> PackageDB -> IO FilePath
pkgRoot verbosity lbi = pkgRoot'
where
pkgRoot' GlobalPackageDB =
let Just ghcProg = lookupProgram ghcProgram (withPrograms lbi)
in fmap takeDirectory (getGlobalPackageDB verbosity ghcProg)
pkgRoot' UserPackageDB = do
appDir <- getAppUserDataDirectory "ghc"
let ver = compilerVersion (compiler lbi)
subdir = System.Info.arch ++ '-':System.Info.os
++ '-':showVersion ver
rootDir = appDir </> subdir
-- We must create the root directory for the user package database if it
-- does not yet exist. Otherwise '${pkgroot}' will resolve to a
-- non-existent directory at the time of 'ghc-pkg register', and
-- registration will fail.
createDirectoryIfMissing True rootDir
return rootDir
pkgRoot' (SpecificPackageDB fp) = return (takeDirectory fp)
-- -----------------------------------------------------------------------------
-- Utils
isDynamic :: Compiler -> Bool
isDynamic = Internal.ghcLookupProperty "GHC Dynamic"
supportsDynamicToo :: Compiler -> Bool
supportsDynamicToo = Internal.ghcLookupProperty "Support dynamic-too"
|
thoughtpolice/cabal
|
Cabal/Distribution/Simple/GHC.hs
|
Haskell
|
bsd-3-clause
| 52,313
|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
module Utils.Http (send) where
import qualified Control.Exception as E
import Control.Monad.Error.Class (MonadError, throwError)
import Control.Monad.Trans (MonadIO, liftIO)
import qualified Data.ByteString.Char8 as BSC
import qualified Data.List as List
import Network (withSocketsDo)
import Network.HTTP.Client
import Network.HTTP.Client.TLS (tlsManagerSettings)
import Network.HTTP.Types
send
:: (MonadIO m, MonadError String m)
=> String
-> (Request -> Manager -> IO a)
-> m a
send url handler =
do result <- liftIO (sendSafe url handler)
either throwError return result
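-- A minimal usage sketch (not part of the original module; the URL is
-- hypothetical). 'send' leaves the choice of monad to the caller, so any stack
-- providing MonadIO and MonadError String works, e.g. ExceptT String IO.
_exampleStatus :: (MonadIO m, MonadError String m) => m Status
_exampleStatus =
  send "https://example.com" $ \request manager ->
    fmap responseStatus (httpLbs request manager)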
sendSafe :: String -> (Request -> Manager -> IO a) -> IO (Either String a)
sendSafe url handler =
sendUnsafe url handler
`E.catch` handleHttpError url
`E.catch` (handleAnyError url :: E.SomeException -> IO (Either String b))
sendUnsafe :: String -> (Request -> Manager -> IO a) -> IO (Either err a)
sendUnsafe url handler =
do request <- parseUrl url
result <- withSocketsDo $ withManager tlsManagerSettings (handler request)
return (Right result)
handleHttpError :: String -> HttpException -> IO (Either String b)
handleHttpError url exception =
case exception of
StatusCodeException (Status _code err) headers _ ->
let details =
case List.lookup "X-Response-Body-Start" headers of
Just msg | not (BSC.null msg) -> msg
_ -> err
in
return . Left $ BSC.unpack details
_ -> handleAnyError url exception
handleAnyError :: (E.Exception e) => String -> e -> IO (Either String b)
handleAnyError url exception =
return . Left $
"failed with '" ++ show exception ++ "' when sending request to\n" ++
" <" ++ url ++ ">"
|
laszlopandy/elm-package
|
src/Utils/Http.hs
|
Haskell
|
bsd-3-clause
| 1,824
|
{-# LANGUAGE RecursiveDo, RankNTypes, NamedFieldPuns, RecordWildCards #-}
module Distribution.Server.Features.Upload (
UploadFeature(..),
UploadResource(..),
initUploadFeature,
UploadResult(..),
) where
import Distribution.Server.Framework
import Distribution.Server.Framework.BackupDump
import Distribution.Server.Features.Upload.State
import Distribution.Server.Features.Upload.Backup
import Distribution.Server.Features.Core
import Distribution.Server.Features.Users
import Distribution.Server.Users.Backup
import Distribution.Server.Packages.Types
import qualified Distribution.Server.Users.Types as Users
import qualified Distribution.Server.Users.Group as Group
import Distribution.Server.Users.Group (UserGroup(..), GroupDescription(..), nullDescription)
import qualified Distribution.Server.Framework.BlobStorage as BlobStorage
import qualified Distribution.Server.Packages.Unpack as Upload
import Distribution.Server.Packages.PackageIndex (PackageIndex)
import qualified Distribution.Server.Packages.PackageIndex as PackageIndex
import Data.Maybe (fromMaybe)
import Data.Time.Clock (getCurrentTime)
import Data.Function (fix)
import Data.ByteString.Lazy (ByteString)
import Distribution.Package
import Distribution.PackageDescription (GenericPackageDescription)
import Distribution.Text (display)
import qualified Distribution.Server.Util.GZip as GZip
data UploadFeature = UploadFeature {
-- | The package upload `HackageFeature`.
uploadFeatureInterface :: HackageFeature,
-- | Upload resources.
uploadResource :: UploadResource,
-- | The main upload routine. This uses extractPackage on a multipart
-- request to get contextual information.
uploadPackage :: ServerPartE UploadResult,
--TODO: consider moving the trustee and/or per-package maintainer groups
-- lower down in the feature hierarchy; many other features want to
-- use the trustee group purely for auth decisions
-- | The group of Hackage trustees.
trusteesGroup :: UserGroup,
-- | The group of package uploaders.
uploadersGroup :: UserGroup,
-- | The group of maintainers for a given package.
maintainersGroup :: PackageName -> UserGroup,
-- | Require that the client is logged in as a maintainer of the package.
guardAuthorisedAsMaintainer :: PackageName -> ServerPartE (),
-- | Require that the client is logged in as a maintainer of the package or as a trustee.
guardAuthorisedAsMaintainerOrTrustee :: PackageName -> ServerPartE (),
-- | Takes an upload request and, depending on the result of the
-- passed-in function, either commits the uploaded tarball to the blob
-- storage or throws it away and yields an error.
extractPackage :: (Users.UserId -> UploadResult -> IO (Maybe ErrorResponse))
-> ServerPartE (Users.UserId, UploadResult, PkgTarball)
}
instance IsHackageFeature UploadFeature where
getFeatureInterface = uploadFeatureInterface
data UploadResource = UploadResource {
-- | The page for uploading a package, the same as `corePackagesPage`.
uploadIndexPage :: Resource,
-- | The page for deleting a package, the same as `corePackagePage`.
--
-- This is fairly dangerous and is not currently used.
deletePackagePage :: Resource,
-- | The maintainers group for each package.
maintainersGroupResource :: GroupResource,
-- | The trustee group.
trusteesGroupResource :: GroupResource,
-- | The allowed-uploaders group.
uploadersGroupResource :: GroupResource,
-- | URI for `maintainersGroupResource` given a format and `PackageId`.
packageMaintainerUri :: String -> PackageId -> String,
-- | URI for `trusteesGroupResource` given a format.
trusteeUri :: String -> String,
-- | URI for `uploadersGroupResource` given a format.
uploaderUri :: String -> String
}
-- | The representation of an intermediate result in the upload process,
-- indicating a package which meets the requirements to go into Hackage.
data UploadResult = UploadResult {
-- The parsed Cabal file.
uploadDesc :: !GenericPackageDescription,
-- The text of the Cabal file.
uploadCabal :: !ByteString,
-- Any warnings from unpacking the tarball.
uploadWarnings :: ![String]
}
initUploadFeature :: ServerEnv
-> IO (UserFeature -> CoreFeature -> IO UploadFeature)
initUploadFeature env@ServerEnv{serverStateDir} = do
-- Canonical state
trusteesState <- trusteesStateComponent serverStateDir
uploadersState <- uploadersStateComponent serverStateDir
maintainersState <- maintainersStateComponent serverStateDir
return $ \user@UserFeature{..} core@CoreFeature{..} -> do
-- Recursively tie the knot: the feature contains new user group resources
-- but we make the functions needed to create those resources along with
-- the feature
rec let (feature,
trusteesGroupDescription, uploadersGroupDescription,
maintainersGroupDescription)
= uploadFeature env core user
trusteesState trusteesGroup trusteesGroupResource
uploadersState uploadersGroup uploadersGroupResource
maintainersState maintainersGroup maintainersGroupResource
(trusteesGroup, trusteesGroupResource) <-
groupResourceAt "/packages/trustees" trusteesGroupDescription
(uploadersGroup, uploadersGroupResource) <-
groupResourceAt "/packages/uploaders" uploadersGroupDescription
pkgNames <- PackageIndex.packageNames <$> queryGetPackageIndex
(maintainersGroup, maintainersGroupResource) <-
groupResourcesAt "/package/:package/maintainers"
maintainersGroupDescription
(\pkgname -> [("package", display pkgname)])
(packageInPath coreResource)
pkgNames
return feature
trusteesStateComponent :: FilePath -> IO (StateComponent AcidState HackageTrustees)
trusteesStateComponent stateDir = do
st <- openLocalStateFrom (stateDir </> "db" </> "HackageTrustees") initialHackageTrustees
return StateComponent {
stateDesc = "Trustees"
, stateHandle = st
, getState = query st GetHackageTrustees
, putState = update st . ReplaceHackageTrustees . trusteeList
, backupState = \_ (HackageTrustees trustees) -> [csvToBackup ["trustees.csv"] $ groupToCSV trustees]
, restoreState = HackageTrustees <$> groupBackup ["trustees.csv"]
, resetState = trusteesStateComponent
}
uploadersStateComponent :: FilePath -> IO (StateComponent AcidState HackageUploaders)
uploadersStateComponent stateDir = do
st <- openLocalStateFrom (stateDir </> "db" </> "HackageUploaders") initialHackageUploaders
return StateComponent {
stateDesc = "Uploaders"
, stateHandle = st
, getState = query st GetHackageUploaders
, putState = update st . ReplaceHackageUploaders . uploaderList
, backupState = \_ (HackageUploaders uploaders) -> [csvToBackup ["uploaders.csv"] $ groupToCSV uploaders]
, restoreState = HackageUploaders <$> groupBackup ["uploaders.csv"]
, resetState = uploadersStateComponent
}
maintainersStateComponent :: FilePath -> IO (StateComponent AcidState PackageMaintainers)
maintainersStateComponent stateDir = do
st <- openLocalStateFrom (stateDir </> "db" </> "PackageMaintainers") initialPackageMaintainers
return StateComponent {
stateDesc = "Package maintainers"
, stateHandle = st
, getState = query st AllPackageMaintainers
, putState = update st . ReplacePackageMaintainers
, backupState = \_ (PackageMaintainers mains) -> [maintToExport mains]
, restoreState = maintainerBackup
, resetState = maintainersStateComponent
}
uploadFeature :: ServerEnv
-> CoreFeature
-> UserFeature
-> StateComponent AcidState HackageTrustees -> UserGroup -> GroupResource
-> StateComponent AcidState HackageUploaders -> UserGroup -> GroupResource
-> StateComponent AcidState PackageMaintainers -> (PackageName -> UserGroup) -> GroupResource
-> (UploadFeature,
UserGroup,
UserGroup,
PackageName -> UserGroup)
uploadFeature ServerEnv{serverBlobStore = store}
CoreFeature{ coreResource
, queryGetPackageIndex
, updateAddPackage
}
UserFeature{..}
trusteesState trusteesGroup trusteesGroupResource
uploadersState uploadersGroup uploadersGroupResource
maintainersState maintainersGroup maintainersGroupResource
= ( UploadFeature {..}
, trusteesGroupDescription, uploadersGroupDescription, maintainersGroupDescription)
where
uploadFeatureInterface = (emptyHackageFeature "upload") {
featureDesc = "Support for package uploads, and define groups for trustees, uploaders, and package maintainers"
, featureResources =
[ uploadIndexPage uploadResource
, groupResource maintainersGroupResource
, groupUserResource maintainersGroupResource
, groupResource trusteesGroupResource
, groupUserResource trusteesGroupResource
, groupResource uploadersGroupResource
, groupUserResource uploadersGroupResource
]
, featureState = [
abstractAcidStateComponent trusteesState
, abstractAcidStateComponent uploadersState
, abstractAcidStateComponent maintainersState
]
}
uploadResource = UploadResource
{ uploadIndexPage = (extendResource (corePackagesPage coreResource)) { resourcePost = [] }
, deletePackagePage = (extendResource (corePackagePage coreResource)) { resourceDelete = [] }
, maintainersGroupResource = maintainersGroupResource
, trusteesGroupResource = trusteesGroupResource
, uploadersGroupResource = uploadersGroupResource
, packageMaintainerUri = \format pkgname -> renderResource
(groupResource maintainersGroupResource) [display pkgname, format]
, trusteeUri = \format -> renderResource (groupResource trusteesGroupResource) [format]
, uploaderUri = \format -> renderResource (groupResource uploadersGroupResource) [format]
}
--------------------------------------------------------------------------------
-- User groups and authentication
trusteesGroupDescription :: UserGroup
trusteesGroupDescription = UserGroup {
groupDesc = trusteeDescription,
queryUserGroup = queryState trusteesState GetTrusteesList,
addUserToGroup = updateState trusteesState . AddHackageTrustee,
removeUserFromGroup = updateState trusteesState . RemoveHackageTrustee,
groupsAllowedToAdd = [adminGroup],
groupsAllowedToDelete = [adminGroup]
}
uploadersGroupDescription :: UserGroup
uploadersGroupDescription = UserGroup {
groupDesc = uploaderDescription,
queryUserGroup = queryState uploadersState GetUploadersList,
addUserToGroup = updateState uploadersState . AddHackageUploader,
removeUserFromGroup = updateState uploadersState . RemoveHackageUploader,
groupsAllowedToAdd = [adminGroup],
groupsAllowedToDelete = [adminGroup]
}
maintainersGroupDescription :: PackageName -> UserGroup
maintainersGroupDescription name =
fix $ \thisgroup ->
UserGroup {
groupDesc = maintainerDescription name,
queryUserGroup = queryState maintainersState $ GetPackageMaintainers name,
addUserToGroup = updateState maintainersState . AddPackageMaintainer name,
removeUserFromGroup = updateState maintainersState . RemovePackageMaintainer name,
groupsAllowedToAdd = [thisgroup, adminGroup],
groupsAllowedToDelete = [thisgroup, adminGroup]
}
maintainerDescription :: PackageName -> GroupDescription
maintainerDescription pkgname = GroupDescription
{ groupTitle = "Maintainers"
, groupEntity = Just (pname, Just $ "/package/" ++ pname)
, groupPrologue = "Maintainers for a package can upload new versions and adjust other attributes in the package database."
}
where pname = display pkgname
trusteeDescription :: GroupDescription
trusteeDescription = nullDescription { groupTitle = "Package trustees", groupPrologue = "The role of trustees is to help to curate the whole package collection. Trustees have a limited ability to edit package information, for the entire package database (as opposed to package maintainers who have full control over individual packages). Trustees can edit .cabal files, edit other package metadata and upload documentation but they cannot upload new package versions." }
uploaderDescription :: GroupDescription
uploaderDescription = nullDescription { groupTitle = "Package uploaders", groupPrologue = "Package uploaders are allowed to upload packages. Note that if a package already exists then you also need to be in the maintainer group for that package." }
guardAuthorisedAsMaintainer :: PackageName -> ServerPartE ()
guardAuthorisedAsMaintainer pkgname =
guardAuthorised_ [InGroup (maintainersGroup pkgname)]
guardAuthorisedAsMaintainerOrTrustee :: PackageName -> ServerPartE ()
guardAuthorisedAsMaintainerOrTrustee pkgname =
guardAuthorised_ [InGroup (maintainersGroup pkgname), InGroup trusteesGroup]
----------------------------------------------------
-- This is the upload function. It returns a generic result for multiple formats.
uploadPackage :: ServerPartE UploadResult
uploadPackage = do
guardAuthorised_ [InGroup uploadersGroup]
pkgIndex <- queryGetPackageIndex
(uid, uresult, tarball) <- extractPackage $ \uid info ->
processUpload pkgIndex uid info
now <- liftIO getCurrentTime
let (UploadResult pkg pkgStr _) = uresult
pkgid = packageId pkg
cabalfile = CabalFileText pkgStr
uploadinfo = (now, uid)
success <- updateAddPackage pkgid cabalfile uploadinfo (Just tarball)
if success
then do
-- make package maintainers group for new package
let existedBefore = packageExists pkgIndex pkgid
when (not existedBefore) $
liftIO $ addUserToGroup (maintainersGroup (packageName pkgid)) uid
return uresult
-- this is already checked in processUpload, and race conditions are highly unlikely but imaginable
else errForbidden "Upload failed" [MText "Package already exists."]
-- This is a processing function for extractPackage that checks upload-specific requirements.
-- Does authentication, though not with requirePackageAuth, because it has to be IO.
-- Some other checks can be added, e.g. if a package with a later version exists
processUpload :: PackageIndex PkgInfo -> Users.UserId -> UploadResult -> IO (Maybe ErrorResponse)
processUpload state uid res = do
let pkg = packageId (uploadDesc res)
pkgGroup <- queryUserGroup (maintainersGroup (packageName pkg))
if packageIdExists state pkg
then uploadError versionExists --allow trustees to do this?
else if packageExists state pkg && not (uid `Group.member` pkgGroup)
then uploadError (notMaintainer pkg)
else return Nothing
where
uploadError = return . Just . ErrorResponse 403 [] "Upload failed" . return . MText
versionExists = "This version of the package has already been uploaded.\n\nAs a matter of "
++ "policy we do not allow package tarballs to be changed after a release "
++ "(so we can guarantee stable md5sums etc). The usual recommendation is "
++ "to upload a new version, and if necessary blacklist the existing one. "
++ "In extraordinary circumstances, contact the administrators."
notMaintainer pkg = "You are not authorised to upload new versions of this package. The "
++ "package '" ++ display (packageName pkg) ++ "' exists already and you "
++ "are not a member of the maintainer group for this package.\n\n"
++ "If you believe you should be a member of the maintainer group for this "
++ "package, then ask an existing maintainer to add you to the group. If "
++ "this is a package name clash, please pick another name or talk to the "
++ "maintainers of the existing package."
-- This function generically extracts a package, useful for uploading, checking,
-- and anything else in the standard user-upload pipeline.
extractPackage :: (Users.UserId -> UploadResult -> IO (Maybe ErrorResponse))
-> ServerPartE (Users.UserId, UploadResult, PkgTarball)
extractPackage processFunc =
withDataFn (lookInput "package") $ \input ->
case inputValue input of -- HS6 this has been updated to use the new file upload support in HS6, but has not been tested at all
(Right _) -> errBadRequest "Upload failed" [MText "package field in form data is not a file."]
(Left file) ->
let fileName = (fromMaybe "noname" $ inputFilename input)
in upload fileName file
where
upload name file =
do -- initial check to ensure logged in.
--FIXME: this should have been covered earlier
uid <- guardAuthenticated
now <- liftIO getCurrentTime
let processPackage :: ByteString -> IO (Either ErrorResponse (UploadResult, BlobStorage.BlobId))
processPackage content' = do
-- as much as it would be nice to do requirePackageAuth in here,
-- processPackage is run in a handle bracket
case Upload.unpackPackage now name content' of
Left err -> return . Left $ ErrorResponse 400 [] "Invalid package" [MText err]
Right ((pkg, pkgStr), warnings) -> do
let uresult = UploadResult pkg pkgStr warnings
res <- processFunc uid uresult
case res of
Nothing ->
do let decompressedContent = GZip.decompressNamed file content'
blobIdDecompressed <- BlobStorage.add store decompressedContent
return . Right $ (uresult, blobIdDecompressed)
Just err -> return . Left $ err
mres <- liftIO $ BlobStorage.consumeFileWith store file processPackage
case mres of
Left err -> throwError err
Right ((res, blobIdDecompressed), blobId) -> do
infoGz <- liftIO $ blobInfoFromId store blobId
let tarball = PkgTarball {
pkgTarballGz = infoGz
, pkgTarballNoGz = blobIdDecompressed
}
return (uid, res, tarball)
|
ocharles/hackage-server
|
Distribution/Server/Features/Upload.hs
|
Haskell
|
bsd-3-clause
| 19,842
|
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="hi-IN">
<title>Passive Scan Rules - Beta | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
thc202/zap-extensions
|
addOns/pscanrulesBeta/src/main/javahelp/org/zaproxy/zap/extension/pscanrulesBeta/resources/help_hi_IN/helpset_hi_IN.hs
|
Haskell
|
apache-2.0
| 986
|
{-
(c) The University of Glasgow 2006
(c) The GRASP Project, Glasgow University, 1992-2000
Defines basic functions for printing error messages.
It's hard to put these functions anywhere else without causing
some unnecessary loops in the module dependency graph.
-}
{-# LANGUAGE CPP, DeriveDataTypeable, ScopedTypeVariables #-}
module Panic (
GhcException(..), showGhcException,
throwGhcException, throwGhcExceptionIO,
handleGhcException,
progName,
pgmError,
panic, sorry, assertPanic, trace,
panicDoc, sorryDoc, pgmErrorDoc,
Exception.Exception(..), showException, safeShowException, try, tryMost, throwTo,
installSignalHandlers,
pushInterruptTargetThread, popInterruptTargetThread
) where
#include "HsVersions.h"
import {-# SOURCE #-} Outputable (SDoc)
import Config
import Exception
import Control.Concurrent
import Data.Dynamic
import Debug.Trace ( trace )
import System.IO.Unsafe
import System.Environment
#ifndef mingw32_HOST_OS
import System.Posix.Signals
#endif
#if defined(mingw32_HOST_OS)
import GHC.ConsoleHandler
#endif
import GHC.Stack
import System.Mem.Weak ( Weak, deRefWeak )
-- | GHC's own exception type
-- error messages all take the form:
--
-- @
-- <location>: <error>
-- @
--
-- If the location is on the command line, or in GHC itself, then
-- <location>="ghc". All of the error types below correspond to
-- a <location> of "ghc", except for ProgramError (where the string is
-- assumed to contain a location already, so we don't print one).
data GhcException
-- | Some other fatal signal (SIGHUP,SIGTERM)
= Signal Int
-- | Prints the short usage msg after the error
| UsageError String
-- | A problem with the command line arguments, but don't print usage.
| CmdLineError String
-- | The 'impossible' happened.
| Panic String
| PprPanic String SDoc
-- | The user tickled something that's known not to work yet,
-- but we're not counting it as a bug.
| Sorry String
| PprSorry String SDoc
-- | An installation problem.
| InstallationError String
-- | An error in the user's code, probably.
| ProgramError String
| PprProgramError String SDoc
deriving (Typeable)
instance Exception GhcException
instance Show GhcException where
showsPrec _ e@(ProgramError _) = showGhcException e
showsPrec _ e@(CmdLineError _) = showString "<command line>: " . showGhcException e
showsPrec _ e = showString progName . showString ": " . showGhcException e
-- | The name of this GHC.
progName :: String
progName = unsafePerformIO (getProgName)
{-# NOINLINE progName #-}
-- | Short usage information to display when we are given the wrong cmd line arguments.
short_usage :: String
short_usage = "Usage: For basic information, try the `--help' option."
-- | Show an exception as a string.
showException :: Exception e => e -> String
showException = show
-- | Show an exception which can possibly throw other exceptions.
-- Used when displaying exceptions thrown within TH code.
safeShowException :: Exception e => e -> IO String
safeShowException e = do
-- ensure the whole error message is evaluated inside try
r <- try (return $! forceList (showException e))
case r of
Right msg -> return msg
Left e' -> safeShowException (e' :: SomeException)
where
forceList [] = []
forceList xs@(x : xt) = x `seq` forceList xt `seq` xs
-- | Append a description of the given exception to this string.
showGhcException :: GhcException -> String -> String
showGhcException exception
= case exception of
UsageError str
-> showString str . showChar '\n' . showString short_usage
CmdLineError str -> showString str
PprProgramError str _ ->
showGhcException (ProgramError (str ++ "\n<<details unavailable>>"))
ProgramError str -> showString str
InstallationError str -> showString str
Signal n -> showString "signal: " . shows n
PprPanic s _ ->
showGhcException (Panic (s ++ "\n<<details unavailable>>"))
Panic s
-> showString $
"panic! (the 'impossible' happened)\n"
++ " (GHC version " ++ cProjectVersion ++ " for " ++ TargetPlatform_NAME ++ "):\n\t"
++ s ++ "\n\n"
++ "Please report this as a GHC bug: http://www.haskell.org/ghc/reportabug\n"
PprSorry s _ ->
showGhcException (Sorry (s ++ "\n<<details unavailable>>"))
Sorry s
-> showString $
"sorry! (unimplemented feature or known bug)\n"
++ " (GHC version " ++ cProjectVersion ++ " for " ++ TargetPlatform_NAME ++ "):\n\t"
++ s ++ "\n"
throwGhcException :: GhcException -> a
throwGhcException = Exception.throw
throwGhcExceptionIO :: GhcException -> IO a
throwGhcExceptionIO = Exception.throwIO
handleGhcException :: ExceptionMonad m => (GhcException -> m a) -> m a -> m a
handleGhcException = ghandle
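-- A minimal usage sketch (not part of the original module): wrap an IO action
-- in 'handleGhcException' so a thrown 'GhcException' is reported rather than
-- propagated. The message text here is purely illustrative.
_exampleHandled :: IO ()
_exampleHandled =
  handleGhcException
    (\e -> putStrLn (showGhcException e ""))
    (throwGhcExceptionIO (ProgramError "example failure"))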
-- | Panics and asserts.
panic, sorry, pgmError :: String -> a
panic x = unsafeDupablePerformIO $ do
stack <- ccsToStrings =<< getCurrentCCS x
if null stack
then throwGhcException (Panic x)
else throwGhcException (Panic (x ++ '\n' : renderStack stack))
sorry x = throwGhcException (Sorry x)
pgmError x = throwGhcException (ProgramError x)
panicDoc, sorryDoc, pgmErrorDoc :: String -> SDoc -> a
panicDoc x doc = throwGhcException (PprPanic x doc)
sorryDoc x doc = throwGhcException (PprSorry x doc)
pgmErrorDoc x doc = throwGhcException (PprProgramError x doc)
-- | Throw a failed assertion exception for a given filename and line number.
assertPanic :: String -> Int -> a
assertPanic file line =
Exception.throw (Exception.AssertionFailed
("ASSERT failed! file " ++ file ++ ", line " ++ show line))
-- | Like try, but pass through UserInterrupt and Panic exceptions.
-- Used when we want soft failures when reading interface files, for example.
-- TODO: I'm not entirely sure if this is catching what we really want to catch
tryMost :: IO a -> IO (Either SomeException a)
tryMost action = do r <- try action
case r of
Left se ->
case fromException se of
-- Some GhcException's we rethrow,
Just (Signal _) -> throwIO se
Just (Panic _) -> throwIO se
-- others we return
Just _ -> return (Left se)
Nothing ->
case fromException se of
-- All IOExceptions are returned
Just (_ :: IOException) ->
return (Left se)
-- Anything else is rethrown
Nothing -> throwIO se
Right v -> return (Right v)
-- | Install standard signal handlers for catching ^C, which just throw an
-- exception in the target thread. The current target thread is the
-- thread at the head of the list in the MVar passed to
-- installSignalHandlers.
installSignalHandlers :: IO ()
installSignalHandlers = do
main_thread <- myThreadId
pushInterruptTargetThread main_thread
let
interrupt_exn = (toException UserInterrupt)
interrupt = do
mt <- peekInterruptTargetThread
case mt of
Nothing -> return ()
Just t -> throwTo t interrupt_exn
--
#if !defined(mingw32_HOST_OS)
_ <- installHandler sigQUIT (Catch interrupt) Nothing
_ <- installHandler sigINT (Catch interrupt) Nothing
-- see #3656; in the future we should install these automatically for
-- all Haskell programs in the same way that we install a ^C handler.
let fatal_signal n = throwTo main_thread (Signal (fromIntegral n))
_ <- installHandler sigHUP (Catch (fatal_signal sigHUP)) Nothing
_ <- installHandler sigTERM (Catch (fatal_signal sigTERM)) Nothing
return ()
#else
-- GHC 6.3+ has support for console events on Windows
-- NOTE: running GHCi under a bash shell for some reason requires
-- you to press Ctrl-Break rather than Ctrl-C to provoke
-- an interrupt. Ctrl-C is getting blocked somewhere, I don't know
-- why --SDM 17/12/2004
let sig_handler ControlC = interrupt
sig_handler Break = interrupt
sig_handler _ = return ()
_ <- installHandler (Catch sig_handler)
return ()
#endif
{-# NOINLINE interruptTargetThread #-}
interruptTargetThread :: MVar [Weak ThreadId]
interruptTargetThread = unsafePerformIO (newMVar [])
pushInterruptTargetThread :: ThreadId -> IO ()
pushInterruptTargetThread tid = do
wtid <- mkWeakThreadId tid
modifyMVar_ interruptTargetThread $ return . (wtid :)
peekInterruptTargetThread :: IO (Maybe ThreadId)
peekInterruptTargetThread =
withMVar interruptTargetThread $ loop
where
loop [] = return Nothing
loop (t:ts) = do
r <- deRefWeak t
case r of
Nothing -> loop ts
Just t -> return (Just t)
popInterruptTargetThread :: IO ()
popInterruptTargetThread =
modifyMVar_ interruptTargetThread $
\tids -> return $! case tids of [] -> []
(_:ts) -> ts
|
ml9951/ghc
|
compiler/utils/Panic.hs
|
Haskell
|
bsd-3-clause
| 9,492
|
{-# OPTIONS -fglasgow-exts #-}
-- Tests the "stupid theta" in pattern-matching
-- when there's an existential as well
module ShouldCompile where
data (Show a) => Obs a = forall b. LiftObs a b
f :: Show a => Obs a -> String
f (LiftObs _ _) = "yes"
|
hvr/jhc
|
regress/tests/1_typecheck/2_pass/ghc/tc182.hs
|
Haskell
|
mit
| 271
|
module LayoutLet2 where
-- Simple let expression, rename xxx to something longer or shorter
-- and the let/in layout should adjust accordingly
-- In this case the tokens for xxx + a + b should also shift out
foo xxx = let a = 1
b = 2 in xxx + a + b
|
mpickering/ghc-exactprint
|
tests/examples/transform/LayoutLet2.hs
|
Haskell
|
bsd-3-clause
| 266
|
-- In this example, adding 'main' to the export will fail as it is already exported.
module A1 where
import D1
import C1
import B1
main :: Tree Int ->Bool
main t = isSame (sumSquares (fringe t))
(sumSquares (B1.myFringe t)+sumSquares (C1.myFringe t))
|
SAdams601/HaRe
|
old/testing/fromConcreteToAbstract/A1_TokOut.hs
|
Haskell
|
bsd-3-clause
| 285
|
{-# LANGUAGE MagicHash #-}
module Foo where
foo# = 'a'
|
urbanslug/ghc
|
testsuite/tests/parser/should_compile/read047.hs
|
Haskell
|
bsd-3-clause
| 59
|
module Annfail01 where
-- Testing annotating things that don't exist
{-# ANN type Foo (1 :: Int) #-}
{-# ANN f (1 :: Int) #-}
|
urbanslug/ghc
|
testsuite/tests/annotations/should_fail/annfail01.hs
|
Haskell
|
bsd-3-clause
| 126
|
module Chapter8.Syntax.Number where
import Data.Monoid
import Data.Display
import Chapter8.Syntax.Type
data NumVal
= NumZero
| NumSucc NumVal
deriving (Eq, Show)
toRealNum :: NumVal -> Integer
toRealNum = walk 0
where
walk c nv = case nv of
NumZero -> c
NumSucc v -> walk (c + 1) v
instance Ord NumVal where
compare n1 n2 = toRealNum n1 `compare` toRealNum n2
instance Enum NumVal where
fromEnum = fromInteger . toRealNum
toEnum = walk NumZero
where
walk nv n
| n > 0 = walk (NumSucc nv) $ n - 1
| otherwise = nv
pred nv = case nv of
NumZero -> NumZero
NumSucc v -> v
instance Monoid NumVal where
mempty = NumZero
mappend nv1 nv2 = case (nv1, nv2) of
(NumZero, NumZero) -> NumZero
(NumZero, _) -> nv2
(_, NumZero) -> nv1
(NumSucc v, NumSucc w) -> NumSucc $ NumSucc $ mappend v w
instance Display NumVal where
toDisplay = toDisplay . fromEnum
instance HasType NumVal where
typeof _ = Right TyNat
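-- A minimal sketch (not part of the original module): 'toEnum' builds the Peano
-- representation and 'toRealNum' reads it back, so the two round-trip.
_exampleThree :: NumVal
_exampleThree = toEnum 3            -- NumSucc (NumSucc (NumSucc NumZero))

_exampleRoundTrip :: Integer
_exampleRoundTrip = toRealNum _exampleThree   -- 3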
|
VoQn/tapl-hs
|
src/Chapter8/Syntax/Number.hs
|
Haskell
|
mit
| 1,016
|
{-# LANGUAGE DeriveDataTypeable #-}
module CodeAdapter where
import Control.Monad.IfElse
import SoOSiM
import Code
import Node
data TransfomerState = TransfomerState
data CodeAdapterMsg = Compile BlockCode Architecture
deriving (Typeable)
instance ComponentIface TransfomerState where
initState = TransfomerState
componentName _ = "CodeAdapter"
componentBehaviour state (ComponentMsg caller contents)
| Just (Compile code node) <- adapterAction
-- To be completed
= return state
where adapterAction = fromDynamic contents
componentBehaviour state _ = return state
|
christiaanb/SoOSiM
|
examples/Twente/CodeAdapter.hs
|
Haskell
|
mit
| 597
|
{-# LANGUAGE OverloadedStrings #-}
-----------------------------------------------------------------------------
--
-- Module : DataAssociation.Explore.UI.Web.Server
-- Copyright :
-- License : MIT
--
-- Maintainer : -
-- Stability :
-- Portability :
--
-- |
--
-----------------------------------------------------------------------------
module DataAssociation.Explore.UI.Web.Server (
server
) where
import DataAssociation.Explore.UI.Web.Application
import DataAssociation.Explore.UI.State
import DataAssociation.Explore.UI.Web.Render
import Web.Spock
import Text.Blaze.Html.Renderer.Text
import Data.List (intercalate)
import qualified Data.Text as T
import qualified Data.Text.Lazy as ST
import Control.Monad.IO.Class
import System.FilePath
listenToReactiveElems :: (MonadIO m) => [SomeRenderableWebPage] -> SpockT m ()
listenToReactiveElems elems = sequence_ handlers
where handlers = do SomeRenderableWebPage e <- elems
return $ get (static . intercalate "/" $ reqPath e)
(html . ST.toStrict . renderHtml $ renderWebPage e)
server :: Int -> [SomeRenderableWebPage] -> [FilePath] -> IO ()
server port pages staticRoot = runSpock port . spockT id $ do
listenToReactiveElems pages
let getResource path name = get path $ file name $ joinPath (staticRoot ++ [T.unpack name])
getResource "static/apriori.css" "apriori.css"
getResource "static/apriori.js" "apriori.js"
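-- A minimal usage sketch (not part of the original module; the port and static
-- root are hypothetical): serve an empty page list with static files taken
-- from "./static".
_runExample :: IO ()
_runExample = server 8080 [] ["static"]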
|
fehu/min-dat--a-priori
|
explore-web-backend/src/DataAssociation/Explore/UI/Web/Server.hs
|
Haskell
|
mit
| 1,484
|
{-# LANGUAGE TypeOperators, OverloadedStrings #-}
module Cypher.Actions where
import Cypher.Types
import Cypher.Utils
import Data.Monoid
import Control.Monad.Free
import Data.Text as T
commitTransaction :: Neo4jRequest ~> TransactionResponse
commitTransaction req = liftFn (CommitTransaction req)
root :: Neo4jAction RootResponse
root = liftF $ GetRoot id
authenticate :: T.Text -> T.Text ~> AuthResponse
authenticate user pass = liftFn (Authenticate user pass)
getNode :: Int ~> NodeResponse
getNode nodeId = liftFn (GetNode nodeId)
createNode :: Maybe Props ~> NodeResponse
createNode props = liftFn (CreateNode props)
-- NOTE: Nodes with relationships cannot be deleted.
deleteNode :: Int ~> ()
deleteNode nodeId = liftF (DeleteNode nodeId ())
listPropertyKeys :: Neo4jAction [T.Text]
listPropertyKeys = liftFn ListPropertyKeys
getRelationship :: Int ~> RelationshipResponse
getRelationship relId = liftFn (GetRelationship relId)
-- NOTE: This is a little weird, since the start node is encoded in the relationship itself.
createRelationship :: Int -> Relationship ~> RelationshipResponse
createRelationship nodeId rel = liftFn (CreateRelationship nodeId rel)
deleteRelationship :: Int ~> ()
deleteRelationship relId = liftF (DeleteRelationship relId ())
getRelationshipProperties :: Int ~> Props
getRelationshipProperties relId = liftFn (GetRelationshipProperties relId)
getRelationshipProperty :: Int -> Prop ~> Props
getRelationshipProperty relId prop = liftFn (GetRelationshipProperty relId prop)
setRelationshipProperties :: Int -> Props ~> ()
setRelationshipProperties relId props = liftF (SetRelationshipProperties relId props ())
setRelationshipProperty :: Int -> Prop -> Prop ~> ()
setRelationshipProperty relId propKey propVal = liftF (SetRelationshipProperty relId propKey propVal ())
getNodeRelationships :: Int -> RelType -> [T.Text] ~> [RelationshipResponse]
getNodeRelationships nodeId relType types = liftFn (GetNodeRelationships nodeId relType types)
getRelationshipTypes :: Neo4jAction [T.Text]
getRelationshipTypes = liftFn GetRelationshipTypes
setNodeProperty :: Id -> Prop -> Props ~> ()
setNodeProperty nodeId prop props = liftF (SetNodeProperty nodeId prop props ())
setNodeProperties :: Id -> Props ~> Props
setNodeProperties nodeId prop = liftFn (SetNodeProperties nodeId prop)
getNodeProperty :: Id -> Prop ~> Props
getNodeProperty nodeId prop = liftFn (GetNodeProperty nodeId prop)
deleteNodeProperty :: Id -> Prop ~> ()
deleteNodeProperty nodeId prop = liftF (DeleteNodeProperty nodeId prop ())
deleteNodeProperties :: Id ~> ()
deleteNodeProperties nodeId = liftF (DeleteNodeProperties nodeId ())
findNodesByProp :: T.Text -> T.Text -> Neo4jAction TransactionResponse
findNodesByProp key val = do
let stmt = "MATCH (n{ " <> key <> ":\"" <> val <> "\"}) RETURN n"
commitTransaction (Neo4jRequest [Statement stmt] Nothing)
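-- A minimal usage sketch (not part of the original module; running it requires
-- an interpreter for 'Neo4jAction', which this module does not provide, and the
-- node id 0 is purely illustrative): actions compose in the free monad.
_exampleAction :: Neo4jAction ()
_exampleAction = do
    _ <- createNode Nothing   -- create a node with no properties
    deleteNode 0              -- a real program would take the id from the response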
|
5outh/cypher
|
src/Cypher/Actions.hs
|
Haskell
|
mit
| 2,943
|
import Utils
import qualified Data.Set as S
import Control.Monad.State
import Debug.Trace
import Data.List (foldl')
nmax = 1000000
sumSquareDigit :: Int -> Int
--sumSquareDigit n = sum $ map (^2) $ numberToDigitsBackwards n
sumSquareDigit n | n<10 = n^2
| otherwise = (n `mod` 10)^2 + sumSquareDigit (n `div` 10)
type SolutionSet1 = S.Set Int
type SolutionSet89 = S.Set Int
emptyState = (S.singleton 1, S.singleton 89)
--solveUpTo :: Int -> State (SolutionSet1, SolutionSet89) Int
--solveUpTo n = do
-- forM_ [1..n] $ \k -> do
-- (sol1, sol89) <- get
-- put $ update (sol1, sol89) k
-- (sol1'', sol89'') <- get
-- return $ length sol89''
solveUpTo :: Int -> (SolutionSet1, SolutionSet89)
solveUpTo n = foldl' update emptyState [2..n]
update :: (SolutionSet1, SolutionSet89) -> Int -> (SolutionSet1, SolutionSet89)
update (s1, s89) n = let pred k = k `S.member` s89 || k `S.member` s1
(ks, rest) = break pred $ iterate sumSquareDigit n
s = head rest
ins1 = s `S.member` s1
s1' = if ins1 then foldl' (flip S.insert) s1 ks else s1
s89' = if (not ins1) then foldl' (flip S.insert) s89 ks else s89
in (s1', s89')
target n = head $ dropWhile (\k -> k /= 1 && k /= 89) $ iterate sumSquareDigit n
solve n = length $ fst $ solveUpTo n
--solve n = runState (solveUpTo n) emptyState
--correct n = monadic == fp
-- where monadic = filter (<=n) $ S.toList $ snd $ snd $ solve n
-- fp = filter (\k -> k <= n && target k == 89) [1..n]
--
--answer n = fst $ solve n
answer = solve
main = print $ answer 1000000
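-- A small sanity check (not in the original file): 'target' follows a chain to
-- its terminal value, e.g. 44 -> 32 -> 13 -> 10 -> 1 and 85 -> 89.
_checkChains :: Bool
_checkChains = target 44 == 1 && target 85 == 89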
|
arekfu/project_euler
|
p0092/p0092.hs
|
Haskell
|
mit
| 1,704
|
module Import
( module Import
) where
import Prelude as Import hiding (head, init, last,
readFile, tail, writeFile)
import Yesod as Import hiding (Route (..))
import Yesod.Static as Import
import Yesod.Form.Bootstrap3 as Import
import Data.Text as Import (Text)
import Foundation as Import
import Model as Import
import Settings as Import
-- import Settings.Development as Import
import Settings.StaticFiles as Import
#if __GLASGOW_HASKELL__ >= 704
import Data.Monoid as Import
(Monoid (mappend, mempty, mconcat),
(<>))
#else
import Data.Monoid as Import
(Monoid (mappend, mempty, mconcat))
infixr 5 <>
(<>) :: Monoid m => m -> m -> m
(<>) = mappend
#endif
|
collaborare/antikythera
|
src/Import.hs
|
Haskell
|
mit
| 1,088
|
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.SVGAnimatedEnumeration
(js_setBaseVal, setBaseVal, js_getBaseVal, getBaseVal,
js_getAnimVal, getAnimVal, SVGAnimatedEnumeration,
castToSVGAnimatedEnumeration, gTypeSVGAnimatedEnumeration)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSVal(..), JSString)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSVal(..), FromJSVal(..))
import GHCJS.Marshal.Pure (PToJSVal(..), PFromJSVal(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.JSFFI.Generated.Enums
foreign import javascript unsafe "$1[\"baseVal\"] = $2;"
js_setBaseVal :: SVGAnimatedEnumeration -> Word -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGAnimatedEnumeration.baseVal Mozilla SVGAnimatedEnumeration.baseVal documentation>
setBaseVal :: (MonadIO m) => SVGAnimatedEnumeration -> Word -> m ()
setBaseVal self val = liftIO (js_setBaseVal (self) val)
foreign import javascript unsafe "$1[\"baseVal\"]" js_getBaseVal ::
SVGAnimatedEnumeration -> IO Word
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGAnimatedEnumeration.baseVal Mozilla SVGAnimatedEnumeration.baseVal documentation>
getBaseVal :: (MonadIO m) => SVGAnimatedEnumeration -> m Word
getBaseVal self = liftIO (js_getBaseVal (self))
foreign import javascript unsafe "$1[\"animVal\"]" js_getAnimVal ::
SVGAnimatedEnumeration -> IO Word
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGAnimatedEnumeration.animVal Mozilla SVGAnimatedEnumeration.animVal documentation>
getAnimVal :: (MonadIO m) => SVGAnimatedEnumeration -> m Word
getAnimVal self = liftIO (js_getAnimVal (self))
|
manyoo/ghcjs-dom
|
ghcjs-dom-jsffi/src/GHCJS/DOM/JSFFI/Generated/SVGAnimatedEnumeration.hs
|
Haskell
|
mit
| 2,196
|
module Clavier.SortingAlgorithms (selection_sort, insertion_sort, merge_sort, quick_sort) where
import Data.List
selection_sort :: (Ord a) => [a] -> [a]
selection_sort (x : [])
= [x]
selection_sort []
= []
selection_sort li
= let
select_min min [] prev = (min, prev)
select_min min (x:xs) prev
| x < min = select_min x xs (min:prev)
| otherwise = select_min min xs (x:prev)
repeat_select [] = []
repeat_select (x:xs)
= let
(min, rest) = select_min x xs []
in min : repeat_select rest
in repeat_select li
insertion_sort :: (Ord a) => [a] -> [a]
insertion_sort (x : [])
= [x]
insertion_sort []
= []
insertion_sort li
= let
insert_into x []
= [x]
insert_into x li@(car:cdr)
| x >= car = car : (insert_into x cdr)
| otherwise = x : li
in foldl' (\ accum x -> insert_into x accum) [] li
merge_sort :: (Ord a) => [a] -> [a]
merge_sort (x : [])
= [x]
merge_sort []
= []
merge_sort li
= let
(left, right) = merge_sort_partition li [] [] 0
in merge_sort_merge (merge_sort left) (merge_sort right)
merge_sort_partition :: [a] -> [a] -> [a] -> Int -> ([a], [a])
merge_sort_partition [] left right _ = ((reverse left), (reverse right))
merge_sort_partition (x:xs) left right i
| (mod i 2) == 0 = merge_sort_partition xs (x:left) right (i + 1)
| otherwise = merge_sort_partition xs left (x:right) (i + 1)
merge_sort_merge :: (Ord a) => [a] -> [a] -> [a]
merge_sort_merge [] right = right
merge_sort_merge left [] = left
merge_sort_merge (l:ls) (r:rs)
| l <= r = l : (merge_sort_merge ls (r:rs))
| otherwise = r : (merge_sort_merge (l:ls) rs)
quick_sort :: (Ord a) => [a] -> [a]
quick_sort (x : [])
= [x]
quick_sort []
= []
quick_sort (pivot : xs)
= let
(left, right) = foldl' (\ (left, right) x -> if x <= pivot then ((x:left), right) else (left, (x:right))) ([], []) xs
in (quick_sort left) ++ (pivot : (quick_sort right))
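-- A minimal sanity check (not part of the original module): every algorithm
-- exported above should agree with Data.List.sort on an example list.
_sortsAgree :: Bool
_sortsAgree =
    let xs       = [3, 1, 4, 1, 5, 9, 2, 6] :: [Int]
        expected = sort xs
    in  all (== expected) [selection_sort xs, insertion_sort xs, merge_sort xs, quick_sort xs]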
|
Raekye/The-Modern-Clavier
|
haskell/src/Clavier/SortingAlgorithms.hs
|
Haskell
|
mit
| 1,877
|
module HaskellRead where
import File
import System.FilePath.Posix (takeExtension)
import Text.Regex.Posix
import Util
isSupportedFilePath :: FilePath -> Bool
isSupportedFilePath = (==) ".hs" . takeExtension
isSupportedFile :: File -> Bool
isSupportedFile file =
(extension file) == ".hs"
extractDependencies :: File -> [String]
extractDependencies file =
Prelude.map (moduleToFilePath . captureModule) imports
where
imports = extractImportExpressions file
moduleToFilePath :: String -> FilePath
moduleToFilePath mod =
replaceSafe "." "/" mod ++ ".hs"
extractImportExpressions :: File -> [String]
extractImportExpressions file =
getAllTextMatches $ (contents file) =~ importRegex
importRegex :: String
importRegex = "import([\r\n\t\f\v ]+qualified)?[\r\n\t\f\v ]+([^\r\n\t\f\v ]+)"
captureModule :: String -> String
captureModule importLine = mod
where
match = importLine =~ importRegex :: (String, String, String, [String])
(_, _, _, captures) = match
mod = captures !! 1
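-- Illustrative behaviour, assuming Util.replaceSafe replaces every occurrence:
--
-- > captureModule "import qualified Data.Map as M" == "Data.Map"
-- > moduleToFilePath "Data.Map"                    == "Data/Map.hs"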
|
larioj/nemo
|
src/HaskellRead.hs
|
Haskell
|
mit
| 1,076
|
-- | Exercises for Chapter 6.
--
-- Implement the functions in this module using recursion.
module Chapter06 (and, concat, replicate, elem, (!!), merge, msort) where
import Prelude hiding (and, concat, elem, replicate, (!!))
-- * Exercise 1
-- | Decide if all logical values in a list are true.
and :: [Bool] -> Bool
and = undefined
-- | Concatenate a list of lists.
concat :: [[a]] -> [a]
concat = undefined
-- | Produce a list with @n@ identical elements.
replicate :: Int -> a -> [a]
replicate = undefined
-- | Select the nth element of a list.
(!!) :: [a] -> Int -> a
xs !! n = undefined
-- Note that you can also define @!!@ using prefix notation:
--
-- > (!!) xs n = ...
-- | Decide if a value is an element of a list.
elem :: Eq a => a -> [a] -> Bool
elem = undefined
-- * Exercise 2
-- | Merges two sorted lists of values to give a single sorted list. If either of
-- the two given lists is not sorted then the behavior is undefined.
merge :: Ord a => [a] -> [a] -> [a]
-- Remember to define @merge@ using recursion.
merge = undefined
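-- For example, a correct definition should satisfy:
--
-- > merge [1,3,5] [2,4] == [1,2,3,4,5]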
-- * Exercise 3
msort :: Ord a => [a] -> [a]
msort = undefined
|
EindhovenHaskellMeetup/meetup
|
courses/programming-in-haskell/pih-exercises/src/Chapter06.hs
|
Haskell
|
mit
| 1,126
|
module DecryptEnvironment () where
import Text.Read(readMaybe)
import Global(Error, Arg, Rounds, KeyFilename)
type DecryptEnv = () -- TODO
|
tombusby/haskell-des
|
round/DecryptEnvironment.hs
|
Haskell
|
mit
| 142
|
-- There might be a clever way to work out the digits on paper
-- by counting the number sizes, but this is faster.
import Data.Char (ord)
digitToInt d = (ord d) - (ord '0')
frac = concat $ map show [0..]
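-- Because show 0 contributes a leading '0', frac !! n is exactly the n-th
-- digit of Champernowne's constant, so no off-by-one adjustment is needed.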
digits = map (\i -> digitToInt $ frac !! (10^i)) [0..6]
answer = product digits
main = print answer
|
gumgl/project-euler
|
40/40.hs
|
Haskell
|
mit
| 310
|
module Data.Set.Data where
import qualified Data.Set as Set
setTo :: Int -> Set.Set Int
setTo n = Set.fromList [1..n]
set1 :: Set.Set Int
set1 = setTo 10
set2 :: Set.Set Int
set2 = setTo 20
set3 :: Set.Set Int
set3 = setTo 30
set4 :: Set.Set Int
set4 = setTo 40
set5 :: Set.Set Int
set5 = setTo 50
|
athanclark/sets
|
bench/Data/Set/Data.hs
|
Haskell
|
mit
| 307
|
{-# LANGUAGE BangPatterns #-}
module Main where
import System.Random (randomRIO)
import Control.Monad (replicateM)
import Debug.Trace
-- | Perceptron implementation in Haskell (dirty)
--
type Input = [Double]
type Data = [(Input, Double)]
type Weights = [Double]
type Rate = Double -- [0,1]
round' :: Int -> Double -> Double
round' n = (/10^n). fromInteger . round . (* 10^n)
f :: (Num a, Ord a) => a -> Double
f x
| x < 0 = 0
| otherwise = 1.0
runInput :: [Double] -> [Double] -> Double
runInput inputs ws = f $ sum $ zipWith (*) inputs ws
learn :: Rate -> Data -> Weights -> Weights
learn rate [] ws = ws
learn rate ((xs,y):ds) ws
| y == y_hat = learn rate ds ws
| otherwise = learn rate ds (update xs ws)
where
y_hat = runInput xs ws
update :: [Double] -> [Double] -> [Double]
update xs = map (\(x,w) -> round' 3 (w + rate * (y - y_hat) * x)) . zip xs
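-- The update above is the classic perceptron rule, w <- w + rate * (y - y_hat) * x,
-- applied component-wise; the rounding to 3 decimals presumably helps the
-- equality-based convergence check in 'run' (w == w') terminate.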
run :: Rate -> Data -> Weights -> Weights
run rate = go 0
where go :: Int -> Data -> Weights -> Weights
go n d w = let w' = learn rate d w
in case w == w' of
True -> trace ("Steps: " ++ show n) w
False -> trace (show w) $ go (n+1) d w'
test :: Weights -> Data -> IO ()
test weights dt = do
let f x
| runInput x weights == 1 = "True"
| otherwise = "False"
pretty (x,i) = putStrLn $ show i ++ " - " ++ f x
mapM_ pretty (zip (map fst dt) [0..])
-- | Data
--
odds :: Data
odds = [ (x0, 0) -- 0
, (x1, 1) -- 1
, (x2, 0) -- 2
, (x3, 1) -- 3
, (x4, 0) -- 4
, (x5, 1) -- 5
, (x6, 0) -- 6
, (x7, 1) -- 7
, (x8, 0) -- 8
, (x9, 1) -- 9
]
-- > run 0.1 odds weights
-- > rweights >>= return . run 0.1 odds
-- > test it odds
mul3 :: Data
mul3 = [ (x0, 0) -- 0
, (x1, 0) -- 1
, (x2, 0) -- 2
, (x3, 1) -- 3
, (x4, 0) -- 4
, (x5, 0) -- 5
, (x6, 1) -- 6
, (x7, 0) -- 7
, (x8, 0) -- 8
, (x9, 1) -- 9
]
-- > run 0.1 mul3 weights
-- > rweights >>= return . run 0.1 mul3
-- > test it mul3
ors :: Data
ors = [ ([1,0,0], 0)
, ([1,0,1], 1)
, ([1,1,0], 1)
, ([1,1,1], 1)
]
-- > run 0.1 ors (weightsc 3)
-- > test it ors
custom :: Data
custom = [ ([1, 2, 4 ], 1)
, ([1, 0.5, 1.5], 1)
, ([1, 1, 0.5], 0)
, ([1, 0, 0.5], 0)
]
-- > run 0.1 custom (weightsc 3)
-- > test it custom
weightsc :: Int -> [Double]
weightsc n = replicate n 0.0
weights :: [Double]
weights = replicate (length (xs !! 0)) 0.0
rweights :: IO [Double]
rweights = replicateM (length (xs !! 0)) $ randomRIO (-1.0,1.0)
xs :: [Input]
xs = [x0, x1, x2, x3, x4, x5, x6, x7, x8, x9]
x0 = [1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1] :: [Double] -- 0
x1 = [0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0] :: [Double] -- 1
x2 = [1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1] :: [Double] -- 2
x3 = [1, 1, 1, 0, 0, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1] :: [Double] -- 3
x4 = [1, 0, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1] :: [Double] -- 4
x5 = [1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 1, 1, 1] :: [Double] -- 5
x6 = [1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1] :: [Double] -- 6
x7 = [1, 1, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1] :: [Double] -- 7
x8 = [1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1] :: [Double] -- 8
x9 = [1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1] :: [Double] -- 9
|
cirquit/Personal-Repository
|
Haskell/machine-learning/perceptron/perceptron.hs
|
Haskell
|
mit
| 3,511
|
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Parse
import System.Environment
main = do
f <- readFile "prog.txt"
args <- getArgs
let command = if length args > 0 then prog else finish prog
case runParser command f of
xs -> do
mapM_ print $ take 5 xs
print $ length xs
|
kovach/cards
|
src/Main.hs
|
Haskell
|
mit
| 310
|
{-# Language TemplateHaskell #-}
module Main where
-- base
import Data.Monoid
import Control.Applicative
-- linear
import Linear
-- lens
import Control.Lens
-- gloss
import Graphics.Gloss
import Graphics.Gloss.Interface.Pure.Game
-- colour
import Data.Colour (Colour)
import Data.Colour.RGBSpace
import Data.Colour.RGBSpace.HSV
-- falling
import Falling
-- | The interface can be in two states: one, where everything
-- is just running by itself, and another, where we're adding
-- and changing things about one particular particle.
data Interface n
= Running
{ _particles :: [Particle n]
}
| Adding
{ _adding :: Particle n
, _particles :: [Particle n]
}
deriving
( Eq
, Ord
, Show
)
makeLenses ''Interface
makePrisms ''Interface
-- | Do something with input.
withInput :: Event -> Interface Float -> Interface Float
-- If we get a mouseclick down in 'Running', add a particle at the
-- place of the click and switch to 'Adding'; we'll be editing
-- it until the corresponding mouseclick up.
withInput (EventKey (MouseButton LeftButton) Down _ (x', y'))
(Running ps) = Adding (particle $ V3 x' y' 0) ps
-- If we get a mouseclick up while in 'Adding', push the particle
-- into the list of particles and switch to 'Running'.
withInput (EventKey (MouseButton LeftButton) _ _ _)
  (Adding p ps) = Running (p : ps)
-- If we get a mouse movement while in 'Adding', change the
-- velocity of the new particle to the new place of the mouse;
-- keep in mind the offset of the particle.
withInput (EventMotion (x', y')) (Adding p ps) = Adding
  (velocity .~ view place p - V3 x' y' 0 $ p) ps
-- Otherwise just ignore the input.
withInput _ i = i
-- Draw an interface.
redraw :: Interface Float -> Picture
redraw i = foldMapOf (particles . traverse) single i
<> case i of
Running _ -> mempty
-- If we're in an 'Adding', draw the new particle
-- and a line indicating its velocity.
Adding p _ -> let
-- The vector being drawn from the particle.
d :: V3 Float
d = p ^. place - p ^. velocity
-- The color to draw the vector in.
c :: Color
c = uncurryRGB makeColor
(hsv (angle (d ^. _x) (d ^. _y)) 0.5 0.5) 1 in
-- Draw the particle we're adding and a line from it
-- to the mouse, representing the inverse of its
-- velocity.
single p <> Color c (Line
[ (p ^. place . _x, p ^. place . _y)
, (d ^. _x, d ^. _y)
])
where
-- Radians to degrees.
degrees :: Floating t => t -> t
degrees = (*) (180 / pi)
-- Angle of a vector, in degrees.
angle :: Floating t => t -> t -> t
angle x y = degrees . atan $ y / x
-- Draw a single particle as a white circle.
single :: Particle Float -> Picture
single = Translate <$> (^. place . _x)
<*> (^. place . _y)
?? Color white (circleSolid 2.0)
-- A massive particle at the origin.
sun :: Particle Float
sun = Particle 0 0 200
-- Iterate the world.
iteration :: Float -> Interface Float -> Interface Float
iteration t i = if has _Adding i then i
else particles %~ update . map (move t) $ i
main :: IO ()
main = play (InWindow "falling!" (300, 300) (100, 100))
black 40 (Running [sun]) redraw withInput iteration
|
startling/falling
|
main.hs
|
Haskell
|
mit
| 3,241
|
-- -------------------------------------------------------------------------------------
-- Author: Sourabh S Joshi (cbrghostrider); Copyright - All rights reserved.
-- For email, run on linux (perl v5.8.5):
-- perl -e 'print pack "H*","736f75726162682e732e6a6f73686940676d61696c2e636f6d0a"'
-- -------------------------------------------------------------------------------------
import Data.List
type Coord = (Int, Int) -- (Row, Col), top left is (0, 0)
type Triangle = (Coord, Coord, Coord) -- (top, bot-left, bot-right)
numRows :: Int
numRows = 32
numCols :: Int
numCols = 63
lineToChars :: [Int] -> String
lineToChars os = map (\n -> if n`elem` os then '1' else '_') $ [1..numCols]
genIteration0 :: ([String], [Triangle])
genIteration0 =
let ss = take numRows . map lineToChars . ([[numRows]] ++ ) $ unfoldr (\rs -> let nrs = ((head rs - 1) : rs) ++ [last rs + 1] in Just (nrs, nrs)) [numRows]
ts = [((0, numRows-1), (numRows-1, 0), (numRows-1, numCols-1))]
in (ss, ts)
-- takes a triangle to further fractalize, and returns
-- (triangle-to-be-nullified, [new-triangles-in-fractal])
createNewTriangles :: Triangle -> (Triangle, [Triangle])
createNewTriangles ts@(top, botl, botr) =
let szc = ((snd botl + snd botr) `div` 2) + ((snd botl + snd botr) `mod` 2)
szcby14 = ((snd botl + szc - 1) `div` 2) + ((snd botl + szc - 1) `mod` 2)
szcby34 = ((snd botr + szc + 1) `div` 2) + ((snd botr + szc + 1) `mod` 2)
szr = ((fst top + fst botl) `div` 2) -- + ((fst top + fst botl) `mod` 2)
midbot = ((fst botl), szc)
midbotm1 = ((fst botl), (szc - 1))
midbotp1 = ((fst botl), (szc + 1))
midl = (szr+1, szcby14)
midlp1 = (szr+1, szcby14 + 1)
midlup = (szr, szcby14 + 1)
midr = (szr+1, szcby34)
midrm1 = (szr+1, szcby34 - 1)
midrup = (szr, szcby34 - 1)
in ((midlp1, midrm1, midbot), [(top, midlup, midrup), (midl, botl, midbotm1), (midr, midbotp1, botr)])
nullifyRow :: [String] -> (Coord, Coord) -> [String]
nullifyRow ss (left@(lr, lc), right@(rr, rc)) = priorRows ++ [thisRow] ++ restRows
where
priorRows = take (lr) ss
restRows = drop (lr+1) ss
myRow = last . take (lr+1) $ ss
thisRow = ((take (lc) myRow) ++ (take (rc - lc + 1) (repeat '_')) ++ (drop (rc+1) myRow) )
-- sets all the locations within this triangle to '_'
nullifyTriangle :: Triangle -> [String] -> [String]
nullifyTriangle tri@(tl, tr, bot) ss = foldl' (nullifyRow) ss $ rowCoords
where
rowCoords = take (fst bot - fst tl + 1) $ ((tl, tr) : (unfoldr (\ (l@(lr, lc), r@(rr, rc)) -> let nc = ((lr+1, lc+1), (rr+1, rc-1)) in Just (nc, nc)) $ (tl, tr)))
-- runs one more iteration of fractalization
iterateTri :: ([String], [Triangle]) -> ([String], [Triangle])
iterateTri (ss, ts) = foldl' (\acc@(ass, ats) t ->
let (nt, newts) = createNewTriangles t
newss = nullifyTriangle nt ass
in (newss, ats ++ newts)
)
(ss, []) $ ts
fractalize :: Int -> Int -> ([String], [Triangle]) -> ([String], [Triangle])
fractalize 0 times _ =
let initseq = genIteration0
in if times == 1 then initseq else fractalize 1 times initseq
fractalize inum times (ss, ts) =
let newseq = iterateTri (ss, ts)
in if inum == times - 1 then newseq else fractalize (inum+1) times newseq
main :: IO ()
main = do
nstr <- getLine
let css = fst $ fractalize 0 (1 + read nstr) ([], [])
putStrLn $ unlines css
|
cbrghostrider/Hacking
|
HackerRank/FunctionalProgramming/Recursion/sierpinskiTriangleFractals.hs
|
Haskell
|
mit
| 3,648
|
module Main where
import Tokenizer
import Types
import Grammar
import FP_ParserGen -- Touching this file leaves you at your own devices
import ASTBuilder
import Checker
import CodeGen
-- import Simulation
import System.FilePath
import Sprockell
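-- The pipeline in 'main' (read bottom-up): tokenize the source file, parse it
-- with the grammar starting at Program, build an AST, run the checker,
-- generate Sprockell code, and run one copy per requested Sprockell.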
main :: IO()
main = do
putStrLn "What file do you want to run? Please provide the relative path excluding the extension."
fileName <- getLine :: IO String
putStrLn "How many Sprockells do you want to use to run this file?"
sprockells <- getLine :: IO String
putStrLn $ "Running: " ++ fileName ++ ".shl"
putStrLn $ "On " ++ sprockells ++ " Sprockells"
file <- readFile $ fileName ++ ".shl"
run $
replicate (read sprockells :: Int) $
codeGen' (read sprockells :: Int) $
checker $
pTreeToAst $
parse grammar Program $
toTokenList $
tokenizer file
|
wouwouwou/2017_module_8
|
src/haskell/PP-project-2017/Main.hs
|
Haskell
|
apache-2.0
| 897
|
{-# language DataKinds #-}
{-# language FlexibleContexts #-}
{-# language PartialTypeSignatures #-}
{-# language OverloadedLabels #-}
{-# language OverloadedStrings #-}
{-# language TypeApplications #-}
{-# OPTIONS_GHC -fno-warn-partial-type-signatures #-}
module Main where
import Data.Conduit
import qualified Data.Conduit.Combinators as C
import Data.Monoid as M
import Data.Text as T
( Text )
import Mu.GRpc.Server
import Mu.Server
import Mu.Schema.Optics
import ProtobufProtocol
main :: IO ()
main = runGRpcApp msgProtoBuf 9123 server
ping :: (MonadServer m) => m ()
ping = return ()
getForecast :: (MonadServer m) => GetForecastRequest -> m GetForecastResponse
getForecast req = return resp
where
days = fromIntegral $ req ^. #days_required
forecasts = sunnyDays days
resp = record (lastUpdated, forecasts)
lastUpdated :: T.Text
lastUpdated = "2020-03-20T12:00:00Z"
sunnyDays :: Int -> [Weather]
sunnyDays n = replicate n (enum @"SUNNY")
publishRainEvents
:: (MonadServer m) => ConduitT () RainEvent m () -> m RainSummaryResponse
publishRainEvents source = toResponse <$> countRainStartedEvents
where
toResponse count = record1 $ fromIntegral (M.getSum count)
countRainStartedEvents = runConduit $ source .| C.foldMap countMsg
countMsg msg = countEvent $ msg ^. #event_type
countEvent (Just x) | x == started = M.Sum 1
countEvent Nothing = M.Sum 1
countEvent _ = M.Sum 0
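-- Counting Nothing alongside 'started' presumably reflects proto3 semantics,
-- where the default enum value is omitted on the wire (an assumption about
-- the RainEvent schema, not something stated in this module).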
subscribeToRainEvents
:: (MonadServer m)
=> SubscribeToRainEventsRequest
-> ConduitT RainEvent Void m ()
-> m ()
subscribeToRainEvents req sink = runConduit $ C.yieldMany events .| sink
where
events = toRainEvent <$> [started, stopped, started, stopped, started]
toRainEvent x = record (city, Just x)
city = req ^. #city
server :: (MonadServer m) => SingleServerT info WeatherService m _
server = singleService
( method @"ping" ping
, method @"getForecast" getForecast
, method @"publishRainEvents" publishRainEvents
, method @"subscribeToRainEvents" subscribeToRainEvents
)
|
frees-io/freestyle-rpc
|
modules/haskell-integration-tests/mu-haskell-client-server/protobuf-server/Main.hs
|
Haskell
|
apache-2.0
| 2,250
|
module Coins.A263135Spec (main, spec) where
import Test.Hspec
import Coins.A263135 (a263135)
main :: IO ()
main = hspec spec
spec :: Spec
spec = describe "A263135" $
it "correctly computes the first 20 elements" $
map a263135 [0..20] `shouldBe` expectedValue where
expectedValue = [0,0,1,2,3,4,6,7,8,9,11,12,13,15,16,17,19,20,21,23,24]
|
peterokagey/haskellOEIS
|
test/Coins/A263135Spec.hs
|
Haskell
|
apache-2.0
| 354
|
-- Copyright 2012-2014 Samplecount S.L.
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
module Development.Shake.Language.C.Host.Linux (
getHostToolChain
) where
import Development.Shake
import Development.Shake.Language.C.Target
import Development.Shake.Language.C.Target.Linux
import Development.Shake.Language.C.ToolChain
import System.Process (readProcess)
-- | Get host architecture.
getHostArch :: IO Arch
getHostArch = do
arch <- fmap (head.lines) $ readProcess "uname" ["-m"] ""
return $ case arch of
"i386" -> X86 I386
"i686" -> X86 I686
"x86_64" -> X86 X86_64
"armv7l" -> Arm Armv7
_ -> error $ "Unknown host architecture " ++ arch
-- | Get host toolchain.
getHostToolChain :: IO (Target, Action ToolChain)
getHostToolChain = do
t <- fmap target getHostArch
return (t, return $ toolChain GCC)
|
samplecount/shake-language-c
|
src/Development/Shake/Language/C/Host/Linux.hs
|
Haskell
|
apache-2.0
| 1,375
|
module Sol1 where
import GS
-- 1.9
maxInt :: [Int] -> Int
maxInt [] = error "empty list"
maxInt [x] = x
maxInt (x:xs) = max x (maxInt xs)
-- 1.10
removeFst :: [Int] -> Int -> [Int]
removeFst [] y = []
removeFst (x:xs) y | x == y = xs
| otherwise = x:removeFst xs y
-- 1.13
count :: Char -> String -> Int
count x [] = 0
count x (y:ys) | x == y = 1 + count x ys
| otherwise = count x ys
-- 1.14
blowup :: String -> String
blowup [] = []
blowup x = blowup (init x) ++ replicate (length x) (last x)
-- 1.15 (simple sort: insert each element into the sorted rest)
srtStringS :: [String] -> [String]
srtStringS [] = []
srtStringS (x:xs) = insertS x (srtStringS xs)
  where
    insertS z [] = [z]
    insertS z (y:ys) | z > y     = y : insertS z ys
                     | otherwise = z : y : ys
-- 1.15 alternative, quicksort (faster)
srtStringQ :: [String] -> [String]
srtStringQ [] = []
srtStringQ (x:xs) = (srtStringQ lesser) ++ [x] ++ (srtStringQ greater)
where
lesser = filter (< x) xs
greater = filter (>= x) xs
-- 1.15 routing
srtString = srtStringQ
-- prefix
prefix :: String -> String -> Bool
prefix [] ys = True
prefix (x:xs) [] = False
prefix (x:xs) (y:ys) = (x == y) && prefix xs ys
-- 1.17
substring :: String -> String -> Bool
substring [] ys = True
substring xs [] = False
substring (x:xs) (y:ys) = prefix (x:xs) (y:ys) || substring (x:xs) ys
-- 1.20
lengths :: [[a]] -> [Int]
lengths x = map length x
-- 1.21
sumLengths :: [[a]] -> Int
sumLengths x = sum (lengths x)
|
bartolkaruza/software-testing-2014-group-W1
|
week1/week_1_group_w1/Sol1.hs
|
Haskell
|
apache-2.0
| 1,655
|
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QGLFramebufferObject.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:36
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Enums.Opengl.QGLFramebufferObject (
Attachment, eNoAttachment, eCombinedDepthStencil, eDepth
)
where
import Foreign.C.Types
import Qtc.Classes.Base
import Qtc.ClassTypes.Core (QObject, TQObject, qObjectFromPtr)
import Qtc.Core.Base (Qcs, connectSlot, qtc_connectSlot_int, wrapSlotHandler_int)
import Qtc.Enums.Base
import Qtc.Enums.Classes.Core
data CAttachment a = CAttachment a
type Attachment = QEnum(CAttachment Int)
ieAttachment :: Int -> Attachment
ieAttachment x = QEnum (CAttachment x)
instance QEnumC (CAttachment Int) where
qEnum_toInt (QEnum (CAttachment x)) = x
qEnum_fromInt x = QEnum (CAttachment x)
withQEnumResult x
= do
ti <- x
return $ qEnum_fromInt $ fromIntegral ti
withQEnumListResult x
= do
til <- x
return $ map qEnum_fromInt til
instance Qcs (QObject c -> Attachment -> IO ()) where
connectSlot _qsig_obj _qsig_nam _qslt_obj _qslt_nam _handler
= do
funptr <- wrapSlotHandler_int slotHandlerWrapper_int
stptr <- newStablePtr (Wrap _handler)
withObjectPtr _qsig_obj $ \cobj_sig ->
withCWString _qsig_nam $ \cstr_sig ->
withObjectPtr _qslt_obj $ \cobj_slt ->
withCWString _qslt_nam $ \cstr_slt ->
qtc_connectSlot_int cobj_sig cstr_sig cobj_slt cstr_slt (toCFunPtr funptr) (castStablePtrToPtr stptr)
return ()
where
slotHandlerWrapper_int :: Ptr fun -> Ptr () -> Ptr (TQObject c) -> CInt -> IO ()
slotHandlerWrapper_int funptr stptr qobjptr cint
= do qobj <- qObjectFromPtr qobjptr
let hint = fromCInt cint
if (objectIsNull qobj)
then do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
else _handler qobj (qEnum_fromInt hint)
return ()
eNoAttachment :: Attachment
eNoAttachment
= ieAttachment $ 0
eCombinedDepthStencil :: Attachment
eCombinedDepthStencil
= ieAttachment $ 1
eDepth :: Attachment
eDepth
= ieAttachment $ 2
|
keera-studios/hsQt
|
Qtc/Enums/Opengl/QGLFramebufferObject.hs
|
Haskell
|
bsd-2-clause
| 2,489
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveGeneric #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.Setup
-- Copyright : Isaac Jones 2003-2004
-- Duncan Coutts 2007
-- License : BSD3
--
-- Maintainer : cabal-devel@haskell.org
-- Portability : portable
--
-- This is a big module, but not very complicated. The code is very regular
-- and repetitive. It defines the command line interface for all the Cabal
-- commands. For each command (like @configure@, @build@ etc) it defines a type
-- that holds all the flags, the default set of flags and a 'CommandUI' that
-- maps command line flags to and from the corresponding flags type.
--
-- All the flags types are instances of 'Monoid', see
-- <http://www.haskell.org/pipermail/cabal-devel/2007-December/001509.html>
-- for an explanation.
--
-- The types defined here get used in the front end and especially in
-- @cabal-install@ which has to do quite a bit of manipulating sets of command
-- line flags.
--
-- This is actually relatively nice, it works quite well. The main change it
-- needs is to unify it with the code for managing sets of fields that can be
-- read and written from files. This would allow us to save configure flags in
-- config files.
module Distribution.Simple.Setup (
GlobalFlags(..), emptyGlobalFlags, defaultGlobalFlags, globalCommand,
ConfigFlags(..), emptyConfigFlags, defaultConfigFlags, configureCommand,
configPrograms,
RelaxDeps(..), RelaxedDep(..), isRelaxDeps,
AllowNewer(..), AllowOlder(..),
configAbsolutePaths, readPackageDbList, showPackageDbList,
CopyFlags(..), emptyCopyFlags, defaultCopyFlags, copyCommand,
InstallFlags(..), emptyInstallFlags, defaultInstallFlags, installCommand,
HaddockTarget(..),
HaddockFlags(..), emptyHaddockFlags, defaultHaddockFlags, haddockCommand,
HscolourFlags(..), emptyHscolourFlags, defaultHscolourFlags, hscolourCommand,
BuildFlags(..), emptyBuildFlags, defaultBuildFlags, buildCommand,
buildVerbose,
ReplFlags(..), defaultReplFlags, replCommand,
CleanFlags(..), emptyCleanFlags, defaultCleanFlags, cleanCommand,
RegisterFlags(..), emptyRegisterFlags, defaultRegisterFlags, registerCommand,
unregisterCommand,
SDistFlags(..), emptySDistFlags, defaultSDistFlags, sdistCommand,
TestFlags(..), emptyTestFlags, defaultTestFlags, testCommand,
TestShowDetails(..),
BenchmarkFlags(..), emptyBenchmarkFlags,
defaultBenchmarkFlags, benchmarkCommand,
CopyDest(..),
configureArgs, configureOptions, configureCCompiler, configureLinker,
buildOptions, haddockOptions, installDirsOptions,
programDbOptions, programDbPaths',
programConfigurationOptions, programConfigurationPaths',
splitArgs,
defaultDistPref, optionDistPref,
Flag(..),
toFlag,
fromFlag,
fromFlagOrDefault,
flagToMaybe,
flagToList,
BooleanFlag(..),
boolOpt, boolOpt', trueArg, falseArg,
optionVerbosity, optionNumJobs, readPToMaybe ) where
import Prelude ()
import Distribution.Compat.Prelude hiding (get)
import Distribution.Compiler
import Distribution.ReadE
import Distribution.Text
import qualified Distribution.Compat.ReadP as Parse
import qualified Text.PrettyPrint as Disp
import Distribution.Package
import Distribution.PackageDescription hiding (Flag)
import Distribution.Simple.Command hiding (boolOpt, boolOpt')
import qualified Distribution.Simple.Command as Command
import Distribution.Simple.Compiler hiding (Flag)
import Distribution.Simple.Utils
import Distribution.Simple.Program
import Distribution.Simple.InstallDirs
import Distribution.Verbosity
import Distribution.Utils.NubList
import Distribution.Compat.Semigroup (Last' (..))
import Data.Function (on)
-- FIXME Not sure where this should live
defaultDistPref :: FilePath
defaultDistPref = "dist"
-- ------------------------------------------------------------
-- * Flag type
-- ------------------------------------------------------------
-- | All flags are monoids; they come in two flavours:
--
-- 1. list flags eg
--
-- > --ghc-option=foo --ghc-option=bar
--
-- gives us all the values ["foo", "bar"]
--
-- 2. singular value flags, eg:
--
-- > --enable-foo --disable-foo
--
-- gives us Just False
-- So this Flag type is for the latter singular kind of flag.
-- Its monoid instance gives us the behaviour where it starts out as
-- 'NoFlag' and later flags override earlier ones.
--
data Flag a = Flag a | NoFlag deriving (Eq, Generic, Show, Read)
instance Binary a => Binary (Flag a)
instance Functor Flag where
fmap f (Flag x) = Flag (f x)
fmap _ NoFlag = NoFlag
instance Monoid (Flag a) where
mempty = NoFlag
mappend = (<>)
instance Semigroup (Flag a) where
_ <> f@(Flag _) = f
f <> NoFlag = f
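-- For example (illustrative):
--
-- > NoFlag    <> Flag True  == Flag True
-- > Flag True <> Flag False == Flag False
-- > mconcat [NoFlag, Flag 'a', Flag 'b'] == Flag 'b'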
instance Bounded a => Bounded (Flag a) where
minBound = toFlag minBound
maxBound = toFlag maxBound
instance Enum a => Enum (Flag a) where
fromEnum = fromEnum . fromFlag
toEnum = toFlag . toEnum
enumFrom (Flag a) = map toFlag . enumFrom $ a
enumFrom _ = []
enumFromThen (Flag a) (Flag b) = toFlag `map` enumFromThen a b
enumFromThen _ _ = []
enumFromTo (Flag a) (Flag b) = toFlag `map` enumFromTo a b
enumFromTo _ _ = []
enumFromThenTo (Flag a) (Flag b) (Flag c) = toFlag `map` enumFromThenTo a b c
enumFromThenTo _ _ _ = []
toFlag :: a -> Flag a
toFlag = Flag
fromFlag :: Flag a -> a
fromFlag (Flag x) = x
fromFlag NoFlag = error "fromFlag NoFlag. Use fromFlagOrDefault"
fromFlagOrDefault :: a -> Flag a -> a
fromFlagOrDefault _ (Flag x) = x
fromFlagOrDefault def NoFlag = def
flagToMaybe :: Flag a -> Maybe a
flagToMaybe (Flag x) = Just x
flagToMaybe NoFlag = Nothing
flagToList :: Flag a -> [a]
flagToList (Flag x) = [x]
flagToList NoFlag = []
allFlags :: [Flag Bool] -> Flag Bool
allFlags flags = if all (\f -> fromFlagOrDefault False f) flags
then Flag True
else NoFlag
-- | Types that represent boolean flags.
class BooleanFlag a where
asBool :: a -> Bool
instance BooleanFlag Bool where
asBool = id
-- ------------------------------------------------------------
-- * Global flags
-- ------------------------------------------------------------
-- In fact since individual flags types are monoids and these are just sets of
-- flags then they are also monoids pointwise. This turns out to be really
-- useful. The mempty is the set of empty flags and mappend allows us to
-- override specific flags. For example we can start with default flags and
-- override with the ones we get from a file or the command line, or both.
-- | Flags that apply at the top level, not to any sub-command.
data GlobalFlags = GlobalFlags {
globalVersion :: Flag Bool,
globalNumericVersion :: Flag Bool
} deriving (Generic)
defaultGlobalFlags :: GlobalFlags
defaultGlobalFlags = GlobalFlags {
globalVersion = Flag False,
globalNumericVersion = Flag False
}
globalCommand :: [Command action] -> CommandUI GlobalFlags
globalCommand commands = CommandUI
{ commandName = ""
, commandSynopsis = ""
, commandUsage = \pname ->
"This Setup program uses the Haskell Cabal Infrastructure.\n"
++ "See http://www.haskell.org/cabal/ for more information.\n"
++ "\n"
++ "Usage: " ++ pname ++ " [GLOBAL FLAGS] [COMMAND [FLAGS]]\n"
, commandDescription = Just $ \pname ->
let
commands' = commands ++ [commandAddAction helpCommandUI undefined]
cmdDescs = getNormalCommandDescriptions commands'
maxlen = maximum $ [length name | (name, _) <- cmdDescs]
align str = str ++ replicate (maxlen - length str) ' '
in
"Commands:\n"
++ unlines [ " " ++ align name ++ " " ++ descr
| (name, descr) <- cmdDescs ]
++ "\n"
++ "For more information about a command use\n"
++ " " ++ pname ++ " COMMAND --help\n\n"
++ "Typical steps for installing Cabal packages:\n"
++ concat [ " " ++ pname ++ " " ++ x ++ "\n"
| x <- ["configure", "build", "install"]]
, commandNotes = Nothing
, commandDefaultFlags = defaultGlobalFlags
, commandOptions = \_ ->
[option ['V'] ["version"]
"Print version information"
globalVersion (\v flags -> flags { globalVersion = v })
trueArg
,option [] ["numeric-version"]
"Print just the version number"
globalNumericVersion (\v flags -> flags { globalNumericVersion = v })
trueArg
]
}
emptyGlobalFlags :: GlobalFlags
emptyGlobalFlags = mempty
instance Monoid GlobalFlags where
mempty = gmempty
mappend = (<>)
instance Semigroup GlobalFlags where
(<>) = gmappend
-- ------------------------------------------------------------
-- * Config flags
-- ------------------------------------------------------------
-- | Generic data type for policy when relaxing bounds in dependencies.
-- Don't use this directly: use 'AllowOlder' or 'AllowNewer' depending
-- on whether or not you are relaxing a lower or upper bound
-- (respectively).
data RelaxDeps =
-- | Default: honor the upper bounds in all dependencies, never choose
-- versions newer than allowed.
RelaxDepsNone
-- | Ignore upper bounds in dependencies on the given packages.
| RelaxDepsSome [RelaxedDep]
-- | Ignore upper bounds in dependencies on all packages.
| RelaxDepsAll
deriving (Eq, Read, Show, Generic)
-- | 'RelaxDeps' in the context of upper bounds (i.e. for @--allow-newer@ flag)
newtype AllowNewer = AllowNewer { unAllowNewer :: RelaxDeps }
deriving (Eq, Read, Show, Generic)
-- | 'RelaxDeps' in the context of lower bounds (i.e. for @--allow-older@ flag)
newtype AllowOlder = AllowOlder { unAllowOlder :: RelaxDeps }
deriving (Eq, Read, Show, Generic)
-- | Dependencies can be relaxed either for all packages in the install plan, or
-- only for some packages.
data RelaxedDep = RelaxedDep PackageName
| RelaxedDepScoped PackageName PackageName
deriving (Eq, Read, Show, Generic)
instance Text RelaxedDep where
disp (RelaxedDep p0) = disp p0
disp (RelaxedDepScoped p0 p1) = disp p0 Disp.<> Disp.colon Disp.<> disp p1
parse = scopedP Parse.<++ normalP
where
scopedP = RelaxedDepScoped `fmap` parse <* Parse.char ':' <*> parse
normalP = RelaxedDep `fmap` parse
instance Binary RelaxDeps
instance Binary RelaxedDep
instance Binary AllowNewer
instance Binary AllowOlder
instance Semigroup RelaxDeps where
RelaxDepsNone <> r = r
l@RelaxDepsAll <> _ = l
l@(RelaxDepsSome _) <> RelaxDepsNone = l
(RelaxDepsSome _) <> r@RelaxDepsAll = r
(RelaxDepsSome a) <> (RelaxDepsSome b) = RelaxDepsSome (a ++ b)
instance Monoid RelaxDeps where
mempty = RelaxDepsNone
mappend = (<>)
instance Semigroup AllowNewer where
AllowNewer x <> AllowNewer y = AllowNewer (x <> y)
instance Semigroup AllowOlder where
AllowOlder x <> AllowOlder y = AllowOlder (x <> y)
instance Monoid AllowNewer where
mempty = AllowNewer mempty
mappend = (<>)
instance Monoid AllowOlder where
mempty = AllowOlder mempty
mappend = (<>)
-- | Convert 'RelaxDeps' to a boolean.
isRelaxDeps :: RelaxDeps -> Bool
isRelaxDeps RelaxDepsNone = False
isRelaxDeps (RelaxDepsSome _) = True
isRelaxDeps RelaxDepsAll = True
relaxDepsParser :: Parse.ReadP r (Maybe RelaxDeps)
relaxDepsParser =
(Just . RelaxDepsSome) `fmap` Parse.sepBy1 parse (Parse.char ',')
relaxDepsPrinter :: (Maybe RelaxDeps) -> [Maybe String]
relaxDepsPrinter Nothing = []
relaxDepsPrinter (Just RelaxDepsNone) = []
relaxDepsPrinter (Just RelaxDepsAll) = [Nothing]
relaxDepsPrinter (Just (RelaxDepsSome pkgs)) = map (Just . display) $ pkgs
-- | Flags to @configure@ command.
--
-- IMPORTANT: every time a new flag is added, 'D.C.Setup.filterConfigureFlags'
-- should be updated.
-- IMPORTANT: every time a new flag is added, it should be added to the Eq instance
data ConfigFlags = ConfigFlags {
-- This is the same hack as in 'buildArgs' and 'copyArgs'.
-- TODO: Stop using this eventually when 'UserHooks' gets changed
configArgs :: [String],
--FIXME: the configPrograms is only here to pass info through to configure
-- because the type of configure is constrained by the UserHooks.
-- when we change UserHooks next we should pass the initial
-- ProgramDb directly and not via ConfigFlags
configPrograms_ :: Last' ProgramDb, -- ^All programs that
-- @cabal@ may run
configProgramPaths :: [(String, FilePath)], -- ^user specified programs paths
configProgramArgs :: [(String, [String])], -- ^user specified programs args
configProgramPathExtra :: NubList FilePath, -- ^Extend the $PATH
configHcFlavor :: Flag CompilerFlavor, -- ^The \"flavor\" of the
-- compiler, such as GHC or
-- JHC.
configHcPath :: Flag FilePath, -- ^given compiler location
configHcPkg :: Flag FilePath, -- ^given hc-pkg location
configVanillaLib :: Flag Bool, -- ^Enable vanilla library
configProfLib :: Flag Bool, -- ^Enable profiling in the library
configSharedLib :: Flag Bool, -- ^Build shared library
configDynExe :: Flag Bool, -- ^Enable dynamic linking of the
-- executables.
configProfExe :: Flag Bool, -- ^Enable profiling in the
-- executables.
configProf :: Flag Bool, -- ^Enable profiling in the library
-- and executables.
configProfDetail :: Flag ProfDetailLevel, -- ^Profiling detail level
-- in the library and executables.
configProfLibDetail :: Flag ProfDetailLevel, -- ^Profiling detail level
-- in the library
configConfigureArgs :: [String], -- ^Extra arguments to @configure@
configOptimization :: Flag OptimisationLevel, -- ^Enable optimization.
configProgPrefix :: Flag PathTemplate, -- ^Installed executable prefix.
configProgSuffix :: Flag PathTemplate, -- ^Installed executable suffix.
configInstallDirs :: InstallDirs (Flag PathTemplate), -- ^Installation
-- paths
configScratchDir :: Flag FilePath,
configExtraLibDirs :: [FilePath], -- ^ path to search for extra libraries
configExtraFrameworkDirs :: [FilePath], -- ^ path to search for extra
-- frameworks (OS X only)
configExtraIncludeDirs :: [FilePath], -- ^ path to search for header files
configIPID :: Flag String, -- ^ explicit IPID to be used
configCID :: Flag ComponentId, -- ^ explicit CID to be used
configDistPref :: Flag FilePath, -- ^"dist" prefix
configCabalFilePath :: Flag FilePath, -- ^ Cabal file to use
configVerbosity :: Flag Verbosity, -- ^verbosity level
configUserInstall :: Flag Bool, -- ^The --user\/--global flag
configPackageDBs :: [Maybe PackageDB], -- ^Which package DBs to use
configGHCiLib :: Flag Bool, -- ^Enable compiling library for GHCi
configSplitObjs :: Flag Bool, -- ^Enable -split-objs with GHC
configStripExes :: Flag Bool, -- ^Enable executable stripping
configStripLibs :: Flag Bool, -- ^Enable library stripping
configConstraints :: [Dependency], -- ^Additional constraints for
-- dependencies.
configDependencies :: [(PackageName, ComponentId)],
-- ^The packages depended on.
configConfigurationsFlags :: FlagAssignment,
configTests :: Flag Bool, -- ^Enable test suite compilation
configBenchmarks :: Flag Bool, -- ^Enable benchmark compilation
configCoverage :: Flag Bool, -- ^Enable program coverage
configLibCoverage :: Flag Bool, -- ^Enable program coverage (deprecated)
configExactConfiguration :: Flag Bool,
-- ^All direct dependencies and flags are provided on the command line by
-- the user via the '--dependency' and '--flags' options.
configFlagError :: Flag String,
-- ^Halt and show an error message indicating an error in flag assignment
configRelocatable :: Flag Bool, -- ^ Enable relocatable package built
configDebugInfo :: Flag DebugInfoLevel, -- ^ Emit debug info.
configAllowOlder :: Maybe AllowOlder, -- ^ dual to 'configAllowNewer'
configAllowNewer :: Maybe AllowNewer
-- ^ Ignore upper bounds on all or some dependencies. Wrapped in 'Maybe' to
-- distinguish between "default" and "explicitly disabled".
}
deriving (Generic, Read, Show)
instance Binary ConfigFlags
-- | More convenient version of 'configPrograms'. Results in an
-- 'error' if internal invariant is violated.
configPrograms :: ConfigFlags -> ProgramDb
configPrograms = maybe (error "FIXME: remove configPrograms") id . getLast' . configPrograms_
instance Eq ConfigFlags where
(==) a b =
-- configPrograms skipped: not user specified, has no Eq instance
equal configProgramPaths
&& equal configProgramArgs
&& equal configProgramPathExtra
&& equal configHcFlavor
&& equal configHcPath
&& equal configHcPkg
&& equal configVanillaLib
&& equal configProfLib
&& equal configSharedLib
&& equal configDynExe
&& equal configProfExe
&& equal configProf
&& equal configProfDetail
&& equal configProfLibDetail
&& equal configConfigureArgs
&& equal configOptimization
&& equal configProgPrefix
&& equal configProgSuffix
&& equal configInstallDirs
&& equal configScratchDir
&& equal configExtraLibDirs
&& equal configExtraIncludeDirs
&& equal configIPID
&& equal configDistPref
&& equal configVerbosity
&& equal configUserInstall
&& equal configPackageDBs
&& equal configGHCiLib
&& equal configSplitObjs
&& equal configStripExes
&& equal configStripLibs
&& equal configConstraints
&& equal configDependencies
&& equal configConfigurationsFlags
&& equal configTests
&& equal configBenchmarks
&& equal configCoverage
&& equal configLibCoverage
&& equal configExactConfiguration
&& equal configFlagError
&& equal configRelocatable
&& equal configDebugInfo
where
equal f = on (==) f a b
configAbsolutePaths :: ConfigFlags -> NoCallStackIO ConfigFlags
configAbsolutePaths f =
(\v -> f { configPackageDBs = v })
`liftM` traverse (maybe (return Nothing) (liftM Just . absolutePackageDBPath))
(configPackageDBs f)
defaultConfigFlags :: ProgramDb -> ConfigFlags
defaultConfigFlags progDb = emptyConfigFlags {
configArgs = [],
configPrograms_ = pure progDb,
configHcFlavor = maybe NoFlag Flag defaultCompilerFlavor,
configVanillaLib = Flag True,
configProfLib = NoFlag,
configSharedLib = NoFlag,
configDynExe = Flag False,
configProfExe = NoFlag,
configProf = NoFlag,
configProfDetail = NoFlag,
configProfLibDetail= NoFlag,
configOptimization = Flag NormalOptimisation,
configProgPrefix = Flag (toPathTemplate ""),
configProgSuffix = Flag (toPathTemplate ""),
configDistPref = NoFlag,
configCabalFilePath = NoFlag,
configVerbosity = Flag normal,
configUserInstall = Flag False, --TODO: reverse this
#if defined(mingw32_HOST_OS)
-- See #1589.
configGHCiLib = Flag True,
#else
configGHCiLib = NoFlag,
#endif
configSplitObjs = Flag False, -- takes longer, so turn off by default
configStripExes = Flag True,
configStripLibs = Flag True,
configTests = Flag False,
configBenchmarks = Flag False,
configCoverage = Flag False,
configLibCoverage = NoFlag,
configExactConfiguration = Flag False,
configFlagError = NoFlag,
configRelocatable = Flag False,
configDebugInfo = Flag NoDebugInfo,
configAllowNewer = Nothing
}
configureCommand :: ProgramDb -> CommandUI ConfigFlags
configureCommand progDb = CommandUI
{ commandName = "configure"
, commandSynopsis = "Prepare to build the package."
, commandDescription = Just $ \_ -> wrapText $
"Configure how the package is built by setting "
++ "package (and other) flags.\n"
++ "\n"
++ "The configuration affects several other commands, "
++ "including build, test, bench, run, repl.\n"
, commandNotes = Just $ \_pname -> programFlagsDescription progDb
, commandUsage = \pname ->
"Usage: " ++ pname ++ " configure [FLAGS]\n"
, commandDefaultFlags = defaultConfigFlags progDb
, commandOptions = \showOrParseArgs ->
configureOptions showOrParseArgs
++ programDbPaths progDb showOrParseArgs
configProgramPaths (\v fs -> fs { configProgramPaths = v })
++ programDbOption progDb showOrParseArgs
configProgramArgs (\v fs -> fs { configProgramArgs = v })
++ programDbOptions progDb showOrParseArgs
configProgramArgs (\v fs -> fs { configProgramArgs = v })
}
configureOptions :: ShowOrParseArgs -> [OptionField ConfigFlags]
configureOptions showOrParseArgs =
[optionVerbosity configVerbosity
(\v flags -> flags { configVerbosity = v })
,optionDistPref
configDistPref (\d flags -> flags { configDistPref = d })
showOrParseArgs
,option [] ["compiler"] "compiler"
configHcFlavor (\v flags -> flags { configHcFlavor = v })
(choiceOpt [ (Flag GHC, ("g", ["ghc"]), "compile with GHC")
, (Flag GHCJS, ([] , ["ghcjs"]), "compile with GHCJS")
, (Flag JHC, ([] , ["jhc"]), "compile with JHC")
, (Flag LHC, ([] , ["lhc"]), "compile with LHC")
, (Flag UHC, ([] , ["uhc"]), "compile with UHC")
-- "haskell-suite" compiler id string will be replaced
-- by a more specific one during the configure stage
, (Flag (HaskellSuite "haskell-suite"), ([] , ["haskell-suite"]),
"compile with a haskell-suite compiler")])
,option "" ["cabal-file"]
"use this Cabal file"
configCabalFilePath (\v flags -> flags { configCabalFilePath = v })
(reqArgFlag "PATH")
,option "w" ["with-compiler"]
"give the path to a particular compiler"
configHcPath (\v flags -> flags { configHcPath = v })
(reqArgFlag "PATH")
,option "" ["with-hc-pkg"]
"give the path to the package tool"
configHcPkg (\v flags -> flags { configHcPkg = v })
(reqArgFlag "PATH")
]
++ map liftInstallDirs installDirsOptions
++ [option "" ["program-prefix"]
"prefix to be applied to installed executables"
configProgPrefix
(\v flags -> flags { configProgPrefix = v })
(reqPathTemplateArgFlag "PREFIX")
,option "" ["program-suffix"]
"suffix to be applied to installed executables"
configProgSuffix (\v flags -> flags { configProgSuffix = v } )
(reqPathTemplateArgFlag "SUFFIX")
,option "" ["library-vanilla"]
"Vanilla libraries"
configVanillaLib (\v flags -> flags { configVanillaLib = v })
(boolOpt [] [])
,option "p" ["library-profiling"]
"Library profiling"
configProfLib (\v flags -> flags { configProfLib = v })
(boolOpt "p" [])
,option "" ["shared"]
"Shared library"
configSharedLib (\v flags -> flags { configSharedLib = v })
(boolOpt [] [])
,option "" ["executable-dynamic"]
"Executable dynamic linking"
configDynExe (\v flags -> flags { configDynExe = v })
(boolOpt [] [])
,option "" ["profiling"]
"Executable and library profiling"
configProf (\v flags -> flags { configProf = v })
(boolOpt [] [])
,option "" ["executable-profiling"]
"Executable profiling (DEPRECATED)"
configProfExe (\v flags -> flags { configProfExe = v })
(boolOpt [] [])
,option "" ["profiling-detail"]
("Profiling detail level for executable and library (default, " ++
"none, exported-functions, toplevel-functions, all-functions).")
configProfDetail (\v flags -> flags { configProfDetail = v })
(reqArg' "level" (Flag . flagToProfDetailLevel)
showProfDetailLevelFlag)
,option "" ["library-profiling-detail"]
"Profiling detail level for libraries only."
configProfLibDetail (\v flags -> flags { configProfLibDetail = v })
(reqArg' "level" (Flag . flagToProfDetailLevel)
showProfDetailLevelFlag)
,multiOption "optimization"
configOptimization (\v flags -> flags { configOptimization = v })
[optArg' "n" (Flag . flagToOptimisationLevel)
(\f -> case f of
Flag NoOptimisation -> []
Flag NormalOptimisation -> [Nothing]
Flag MaximumOptimisation -> [Just "2"]
_ -> [])
"O" ["enable-optimization","enable-optimisation"]
"Build with optimization (n is 0--2, default is 1)",
noArg (Flag NoOptimisation) []
["disable-optimization","disable-optimisation"]
"Build without optimization"
]
,multiOption "debug-info"
configDebugInfo (\v flags -> flags { configDebugInfo = v })
[optArg' "n" (Flag . flagToDebugInfoLevel)
(\f -> case f of
Flag NoDebugInfo -> []
Flag MinimalDebugInfo -> [Just "1"]
Flag NormalDebugInfo -> [Nothing]
Flag MaximalDebugInfo -> [Just "3"]
_ -> [])
"" ["enable-debug-info"]
"Emit debug info (n is 0--3, default is 0)",
noArg (Flag NoDebugInfo) []
["disable-debug-info"]
"Don't emit debug info"
]
,option "" ["library-for-ghci"]
"compile library for use with GHCi"
configGHCiLib (\v flags -> flags { configGHCiLib = v })
(boolOpt [] [])
,option "" ["split-objs"]
"split library into smaller objects to reduce binary sizes (GHC 6.6+)"
configSplitObjs (\v flags -> flags { configSplitObjs = v })
(boolOpt [] [])
,option "" ["executable-stripping"]
"strip executables upon installation to reduce binary sizes"
configStripExes (\v flags -> flags { configStripExes = v })
(boolOpt [] [])
,option "" ["library-stripping"]
"strip libraries upon installation to reduce binary sizes"
configStripLibs (\v flags -> flags { configStripLibs = v })
(boolOpt [] [])
,option "" ["configure-option"]
"Extra option for configure"
configConfigureArgs (\v flags -> flags { configConfigureArgs = v })
(reqArg' "OPT" (\x -> [x]) id)
,option "" ["user-install"]
"doing a per-user installation"
configUserInstall (\v flags -> flags { configUserInstall = v })
(boolOpt' ([],["user"]) ([], ["global"]))
,option "" ["package-db"]
( "Append the given package database to the list of package"
++ " databases used (to satisfy dependencies and register into)."
++ " May be a specific file, 'global' or 'user'. The initial list"
++ " is ['global'], ['global', 'user'], or ['global', $sandbox],"
++ " depending on context. Use 'clear' to reset the list to empty."
++ " See the user guide for details.")
configPackageDBs (\v flags -> flags { configPackageDBs = v })
(reqArg' "DB" readPackageDbList showPackageDbList)
,option "f" ["flags"]
"Force values for the given flags in Cabal conditionals in the .cabal file. E.g., --flags=\"debug -usebytestrings\" forces the flag \"debug\" to true and \"usebytestrings\" to false."
configConfigurationsFlags (\v flags -> flags { configConfigurationsFlags = v })
(reqArg' "FLAGS" readFlagList showFlagList)
,option "" ["extra-include-dirs"]
"A list of directories to search for header files"
configExtraIncludeDirs (\v flags -> flags {configExtraIncludeDirs = v})
(reqArg' "PATH" (\x -> [x]) id)
,option "" ["ipid"]
"Installed package ID to compile this package as"
configIPID (\v flags -> flags {configIPID = v})
(reqArgFlag "IPID")
,option "" ["cid"]
"Installed component ID to compile this component as"
(fmap display . configCID) (\v flags -> flags {configCID = fmap ComponentId v})
(reqArgFlag "CID")
,option "" ["extra-lib-dirs"]
"A list of directories to search for external libraries"
configExtraLibDirs (\v flags -> flags {configExtraLibDirs = v})
(reqArg' "PATH" (\x -> [x]) id)
,option "" ["extra-framework-dirs"]
"A list of directories to search for external frameworks (OS X only)"
configExtraFrameworkDirs
(\v flags -> flags {configExtraFrameworkDirs = v})
(reqArg' "PATH" (\x -> [x]) id)
,option "" ["extra-prog-path"]
"A list of directories to search for required programs (in addition to the normal search locations)"
configProgramPathExtra (\v flags -> flags {configProgramPathExtra = v})
(reqArg' "PATH" (\x -> toNubList [x]) fromNubList)
,option "" ["constraint"]
"A list of additional constraints on the dependencies."
configConstraints (\v flags -> flags { configConstraints = v})
(reqArg "DEPENDENCY"
(readP_to_E (const "dependency expected") ((\x -> [x]) `fmap` parse))
(map (\x -> display x)))
,option "" ["dependency"]
"A list of exact dependencies. E.g., --dependency=\"void=void-0.5.8-177d5cdf20962d0581fe2e4932a6c309\""
configDependencies (\v flags -> flags { configDependencies = v})
(reqArg "NAME=CID"
(readP_to_E (const "dependency expected") ((\x -> [x]) `fmap` parseDependency))
(map (\x -> display (fst x) ++ "=" ++ display (snd x))))
,option "" ["tests"]
"dependency checking and compilation for test suites listed in the package description file."
configTests (\v flags -> flags { configTests = v })
(boolOpt [] [])
,option "" ["coverage"]
"build package with Haskell Program Coverage. (GHC only)"
configCoverage (\v flags -> flags { configCoverage = v })
(boolOpt [] [])
,option "" ["library-coverage"]
"build package with Haskell Program Coverage. (GHC only) (DEPRECATED)"
configLibCoverage (\v flags -> flags { configLibCoverage = v })
(boolOpt [] [])
,option [] ["allow-older"]
("Ignore upper bounds in all dependencies or DEPS")
(fmap unAllowOlder . configAllowOlder)
(\v flags -> flags { configAllowOlder = fmap AllowOlder v})
(optArg "DEPS"
(readP_to_E ("Cannot parse the list of packages: " ++) relaxDepsParser)
(Just RelaxDepsAll) relaxDepsPrinter)
,option [] ["allow-newer"]
("Ignore upper bounds in all dependencies or DEPS")
(fmap unAllowNewer . configAllowNewer)
(\v flags -> flags { configAllowNewer = fmap AllowNewer v})
(optArg "DEPS"
(readP_to_E ("Cannot parse the list of packages: " ++) relaxDepsParser)
(Just RelaxDepsAll) relaxDepsPrinter)
,option "" ["exact-configuration"]
"All direct dependencies and flags are provided on the command line."
configExactConfiguration
(\v flags -> flags { configExactConfiguration = v })
trueArg
,option "" ["benchmarks"]
"dependency checking and compilation for benchmarks listed in the package description file."
configBenchmarks (\v flags -> flags { configBenchmarks = v })
(boolOpt [] [])
,option "" ["relocatable"]
"building a package that is relocatable. (GHC only)"
configRelocatable (\v flags -> flags { configRelocatable = v})
(boolOpt [] [])
]
where
readFlagList :: String -> FlagAssignment
readFlagList = map tagWithValue . words
where tagWithValue ('-':fname) = (FlagName (lowercase fname), False)
tagWithValue fname = (FlagName (lowercase fname), True)
showFlagList :: FlagAssignment -> [String]
showFlagList fs = [ if not set then '-':fname else fname
| (FlagName fname, set) <- fs]
liftInstallDirs =
liftOption configInstallDirs (\v flags -> flags { configInstallDirs = v })
reqPathTemplateArgFlag title _sf _lf d get set =
reqArgFlag title _sf _lf d
(fmap fromPathTemplate . get) (set . fmap toPathTemplate)
readPackageDbList :: String -> [Maybe PackageDB]
readPackageDbList "clear" = [Nothing]
readPackageDbList "global" = [Just GlobalPackageDB]
readPackageDbList "user" = [Just UserPackageDB]
readPackageDbList other = [Just (SpecificPackageDB other)]
showPackageDbList :: [Maybe PackageDB] -> [String]
showPackageDbList = map showPackageDb
where
showPackageDb Nothing = "clear"
showPackageDb (Just GlobalPackageDB) = "global"
showPackageDb (Just UserPackageDB) = "user"
showPackageDb (Just (SpecificPackageDB db)) = db
showProfDetailLevelFlag :: Flag ProfDetailLevel -> [String]
showProfDetailLevelFlag NoFlag = []
showProfDetailLevelFlag (Flag dl) = [showProfDetailLevel dl]
parseDependency :: Parse.ReadP r (PackageName, ComponentId)
parseDependency = do
x <- parse
_ <- Parse.char '='
y <- parse
return (x, y)
installDirsOptions :: [OptionField (InstallDirs (Flag PathTemplate))]
installDirsOptions =
[ option "" ["prefix"]
"bake this prefix in preparation of installation"
prefix (\v flags -> flags { prefix = v })
installDirArg
, option "" ["bindir"]
"installation directory for executables"
bindir (\v flags -> flags { bindir = v })
installDirArg
, option "" ["libdir"]
"installation directory for libraries"
libdir (\v flags -> flags { libdir = v })
installDirArg
, option "" ["libsubdir"]
"subdirectory of libdir in which libs are installed"
libsubdir (\v flags -> flags { libsubdir = v })
installDirArg
, option "" ["libexecdir"]
"installation directory for program executables"
libexecdir (\v flags -> flags { libexecdir = v })
installDirArg
, option "" ["datadir"]
"installation directory for read-only data"
datadir (\v flags -> flags { datadir = v })
installDirArg
, option "" ["datasubdir"]
"subdirectory of datadir in which data files are installed"
datasubdir (\v flags -> flags { datasubdir = v })
installDirArg
, option "" ["docdir"]
"installation directory for documentation"
docdir (\v flags -> flags { docdir = v })
installDirArg
, option "" ["htmldir"]
"installation directory for HTML documentation"
htmldir (\v flags -> flags { htmldir = v })
installDirArg
, option "" ["haddockdir"]
"installation directory for haddock interfaces"
haddockdir (\v flags -> flags { haddockdir = v })
installDirArg
, option "" ["sysconfdir"]
"installation directory for configuration files"
sysconfdir (\v flags -> flags { sysconfdir = v })
installDirArg
]
where
installDirArg _sf _lf d get set =
reqArgFlag "DIR" _sf _lf d
(fmap fromPathTemplate . get) (set . fmap toPathTemplate)
emptyConfigFlags :: ConfigFlags
emptyConfigFlags = mempty
instance Monoid ConfigFlags where
mempty = gmempty
mappend = (<>)
instance Semigroup ConfigFlags where
(<>) = gmappend
-- ------------------------------------------------------------
-- * Copy flags
-- ------------------------------------------------------------
-- | Flags to @copy@: (destdir, copy-prefix (backwards compat), verbosity)
data CopyFlags = CopyFlags {
copyDest :: Flag CopyDest,
copyDistPref :: Flag FilePath,
copyVerbosity :: Flag Verbosity,
copyAssumeDepsUpToDate :: Flag Bool,
-- This is the same hack as in 'buildArgs'. But I (ezyang) don't
-- think it's a hack, it's the right way to make hooks more robust
-- TODO: Stop using this eventually when 'UserHooks' gets changed
copyArgs :: [String]
}
deriving (Show, Generic)
defaultCopyFlags :: CopyFlags
defaultCopyFlags = CopyFlags {
copyDest = Flag NoCopyDest,
copyDistPref = NoFlag,
copyVerbosity = Flag normal,
copyAssumeDepsUpToDate = Flag False,
copyArgs = []
}
copyCommand :: CommandUI CopyFlags
copyCommand = CommandUI
{ commandName = "copy"
, commandSynopsis = "Copy the files of all/specific components to install locations."
, commandDescription = Just $ \_ -> wrapText $
"Components encompass executables and libraries."
++ "Does not call register, and allows a prefix at install time. "
++ "Without the --destdir flag, configure determines location.\n"
, commandNotes = Just $ \pname ->
"Examples:\n"
++ " " ++ pname ++ " build "
++ " All the components in the package\n"
++ " " ++ pname ++ " build foo "
++ " A component (i.e. lib, exe, test suite)"
, commandUsage = usageAlternatives "copy" $
[ "[FLAGS]"
, "COMPONENTS [FLAGS]"
]
, commandDefaultFlags = defaultCopyFlags
, commandOptions = \showOrParseArgs ->
[optionVerbosity copyVerbosity (\v flags -> flags { copyVerbosity = v })
,optionDistPref
copyDistPref (\d flags -> flags { copyDistPref = d })
showOrParseArgs
, option "" ["assume-deps-up-to-date"]
"One-shot copy"
copyAssumeDepsUpToDate (\c flags -> flags { copyAssumeDepsUpToDate = c })
trueArg
,option "" ["destdir"]
"directory to copy files to, prepended to installation directories"
copyDest (\v flags -> flags { copyDest = v })
(reqArg "DIR" (succeedReadE (Flag . CopyTo))
(\f -> case f of Flag (CopyTo p) -> [p]; _ -> []))
]
}
emptyCopyFlags :: CopyFlags
emptyCopyFlags = mempty
instance Monoid CopyFlags where
mempty = gmempty
mappend = (<>)
instance Semigroup CopyFlags where
(<>) = gmappend
-- ------------------------------------------------------------
-- * Install flags
-- ------------------------------------------------------------
-- | Flags to @install@: (package db, verbosity)
data InstallFlags = InstallFlags {
installPackageDB :: Flag PackageDB,
installDistPref :: Flag FilePath,
installUseWrapper :: Flag Bool,
installInPlace :: Flag Bool,
installVerbosity :: Flag Verbosity
}
deriving (Show, Generic)
defaultInstallFlags :: InstallFlags
defaultInstallFlags = InstallFlags {
installPackageDB = NoFlag,
installDistPref = NoFlag,
installUseWrapper = Flag False,
installInPlace = Flag False,
installVerbosity = Flag normal
}
installCommand :: CommandUI InstallFlags
installCommand = CommandUI
{ commandName = "install"
, commandSynopsis =
"Copy the files into the install locations. Run register."
, commandDescription = Just $ \_ -> wrapText $
"Unlike the copy command, install calls the register command."
++ "If you want to install into a location that is not what was"
++ "specified in the configure step, use the copy command.\n"
, commandNotes = Nothing
, commandUsage = \pname ->
"Usage: " ++ pname ++ " install [FLAGS]\n"
, commandDefaultFlags = defaultInstallFlags
, commandOptions = \showOrParseArgs ->
[optionVerbosity installVerbosity (\v flags -> flags { installVerbosity = v })
,optionDistPref
installDistPref (\d flags -> flags { installDistPref = d })
showOrParseArgs
,option "" ["inplace"]
"install the package in the install subdirectory of the dist prefix, so it can be used without being installed"
installInPlace (\v flags -> flags { installInPlace = v })
trueArg
,option "" ["shell-wrappers"]
"using shell script wrappers around executables"
installUseWrapper (\v flags -> flags { installUseWrapper = v })
(boolOpt [] [])
,option "" ["package-db"] ""
installPackageDB (\v flags -> flags { installPackageDB = v })
(choiceOpt [ (Flag UserPackageDB, ([],["user"]),
"upon configuration register this package in the user's local package database")
, (Flag GlobalPackageDB, ([],["global"]),
"(default) upon configuration register this package in the system-wide package database")])
]
}
emptyInstallFlags :: InstallFlags
emptyInstallFlags = mempty
instance Monoid InstallFlags where
mempty = gmempty
mappend = (<>)
instance Semigroup InstallFlags where
(<>) = gmappend
-- ------------------------------------------------------------
-- * SDist flags
-- ------------------------------------------------------------
-- | Flags to @sdist@: (snapshot, verbosity)
data SDistFlags = SDistFlags {
sDistSnapshot :: Flag Bool,
sDistDirectory :: Flag FilePath,
sDistDistPref :: Flag FilePath,
sDistListSources :: Flag FilePath,
sDistVerbosity :: Flag Verbosity
}
deriving (Show, Generic)
defaultSDistFlags :: SDistFlags
defaultSDistFlags = SDistFlags {
sDistSnapshot = Flag False,
sDistDirectory = mempty,
sDistDistPref = NoFlag,
sDistListSources = mempty,
sDistVerbosity = Flag normal
}
sdistCommand :: CommandUI SDistFlags
sdistCommand = CommandUI
{ commandName = "sdist"
, commandSynopsis =
"Generate a source distribution file (.tar.gz)."
, commandDescription = Nothing
, commandNotes = Nothing
, commandUsage = \pname ->
"Usage: " ++ pname ++ " sdist [FLAGS]\n"
, commandDefaultFlags = defaultSDistFlags
, commandOptions = \showOrParseArgs ->
[optionVerbosity sDistVerbosity (\v flags -> flags { sDistVerbosity = v })
,optionDistPref
sDistDistPref (\d flags -> flags { sDistDistPref = d })
showOrParseArgs
,option "" ["list-sources"]
"Just write a list of the package's sources to a file"
sDistListSources (\v flags -> flags { sDistListSources = v })
(reqArgFlag "FILE")
,option "" ["snapshot"]
"Produce a snapshot source distribution"
sDistSnapshot (\v flags -> flags { sDistSnapshot = v })
trueArg
,option "" ["output-directory"]
("Generate a source distribution in the given directory, "
++ "without creating a tarball")
sDistDirectory (\v flags -> flags { sDistDirectory = v })
(reqArgFlag "DIR")
]
}
emptySDistFlags :: SDistFlags
emptySDistFlags = mempty
instance Monoid SDistFlags where
mempty = gmempty
mappend = (<>)
instance Semigroup SDistFlags where
(<>) = gmappend
-- ------------------------------------------------------------
-- * Register flags
-- ------------------------------------------------------------
-- | Flags to @register@ and @unregister@: (user package, gen-script,
-- in-place, verbosity)
data RegisterFlags = RegisterFlags {
regPackageDB :: Flag PackageDB,
regGenScript :: Flag Bool,
regGenPkgConf :: Flag (Maybe FilePath),
regInPlace :: Flag Bool,
regDistPref :: Flag FilePath,
regPrintId :: Flag Bool,
regVerbosity :: Flag Verbosity,
-- | If this is true, we don't register all libraries,
-- only directly referenced library in 'regArgs'.
regAssumeDepsUpToDate :: Flag Bool,
-- Same as in 'buildArgs' and 'copyArgs'
regArgs :: [String]
}
deriving (Show, Generic)
defaultRegisterFlags :: RegisterFlags
defaultRegisterFlags = RegisterFlags {
regPackageDB = NoFlag,
regGenScript = Flag False,
regGenPkgConf = NoFlag,
regInPlace = Flag False,
regDistPref = NoFlag,
regPrintId = Flag False,
regVerbosity = Flag normal,
regAssumeDepsUpToDate = Flag False,
regArgs = []
}
registerCommand :: CommandUI RegisterFlags
registerCommand = CommandUI
{ commandName = "register"
, commandSynopsis =
"Register this package with the compiler."
, commandDescription = Nothing
, commandNotes = Nothing
, commandUsage = \pname ->
"Usage: " ++ pname ++ " register [FLAGS]\n"
, commandDefaultFlags = defaultRegisterFlags
, commandOptions = \showOrParseArgs ->
[optionVerbosity regVerbosity (\v flags -> flags { regVerbosity = v })
,optionDistPref
regDistPref (\d flags -> flags { regDistPref = d })
showOrParseArgs
,option "" ["packageDB"] ""
regPackageDB (\v flags -> flags { regPackageDB = v })
(choiceOpt [ (Flag UserPackageDB, ([],["user"]),
"upon registration, register this package in the user's local package database")
, (Flag GlobalPackageDB, ([],["global"]),
"(default)upon registration, register this package in the system-wide package database")])
,option "" ["inplace"]
"register the package in the build location, so it can be used without being installed"
regInPlace (\v flags -> flags { regInPlace = v })
trueArg
,option "" ["assume-deps-up-to-date"]
"One-shot registration"
regAssumeDepsUpToDate (\c flags -> flags { regAssumeDepsUpToDate = c })
trueArg
,option "" ["gen-script"]
"instead of registering, generate a script to register later"
regGenScript (\v flags -> flags { regGenScript = v })
trueArg
,option "" ["gen-pkg-config"]
"instead of registering, generate a package registration file/directory"
regGenPkgConf (\v flags -> flags { regGenPkgConf = v })
(optArg' "PKG" Flag flagToList)
,option "" ["print-ipid"]
"print the installed package ID calculated for this package"
regPrintId (\v flags -> flags { regPrintId = v })
trueArg
]
}
unregisterCommand :: CommandUI RegisterFlags
unregisterCommand = CommandUI
{ commandName = "unregister"
, commandSynopsis =
"Unregister this package with the compiler."
, commandDescription = Nothing
, commandNotes = Nothing
, commandUsage = \pname ->
"Usage: " ++ pname ++ " unregister [FLAGS]\n"
, commandDefaultFlags = defaultRegisterFlags
, commandOptions = \showOrParseArgs ->
[optionVerbosity regVerbosity (\v flags -> flags { regVerbosity = v })
,optionDistPref
regDistPref (\d flags -> flags { regDistPref = d })
showOrParseArgs
,option "" ["user"] ""
regPackageDB (\v flags -> flags { regPackageDB = v })
(choiceOpt [ (Flag UserPackageDB, ([],["user"]),
"unregister this package in the user's local package database")
, (Flag GlobalPackageDB, ([],["global"]),
"(default) unregister this package in the system-wide package database")])
,option "" ["gen-script"]
"Instead of performing the unregister command, generate a script to unregister later"
regGenScript (\v flags -> flags { regGenScript = v })
trueArg
]
}
emptyRegisterFlags :: RegisterFlags
emptyRegisterFlags = mempty
instance Monoid RegisterFlags where
mempty = gmempty
mappend = (<>)
instance Semigroup RegisterFlags where
(<>) = gmappend
-- ------------------------------------------------------------
-- * HsColour flags
-- ------------------------------------------------------------
data HscolourFlags = HscolourFlags {
hscolourCSS :: Flag FilePath,
hscolourExecutables :: Flag Bool,
hscolourTestSuites :: Flag Bool,
hscolourBenchmarks :: Flag Bool,
hscolourDistPref :: Flag FilePath,
hscolourVerbosity :: Flag Verbosity
}
deriving (Show, Generic)
emptyHscolourFlags :: HscolourFlags
emptyHscolourFlags = mempty
defaultHscolourFlags :: HscolourFlags
defaultHscolourFlags = HscolourFlags {
hscolourCSS = NoFlag,
hscolourExecutables = Flag False,
hscolourTestSuites = Flag False,
hscolourBenchmarks = Flag False,
hscolourDistPref = NoFlag,
hscolourVerbosity = Flag normal
}
instance Monoid HscolourFlags where
mempty = gmempty
mappend = (<>)
instance Semigroup HscolourFlags where
(<>) = gmappend
hscolourCommand :: CommandUI HscolourFlags
hscolourCommand = CommandUI
{ commandName = "hscolour"
, commandSynopsis =
"Generate HsColour colourised code, in HTML format."
, commandDescription = Just (\_ -> "Requires the hscolour program.\n")
, commandNotes = Nothing
, commandUsage = \pname ->
"Usage: " ++ pname ++ " hscolour [FLAGS]\n"
, commandDefaultFlags = defaultHscolourFlags
, commandOptions = \showOrParseArgs ->
[optionVerbosity hscolourVerbosity
(\v flags -> flags { hscolourVerbosity = v })
,optionDistPref
hscolourDistPref (\d flags -> flags { hscolourDistPref = d })
showOrParseArgs
,option "" ["executables"]
"Run hscolour for Executables targets"
hscolourExecutables (\v flags -> flags { hscolourExecutables = v })
trueArg
,option "" ["tests"]
"Run hscolour for Test Suite targets"
hscolourTestSuites (\v flags -> flags { hscolourTestSuites = v })
trueArg
,option "" ["benchmarks"]
"Run hscolour for Benchmark targets"
hscolourBenchmarks (\v flags -> flags { hscolourBenchmarks = v })
trueArg
,option "" ["all"]
"Run hscolour for all targets"
(\f -> allFlags [ hscolourExecutables f
, hscolourTestSuites f
, hscolourBenchmarks f])
(\v flags -> flags { hscolourExecutables = v
, hscolourTestSuites = v
, hscolourBenchmarks = v })
trueArg
,option "" ["css"]
"Use a cascading style sheet"
hscolourCSS (\v flags -> flags { hscolourCSS = v })
(reqArgFlag "PATH")
]
}
-- ------------------------------------------------------------
-- * Haddock flags
-- ------------------------------------------------------------
-- | When we build haddock documentation, there are two cases:
--
-- 1. We build haddocks only for the current development version,
-- intended for local use and not for distribution. In this case,
-- we store the generated documentation in @<dist>/doc/html/<package name>@.
--
-- 2. We build haddocks intended for uploading to hackage.
-- In this case, we need to follow the layout that hackage expects
-- from documentation tarballs, and we might also want to use different
-- flags than for development builds, so in this case we store the generated
-- documentation in @<dist>/doc/html/<package id>-docs@.
data HaddockTarget = ForHackage | ForDevelopment deriving (Eq, Show, Generic)
data HaddockFlags = HaddockFlags {
haddockProgramPaths :: [(String, FilePath)],
haddockProgramArgs :: [(String, [String])],
haddockHoogle :: Flag Bool,
haddockHtml :: Flag Bool,
haddockHtmlLocation :: Flag String,
haddockForHackage :: Flag HaddockTarget,
haddockExecutables :: Flag Bool,
haddockTestSuites :: Flag Bool,
haddockBenchmarks :: Flag Bool,
haddockInternal :: Flag Bool,
haddockCss :: Flag FilePath,
haddockHscolour :: Flag Bool,
haddockHscolourCss :: Flag FilePath,
haddockContents :: Flag PathTemplate,
haddockDistPref :: Flag FilePath,
haddockKeepTempFiles:: Flag Bool,
haddockVerbosity :: Flag Verbosity
}
deriving (Show, Generic)
defaultHaddockFlags :: HaddockFlags
defaultHaddockFlags = HaddockFlags {
haddockProgramPaths = mempty,
haddockProgramArgs = [],
haddockHoogle = Flag False,
haddockHtml = Flag False,
haddockHtmlLocation = NoFlag,
haddockForHackage = Flag ForDevelopment,
haddockExecutables = Flag False,
haddockTestSuites = Flag False,
haddockBenchmarks = Flag False,
haddockInternal = Flag False,
haddockCss = NoFlag,
haddockHscolour = Flag False,
haddockHscolourCss = NoFlag,
haddockContents = NoFlag,
haddockDistPref = NoFlag,
haddockKeepTempFiles= Flag False,
haddockVerbosity = Flag normal
}
haddockCommand :: CommandUI HaddockFlags
haddockCommand = CommandUI
{ commandName = "haddock"
, commandSynopsis = "Generate Haddock HTML documentation."
, commandDescription = Just $ \_ ->
"Requires the program haddock, version 2.x.\n"
, commandNotes = Nothing
, commandUsage = \pname ->
"Usage: " ++ pname ++ " haddock [FLAGS]\n"
, commandDefaultFlags = defaultHaddockFlags
, commandOptions = \showOrParseArgs ->
haddockOptions showOrParseArgs
++ programDbPaths progDb ParseArgs
haddockProgramPaths (\v flags -> flags { haddockProgramPaths = v})
++ programDbOption progDb showOrParseArgs
haddockProgramArgs (\v fs -> fs { haddockProgramArgs = v })
++ programDbOptions progDb ParseArgs
haddockProgramArgs (\v flags -> flags { haddockProgramArgs = v})
}
where
progDb = addKnownProgram haddockProgram
$ addKnownProgram ghcProgram
$ emptyProgramDb
haddockOptions :: ShowOrParseArgs -> [OptionField HaddockFlags]
haddockOptions showOrParseArgs =
[optionVerbosity haddockVerbosity
(\v flags -> flags { haddockVerbosity = v })
,optionDistPref
haddockDistPref (\d flags -> flags { haddockDistPref = d })
showOrParseArgs
,option "" ["keep-temp-files"]
"Keep temporary files"
haddockKeepTempFiles (\b flags -> flags { haddockKeepTempFiles = b })
trueArg
,option "" ["hoogle"]
"Generate a hoogle database"
haddockHoogle (\v flags -> flags { haddockHoogle = v })
trueArg
,option "" ["html"]
"Generate HTML documentation (the default)"
haddockHtml (\v flags -> flags { haddockHtml = v })
trueArg
,option "" ["html-location"]
"Location of HTML documentation for pre-requisite packages"
haddockHtmlLocation (\v flags -> flags { haddockHtmlLocation = v })
(reqArgFlag "URL")
,option "" ["for-hackage"]
"Collection of flags to generate documentation suitable for upload to hackage"
haddockForHackage (\v flags -> flags { haddockForHackage = v })
(noArg (Flag ForHackage))
,option "" ["executables"]
"Run haddock for Executables targets"
haddockExecutables (\v flags -> flags { haddockExecutables = v })
trueArg
,option "" ["tests"]
"Run haddock for Test Suite targets"
haddockTestSuites (\v flags -> flags { haddockTestSuites = v })
trueArg
,option "" ["benchmarks"]
"Run haddock for Benchmark targets"
haddockBenchmarks (\v flags -> flags { haddockBenchmarks = v })
trueArg
,option "" ["all"]
"Run haddock for all targets"
(\f -> allFlags [ haddockExecutables f
, haddockTestSuites f
, haddockBenchmarks f])
(\v flags -> flags { haddockExecutables = v
, haddockTestSuites = v
, haddockBenchmarks = v })
trueArg
,option "" ["internal"]
"Run haddock for internal modules and include all symbols"
haddockInternal (\v flags -> flags { haddockInternal = v })
trueArg
,option "" ["css"]
"Use PATH as the haddock stylesheet"
haddockCss (\v flags -> flags { haddockCss = v })
(reqArgFlag "PATH")
,option "" ["hyperlink-source","hyperlink-sources"]
"Hyperlink the documentation to the source code (using HsColour)"
haddockHscolour (\v flags -> flags { haddockHscolour = v })
trueArg
,option "" ["hscolour-css"]
"Use PATH as the HsColour stylesheet"
haddockHscolourCss (\v flags -> flags { haddockHscolourCss = v })
(reqArgFlag "PATH")
,option "" ["contents-location"]
"Bake URL in as the location for the contents page"
haddockContents (\v flags -> flags { haddockContents = v })
(reqArg' "URL"
(toFlag . toPathTemplate)
(flagToList . fmap fromPathTemplate))
]
emptyHaddockFlags :: HaddockFlags
emptyHaddockFlags = mempty
instance Monoid HaddockFlags where
mempty = gmempty
mappend = (<>)
instance Semigroup HaddockFlags where
(<>) = gmappend
-- ------------------------------------------------------------
-- * Clean flags
-- ------------------------------------------------------------
data CleanFlags = CleanFlags {
cleanSaveConf :: Flag Bool,
cleanDistPref :: Flag FilePath,
cleanVerbosity :: Flag Verbosity
}
deriving (Show, Generic)
defaultCleanFlags :: CleanFlags
defaultCleanFlags = CleanFlags {
cleanSaveConf = Flag False,
cleanDistPref = NoFlag,
cleanVerbosity = Flag normal
}
cleanCommand :: CommandUI CleanFlags
cleanCommand = CommandUI
{ commandName = "clean"
, commandSynopsis = "Clean up after a build."
, commandDescription = Just $ \_ ->
"Removes .hi, .o, preprocessed sources, etc.\n"
, commandNotes = Nothing
, commandUsage = \pname ->
"Usage: " ++ pname ++ " clean [FLAGS]\n"
, commandDefaultFlags = defaultCleanFlags
, commandOptions = \showOrParseArgs ->
[optionVerbosity cleanVerbosity (\v flags -> flags { cleanVerbosity = v })
,optionDistPref
cleanDistPref (\d flags -> flags { cleanDistPref = d })
showOrParseArgs
,option "s" ["save-configure"]
"Do not remove the configuration file (dist/setup-config) during cleaning. Saves need to reconfigure."
cleanSaveConf (\v flags -> flags { cleanSaveConf = v })
trueArg
]
}
emptyCleanFlags :: CleanFlags
emptyCleanFlags = mempty
instance Monoid CleanFlags where
mempty = gmempty
mappend = (<>)
instance Semigroup CleanFlags where
(<>) = gmappend
-- ------------------------------------------------------------
-- * Build flags
-- ------------------------------------------------------------
data BuildFlags = BuildFlags {
buildProgramPaths :: [(String, FilePath)],
buildProgramArgs :: [(String, [String])],
buildDistPref :: Flag FilePath,
buildVerbosity :: Flag Verbosity,
buildNumJobs :: Flag (Maybe Int),
-- | If this is true, we don't build the dependencies of
-- 'buildArgs': only the directly referenced components.
buildAssumeDepsUpToDate :: Flag Bool,
-- TODO: this one should not be here, it's just that the silly
-- UserHooks stop us from passing extra info in other ways
buildArgs :: [String]
}
deriving (Show, Generic)
{-# DEPRECATED buildVerbose "Use buildVerbosity instead" #-}
buildVerbose :: BuildFlags -> Verbosity
buildVerbose = fromFlagOrDefault normal . buildVerbosity
defaultBuildFlags :: BuildFlags
defaultBuildFlags = BuildFlags {
buildProgramPaths = mempty,
buildProgramArgs = [],
buildDistPref = mempty,
buildVerbosity = Flag normal,
buildNumJobs = mempty,
buildAssumeDepsUpToDate = Flag False,
buildArgs = []
}
buildCommand :: ProgramDb -> CommandUI BuildFlags
buildCommand progDb = CommandUI
{ commandName = "build"
, commandSynopsis = "Compile all/specific components."
, commandDescription = Just $ \_ -> wrapText $
"Components encompass executables, tests, and benchmarks.\n"
++ "\n"
++ "Affected by configuration options, see `configure`.\n"
, commandNotes = Just $ \pname ->
"Examples:\n"
++ " " ++ pname ++ " build "
++ " All the components in the package\n"
++ " " ++ pname ++ " build foo "
++ " A component (i.e. lib, exe, test suite)\n\n"
++ programFlagsDescription progDb
--TODO: re-enable once we have support for module/file targets
-- ++ " " ++ pname ++ " build Foo.Bar "
-- ++ " A module\n"
-- ++ " " ++ pname ++ " build Foo/Bar.hs"
-- ++ " A file\n\n"
-- ++ "If a target is ambiguous it can be qualified with the component "
-- ++ "name, e.g.\n"
-- ++ " " ++ pname ++ " build foo:Foo.Bar\n"
-- ++ " " ++ pname ++ " build testsuite1:Foo/Bar.hs\n"
, commandUsage = usageAlternatives "build" $
[ "[FLAGS]"
, "COMPONENTS [FLAGS]"
]
, commandDefaultFlags = defaultBuildFlags
, commandOptions = \showOrParseArgs ->
[ optionVerbosity
buildVerbosity (\v flags -> flags { buildVerbosity = v })
, optionDistPref
buildDistPref (\d flags -> flags { buildDistPref = d }) showOrParseArgs
]
++ buildOptions progDb showOrParseArgs
}
buildOptions :: ProgramDb -> ShowOrParseArgs
-> [OptionField BuildFlags]
buildOptions progDb showOrParseArgs =
[ optionNumJobs
buildNumJobs (\v flags -> flags { buildNumJobs = v })
, option "" ["assume-deps-up-to-date"]
"One-shot build"
buildAssumeDepsUpToDate (\c flags -> flags { buildAssumeDepsUpToDate = c })
trueArg
]
++ programDbPaths progDb showOrParseArgs
buildProgramPaths (\v flags -> flags { buildProgramPaths = v})
++ programDbOption progDb showOrParseArgs
buildProgramArgs (\v fs -> fs { buildProgramArgs = v })
++ programDbOptions progDb showOrParseArgs
buildProgramArgs (\v flags -> flags { buildProgramArgs = v})
emptyBuildFlags :: BuildFlags
emptyBuildFlags = mempty
instance Monoid BuildFlags where
mempty = gmempty
mappend = (<>)
instance Semigroup BuildFlags where
(<>) = gmappend
-- ------------------------------------------------------------
-- * REPL Flags
-- ------------------------------------------------------------
data ReplFlags = ReplFlags {
replProgramPaths :: [(String, FilePath)],
replProgramArgs :: [(String, [String])],
replDistPref :: Flag FilePath,
replVerbosity :: Flag Verbosity,
replReload :: Flag Bool
}
deriving (Show, Generic)
defaultReplFlags :: ReplFlags
defaultReplFlags = ReplFlags {
replProgramPaths = mempty,
replProgramArgs = [],
replDistPref = NoFlag,
replVerbosity = Flag normal,
replReload = Flag False
}
instance Monoid ReplFlags where
mempty = gmempty
mappend = (<>)
instance Semigroup ReplFlags where
(<>) = gmappend
replCommand :: ProgramDb -> CommandUI ReplFlags
replCommand progDb = CommandUI
{ commandName = "repl"
, commandSynopsis =
"Open an interpreter session for the given component."
, commandDescription = Just $ \pname -> wrapText $
"If the current directory contains no package, ignores COMPONENT "
++ "parameters and opens an interactive interpreter session; if a "
++ "sandbox is present, its package database will be used.\n"
++ "\n"
++ "Otherwise, (re)configures with the given or default flags, and "
++ "loads the interpreter with the relevant modules. For executables, "
++ "tests and benchmarks, loads the main module (and its "
++ "dependencies); for libraries all exposed/other modules.\n"
++ "\n"
++ "The default component is the library itself, or the executable "
++ "if that is the only component.\n"
++ "\n"
++ "Support for loading specific modules is planned but not "
++ "implemented yet. For certain scenarios, `" ++ pname
++ " exec -- ghci :l Foo` may be used instead. Note that `exec` will "
++ "not (re)configure and you will have to specify the location of "
++ "other modules, if required.\n"
, commandNotes = Just $ \pname ->
"Examples:\n"
++ " " ++ pname ++ " repl "
++ " The first component in the package\n"
++ " " ++ pname ++ " repl foo "
++ " A named component (i.e. lib, exe, test suite)\n"
++ " " ++ pname ++ " repl --ghc-options=\"-lstdc++\""
++ " Specifying flags for interpreter\n"
--TODO: re-enable once we have support for module/file targets
-- ++ " " ++ pname ++ " repl Foo.Bar "
-- ++ " A module\n"
-- ++ " " ++ pname ++ " repl Foo/Bar.hs"
-- ++ " A file\n\n"
-- ++ "If a target is ambiguous it can be qualified with the component "
-- ++ "name, e.g.\n"
-- ++ " " ++ pname ++ " repl foo:Foo.Bar\n"
-- ++ " " ++ pname ++ " repl testsuite1:Foo/Bar.hs\n"
, commandUsage = \pname -> "Usage: " ++ pname ++ " repl [COMPONENT] [FLAGS]\n"
, commandDefaultFlags = defaultReplFlags
, commandOptions = \showOrParseArgs ->
optionVerbosity replVerbosity (\v flags -> flags { replVerbosity = v })
: optionDistPref
replDistPref (\d flags -> flags { replDistPref = d })
showOrParseArgs
: programDbPaths progDb showOrParseArgs
replProgramPaths (\v flags -> flags { replProgramPaths = v})
++ programDbOption progDb showOrParseArgs
replProgramArgs (\v flags -> flags { replProgramArgs = v})
++ programDbOptions progDb showOrParseArgs
replProgramArgs (\v flags -> flags { replProgramArgs = v})
++ case showOrParseArgs of
ParseArgs ->
[ option "" ["reload"]
"Used from within an interpreter to update files."
replReload (\v flags -> flags { replReload = v })
trueArg
]
_ -> []
}
-- ------------------------------------------------------------
-- * Test flags
-- ------------------------------------------------------------
data TestShowDetails = Never | Failures | Always | Streaming | Direct
deriving (Eq, Ord, Enum, Bounded, Show)
knownTestShowDetails :: [TestShowDetails]
knownTestShowDetails = [minBound..maxBound]
instance Text TestShowDetails where
disp = Disp.text . lowercase . show
parse = maybe Parse.pfail return . classify =<< ident
where
ident = Parse.munch1 (\c -> isAlpha c || c == '_' || c == '-')
classify str = lookup (lowercase str) enumMap
enumMap :: [(String, TestShowDetails)]
enumMap = [ (display x, x)
| x <- knownTestShowDetails ]
--TODO: do we need this instance?
instance Monoid TestShowDetails where
mempty = Never
mappend = (<>)
instance Semigroup TestShowDetails where
a <> b = if a < b then b else a
data TestFlags = TestFlags {
testDistPref :: Flag FilePath,
testVerbosity :: Flag Verbosity,
testHumanLog :: Flag PathTemplate,
testMachineLog :: Flag PathTemplate,
testShowDetails :: Flag TestShowDetails,
testKeepTix :: Flag Bool,
-- TODO: think about if/how options are passed to test exes
testOptions :: [PathTemplate]
} deriving (Generic)
defaultTestFlags :: TestFlags
defaultTestFlags = TestFlags {
testDistPref = NoFlag,
testVerbosity = Flag normal,
testHumanLog = toFlag $ toPathTemplate $ "$pkgid-$test-suite.log",
testMachineLog = toFlag $ toPathTemplate $ "$pkgid.log",
testShowDetails = toFlag Failures,
testKeepTix = toFlag False,
testOptions = []
}
testCommand :: CommandUI TestFlags
testCommand = CommandUI
{ commandName = "test"
, commandSynopsis =
"Run all/specific tests in the test suite."
, commandDescription = Just $ \pname -> wrapText $
"If necessary (re)configures with `--enable-tests` flag and builds"
++ " the test suite.\n"
++ "\n"
++ "Remember that the tests' dependencies must be installed if there"
++ " are additional ones; e.g. with `" ++ pname
++ " install --only-dependencies --enable-tests`.\n"
++ "\n"
++ "By defining UserHooks in a custom Setup.hs, the package can"
++ " define actions to be executed before and after running tests.\n"
, commandNotes = Nothing
, commandUsage = usageAlternatives "test"
[ "[FLAGS]"
, "TESTCOMPONENTS [FLAGS]"
]
, commandDefaultFlags = defaultTestFlags
, commandOptions = \showOrParseArgs ->
[ optionVerbosity testVerbosity (\v flags -> flags { testVerbosity = v })
, optionDistPref
testDistPref (\d flags -> flags { testDistPref = d })
showOrParseArgs
, option [] ["log"]
("Log all test suite results to file (name template can use "
++ "$pkgid, $compiler, $os, $arch, $test-suite, $result)")
testHumanLog (\v flags -> flags { testHumanLog = v })
(reqArg' "TEMPLATE"
(toFlag . toPathTemplate)
(flagToList . fmap fromPathTemplate))
, option [] ["machine-log"]
("Produce a machine-readable log file (name template can use "
++ "$pkgid, $compiler, $os, $arch, $result)")
testMachineLog (\v flags -> flags { testMachineLog = v })
(reqArg' "TEMPLATE"
(toFlag . toPathTemplate)
(flagToList . fmap fromPathTemplate))
, option [] ["show-details"]
("'always': always show results of individual test cases. "
++ "'never': never show results of individual test cases. "
++ "'failures': show results of failing test cases. "
++ "'streaming': show results of test cases in real time."
++ "'direct': send results of test cases in real time; no log file.")
testShowDetails (\v flags -> flags { testShowDetails = v })
(reqArg "FILTER"
(readP_to_E (\_ -> "--show-details flag expects one of "
++ intercalate ", "
(map display knownTestShowDetails))
(fmap toFlag parse))
(flagToList . fmap display))
, option [] ["keep-tix-files"]
"keep .tix files for HPC between test runs"
testKeepTix (\v flags -> flags { testKeepTix = v})
trueArg
, option [] ["test-options"]
("give extra options to test executables "
++ "(name templates can use $pkgid, $compiler, "
++ "$os, $arch, $test-suite)")
testOptions (\v flags -> flags { testOptions = v })
(reqArg' "TEMPLATES" (map toPathTemplate . splitArgs)
(const []))
, option [] ["test-option"]
("give extra option to test executables "
++ "(no need to quote options containing spaces, "
++ "name template can use $pkgid, $compiler, "
++ "$os, $arch, $test-suite)")
testOptions (\v flags -> flags { testOptions = v })
(reqArg' "TEMPLATE" (\x -> [toPathTemplate x])
(map fromPathTemplate))
]
}
emptyTestFlags :: TestFlags
emptyTestFlags = mempty
instance Monoid TestFlags where
mempty = gmempty
mappend = (<>)
instance Semigroup TestFlags where
(<>) = gmappend
-- ------------------------------------------------------------
-- * Benchmark flags
-- ------------------------------------------------------------
data BenchmarkFlags = BenchmarkFlags {
benchmarkDistPref :: Flag FilePath,
benchmarkVerbosity :: Flag Verbosity,
benchmarkOptions :: [PathTemplate]
} deriving (Generic)
defaultBenchmarkFlags :: BenchmarkFlags
defaultBenchmarkFlags = BenchmarkFlags {
benchmarkDistPref = NoFlag,
benchmarkVerbosity = Flag normal,
benchmarkOptions = []
}
benchmarkCommand :: CommandUI BenchmarkFlags
benchmarkCommand = CommandUI
{ commandName = "bench"
, commandSynopsis =
"Run all/specific benchmarks."
, commandDescription = Just $ \pname -> wrapText $
"If necessary (re)configures with `--enable-benchmarks` flag and"
++ " builds the benchmarks.\n"
++ "\n"
++ "Remember that the benchmarks' dependencies must be installed if"
++ " there are additional ones; e.g. with `" ++ pname
++ " install --only-dependencies --enable-benchmarks`.\n"
++ "\n"
++ "By defining UserHooks in a custom Setup.hs, the package can"
++ " define actions to be executed before and after running"
++ " benchmarks.\n"
, commandNotes = Nothing
, commandUsage = usageAlternatives "bench"
[ "[FLAGS]"
, "BENCHCOMPONENTS [FLAGS]"
]
, commandDefaultFlags = defaultBenchmarkFlags
, commandOptions = \showOrParseArgs ->
[ optionVerbosity benchmarkVerbosity
(\v flags -> flags { benchmarkVerbosity = v })
, optionDistPref
benchmarkDistPref (\d flags -> flags { benchmarkDistPref = d })
showOrParseArgs
, option [] ["benchmark-options"]
("give extra options to benchmark executables "
++ "(name templates can use $pkgid, $compiler, "
++ "$os, $arch, $benchmark)")
benchmarkOptions (\v flags -> flags { benchmarkOptions = v })
(reqArg' "TEMPLATES" (map toPathTemplate . splitArgs)
(const []))
, option [] ["benchmark-option"]
("give extra option to benchmark executables "
++ "(no need to quote options containing spaces, "
++ "name template can use $pkgid, $compiler, "
++ "$os, $arch, $benchmark)")
benchmarkOptions (\v flags -> flags { benchmarkOptions = v })
(reqArg' "TEMPLATE" (\x -> [toPathTemplate x])
(map fromPathTemplate))
]
}
emptyBenchmarkFlags :: BenchmarkFlags
emptyBenchmarkFlags = mempty
instance Monoid BenchmarkFlags where
mempty = gmempty
mappend = (<>)
instance Semigroup BenchmarkFlags where
(<>) = gmappend
-- ------------------------------------------------------------
-- * Shared options utils
-- ------------------------------------------------------------
programFlagsDescription :: ProgramDb -> String
programFlagsDescription progDb =
"The flags --with-PROG and --PROG-option(s) can be used with"
++ " the following programs:"
++ (concatMap (\line -> "\n " ++ unwords line) . wrapLine 77 . sort)
[ programName prog | (prog, _) <- knownPrograms progDb ]
++ "\n"
-- | For each known program @PROG@ in 'progDb', produce a @with-PROG@
-- 'OptionField'.
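--
-- As a purely illustrative sketch (the program names here are assumed, not
-- taken from this module), a database that knows about @gcc@ and @ld@ would
-- give rise to options parsed roughly as
--
-- > --with-gcc=PATH
-- > --with-ld=PATH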
programDbPaths
:: ProgramDb
-> ShowOrParseArgs
-> (flags -> [(String, FilePath)])
-> ([(String, FilePath)] -> (flags -> flags))
-> [OptionField flags]
programDbPaths progDb showOrParseArgs get set =
programDbPaths' ("with-" ++) progDb showOrParseArgs get set
{-# DEPRECATED programConfigurationPaths' "Use programDbPaths' instead" #-}
-- | Like 'programDbPaths', but allows to customise the option name.
programDbPaths', programConfigurationPaths'
:: (String -> String)
-> ProgramDb
-> ShowOrParseArgs
-> (flags -> [(String, FilePath)])
-> ([(String, FilePath)] -> (flags -> flags))
-> [OptionField flags]
programConfigurationPaths' = programDbPaths'
programDbPaths' mkName progDb showOrParseArgs get set =
case showOrParseArgs of
-- we don't want a verbose help text list so we just show a generic one:
ShowArgs -> [withProgramPath "PROG"]
ParseArgs -> map (withProgramPath . programName . fst)
(knownPrograms progDb)
where
withProgramPath prog =
option "" [mkName prog]
("give the path to " ++ prog)
get set
(reqArg' "PATH" (\path -> [(prog, path)])
(\progPaths -> [ path | (prog', path) <- progPaths, prog==prog' ]))
-- | For each known program @PROG@ in 'progDb', produce a @PROG-option@
-- 'OptionField'.
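--
-- As a hypothetical example (program name assumed), with @gcc@ in the
-- database each occurrence of the option contributes exactly one argument:
--
-- > --gcc-option=-O2 --gcc-option=-Wall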
programDbOption
:: ProgramDb
-> ShowOrParseArgs
-> (flags -> [(String, [String])])
-> ([(String, [String])] -> (flags -> flags))
-> [OptionField flags]
programDbOption progDb showOrParseArgs get set =
case showOrParseArgs of
-- we don't want a verbose help text list so we just show a generic one:
ShowArgs -> [programOption "PROG"]
ParseArgs -> map (programOption . programName . fst)
(knownPrograms progDb)
where
programOption prog =
option "" [prog ++ "-option"]
("give an extra option to " ++ prog ++
" (no need to quote options containing spaces)")
get set
(reqArg' "OPT" (\arg -> [(prog, [arg])])
(\progArgs -> concat [ args
| (prog', args) <- progArgs, prog==prog' ]))
{-# DEPRECATED programConfigurationOptions "Use programDbOptions instead" #-}
-- | For each known program @PROG@ in 'progDb', produce a @PROG-options@
-- 'OptionField'.
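--
-- Unlike the singular @PROG-option@, the argument is split on whitespace via
-- 'splitArgs', so (illustrative example, program name assumed)
--
-- > --gcc-options="-O2 -Wall"
--
-- contributes both flags at once.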
programDbOptions, programConfigurationOptions
:: ProgramDb
-> ShowOrParseArgs
-> (flags -> [(String, [String])])
-> ([(String, [String])] -> (flags -> flags))
-> [OptionField flags]
programConfigurationOptions = programDbOptions
programDbOptions progDb showOrParseArgs get set =
case showOrParseArgs of
-- we don't want a verbose help text list so we just show a generic one:
ShowArgs -> [programOptions "PROG"]
ParseArgs -> map (programOptions . programName . fst)
(knownPrograms progDb)
where
programOptions prog =
option "" [prog ++ "-options"]
("give extra options to " ++ prog)
get set
(reqArg' "OPTS" (\args -> [(prog, splitArgs args)]) (const []))
-- ------------------------------------------------------------
-- * GetOpt Utils
-- ------------------------------------------------------------
boolOpt :: SFlags -> SFlags
-> MkOptDescr (a -> Flag Bool) (Flag Bool -> a -> a) a
boolOpt = Command.boolOpt flagToMaybe Flag
boolOpt' :: OptFlags -> OptFlags
-> MkOptDescr (a -> Flag Bool) (Flag Bool -> a -> a) a
boolOpt' = Command.boolOpt' flagToMaybe Flag
trueArg, falseArg :: MkOptDescr (a -> Flag Bool) (Flag Bool -> a -> a) a
trueArg sfT lfT = boolOpt' (sfT, lfT) ([], []) sfT lfT
falseArg sfF lfF = boolOpt' ([], []) (sfF, lfF) sfF lfF
reqArgFlag :: ArgPlaceHolder -> SFlags -> LFlags -> Description ->
(b -> Flag String) -> (Flag String -> b -> b) -> OptDescr b
reqArgFlag ad = reqArg ad (succeedReadE Flag) flagToList
optionDistPref :: (flags -> Flag FilePath)
-> (Flag FilePath -> flags -> flags)
-> ShowOrParseArgs
-> OptionField flags
optionDistPref get set = \showOrParseArgs ->
option "" (distPrefFlagName showOrParseArgs)
( "The directory where Cabal puts generated build files "
++ "(default " ++ defaultDistPref ++ ")")
get set
(reqArgFlag "DIR")
where
distPrefFlagName ShowArgs = ["builddir"]
distPrefFlagName ParseArgs = ["builddir", "distdir", "distpref"]
optionVerbosity :: (flags -> Flag Verbosity)
-> (Flag Verbosity -> flags -> flags)
-> OptionField flags
optionVerbosity get set =
option "v" ["verbose"]
"Control verbosity (n is 0--3, default verbosity level is 1)"
get set
(optArg "n" (fmap Flag flagToVerbosity)
(Flag verbose) -- default Value if no n is given
(fmap (Just . showForCabal) . flagToList))
optionNumJobs :: (flags -> Flag (Maybe Int))
-> (Flag (Maybe Int) -> flags -> flags)
-> OptionField flags
optionNumJobs get set =
option "j" ["jobs"]
"Run NUM jobs simultaneously (or '$ncpus' if no NUM is given)."
get set
(optArg "NUM" (fmap Flag numJobsParser)
(Flag Nothing)
(map (Just . maybe "$ncpus" show) . flagToList))
where
numJobsParser :: ReadE (Maybe Int)
numJobsParser = ReadE $ \s ->
case s of
"$ncpus" -> Right Nothing
_ -> case reads s of
[(n, "")]
| n < 1 -> Left "The number of jobs should be 1 or more."
| otherwise -> Right (Just n)
_ -> Left "The jobs value should be a number or '$ncpus'"
-- ------------------------------------------------------------
-- * Other Utils
-- ------------------------------------------------------------
readPToMaybe :: Parse.ReadP a a -> String -> Maybe a
readPToMaybe p str = listToMaybe [ r | (r,s) <- Parse.readP_to_S p str
, all isSpace s ]
-- | Arguments to pass to a @configure@ script, e.g. generated by
-- @autoconf@.
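--
-- A rough illustration only (the flag values are assumed for the example):
-- with the compiler flavour set to GHC and a prefix of @\/usr\/local@, the
-- result would look something like
--
-- > ["--with-compiler=ghc", "--prefix=/usr/local"]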
configureArgs :: Bool -> ConfigFlags -> [String]
configureArgs bcHack flags
= hc_flag
++ optFlag "with-hc-pkg" configHcPkg
++ optFlag' "prefix" prefix
++ optFlag' "bindir" bindir
++ optFlag' "libdir" libdir
++ optFlag' "libexecdir" libexecdir
++ optFlag' "datadir" datadir
++ optFlag' "sysconfdir" sysconfdir
++ configConfigureArgs flags
where
hc_flag = case (configHcFlavor flags, configHcPath flags) of
(_, Flag hc_path) -> [hc_flag_name ++ hc_path]
(Flag hc, NoFlag) -> [hc_flag_name ++ display hc]
(NoFlag,NoFlag) -> []
hc_flag_name
      --TODO kill off this bc hack when defaultUserHooks is removed.
| bcHack = "--with-hc="
| otherwise = "--with-compiler="
optFlag name config_field = case config_field flags of
Flag p -> ["--" ++ name ++ "=" ++ p]
NoFlag -> []
optFlag' name config_field = optFlag name (fmap fromPathTemplate
. config_field
. configInstallDirs)
configureCCompiler :: Verbosity -> ProgramDb
-> IO (FilePath, [String])
configureCCompiler verbosity progdb = configureProg verbosity progdb gccProgram
configureLinker :: Verbosity -> ProgramDb -> IO (FilePath, [String])
configureLinker verbosity progdb = configureProg verbosity progdb ldProgram
configureProg :: Verbosity -> ProgramDb -> Program
-> IO (FilePath, [String])
configureProg verbosity programDb prog = do
(p, _) <- requireProgram verbosity prog programDb
let pInv = programInvocation p []
return (progInvokePath pInv, progInvokeArgs pInv)
-- | Helper function to split a string into a list of arguments.
-- It's supposed to handle quoted things sensibly, eg:
--
-- > splitArgs "--foo=\"C:\Program Files\Bar\" --baz"
-- > = ["--foo=C:\Program Files\Bar", "--baz"]
--
splitArgs :: String -> [String]
splitArgs = space []
where
space :: String -> String -> [String]
space w [] = word w []
space w ( c :s)
| isSpace c = word w (space [] s)
space w ('"':s) = string w s
space w s = nonstring w s
string :: String -> String -> [String]
string w [] = word w []
string w ('"':s) = space w s
string w ( c :s) = string (c:w) s
nonstring :: String -> String -> [String]
nonstring w [] = word w []
nonstring w ('"':s) = string w s
nonstring w ( c :s) = space (c:w) s
word [] s = s
word w s = reverse w : s
-- The test cases kinda have to be rewritten from the ground up... :/
--hunitTests :: [Test]
--hunitTests =
-- let m = [("ghc", GHC), ("nhc98", NHC), ("hugs", Hugs)]
-- (flags, commands', unkFlags, ers)
-- = getOpt Permute options ["configure", "foobar", "--prefix=/foo", "--ghc", "--nhc98", "--hugs", "--with-compiler=/comp", "--unknown1", "--unknown2", "--install-prefix=/foo", "--user", "--global"]
-- in [TestLabel "very basic option parsing" $ TestList [
-- "getOpt flags" ~: "failed" ~:
-- [Prefix "/foo", GhcFlag, NhcFlag, HugsFlag,
-- WithCompiler "/comp", InstPrefix "/foo", UserFlag, GlobalFlag]
-- ~=? flags,
-- "getOpt commands" ~: "failed" ~: ["configure", "foobar"] ~=? commands',
-- "getOpt unknown opts" ~: "failed" ~:
-- ["--unknown1", "--unknown2"] ~=? unkFlags,
-- "getOpt errors" ~: "failed" ~: [] ~=? ers],
--
-- TestLabel "test location of various compilers" $ TestList
-- ["configure parsing for prefix and compiler flag" ~: "failed" ~:
-- (Right (ConfigCmd (Just comp, Nothing, Just "/usr/local"), []))
-- ~=? (parseArgs ["--prefix=/usr/local", "--"++name, "configure"])
-- | (name, comp) <- m],
--
-- TestLabel "find the package tool" $ TestList
-- ["configure parsing for prefix comp flag, withcompiler" ~: "failed" ~:
-- (Right (ConfigCmd (Just comp, Just "/foo/comp", Just "/usr/local"), []))
-- ~=? (parseArgs ["--prefix=/usr/local", "--"++name,
-- "--with-compiler=/foo/comp", "configure"])
-- | (name, comp) <- m],
--
-- TestLabel "simpler commands" $ TestList
-- [flag ~: "failed" ~: (Right (flagCmd, [])) ~=? (parseArgs [flag])
-- | (flag, flagCmd) <- [("build", BuildCmd),
-- ("install", InstallCmd Nothing False),
-- ("sdist", SDistCmd),
-- ("register", RegisterCmd False)]
-- ]
-- ]
{- Testing ideas:
* IO to look for hugs and hugs-pkg (which hugs, etc)
* quickCheck to test permutations of arguments
* what other options can we over-ride with a command-line flag?
-}
|
sopvop/cabal
|
Cabal/Distribution/Simple/Setup.hs
|
Haskell
|
bsd-3-clause
| 86,958
|
{-# LANGUAGE OverloadedStrings, FlexibleContexts #-}
module Aws.SimpleDb.Model
where
import Aws.SimpleDb.Response
import Aws.Util
import Aws.Xml
import Control.Monad
import Text.XML.Cursor (($/), (&|))
import qualified Control.Failure as F
import qualified Data.ByteString as B
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Text.XML.Cursor as Cu
data Attribute a
= ForAttribute { attributeName :: T.Text, attributeData :: a }
deriving (Show)
readAttribute :: F.Failure XmlException m => Cu.Cursor -> m (Attribute T.Text)
readAttribute cursor = do
name <- forceM "Missing Name" $ cursor $/ Cu.laxElement "Name" &| decodeBase64
value <- forceM "Missing Value" $ cursor $/ Cu.laxElement "Value" &| decodeBase64
return $ ForAttribute name value
data SetAttribute
= SetAttribute { setAttribute :: T.Text, isReplaceAttribute :: Bool }
deriving (Show)
attributeQuery :: (a -> [(B.ByteString, B.ByteString)]) -> Attribute a -> [(B.ByteString, B.ByteString)]
attributeQuery f (ForAttribute name x) = ("Name", T.encodeUtf8 name) : f x
addAttribute :: T.Text -> T.Text -> Attribute SetAttribute
addAttribute name value = ForAttribute name (SetAttribute value False)
replaceAttribute :: T.Text -> T.Text -> Attribute SetAttribute
replaceAttribute name value = ForAttribute name (SetAttribute value True)
setAttributeQuery :: SetAttribute -> [(B.ByteString, B.ByteString)]
setAttributeQuery (SetAttribute value replace)
= ("Value", T.encodeUtf8 value) : [("Replace", awsTrue) | replace]
data DeleteAttribute
= DeleteAttribute
| ValuedDeleteAttribute { deleteAttributeValue :: T.Text }
deriving (Show)
deleteAttributeQuery :: DeleteAttribute -> [(B.ByteString, B.ByteString)]
deleteAttributeQuery DeleteAttribute = []
deleteAttributeQuery (ValuedDeleteAttribute value) = [("Value", T.encodeUtf8 value)]
data ExpectedAttribute
= ExpectedValue { expectedAttributeValue :: T.Text }
| ExpectedExists { expectedAttributeExists :: Bool }
deriving (Show)
expectedValue :: T.Text -> T.Text -> Attribute ExpectedAttribute
expectedValue name value = ForAttribute name (ExpectedValue value)
expectedExists :: T.Text -> Bool -> Attribute ExpectedAttribute
expectedExists name exists = ForAttribute name (ExpectedExists exists)
expectedAttributeQuery :: ExpectedAttribute -> [(B.ByteString, B.ByteString)]
expectedAttributeQuery (ExpectedValue value) = [("Value", T.encodeUtf8 value)]
expectedAttributeQuery (ExpectedExists exists) = [("Exists", awsBool exists)]
data Item a
= Item { itemName :: T.Text, itemData :: a }
deriving (Show)
readItem :: F.Failure XmlException m => Cu.Cursor -> m (Item [Attribute T.Text])
readItem cursor = do
name <- force "Missing Name" <=< sequence $ cursor $/ Cu.laxElement "Name" &| decodeBase64
attributes <- sequence $ cursor $/ Cu.laxElement "Attribute" &| readAttribute
return $ Item name attributes
itemQuery :: (a -> [(B.ByteString, B.ByteString)]) -> Item a -> [(B.ByteString, B.ByteString)]
itemQuery f (Item name x) = ("ItemName", T.encodeUtf8 name) : f x
|
jgm/aws
|
Aws/SimpleDb/Model.hs
|
Haskell
|
bsd-3-clause
| 3,217
|
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE TupleSections #-}
module Bertrand.Parser
(parse
)where
import Prelude hiding (null)
import Control.Applicative
import Control.Monad
-- import Control.Monad.Trans.Maybe
-- import Control.Monad.State
import Data.Char
import qualified Data.Map as M
import Data.Maybe
-- import Debug.Trace
import Bertrand.Data
import Bertrand.System (systemIds)
--------------------------------------------------------------------------------
-- type Parser a = MaybeT (State ParserState) a
-- runParser = runMaybeT
newtype Parser a = Parser {runParser :: ParserState -> (Maybe a, ParserState)}
instance Functor Parser where
f `fmap` p = Parser $ \s -> let (m, s') = runParser p s
in (f `fmap` m, s')
instance Applicative Parser where
pure = return
(<*>) = ap
instance Monad Parser where
return x = Parser $ \s -> (return x, s)
p >>= f = Parser $ \s -> let (m, s') = runParser p s
in maybe (empty, s) (\a -> runParser (f a) s') m
instance Alternative Parser where
empty = Parser $ \s -> (empty, s)
p <|> q = Parser $ \s -> let a@(m, s') = runParser p s
in maybe (runParser q s) (const a) m
instance MonadPlus Parser
get :: Parser ParserState
get = Parser $ \s -> (return s, s)
put :: ParserState -> Parser ()
put s = Parser $ const (return (), s)
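-- The parser state tracks the current (line, column) position together with
-- the remaining input.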
data ParserState = ParserState (Int, Int) String
purePS :: String -> ParserState
purePS = ParserState (1, 1)
pop :: ParserState -> (Char, ParserState)
pop (ParserState (i, j) s) = case s of
[] -> ('\0', ParserState (i, j) s)
'\n':cs -> ('\n', ParserState (i + 1, j) cs)
c :cs -> (c, ParserState (i, j + 1) cs)
null :: ParserState -> Bool
null (ParserState _ s)
| s == "" = True
| otherwise = False
item :: Parser Char
item = do
s <- get
if null s
then mzero
else let (c, s') = pop s
in do
put s'
return c
sat :: (Char -> Bool) -> Parser Char
sat f = do
s <- get
c <- item
if f c
then return c
else do
put s
mzero
test :: Parser a -> Parser a
test p = do
s <- get
a <- p
put s
return a
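-- Negative lookahead: succeeds without consuming input exactly when the given
-- parser fails.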
notp :: Parser a -> Parser ()
notp p = Parser $ \s -> let (m, s') = runParser p s
in maybe (return (), s) (const (mzero, s)) m
eof :: Parser ()
eof = do
s <- get
unless (null s) mzero
--------------------------------------------------------------------------------
parse :: [ParseOption] -> Int -> String -> Either (Int, Int) Envir
parse os i cs = let (m, ParserState p _) = runParser (parser os i) (purePS cs)
in maybe (Left p) Right m
--------------------------------------------------------------------------------
type OpeParser = Parser Expr -> Parser Expr
parser :: [ParseOption] -> Int -> Parser Envir
parser ops i = emap (env $ i + 1) <$> statement <* eof
where
env :: Int -> Expr -> Expr
env i = \case
App a b -> App (env i a) (env i b)
Lambda a b -> Lambda (env (i + 1) a) (env (i + 1) b)
Env e a -> Env (emap (env $ i + 1) e {depth = i}) $ env (i + 1) a
a -> a
expr :: Parser Expr
expr = foldr id apply opeparsers
-- expr = apply $ head opeparsers $ term expr
statement :: Parser Envir
statement = variable
<|> constraint
-- <|> bind
<|> declare
variable :: Parser Envir
variable = (\s ss -> mempty{vars = case s of
"var" -> (ss, [])
"cons" -> ([], ss)}) <$>
oneof string ["var", "cons"] <*> some identifier'
constraint :: Parser Envir
constraint = (\ss e -> mempty{cstrs = M.fromList $ map (, [e]) ss,
decls = [(ss, e)]}) <$>
some identifier' <* sign "." <*> expr
-- bind :: Parser Envir
-- bind = (,) <$> expr <*> (sign "=" *> expr) >>= f
-- where
-- f (a, e) = let x:es = toList a in case detach x of
-- Id s -> return mempty{binds = M.singleton s [foldr Lambda e es]}
-- _ -> mzero
declare :: Parser Envir
declare = expr >>= f
where
f e = case toList e of
[a, b, c] | isName "=" a ->
let x:es = toList b
in case detach x of
Id s -> return mempty{binds =
M.singleton s [foldr Lambda c es]}
_ -> mzero
_ -> return mempty{decls = [([], e)]}
opeparsers :: [OpeParser]
opeparsers = envir : lambda : map opeparser opers
opers = operators ops
lambda :: OpeParser
lambda p = (f <$> (sign "\\" *> some term) <* sign "->" <*> p)
<|> p
where
f es e = foldr1 Lambda $ es ++ [e]
envir :: OpeParser
envir p = f <$> p <*> option (sign "!" *>
((:) <$> statement <*> many (sign ";" *> statement)))
where
f a Nothing = a
f a (Just es) = Env (mconcat es) a
apply :: Parser Expr
apply = foldl App <$> (term <|> operator) <*> many term
-- termop :: OpeParser
-- termop p = term <|> operator
term :: Parser Expr
term = sign "(" *> expr <* sign ")"
<|> App (Id "~") <$> (sign "~" *> term)
<|> list
<|> ifelse
<|> caseof
<|> float
<|> number
<|> systemId
<|> Id <$> (identifier <|> wildcard <|> sign "()")
signs :: [String]
signs = "!":";":"\\":"->":",":"if":"then":"else":"case":"of":
foldr (\(is, ils, irs, ifs) s -> is ++ ils ++ irs ++ ifs ++ s) [] opers
operator :: Parser Expr
operator = Id <$> oneof sign signs
identifier :: Parser String
identifier = token ((:) <$> (letter <|> char '#') <*>
many (letter <|> digit))
>>= \s -> if s `elem` signs
then mzero
else return s
identifier' :: Parser String
identifier' = token ((:) <$> (letter <|> char '#') <*>
many (letter <|> digit))
wildcard :: Parser String
wildcard = token ((:) <$> char '_' <*> many (letter <|> digit))
list :: Parser Expr
list = (makeList .) . (++) <$>
(sign "[" *> optionL expr) <*> many (sign "," *> expr) <* sign "]"
ifelse :: Parser Expr
ifelse = (\c a b -> App (App (App (Id "#if") c) a) b) <$>
(sign "if" *> expr) <*> (sign "then" *> expr) <*> (sign "else" *> expr)
caseof :: Parser Expr
caseof = (\e cs -> App (App (Id "comma") $ makeList cs) e) <$>
(sign "case" *> expr) <* sign "of" <*>
((:) <$> clause <*> many (sign ";" *> clause))
where
clause :: Parser Expr
clause = Lambda . foldr1 App <$> some term <* sign "->" <*> expr
makeList :: [Expr] -> Expr
makeList = foldr (app2 (Id ":")) (Id "[]")
operators :: [ParseOption] -> [([String], [String], [String], [String])]
operators = M.elems . foldr f M.empty
where
f :: ParseOption -> M.Map Int ([String], [String], [String], [String]) -> M.Map Int ([String], [String], [String], [String])
f (Infix i s) = M.insertWith (const $ map1st (s:)) i ([s], [], [], [])
f (Infixl i s) = M.insertWith (const $ map2nd (s:)) i ([], [s], [], [])
f (Infixr i s) = M.insertWith (const $ map3rd (s:)) i ([], [], [s], [])
f (Infixf i s) = M.insertWith (const $ map4th (s:)) i ([], [], [], [s])
map1st f (a, b, c, d) = (f a, b, c, d)
map2nd f (a, b, c, d) = (a, f b, c, d)
map3rd f (a, b, c, d) = (a, b, f c, d)
map4th f (a, b, c, d) = (a, b, c, f d)
-- infix infixl infixr infixf
opeparser :: ([String], [String], [String], [String]) -> OpeParser
opeparser (is, ils, irs, ifs) p = infixp $ infixlp $ infixrp $ infixfp p
where
infixp :: OpeParser
infixp p = f <$> p <*> option ((,) <$> oper is <*> option p)
where
f a Nothing = a
f a (Just (o, Nothing)) = App o a
f a (Just (o, Just b)) = App (App o a) b
infixlp :: OpeParser
infixlp p = f <$> g
where
f :: [Expr] -> Expr
f [a] = a
f [a, o] = App o a
f (a:o:b:x) = f $ App (App o a) b : x
g :: Parser [Expr]
g = (:) <$> p <*> (concat <$> optionL ((:) <$> oper ils <*> (concat <$> optionL g)))
infixrp :: OpeParser
infixrp p = f <$> p <*> option ((,) <$> oper irs <*> option (infixrp p))
where
f a Nothing = a
f a (Just (o, Nothing)) = App o a
f a (Just (o, Just b)) = App (App o a) b
infixfp :: OpeParser
infixfp p = f <$> g
where
f :: [Expr] -> Expr
f [a] = a
f [a, o] = App o a
f [a, o, b] = App (App o a) b
f (a:o:b:x) = App (App (Id "and") $ f [a, o, b]) $ f (b:x)
g :: Parser [Expr]
g = (:) <$> p <*> (concat <$> optionL ((:) <$> oper ifs <*> g))
oper s = Id <$> oneof sign s <* notp symbol
systemId :: Parser Expr
systemId = token (char '#' *> many letter) >>=
(\s -> maybe mzero return $ System <$> lookup s systemIds)
float :: Parser Expr
float = token (toFraction <$> some digit <* char '.' <*> some digit)
where
toFraction xs ys = app2 (Id "/") (System . Int . read $ xs ++ ys)
(System . Int $ 10 ^ length ys)
number :: Parser Expr
number = System . Int . read <$> token (some digit)
sign :: String -> Parser String
sign s = token $ string s
token :: Parser a -> Parser a
token p = spaces *> p <* spaces
digit :: Parser Char
digit = sat isDigit
letter :: Parser Char
letter = sat isLetter
upper :: Parser Char
upper = sat isUpper
symbol :: Parser Char
symbol = oneof char "!$%&*+./<=>?@\\^|-~:"
spaces :: Parser String
spaces = many space
space :: Parser Char
space = sat isSeparator
string :: String -> Parser String
string "" = return ""
string (c : "") = (:[]) <$> char c
string (c : cs) = (:) <$> char c <*> string cs
char :: Char -> Parser Char
char c = sat (c ==)
option :: Parser a -> Parser (Maybe a)
option p = (Just <$> p) <|> return Nothing
optionL :: Parser a -> Parser [a]
optionL p = maybeToList <$> option p
oneof :: (a -> Parser b) -> [a] -> Parser b
oneof p = foldl (\q a -> q <|> p a) mzero
app2 :: Expr -> Expr -> Expr -> Expr
app2 a b = App (App a b)
-- or :: (a -> Bool) -> (a -> Bool) -> a -> Bool
-- f `or` g = \a -> f a || g a
|
fujiy00/bertrand
|
src/Bertrand/Parser.hs
|
Haskell
|
bsd-3-clause
| 11,294
|
module Generate.Functions where
import Data.Char
import Data.Maybe
import Control.Applicative
import Control.Exception(assert)
import Data.XCB
import HaskellCombinators
import Generate(valueParamName,mapAlt,xImport,mapIdents,fieldName,fieldType)
import Generate.Monad
import Generate.Facts
import Generate.Util
import Control.Monad.Reader
import Control.Monad.Trans.Maybe
-- Builds a function for every request in the module
-- Hopefully I'm not duplicating too much between here
-- and the types generation module.
-- | Returns the name of the Haskell module containing the type
-- declarations for a given XCB module.
typesModName :: GenXHeader a -> String
typesModName = typesModuleName . interCapsName
-- | Returns the name of the Haskell module containing the function
-- definitions for a given XCB module.
functionsModName :: GenXHeader a -> String
functionsModName = functionsModuleName . interCapsName
-- | Returns the name of an X module in InterCaps.
interCapsName :: GenXHeader a -> String
interCapsName xhd = case xheader_name xhd of
Nothing -> ensureUpper $ xheader_header xhd
Just name -> name
ensureLower [] = []
ensureLower (x:xs) = toLower x : xs
-- | Given a list of X modules, returns a list of generated Haskell modules
-- which contain the developer friendly functions for using XHB.
functionsModules :: [XHeader] -> [HsModule]
functionsModules xs = map go transed
where transed = standardTranslations xs
go :: HXHeader -> HsModule
go xhd = functionsModule transed xhd
-- | Generates the Haskell functions for using the functionality
-- of the passed in X module.
functionsModule :: [HXHeader] -> HXHeader -> HsModule
functionsModule xs xhd | isCoreModule xhd = buildCore xhd
| otherwise = buildExtension xs xhd
-- | Returns 'True' if the X module is NOT for an extension.
isCoreModule = isNothing . xheader_xname
buildExtension :: [HXHeader] -> HXHeader -> HsModule
buildExtension xs xhd =
let emptyModule = newExtensionModule xhd
rs = requests xhd
fns = declareFunctions xhd rs
extId = declareExtensionId xhd
imFns = doImports xs xhd
in moveExports $ applyMany (extId ++ fns ++ imFns) emptyModule
declareExtensionId :: HXHeader -> [HsModule -> HsModule]
declareExtensionId xhd =
[addDecl $ mkTypeSig extFnName [] (mkTyCon "ExtensionId")
,addDecl $ mkSimpleFun extFnName [] $
mkStringLit $ fromJust $ xheader_xname xhd
,addExport $ mkExportVar extFnName
]
where extFnName = "extension"
doImports :: [HXHeader] -> HXHeader -> [HsModule -> HsModule]
doImports xs xhd =
let decs = xheader_decls xhd
in mapMaybe go decs
where go :: HXDecl -> Maybe (HsModule -> HsModule)
go (XImport name) = return $ xImport xs xhd name
go _ = Nothing
-- | Builds a Haskell functions module for the passed-in xml
-- description. Assumes it is not for extension requests.
buildCore :: HXHeader -> HsModule
buildCore xhd =
let emptyModule = newCoreModule xhd
rs = requests xhd
fns = declareFunctions xhd rs
in moveExports $ applyMany fns emptyModule
-- | moves entire-module exports to end of export list
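-- (for instance, a hypothetical export list @[module Foo, bar, baz]@ becomes
-- @[bar, baz, module Foo]@)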
moveExports :: HsModule -> HsModule
moveExports =
modifyExports $ \exports ->
let (modExports, otherExports) = filterAccum isModExport exports
in otherExports ++ modExports
modifyExports f mod =
case getExports mod of
Nothing -> mod
(Just exs) -> setExports (Just $ f exs) mod
applyMany = foldr (flip (.)) id
-- Creates a nearly empty Haskell module for the passed-in
-- X module. Also inserts standard Haskell imports.
newCoreModule :: HXHeader -> HsModule
newCoreModule xhd =
let name = functionsModName xhd
mod = mkModule name
in exportTypesMod xhd $ doQualImports $ doImports mod
where doImports = applyMany $ map (addImport . mkImport) $
[typesModName xhd
, packagePrefix ++ ".Connection.Internal"
, packagePrefix ++ ".Shared"
,"Data.Binary.Put"
,"Control.Concurrent.STM"
,"Foreign.C.Types"
,"Data.Word"
,"Data.Int"
,"Data.Binary.Get"
]
doQualImports = addImport $ mkQualImport $
packagePrefix ++ ".Connection.Types"
newExtensionModule :: HXHeader -> HsModule
newExtensionModule xhd =
let name = functionsModName xhd
mod = mkModule name
in exportTypesMod xhd $ doHidingImports $ doSomeImports $ doImports mod
where doImports = applyMany $ map (addImport . mkImport) $
[typesModName xhd
, packagePrefix ++ ".Connection.Internal"
, packagePrefix ++ ".Connection.Extension"
, packagePrefix ++ ".Connection.Types"
, "Control.Concurrent.STM"
, "Foreign.C.Types"
, "Data.Word"
, "Data.Int"
, "Data.Binary.Get"
]
doSomeImports = addImport $ mkSomeImport "Data.Binary.Put" ["runPut"]
doHidingImports = addImport $ mkHidingImport (packagePrefix ++ ".Shared") ["Event", "Error"]
exportTypesMod = addExport . mkExportModule . typesModName
connTyName = packagePrefix ++ ".Connection.Types.Connection"
makeReceipt :: RequestInfo -> [HsStmt]
makeReceipt req
| not (hasReply req) = empty
| unaryReply req = return $ mkBinding $
hsApp (mkVar "newEmptyReceipt") $ hsParen $
hsApp (mkVar "runGet") $ hsParen $
hsInfixApp (mkVar unaryReplyAccessorName)
(mkQOpIdent "fmap")
(mkVar "deserialize")
| otherwise = return $ mkBinding $
mkVar "newDeserReceipt"
where
mkBinding = mkGenerator (hsPTuple [mkPVar "receipt", mkPVar "rReceipt"])
unaryReplyAccessorName = accessor elemName name
where name = replyName (request_name req)
elemName = maybe (error $ "Failure in mkReceiptForReply! " ++ show req) id $
firstReplyElem req >>= fieldName
-- send rReceipt, but still return receipt
sendRequest :: RequestInfo -> [HsStmt]
sendRequest req | hasReply req = map hsQualifier
[foldl1 hsApp $ map mkVar $
["sendRequestWithReply"
,"c"
,"chunk"
,"rReceipt"
]
,mkVar "return" `hsApp` mkVar "receipt"
]
| otherwise = map hsQualifier $
return $ (mkVar "sendRequest" `hsApp` mkVar "c")
`hsApp` mkVar "chunk"
-- account for unary/nullary reply case
resultType :: RequestInfo -> HsType
resultType req | unaryReply req =
receiptType $ fromJust $ fieldType $ fromJust $ firstReplyElem req
| hasReply req = receiptType $ replyType req
| otherwise = foldr1 hsTyApp $
[mkTyCon "IO"
,unit_tycon
]
receiptType :: HsType -> HsType
receiptType typ = foldr1 hsTyApp $
[mkTyCon "IO"
,mkTyCon "Receipt"
,typ]
replyType :: RequestInfo -> HsType
replyType = mkTyCon . replyNameFromInfo
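-- An informal example (request and reply names are hypothetical, and this
-- assumes replyName simply appends "Reply"): for a request "GetFoo" with a
-- multi-field reply,
--     resultType req  ~  IO (Receipt GetFooReply)
-- while a request with no reply at all yields plain  IO ().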
-- | Declares Haskell functions for an X module.
declareFunctions :: HXHeader -> [RequestInfo] -> [HsModule -> HsModule]
declareFunctions xhd rInfos =
map (declareFunction (not $ isCoreModule xhd)) rInfos
-- for core requests, we can do the short form and long form
-- because we don't have to import any other modules
-- | Handles a single request in the core functions module.
declareFunction :: Bool -> RequestInfo -> (HsModule -> HsModule)
declareFunction ext req =
applyMany
[addDecl typDeclaration
,addDecl fnDeclaration
,addExport $ mkExportAbs fnName
]
where fnName = fnNameFromRequest req
fields = requestFields req
fieldCount = length fields
bigCount = 3
shortMode = fieldCount < bigCount
typDeclaration :: HsDecl
typDeclaration | shortMode = shortTypDec
| otherwise = longTypDec
fnDeclaration :: HsDecl
fnDeclaration | shortMode = shortFnDec
| otherwise = longFnDec
shortTypDec, longTypDec :: HsDecl
shortTypDec = mkTypeSig fnName [] shortTyp
longTypDec = mkTypeSig fnName [] longType
shortTyp =
let fieldTypes = fieldsToTypes fields
in foldr1 hsTyFun $
mkTyCon connTyName : fieldTypes ++ [resultType req]
longType =
foldr1 hsTyFun $
[mkTyCon connTyName
,mkTyCon $ request_name req
,resultType req
]
shortFnDec = mkSimpleFun fnName
(map mkPVar shortArgs)
(hsDo fnBody)
longFnDec = mkSimpleFun fnName
(map mkPVar ["c", "req"])
(hsDo fnBody)
shortArgs = "c" : fieldsToArgNames fields
-- constructor plus args
shortRequestExpr :: HsExp
shortRequestExpr =
foldl1 hsApp $ constructor : map mkVar (fieldsToArgNames fields)
-- TODO: share constructor name between
-- generation codebases.
constructor :: HsExp
constructor = hsCon . mkUnQName $ "Mk" ++ request_name req
fnBody :: [HsStmt]
fnBody = concat
[ makeReceipt req
, buildRequest
, serializeRequest
, sendRequest req
]
buildRequest | shortMode = return $ mkLetStmt
(mkPVar "req")
shortRequestExpr
| otherwise = empty
serializeRequest
| ext = [ mkGenerator (mkPVar "putAction")
(foldl1 hsApp $ map mkVar $
["serializeExtensionRequest"
,"c"
,"req"
]
)
, mkLetStmt (mkPVar "chunk")
(mkVar "runPut" `hsApp` mkVar "putAction")
]
| otherwise = [mkLetStmt (mkPVar "chunk")
(applyManyExp
[mkVar "runPut"
,mkVar "serialize" `hsApp` mkVar "req"
])
]
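-- A sketch of the generated API (request and field names below are made up,
-- not taken from the real protocol): a request "ChangeFoo" with two
-- user-supplied fields, say  window :: WINDOW  and  value :: Word32, and no
-- reply falls under the short form, producing
--     changeFoo :: Connection -> WINDOW -> Word32 -> IO ()
-- whereas a request with three or more fields only gets the long form
--     changeFoo :: Connection -> ChangeFoo -> IO ()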
-- | Fold Haskell expressions together in a right-fold fashion
applyManyExp [] = undefined
applyManyExp [x] = x
applyManyExp (x:xs) = hsApp x $ hsParen $ applyManyExp xs
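-- For example, with placeholder expressions f, g and x (illustrative only):
--     applyManyExp [f, g, x]  builds  f (g (x))
-- i.e. each element is applied to the parenthesised application of the rest,
-- which is why the comment above describes it as a right fold.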
-- | Maps the fields of a X-struct into argument names to be used
-- in an arg-list for a Haskell function
fieldsToArgNames :: [HStructElem] -> [String]
fieldsToArgNames = map mapIdents . mapMaybe fieldToArgName
fieldToArgName :: HStructElem -> Maybe String
fieldToArgName = fieldName
-- | The types corresponding to the args from "fieldsToArgNames".
fieldsToTypes :: [HStructElem] -> [HsType]
fieldsToTypes = mapMaybe fieldType
-- | Extracts the requests from an X module.
requests :: HXHeader -> [RequestInfo]
requests = mapMaybe go . xheader_decls
where go (XRequest name code elems reply) = return $
RequestInfo name code elems reply
go _ = empty
data RequestInfo = RequestInfo
{request_name :: Name
,request_code :: Int
,request_elems :: [HStructElem]
,request_reply :: Maybe HXReply
} deriving Show
-- | Extracts only the fields in a request that must be specified
-- by the library end-user. That is, padding and such is excluded.
requestFields :: RequestInfo -> [HStructElem]
requestFields = filter go . request_elems
where go List{} = True
go SField{} = True
go ValueParam{} = True
go _ = False
-- | Returns true if a request has a reply
hasReply :: RequestInfo -> Bool
hasReply = not . isNothing . request_reply
-- | Returns true if the reply is a unary reply - as in, has
-- only one element
unaryReply :: RequestInfo -> Bool
unaryReply RequestInfo{request_reply = Just xs}
= 1 == length (filter interestingField xs)
unaryReply _ = False
-- | Returns the first StructElem in the reply, if there is
-- one.
firstReplyElem :: RequestInfo -> Maybe HStructElem
firstReplyElem = listToMaybe . filter interestingField
. maybe [] id . request_reply
interestingField :: GenStructElem a -> Bool
interestingField Pad{} = False
interestingField ExprField{} = False
interestingField _ = True
-- | For a request, returns what the end-user Haskell function
-- is to be named
fnNameFromRequest :: RequestInfo -> String
fnNameFromRequest = ensureLower . request_name
-- | For a request, returns the name of the Haskell type constructor
-- corresponding to its reply.
replyNameFromInfo :: RequestInfo -> String
replyNameFromInfo req = assert (hasReply req) $
replyName $ request_name req
|
aslatter/xhb
|
build-utils/src/Generate/Functions.hs
|
Haskell
|
bsd-3-clause
| 13,073
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE MultiWayIf #-}
module Game.Monsters.MTank where
import Control.Lens (use, preuse, ix, zoom, (^.), (.=), (%=), (&), (.~), (%~))
import Control.Monad (void, when, unless, liftM)
import Data.Bits ((.&.), (.|.), complement)
import Linear (V3(..), normalize)
import qualified Data.Vector as V
import {-# SOURCE #-} Game.GameImportT
import Game.LevelLocalsT
import Game.GameLocalsT
import Game.CVarT
import Game.SpawnTempT
import Game.EntityStateT
import Game.EdictT
import Game.MMoveT
import Game.GClientT
import Game.MoveInfoT
import Game.ClientPersistantT
import Game.ClientRespawnT
import Game.MonsterInfoT
import Game.PlayerStateT
import Types
import QuakeRef
import QuakeState
import CVarVariables
import Game.Adapters
import qualified Constants
import qualified Game.GameAI as GameAI
import qualified Game.GameMisc as GameMisc
import qualified Game.GameUtil as GameUtil
import qualified Game.Monster as Monster
import qualified Game.Monsters.MFlash as MFlash
import qualified Util.Lib as Lib
import qualified Util.Math3D as Math3D
frameStand01 :: Int
frameStand01 = 0
frameStand30 :: Int
frameStand30 = 29
frameWalk01 :: Int
frameWalk01 = 30
frameWalk04 :: Int
frameWalk04 = 33
frameWalk05 :: Int
frameWalk05 = 34
frameWalk20 :: Int
frameWalk20 = 49
frameWalk21 :: Int
frameWalk21 = 50
frameWalk25 :: Int
frameWalk25 = 54
frameAttack101 :: Int
frameAttack101 = 55
frameAttack110 :: Int
frameAttack110 = 64
frameAttack111 :: Int
frameAttack111 = 65
frameAttack113 :: Int
frameAttack113 = 67
frameAttack116 :: Int
frameAttack116 = 70
frameAttack117 :: Int
frameAttack117 = 71
frameAttack122 :: Int
frameAttack122 = 76
frameAttack201 :: Int
frameAttack201 = 77
frameAttack238 :: Int
frameAttack238 = 114
frameAttack301 :: Int
frameAttack301 = 115
frameAttack321 :: Int
frameAttack321 = 135
frameAttack322 :: Int
frameAttack322 = 136
frameAttack324 :: Int
frameAttack324 = 138
frameAttack327 :: Int
frameAttack327 = 141
frameAttack330 :: Int
frameAttack330 = 144
frameAttack331 :: Int
frameAttack331 = 145
frameAttack353 :: Int
frameAttack353 = 167
frameAttack401 :: Int
frameAttack401 = 168
frameAttack429 :: Int
frameAttack429 = 196
framePain101 :: Int
framePain101 = 197
framePain104 :: Int
framePain104 = 200
framePain201 :: Int
framePain201 = 201
framePain205 :: Int
framePain205 = 205
framePain301 :: Int
framePain301 = 206
framePain316 :: Int
framePain316 = 221
frameDeath101 :: Int
frameDeath101 = 222
frameDeath132 :: Int
frameDeath132 = 253
tankSight :: EntInteract
tankSight =
GenericEntInteract "tank_sight" $ \selfRef _ -> do
sound <- use $ gameBaseGlobals.gbGameImport.giSound
soundSight <- use $ mTankGlobals.mTankSoundSight
sound (Just selfRef) Constants.chanVoice soundSight 1 Constants.attnNorm 0
return True
tankFootStep :: EntThink
tankFootStep =
GenericEntThink "tank_footstep" $ \selfRef -> do
sound <- use $ gameBaseGlobals.gbGameImport.giSound
soundStep <- use $ mTankGlobals.mTankSoundStep
sound (Just selfRef) Constants.chanBody soundStep 1 Constants.attnNorm 0
return True
tankThud :: EntThink
tankThud =
GenericEntThink "tank_thud" $ \selfRef -> do
sound <- use $ gameBaseGlobals.gbGameImport.giSound
soundThud <- use $ mTankGlobals.mTankSoundThud
sound (Just selfRef) Constants.chanBody soundThud 1 Constants.attnNorm 0
return True
tankWindUp :: EntThink
tankWindUp =
GenericEntThink "tank_windup" $ \selfRef -> do
sound <- use $ gameBaseGlobals.gbGameImport.giSound
soundWindUp <- use $ mTankGlobals.mTankSoundWindUp
sound (Just selfRef) Constants.chanWeapon soundWindUp 1 Constants.attnNorm 0
return True
tankIdle :: EntThink
tankIdle =
GenericEntThink "tank_idle" $ \selfRef -> do
sound <- use $ gameBaseGlobals.gbGameImport.giSound
soundIdle <- use $ mTankGlobals.mTankSoundIdle
sound (Just selfRef) Constants.chanVoice soundIdle 1 Constants.attnIdle 0
return True
tankFramesStand :: V.Vector MFrameT
tankFramesStand =
V.fromList [ MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
]
tankMoveStand :: MMoveT
tankMoveStand = MMoveT "tankMoveStand" frameStand01 frameStand30 tankFramesStand Nothing
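-- An observation, not enforced by the types: each move's frame vector is
-- expected to hold exactly (lastFrame - firstFrame + 1) entries.  For
-- example, tankFramesStand has 30 entries covering frames 0..29
-- (frameStand01 .. frameStand30).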
tankStand :: EntThink
tankStand =
GenericEntThink "tank_stand" $ \selfRef -> do
modifyRef selfRef (\v -> v & eMonsterInfo.miCurrentMove .~ Just tankMoveStand)
return True
tankRun :: EntThink
tankRun =
GenericEntThink "tank_run" $ \selfRef -> do
self <- readRef selfRef
case self^.eEnemy of
Nothing ->
modifyRef selfRef (\v -> v & eMonsterInfo.miAIFlags %~ (.&. (complement Constants.aiBrutal)))
Just enemyRef -> do
enemy <- readRef enemyRef
case enemy^.eClient of
Nothing -> modifyRef selfRef (\v -> v & eMonsterInfo.miAIFlags %~ (.&. (complement Constants.aiBrutal)))
Just _ -> modifyRef selfRef (\v -> v & eMonsterInfo.miAIFlags %~ (.|. Constants.aiBrutal))
self' <- readRef selfRef
if (self'^.eMonsterInfo.miAIFlags) .&. Constants.aiStandGround /= 0
then do
modifyRef selfRef (\v -> v & eMonsterInfo.miCurrentMove .~ Just tankMoveStand)
else do
let currentMove = case self'^.eMonsterInfo.miCurrentMove of
Nothing -> tankMoveStartRun
Just move -> if (move^.mmId) == "tankMoveWalk" || (move^.mmId) == "tankMoveStartRun"
then tankMoveRun
else tankMoveStartRun
modifyRef selfRef (\v -> v & eMonsterInfo.miCurrentMove .~ Just currentMove)
return True
tankWalk :: EntThink
tankWalk =
GenericEntThink "tank_walk" $ \selfRef -> do
modifyRef selfRef (\v -> v & eMonsterInfo.miCurrentMove .~ Just tankMoveWalk)
return True
tankFramesStartWalk :: V.Vector MFrameT
tankFramesStartWalk =
V.fromList [ MFrameT (Just GameAI.aiWalk) 0 Nothing
, MFrameT (Just GameAI.aiWalk) 6 Nothing
, MFrameT (Just GameAI.aiWalk) 6 Nothing
, MFrameT (Just GameAI.aiWalk) 11 (Just tankFootStep)
]
tankMoveStartWalk :: MMoveT
tankMoveStartWalk = MMoveT "tankMoveStartWalk" frameWalk01 frameWalk04 tankFramesStartWalk (Just tankWalk)
tankFramesWalk :: V.Vector MFrameT
tankFramesWalk =
V.fromList [ MFrameT (Just GameAI.aiWalk) 4 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 3 Nothing
, MFrameT (Just GameAI.aiWalk) 2 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 4 Nothing
, MFrameT (Just GameAI.aiWalk) 4 (Just tankFootStep)
, MFrameT (Just GameAI.aiWalk) 3 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 4 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 7 Nothing
, MFrameT (Just GameAI.aiWalk) 7 Nothing
, MFrameT (Just GameAI.aiWalk) 6 Nothing
, MFrameT (Just GameAI.aiWalk) 6 (Just tankFootStep)
]
tankMoveWalk :: MMoveT
tankMoveWalk = MMoveT "tankMoveWalk" frameWalk05 frameWalk20 tankFramesWalk Nothing
tankFramesStopWalk :: V.Vector MFrameT
tankFramesStopWalk =
V.fromList [ MFrameT (Just GameAI.aiWalk) 3 Nothing
, MFrameT (Just GameAI.aiWalk) 3 Nothing
, MFrameT (Just GameAI.aiWalk) 2 Nothing
, MFrameT (Just GameAI.aiWalk) 2 Nothing
, MFrameT (Just GameAI.aiWalk) 4 (Just tankFootStep)
]
tankMoveStopWalk :: MMoveT
tankMoveStopWalk = MMoveT "tankMoveStopWalk" frameWalk21 frameWalk25 tankFramesStopWalk (Just tankStand)
tankFramesStartRun :: V.Vector MFrameT
tankFramesStartRun =
V.fromList [ MFrameT (Just GameAI.aiRun) 0 Nothing
, MFrameT (Just GameAI.aiRun) 6 Nothing
, MFrameT (Just GameAI.aiRun) 6 Nothing
, MFrameT (Just GameAI.aiRun) 11 (Just tankFootStep)
]
tankMoveStartRun :: MMoveT
tankMoveStartRun = MMoveT "tankMoveStartRun" frameWalk01 frameWalk04 tankFramesStartRun (Just tankRun)
tankFramesRun :: V.Vector MFrameT
tankFramesRun =
V.fromList [ MFrameT (Just GameAI.aiRun) 4 Nothing
, MFrameT (Just GameAI.aiRun) 5 Nothing
, MFrameT (Just GameAI.aiRun) 3 Nothing
, MFrameT (Just GameAI.aiRun) 2 Nothing
, MFrameT (Just GameAI.aiRun) 5 Nothing
, MFrameT (Just GameAI.aiRun) 5 Nothing
, MFrameT (Just GameAI.aiRun) 4 Nothing
, MFrameT (Just GameAI.aiRun) 4 (Just tankFootStep)
, MFrameT (Just GameAI.aiRun) 3 Nothing
, MFrameT (Just GameAI.aiRun) 5 Nothing
, MFrameT (Just GameAI.aiRun) 4 Nothing
, MFrameT (Just GameAI.aiRun) 5 Nothing
, MFrameT (Just GameAI.aiRun) 7 Nothing
, MFrameT (Just GameAI.aiRun) 7 Nothing
, MFrameT (Just GameAI.aiRun) 6 Nothing
, MFrameT (Just GameAI.aiRun) 6 (Just tankFootStep)
]
tankMoveRun :: MMoveT
tankMoveRun = MMoveT "tankMoveRun" frameWalk05 frameWalk20 tankFramesRun Nothing
tankFramesStopRun :: V.Vector MFrameT
tankFramesStopRun =
V.fromList [ MFrameT (Just GameAI.aiRun) 3 Nothing
, MFrameT (Just GameAI.aiRun) 3 Nothing
, MFrameT (Just GameAI.aiRun) 2 Nothing
, MFrameT (Just GameAI.aiRun) 2 Nothing
, MFrameT (Just GameAI.aiRun) 4 (Just tankFootStep)
]
tankMoveStopRun :: MMoveT
tankMoveStopRun = MMoveT "tankMoveStopRun" frameWalk21 frameWalk25 tankFramesStopRun (Just tankWalk)
tankFramesPain1 :: V.Vector MFrameT
tankFramesPain1 =
V.fromList [ MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
]
tankMovePain1 :: MMoveT
tankMovePain1 = MMoveT "tankMovePain1" framePain101 framePain104 tankFramesPain1 (Just tankRun)
tankFramesPain2 :: V.Vector MFrameT
tankFramesPain2 =
V.fromList [ MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
]
tankMovePain2 :: MMoveT
tankMovePain2 = MMoveT "tankMovePain2" framePain201 framePain205 tankFramesPain2 (Just tankRun)
tankFramesPain3 :: V.Vector MFrameT
tankFramesPain3 =
V.fromList [ MFrameT (Just GameAI.aiMove) (-7) Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 2 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 3 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 2 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 (Just tankFootStep)
]
tankMovePain3 :: MMoveT
tankMovePain3 = MMoveT "tankMovePain3" framePain301 framePain316 tankFramesPain3 (Just tankRun)
tankPain :: EntPain
tankPain =
GenericEntPain "tank_pain" $ \selfRef _ _ damage -> do
self <- readRef selfRef
when ((self^.eHealth) < (self^.eMaxHealth) `div` 2) $
modifyRef selfRef (\v -> v & eEntityState.esSkinNum .~ 1)
levelTime <- use $ gameBaseGlobals.gbLevel.llTime
r <- Lib.randomF
    let done = damage <= 10 || levelTime < (self^.ePainDebounceTime) || (damage <= 30 && r > 0.2)
unless done $ do
-- If hard or nightmare, don't go into pain while attacking
skillValue <- liftM (^.cvValue) skillCVar
let frame = self^.eEntityState.esFrame
                skip = skillValue >= 2 && (frame >= frameAttack301 && frame <= frameAttack330 || frame >= frameAttack101 && frame <= frameAttack116)
unless skip $ do
modifyRef selfRef (\v -> v & ePainDebounceTime .~ levelTime + 3)
soundPain <- use $ mTankGlobals.mTankSoundPain
sound <- use $ gameBaseGlobals.gbGameImport.giSound
sound (Just selfRef) Constants.chanVoice soundPain 1 Constants.attnNorm 0
unless (skillValue == 3) $ do -- no pain anims in nightmare
let currentMove = if | damage <= 30 -> tankMovePain1
| damage <= 60 -> tankMovePain2
| otherwise -> tankMovePain3
modifyRef selfRef (\v -> v & eMonsterInfo.miCurrentMove .~ Just currentMove)
tankBlaster :: EntThink
tankBlaster =
GenericEntThink "TankBlaster" $ \selfRef -> do
self <- readRef selfRef
let flashNumber = if | (self^.eEntityState.esFrame) == frameAttack110 -> Constants.mz2TankBlaster1
| (self^.eEntityState.esFrame) == frameAttack113 -> Constants.mz2TankBlaster2
| otherwise -> Constants.mz2TankBlaster3
(Just forward, Just right, _) = Math3D.angleVectors (self^.eEntityState.esAngles) True True False
start = Math3D.projectSource (self^.eEntityState.esOrigin) (MFlash.monsterFlashOffset V.! flashNumber) forward right
Just enemyRef = self^.eEnemy
enemy <- readRef enemyRef
let V3 a b c = enemy^.eEntityState.esOrigin
end = V3 a b (c + fromIntegral (enemy^.eViewHeight))
dir = end - start
Monster.monsterFireBlaster selfRef start dir 30 800 flashNumber Constants.efBlaster
return True
tankStrike :: EntThink
tankStrike =
GenericEntThink "TankStrike" $ \selfRef -> do
sound <- use $ gameBaseGlobals.gbGameImport.giSound
soundStrike <- use $ mTankGlobals.mTankSoundStrike
sound (Just selfRef) Constants.chanWeapon soundStrike 1 Constants.attnNorm 0
return True
tankRocket :: EntThink
tankRocket =
GenericEntThink "TankRocket" $ \selfRef -> do
self <- readRef selfRef
let flashNumber = if | (self^.eEntityState.esFrame) == frameAttack324 -> Constants.mz2TankRocket1
| (self^.eEntityState.esFrame) == frameAttack327 -> Constants.mz2TankRocket2
| otherwise -> Constants.mz2TankRocket3
(Just forward, Just right, _) = Math3D.angleVectors (self^.eEntityState.esAngles) True True False
start = Math3D.projectSource (self^.eEntityState.esOrigin) (MFlash.monsterFlashOffset V.! flashNumber) forward right
Just enemyRef = self^.eEnemy
enemy <- readRef enemyRef
let V3 a b c = enemy^.eEntityState.esOrigin
vec = V3 a b (c + fromIntegral (enemy^.eViewHeight))
dir = normalize (vec - start)
Monster.monsterFireRocket selfRef start dir 50 550 flashNumber
return True
tankMachineGun :: EntThink
tankMachineGun =
GenericEntThink "TankMachineGun" $ \_ -> do
io (putStrLn "MTank.tankMachineGun") >> undefined -- TODO
tankReAttackBlaster :: EntThink
tankReAttackBlaster =
GenericEntThink "tank_reattack_blaster" $ \_ -> do
io (putStrLn "MTank.tankReAttackBlaster") >> undefined -- TODO
tankFramesAttackBlast :: V.Vector MFrameT
tankFramesAttackBlast =
V.fromList [ MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) (-1) Nothing
, MFrameT (Just GameAI.aiCharge) (-2) Nothing
, MFrameT (Just GameAI.aiCharge) (-1) Nothing
, MFrameT (Just GameAI.aiCharge) (-1) Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 (Just tankBlaster) -- 10
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 (Just tankBlaster)
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 (Just tankBlaster) -- 16
]
tankMoveAttackBlast :: MMoveT
tankMoveAttackBlast = MMoveT "tankMoveAttackBlast" frameAttack101 frameAttack116 tankFramesAttackBlast (Just tankReAttackBlaster)
tankFramesReAttackBlast :: V.Vector MFrameT
tankFramesReAttackBlast =
V.fromList [ MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 (Just tankBlaster)
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 (Just tankBlaster) -- 16
]
tankMoveReAttackBlast :: MMoveT
tankMoveReAttackBlast = MMoveT "tankMoveReAttackBlast" frameAttack111 frameAttack116 tankFramesReAttackBlast (Just tankReAttackBlaster)
tankFramesAttackPostBlast :: V.Vector MFrameT
tankFramesAttackPostBlast =
V.fromList [ MFrameT (Just GameAI.aiMove) 0 Nothing -- 17
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 2 Nothing
, MFrameT (Just GameAI.aiMove) 3 Nothing
, MFrameT (Just GameAI.aiMove) 2 Nothing
, MFrameT (Just GameAI.aiMove) (-2) (Just tankFootStep) -- 22
]
tankMoveAttackPostBlast :: MMoveT
tankMoveAttackPostBlast = MMoveT "tankMoveAttackPostBlast" frameAttack117 frameAttack122 tankFramesAttackPostBlast (Just tankRun)
tankPostStrike :: EntThink
tankPostStrike =
GenericEntThink "tank_poststrike" $ \selfRef -> do
modifyRef selfRef (\v -> v & eEnemy .~ Nothing)
void $ think tankRun selfRef
return True
tankDoAttackRocket :: EntThink
tankDoAttackRocket =
GenericEntThink "tank_doattack_rocket" $ \selfRef -> do
modifyRef selfRef (\v -> v & eMonsterInfo.miCurrentMove .~ Just tankMoveAttackFireRocket)
return True
tankReFireRocket :: EntThink
tankReFireRocket =
GenericEntThink "tank_refire_rocket" $ \_ -> do
io (putStrLn "MTank.tankReFireRocket") >> undefined -- TODO
tankFramesAttackStrike :: V.Vector MFrameT
tankFramesAttackStrike =
V.fromList [ MFrameT (Just GameAI.aiMove) 3 Nothing
, MFrameT (Just GameAI.aiMove) 2 Nothing
, MFrameT (Just GameAI.aiMove) 2 Nothing
, MFrameT (Just GameAI.aiMove) 1 Nothing
, MFrameT (Just GameAI.aiMove) 6 Nothing
, MFrameT (Just GameAI.aiMove) 7 Nothing
, MFrameT (Just GameAI.aiMove) 9 (Just tankFootStep)
, MFrameT (Just GameAI.aiMove) 2 Nothing
, MFrameT (Just GameAI.aiMove) 1 Nothing
, MFrameT (Just GameAI.aiMove) 2 Nothing
, MFrameT (Just GameAI.aiMove) 2 (Just tankFootStep)
, MFrameT (Just GameAI.aiMove) 2 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) (-2) Nothing
, MFrameT (Just GameAI.aiMove) (-2) Nothing
, MFrameT (Just GameAI.aiMove) 0 (Just tankWindUp)
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 (Just tankStrike)
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) (-1) Nothing
, MFrameT (Just GameAI.aiMove) (-1) Nothing
, MFrameT (Just GameAI.aiMove) (-1) Nothing
, MFrameT (Just GameAI.aiMove) (-1) Nothing
, MFrameT (Just GameAI.aiMove) (-1) Nothing
, MFrameT (Just GameAI.aiMove) (-3) Nothing
, MFrameT (Just GameAI.aiMove) (-10) Nothing
, MFrameT (Just GameAI.aiMove) (-10) Nothing
, MFrameT (Just GameAI.aiMove) (-2) Nothing
, MFrameT (Just GameAI.aiMove) (-3) Nothing
, MFrameT (Just GameAI.aiMove) (-2) (Just tankFootStep)
]
tankMoveAttackStrike :: MMoveT
tankMoveAttackStrike = MMoveT "tankMoveAttackStrike" frameAttack201 frameAttack238 tankFramesAttackStrike (Just tankPostStrike)
tankFramesAttackPreRocket :: V.Vector MFrameT
tankFramesAttackPreRocket =
V.fromList [ MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing -- 10
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 1 Nothing
, MFrameT (Just GameAI.aiCharge) 2 Nothing
, MFrameT (Just GameAI.aiCharge) 7 Nothing
, MFrameT (Just GameAI.aiCharge) 7 Nothing
, MFrameT (Just GameAI.aiCharge) 7 (Just tankFootStep)
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
             , MFrameT (Just GameAI.aiCharge) 0 Nothing -- 20
, MFrameT (Just GameAI.aiCharge) (-3) Nothing
]
tankMoveAttackPreRocket :: MMoveT
tankMoveAttackPreRocket = MMoveT "tankMoveAttackPreRocket" frameAttack301 frameAttack321 tankFramesAttackPreRocket (Just tankDoAttackRocket)
tankFramesAttackFireRocket :: V.Vector MFrameT
tankFramesAttackFireRocket =
V.fromList [ MFrameT (Just GameAI.aiCharge) (-3) Nothing -- Loop Start 22
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 (Just tankRocket) -- 24
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 (Just tankRocket)
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) (-1) (Just tankRocket) -- 30 Loop End
]
tankMoveAttackFireRocket :: MMoveT
tankMoveAttackFireRocket = MMoveT "tankMoveAttackFireRocket" frameAttack322 frameAttack330 tankFramesAttackFireRocket (Just tankReFireRocket)
tankFramesAttackPostRocket :: V.Vector MFrameT
tankFramesAttackPostRocket =
V.fromList [ MFrameT (Just GameAI.aiCharge) 0 Nothing -- 31
, MFrameT (Just GameAI.aiCharge) (-1) Nothing
, MFrameT (Just GameAI.aiCharge) (-1) Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 2 Nothing
, MFrameT (Just GameAI.aiCharge) 3 Nothing
, MFrameT (Just GameAI.aiCharge) 4 Nothing
, MFrameT (Just GameAI.aiCharge) 2 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing -- 40
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) (-9) Nothing
, MFrameT (Just GameAI.aiCharge) (-8) Nothing
, MFrameT (Just GameAI.aiCharge) (-7) Nothing
, MFrameT (Just GameAI.aiCharge) (-1) Nothing
, MFrameT (Just GameAI.aiCharge) (-1) (Just tankFootStep)
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing -- 50
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
]
tankMoveAttackPostRocket :: MMoveT
tankMoveAttackPostRocket = MMoveT "tankMoveAttackPostRocket" frameAttack331 frameAttack353 tankFramesAttackPostRocket (Just tankRun)
tankFramesAttackChain :: V.Vector MFrameT
tankFramesAttackChain =
V.fromList [ MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT Nothing 0 (Just tankMachineGun)
, MFrameT Nothing 0 (Just tankMachineGun)
, MFrameT Nothing 0 (Just tankMachineGun)
, MFrameT Nothing 0 (Just tankMachineGun)
, MFrameT Nothing 0 (Just tankMachineGun)
, MFrameT Nothing 0 (Just tankMachineGun)
, MFrameT Nothing 0 (Just tankMachineGun)
, MFrameT Nothing 0 (Just tankMachineGun)
, MFrameT Nothing 0 (Just tankMachineGun)
, MFrameT Nothing 0 (Just tankMachineGun)
, MFrameT Nothing 0 (Just tankMachineGun)
, MFrameT Nothing 0 (Just tankMachineGun)
, MFrameT Nothing 0 (Just tankMachineGun)
, MFrameT Nothing 0 (Just tankMachineGun)
, MFrameT Nothing 0 (Just tankMachineGun)
, MFrameT Nothing 0 (Just tankMachineGun)
, MFrameT Nothing 0 (Just tankMachineGun)
, MFrameT Nothing 0 (Just tankMachineGun)
, MFrameT Nothing 0 (Just tankMachineGun)
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
]
tankMoveAttackChain :: MMoveT
tankMoveAttackChain = MMoveT "tankMoveAttackChain" frameAttack401 frameAttack429 tankFramesAttackChain (Just tankRun)
tankAttack :: EntThink
tankAttack =
GenericEntThink "tank_attack" $ \_ -> do
io (putStrLn "MTank.tankAttack") >> undefined -- TODO
tankDead :: EntThink
tankDead =
GenericEntThink "tank_dead" $ \selfRef -> do
modifyRef selfRef (\v -> v & eMins .~ V3 (-16) (-16) (-16)
& eMaxs .~ V3 16 16 0
& eMoveType .~ Constants.moveTypeToss
& eSvFlags %~ (.|. Constants.svfDeadMonster)
& eNextThink .~ 0)
linkEntity <- use $ gameBaseGlobals.gbGameImport.giLinkEntity
linkEntity selfRef
return True
tankFramesDeath1 :: V.Vector MFrameT
tankFramesDeath1 =
V.fromList [ MFrameT (Just GameAI.aiMove) (-7) Nothing
, MFrameT (Just GameAI.aiMove) (-2) Nothing
, MFrameT (Just GameAI.aiMove) (-2) Nothing
, MFrameT (Just GameAI.aiMove) 1 Nothing
, MFrameT (Just GameAI.aiMove) 3 Nothing
, MFrameT (Just GameAI.aiMove) 6 Nothing
, MFrameT (Just GameAI.aiMove) 1 Nothing
, MFrameT (Just GameAI.aiMove) 1 Nothing
, MFrameT (Just GameAI.aiMove) 2 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) (-2) Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) (-3) Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) (-4) Nothing
, MFrameT (Just GameAI.aiMove) (-6) Nothing
, MFrameT (Just GameAI.aiMove) (-4) Nothing
, MFrameT (Just GameAI.aiMove) (-5) Nothing
, MFrameT (Just GameAI.aiMove) (-7) Nothing
, MFrameT (Just GameAI.aiMove) (-15) (Just tankThud)
, MFrameT (Just GameAI.aiMove) (-5) Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
]
tankMoveDeath :: MMoveT
tankMoveDeath = MMoveT "tankMoveDeath" frameDeath101 frameDeath132 tankFramesDeath1 (Just tankDead)
tankDie :: EntDie
tankDie =
GenericEntDie "tank_die" $ \_ _ _ _ _ -> do
io (putStrLn "MTank.tankDie") >> undefined -- TODO
{-
- QUAKED monster_tank (1 .5 0) (-32 -32 -16) (32 32 72) Ambush
- Trigger_Spawn Sight
-}
{-
- QUAKED monster_tank_commander (1 .5 0) (-32 -32 -16) (32 32 72) Ambush
- Trigger_Spawn Sight
-}
spMonsterTank :: EntThink
spMonsterTank =
GenericEntThink "SP_monster_tank" $ \_ -> do
io (putStrLn "MTank.spMonsterTank") >> undefined -- TODO
|
ksaveljev/hake-2
|
src/Game/Monsters/MTank.hs
|
Haskell
|
bsd-3-clause
| 32,835
|
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[PatSyntax]{Abstract Haskell syntax---patterns}
-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE UndecidableInstances #-} -- Note [Pass sensitive types]
-- in module PlaceHolder
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE TypeFamilies #-}
module HsPat (
Pat(..), InPat, OutPat, LPat,
HsConPatDetails, hsConPatArgs,
HsRecFields(..), HsRecField'(..), LHsRecField',
HsRecField, LHsRecField,
HsRecUpdField, LHsRecUpdField,
hsRecFields, hsRecFieldSel, hsRecFieldId, hsRecFieldsArgs,
hsRecUpdFieldId, hsRecUpdFieldOcc, hsRecUpdFieldRdr,
mkPrefixConPat, mkCharLitPat, mkNilPat,
isUnliftedHsBind, looksLazyPatBind,
isUnliftedLPat, isBangedLPat, isBangedPatBind,
hsPatNeedsParens,
isIrrefutableHsPat,
collectEvVarsPats,
pprParendLPat, pprConArgs
) where
import {-# SOURCE #-} HsExpr (SyntaxExpr, LHsExpr, HsSplice, pprLExpr, pprSplice)
-- friends:
import HsBinds
import HsLit
import PlaceHolder
import HsTypes
import TcEvidence
import BasicTypes
-- others:
import PprCore ( {- instance OutputableBndr TyVar -} )
import TysWiredIn
import Var
import RdrName ( RdrName )
import ConLike
import DataCon
import TyCon
import Outputable
import Type
import SrcLoc
import Bag -- collect ev vars from pats
import DynFlags( gopt, GeneralFlag(..) )
import Maybes
-- libraries:
import Data.Data hiding (TyCon,Fixity)
type InPat id = LPat id -- No 'Out' constructors
type OutPat id = LPat id -- No 'In' constructors
type LPat id = Located (Pat id)
-- | Pattern
--
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnBang'
-- For details on above see note [Api annotations] in ApiAnnotation
data Pat id
= ------------ Simple patterns ---------------
WildPat (PostTc id Type) -- ^ Wildcard Pattern
-- The sole reason for a type on a WildPat is to
-- support hsPatType :: Pat Id -> Type
| VarPat (Located id) -- ^ Variable Pattern
-- See Note [Located RdrNames] in HsExpr
| LazyPat (LPat id) -- ^ Lazy Pattern
-- ^ - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnTilde'
-- For details on above see note [Api annotations] in ApiAnnotation
| AsPat (Located id) (LPat id) -- ^ As pattern
-- ^ - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnAt'
-- For details on above see note [Api annotations] in ApiAnnotation
| ParPat (LPat id) -- ^ Parenthesised pattern
-- See Note [Parens in HsSyn] in HsExpr
-- ^ - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen' @'('@,
-- 'ApiAnnotation.AnnClose' @')'@
-- For details on above see note [Api annotations] in ApiAnnotation
| BangPat (LPat id) -- ^ Bang pattern
-- ^ - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnBang'
-- For details on above see note [Api annotations] in ApiAnnotation
------------ Lists, tuples, arrays ---------------
| ListPat [LPat id]
(PostTc id Type) -- The type of the elements
(Maybe (PostTc id Type, SyntaxExpr id)) -- For rebindable syntax
-- For OverloadedLists a Just (ty,fn) gives
-- overall type of the pattern, and the toList
-- function to convert the scrutinee to a list value
-- ^ Syntactic List
--
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen' @'['@,
-- 'ApiAnnotation.AnnClose' @']'@
-- For details on above see note [Api annotations] in ApiAnnotation
| TuplePat [LPat id] -- Tuple sub-patterns
Boxity -- UnitPat is TuplePat []
[PostTc id Type] -- [] before typechecker, filled in afterwards
-- with the types of the tuple components
-- You might think that the PostTc id Type was redundant, because we can
-- get the pattern type by getting the types of the sub-patterns.
-- But it's essential
-- data T a where
-- T1 :: Int -> T Int
-- f :: (T a, a) -> Int
-- f (T1 x, z) = z
-- When desugaring, we must generate
-- f = /\a. \v::a. case v of (t::T a, w::a) ->
-- case t of (T1 (x::Int)) ->
-- Note the (w::a), NOT (w::Int), because we have not yet
-- refined 'a' to Int. So we must know that the second component
-- of the tuple is of type 'a' not Int. See selectMatchVar
-- (June 14: I'm not sure this comment is right; the sub-patterns
-- will be wrapped in CoPats, no?)
-- ^ Tuple sub-patterns
--
-- - 'ApiAnnotation.AnnKeywordId' :
-- 'ApiAnnotation.AnnOpen' @'('@ or @'(#'@,
-- 'ApiAnnotation.AnnClose' @')'@ or @'#)'@
| SumPat (LPat id) -- Sum sub-pattern
ConTag -- Alternative (one-based)
Arity -- Arity
(PostTc id [Type]) -- PlaceHolder before typechecker, filled in
-- afterwards with the types of the
-- alternative
-- ^ Anonymous sum pattern
--
-- - 'ApiAnnotation.AnnKeywordId' :
-- 'ApiAnnotation.AnnOpen' @'(#'@,
-- 'ApiAnnotation.AnnClose' @'#)'@
-- For details on above see note [Api annotations] in ApiAnnotation
| PArrPat [LPat id] -- Syntactic parallel array
(PostTc id Type) -- The type of the elements
-- ^ - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen' @'[:'@,
-- 'ApiAnnotation.AnnClose' @':]'@
-- For details on above see note [Api annotations] in ApiAnnotation
------------ Constructor patterns ---------------
| ConPatIn (Located id)
(HsConPatDetails id)
-- ^ Constructor Pattern In
| ConPatOut {
pat_con :: Located ConLike,
pat_arg_tys :: [Type], -- The universal arg types, 1-1 with the universal
-- tyvars of the constructor/pattern synonym
-- Use (conLikeResTy pat_con pat_arg_tys) to get
-- the type of the pattern
pat_tvs :: [TyVar], -- Existentially bound type variables
-- in correctly-scoped order e.g. [k:*, x:k]
pat_dicts :: [EvVar], -- Ditto *coercion variables* and *dictionaries*
-- One reason for putting coercion variable here, I think,
-- is to ensure their kinds are zonked
pat_binds :: TcEvBinds, -- Bindings involving those dictionaries
pat_args :: HsConPatDetails id,
pat_wrap :: HsWrapper -- Extra wrapper to pass to the matcher
-- Only relevant for pattern-synonyms;
-- ignored for data cons
}
-- ^ Constructor Pattern Out
------------ View patterns ---------------
-- | - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnRarrow'
-- For details on above see note [Api annotations] in ApiAnnotation
| ViewPat (LHsExpr id)
(LPat id)
(PostTc id Type) -- The overall type of the pattern
-- (= the argument type of the view function)
-- for hsPatType.
-- ^ View Pattern
------------ Pattern splices ---------------
-- | - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen' @'$('@
-- 'ApiAnnotation.AnnClose' @')'@
-- For details on above see note [Api annotations] in ApiAnnotation
| SplicePat (HsSplice id) -- ^ Splice Pattern (Includes quasi-quotes)
------------ Literal and n+k patterns ---------------
| LitPat HsLit -- ^ Literal Pattern
-- Used for *non-overloaded* literal patterns:
-- Int#, Char#, Int, Char, String, etc.
| NPat -- Natural Pattern
-- Used for all overloaded literals,
-- including overloaded strings with -XOverloadedStrings
(Located (HsOverLit id)) -- ALWAYS positive
(Maybe (SyntaxExpr id)) -- Just (Name of 'negate') for negative
-- patterns, Nothing otherwise
(SyntaxExpr id) -- Equality checker, of type t->t->Bool
(PostTc id Type) -- Overall type of pattern. Might be
-- different than the literal's type
-- if (==) or negate changes the type
-- ^ Natural Pattern
--
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnVal' @'+'@
-- For details on above see note [Api annotations] in ApiAnnotation
| NPlusKPat (Located id) -- n+k pattern
(Located (HsOverLit id)) -- It'll always be an HsIntegral
(HsOverLit id) -- See Note [NPlusK patterns] in TcPat
-- NB: This could be (PostTc ...), but that induced a
-- a new hs-boot file. Not worth it.
(SyntaxExpr id) -- (>=) function, of type t1->t2->Bool
(SyntaxExpr id) -- Name of '-' (see RnEnv.lookupSyntaxName)
(PostTc id Type) -- Type of overall pattern
-- ^ n+k pattern
------------ Pattern type signatures ---------------
-- | - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnDcolon'
-- For details on above see note [Api annotations] in ApiAnnotation
| SigPatIn (LPat id) -- Pattern with a type signature
(LHsSigWcType id) -- Signature can bind both
-- kind and type vars
-- ^ Pattern with a type signature
| SigPatOut (LPat id)
Type
-- ^ Pattern with a type signature
------------ Pattern coercions (translation only) ---------------
| CoPat HsWrapper -- Coercion Pattern
-- If co :: t1 ~ t2, p :: t2,
-- then (CoPat co p) :: t1
(Pat id) -- Why not LPat? Ans: existing locn will do
Type -- Type of whole pattern, t1
-- During desugaring a (CoPat co pat) turns into a cast with 'co' on
-- the scrutinee, followed by a match on 'pat'
-- ^ Coercion Pattern
deriving instance (DataId id) => Data (Pat id)
-- | Haskell Constructor Pattern Details
type HsConPatDetails id = HsConDetails (LPat id) (HsRecFields id (LPat id))
hsConPatArgs :: HsConPatDetails id -> [LPat id]
hsConPatArgs (PrefixCon ps) = ps
hsConPatArgs (RecCon fs) = map (hsRecFieldArg . unLoc) (rec_flds fs)
hsConPatArgs (InfixCon p1 p2) = [p1,p2]
-- | Haskell Record Fields
--
-- HsRecFields is used only for patterns and expressions (not data type
-- declarations)
data HsRecFields id arg -- A bunch of record fields
-- { x = 3, y = True }
-- Used for both expressions and patterns
= HsRecFields { rec_flds :: [LHsRecField id arg],
rec_dotdot :: Maybe Int } -- Note [DotDot fields]
deriving (Functor, Foldable, Traversable)
deriving instance (DataId id, Data arg) => Data (HsRecFields id arg)
-- Note [DotDot fields]
-- ~~~~~~~~~~~~~~~~~~~~
-- The rec_dotdot field means this:
-- Nothing => the normal case
-- Just n => the group uses ".." notation,
--
-- In the latter case:
--
-- *before* renamer: rec_flds are exactly the n user-written fields
--
-- *after* renamer: rec_flds includes *all* fields, with
-- the first 'n' being the user-written ones
-- and the remainder being 'filled in' implicitly
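-- A purely illustrative example (field names invented): given
--      data T = MkT { x, y, z :: Int }
-- the pattern  MkT { x = 3, .. }  is parsed with
--      rec_flds = [x = 3]   and   rec_dotdot = Just 1
-- and after the renamer rec_flds becomes [x = 3, y = y, z = z], the last
-- two fields having been filled in implicitly.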
-- | Located Haskell Record Field
type LHsRecField' id arg = Located (HsRecField' id arg)
-- | Located Haskell Record Field
type LHsRecField id arg = Located (HsRecField id arg)
-- | Located Haskell Record Update Field
type LHsRecUpdField id = Located (HsRecUpdField id)
-- | Haskell Record Field
type HsRecField id arg = HsRecField' (FieldOcc id) arg
-- | Haskell Record Update Field
type HsRecUpdField id = HsRecField' (AmbiguousFieldOcc id) (LHsExpr id)
-- | Haskell Record Field
--
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnEqual',
--
-- For details on above see note [Api annotations] in ApiAnnotation
data HsRecField' id arg = HsRecField {
hsRecFieldLbl :: Located id,
hsRecFieldArg :: arg, -- ^ Filled in by renamer when punning
hsRecPun :: Bool -- ^ Note [Punning]
} deriving (Data, Functor, Foldable, Traversable)
-- Note [Punning]
-- ~~~~~~~~~~~~~~
-- If you write T { x, y = v+1 }, the HsRecFields will be
-- HsRecField x x True ...
-- HsRecField y (v+1) False ...
-- That is, for "punned" field x is expanded (in the renamer)
-- to x=x; but with a punning flag so we can detect it later
-- (e.g. when pretty printing)
--
-- If the original field was qualified, we un-qualify it, thus
-- T { A.x } means T { A.x = x }
-- Note [HsRecField and HsRecUpdField]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- A HsRecField (used for record construction and pattern matching)
-- contains an unambiguous occurrence of a field (i.e. a FieldOcc).
-- We can't just store the Name, because thanks to
-- DuplicateRecordFields this may not correspond to the label the user
-- wrote.
--
-- A HsRecUpdField (used for record update) contains a potentially
-- ambiguous occurrence of a field (an AmbiguousFieldOcc). The
-- renamer will fill in the selector function if it can, but if the
-- selector is ambiguous the renamer will defer to the typechecker.
-- After the typechecker, a unique selector will have been determined.
--
-- The renamer produces an Unambiguous result if it can, rather than
-- just doing the lookup in the typechecker, so that completely
-- unambiguous updates can be represented by 'DsMeta.repUpdFields'.
--
-- For example, suppose we have:
--
-- data S = MkS { x :: Int }
-- data T = MkT { x :: Int }
--
-- f z = (z { x = 3 }) :: S
--
-- The parsed HsRecUpdField corresponding to the record update will have:
--
-- hsRecFieldLbl = Unambiguous "x" PlaceHolder :: AmbiguousFieldOcc RdrName
--
-- After the renamer, this will become:
--
-- hsRecFieldLbl = Ambiguous "x" PlaceHolder :: AmbiguousFieldOcc Name
--
-- (note that the Unambiguous constructor is not type-correct here).
-- The typechecker will determine the particular selector:
--
-- hsRecFieldLbl = Unambiguous "x" $sel:x:MkS :: AmbiguousFieldOcc Id
--
-- See also Note [Disambiguating record fields] in TcExpr.
hsRecFields :: HsRecFields id arg -> [PostRn id id]
hsRecFields rbinds = map (unLoc . hsRecFieldSel . unLoc) (rec_flds rbinds)
-- Probably won't typecheck at once, things have changed :/
hsRecFieldsArgs :: HsRecFields id arg -> [arg]
hsRecFieldsArgs rbinds = map (hsRecFieldArg . unLoc) (rec_flds rbinds)
hsRecFieldSel :: HsRecField name arg -> Located (PostRn name name)
hsRecFieldSel = fmap selectorFieldOcc . hsRecFieldLbl
hsRecFieldId :: HsRecField Id arg -> Located Id
hsRecFieldId = hsRecFieldSel
hsRecUpdFieldRdr :: HsRecUpdField id -> Located RdrName
hsRecUpdFieldRdr = fmap rdrNameAmbiguousFieldOcc . hsRecFieldLbl
hsRecUpdFieldId :: HsRecField' (AmbiguousFieldOcc Id) arg -> Located Id
hsRecUpdFieldId = fmap selectorFieldOcc . hsRecUpdFieldOcc
hsRecUpdFieldOcc :: HsRecField' (AmbiguousFieldOcc Id) arg -> LFieldOcc Id
hsRecUpdFieldOcc = fmap unambiguousFieldOcc . hsRecFieldLbl
{-
************************************************************************
* *
* Printing patterns
* *
************************************************************************
-}
instance (OutputableBndrId name) => Outputable (Pat name) where
ppr = pprPat
pprPatBndr :: OutputableBndr name => name -> SDoc
pprPatBndr var -- Print with type info if -dppr-debug is on
= getPprStyle $ \ sty ->
if debugStyle sty then
parens (pprBndr LambdaBind var) -- Could pass the site to pprPat
-- but is it worth it?
else
pprPrefixOcc var
pprParendLPat :: (OutputableBndrId name) => LPat name -> SDoc
pprParendLPat (L _ p) = pprParendPat p
pprParendPat :: (OutputableBndrId name) => Pat name -> SDoc
pprParendPat p = sdocWithDynFlags $ \ dflags ->
if need_parens dflags p
then parens (pprPat p)
else pprPat p
where
need_parens dflags p
| CoPat {} <- p = gopt Opt_PrintTypecheckerElaboration dflags
| otherwise = hsPatNeedsParens p
-- For a CoPat we need parens if we are going to show it, which
-- we do if -fprint-typechecker-elaboration is on (c.f. pprHsWrapper)
-- But otherwise the CoPat is discarded, so it
-- is the pattern inside that matters. Sigh.
pprPat :: (OutputableBndrId name) => Pat name -> SDoc
pprPat (VarPat (L _ var)) = pprPatBndr var
pprPat (WildPat _) = char '_'
pprPat (LazyPat pat) = char '~' <> pprParendLPat pat
pprPat (BangPat pat) = char '!' <> pprParendLPat pat
pprPat (AsPat name pat) = hcat [pprPrefixOcc (unLoc name), char '@', pprParendLPat pat]
pprPat (ViewPat expr pat _) = hcat [pprLExpr expr, text " -> ", ppr pat]
pprPat (ParPat pat) = parens (ppr pat)
pprPat (LitPat s) = ppr s
pprPat (NPat l Nothing _ _) = ppr l
pprPat (NPat l (Just _) _ _) = char '-' <> ppr l
pprPat (NPlusKPat n k _ _ _ _)= hcat [ppr n, char '+', ppr k]
pprPat (SplicePat splice) = pprSplice splice
pprPat (CoPat co pat _) = pprHsWrapper co (\parens -> if parens
then pprParendPat pat
else pprPat pat)
pprPat (SigPatIn pat ty) = ppr pat <+> dcolon <+> ppr ty
pprPat (SigPatOut pat ty) = ppr pat <+> dcolon <+> ppr ty
pprPat (ListPat pats _ _) = brackets (interpp'SP pats)
pprPat (PArrPat pats _) = paBrackets (interpp'SP pats)
pprPat (TuplePat pats bx _) = tupleParens (boxityTupleSort bx) (pprWithCommas ppr pats)
pprPat (SumPat pat alt arity _) = sumParens (pprAlternative ppr pat alt arity)
pprPat (ConPatIn con details) = pprUserCon (unLoc con) details
pprPat (ConPatOut { pat_con = con, pat_tvs = tvs, pat_dicts = dicts,
pat_binds = binds, pat_args = details })
= sdocWithDynFlags $ \dflags ->
-- Tiresome; in TcBinds.tcRhs we print out a
-- typechecked Pat in an error message,
-- and we want to make sure it prints nicely
if gopt Opt_PrintTypecheckerElaboration dflags then
ppr con
<> braces (sep [ hsep (map pprPatBndr (tvs ++ dicts))
, ppr binds])
<+> pprConArgs details
else pprUserCon (unLoc con) details
pprUserCon :: (OutputableBndr con, OutputableBndrId id)
=> con -> HsConPatDetails id -> SDoc
pprUserCon c (InfixCon p1 p2) = ppr p1 <+> pprInfixOcc c <+> ppr p2
pprUserCon c details = pprPrefixOcc c <+> pprConArgs details
pprConArgs :: (OutputableBndrId id) => HsConPatDetails id -> SDoc
pprConArgs (PrefixCon pats) = sep (map pprParendLPat pats)
pprConArgs (InfixCon p1 p2) = sep [pprParendLPat p1, pprParendLPat p2]
pprConArgs (RecCon rpats) = ppr rpats
instance (Outputable arg)
=> Outputable (HsRecFields id arg) where
ppr (HsRecFields { rec_flds = flds, rec_dotdot = Nothing })
= braces (fsep (punctuate comma (map ppr flds)))
ppr (HsRecFields { rec_flds = flds, rec_dotdot = Just n })
= braces (fsep (punctuate comma (map ppr (take n flds) ++ [dotdot])))
where
dotdot = text ".." <+> ifPprDebug (ppr (drop n flds))
instance (Outputable id, Outputable arg)
=> Outputable (HsRecField' id arg) where
ppr (HsRecField { hsRecFieldLbl = f, hsRecFieldArg = arg,
hsRecPun = pun })
= ppr f <+> (ppUnless pun $ equals <+> ppr arg)
{-
************************************************************************
* *
* Building patterns
* *
************************************************************************
-}
mkPrefixConPat :: DataCon -> [OutPat id] -> [Type] -> OutPat id
-- Make a vanilla Prefix constructor pattern
mkPrefixConPat dc pats tys
= noLoc $ ConPatOut { pat_con = noLoc (RealDataCon dc), pat_tvs = [], pat_dicts = [],
pat_binds = emptyTcEvBinds, pat_args = PrefixCon pats,
pat_arg_tys = tys, pat_wrap = idHsWrapper }
mkNilPat :: Type -> OutPat id
mkNilPat ty = mkPrefixConPat nilDataCon [] [ty]
mkCharLitPat :: String -> Char -> OutPat id
mkCharLitPat src c = mkPrefixConPat charDataCon
[noLoc $ LitPat (HsCharPrim src c)] []
{-
************************************************************************
* *
* Predicates for checking things about pattern-lists in EquationInfo *
* *
************************************************************************
\subsection[Pat-list-predicates]{Look for interesting things in patterns}
Unlike in the Wadler chapter, where patterns are either ``variables''
or ``constructors,'' here we distinguish between:
\begin{description}
\item[unfailable:]
Patterns that cannot fail to match: variables, wildcards, and lazy
patterns.
These are the irrefutable patterns; the two other categories
are refutable patterns.
\item[constructor:]
A non-literal constructor pattern (see next category).
\item[literal patterns:]
At least the numeric ones may be overloaded.
\end{description}
A pattern is in {\em exactly one} of the above three categories; `as'
patterns are treated specially, of course.
The 1.3 report defines what ``irrefutable'' and ``failure-free'' patterns are.
-}
isUnliftedLPat :: LPat id -> Bool
isUnliftedLPat (L _ (ParPat p)) = isUnliftedLPat p
isUnliftedLPat (L _ (TuplePat _ Unboxed _)) = True
isUnliftedLPat (L _ (SumPat _ _ _ _)) = True
isUnliftedLPat _ = False
isUnliftedHsBind :: HsBind id -> Bool
-- A pattern binding with an outermost bang or unboxed tuple or sum must be
-- matched strictly.
-- Defined in this module because HsPat is above HsBinds in the import graph
isUnliftedHsBind (PatBind { pat_lhs = p }) = isUnliftedLPat p
isUnliftedHsBind _ = False
isBangedPatBind :: HsBind id -> Bool
isBangedPatBind (PatBind {pat_lhs = pat}) = isBangedLPat pat
isBangedPatBind _ = False
isBangedLPat :: LPat id -> Bool
isBangedLPat (L _ (ParPat p)) = isBangedLPat p
isBangedLPat (L _ (BangPat {})) = True
isBangedLPat _ = False
looksLazyPatBind :: HsBind id -> Bool
-- Returns True of anything *except*
-- a StrictHsBind (as above) or
-- a VarPat
-- In particular, returns True of a pattern binding with a compound pattern, like (I# x)
looksLazyPatBind (PatBind { pat_lhs = p }) = looksLazyLPat p
looksLazyPatBind _ = False
looksLazyLPat :: LPat id -> Bool
looksLazyLPat (L _ (ParPat p)) = looksLazyLPat p
looksLazyLPat (L _ (AsPat _ p)) = looksLazyLPat p
looksLazyLPat (L _ (BangPat {})) = False
looksLazyLPat (L _ (TuplePat _ Unboxed _)) = False
looksLazyLPat (L _ (SumPat _ _ _ _)) = False
looksLazyLPat (L _ (VarPat {})) = False
looksLazyLPat (L _ (WildPat {})) = False
looksLazyLPat _ = True
isIrrefutableHsPat :: (OutputableBndrId id) => LPat id -> Bool
-- (isIrrefutableHsPat p) is true if matching against p cannot fail,
-- in the sense of falling through to the next pattern.
-- (NB: this is not quite the same as the (silly) defn
-- in 3.17.2 of the Haskell 98 report.)
--
-- WARNING: isIrrefutableHsPat returns False if it's in doubt.
-- Specifically on a ConPatIn, which is what it sees for a
-- (LPat Name) in the renamer, it doesn't know the size of the
-- constructor family, so it returns False. Result: only
-- tuple patterns are considered irrefutable at the renamer stage.
--
-- But if it returns True, the pattern is definitely irrefutable
isIrrefutableHsPat pat
= go pat
where
go (L _ pat) = go1 pat
go1 (WildPat {}) = True
go1 (VarPat {}) = True
go1 (LazyPat {}) = True
go1 (BangPat pat) = go pat
go1 (CoPat _ pat _) = go1 pat
go1 (ParPat pat) = go pat
go1 (AsPat _ pat) = go pat
go1 (ViewPat _ pat _) = go pat
go1 (SigPatIn pat _) = go pat
go1 (SigPatOut pat _) = go pat
go1 (TuplePat pats _ _) = all go pats
go1 (SumPat pat _ _ _) = go pat
go1 (ListPat {}) = False
go1 (PArrPat {}) = False -- ?
go1 (ConPatIn {}) = False -- Conservative
go1 (ConPatOut{ pat_con = L _ (RealDataCon con), pat_args = details })
= isJust (tyConSingleDataCon_maybe (dataConTyCon con))
-- NB: tyConSingleDataCon_maybe, *not* isProductTyCon, because
-- the latter is false of existentials. See Trac #4439
&& all go (hsConPatArgs details)
go1 (ConPatOut{ pat_con = L _ (PatSynCon _pat) })
= False -- Conservative
go1 (LitPat {}) = False
go1 (NPat {}) = False
go1 (NPlusKPat {}) = False
-- Both should be gotten rid of by renamer before
-- isIrrefutablePat is called
go1 (SplicePat {}) = urk pat
urk pat = pprPanic "isIrrefutableHsPat:" (ppr pat)
hsPatNeedsParens :: Pat a -> Bool
hsPatNeedsParens (NPlusKPat {}) = True
hsPatNeedsParens (SplicePat {}) = False
hsPatNeedsParens (ConPatIn _ ds) = conPatNeedsParens ds
hsPatNeedsParens p@(ConPatOut {}) = conPatNeedsParens (pat_args p)
hsPatNeedsParens (SigPatIn {}) = True
hsPatNeedsParens (SigPatOut {}) = True
hsPatNeedsParens (ViewPat {}) = True
hsPatNeedsParens (CoPat _ p _) = hsPatNeedsParens p
hsPatNeedsParens (WildPat {}) = False
hsPatNeedsParens (VarPat {}) = False
hsPatNeedsParens (LazyPat {}) = False
hsPatNeedsParens (BangPat {}) = False
hsPatNeedsParens (ParPat {}) = False
hsPatNeedsParens (AsPat {}) = False
hsPatNeedsParens (TuplePat {}) = False
hsPatNeedsParens (SumPat {}) = False
hsPatNeedsParens (ListPat {}) = False
hsPatNeedsParens (PArrPat {}) = False
hsPatNeedsParens (LitPat {}) = False
hsPatNeedsParens (NPat {}) = False
conPatNeedsParens :: HsConDetails a b -> Bool
conPatNeedsParens (PrefixCon args) = not (null args)
conPatNeedsParens (InfixCon {}) = True
conPatNeedsParens (RecCon {}) = True
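-- A quick illustration (Prelude constructors, purely as an example): the
-- pattern  Just x  is a PrefixCon with one argument and so needs parens when
-- nested, while  Nothing  (a PrefixCon with no arguments) and a bare
-- variable pattern do not.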
{-
% Collect all EvVars from all constructor patterns
-}
-- May need to add more cases
collectEvVarsPats :: [Pat id] -> Bag EvVar
collectEvVarsPats = unionManyBags . map collectEvVarsPat
collectEvVarsLPat :: LPat id -> Bag EvVar
collectEvVarsLPat (L _ pat) = collectEvVarsPat pat
collectEvVarsPat :: Pat id -> Bag EvVar
collectEvVarsPat pat =
case pat of
LazyPat p -> collectEvVarsLPat p
AsPat _ p -> collectEvVarsLPat p
ParPat p -> collectEvVarsLPat p
BangPat p -> collectEvVarsLPat p
ListPat ps _ _ -> unionManyBags $ map collectEvVarsLPat ps
TuplePat ps _ _ -> unionManyBags $ map collectEvVarsLPat ps
SumPat p _ _ _ -> collectEvVarsLPat p
PArrPat ps _ -> unionManyBags $ map collectEvVarsLPat ps
ConPatOut {pat_dicts = dicts, pat_args = args}
-> unionBags (listToBag dicts)
$ unionManyBags
$ map collectEvVarsLPat
$ hsConPatArgs args
SigPatOut p _ -> collectEvVarsLPat p
CoPat _ p _ -> collectEvVarsPat p
ConPatIn _ _ -> panic "foldMapPatBag: ConPatIn"
SigPatIn _ _ -> panic "foldMapPatBag: SigPatIn"
_other_pat -> emptyBag
|
mettekou/ghc
|
compiler/hsSyn/HsPat.hs
|
Haskell
|
bsd-3-clause
| 29,415
|
module CNC.GParser(module CNC.GTypes, parseIsoFile) where
import CNC.GTypes
import Data.Attoparsec.Text
import qualified Data.Text as T
import qualified Data.Text.Lazy as T
import qualified Data.Text.Lazy.IO as T
import Data.Char
import Control.Applicative
import Data.Maybe
instrP = do
c <- toUpper <$> letter
val <- iso_double
  return $ if c `elem` "GMNT"
             then GInstrI c (round val)
             else GInstrF c (realToFrac val)
frameP = GFrame <$> many1 instrP
frames = many $ do f <- frameP
skipComment
return f
iso7 = do
char '%'
skipSpace
char 'O'
prog <- many1 digit
skipComment
fs <- frames
char '%'
skipComment
endOfInput
return $ GProgram {gpName = prog, gpCode = fs}
skipComment = do
skipHorSpace
optional $ char ';'
res <- optional $ do
char '('
many $ notChar ')'
char ')'
skipHorSpace
res2 <- optional $ satisfy isEndOfLine
  if isJust res || isJust res2
    then skipComment
    else return ()
skipHorSpace = many $ satisfy isHorizontalSpace
iso_double = do minus <- optional $ char '-'
case minus of
Nothing -> iso_pos_double
Just _ -> negate <$> iso_pos_double
iso_pos_double = leading_dot <|> (double >>= \d -> optional (char '.') >> return d)
leading_dot = do char '.'
n <- number
case n of
I i -> let len = length (show i)
in return $ fromIntegral i / (10^len)
_ -> fail "strange number with leading dot"
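-- Illustrative behaviour (inputs are hypothetical): iso_double accepts "12.5"
-- as 12.5, "-.25" as -0.25 and ".5" as 0.5; the leading-dot branch divides the
-- parsed integer by 10 raised to the number of digits in its printed form.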
parseIsoFile file = do
prog <- T.readFile file
return $ parseOnly iso7 $ T.toStrict prog
|
akamaus/gcodec
|
src/CNC/GParser.hs
|
Haskell
|
bsd-3-clause
| 1,681
|
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE TupleSections #-}
-- | The general Stack configuration that starts everything off. This should
-- be smart enough to fall back if there is no stack.yaml, instead relying on
-- whatever files are available.
--
-- If there is no stack.yaml, and there is a cabal.config, we
-- read in those constraints, and if there's a cabal.sandbox.config,
-- we read any constraints from there and also find the package
-- database from there, etc. And if there's nothing, we should
-- probably default to behaving like cabal, possibly spitting out
-- a warning that "you should run `stack init` to make things better".
module Stack.Config
(loadConfig
,packagesParser
) where
import qualified Codec.Archive.Tar as Tar
import qualified Codec.Compression.GZip as GZip
import Control.Applicative
import Control.Concurrent (getNumCapabilities)
import Control.Exception (IOException)
import Control.Monad
import Control.Monad.Catch (Handler(..), MonadCatch, MonadThrow, catches, throwM)
import Control.Monad.IO.Class
import Control.Monad.Logger hiding (Loc)
import Control.Monad.Reader (MonadReader, ask, runReaderT)
import Control.Monad.Trans.Control (MonadBaseControl)
import qualified Crypto.Hash.SHA256 as SHA256
import Data.Aeson.Extended
import qualified Data.ByteString.Base16 as B16
import qualified Data.ByteString.Lazy as L
import qualified Data.IntMap as IntMap
import qualified Data.Map as Map
import Data.Maybe
import Data.Monoid
import qualified Data.Text as T
import Data.Text.Encoding (encodeUtf8, decodeUtf8)
import qualified Data.Yaml as Yaml
import Distribution.System (OS (..), Platform (..), buildPlatform)
import qualified Distribution.Text
import Distribution.Version (simplifyVersionRange)
import Network.HTTP.Client.Conduit (HasHttpManager, getHttpManager, Manager, parseUrl)
import Network.HTTP.Download (download)
import Options.Applicative (Parser, strOption, long, help)
import Path
import Path.IO
import qualified Paths_stack as Meta
import Stack.BuildPlan
import Stack.Constants
import qualified Stack.Docker as Docker
import qualified Stack.Image as Image
import Stack.Init
import Stack.Types
import Stack.Types.Internal
import System.Directory (getAppUserDataDirectory, createDirectoryIfMissing, canonicalizePath)
import System.Environment
import System.IO
import System.Process.Read (getEnvOverride, EnvOverride, unEnvOverride, readInNull)
-- | Get the latest snapshot resolver available.
getLatestResolver
:: (MonadIO m, MonadThrow m, MonadReader env m, HasConfig env, HasHttpManager env, MonadLogger m)
=> m Resolver
getLatestResolver = do
snapshots <- getSnapshots
let mlts = do
(x,y) <- listToMaybe (reverse (IntMap.toList (snapshotsLts snapshots)))
return (LTS x y)
snap =
case mlts of
Nothing -> Nightly (snapshotsNightly snapshots)
Just lts -> lts
return (ResolverSnapshot snap)
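-- Illustrative behaviour (snapshot numbers are hypothetical): if the snapshot
-- index lists LTS majors 1 and 2 with latest minors 15 and 17, the result is
-- @ResolverSnapshot (LTS 2 17)@; with no LTS entries it falls back to the
-- latest nightly snapshot.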
-- | Note that this will be @Nothing@ on Windows, which is by design.
defaultStackGlobalConfig :: Maybe (Path Abs File)
defaultStackGlobalConfig = parseAbsFile "/etc/stack/config"
-- Interprets ConfigMonoid options.
configFromConfigMonoid
:: (MonadLogger m, MonadIO m, MonadCatch m, MonadReader env m, HasHttpManager env)
=> Path Abs Dir -- ^ stack root, e.g. ~/.stack
-> Maybe Project
-> ConfigMonoid
-> m Config
configFromConfigMonoid configStackRoot mproject configMonoid@ConfigMonoid{..} = do
let configDocker = Docker.dockerOptsFromMonoid mproject configStackRoot configMonoidDockerOpts
configConnectionCount = fromMaybe 8 configMonoidConnectionCount
configHideTHLoading = fromMaybe True configMonoidHideTHLoading
configLatestSnapshotUrl = fromMaybe
"https://s3.amazonaws.com/haddock.stackage.org/snapshots.json"
configMonoidLatestSnapshotUrl
configPackageIndices = fromMaybe
[PackageIndex
{ indexName = IndexName "Hackage"
, indexLocation = ILGitHttp
"https://github.com/commercialhaskell/all-cabal-hashes.git"
"https://s3.amazonaws.com/hackage.fpcomplete.com/00-index.tar.gz"
, indexDownloadPrefix = "https://s3.amazonaws.com/hackage.fpcomplete.com/package/"
, indexGpgVerify = False
, indexRequireHashes = False
}]
configMonoidPackageIndices
configSystemGHC = fromMaybe True configMonoidSystemGHC
configInstallGHC = fromMaybe False configMonoidInstallGHC
configSkipGHCCheck = fromMaybe False configMonoidSkipGHCCheck
configSkipMsys = fromMaybe False configMonoidSkipMsys
configExtraIncludeDirs = configMonoidExtraIncludeDirs
configExtraLibDirs = configMonoidExtraLibDirs
         -- This is the only place in the codebase where the platform is
         -- hard-coded; in theory it could be made configurable in the future.
(Platform defArch defOS) = buildPlatform
arch = fromMaybe defArch
$ configMonoidArch >>= Distribution.Text.simpleParse
os = fromMaybe defOS
$ configMonoidOS >>= Distribution.Text.simpleParse
configPlatform = Platform arch os
configRequireStackVersion = simplifyVersionRange configMonoidRequireStackVersion
configConfigMonoid = configMonoid
configImage = Image.imgOptsFromMonoid configMonoidImageOpts
origEnv <- getEnvOverride configPlatform
let configEnvOverride _ = return origEnv
platform <- runReaderT platformRelDir configPlatform
configLocalPrograms <-
case configPlatform of
Platform _ Windows -> do
progsDir <- getWindowsProgsDir configStackRoot origEnv
return $ progsDir </> $(mkRelDir stackProgName) </> platform
_ -> return $ configStackRoot </> $(mkRelDir "programs") </> platform
configLocalBin <-
case configMonoidLocalBinPath of
Nothing -> do
localDir <- liftIO (getAppUserDataDirectory "local") >>= parseAbsDir
return $ localDir </> $(mkRelDir "bin")
Just userPath ->
(liftIO $ canonicalizePath userPath >>= parseAbsDir)
`catches`
[Handler (\(_ :: IOException) -> throwM $ NoSuchDirectory userPath)
,Handler (\(_ :: PathParseException) -> throwM $ NoSuchDirectory userPath)
]
configJobs <-
case configMonoidJobs of
Nothing -> liftIO getNumCapabilities
Just i -> return i
let configConcurrentTests = fromMaybe True configMonoidConcurrentTests
return Config {..}
-- | Get the directory on Windows where we should install extra programs. For
-- more information, see discussion at:
-- https://github.com/fpco/minghc/issues/43#issuecomment-99737383
getWindowsProgsDir :: MonadThrow m
=> Path Abs Dir
-> EnvOverride
-> m (Path Abs Dir)
getWindowsProgsDir stackRoot m =
case Map.lookup "LOCALAPPDATA" $ unEnvOverride m of
Just t -> do
lad <- parseAbsDir $ T.unpack t
return $ lad </> $(mkRelDir "Programs")
Nothing -> return $ stackRoot </> $(mkRelDir "Programs")
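-- Illustrative behaviour (paths are hypothetical): with LOCALAPPDATA set to
-- "C:\Users\alice\AppData\Local", extra programs are installed under
-- "C:\Users\alice\AppData\Local\Programs"; without it, they fall back to
-- "<stack root>\Programs".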
data MiniConfig = MiniConfig Manager Config
instance HasConfig MiniConfig where
getConfig (MiniConfig _ c) = c
instance HasStackRoot MiniConfig
instance HasHttpManager MiniConfig where
getHttpManager (MiniConfig man _) = man
instance HasPlatform MiniConfig
-- | Load the configuration, using current directory, environment variables,
-- and defaults as necessary.
loadConfig :: (MonadLogger m,MonadIO m,MonadCatch m,MonadThrow m,MonadBaseControl IO m,MonadReader env m,HasHttpManager env,HasTerminal env)
=> ConfigMonoid
-- ^ Config monoid from parsed command-line arguments
-> Maybe (Path Abs File)
-- ^ Override stack.yaml
-> m (LoadConfig m)
loadConfig configArgs mstackYaml = do
stackRoot <- determineStackRoot
extraConfigs <- getExtraConfigs stackRoot >>= mapM loadYaml
mproject <- loadProjectConfig mstackYaml
config <- configFromConfigMonoid stackRoot (fmap (\(proj, _, _) -> proj) mproject) $ mconcat $
case mproject of
Nothing -> configArgs : extraConfigs
Just (_, _, projectConfig) -> configArgs : projectConfig : extraConfigs
unless (fromCabalVersion Meta.version `withinRange` configRequireStackVersion config)
(throwM (BadStackVersionException (configRequireStackVersion config)))
menv <- runReaderT getMinimalEnvOverride config
return $ LoadConfig
{ lcConfig = config
, lcLoadBuildConfig = loadBuildConfig menv mproject config stackRoot
, lcProjectRoot = fmap (\(_, fp, _) -> parent fp) mproject
}
-- | Load the build configuration, adding build-specific values to the config
-- loaded by @loadConfig@.
loadBuildConfig :: (MonadLogger m, MonadIO m, MonadCatch m, MonadReader env m, HasHttpManager env, MonadBaseControl IO m, HasTerminal env)
=> EnvOverride
-> Maybe (Project, Path Abs File, ConfigMonoid)
-> Config
-> Path Abs Dir
-> Maybe Resolver -- override resolver
-> NoBuildConfigStrategy
-> m BuildConfig
loadBuildConfig menv mproject config stackRoot mresolver noConfigStrat = do
env <- ask
let miniConfig = MiniConfig (getHttpManager env) config
(project', stackYamlFP) <- case mproject of
Just (project, fp, _) -> return (project, fp)
Nothing -> case noConfigStrat of
ThrowException -> do
currDir <- getWorkingDir
cabalFiles <- findCabalFiles True currDir
throwM $ NoProjectConfigFound currDir
$ Just $ if null cabalFiles then "new" else "init"
ExecStrategy -> do
let dest :: Path Abs File
dest = destDir </> stackDotYaml
destDir = implicitGlobalDir stackRoot
dest' :: FilePath
dest' = toFilePath dest
createTree destDir
exists <- fileExists dest
if exists
then do
ProjectAndConfigMonoid project _ <- loadYaml dest
when (getTerminal env) $
case mresolver of
Nothing ->
$logInfo ("Using resolver: " <> renderResolver (projectResolver project) <>
" from global config file: " <> T.pack dest')
Just resolver ->
$logInfo ("Using resolver: " <> renderResolver resolver <>
" specified on command line")
return (project, dest)
else do
r <- runReaderT getLatestResolver miniConfig
$logInfo ("Using latest snapshot resolver: " <> renderResolver r)
$logInfo ("Writing global (non-project-specific) config file to: " <> T.pack dest')
$logInfo "Note: You can change the snapshot via the resolver field there."
let p = Project
{ projectPackages = mempty
, projectExtraDeps = mempty
, projectFlags = mempty
, projectResolver = r
}
liftIO $ Yaml.encodeFile dest' p
return (p, dest)
let project = project'
{ projectResolver = fromMaybe (projectResolver project') mresolver
}
ghcVersion <-
case projectResolver project of
ResolverSnapshot snapName -> do
mbp <- runReaderT (loadMiniBuildPlan snapName) miniConfig
return $ mbpGhcVersion mbp
ResolverGhc m -> return $ fromMajorVersion m
let root = parent stackYamlFP
packages' <- mapM (resolvePackageEntry menv root) (projectPackages project)
let packages = Map.fromList $ concat packages'
return BuildConfig
{ bcConfig = config
, bcResolver = projectResolver project
, bcGhcVersionExpected = ghcVersion
, bcPackages = packages
, bcExtraDeps = projectExtraDeps project
, bcRoot = root
, bcStackYaml = stackYamlFP
, bcFlags = projectFlags project
}
-- | Resolve a PackageEntry into a list of paths, downloading and cloning as
-- necessary.
resolvePackageEntry
:: (MonadIO m, MonadThrow m, MonadReader env m, HasHttpManager env, MonadLogger m, MonadCatch m
,MonadBaseControl IO m)
=> EnvOverride
-> Path Abs Dir -- ^ project root
-> PackageEntry
-> m [(Path Abs Dir, Bool)]
resolvePackageEntry menv projRoot pe = do
entryRoot <- resolvePackageLocation menv projRoot (peLocation pe)
paths <-
case peSubdirs pe of
[] -> return [entryRoot]
subs -> mapM (resolveDir entryRoot) subs
case peValidWanted pe of
Nothing -> return ()
Just _ -> $logWarn "Warning: you are using the deprecated valid-wanted field. You should instead use extra-dep. See: https://github.com/commercialhaskell/stack/wiki/stack.yaml#packages"
return $ map (, not $ peExtraDep pe) paths
-- | Resolve a PackageLocation into a path, downloading and cloning as
-- necessary.
resolvePackageLocation
:: (MonadIO m, MonadThrow m, MonadReader env m, HasHttpManager env, MonadLogger m, MonadCatch m
,MonadBaseControl IO m)
=> EnvOverride
-> Path Abs Dir -- ^ project root
-> PackageLocation
-> m (Path Abs Dir)
resolvePackageLocation _ projRoot (PLFilePath fp) = resolveDir projRoot fp
resolvePackageLocation _ projRoot (PLHttpTarball url) = do
let name = T.unpack $ decodeUtf8 $ B16.encode $ SHA256.hash $ encodeUtf8 url
root = projRoot </> workDirRel </> $(mkRelDir "downloaded")
fileRel <- parseRelFile $ name ++ ".tar.gz"
dirRel <- parseRelDir name
dirRelTmp <- parseRelDir $ name ++ ".tmp"
let file = root </> fileRel
dir = root </> dirRel
dirTmp = root </> dirRelTmp
exists <- dirExists dir
unless exists $ do
req <- parseUrl $ T.unpack url
_ <- download req file
removeTreeIfExists dirTmp
liftIO $ withBinaryFile (toFilePath file) ReadMode $ \h -> do
lbs <- L.hGetContents h
let entries = Tar.read $ GZip.decompress lbs
Tar.unpack (toFilePath dirTmp) entries
renameDir dirTmp dir
x <- listDirectory dir
case x of
([dir'], []) -> return dir'
(dirs, files) -> do
removeFileIfExists file
removeTreeIfExists dir
throwM $ UnexpectedTarballContents dirs files
resolvePackageLocation menv projRoot (PLGit url commit) = do
let name = T.unpack $ decodeUtf8 $ B16.encode $ SHA256.hash $ encodeUtf8 $ T.unwords [url, commit]
root = projRoot </> workDirRel </> $(mkRelDir "downloaded")
dirRel <- parseRelDir $ name ++ ".git"
dirRelTmp <- parseRelDir $ name ++ ".git.tmp"
let dir = root </> dirRel
dirTmp = root </> dirRelTmp
exists <- dirExists dir
unless exists $ do
removeTreeIfExists dirTmp
createTree (parent dirTmp)
readInNull (parent dirTmp) "git" menv
[ "clone"
, T.unpack url
, toFilePath dirTmp
]
Nothing
readInNull dirTmp "git" menv
[ "reset"
, "--hard"
, T.unpack commit
]
Nothing
renameDir dirTmp dir
return dir
-- | Get the stack root, e.g. ~/.stack
determineStackRoot :: (MonadIO m, MonadThrow m) => m (Path Abs Dir)
determineStackRoot = do
env <- liftIO getEnvironment
case lookup stackRootEnvVar env of
Nothing -> do
x <- liftIO $ getAppUserDataDirectory stackProgName
parseAbsDir x
Just x -> do
y <- liftIO $ do
createDirectoryIfMissing True x
canonicalizePath x
parseAbsDir y
-- | Determine the extra config file locations that exist.
--
-- Returns the most local first.
getExtraConfigs :: MonadIO m
=> Path Abs Dir -- ^ stack root
-> m [Path Abs File]
getExtraConfigs stackRoot = liftIO $ do
env <- getEnvironment
mstackConfig <-
maybe (return Nothing) (fmap Just . parseAbsFile)
$ lookup "STACK_CONFIG" env
mstackGlobalConfig <-
maybe (return Nothing) (fmap Just . parseAbsFile)
$ lookup "STACK_GLOBAL_CONFIG" env
filterM fileExists
$ fromMaybe (stackRoot </> stackDotYaml) mstackConfig
: maybe [] return (mstackGlobalConfig <|> defaultStackGlobalConfig)
-- | Load and parse YAML from the given file.
loadYaml :: (FromJSON a,MonadIO m) => Path Abs File -> m a
loadYaml path =
liftIO $ Yaml.decodeFileEither (toFilePath path)
>>= either (throwM . ParseConfigFileException path) return
-- | Get the location of the project config file, if it exists.
getProjectConfig :: (MonadIO m, MonadThrow m, MonadLogger m)
=> Maybe (Path Abs File)
-- ^ Override stack.yaml
-> m (Maybe (Path Abs File))
getProjectConfig (Just stackYaml) = return $ Just stackYaml
getProjectConfig Nothing = do
env <- liftIO getEnvironment
case lookup "STACK_YAML" env of
Just fp -> do
$logInfo "Getting project config file from STACK_YAML environment"
liftM Just $ case parseAbsFile fp of
Left _ -> do
currDir <- getWorkingDir
resolveFile currDir fp
Right path -> return path
Nothing -> do
currDir <- getWorkingDir
search currDir
where
search dir = do
let fp = dir </> stackDotYaml
fp' = toFilePath fp
$logDebug $ "Checking for project config at: " <> T.pack fp'
exists <- fileExists fp
if exists
then return $ Just fp
else do
let dir' = parent dir
if dir == dir'
-- fully traversed, give up
then return Nothing
else search dir'
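-- Illustrative search order (paths are hypothetical): starting from
-- /home/alice/project/src, the search above checks
-- /home/alice/project/src/stack.yaml, then /home/alice/project/stack.yaml,
-- and so on up to /stack.yaml, giving up once a directory is its own parent.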
-- | Find the project config file location, respecting environment variables
-- and otherwise traversing parents. If no config is found, we supply a default
-- based on current directory.
loadProjectConfig :: (MonadIO m, MonadThrow m, MonadLogger m)
=> Maybe (Path Abs File)
-- ^ Override stack.yaml
-> m (Maybe (Project, Path Abs File, ConfigMonoid))
loadProjectConfig mstackYaml = do
mfp <- getProjectConfig mstackYaml
case mfp of
Just fp -> do
currDir <- getWorkingDir
$logDebug $ "Loading project config file " <>
T.pack (maybe (toFilePath fp) toFilePath (stripDir currDir fp))
load fp
Nothing -> do
$logDebug $ "No project config file found, using defaults."
return Nothing
where
load fp = do
ProjectAndConfigMonoid project config <- loadYaml fp
return $ Just (project, fp, config)
packagesParser :: Parser [String]
packagesParser = many (strOption (long "package" <> help "Additional packages that must be installed"))
|
GaloisInc/stack
|
src/Stack/Config.hs
|
Haskell
|
bsd-3-clause
| 20,212
|
{-# LANGUAGE OverloadedStrings #-}
module Main where
import GHC.IO.Exception (ExitCode(..))
import System.Exit (die)
import Data.Monoid ((<>))
import Control.Applicative (empty)
import Data.Aeson
import Turtle.Prelude (procStrictWithErr, shell)
import qualified Data.ByteString.Lazy as BS
import System.Directory (getAppUserDataDirectory)
import System.FilePath ((</>))
import Data.Text (unpack, Text)
import qualified Data.Text.IO as T
data Packages = Packages {
patched :: [Text],
vanilla :: [Text]
} deriving (Show, Eq, Ord)
instance FromJSON Packages where
parseJSON (Object v) = Packages <$> v.: "patched" <*> v.: "vanilla"
parseJSON _ = empty
parsePackagesFile :: FilePath -> IO (Maybe Packages)
parsePackagesFile fname = do
contents <- BS.readFile fname
let packages = decode contents
return packages
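-- Illustrative packages.json contents (package names are hypothetical):
--
--   {"patched": ["text", "bytestring"], "vanilla": ["aeson"]}
--
-- which decodes to @Just (Packages {patched = ["text","bytestring"], vanilla = ["aeson"]})@.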
packagesFilePath :: IO FilePath
packagesFilePath = (</> "patches" </> "packages.json") <$> getAppUserDataDirectory "epm"
buildPackage :: Text -> IO ()
buildPackage pkg = do
let args = ["install", pkg]
(exitCode, out, err) <- procStrictWithErr "epm" args empty
case exitCode of
ExitSuccess -> T.putStr out
ExitFailure x -> T.putStr err >> die ("error in building " <> unpack pkg)
return ()
main :: IO ()
main = do
let vmUpdateCmd = "epm update"
shell vmUpdateCmd ""
epmPkgs <- packagesFilePath
pkg <- parsePackagesFile epmPkgs
case pkg of
Nothing -> die "Problem parsing your packages.json file"
Just pkg' ->
let packages = (patched pkg') <> (vanilla pkg')
in mapM_ buildPackage packages
|
alexander-at-github/eta
|
tests/packages/Test.hs
|
Haskell
|
bsd-3-clause
| 1,599
|
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.Init.Heuristics
-- Copyright : (c) Benedikt Huber 2009
-- License : BSD-like
--
-- Maintainer : cabal-devel@haskell.org
-- Stability : provisional
-- Portability : portable
--
-- Heuristics for creating initial cabal files.
--
-----------------------------------------------------------------------------
module Distribution.Client.Init.Heuristics (
guessPackageName,
scanForModules, SourceFileEntry(..),
neededBuildPrograms,
guessMainFileCandidates,
guessAuthorNameMail,
knownCategories,
) where
import Distribution.Text (simpleParse)
import Distribution.Simple.Setup (Flag(..), flagToMaybe)
import Distribution.ModuleName
( ModuleName, toFilePath )
import Distribution.Client.PackageIndex
( allPackagesByName )
import qualified Distribution.Package as P
import qualified Distribution.PackageDescription as PD
( category, packageDescription )
import Distribution.Simple.Utils
( intercalate )
import Distribution.Client.Utils
( tryCanonicalizePath )
import Language.Haskell.Extension ( Extension )
import Distribution.Client.Types ( packageDescription, SourcePackageDb(..) )
import Control.Applicative ( pure, (<$>), (<*>) )
import Control.Arrow ( first )
import Control.Monad ( liftM )
import Data.Char ( isAlphaNum, isNumber, isUpper, isLower, isSpace )
import Data.Either ( partitionEithers )
import Data.List ( isInfixOf, isPrefixOf, isSuffixOf, sortBy )
import Data.Maybe ( mapMaybe, catMaybes, maybeToList )
import Data.Monoid ( mempty, mappend, mconcat, )
import Data.Ord ( comparing )
import qualified Data.Set as Set ( fromList, toList )
import System.Directory ( getCurrentDirectory, getDirectoryContents,
doesDirectoryExist, doesFileExist, getHomeDirectory, )
import Distribution.Compat.Environment ( getEnvironment )
import System.FilePath ( takeExtension, takeBaseName, dropExtension,
(</>), (<.>), splitDirectories, makeRelative )
import Distribution.Client.Init.Types ( InitFlags(..) )
import Distribution.Client.Compat.Process ( readProcessWithExitCode )
import System.Exit ( ExitCode(..) )
-- | Return a list of candidate main files for this executable: top-level
-- modules including the word 'Main' in the file name. The list is sorted in
-- order of preference; shorter file names are preferred. 'Right's are existing
-- candidates and 'Left's are those that do not yet exist.
guessMainFileCandidates :: InitFlags -> IO [Either FilePath FilePath]
guessMainFileCandidates flags = do
dir <-
maybe getCurrentDirectory return (flagToMaybe $ packageDir flags)
files <- getDirectoryContents dir
let existingCandidates = filter isMain files
-- We always want to give the user at least one default choice. If either
-- Main.hs or Main.lhs has already been created, then we don't want to
-- suggest the other; however, if neither has been created, then we
-- suggest both.
newCandidates =
if any (`elem` existingCandidates) ["Main.hs", "Main.lhs"]
then []
else ["Main.hs", "Main.lhs"]
candidates =
sortBy (\x y -> comparing (length . either id id) x y
`mappend` compare x y)
(map Left newCandidates ++ map Right existingCandidates)
return candidates
where
isMain f = (isInfixOf "Main" f || isInfixOf "main" f)
&& (isSuffixOf ".hs" f || isSuffixOf ".lhs" f)
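-- Illustrative result (file names are hypothetical): if the only existing
-- candidate in the directory is "OldMain.hs", the returned list is
-- [Left "Main.hs", Left "Main.lhs", Right "OldMain.hs"]: the two default
-- suggestions first, shortest name first, followed by the existing file.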
-- | Guess the package name based on the given root directory.
guessPackageName :: FilePath -> IO P.PackageName
guessPackageName = liftM (P.PackageName . repair . last . splitDirectories)
. tryCanonicalizePath
where
-- Treat each span of non-alphanumeric characters as a hyphen. Each
-- hyphenated component of a package name must contain at least one
-- alphabetic character. An arbitrary character ('x') will be prepended if
-- this is not the case for the first component, and subsequent components
-- will simply be run together. For example, "1+2_foo-3" will become
-- "x12-foo3".
repair = repair' ('x' :) id
repair' invalid valid x = case dropWhile (not . isAlphaNum) x of
"" -> repairComponent ""
x' -> let (c, r) = first repairComponent $ break (not . isAlphaNum) x'
in c ++ repairRest r
where
repairComponent c | all isNumber c = invalid c
| otherwise = valid c
repairRest = repair' id ('-' :)
-- |Data type of source files found in the working directory
data SourceFileEntry = SourceFileEntry
{ relativeSourcePath :: FilePath
, moduleName :: ModuleName
, fileExtension :: String
, imports :: [ModuleName]
, extensions :: [Extension]
} deriving Show
sfToFileName :: FilePath -> SourceFileEntry -> FilePath
sfToFileName projectRoot (SourceFileEntry relPath m ext _ _)
= projectRoot </> relPath </> toFilePath m <.> ext
-- |Search for source files in the given directory
-- and return pairs of guessed Haskell source path and
-- module names.
scanForModules :: FilePath -> IO [SourceFileEntry]
scanForModules rootDir = scanForModulesIn rootDir rootDir
scanForModulesIn :: FilePath -> FilePath -> IO [SourceFileEntry]
scanForModulesIn projectRoot srcRoot = scan srcRoot []
where
scan dir hierarchy = do
entries <- getDirectoryContents (projectRoot </> dir)
(files, dirs) <- liftM partitionEithers (mapM (tagIsDir dir) entries)
let modules = catMaybes [ guessModuleName hierarchy file
| file <- files
, isUpper (head file) ]
modules' <- mapM (findImportsAndExts projectRoot) modules
recMods <- mapM (scanRecursive dir hierarchy) dirs
return $ concat (modules' : recMods)
tagIsDir parent entry = do
isDir <- doesDirectoryExist (parent </> entry)
return $ (if isDir then Right else Left) entry
guessModuleName hierarchy entry
| takeBaseName entry == "Setup" = Nothing
| ext `elem` sourceExtensions =
SourceFileEntry <$> pure relRoot <*> modName <*> pure ext <*> pure [] <*> pure []
| otherwise = Nothing
where
relRoot = makeRelative projectRoot srcRoot
unqualModName = dropExtension entry
modName = simpleParse
$ intercalate "." . reverse $ (unqualModName : hierarchy)
ext = case takeExtension entry of '.':e -> e; e -> e
scanRecursive parent hierarchy entry
| isUpper (head entry) = scan (parent </> entry) (entry : hierarchy)
| isLower (head entry) && not (ignoreDir entry) =
scanForModulesIn projectRoot $ foldl (</>) srcRoot (reverse (entry : hierarchy))
| otherwise = return []
ignoreDir ('.':_) = True
ignoreDir dir = dir `elem` ["dist", "_darcs"]
findImportsAndExts :: FilePath -> SourceFileEntry -> IO SourceFileEntry
findImportsAndExts projectRoot sf = do
s <- readFile (sfToFileName projectRoot sf)
let modules = mapMaybe
( getModName
. drop 1
. filter (not . null)
. dropWhile (/= "import")
. words
)
. filter (not . ("--" `isPrefixOf`)) -- poor man's comment filtering
. lines
$ s
-- XXX we should probably make a better attempt at parsing
-- comments above. Unfortunately we can't use a full-fledged
-- Haskell parser since cabal's dependencies must be kept at a
-- minimum.
-- A poor man's LANGUAGE pragma parser.
exts = mapMaybe simpleParse
. concatMap getPragmas
. filter isLANGUAGEPragma
. map fst
. drop 1
. takeWhile (not . null . snd)
. iterate (takeBraces . snd)
$ ("",s)
takeBraces = break (== '}') . dropWhile (/= '{')
isLANGUAGEPragma = ("{-# LANGUAGE " `isPrefixOf`)
getPragmas = map trim . splitCommas . takeWhile (/= '#') . drop 13
splitCommas "" = []
splitCommas xs = x : splitCommas (drop 1 y)
where (x,y) = break (==',') xs
return sf { imports = modules
, extensions = exts
}
where getModName :: [String] -> Maybe ModuleName
getModName [] = Nothing
getModName ("qualified":ws) = getModName ws
getModName (ms:_) = simpleParse ms
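-- Illustrative behaviour (module and pragma names are hypothetical): a source
-- line such as "import qualified Data.Map as Map" contributes Data.Map to
-- 'imports', and "{-# LANGUAGE OverloadedStrings, CPP #-}" contributes
-- OverloadedStrings and CPP to 'extensions'.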
-- Unfortunately we cannot use the version exported by Distribution.Simple.Program
knownSuffixHandlers :: [(String,String)]
knownSuffixHandlers =
[ ("gc", "greencard")
, ("chs", "chs")
, ("hsc", "hsc2hs")
, ("x", "alex")
, ("y", "happy")
, ("ly", "happy")
, ("cpphs", "cpp")
]
sourceExtensions :: [String]
sourceExtensions = "hs" : "lhs" : map fst knownSuffixHandlers
neededBuildPrograms :: [SourceFileEntry] -> [String]
neededBuildPrograms entries =
[ handler
| ext <- nubSet (map fileExtension entries)
, handler <- maybeToList (lookup ext knownSuffixHandlers)
]
-- | Guess author and email using darcs and git configuration options. Use
-- the following in decreasing order of preference:
--
-- 1. vcs env vars ($DARCS_EMAIL, $GIT_AUTHOR_*)
-- 2. Local repo configs
-- 3. Global vcs configs
-- 4. The generic $EMAIL
--
-- Name and email are processed separately, so the guess might end up being
-- a name from DARCS_EMAIL and an email from git config.
--
-- Darcs has preference, for tradition's sake.
guessAuthorNameMail :: IO (Flag String, Flag String)
guessAuthorNameMail = fmap authorGuessPure authorGuessIO
-- Ordered in increasing preference, since Flag-as-monoid is identical to
-- Last.
authorGuessPure :: AuthorGuessIO -> AuthorGuess
authorGuessPure (AuthorGuessIO env darcsLocalF darcsGlobalF gitLocal gitGlobal)
= mconcat
[ emailEnv env
, gitGlobal
, darcsCfg darcsGlobalF
, gitLocal
, darcsCfg darcsLocalF
, gitEnv env
, darcsEnv env
]
authorGuessIO :: IO AuthorGuessIO
authorGuessIO = AuthorGuessIO
<$> getEnvironment
<*> (maybeReadFile $ "_darcs" </> "prefs" </> "author")
<*> (maybeReadFile =<< liftM (</> (".darcs" </> "author")) getHomeDirectory)
<*> gitCfg Local
<*> gitCfg Global
-- Types and functions used for guessing the author are now defined:
type AuthorGuess = (Flag String, Flag String)
type Enviro = [(String, String)]
data GitLoc = Local | Global
data AuthorGuessIO = AuthorGuessIO
Enviro -- ^ Environment lookup table
(Maybe String) -- ^ Contents of local darcs author info
(Maybe String) -- ^ Contents of global darcs author info
AuthorGuess -- ^ Git config --local
AuthorGuess -- ^ Git config --global
darcsEnv :: Enviro -> AuthorGuess
darcsEnv = maybe mempty nameAndMail . lookup "DARCS_EMAIL"
gitEnv :: Enviro -> AuthorGuess
gitEnv env = (name, mail)
where
name = maybeFlag "GIT_AUTHOR_NAME" env
mail = maybeFlag "GIT_AUTHOR_EMAIL" env
darcsCfg :: Maybe String -> AuthorGuess
darcsCfg = maybe mempty nameAndMail
emailEnv :: Enviro -> AuthorGuess
emailEnv env = (mempty, mail)
where
mail = maybeFlag "EMAIL" env
gitCfg :: GitLoc -> IO AuthorGuess
gitCfg which = do
name <- gitVar which "user.name"
mail <- gitVar which "user.email"
return (name, mail)
gitVar :: GitLoc -> String -> IO (Flag String)
gitVar which = fmap happyOutput . gitConfigQuery which
happyOutput :: (ExitCode, a, t) -> Flag a
happyOutput v = case v of
(ExitSuccess, s, _) -> Flag s
_ -> mempty
gitConfigQuery :: GitLoc -> String -> IO (ExitCode, String, String)
gitConfigQuery which key =
fmap trim' $ readProcessWithExitCode "git" ["config", w, key] ""
where
w = case which of
Local -> "--local"
Global -> "--global"
trim' (a, b, c) = (a, trim b, c)
maybeFlag :: String -> Enviro -> Flag String
maybeFlag k = maybe mempty Flag . lookup k
-- | Read the first non-comment, non-trivial line of a file, if it exists
maybeReadFile :: String -> IO (Maybe String)
maybeReadFile f = do
exists <- doesFileExist f
if exists
then fmap getFirstLine $ readFile f
else return Nothing
where
getFirstLine content =
let nontrivialLines = dropWhile (\l -> (null l) || ("#" `isPrefixOf` l)) . lines $ content
in case nontrivialLines of
[] -> Nothing
(l:_) -> Just l
-- |Get list of categories used in Hackage. NOTE: Very slow, needs to be cached
knownCategories :: SourcePackageDb -> [String]
knownCategories (SourcePackageDb sourcePkgIndex _) = nubSet
[ cat | pkg <- map head (allPackagesByName sourcePkgIndex)
, let catList = (PD.category . PD.packageDescription . packageDescription) pkg
, cat <- splitString ',' catList
]
-- Parse name and email, from darcs pref files or environment variable
nameAndMail :: String -> (Flag String, Flag String)
nameAndMail str
| all isSpace nameOrEmail = mempty
| null erest = (mempty, Flag $ trim nameOrEmail)
| otherwise = (Flag $ trim nameOrEmail, Flag mail)
where
(nameOrEmail,erest) = break (== '<') str
(mail,_) = break (== '>') (tail erest)
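-- Illustrative behaviour (names and addresses are hypothetical):
-- nameAndMail "Jane Doe <jane@example.com>" gives
-- (Flag "Jane Doe", Flag "jane@example.com"), while a bare
-- "jane@example.com" gives (NoFlag, Flag "jane@example.com").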
trim :: String -> String
trim = removeLeadingSpace . reverse . removeLeadingSpace . reverse
where
removeLeadingSpace = dropWhile isSpace
-- split string at given character, and remove whitespace
splitString :: Char -> String -> [String]
splitString sep str = go str where
go s = if null s' then [] else tok : go rest where
s' = dropWhile (\c -> c == sep || isSpace c) s
(tok,rest) = break (==sep) s'
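-- Illustrative behaviour (input is hypothetical): splitString ',' "Web, Network"
-- yields ["Web", "Network"]; separators and leading whitespace are skipped.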
nubSet :: (Ord a) => [a] -> [a]
nubSet = Set.toList . Set.fromList
{-
test db testProjectRoot = do
putStrLn "Guessed package name"
(guessPackageName >=> print) testProjectRoot
putStrLn "Guessed name and email"
guessAuthorNameMail >>= print
mods <- scanForModules testProjectRoot
putStrLn "Guessed modules"
mapM_ print mods
putStrLn "Needed build programs"
print (neededBuildPrograms mods)
putStrLn "List of known categories"
print $ knownCategories db
-}
|
plumlife/cabal
|
cabal-install/Distribution/Client/Init/Heuristics.hs
|
Haskell
|
bsd-3-clause
| 14,315
|
{-# LANGUAGE DeriveGeneric #-}
module Competition where
import Data.Aeson
import GHC.Generics
data Competition = Competition
{ id :: Int
, caption :: String
, league :: String
, year :: String
, currentMatchday :: Int
, numberOfMatchdays :: Int
, numberOfTeams :: Int
, numberOfGames :: Int
, lastUpdated :: String
, _links :: Links
} deriving (Show, Generic)
data Links = Links
{ self :: Link
, teams :: Link
, fixtures :: Link
, leagueTable :: Link
} deriving (Show, Generic)
data Link = Link
{ href :: String
} deriving (Show, Generic)
instance FromJSON Competition
instance FromJSON Links
instance FromJSON Link
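-- Illustrative usage (file name is hypothetical; not part of the original
-- module): assuming Data.ByteString.Lazy is imported qualified as BL, a saved
-- API response can be decoded with
--
--   decode <$> BL.readFile "competition.json" :: IO (Maybe Competition)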
|
julienXX/football-data-client
|
src/Competition.hs
|
Haskell
|
bsd-3-clause
| 658
|