code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
-- | Error values raised by the shell.
module Shell.Error (
ShellError(..)
) where
-- | Placeholder error type: a single nullary constructor with no payload
-- yet; fields can be added as concrete error cases are identified.
data ShellError = ShellError
| Sventimir/LambdaShell | Shell/Error.hs | gpl-3.0 | 78 | 0 | 5 | 15 | 22 | 14 | 8 | 3 | 0 |
{-|
miniAtmegatron - Soulsby Synthesizers
http://soulsbysynths.com/wp-content/uploads/2016/08/Mini-Manual.pdf, page 15
-}
module Sound.Tidal.MIDI.MiniAtmegatron where
import Sound.Tidal.MIDI.Control
import Sound.Tidal.Params
-- | CC mapping for the miniAtmegatron, transcribed from page 15 of the
-- manual linked in the module header.
matmController :: ControllerShape
matmController =
  ControllerShape
  { controls =
      [ mCC modwheel_p 1
      , mCC pan_p 10
      , mCC expression_p 11
      , mCC sustainpedal_p 64
      , mCC cutoff_p 74
      , mCC filtenv_p 16
      , mCC flfo_p 93
      , mCC alfo_p 92
      , mCC shape_p 17
      , mCC resonance_p 71
      , mCC penv_p 94
      -- NOTE(review): CC 1 is also used by modwheel_p above — confirm
      -- against the manual that plfo really shares the mod-wheel CC.
      , mCC plfo_p 1
      , mCC pwm_p 91
      , mCC flange_p 95
      , mCC fwave_p 30
      , mCC ffilt_p 31
      , mCC ffenv_p 32
      , mCC faenv_p 33
      , mCC mlfoshape_p 34
      , mCC lfospeed_p 79
      , mCC porta_p 5
      ]
  , latency = 0.1
  }
-- | Controller shape usable from Tidal patterns.
matm = toShape matmController
-- Per-control parameters: each 'pF' call yields the (pattern param,
-- shape param) pair registered above, all defaulting to 0.
(fen, filtenv_p) = pF "fen" (Just 0)
(flo, flfo_p) = pF "flo" (Just 0)
(alo, alfo_p) = pF "alo" (Just 0)
(pen, penv_p) = pF "pen" (Just 0)
(plo, plfo_p) = pF "plo" (Just 0)
(pwm, pwm_p) = pF "pwm" (Just 0)
(fln, flange_p) = pF "fln" (Just 0)
(fwv, fwave_p) = pF "fwv" (Just 0)
(ffl, ffilt_p) = pF "ffl" (Just 0)
(ffe, ffenv_p) = pF "ffe" (Just 0)
(fae, faenv_p) = pF "fae" (Just 0)
(lfs, mlfoshape_p) = pF "lfs" (Just 0)
(lss, lfospeed_p) = pF "lss" (Just 0)
(por, porta_p) = pF "por" (Just 0)
| tidalcycles/tidal-midi | Sound/Tidal/MIDI/MiniAtmegatron.hs | gpl-3.0 | 1,384 | 0 | 8 | 376 | 544 | 287 | 257 | 44 | 1 |
<?xml version='1.0' encoding='ISO-8859-1' ?>
<!DOCTYPE helpset
PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN"
"http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0">
    <!-- JavaHelp 2.0 helpset: declares the help title, the map file that
         resolves help IDs to pages, and two views (table of contents and
         full-text search). -->
    <title>CHRIS/Proba Noise Reduction Tool Help</title>
    <maps>
        <homeID>top</homeID>
        <mapref location="map.jhm"/>
    </maps>
    <view mergetype="javax.help.UniteAppendMerge">
        <name>TOC</name>
        <label>Contents</label>
        <type>javax.help.TOCView</type>
        <data>toc.xml</data>
    </view>
    <view>
        <name>Search</name>
        <label>Search</label>
        <type>javax.help.SearchView</type>
        <data engine="com.sun.java.help.search.DefaultSearchEngine">JavaHelpSearch</data>
    </view>
</helpset>
| bcdev/chris-box | chris-noise-reduction/src/main/resources/doc/help/noisereduction.hs | gpl-3.0 | 793 | 54 | 45 | 167 | 291 | 147 | 144 | -1 | -1 |
module Ampersand.Output.ToJSON.ToJson
(generateJSONfiles)
where
import Ampersand.Output.ToJSON.JSONutils
import Ampersand.Output.ToJSON.Settings
import Ampersand.Output.ToJSON.Populations
import Ampersand.Output.ToJSON.Relations
import Ampersand.Output.ToJSON.Rules
import Ampersand.Output.ToJSON.Concepts
import Ampersand.Output.ToJSON.Conjuncts
import Ampersand.Output.ToJSON.Interfaces
import Ampersand.Output.ToJSON.Views
import Ampersand.Output.ToJSON.Roles
-- | Write the fixed set of JSON files for the prototype framework.
-- When the options ask for the RAP population only, a single
-- @metaPopulation@ file is produced; otherwise the full set is written.
generateJSONfiles :: MultiFSpecs -> IO ()
generateJSONfiles multi =
 sequence_ $
 if genRapPopulationOnly opts
 then [ writeJSON "metaPopulation"
          (fromAmpersand multi (multi,True) :: Populations)
      ]
 else [ writeJSON "settings" (fromAmpersand multi multi :: Settings)
      , writeJSON "relations" (fromAmpersand multi multi :: Relationz)
      , writeJSON "rules" (fromAmpersand multi multi :: Rulez)
      , writeJSON "concepts" (fromAmpersand multi multi :: Concepts)
      , writeJSON "conjuncts" (fromAmpersand multi multi :: Conjuncts)
      , writeJSON "interfaces" (fromAmpersand multi multi :: Interfaces)
      , writeJSON "views" (fromAmpersand multi multi :: Views)
      , writeJSON "roles" (fromAmpersand multi multi :: Roles)
      -- The Bool in the pair selects meta (True) vs. user (False) population.
      , writeJSON "populations"(fromAmpersand multi (multi,False) :: Populations)
      ]
 where
   opts = getOpts . userFSpec $ multi
   -- Serialize one value to "<name>.json" using the configured options.
   writeJSON :: ToJSON a => String -> a -> IO()
   writeJSON = writeJSONFile opts
| AmpersandTarski/ampersand | src/Ampersand/Output/ToJSON/ToJson.hs | gpl-3.0 | 1,517 | 0 | 11 | 308 | 370 | 210 | 160 | 30 | 2 |
-- HaskellServer - Minimalistic webserver written in Haskell
-- Copyright (C) 2014 Andreas Rohner
--
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program. If not, see <http://www.gnu.org/licenses/>.
--------------------------------------------------------------------------------
-- |
-- Module : Settings
-- Note : Contains code to parse command line arguments
--
-- Contains helper functions to parse command line
--
--------------------------------------------------------------------------------
module Settings (Settings (..), parseSettings, evalSettings, getUsage) where
import System.Console.GetOpt
import Data.Maybe
import System.Exit
import System.FilePath
-- | One parsed command-line flag, before it is folded into 'Settings'.
data Flag = Version | Host String | Port String | PubDir String | Help
-- | Program state controlled by command-line options.
data Settings = Settings {
  settHost :: String,        -- ^ Hostname to bind to.
  settPort :: Int,           -- ^ TCP port to listen on.
  settPubDir :: String,      -- ^ Directory served to clients.
  settVersion :: Bool,       -- ^ True when @--version@ was given.
  settMsg :: Maybe String    -- ^ Pending message: @Just ""@ means "show
                             --   help"; @Just msg@ is an error to report.
  }
  deriving (Show)
-- | Default options: read standard input, write standard output, serve
-- the @public@ directory on @localhost:8080@, no pending message.
-- (Adds the previously missing top-level type signature, silencing the
-- -Wmissing-signatures warning and pinning the intended type.)
defaultSettings :: Settings
defaultSettings = Settings {
  settHost = "localhost",
  settPort = 8080,
  settPubDir = "public",
  settVersion = False,
  settMsg = Nothing
  }
-- | Option descriptors for 'System.Console.GetOpt.getOpt'.
options :: [OptDescr Flag]
options = [
    Option ['v'] ["version"] (NoArg Version) "show version number",
    Option ['n'] ["host"] (ReqArg Host "NAME") "hostname to bind to",
    Option ['p'] ["port"] (ReqArg Port "INT") "port number",
    Option ['d'] ["dir"] (ReqArg PubDir "DIR") "path to public directory",
    Option ['h'] ["help"] (NoArg Help) "display this help"
  ]
-- | Render the usage message for the given program name.
-- (The where-bound helper previously re-bound @prog@, shadowing the
-- parameter; renamed to keep -Wname-shadowing clean.)
getUsage :: String -> String
getUsage prog = usageInfo (usageHeader prog) options
  where
    usageHeader name = "Usage: " ++ name ++ " [OPTION...]"
-- | Fold one parsed 'Flag' into the running 'Settings'.
-- A bad port number or help request is recorded in 'settMsg' and acted
-- on later by 'evalSettings'.
processFlag :: Flag -> Settings -> Settings
processFlag Version s = s { settVersion = True }
processFlag (Host h) s = s { settHost = h }
processFlag (Port p) s = case reads p :: [(Int,String)] of
  -- 'reads' yields at most one parse; trailing garbage is ignored here.
  (x, _):_ -> validatePort p x s
  _ -> s { settMsg = Just ("Unable to parse port: "
                    ++ p ++ "\n") }
  where
    -- Accept only valid TCP port numbers (1..65535).
    validatePort p x s
      | x > 0 && x < 65536 = s { settPort = x }
      | otherwise = s { settMsg = Just ("Invalid port number: "
                                 ++ p ++ "\n") }
-- An empty message is the sentinel meaning "print the usage text".
processFlag Help s = s { settMsg = Just "" }
processFlag (PubDir d) s = s { settPubDir = (validateDir d) }
  where
    -- Normalise the user-supplied path and drop any trailing separator.
    validateDir = makeValid . normalise . dropTrailingPathSeparator
-- | Parse the raw argument list into 'Settings'.
-- Unrecognised arguments and getOpt errors are recorded in 'settMsg'
-- rather than thrown, so the caller decides how to report them.
parseSettings :: [String] -> Settings
parseSettings args = case getOpt Permute options args of
  -- All flags parsed cleanly: apply each handler to the defaults.
  (flags, [] , []) -> foldr ($) defaultSettings (map processFlag flags)
  (_, nonOpts, []) -> defaultSettings { settMsg = Just ("Unrecognized arguments: "
                                        ++ unwords nonOpts ++ "\n") }
  (_, _, msgs) -> defaultSettings { settMsg = Just (concat msgs) }
-- | Act on the parsed settings: print help, an error message, or the
-- version string and exit — or return () to continue normal startup.
evalSettings :: Settings -> String -> IO ()
evalSettings Settings { settMsg = Just "" } u = do
  -- Empty message is the "show help" sentinel set by 'processFlag' Help.
  putStrLn u
  exitWith ExitSuccess
evalSettings Settings { settMsg = Just m } u = do
  putStrLn $ m ++ u
  -- BUG FIX: this previously called 'exitWith (ExitFailure 0)', which is
  -- invalid (GHC raises an error for ExitFailure 0 at runtime). A parse
  -- error must terminate with a genuine non-zero exit status.
  exitFailure
evalSettings Settings { settVersion = True } _ = do
  putStrLn "Simple HTTP Server 0.1"
  exitWith ExitSuccess
evalSettings _ _ = return ()
| zeitgeist87/HaskellServer | Settings.hs | gpl-3.0 | 3,763 | 0 | 14 | 948 | 925 | 509 | 416 | 60 | 3 |
-- | 'Maybe' expressed as a sum of functors (CTFP exercise snippet):
-- 'Nothing' corresponds to @Const () a@ (carries no @a@), 'Just' to
-- @Identity a@. Shadows the Prelude 'Maybe'; assumes Const/Identity in scope.
type Maybe a = Either (Const () a) (Identity a)
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Directory.Groups.Insert
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Creates a group.
--
-- /See:/ <https://developers.google.com/admin-sdk/ Admin SDK API Reference> for @directory.groups.insert@.
module Network.Google.Resource.Directory.Groups.Insert
(
-- * REST Resource
GroupsInsertResource
-- * Creating a Request
, groupsInsert
, GroupsInsert
-- * Request Lenses
, giXgafv
, giUploadProtocol
, giAccessToken
, giUploadType
, giPayload
, giCallback
) where
import Network.Google.Directory.Types
import Network.Google.Prelude
-- | A resource alias for @directory.groups.insert@ method which the
-- 'GroupsInsert' request conforms to.
-- Servant-style route: POST admin\/directory\/v1\/groups with a JSON
-- 'Group' body, returning the created 'Group'.
type GroupsInsertResource =
     "admin" :>
       "directory" :>
         "v1" :>
           "groups" :>
             QueryParam "$.xgafv" Xgafv :>
               QueryParam "upload_protocol" Text :>
                 QueryParam "access_token" Text :>
                   QueryParam "uploadType" Text :>
                     QueryParam "callback" Text :>
                       QueryParam "alt" AltJSON :>
                         ReqBody '[JSON] Group :> Post '[JSON] Group
-- | Creates a group.
--
-- /See:/ 'groupsInsert' smart constructor.
data GroupsInsert =
  GroupsInsert'
    { _giXgafv :: !(Maybe Xgafv)          -- ^ V1 error format.
    , _giUploadProtocol :: !(Maybe Text)  -- ^ Upload protocol for media.
    , _giAccessToken :: !(Maybe Text)     -- ^ OAuth access token.
    , _giUploadType :: !(Maybe Text)      -- ^ Legacy upload protocol.
    , _giPayload :: !Group                -- ^ Request body (the group).
    , _giCallback :: !(Maybe Text)        -- ^ JSONP callback name.
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'GroupsInsert' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'giXgafv'
--
-- * 'giUploadProtocol'
--
-- * 'giAccessToken'
--
-- * 'giUploadType'
--
-- * 'giPayload'
--
-- * 'giCallback'
groupsInsert
    :: Group -- ^ 'giPayload'
    -> GroupsInsert
-- All optional query parameters start out unset; only the payload is required.
groupsInsert pGiPayload_ =
  GroupsInsert'
    { _giXgafv = Nothing
    , _giUploadProtocol = Nothing
    , _giAccessToken = Nothing
    , _giUploadType = Nothing
    , _giPayload = pGiPayload_
    , _giCallback = Nothing
    }
-- Lenses over the 'GroupsInsert' record, one per field.
-- | V1 error format.
giXgafv :: Lens' GroupsInsert (Maybe Xgafv)
giXgafv = lens _giXgafv (\ s a -> s{_giXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
giUploadProtocol :: Lens' GroupsInsert (Maybe Text)
giUploadProtocol
  = lens _giUploadProtocol
      (\ s a -> s{_giUploadProtocol = a})
-- | OAuth access token.
giAccessToken :: Lens' GroupsInsert (Maybe Text)
giAccessToken
  = lens _giAccessToken
      (\ s a -> s{_giAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
giUploadType :: Lens' GroupsInsert (Maybe Text)
giUploadType
  = lens _giUploadType (\ s a -> s{_giUploadType = a})
-- | Multipart request metadata.
giPayload :: Lens' GroupsInsert Group
giPayload
  = lens _giPayload (\ s a -> s{_giPayload = a})
-- | JSONP
giCallback :: Lens' GroupsInsert (Maybe Text)
giCallback
  = lens _giCallback (\ s a -> s{_giCallback = a})
-- Wires the request record into the generated servant client: the
-- response type is 'Group' and the call requires the directory.group scope.
instance GoogleRequest GroupsInsert where
        type Rs GroupsInsert = Group
        type Scopes GroupsInsert =
             '["https://www.googleapis.com/auth/admin.directory.group"]
        requestClient GroupsInsert'{..}
          = go _giXgafv _giUploadProtocol _giAccessToken
              _giUploadType
              _giCallback
              (Just AltJSON)
              _giPayload
              directoryService
          where go
                  = buildClient (Proxy :: Proxy GroupsInsertResource)
                      mempty
| brendanhay/gogol | gogol-admin-directory/gen/Network/Google/Resource/Directory/Groups/Insert.hs | mpl-2.0 | 4,288 | 0 | 18 | 1,064 | 711 | 414 | 297 | 102 | 1 |
{-# LANGUAGE BangPatterns, FlexibleContexts, TypeFamilies #-}
-- | Provides high level functions to do geometric transformations on images.
--
-- Every transformation is been declared @INLINABLE@ so new image types could be
-- specialized.
module Vision.Image.Transform (
InterpolMethod (..), crop, resize, horizontalFlip, verticalFlip, floodFill
) where
import Control.Monad (when)
import Control.Monad.Primitive (PrimMonad (..))
import Data.RatioInt (RatioInt, (%))
import Vision.Image.Interpolate (Interpolable, bilinearInterpol)
import Vision.Image.Mutable (MutableImage (..))
import Vision.Image.Type (
MaskedImage (..), Image (..), ImageChannel, FromFunction (..)
)
import Vision.Primitive (
Z (..), (:.) (..), Point, RPoint (..), Rect (..), Size, ix2, toLinearIndex
)
-- | Defines the set of possible methods for pixel interpolations when looking
-- for a pixel at floating point coordinates.
-- | Defines the set of possible methods for pixel interpolations when looking
-- for a pixel at floating point coordinates.
data InterpolMethod =
      TruncateInteger -- ^ Selects the top left pixel (fastest).
    | NearestNeighbor -- ^ Selects the nearest pixel (fast).
    | Bilinear        -- ^ Does a double linear interpolation over the four
                      -- surrounding points (slow).
-- | Maps the content of the image\'s rectangle in a new image.
-- No bounds check: the rectangle is assumed to lie inside the source
-- image (TODO confirm callers guarantee this).
crop :: (Image i1, FromFunction i2, ImagePixel i1 ~ FromFunctionPixel i2)
     => Rect -> i1 -> i2
crop !(Rect rx ry rw rh) !img =
    -- Each destination pixel (y, x) reads source pixel (ry+y, rx+x).
    fromFunction (Z :. rh :. rw) $ \(Z :. y :. x) ->
        img `index` ix2 (ry + y) (rx + x)
{-# INLINABLE crop #-}
-- | Resizes the 'Image' using the given interpolation method.
-- Each method maps destination coordinates back to source coordinates
-- using the width/height ratios; the +0.5/-0.5 terms centre the sample
-- on the pixel rather than its top-left corner.
resize :: (Image i1, Interpolable (ImagePixel i1), FromFunction i2
         , ImagePixel i1 ~ FromFunctionPixel i2, Integral (ImageChannel i1))
       => InterpolMethod -> Size -> i1 -> i2
resize !method !size'@(Z :. h' :. w') !img =
    case method of
        TruncateInteger ->
            -- Rounds source coordinates toward zero.
            let !widthRatio = double w / double w'
                !heightRatio = double h / double h'
                line !y' = truncate $ (double y' + 0.5) * heightRatio - 0.5
                {-# INLINE line #-}
                col !x' = truncate $ (double x' + 0.5) * widthRatio - 0.5
                {-# INLINE col #-}
                f !y !(Z :. _ :. x') = let !x = col x'
                                       in img `index` ix2 y x
                {-# INLINE f #-}
            in fromFunctionLine size' line f
        NearestNeighbor ->
            -- Same mapping but rounds to the nearest source pixel.
            let !widthRatio = double w / double w'
                !heightRatio = double h / double h'
                line !y' = round $ (double y' + 0.5) * heightRatio - 0.5
                {-# INLINE line #-}
                col !x' = round $ (double x' + 0.5) * widthRatio - 0.5
                {-# INLINE col #-}
                f !y !(Z :. _ :. x') = let !x = col x'
                                       in img `index` ix2 y x
                {-# INLINE f #-}
            in fromFunctionLine size' line f
        Bilinear ->
            -- Exact rational coordinates, interpolated over the four
            -- surrounding source pixels.
            let !widthRatio = w % w'
                !maxWidth = ratio (w - 1)
                !heightRatio = (h - 1) % (h' - 1)
                !maxHeight = ratio (h - 1)
                -- Limits the interpolation to inner pixel as first and last
                -- pixels can have out of bound coordinates.
                bound !limit = min limit . max 0
                {-# INLINE bound #-}
                line !y' = bound maxHeight $ (ratio y' + 0.5) * heightRatio
                                           - 0.5
                {-# INLINE line #-}
                col !x' = bound maxWidth $ (ratio x' + 0.5) * widthRatio
                                         - 0.5
                {-# INLINE col #-}
                f !y !x _ = img `bilinearInterpol` RPoint x y
                {-# INLINE f #-}
            in fromFunctionCached size' line col f
  where
    !(Z :. h :. w) = shape img
{-# INLINABLE resize #-}
-- | Reverses the image horizontally (mirror around the vertical axis).
horizontalFlip :: (Image i1, FromFunction i2
                 , ImagePixel i1 ~ FromFunctionPixel i2)
               => i1 -> i2
horizontalFlip !img =
    -- Destination column x' reads source column (width - 1 - x').
    let f !(Z :. y :. x') = let !x = maxX - x'
                            in img `index` ix2 y x
        {-# INLINE f #-}
    in fromFunction size f
  where
    !size@(Z :. _ :. w) = shape img
    !maxX = w - 1
{-# INLINABLE horizontalFlip #-}
-- | Reverses the image vertically (mirror around the horizontal axis).
verticalFlip :: (Image i1, FromFunction i2
               , ImagePixel i1 ~ FromFunctionPixel i2)
             => i1 -> i2
verticalFlip !img =
    -- Destination line y' reads source line (height - 1 - y').
    let line !y' = maxY - y'
        {-# INLINE line #-}
        f !y !(Z :. _ :. x) = img `index` ix2 y x
        {-# INLINE f #-}
    in fromFunctionLine size line f
  where
    !size@(Z :. h :. _) = shape img
    !maxY = h - 1
{-# INLINABLE verticalFlip #-}
-- | Paints with a new value the pixels surrounding the given point of the image
-- which have the same value as the starting point.
-- Scan-line flood fill over a mutable image; works on linear indices to
-- avoid repeated 2-D index arithmetic.
floodFill :: (PrimMonad m, MutableImage i, Eq (ImagePixel (Freezed i)))
          => Point -> ImagePixel (Freezed i) -> i (PrimState m) -> m ()
floodFill !start !newVal !img = do
    let !linearIX = toLinearIndex size start
    val <- linearRead img linearIX
    when (val /= newVal) $ -- No reason to repaint using the same color.
        go val start linearIX
  where
    !size@(Z :. h :. w) = mShape img
    -- Runs the flood-fill algorithm from the starting point then checks the
    -- pixels at the left and at the right of the point until their value
    -- change (scanLine). Then visits the upper and lower line of neighboring
    -- pixels (visitLine).
    go !val !(Z :. y :. x) !linearIX = do
        pix <- linearRead img linearIX
        when (pix == val) $ do
            -- Linear indices of the first and last pixels of this row.
            let !minLineLinearIX = linearIX - x
                !maxLineLinearIX = minLineLinearIX + w - 1
            linearWrite img linearIX newVal
            stopLeft <- scanLine val (< minLineLinearIX) pred (linearIX - 1)
            stopRight <- scanLine val (> maxLineLinearIX) succ (linearIX + 1)
            let !from = stopLeft + 1
                !to = stopRight - 1
                !xFrom = from - minLineLinearIX
            -- Recurse into the row above and below the filled span.
            when (y > 0) $
                visitLine val (to - w) (ix2 (y - 1) xFrom) (from - w)
            when ((y + 1) < h) $
                visitLine val (to + w) (ix2 (y + 1) xFrom) (from + w)
    -- Paints pixels in one direction until the value changes or the row
    -- boundary is reached; returns the first index NOT painted.
    scanLine !val !stop !next !linearIX
        | stop linearIX = return linearIX
        | otherwise     = do
            pix <- linearRead img linearIX
            if pix == val then do linearWrite img linearIX newVal
                                  scanLine val stop next (next linearIX)
                          else return linearIX
    -- Runs 'go' on every pixel of a neighbouring row span.
    visitLine !val !maxLinearIX !pt@(y :. x) !linearIX
        | linearIX > maxLinearIX = return ()
        | otherwise              = do
            go val pt linearIX
            visitLine val maxLinearIX (y :. (x + 1)) (linearIX + 1)
{-# INLINABLE floodFill #-}
-- | Convert any integral value to a 'Double'; specialised form of
-- 'fromIntegral' used by the resize coordinate arithmetic above.
double :: Integral a => a -> Double
double n = fromIntegral n
-- | Convert any integral value to an exact 'RatioInt'; used by the
-- bilinear branch of 'resize' to avoid floating-point drift.
ratio :: Integral a => a -> RatioInt
ratio = fromIntegral
| TomMD/friday | src/Vision/Image/Transform.hs | lgpl-3.0 | 7,046 | 0 | 17 | 2,457 | 2,010 | 1,010 | 1,000 | 125 | 3 |
-- A program to test receiving messages on a message queue.
--
-- Based on sipc code from: examples/mq_reader.c
--
module Main where
import Bindings.SELinux.SIPC
import Control.Monad (when)
import Foreign
import Foreign.C
import System.IO
-- Key which sender and receiver have agreed upon
sipcKey = "sipc_mq_test"
-- Amount of data to allocate inside the IPC handle (bytes).
dataLen = 8192
-- End of message marker which sender and receiver have agreed upon;
-- receiving this string terminates the read loop in 'recvMessages'.
dataEnd = "0xDEADBEEF"
-- | Open the SysV message queue as a receiver, drain it until the end
-- marker arrives, then close the handle. A null handle from sipcOpen
-- signals failure and is reported on stderr.
main :: IO ()
main = do
  sipc <- sipcOpen sipcKey SipcReceiver SipcSysvMqueues dataLen
  if sipc == nullPtr
    then do
      hPutStrLn stderr "Error: Unable to create message queue"
    else do
      recvMessages sipc
      sipcClose sipc
-- |Receive data from the message queue until the end-of-transmission
-- marker ('dataEnd') has been received. Stops silently when
-- sipcRecvData reports a non-zero result.
recvMessages :: SipcPtr -> IO ()
recvMessages sipc = do
  -- NOTE(review): 'len' is unused; presumably dataP is NUL-terminated —
  -- confirm against the sipc binding.
  (result, dataP, len) <- sipcRecvData sipc
  when (result == 0) $ recv dataP
  where
    -- Marshal the C string, release the buffer, then either stop (end
    -- marker) or print and loop.
    recv dataP = do
      dataStr <- peekCString dataP
      free dataP
      when (dataStr /= dataEnd) $ recvNext dataStr
    recvNext msg = do
      putStr msg
      recvMessages sipc
| justinethier/hs-bindings-sipc | src/Examples/MQReader.hs | lgpl-3.0 | 1,168 | 0 | 12 | 279 | 250 | 127 | 123 | 29 | 2 |
{-# OPTIONS_GHC -Wall #-}
-- http://www.cis.upenn.edu/~cis194/spring13/hw/05-type-classes.pdf
module Main where
import ExprT
import Parser
-- Exercise 1
-- | Evaluate an 'ExprT' expression tree bottom-up to its integer value.
eval :: ExprT -> Integer
eval expr = case expr of
  Lit n   -> n
  Add l r -> eval l + eval r
  Mul l r -> eval l * eval r
-- Exercise 2
-- | Parse an arithmetic expression and evaluate it; 'Nothing' on a
-- parse failure.
evalStr :: String -> Maybe Integer
evalStr s = fmap eval (parseExp Lit Add Mul s)
-- Exercise 3
-- | Abstract syntax for arithmetic expressions: any type with literals,
-- addition and multiplication can interpret an expression.
class Expr a where
  lit :: Integer -> a
  add :: a -> a -> a
  mul :: a -> a -> a
-- The trivial interpretation: build the AST itself.
instance Expr ExprT where
  lit = ExprT.Lit
  add = ExprT.Add
  mul = ExprT.Mul
-- | Pin an overloaded expression to the 'ExprT' interpretation.
reify :: ExprT -> ExprT
reify = id
-- Exercise 4
-- Interpret expressions directly as integers.
instance Expr Integer where
  lit = id
  add = (+)
  mul = (*)
-- Boolean interpretation: positive literals are True, add is OR, mul is AND.
instance Expr Bool where
  lit = (>0)
  add = (||)
  mul = (&&)
newtype MinMax = MinMax Integer deriving (Eq, Show)
newtype Mod7 = Mod7 Integer deriving (Eq, Show)
-- Tropical-style interpretation: add is max, mul is min.
instance Expr MinMax where
  lit x = MinMax x
  add (MinMax x) (MinMax y) = MinMax $ max x y
  mul (MinMax x) (MinMax y) = MinMax $ min x y
-- Arithmetic modulo 7; literals are reduced on construction.
instance Expr Mod7 where
  lit x = Mod7 $ mod x 7
  add (Mod7 x) (Mod7 y) = Mod7 $ mod (x+y) 7
  mul (Mod7 x) (Mod7 y) = Mod7 $ mod (x*y) 7
-- | One polymorphic test expression, specialised below at each
-- interpretation to compare their results.
testExp :: Expr a => Maybe a
testExp = parseExp lit add mul "(3 * -4) + 5"
testInteger :: Maybe Integer
testInteger = testExp :: Maybe Integer
testBool :: Maybe Bool
testBool = testExp :: Maybe Bool
testMM :: Maybe MinMax
testMM = testExp :: Maybe MinMax
testSat :: Maybe Mod7
testSat = testExp :: Maybe Mod7
-- | Entry point: just prints a greeting.
main :: IO ()
main = putStrLn "hello world"
| prt2121/haskell-practice | cis194-s13-hw05/src/Main.hs | apache-2.0 | 1,539 | 0 | 9 | 411 | 617 | 327 | 290 | 51 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module Types
( ConcordOpts(..)
) where
import Control.Lens
import ClassyPrelude
-- | Command-line options for concord.
data ConcordOpts
  = CO
  { _optsInput :: !FilePath           -- ^ Input file to read.
  , _optsOutput :: !(Maybe FilePath)  -- ^ Output file; Nothing presumably
                                      --   means stdout — TODO confirm.
  } deriving (Show)
-- Template Haskell: generates the 'optsInput' / 'optsOutput' lenses.
makeLenses ''ConcordOpts
| erochest/concord | Types.hs | apache-2.0 | 379 | 0 | 11 | 110 | 68 | 40 | 28 | 17 | 0 |
module NewBase60 where
import qualified Data.Map.Strict as Map
-- NewBase60 digit alphabet: 0-9, A-Z without I and O, '_', a-z without l
-- (the confusable characters are omitted). A digit's value is its index.
digits = "0123456789ABCDEFGHJKLMNPQRSTUVWXYZ_abcdefghijkmnopqrstuvwxyz"
-- | Render a non-negative Int in NewBase60, most significant digit first.
-- (The inner helper previously re-bound @n@, shadowing the parameter;
-- renamed for -Wname-shadowing cleanliness.)
itob60 :: Int -> String
itob60 0 = "0"
itob60 n = reverse (go n)
  where
    -- Emit digits least-significant first; reversed by the caller above.
    go 0 = ""
    go m = nextDigit m : go (m `div` 60)
    nextDigit m = digits !! (m `mod` 60)
-- | Parse a NewBase60 string back to its integer value.
-- BUG FIX: the type signature was written for the misspelled name
-- @b60tot@ while the binding is @b60toi@ — GHC rejects that with
-- "type signature lacks an accompanying binding". Signature corrected.
b60toi :: String -> Int
b60toi s = sum $ zipWith (*) pValues $ reverse dValues
  where
    -- Index of each digit character in the alphabet (its numeric value).
    indexes = Map.fromList $ zip digits [0..]
    -- TODO: accept confusable digits as representing the appropriate value,
    -- for instance interpret "O" as the same value as "0".
    -- Place values [60^0, 60^1, 60^2, ...].
    pValues = map (60^) [0..]
    -- Digit values of the given string s, via lookup across the indexes.
    dValues = map (indexes Map.!) s
-- Aliases matching the names used in Tantek's reference implementation
-- ("sexagesimal" to/from integer).
itosxg = itob60
sxgtoi = b60toi
-- TODO implement the 0 padded version
| jessebmiller/NewBase60 | newBase60.hs | bsd-2-clause | 981 | 0 | 10 | 221 | 214 | 121 | 93 | 16 | 2 |
-----------------------------------------------------------------------------
-- |
-- Module : MailCleaner.Options
-- Copyright : Thomas Sutton 2010
-- License : BSD3
--
-- Maintainer : Thomas Sutton <me@thomas-sutton.id.au>
-- Stability : experimental
-- Portability : unknown
--
-- Option handling code for MailCleaner.
--
-- This module implements the command-line options for MailCleaner. It uses a
-- common pattern for option representation where option handlers manipulate
-- record of values used during operation.
--
-----------------------------------------------------------------------------
module MailCleaner.Options (
header
, options
, defaultOptions
, Options(..)
) where
import System.Console.GetOpt
import System.Exit
import System.IO
import Text.Read (readMaybe)

import MailCleaner.Version
-- | Program state controlled by command-line options.
data Options = Options
  { optName :: FilePath -- ^ Name of the input file.
  , optInput :: Handle -- ^ Handle to read input from.
  , optOutput :: Handle -- ^ Handle to write output to.
  , optError :: Maybe Handle -- ^ Handle to report errors on; Nothing suppresses them.
  , optField :: Int -- ^ Zero-based index of the field containing the email address.
  }
-- | Default options reading standard input, writing standard output,
-- suppressing errors and checking the first field.
defaultOptions :: Options
defaultOptions = Options
  { optName = "<stdin>"
  , optInput = stdin
  , optOutput = stdout
  , optError = Nothing
  , optField = 0
  }
-- | Options for use by getOpt. Each handler transforms the running
-- 'Options' record inside IO (so it can open files or exit).
options :: [OptDescr (Options -> IO Options)]
options =
  [ Option ['V'] ["version"] (NoArg showVersion) "show version number"
  , Option ['h'] ["help"] (NoArg showHelp) "show the help"
  , Option ['f'] ["field"] (ReqArg fieldNumber "NUMBER") "Email address field. [1]"
  , Option ['i'] ["input"] (ReqArg readInput "FILE") "Read from FILE"
  , Option ['o'] ["output"] (ReqArg writeOutput "FILE") "Write to FILE"
  ]
--, Option ['e'] ["error"] (OptArg writeErrors "FILE") "Write errors to FILE"
-- | Program name and version line used by help and version output.
header :: String
header = "mailcleaner " ++ version
-- | Show the help message and terminate.
showHelp :: Options -> IO Options
showHelp _ = do
  putStrLn $ usageInfo header options
  exitWith ExitSuccess
-- | Print the version header and terminate successfully.
showVersion :: Options -> IO Options
showVersion _ = putStrLn header >> exitWith ExitSuccess
-- | Open and remember the handle to read input from.
readInput :: FilePath -> Options -> IO Options
readInput arg opt = do
  putStrLn $ "# Using " ++ arg ++ " for input."
  hand <- openFile arg ReadMode
  -- Unbuffered so lines are processed as soon as they arrive.
  hSetBuffering hand NoBuffering
  return opt { optInput = hand, optName = arg }
-- | Open and remember the handle to write output to.
writeOutput :: FilePath -> Options -> IO Options
writeOutput arg opt = do
  putStrLn $ "# Using " ++ arg ++ " for output."
  hand <- openFile arg WriteMode
  -- Unbuffered so results appear immediately.
  hSetBuffering hand NoBuffering
  return opt { optOutput = hand }
-- | Open and remember the handle to record errors to; with no argument,
-- falls back to stderr.
-- NOTE(review): only referenced by the commented-out @--error@ option in
-- 'options', so this is currently dead code — confirm before removing.
writeErrors :: Maybe FilePath -> Options -> IO Options
writeErrors arg opt = do
  (handle, fname) <- maybe
                     (return (stderr, "<stderr>"))
                     (\fp -> openFile fp WriteMode >>= \h -> return (h, fp))
                     arg
  putStrLn $ "# Using " ++ fname ++ " for errors."
  hSetBuffering handle NoBuffering
  return opt { optError = Just handle }
-- | Parse and validate the 1-based field number from the command line,
-- storing it 0-based in the options.
-- Uses 'readMaybe' instead of the partial 'read' so that a non-numeric
-- argument produces a clean diagnostic instead of a
-- "Prelude.read: no parse" crash.
fieldNumber :: String -> Options -> IO Options
fieldNumber arg opt =
  case readMaybe arg of
    Nothing -> fail $ "Field number must be an integer: " ++ arg
    Just field
      | field < 1 -> fail "Field number must be greater than 0!"
      | otherwise -> return opt { optField = field - 1 }
| thsutton/mailcleaner | src/MailCleaner/Options.hs | bsd-2-clause | 3,690 | 0 | 14 | 816 | 773 | 420 | 353 | 67 | 2 |
module Drasil.Projectile.Figures (figLaunch) where
import Language.Drasil
import Data.Drasil.Concepts.Documentation (physicalSystem)
-- Relative path from the generated artefact directory to the Projectile
-- resource files.
resourcePath :: String
resourcePath = "../../../datafiles/Projectile/"
-- | Figure showing the launch of the projectile (the physical system),
-- rendered at 70% width.
figLaunch :: LabelledContent
figLaunch = llcc (makeFigRef "Launch") $ figWithWidth (S "The" +:+ phrase physicalSystem)
  (resourcePath ++ "Launch.jpg") 70
| JacquesCarette/literate-scientific-software | code/drasil-example/Drasil/Projectile/Figures.hs | bsd-2-clause | 363 | 0 | 9 | 40 | 89 | 50 | 39 | 8 | 1 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Distance.GA.Corpus
( corpus ) where
import Prelude
import Data.String
import Duckling.Distance.Types
import Duckling.Locale
import Duckling.Resolve
import Duckling.Testing.Types
-- | Test corpus for Irish (GA) distance expressions.
corpus :: Corpus
corpus = (testContext {locale = makeLocale GA Nothing}, testOptions, allExamples)
-- Each group pairs an expected resolved distance with the Irish phrases
-- that should parse to it.
allExamples :: [Example]
allExamples = concat
  [ examples (simple Kilometre 3)
             [ "3 ciliméadair"
             , "3 km"
             , "3km"
             , "3k"
             ]
  , examples (simple Kilometre 3.0)
             [ "3.0 km"
             ]
  , examples (simple Mile 8)
             [ "8 mhíle"
             , "8 míle"
             ]
  , examples (simple M 9)
             [ "9m"
             ]
  , examples (simple Centimetre 2)
             [ "2cm"
             , "2 cheintiméadar"
             ]
  ]
| facebookincubator/duckling | Duckling/Distance/GA/Corpus.hs | bsd-3-clause | 1,072 | 0 | 9 | 349 | 207 | 122 | 85 | 28 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Language.Haskell.Format.Nested
( qop
, if_
, case_
, pattern_
, do_
, generator
) where
import Language.Haskell.Exts
import Language.Haskell.Format.Internal as Format
import Language.Haskell.Format.Types
import qualified Language.Haskell.Format.Atom as Atom
import qualified Language.Haskell.Format.Pattern as Pattern
-- | Nest a format under the rendered infix operator.
qop :: QOp CommentedSrc -> Format -> Format
qop qop' =
  nest (Atom.qop qop')
-- | Nest under the @if@ keyword.
if_ :: Format -> Format
if_ =
  nest "if"
-- | Nest under the @case@ keyword.
case_ :: Format -> Format
case_ =
  nest "case"
-- | Nest under a guard pattern, e.g. @pat |@.
pattern_ :: Pat CommentedSrc -> Format -> Format
pattern_ pat =
  nest (Pattern.format pat <> " |")
-- | Nest under the @do@ keyword.
do_ :: Format -> Format
do_ =
  nest "do"
-- | Nest under a do-block generator, e.g. @pat <-@.
generator :: Pat CommentedSrc -> Format -> Format
generator pat =
  nest (Pattern.format pat <> " <-")
-- | Place @anchor@ before the first line of @target@ and pad every
-- following line so the block stays aligned one column past the anchor.
-- NOTE(review): the @(x1 : xs)@ pattern is partial — assumes @target@
-- renders to at least one line; confirm callers guarantee this.
nest :: Format -> Format -> Format
nest anchor target =
  Format.intercalate newLine (firstLine : paddedLines)
  where
    (x1 : xs) =
      Format.lines target
    firstLine =
      anchor <> " " <> x1
    paddedLines =
      map (padding <>) xs
    -- Spaces matching the anchor's width plus the separating space.
    padding =
      Format.fromString (replicate depth ' ')
    depth =
      Format.length anchor + 1
| hecrj/haskell-format | src/Language/Haskell/Format/Nested.hs | bsd-3-clause | 1,236 | 0 | 9 | 342 | 351 | 194 | 157 | 44 | 1 |
{-# LANGUAGE FlexibleInstances, MultiParamTypeClasses, TypeFamilies, UndecidableInstances #-}
module Lang.Haskell where
import qualified Prelude as P
import Generic.Prelude
-- | Maps AwesomePrelude types to their native Haskell representation.
type family H a :: *
-- | The Haskell interpretation: a newtype over the native representation.
newtype Haskell a = Hs { runHaskell :: H a }
-- * Haskell instances for AwesomePrelude 'data types'.
instance NameC Haskell where
  named _ a = a -- drop name annotation, for now
type instance H (a -> b) = H a -> H b
-- Lambda abstraction / application / recursion interpreted natively.
instance LamFunC Haskell where
  lam f = Hs (\x -> runHaskell (f (Hs x)))
instance AppFunC Haskell where
  app (Hs f) (Hs x) = Hs (f x)
instance RecFunC Haskell where
  fix f = f (fix f)
type instance H Bool = P.Bool
-- Booleans interpreted as Prelude Bool; 'bool' is the eliminator
-- (false-case first, then true-case).
instance BoolC Haskell where
  false = Hs P.False
  true = Hs P.True
  bool x y (Hs b) = if b then y else x
type instance H (Maybe a) = P.Maybe (H a)
instance MaybeC Haskell where
  nothing = Hs P.Nothing
  just (Hs x) = Hs (P.Just x)
  maybe n f (Hs mx) = P.maybe n (\x -> f (Hs x)) mx
type instance H (a, b) = (H a, H b)
instance TupleC Haskell where
  mkTuple (Hs x) (Hs y) = Hs (x, y)
  tuple f (Hs (x, y)) = f (Hs x) (Hs y)
type instance H (Either a b) = P.Either (H a) (H b)
instance EitherC Haskell where
  left (Hs x) = Hs (P.Left x)
  right (Hs y) = Hs (P.Right y)
  either l r (Hs e) = P.either (\x -> l (Hs x)) (\y -> r (Hs y)) e
type instance H [a] = [H a]
-- Lists: 'list' is the eliminator taking a nil-case and a cons-case.
instance ListC Haskell where
  nil = Hs []
  cons (Hs x) (Hs xs) = Hs (x:xs)
  list n c (Hs xs) = case xs of { [] -> n; y:ys -> c (Hs y) (Hs ys) }
-- * Haskell instances of AwesomePrelude type classes.
-- Numeric operations delegate directly to the Prelude.
instance (P.Num a) => Num Haskell a where
  (+) = (P.+)
  (-) = (P.-)
  (*) = (P.*)
  fromInteger = P.fromInteger
-- Equality returns the embedded boolean defined in BoolC above.
instance (P.Eq a) => Eq Haskell a where
  x == y = if x P.== y then true else false
| tomlokhorst/AwesomePrelude | src/Lang/Haskell.hs | bsd-3-clause | 1,817 | 0 | 13 | 514 | 876 | 464 | 412 | 46 | 0 |
{-# LANGUAGE OverloadedStrings, TupleSections, PackageImports #-}
module Network.PeyoTLS.HandshakeMonad (
TH.TlsM, TH.run, HandshakeM, execHandshakeM, withRandom, randomByteString,
ValidateHandle(..), handshakeValidate,
TH.TlsHandle(..), TH.ContentType(..),
setCipherSuite, flushCipherSuite, debugCipherSuite,
tlsGetContentType, tlsGet, tlsPut,
generateKeys, encryptRsa, decryptRsa, rsaPadding,
TH.Alert(..), TH.AlertLevel(..), TH.AlertDesc(..),
TH.Side(..), TH.RW(..), handshakeHash, finishedHash, throwError ) where
import Prelude hiding (read)
import Control.Applicative
import qualified Data.ASN1.Types as ASN1
import Control.Arrow (first)
import Control.Monad (liftM)
import "monads-tf" Control.Monad.Trans (lift)
import "monads-tf" Control.Monad.State (StateT, execStateT, get, gets, put, modify)
import qualified "monads-tf" Control.Monad.Error as E (throwError)
import "monads-tf" Control.Monad.Error.Class (strMsg)
import Data.HandleLike (HandleLike(..))
import System.IO (Handle)
import "crypto-random" Crypto.Random (CPRG)
import qualified Data.ByteString as BS
import qualified Data.X509 as X509
import qualified Data.X509.Validation as X509
import qualified Data.X509.CertificateStore as X509
import qualified Crypto.Hash.SHA256 as SHA256
import qualified Crypto.PubKey.HashDescr as HD
import qualified Crypto.PubKey.RSA as RSA
import qualified Crypto.PubKey.RSA.PKCS15 as RSA
import qualified Network.PeyoTLS.TlsHandle as TH (
TlsM, Alert(..), AlertLevel(..), AlertDesc(..),
run, withRandom, randomByteString,
TlsHandle(..), ContentType(..),
newHandle, getContentType, tlsGet, tlsPut, generateKeys,
cipherSuite, setCipherSuite, flushCipherSuite, debugCipherSuite,
Side(..), RW(..), finishedHash, handshakeHash, CipherSuite(..) )
-- | Abort the handshake with a TLS alert at the given level/description.
throwError :: HandleLike h =>
  TH.AlertLevel -> TH.AlertDesc -> String -> HandshakeM h g a
throwError al ad m = E.throwError $ TH.Alert al ad m
-- | Handshake computation: the TLS handle paired with the running
-- SHA-256 transcript hash, layered over the TLS monad.
type HandshakeM h g = StateT (TH.TlsHandle h g, SHA256.Ctx) (TH.TlsM h g)
-- | Run a handshake over a fresh TLS handle (with an initial SHA-256
-- transcript context) and return the resulting handle.
execHandshakeM :: HandleLike h =>
  h -> HandshakeM h g () -> TH.TlsM h g (TH.TlsHandle h g)
execHandshakeM h =
  liftM fst . ((, SHA256.init) `liftM` TH.newHandle h >>=) . execStateT
-- | Run a pure function over the PRNG state.
withRandom :: HandleLike h => (g -> (a, g)) -> HandshakeM h g a
withRandom = lift . TH.withRandom
-- | Draw @n@ random bytes from the PRNG.
randomByteString :: (HandleLike h, CPRG g) => Int -> HandshakeM h g BS.ByteString
randomByteString = lift . TH.randomByteString
-- | Handles whose monad can validate an X.509 certificate chain against a
-- trust store; the result is the list of failures (empty list means valid).
class HandleLike h => ValidateHandle h where
	validate :: h -> X509.CertificateStore -> X509.CertificateChain ->
		HandleMonad h [X509.FailedReason]
-- | Plain IO 'Handle's validate with the default hooks and SHA-256.
-- NOTE(review): 'X509.checkFQHN' is disabled and the (hostname, suffix)
-- pair is empty, so host-name matching is not performed here — confirm it
-- is enforced elsewhere (e.g. via 'TH.names').  No revocation caching is
-- done: the cache always answers "unknown".
instance ValidateHandle Handle where
	validate _ cs = X509.validate X509.HashSHA256 X509.defaultHooks
		validationChecks cs validationCache ("", "")
		where
		validationCache = X509.ValidationCache
			(\_ _ _ -> return X509.ValidationCacheUnknown)
			(\_ _ _ -> return ())
		validationChecks = X509.defaultChecks { X509.checkFQHN = False }
-- | Names a certificate may be identified by: the subject Common Name (if
-- it is a printable ASN.1 string) consed onto all DNS entries of the
-- SubjectAltName extension (empty when the extension is absent).
certNames :: X509.Certificate -> [String]
certNames = nms
	where
	-- prepend the CN (when present) to the alt names
	nms c = maybe id (:) <$> nms_ <*> ans $ c
	nms_ = (ASN1.asn1CharacterToString =<<) .
		X509.getDnElement X509.DnCommonName . X509.certSubjectDN
	ans = maybe [] ((\ns -> [s | X509.AltNameDNS s <- ns])
			. \(X509.ExtSubjectAltName ns) -> ns)
		. X509.extensionGet . X509.certExtensions
-- | Validate the peer's certificate chain: store the leaf certificate's
-- names into the handle ('TH.names'), then run 'validate' against the
-- given trust store.  Calls 'error' on an empty chain.
handshakeValidate :: ValidateHandle h =>
	X509.CertificateStore -> X509.CertificateChain ->
	HandshakeM h g [X509.FailedReason]
handshakeValidate cs cc@(X509.CertificateChain (c : _)) = gets fst >>= \t -> do
	modify . first $ const t { TH.names = certNames $ X509.getCertificate c }
	lift . lift . lift $ validate (TH.tlsHandle t) cs cc
handshakeValidate _ _ = error "empty certificate chain"
-- | Record the negotiated cipher suite in the handle.
setCipherSuite :: HandleLike h => TH.CipherSuite -> HandshakeM h g ()
setCipherSuite = modify . first . TH.setCipherSuite
-- | Switch the given read/write side of the connection over to the
-- pending cipher suite.
flushCipherSuite :: (HandleLike h, CPRG g) => TH.RW -> HandshakeM h g ()
flushCipherSuite p =
	TH.flushCipherSuite p `liftM` gets fst >>= modify . first . const
-- | Emit the current cipher suite to the debug log, tagged with @m@.
debugCipherSuite :: HandleLike h => String -> HandshakeM h g ()
debugCipherSuite m = do t <- gets fst; lift $ TH.debugCipherSuite t m
-- | Content type of the next incoming record.
tlsGetContentType :: (HandleLike h, CPRG g) => HandshakeM h g TH.ContentType
tlsGetContentType = gets fst >>= lift . TH.getContentType
-- | Read exactly @n@ bytes from the connection, threading the updated
-- handle/hash state back into the monad.
tlsGet :: (HandleLike h, CPRG g) => Int -> HandshakeM h g BS.ByteString
tlsGet n = do ((_, bs), t') <- lift . flip TH.tlsGet n =<< get; put t'; return bs
-- | Write @bs@ as a record of content type @ct@.
tlsPut :: (HandleLike h, CPRG g) =>
	TH.ContentType -> BS.ByteString -> HandshakeM h g ()
tlsPut ct bs = get >>= lift . (\t -> TH.tlsPut t ct bs) >>= put
-- | Derive the session keys from the (client random, server random) pair
-- and the premaster secret, using the handle's current cipher suite, and
-- store them in the handle's 'TH.keys' field.
generateKeys :: HandleLike h => TH.Side ->
	(BS.ByteString, BS.ByteString) -> BS.ByteString -> HandshakeM h g ()
generateKeys p (cr, sr) pms = do
	t <- gets fst
	k <- lift $ TH.generateKeys p (TH.cipherSuite t) cr sr pms
	modify . first $ const t { TH.keys = k }
-- | RSA-encrypt @p@ (PKCS#1 v1.5) under the peer's public key; an
-- encryption failure is rethrown as a string error in the monad.
encryptRsa :: (HandleLike h, CPRG g) =>
	RSA.PublicKey -> BS.ByteString -> HandshakeM h g BS.ByteString
encryptRsa pk p = either (E.throwError . strMsg . show) return =<<
	withRandom (\g -> RSA.encrypt g pk p)
-- | RSA-decrypt @e@ with our private key via 'RSA.decryptSafer' (blinded
-- against timing attacks); failures are rethrown as string errors.
decryptRsa :: (HandleLike h, CPRG g) =>
	RSA.PrivateKey -> BS.ByteString -> HandshakeM h g BS.ByteString
decryptRsa sk e = either (E.throwError . strMsg . show) return =<<
	withRandom (\g -> RSA.decryptSafer g sk e)
-- | PKCS#1 signature padding of the SHA-256 DigestInfo of @bs@, sized to
-- the given public key; calls 'error' if padding fails.
rsaPadding :: RSA.PublicKey -> BS.ByteString -> BS.ByteString
rsaPadding pk bs = case RSA.padSignature (RSA.public_size pk) $
		HD.digestToASN1 HD.hashDescrSHA256 bs of
	Right pd -> pd; Left m -> error $ show m
-- | Hash over the handshake messages exchanged so far.
handshakeHash :: HandleLike h => HandshakeM h g BS.ByteString
handshakeHash = get >>= lift . TH.handshakeHash
-- | Verify data for the Finished message of the given side.
finishedHash :: (HandleLike h, CPRG g) => TH.Side -> HandshakeM h g BS.ByteString
finishedHash p = get >>= lift . flip TH.finishedHash p
| YoshikuniJujo/forest | subprojects/tls-analysis/server/src/Network/PeyoTLS/HandshakeMonad.hs | bsd-3-clause | 5,659 | 64 | 18 | 873 | 2,147 | 1,173 | 974 | 110 | 2 |
--
-- Copyright © 2013-2014 Anchor Systems, Pty Ltd and Others
--
-- The code in this file, and the program it is a part of, is
-- made available to you by its authors as open source software:
-- you can redistribute it and/or modify it under the terms of
-- the 3-clause BSD licence.
--
{-# LANGUAGE OverloadedStrings #-}
module App.Run (
withServer
) where
import Control.Concurrent
import Control.Exception
import Control.Lens
import Control.Monad.IO.Class
import Data.ByteString (ByteString)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as C
import qualified Data.ByteString.Lazy as BSL
import Data.Configurator
import Data.Map.Lazy
import Data.Text (pack)
import qualified Network.HTTP.Client as HC
import qualified Network.HTTP.Types as HT
import Snap.Http.Server (simpleHttpServe)
import Snap.Http.Server.Config
import Snap.Snaplet
import System.IO
import App.App
import Test.SafeCWD
-- | Run @runner@ with a test HTTP server listening on @servePort@,
-- configured from the snaplet config at @cfgPath@.  'bracket' guarantees
-- the server thread is killed even if @runner@ throws.
withServer :: Int -> String -> IO a -> IO a
withServer servePort cfgPath runner = bracket start stop run
    where
        -- launch the server from the current directory and log its thread id
        start = do
            (tid, mvar) <- inDir False "." $ startServer servePort cfgPath
            putStrLn $ "Running server on process " ++ show tid
            return (tid, mvar)
        -- teardown: kill the server thread
        stop (tid, _) = killThread tid
        run (_,_) = runner
-- | Fork the snaplet HTTP server on @servePort@ and return its 'ThreadId'
-- together with an 'MVar' that is filled when the serve loop exits.
-- NOTE(review): startup synchronisation is a fixed 2-second 'threadDelay',
-- not a readiness check.
startServer :: Int -> String -> IO (ThreadId, MVar ())
startServer servePort cfgPath = do
    mvar <- newEmptyMVar
    t <- forkIO $ serve mvar (setPort servePort defaultConfig) app
    -- give the forked server time to come up before returning
    threadDelay $ 2*10^(6::Int)
    return (t, mvar)
    where
        -- run the snaplet; always signal mvar on exit, always run the
        -- snaplet's cleanup action, and log any exception to stderr
        serve mvar config initializer =
            flip finally (putMVar mvar ()) $
            handle handleErr $ do
                hPutStrLn stderr "initializing snaplet"
                (_, handler, doCleanup) <- runSnaplet (Just cfgPath) initializer
                flip finally doCleanup $ do
                    (conf, site) <- combineConfig config handler
                    hPutStrLn stderr "bringing up server"
                    simpleHttpServe conf site
                    hPutStrLn stderr "server killed"
        handleErr :: SomeException -> IO ()
        handleErr e = hPutStrLn stderr $ "startServer exception: " ++ show e
| anchor/snaplet-httpauth | test/App/Run.hs | bsd-3-clause | 2,123 | 0 | 14 | 483 | 556 | 300 | 256 | 48 | 1 |
import Language.Haskell.Extension
import Distribution.Text
import Text.PrettyPrint
-- | Entry point: render the generated module template and write it into
-- the hint source tree.
main :: IO ()
main = writeFile "src/Hint/Extension.hs" $ render moduleDoc
moduleDoc :: Doc
moduleDoc =
vcat [
text "-- this module was automatically generated. do not edit!",
text "-- edit util/mk_extensions_mod.hs instead",
text "module Hint.Extension (",
text " Extension(..), availableExtensions, asExtension",
text ") where",
text "",
text "import Hint.Compat as Compat",
text "",
text "-- | List of the extensions known by the interpreter.",
text "availableExtensions :: [Extension]",
text "availableExtensions = map asExtension Compat.supportedExtensions",
text "",
text "asExtension :: String -> Extension",
text "asExtension s = if isKnown s",
text " then read s",
text " else let no_s = \"No\" ++ s",
text " in if isKnown no_s then read no_s",
text " else UnknownExtension s",
text " where isKnown e = e `elem` map show knownExtensions",
text "",
text "-- | This represents language extensions beyond Haskell 98",
text "-- that are supported by GHC (it was taken from",
text "-- Cabal's @Language.Haskell.Extension@)",
align "data Extension " $
punctuateL (text "| ") . onFirst (text "= ") $ known ++ [unknown],
nest 8 $ text "deriving (Eq, Show, Read)",
text "",
text "knownExtensions :: [Extension]",
align "knownExtensions = [" (punctuate comma known ++ [text "]"]),
text ""
]
-- | Every 'KnownExtension' Cabal knows about.
allKnown :: [KnownExtension]
allKnown = [(minBound :: KnownExtension)..]
-- | All known extensions in their enabling / disabling forms.
allPositive, allNegative :: [Extension]
allPositive = map EnableExtension allKnown
allNegative = map DisableExtension allKnown
-- | Pretty-printed constructor names for every enable/disable extension.
known :: [Doc]
known = map disp (allPositive ++ allNegative)
-- | The extra catch-all constructor emitted into the generated data type.
unknown :: Doc
unknown = text "UnknownExtension String"
-- | Lay out @docs@ after the literal @prefix@: the first document sits on
-- the same line as the prefix and the remaining ones hang underneath,
-- indented by the prefix width.  An empty list yields just the prefix.
align :: String -> [Doc] -> Doc
align prefix docs = case docs of
    []       -> text prefix
    d : rest -> hang (text prefix <> d) (length prefix) (vcat rest)
-- | @punctuateL p [d1, ..., dn] = [d1, p <> d2, ..., p <> dn]@
-- Prefix every document after the first with @p@.  The former singleton
-- clause was redundant: the cons clause maps over an empty tail and
-- yields @[d]@ already.
punctuateL :: Doc -> [Doc] -> [Doc]
punctuateL _ [] = []
punctuateL p (d:ds) = d : map (p <>) ds
-- | Prepend @p@ to the first document only, leaving the rest untouched;
-- the empty list stays empty.
onFirst :: Doc -> [Doc] -> [Doc]
onFirst p docs = case docs of
    []     -> []
    d : ds -> (p <> d) : ds
| meditans/hint | generate/mk_extensions_mod.hs | bsd-3-clause | 2,285 | 0 | 13 | 571 | 588 | 308 | 280 | 56 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
module ML where
import Data.Typeable
import Language.Inferno.Solver (TermVar)
-- | Terms of a small ML-like lambda calculus, over solver term variables.
data Tm = Var TermVar -- ^ variable
        | Abs TermVar Tm -- ^ lambda abstraction
        | App Tm Tm -- ^ application
        | Let TermVar Tm Tm -- ^ let binding
        | Pair Tm Tm -- ^ pair construction
        | Proj Int Tm -- ^ pair projection (by index)
        | Bool Bool -- ^ boolean literal
        | If Tm Tm Tm -- ^ conditional
        deriving (Typeable, Show)
| sweirich/hs-inferno | test/ML.hs | bsd-3-clause | 329 | 0 | 6 | 114 | 92 | 54 | 38 | 13 | 0 |
{-# LANGUAGE FlexibleContexts, FlexibleInstances, MultiParamTypeClasses #-}
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Layout.Maximize
-- Description : Temporarily yank the focused window out of the layout to mostly fill the screen.
-- Copyright : (c) 2007 James Webb
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : xmonad#jwebb,sygneca,com
-- Stability : unstable
-- Portability : unportable
--
-- Temporarily yanks the focused window out of the layout to mostly fill
-- the screen.
--
-----------------------------------------------------------------------------
module XMonad.Layout.Maximize (
-- * Usage
-- $usage
maximize,
maximizeWithPadding,
maximizeRestore,
Maximize, MaximizeRestore,
) where
import XMonad
import qualified XMonad.StackSet as S
import XMonad.Layout.LayoutModifier
import XMonad.Prelude ( partition )
-- $usage
-- You can use this module with the following in your @~\/.xmonad\/xmonad.hs@:
--
-- > import XMonad.Layout.Maximize
--
-- Then edit your @layoutHook@ by adding the Maximize layout modifier:
--
-- > myLayout = maximize (Tall 1 (3/100) (1/2)) ||| Full ||| etc..)
-- > main = xmonad def { layoutHook = myLayout }
--
-- Or, if you want to control the amount of padding placed around the
-- maximized window:
--
-- > myLayout = maximizeWithPadding 10 (Tall 1 (3/100) (1/2)) ||| Full ||| etc..)
-- > main = xmonad def { layoutHook = myLayout }
--
-- For more detailed instructions on editing the layoutHook see:
--
-- "XMonad.Doc.Extending#Editing_the_layout_hook"
--
-- In the key-bindings, do something like:
--
-- > , ((modm, xK_backslash), withFocused (sendMessage . maximizeRestore))
-- > ...
--
-- For detailed instruction on editing the key binding see:
--
-- "XMonad.Doc.Extending#Editing_key_bindings".
-- | Layout-modifier state: the padding (in pixels) around a maximized
-- window, and the window currently maximized, if any.
data Maximize a = Maximize Dimension (Maybe Window) deriving ( Read, Show )
-- | Modify a layout so a window can be temporarily maximized, with the
-- default padding of 25 pixels.
maximize :: LayoutClass l Window => l Window -> ModifiedLayout Maximize l Window
maximize = ModifiedLayout $ Maximize 25 Nothing
-- | Like 'maximize', but allows you to specify the amount of padding
-- placed around the maximized window.
maximizeWithPadding :: LayoutClass l Window => Dimension -> l Window -> ModifiedLayout Maximize l Window
maximizeWithPadding padding = ModifiedLayout $ Maximize padding Nothing
-- | Message toggling maximization of the wrapped window.
newtype MaximizeRestore = MaximizeRestore Window deriving ( Eq )
instance Message MaximizeRestore
-- | Construct the toggle message for a window.
maximizeRestore :: Window -> MaximizeRestore
maximizeRestore = MaximizeRestore
instance LayoutModifier Maximize Window where
    modifierDescription (Maximize _ _) = "Maximize"
    -- Give the marked window a near-fullscreen rectangle (screen minus
    -- padding).  When it is focused it is placed first in the returned
    -- list (stacked on top); otherwise it goes behind the other windows.
    pureModifier (Maximize padding (Just target)) rect (Just (S.Stack focused _ _)) wrs =
        if focused == target
            then (maxed ++ rest, Nothing)
            else (rest ++ maxed, lay)
        where
            (toMax, rest) = partition (\(w, _) -> w == target) wrs
            maxed = map (\(w, _) -> (w, maxRect)) toMax
            -- screen rectangle shrunk by the configured padding on all sides
            maxRect = Rectangle (rect_x rect + fromIntegral padding)
                (rect_y rect + fromIntegral padding)
                (rect_width rect - padding * 2)
                (rect_height rect - padding * 2)
            -- if the marked window is gone, clear the maximized state
            lay | null maxed = Just (Maximize padding Nothing)
                | otherwise = Nothing
    pureModifier _ _ _ wrs = (wrs, Nothing)
    -- MaximizeRestore toggles: same window restores, a different window
    -- becomes the new maximized target.
    pureMess (Maximize padding mw) m = case fromMessage m of
        Just (MaximizeRestore w) -> case mw of
            Just w' -> if w == w'
                then Just $ Maximize padding Nothing -- restore window
                else Just $ Maximize padding $ Just w -- maximize different window
            Nothing -> Just $ Maximize padding $ Just w -- maximize window
        _ -> Nothing
-- vim: sw=4:et
| xmonad/xmonad-contrib | XMonad/Layout/Maximize.hs | bsd-3-clause | 3,869 | 0 | 15 | 941 | 667 | 373 | 294 | 41 | 1 |
{-# language CPP #-}
-- | = Name
--
-- VK_KHR_surface_protected_capabilities - instance extension
--
-- == VK_KHR_surface_protected_capabilities
--
-- [__Name String__]
-- @VK_KHR_surface_protected_capabilities@
--
-- [__Extension Type__]
-- Instance extension
--
-- [__Registered Extension Number__]
-- 240
--
-- [__Revision__]
-- 1
--
-- [__Extension and Version Dependencies__]
--
-- - Requires Vulkan 1.1
--
-- - Requires @VK_KHR_get_surface_capabilities2@
--
-- [__Contact__]
--
-- - Sandeep Shinde
-- <https://github.com/KhronosGroup/Vulkan-Docs/issues/new?body=[VK_KHR_surface_protected_capabilities] @sashinde%0A<<Here describe the issue or question you have about the VK_KHR_surface_protected_capabilities extension>> >
--
-- == Other Extension Metadata
--
-- [__Last Modified Date__]
-- 2018-12-18
--
-- [__IP Status__]
-- No known IP claims.
--
-- [__Contributors__]
--
-- - Sandeep Shinde, NVIDIA
--
-- - James Jones, NVIDIA
--
-- - Daniel Koch, NVIDIA
--
-- == Description
--
-- This extension extends
-- 'Vulkan.Extensions.VK_KHR_get_surface_capabilities2.SurfaceCapabilities2KHR',
-- providing applications a way to query whether swapchains /can/ be
-- created with the
-- 'Vulkan.Extensions.VK_KHR_swapchain.SWAPCHAIN_CREATE_PROTECTED_BIT_KHR'
-- flag set.
--
-- Vulkan 1.1 added (optional) support for protect memory and protected
-- resources including buffers
-- ('Vulkan.Core10.Enums.BufferCreateFlagBits.BUFFER_CREATE_PROTECTED_BIT'),
-- images
-- ('Vulkan.Core10.Enums.ImageCreateFlagBits.IMAGE_CREATE_PROTECTED_BIT'),
-- and swapchains
-- ('Vulkan.Extensions.VK_KHR_swapchain.SWAPCHAIN_CREATE_PROTECTED_BIT_KHR').
-- However, on implementations which support multiple windowing systems,
-- not all window systems /may/ be able to provide a protected display
-- path.
--
-- This extension provides a way to query if a protected swapchain created
-- for a surface (and thus a specific windowing system) /can/ be displayed
-- on screen. It extends the existing
-- 'Vulkan.Extensions.VK_KHR_get_surface_capabilities2.SurfaceCapabilities2KHR'
-- structure with a new 'SurfaceProtectedCapabilitiesKHR' structure from
-- which the application /can/ obtain information about support for
-- protected swapchain creation through
-- 'Vulkan.Extensions.VK_KHR_get_surface_capabilities2.getPhysicalDeviceSurfaceCapabilities2KHR'.
--
-- == New Structures
--
-- - Extending
-- 'Vulkan.Extensions.VK_KHR_get_surface_capabilities2.SurfaceCapabilities2KHR':
--
-- - 'SurfaceProtectedCapabilitiesKHR'
--
-- == New Enum Constants
--
-- - 'KHR_SURFACE_PROTECTED_CAPABILITIES_EXTENSION_NAME'
--
-- - 'KHR_SURFACE_PROTECTED_CAPABILITIES_SPEC_VERSION'
--
-- - Extending 'Vulkan.Core10.Enums.StructureType.StructureType':
--
-- - 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR'
--
-- == Version History
--
-- - Revision 1, 2018-12-18 (Sandeep Shinde, Daniel Koch)
--
-- - Internal revisions.
--
-- == See Also
--
-- 'SurfaceProtectedCapabilitiesKHR'
--
-- == Document Notes
--
-- For more information, see the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#VK_KHR_surface_protected_capabilities Vulkan Specification>
--
-- This page is a generated document. Fixes and changes should be made to
-- the generator scripts, not directly.
module Vulkan.Extensions.VK_KHR_surface_protected_capabilities ( SurfaceProtectedCapabilitiesKHR(..)
, KHR_SURFACE_PROTECTED_CAPABILITIES_SPEC_VERSION
, pattern KHR_SURFACE_PROTECTED_CAPABILITIES_SPEC_VERSION
, KHR_SURFACE_PROTECTED_CAPABILITIES_EXTENSION_NAME
, pattern KHR_SURFACE_PROTECTED_CAPABILITIES_EXTENSION_NAME
) where
import Foreign.Marshal.Alloc (allocaBytes)
import Foreign.Ptr (nullPtr)
import Foreign.Ptr (plusPtr)
import Vulkan.CStruct (FromCStruct)
import Vulkan.CStruct (FromCStruct(..))
import Vulkan.CStruct (ToCStruct)
import Vulkan.CStruct (ToCStruct(..))
import Vulkan.Zero (Zero(..))
import Data.String (IsString)
import Data.Typeable (Typeable)
import Foreign.Storable (Storable)
import Foreign.Storable (Storable(peek))
import Foreign.Storable (Storable(poke))
import qualified Foreign.Storable (Storable(..))
import GHC.Generics (Generic)
import Foreign.Ptr (Ptr)
import Data.Kind (Type)
import Vulkan.Core10.FundamentalTypes (bool32ToBool)
import Vulkan.Core10.FundamentalTypes (boolToBool32)
import Vulkan.Core10.FundamentalTypes (Bool32)
import Vulkan.Core10.Enums.StructureType (StructureType)
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR))
-- | VkSurfaceProtectedCapabilitiesKHR - Structure describing capability of a
-- surface to be protected
--
-- = Description
--
-- If the @VK_GOOGLE_surfaceless_query@ extension is enabled, the value
-- returned in @supportsProtected@ will be identical for every valid
-- surface created on this physical device, and so in the
-- 'Vulkan.Extensions.VK_KHR_get_surface_capabilities2.getPhysicalDeviceSurfaceCapabilities2KHR'
-- call,
-- 'Vulkan.Extensions.VK_KHR_get_surface_capabilities2.PhysicalDeviceSurfaceInfo2KHR'::@surface@
-- /can/ be 'Vulkan.Core10.APIConstants.NULL_HANDLE'. In that case, the
-- contents of
-- 'Vulkan.Extensions.VK_KHR_get_surface_capabilities2.SurfaceCapabilities2KHR'::@surfaceCapabilities@
-- as well as any other struct chained to it will be undefined.
--
-- == Valid Usage (Implicit)
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_KHR_surface_protected_capabilities VK_KHR_surface_protected_capabilities>,
-- 'Vulkan.Core10.FundamentalTypes.Bool32',
-- 'Vulkan.Core10.Enums.StructureType.StructureType'
data SurfaceProtectedCapabilitiesKHR = SurfaceProtectedCapabilitiesKHR
{ -- | @supportsProtected@ specifies whether a protected swapchain created from
-- 'Vulkan.Extensions.VK_KHR_get_surface_capabilities2.PhysicalDeviceSurfaceInfo2KHR'::@surface@
-- for a particular windowing system /can/ be displayed on screen or not.
-- If @supportsProtected@ is 'Vulkan.Core10.FundamentalTypes.TRUE', then
-- creation of swapchains with the
-- 'Vulkan.Extensions.VK_KHR_swapchain.SWAPCHAIN_CREATE_PROTECTED_BIT_KHR'
-- flag set /must/ be supported for @surface@.
supportsProtected :: Bool }
deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (SurfaceProtectedCapabilitiesKHR)
#endif
deriving instance Show SurfaceProtectedCapabilitiesKHR
-- C layout written here: sType at offset 0, pNext at 8 (always NULL),
-- supportsProtected as a VkBool32 at 16; total struct size 24, align 8.
instance ToCStruct SurfaceProtectedCapabilitiesKHR where
  withCStruct x f = allocaBytes 24 $ \p -> pokeCStruct p x (f p)
  pokeCStruct p SurfaceProtectedCapabilitiesKHR{..} f = do
    poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR)
    poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    poke ((p `plusPtr` 16 :: Ptr Bool32)) (boolToBool32 (supportsProtected))
    f
  cStructSize = 24
  cStructAlignment = 8
  -- like pokeCStruct, but with the zero value for the payload field
  pokeZeroCStruct p f = do
    poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR)
    poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    poke ((p `plusPtr` 16 :: Ptr Bool32)) (boolToBool32 (zero))
    f
-- Read back only the payload field (offset 16); sType/pNext are ignored.
instance FromCStruct SurfaceProtectedCapabilitiesKHR where
  peekCStruct p = do
    supportsProtected <- peek @Bool32 ((p `plusPtr` 16 :: Ptr Bool32))
    pure $ SurfaceProtectedCapabilitiesKHR
             (bool32ToBool supportsProtected)
-- Storable delegates to the CStruct machinery above (24 bytes, align 8).
instance Storable SurfaceProtectedCapabilitiesKHR where
  sizeOf ~_ = 24
  alignment ~_ = 8
  peek = peekCStruct
  poke ptr poked = pokeCStruct ptr poked (pure ())
instance Zero SurfaceProtectedCapabilitiesKHR where
  zero = SurfaceProtectedCapabilitiesKHR
           zero
type KHR_SURFACE_PROTECTED_CAPABILITIES_SPEC_VERSION = 1
-- No documentation found for TopLevel "VK_KHR_SURFACE_PROTECTED_CAPABILITIES_SPEC_VERSION"
pattern KHR_SURFACE_PROTECTED_CAPABILITIES_SPEC_VERSION :: forall a . Integral a => a
pattern KHR_SURFACE_PROTECTED_CAPABILITIES_SPEC_VERSION = 1
type KHR_SURFACE_PROTECTED_CAPABILITIES_EXTENSION_NAME = "VK_KHR_surface_protected_capabilities"
-- No documentation found for TopLevel "VK_KHR_SURFACE_PROTECTED_CAPABILITIES_EXTENSION_NAME"
pattern KHR_SURFACE_PROTECTED_CAPABILITIES_EXTENSION_NAME :: forall a . (Eq a, IsString a) => a
pattern KHR_SURFACE_PROTECTED_CAPABILITIES_EXTENSION_NAME = "VK_KHR_surface_protected_capabilities"
| expipiplus1/vulkan | src/Vulkan/Extensions/VK_KHR_surface_protected_capabilities.hs | bsd-3-clause | 8,757 | 0 | 14 | 1,372 | 976 | 612 | 364 | -1 | -1 |
{-# LANGUAGE EmptyDataDecls, TypeSynonymInstances #-}
module Games.Chaos2010.Database.Fields where
import Database.HaskellDB.DBLayout
-- The definitions below appear machine-generated (HaskellDB field
-- boilerplate): each column gets a phantom tag type, a 'Proxy' alias, a
-- value-level proxy, and a 'ShowLabel' instance naming the column.
data IdTag
type Id = Proxy IdTag
instance ShowLabel Id where showLabel _ = "id"
-- named 'xid' to avoid clashing with 'Prelude.id'
xid :: Id
xid = proxy
data History_nameTag
type History_name = Proxy History_nameTag
history_name :: History_name
history_name = proxy
instance ShowLabel History_name
where showLabel _ = "history_name"
data AllegianceTag
type Allegiance = Proxy AllegianceTag
allegiance :: Allegiance
allegiance = proxy
instance ShowLabel Allegiance
where showLabel _ = "allegiance"
data XTag
type X = Proxy XTag
x :: X
x = proxy
instance ShowLabel X
where showLabel _ = "x"
data YTag
type Y = Proxy YTag
y :: Y
y = proxy
instance ShowLabel Y
where showLabel _ = "y"
data TxTag
type Tx = Proxy TxTag
tx :: Tx
tx = proxy
instance ShowLabel Tx
where showLabel _ = "tx"
data TyTag
type Ty = Proxy TyTag
ty :: Ty
ty = proxy
instance ShowLabel Ty
where showLabel _ = "ty"
data PtypeTag
type Ptype = Proxy PtypeTag
ptype :: Ptype
ptype = proxy
instance ShowLabel Ptype
where showLabel _ = "ptype"
data TagTag
type Tag = Proxy TagTag
tag :: Tag
tag = proxy
instance ShowLabel Tag
where showLabel _ = "tag"
data Spell_nameTag
type Spell_name = Proxy Spell_nameTag
spell_name :: Spell_name
spell_name = proxy
instance ShowLabel Spell_name
where showLabel _ = "spell_name"
data Num_wizardsTag
type Num_wizards = Proxy Num_wizardsTag
num_wizards :: Num_wizards
num_wizards = proxy
instance ShowLabel Num_wizards
where showLabel _ = "num_wizards"
data Turn_numberTag
type Turn_number = Proxy Turn_numberTag
turn_number :: Turn_number
turn_number = proxy
instance ShowLabel Turn_number
where showLabel _ = "turn_number"
data Turn_phaseTag
type Turn_phase = Proxy Turn_phaseTag
turn_phase :: Turn_phase
turn_phase = proxy
instance ShowLabel Turn_phase
where showLabel _ = "turn_phase"
data ColourTag
type Colour = Proxy ColourTag
colour :: Colour
colour = proxy
instance ShowLabel Colour
where showLabel _ = "colour"
data ActionTag
type Action = Proxy ActionTag
action :: Action
action = proxy
instance ShowLabel Action
where showLabel _ = "action"
data HelpTag
type Help = Proxy HelpTag
help :: Help
help = proxy
instance ShowLabel Help
where showLabel _ = "help"
data Object_typeTag
type Object_type = Proxy Object_typeTag
object_type :: Object_type
object_type = proxy
instance ShowLabel Object_type
where showLabel _ = "object_type"
data Object_nameTag
type Object_name = Proxy Object_nameTag
object_name :: Object_name
object_name = proxy
instance ShowLabel Object_name
where showLabel _ = "object_name"
data Module_nameTag
type Module_name = Proxy Module_nameTag
module_name :: Module_name
module_name = proxy
instance ShowLabel Module_name
where showLabel _ = "module_name"
data Physical_defenseTag
type Physical_defense = Proxy Physical_defenseTag
physical_defense :: Physical_defense
physical_defense = proxy
instance ShowLabel Physical_defense
where showLabel _ = "physical_defense"
data Attack_strengthTag
type Attack_strength = Proxy Attack_strengthTag
attack_strength :: Attack_strength
attack_strength = proxy
instance ShowLabel Attack_strength
where showLabel _ = "attack_strength"
data Attribute_nameTag
type Attribute_name = Proxy Attribute_nameTag
attribute_name :: Attribute_name
attribute_name = proxy
instance ShowLabel Attribute_name
where showLabel _ = "attribute_name"
data Type_nameTag
type Type_name = Proxy Type_nameTag
type_name :: Type_name
type_name = proxy
instance ShowLabel Type_name
where showLabel _ = "type_name"
data Relvar_nameTag
type Relvar_name = Proxy Relvar_nameTag
relvar_name :: Relvar_name
relvar_name = proxy
instance ShowLabel Relvar_name
where showLabel _ = "relvar_name"
data Constraint_nameTag
type Constraint_name = Proxy Constraint_nameTag
constraint_name :: Constraint_name
constraint_name = proxy
instance ShowLabel Constraint_name
where showLabel _ = "constraint_name"
data XtypeTag
type Xtype = Proxy XtypeTag
xtype :: Xtype
xtype = proxy
instance ShowLabel Xtype
where showLabel _ = "xtype"
data SpriteTag
type Sprite = Proxy SpriteTag
sprite :: Sprite
sprite = proxy
instance ShowLabel Sprite
where showLabel _ = "sprite"
data RangeTag
type Range = Proxy RangeTag
range :: Range
range = proxy
instance ShowLabel Range
where showLabel _ = "range"
data WidthTag
type Width = Proxy WidthTag
width :: Width
width = proxy
instance ShowLabel Width
where showLabel _ = "width"
data HeightTag
type Height = Proxy HeightTag
height :: Height
height = proxy
instance ShowLabel Height
where showLabel _ = "height"
data SpTag
type Sp = Proxy SpTag
sp :: Sp
sp = proxy
instance ShowLabel Sp
where showLabel _ = "sp"
data Start_tickTag
type Start_tick = Proxy Start_tickTag
start_tick :: Start_tick
start_tick = proxy
instance ShowLabel Start_tick
where showLabel _ = "start_tick"
data Animation_speedTag
type Animation_speed = Proxy Animation_speedTag
animation_speed :: Animation_speed
animation_speed = proxy
instance ShowLabel Animation_speed
where showLabel _ = "animation_speed"
data SelectedTag
type Selected = Proxy SelectedTag
selected :: Selected
selected = proxy
instance ShowLabel Selected
where showLabel _ = "selected"
data Cast_alignmentTag
type Cast_alignment = Proxy Cast_alignmentTag
cast_alignment :: Cast_alignment
cast_alignment = proxy
instance ShowLabel Cast_alignment
where showLabel _ = "cast_alignment"
data Cast_success_checkedTag
type Cast_success_checked = Proxy Cast_success_checkedTag
cast_success_checked :: Cast_success_checked
cast_success_checked = proxy
instance ShowLabel Cast_success_checked
where showLabel _ = "cast_success_checked"
data PlaceTag
type Place = Proxy PlaceTag
place :: Place
place = proxy
instance ShowLabel Place
where showLabel _ = "place"
data Wizard_nameTag
type Wizard_name = Proxy Wizard_nameTag
wizard_name :: Wizard_name
wizard_name = proxy
instance ShowLabel Wizard_name
where showLabel _ = "wizard_name"
data Computer_controlledTag
type Computer_controlled = Proxy Computer_controlledTag
computer_controlled :: Computer_controlled
computer_controlled = proxy
instance ShowLabel Computer_controlled
where showLabel _ = "computer_controlled"
data NameTag
type Name = Proxy NameTag
name :: Name
name = proxy
instance ShowLabel Name
where showLabel _ = "name"
data RedTag
type Red = Proxy RedTag
red :: Red
red = proxy
instance ShowLabel Red
where showLabel _ = "red"
data GreenTag
type Green = Proxy GreenTag
green :: Green
green = proxy
instance ShowLabel Green
where showLabel _ = "green"
data BlueTag
type Blue = Proxy BlueTag
blue :: Blue
blue = proxy
instance ShowLabel Blue
where showLabel _ = "blue"
data Creating_new_gameTag
type Creating_new_game = Proxy Creating_new_gameTag
creating_new_game :: Creating_new_game
creating_new_game = proxy
instance ShowLabel Creating_new_game
where showLabel _ = "creating_new_game"
data FlyingTag
type Flying = Proxy FlyingTag
flying :: Flying
flying = proxy
instance ShowLabel Flying
where showLabel _ = "flying"
data SpeedTag
type Speed = Proxy SpeedTag
speed :: Speed
speed = proxy
instance ShowLabel Speed
where showLabel _ = "speed"
data AgilityTag
type Agility = Proxy AgilityTag
agility :: Agility
agility = proxy
instance ShowLabel Agility
where showLabel _ = "agility"
data Magic_defenseTag
type Magic_defense = Proxy Magic_defenseTag
magic_defense :: Magic_defense
magic_defense = proxy
instance ShowLabel Magic_defense
where showLabel _ = "magic_defense"
data Spell_categoryTag
type Spell_category = Proxy Spell_categoryTag
spell_category :: Spell_category
spell_category = proxy
instance ShowLabel Spell_category
where showLabel _ = "spell_category"
data Base_chanceTag
type Base_chance = Proxy Base_chanceTag
base_chance :: Base_chance
base_chance = proxy
instance ShowLabel Base_chance
where showLabel _ = "base_chance"
data DescriptionTag
type Description = Proxy DescriptionTag
description :: Description
description = proxy
instance ShowLabel Description
where showLabel _ = "description"
data NumbTag
type Numb = Proxy NumbTag
numb :: Numb
numb = proxy
instance ShowLabel Numb
where showLabel _ = "numb"
data CountTag
type Count = Proxy CountTag
count :: Count
count = proxy
instance ShowLabel Count
where showLabel _ = "count"
data ChanceTag
type Chance = Proxy ChanceTag
chance :: Chance
chance = proxy
instance ShowLabel Chance
where showLabel _ = "chance"
data Alignment_stringTag
type Alignment_string = Proxy Alignment_stringTag
alignment_string :: Alignment_string
alignment_string = proxy
instance ShowLabel Alignment_string
where showLabel _ = "alignment_string"
data Current_wizardTag
type Current_wizard = Proxy Current_wizardTag
current_wizard :: Current_wizard
current_wizard = proxy
instance ShowLabel Current_wizard
where showLabel _ = "current_wizard"
data ImaginaryTag
type Imaginary = Proxy ImaginaryTag
imaginary :: Imaginary
imaginary = proxy
instance ShowLabel Imaginary
where showLabel _ = "imaginary"
data UndeadTag
type Undead = Proxy UndeadTag
undead :: Undead
undead = proxy
instance ShowLabel Undead
where showLabel _ = "undead"
data RidableTag
type Ridable = Proxy RidableTag
ridable :: Ridable
ridable = proxy
instance ShowLabel Ridable
where showLabel _ = "ridable"
data Ranged_weapon_typeTag
type Ranged_weapon_type = Proxy Ranged_weapon_typeTag
ranged_weapon_type :: Ranged_weapon_type
ranged_weapon_type = proxy
instance ShowLabel Ranged_weapon_type
where showLabel _ = "ranged_weapon_type"
data Ranged_attack_strengthTag
type Ranged_attack_strength = Proxy Ranged_attack_strengthTag
ranged_attack_strength :: Ranged_attack_strength
ranged_attack_strength = proxy
instance ShowLabel Ranged_attack_strength
where showLabel _ = "ranged_attack_strength"
data WtypeTag
type Wtype = Proxy WtypeTag
wtype :: Wtype
wtype = proxy
instance ShowLabel Wtype
where showLabel _ = "wtype"
data Shadow_formTag
type Shadow_form = Proxy Shadow_formTag
shadow_form :: Shadow_form
shadow_form = proxy
instance ShowLabel Shadow_form
where showLabel _ = "shadow_form"
data Magic_swordTag
type Magic_sword = Proxy Magic_swordTag
magic_sword :: Magic_sword
magic_sword = proxy
instance ShowLabel Magic_sword
where showLabel _ = "magic_sword"
data Magic_knifeTag
type Magic_knife = Proxy Magic_knifeTag
magic_knife :: Magic_knife
magic_knife = proxy
instance ShowLabel Magic_knife
where showLabel _ = "magic_knife"
data Magic_shieldTag
type Magic_shield = Proxy Magic_shieldTag
magic_shield :: Magic_shield
magic_shield = proxy
instance ShowLabel Magic_shield
where showLabel _ = "magic_shield"
data Magic_wingsTag
type Magic_wings = Proxy Magic_wingsTag
magic_wings :: Magic_wings
magic_wings = proxy
instance ShowLabel Magic_wings
where showLabel _ = "magic_wings"
data Magic_armourTag
type Magic_armour = Proxy Magic_armourTag
magic_armour :: Magic_armour
magic_armour = proxy
instance ShowLabel Magic_armour
where showLabel _ = "magic_armour"
data Magic_bowTag
type Magic_bow = Proxy Magic_bowTag
magic_bow :: Magic_bow
magic_bow = proxy
instance ShowLabel Magic_bow
where showLabel _ = "magic_bow"
data Original_placeTag
type Original_place = Proxy Original_placeTag
original_place :: Original_place
original_place = proxy
instance ShowLabel Original_place
where showLabel _ = "original_place"
data ExpiredTag
type Expired = Proxy ExpiredTag
expired :: Expired
expired = proxy
instance ShowLabel Expired
where showLabel _ = "expired"
data ExpressionTag
type Expression = Proxy ExpressionTag
expression :: Expression
expression = proxy
instance ShowLabel Expression
where showLabel _ = "expression"
data Disable_spreadingTag
type Disable_spreading = Proxy Disable_spreadingTag
disable_spreading :: Disable_spreading
disable_spreading = proxy
instance ShowLabel Disable_spreading
where showLabel _ = "disable_spreading"
data Dont_nest_ai_next_phaseTag
type Dont_nest_ai_next_phase = Proxy Dont_nest_ai_next_phaseTag
dont_nest_ai_next_phase :: Dont_nest_ai_next_phase
dont_nest_ai_next_phase = proxy
instance ShowLabel Dont_nest_ai_next_phase
where showLabel _ = "dont_nest_ai_next_phase"
data Game_completedTag
type Game_completed = Proxy Game_completedTag
game_completed :: Game_completed
game_completed = proxy
instance ShowLabel Game_completed
where showLabel _ = "game_completed"
data In_next_phase_hackTag
type In_next_phase_hack = Proxy In_next_phase_hackTag
in_next_phase_hack :: In_next_phase_hack
in_next_phase_hack = proxy
instance ShowLabel In_next_phase_hack
where showLabel _ = "in_next_phase_hack"
data Key_codeTag
type Key_code = Proxy Key_codeTag
key_code :: Key_code
key_code = proxy
instance ShowLabel Key_code
where showLabel _ = "key_code"
data Action_nameTag
type Action_name = Proxy Action_nameTag
action_name :: Action_name
action_name = proxy
instance ShowLabel Action_name
where showLabel _ = "action_name"
data Module_orderTag
type Module_order = Proxy Module_orderTag
module_order :: Module_order
module_order = proxy
instance ShowLabel Module_order
where showLabel _ = "module_order"
data AlignmentTag
type Alignment = Proxy AlignmentTag
alignment :: Alignment
alignment = proxy
instance ShowLabel Alignment
where showLabel _ = "alignment"
data Valid_square_categoryTag
type Valid_square_category = Proxy Valid_square_categoryTag
valid_square_category :: Valid_square_category
valid_square_category = proxy
instance ShowLabel Valid_square_category
where showLabel _ = "valid_square_category"
data LineTag
type Line = Proxy LineTag
line :: Line
line = proxy
instance ShowLabel Line
where showLabel _ = "line"
data StateTag
type State = Proxy StateTag
state :: State
state = proxy
instance ShowLabel State
where showLabel _ = "state"
data New_wizard_nameTag
type New_wizard_name = Proxy New_wizard_nameTag
new_wizard_name :: New_wizard_name
new_wizard_name = proxy
instance ShowLabel New_wizard_name
where showLabel _ = "new_wizard_name"
data Object_orderTag
type Object_order = Proxy Object_orderTag
object_order :: Object_order
object_order = proxy
instance ShowLabel Object_order
where showLabel _ = "object_order"
data Operator_nameTag
type Operator_name = Proxy Operator_nameTag
operator_name :: Operator_name
operator_name = proxy
instance ShowLabel Operator_name
where showLabel _ = "operator_name"
data SourceTag
type Source = Proxy SourceTag
source :: Source
source = proxy
instance ShowLabel Source
where showLabel _ = "source"
data PreferenceTag
type Preference = Proxy PreferenceTag
preference :: Preference
preference = proxy
instance ShowLabel Preference
where showLabel _ = "preference"
data Remaining_walk_hackTag
type Remaining_walk_hack = Proxy Remaining_walk_hackTag
remaining_walk_hack :: Remaining_walk_hack
remaining_walk_hack = proxy
instance ShowLabel Remaining_walk_hack
where showLabel _ = "remaining_walk_hack"
data Remaining_walkTag
type Remaining_walk = Proxy Remaining_walkTag
remaining_walk :: Remaining_walk
remaining_walk = proxy
instance ShowLabel Remaining_walk
where showLabel _ = "remaining_walk"
data Scalar_nameTag
type Scalar_name = Proxy Scalar_nameTag
scalar_name :: Scalar_name
scalar_name = proxy
instance ShowLabel Scalar_name
where showLabel _ = "scalar_name"
data Section_orderTag
type Section_order = Proxy Section_orderTag
section_order :: Section_order
section_order = proxy
instance ShowLabel Section_order
where showLabel _ = "section_order"
data Move_phaseTag
type Move_phase = Proxy Move_phaseTag
move_phase :: Move_phase
move_phase = proxy
instance ShowLabel Move_phase
where showLabel _ = "move_phase"
data EngagedTag
type Engaged = Proxy EngagedTag
engaged :: Engaged
engaged = proxy
instance ShowLabel Engaged
where showLabel _ = "engaged"
data Spell_book_show_allTag
type Spell_book_show_all = Proxy Spell_book_show_allTag
spell_book_show_all :: Spell_book_show_all
spell_book_show_all = proxy
instance ShowLabel Spell_book_show_all
where showLabel _ = "spell_book_show_all"
data KeyTag
type Key = Proxy KeyTag
key :: Key
key = proxy
instance ShowLabel Key
where showLabel _ = "key"
data Alignment_orderTag
type Alignment_order = Proxy Alignment_orderTag
alignment_order :: Alignment_order
alignment_order = proxy
instance ShowLabel Alignment_order
where showLabel _ = "alignment_order"
data Count_iconsTag
type Count_icons = Proxy Count_iconsTag
count_icons :: Count_icons
count_icons = proxy
instance ShowLabel Count_icons
where showLabel _ = "count_icons"
data Align_iconsTag
type Align_icons = Proxy Align_iconsTag
align_icons :: Align_icons
align_icons = proxy
instance ShowLabel Align_icons
where showLabel _ = "align_icons"
data Spell_choice_hackTag
type Spell_choice_hack = Proxy Spell_choice_hackTag
spell_choice_hack :: Spell_choice_hack
spell_choice_hack = proxy
instance ShowLabel Spell_choice_hack
where showLabel _ = "spell_choice_hack"
data Row_numberTag
type Row_number = Proxy Row_numberTag
row_number :: Row_number
row_number = proxy
instance ShowLabel Row_number
where showLabel _ = "row_number"
data Spell_parts_to_castTag
type Spell_parts_to_cast = Proxy Spell_parts_to_castTag
spell_parts_to_cast :: Spell_parts_to_cast
spell_parts_to_cast = proxy
instance ShowLabel Spell_parts_to_cast
where showLabel _ = "spell_parts_to_cast"
data CategoryTag
type Category = Proxy CategoryTag
category :: Category
category = proxy
instance ShowLabel Category
where showLabel _ = "category"
data CreatureTag
type Creature = Proxy CreatureTag
creature :: Creature
creature = proxy
instance ShowLabel Creature
where showLabel _ = "creature"
data MonsterTag
type Monster = Proxy MonsterTag
monster :: Monster
monster = proxy
instance ShowLabel Monster
where showLabel _ = "monster"
data OverrideTag
type Override = Proxy OverrideTag
override :: Override
override = proxy
instance ShowLabel Override
where showLabel _ = "override"
data SettingTag
type Setting = Proxy SettingTag
setting :: Setting
setting = proxy
instance ShowLabel Setting
where showLabel _ = "setting"
data Trigger_catalogTag
type Trigger_catalog = Proxy Trigger_catalogTag
trigger_catalog :: Trigger_catalog
trigger_catalog = proxy
instance ShowLabel Trigger_catalog
where showLabel _ = "trigger_catalog"
data Trigger_schemaTag
type Trigger_schema = Proxy Trigger_schemaTag
trigger_schema :: Trigger_schema
trigger_schema = proxy
instance ShowLabel Trigger_schema
where showLabel _ = "trigger_schema"
data Trigger_nameTag
type Trigger_name = Proxy Trigger_nameTag
trigger_name :: Trigger_name
trigger_name = proxy
instance ShowLabel Trigger_name
where showLabel _ = "trigger_name"
data Event_manipulationTag
type Event_manipulation = Proxy Event_manipulationTag
event_manipulation :: Event_manipulation
event_manipulation = proxy
instance ShowLabel Event_manipulation
where showLabel _ = "event_manipulation"
data Event_object_catalogTag
type Event_object_catalog = Proxy Event_object_catalogTag
event_object_catalog :: Event_object_catalog
event_object_catalog = proxy
instance ShowLabel Event_object_catalog
where showLabel _ = "event_object_catalog"
data Event_object_schemaTag
type Event_object_schema = Proxy Event_object_schemaTag
event_object_schema :: Event_object_schema
event_object_schema = proxy
instance ShowLabel Event_object_schema
where showLabel _ = "event_object_schema"
data Event_object_tableTag
type Event_object_table = Proxy Event_object_tableTag
event_object_table :: Event_object_table
event_object_table = proxy
instance ShowLabel Event_object_table
where showLabel _ = "event_object_table"
data Action_orderTag
type Action_order = Proxy Action_orderTag
action_order :: Action_order
action_order = proxy
instance ShowLabel Action_order
where showLabel _ = "action_order"
data Action_conditionTag
type Action_condition = Proxy Action_conditionTag
action_condition :: Action_condition
action_condition = proxy
instance ShowLabel Action_condition
where showLabel _ = "action_condition"
data Action_statementTag
type Action_statement = Proxy Action_statementTag
action_statement :: Action_statement
action_statement = proxy
instance ShowLabel Action_statement
where showLabel _ = "action_statement"
data Action_orientationTag
type Action_orientation = Proxy Action_orientationTag
action_orientation :: Action_orientation
action_orientation = proxy
instance ShowLabel Action_orientation
where showLabel _ = "action_orientation"
data Condition_timingTag
type Condition_timing = Proxy Condition_timingTag
condition_timing :: Condition_timing
condition_timing = proxy
instance ShowLabel Condition_timing
where showLabel _ = "condition_timing"
data Condition_reference_old_tableTag
type Condition_reference_old_table = Proxy Condition_reference_old_tableTag
condition_reference_old_table :: Condition_reference_old_table
condition_reference_old_table = proxy
instance ShowLabel Condition_reference_old_table
where showLabel _ = "condition_reference_old_table"
data Condition_reference_new_tableTag
type Condition_reference_new_table = Proxy Condition_reference_new_tableTag
condition_reference_new_table :: Condition_reference_new_table
condition_reference_new_table = proxy
instance ShowLabel Condition_reference_new_table
where showLabel _ = "condition_reference_new_table"
data Condition_reference_old_rowTag
type Condition_reference_old_row = Proxy Condition_reference_old_rowTag
condition_reference_old_row :: Condition_reference_old_row
condition_reference_old_row = proxy
instance ShowLabel Condition_reference_old_row
where showLabel _ = "condition_reference_old_row"
data Condition_reference_new_rowTag
type Condition_reference_new_row = Proxy Condition_reference_new_rowTag
condition_reference_new_row :: Condition_reference_new_row
condition_reference_new_row = proxy
instance ShowLabel Condition_reference_new_row
where showLabel _ = "condition_reference_new_row"
data CreatedTag
type Created = Proxy CreatedTag
created :: Created
created = proxy
instance ShowLabel Created
where showLabel _ = "created"
data View_nameTag
type View_name = Proxy View_nameTag
view_name :: View_name
view_name = proxy
instance ShowLabel View_name
where showLabel _ = "view_name"
data Table_catalogTag
type Table_catalog = Proxy Table_catalogTag
table_catalog :: Table_catalog
table_catalog = proxy
instance ShowLabel Table_catalog
where showLabel _ = "table_catalog"
data DefinitionTag
type Definition = Proxy DefinitionTag
definition :: Definition
definition = proxy
instance ShowLabel Definition
where showLabel _ = "definition"
data Table_schemaTag
type Table_schema = Proxy Table_schemaTag
table_schema :: Table_schema
table_schema = proxy
instance ShowLabel Table_schema
where showLabel _ = "table_schema"
data Table_nameTag
type Table_name = Proxy Table_nameTag
table_name :: Table_name
table_name = proxy
instance ShowLabel Table_name
where showLabel _ = "table_name"
data View_definitionTag
type View_definition = Proxy View_definitionTag
view_definition :: View_definition
view_definition = proxy
instance ShowLabel View_definition
where showLabel _ = "view_definition"
data Check_optionTag
type Check_option = Proxy Check_optionTag
check_option :: Check_option
check_option = proxy
instance ShowLabel Check_option
where showLabel _ = "check_option"
data Is_updatableTag
type Is_updatable = Proxy Is_updatableTag
is_updatable :: Is_updatable
is_updatable = proxy
instance ShowLabel Is_updatable
where showLabel _ = "is_updatable"
data Is_insertable_intoTag
type Is_insertable_into = Proxy Is_insertable_intoTag
is_insertable_into :: Is_insertable_into
is_insertable_into = proxy
instance ShowLabel Is_insertable_into
where showLabel _ = "is_insertable_into"
data Default_spriteTag
type Default_sprite = Proxy Default_spriteTag
default_sprite :: Default_sprite
default_sprite = proxy
instance ShowLabel Default_sprite
where showLabel _ = "default_sprite"
data Wizard_countTag
type Wizard_count = Proxy Wizard_countTag
wizard_count :: Wizard_count
wizard_count = proxy
instance ShowLabel Wizard_count
where showLabel _ = "wizard_count"
data World_alignmentTag
type World_alignment = Proxy World_alignmentTag
world_alignment :: World_alignment
world_alignment = proxy
instance ShowLabel World_alignment
where showLabel _ = "world_alignment" | JakeWheat/Chaos-2010 | Games/Chaos2010/Database/Fields.hs | bsd-3-clause | 24,422 | 0 | 6 | 3,408 | 5,183 | 2,867 | 2,316 | -1 | -1 |
module Insomnia.Common.FreshName where
import Data.Typeable (Typeable)
import Unbound.Generics.LocallyNameless
-- | Run a continuation with a freshly generated name derived from the given
-- string hint. The fresh name is added to the avoid set for the duration of
-- the continuation, so it cannot be captured there.
withFreshName :: (Typeable a, LFresh m) => String -> (Name a -> m r) -> m r
withFreshName hint body =
  lfresh (s2n hint) >>= \freshNm ->
    avoid [AnyName freshNm] (body freshNm)
-- | Like 'withFreshName' but for a list of hints: each fresh name's avoid
-- scope encloses the generation of all the later ones, and the continuation
-- receives the names in the same order as the hints.
withFreshNames :: (Typeable a, LFresh m) => [String]
                  -> ([Name a] -> m r) -> m r
withFreshNames hints body = go hints []
  where
    go []       acc = body (reverse acc)
    go (h:rest) acc = withFreshName h $ \nm -> go rest (nm : acc)
| lambdageek/insomnia | src/Insomnia/Common/FreshName.hs | bsd-3-clause | 504 | 0 | 11 | 116 | 228 | 116 | 112 | 14 | 1 |
--------------------------------------------------------------------------------
-- |
-- Module : Foreign.CUDA.Driver.Context
-- Copyright : [2009..2015] Trevor L. McDonell
-- License : BSD
--
-- Context management for low-level driver interface
--
--------------------------------------------------------------------------------
module Foreign.CUDA.Driver.Context (
module Foreign.CUDA.Driver.Context.Base,
module Foreign.CUDA.Driver.Context.Config,
module Foreign.CUDA.Driver.Context.Peer,
) where
import Foreign.CUDA.Driver.Context.Base
import Foreign.CUDA.Driver.Context.Config
import Foreign.CUDA.Driver.Context.Peer
| mwu-tow/cuda | Foreign/CUDA/Driver/Context.hs | bsd-3-clause | 639 | 0 | 5 | 62 | 71 | 56 | 15 | 7 | 0 |
{-# LANGUAGE DeriveDataTypeable, PatternGuards #-}
-----------------------------------------------------------------------------
-- |
-- Module : Language.C.Data.Position
-- Copyright : (c) [1995..2000] Manuel M. T. Chakravarty
-- License : BSD-style
-- Maintainer : benedikt.huber@gmail.com
-- Stability : experimental
-- Portability : ghc
--
-- Source code position
-----------------------------------------------------------------------------
module Language.C.Data.Position (
--
-- source text positions
--
Position(Position),initPos,
posFile,posRow,posColumn,isSourcePos,
nopos, isNoPos,
builtinPos, isBuiltinPos,
internalPos, isInternalPos,
incPos, tabPos, retPos, adjustPos,
Pos(..),
) where
import Data.Generics
-- | uniform representation of source file positions; the order of the arguments
-- is important as it leads to the desired ordering of source positions
data Position = Position String -- ^ file name
                {-# UNPACK #-}   !Int -- ^ row (line) number
                {-# UNPACK #-}   !Int -- ^ column number
  deriving (Eq, Ord, Typeable, Data)

-- The three special positions print as their angle-bracketed placeholder
-- names; ordinary positions print as a (file, row, column) tuple.
instance Show Position where
  show pos@(Position fname row col)
    | isNoPos pos = "<no file>"
    | isBuiltinPos pos = "<builtin>"
    | isInternalPos pos = "<internal>"
    | otherwise = show (fname, row, col)

-- Inverse of the Show instance above: a leading '<' selects one of the
-- special positions; anything else is read back as a (file, row, column)
-- tuple.
instance Read Position where
  readsPrec p s = case s of
    '<' : _ -> readInternal s
    _       -> map (\((file,row,pos),r) -> (Position file row pos,r)) . readsPrec p $ s

-- Parse the special-position spellings produced by 'show'.
readInternal :: ReadS Position
readInternal s | (Just rest) <- readString "<no file>" s = [(nopos,rest)]
               | (Just rest) <- readString "<builtin>" s = [(builtinPos,rest)]
               | (Just rest) <- readString "<internal>" s = [(internalPos,rest)]
               | otherwise = []
  where -- readString pat inp: strip the exact prefix @pat@ from @inp@,
        -- returning the remainder; Nothing if it is not a prefix.
        readString [] r = return r
        readString (c:cs) (c':cs') | c == c' = readString cs cs'
                                   | otherwise = Nothing
        readString (_:_) [] = Nothing
-- | get the source file of the specified position. Fails unless @isSourcePos pos@.
-- | get the source file of the specified position. This is total: for the
-- special positions it returns their placeholder name (e.g. \"<no file>\"),
-- so the result is only meaningful when @isSourcePos pos@ holds.
posFile :: Position -> String
posFile (Position fname _ _) = fname

-- | get the line number of the specified position. Total: yields the
-- sentinel row (-1) for the special positions, so check @isSourcePos pos@
-- first if that matters.
posRow :: Position -> Int
posRow (Position _ row _) = row

-- | get the column of the specified position. Total: only meaningful when
-- @isSourcePos pos@ holds.
posColumn :: Position -> Int
posColumn (Position _ _ col) = col

-- | class of types which aggregate a source code location
class Pos a where
    posOf :: a -> Position

-- | initialize a Position to the start (row 1, column 1) of the translation
-- unit starting in the given file
initPos :: FilePath -> Position
initPos file = Position file 1 1
-- | returns @True@ if the given position refers to an actual source file
-- | returns @True@ if the given position refers to an actual source file:
-- both row and column are non-negative (all three special positions carry
-- row -1)
isSourcePos :: Position -> Bool
isSourcePos (Position _ row col) = row >= 0 && col >= 0

-- | no position (for unknown position information); distinguished from the
-- other special positions by its column of 0
nopos :: Position
nopos  = Position "<no file>" (-1) 0

-- | returns @True@ for any position with row -1 and column 0 (as built by
-- 'nopos'), regardless of the stored file name
isNoPos :: Position -> Bool
isNoPos (Position _ (-1) 0) = True
isNoPos _                   = False
-- | position attached to built-in objects
--
-- | position attached to built-in objects
--
builtinPos :: Position
builtinPos = Position "<built into the parser>" (-1) 1

-- | returns @True@ if the given position refers to a builtin definition
-- (row -1, column 1, regardless of the stored file name)
isBuiltinPos :: Position -> Bool
isBuiltinPos (Position _ (-1) 1) = True
isBuiltinPos _                   = False

-- | position used for internal errors
internalPos :: Position
internalPos = Position "<internal error>" (-1) 2

-- | returns @True@ if the given position is internal (row -1, column 2)
isInternalPos :: Position -> Bool
isInternalPos (Position _ (-1) 2) = True
isInternalPos _                   = False
{-# INLINE incPos #-}
-- | advance column by @n@; row and file name are unchanged
incPos :: Position -> Int -> Position
incPos (Position fname row col) n = Position fname row (col + n)

{-# DEPRECATED tabPos "Use 'incPos column-adjustment' instead" #-}
-- | advance column to the next tab position (tabs are considered to be at
-- every 8th column, i.e. columns 1, 9, 17, ...). The formula rounds
-- @col - 1@ up past the next multiple of 8 and adds 1, so e.g. columns
-- 1..8 all advance to column 9.
tabPos :: Position -> Position
tabPos (Position fname row col) =
        Position fname row (col + 8 - (col - 1) `mod` 8)

{-# INLINE retPos #-}
-- | advance to the start (column 1) of the next line
retPos :: Position -> Position
retPos (Position fname row _col) = Position fname (row + 1) 1
{-# INLINE adjustPos #-}
-- | adjust position: change file and line number, resetting column to 1.
-- This is usually used for #LINE pragmas. The old position's fields are
-- discarded entirely.
adjustPos :: FilePath -> Int -> Position -> Position
adjustPos fname row (Position _ _ _) = Position fname row 1
| linzhp/Language-GLSL | src/Language/C/Data/Position.hs | bsd-3-clause | 4,562 | 0 | 15 | 1,077 | 1,073 | 582 | 491 | 76 | 3 |
module Main where
-- | Emit a fixed sequence of lines covering plain ASCII, non-ASCII symbols,
-- and a combining-character sequence, exercising Unicode output on stdout.
main :: IO ()
main = mapM_ putStrLn ["7", "S", "△", "☃", "¥", "n̂"]
| pparkkin/eta | tests/basic/unicode/Unicode.hs | bsd-3-clause | 140 | 0 | 7 | 35 | 55 | 23 | 32 | 9 | 1 |
module Settings.Builders.Make (makeBuilderArgs) where
import Rules.Gmp
import Rules.Libffi
import Settings.Builders.Common
-- | Arguments passed when the @make@ builder runs in the in-tree gmp or
-- libffi build directories, or in the testsuite. Parallelism is at least 4
-- jobs, otherwise two fewer than Shake's own thread count.
makeBuilderArgs :: Args
makeBuilderArgs = do
    threads <- shakeThreads <$> expr getShakeOptions
    gmpPath <- expr gmpBuildPath
    libffiPath <- expr libffiBuildPath
    let t = show $ max 4 (threads - 2) -- Don't use all Shake's threads
    mconcat
        [ builder (Make gmpPath ) ? pure ["MAKEFLAGS=-j" ++ t]
        , builder (Make libffiPath ) ? pure ["MAKEFLAGS=-j" ++ t, "install"]
        , builder (Make "testsuite/tests") ? pure ["THREADS=" ++ t, "fast"] ]
| ezyang/ghc | hadrian/src/Settings/Builders/Make.hs | bsd-3-clause | 618 | 0 | 13 | 146 | 188 | 96 | 92 | 14 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Crypto.Hash.Tsuraan.Blake2.Parallel
( Ctx
, init
, init_key
, update
, finalize
, hash
, hash_key
) where
import Data.ByteString ( ByteString )
import System.IO.Unsafe ( unsafePerformIO )
import Foreign.C ( CInt(..) )
import Foreign.Ptr ( Ptr )
import Foreign.Storable ( Storable(..) )
import Data.Word ( Word8 )
import Crypto.Hash.Tsuraan.Blake2.Internal ( BlakeState(..), runHasher, runInit, runInitKey, runUpdate, runFinalize )
import Prelude hiding ( init )
-- Opaque placeholder for the C-side blake2bp state struct; values are only
-- ever manipulated through the foreign functions below.
data Blake2bpState

-- |Opaque type that tracks the Blake2 hashing state. The update and finalize
-- functions mutate this context.
newtype Ctx = Ctx (BlakeState Blake2bpState) deriving ( Show )

-- The Storable instance exists so that state buffers can be allocated with
-- the correct size and alignment; peek/poke are deliberately unsupported
-- ('error'), so field contents are only touched by the C routines.
instance Storable Blake2bpState where
  sizeOf _ = unsafePerformIO blake2bp_size
  alignment _ = 64 -- from blake2.h; this should be automagical, I think
  peek _ = error "no peek"
  poke _a _b = error "no poke"

-- One-shot parallel hash over a whole buffer (three buffer pointers plus
-- three length/size ints; exact argument order follows blake2.h's blake2bp
-- -- confirm against the C header).
foreign import ccall "blake2.h blake2bp" blake2bp
  :: Ptr Word8 -> Ptr Word8 -> Ptr Word8 -> Int -> Int -> Int -> IO CInt

foreign import ccall "blake2.h blake2bp_init" blake2bp_init
  :: Ptr Blake2bpState -> Int -> IO CInt

foreign import ccall "blake2.h blake2bp_init_key" blake2bp_init_key
  :: Ptr Blake2bpState -> Int -> Ptr Word8 -> Int -> IO CInt

foreign import ccall "blake2.h blake2bp_update" blake2bp_update
  :: Ptr Blake2bpState -> Ptr Word8 -> Int -> IO CInt

foreign import ccall "blake2.h blake2bp_final" blake2bp_final
  :: Ptr Blake2bpState -> Ptr Word8 -> Int -> IO CInt

-- Size in bytes of the C blake2bp state, exported by a helper in alloc.h.
foreign import ccall "alloc.h blake2bp_size" blake2bp_size
  :: IO Int
-- |Allocate a fresh parallel Blake2b hashing context that will produce a
-- digest of the requested length.
init :: Int -- ^Desired digest size
     -> IO Ctx
init digestLen = do
  st <- runInit blake2bp_init digestLen
  return (Ctx st)

-- |Allocate a fresh hashing context for key-based (authenticated) hashing.
init_key :: ByteString -- ^Desired hashing key
         -> Int        -- ^Desired digest size
         -> IO Ctx
init_key k digestLen = do
  st <- runInitKey blake2bp_init_key k digestLen
  return (Ctx st)

-- |Feed a chunk of input into the hash, mutating the context in place.
update :: Ctx        -- ^Hashing context
       -> ByteString -- ^Data to add to the hash
       -> IO ()
update (Ctx st) chunk = runUpdate blake2bp_update st chunk

-- |Close the context, producing the digest of everything passed to
-- 'update'.
finalize :: Ctx -- ^Hashing context
         -> IO ByteString
finalize (Ctx st) = runFinalize blake2bp_final st

-- |One-shot keyed hash of a 'ByteString'. Always runs the parallel
-- implementation, which is slower for very small inputs but faster as
-- inputs get larger.
hash_key :: ByteString -- ^The key to hash with
         -> Int        -- ^The digest size to generate; must be 1-64
         -> ByteString -- ^The string to hash
         -> ByteString
hash_key k digestLen input = runHasher blake2bp k digestLen input

-- |One-shot unkeyed hash of a 'ByteString'; equivalent to
-- @'hash_key' \"\"@. Always runs the parallel implementation, which is
-- slower for very small inputs but faster as inputs get larger.
hash :: Int        -- ^The digest size to generate; must be 1-64
     -> ByteString -- ^The string to hash
     -> ByteString
hash digestLen = hash_key "" digestLen
| tsuraan/hs-blake2 | src/Crypto/Hash/Tsuraan/Blake2/Parallel.hs | bsd-3-clause | 3,160 | 0 | 12 | 662 | 636 | 352 | 284 | -1 | -1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ViewPatterns #-}
{-# OPTIONS_GHC -Wno-incomplete-record-updates #-}
module GHC.Rename.Splice (
rnTopSpliceDecls,
rnSpliceType, rnSpliceExpr, rnSplicePat, rnSpliceDecl,
rnBracket,
checkThLocalName
, traceSplice, SpliceInfo(..)
) where
#include "HsVersions.h"
import GhcPrelude
import Name
import NameSet
import GHC.Hs
import RdrName
import TcRnMonad
import GHC.Rename.Env
import GHC.Rename.Utils ( HsDocContext(..), newLocalBndrRn )
import GHC.Rename.Unbound ( isUnboundName )
import GHC.Rename.Source ( rnSrcDecls, findSplice )
import GHC.Rename.Pat ( rnPat )
import BasicTypes ( TopLevelFlag, isTopLevel, SourceText(..) )
import Outputable
import Module
import SrcLoc
import GHC.Rename.Types ( rnLHsType )
import Control.Monad ( unless, when )
import {-# SOURCE #-} GHC.Rename.Expr ( rnLExpr )
import TcEnv ( checkWellStaged )
import THNames ( liftName )
import DynFlags
import FastString
import ErrUtils ( dumpIfSet_dyn_printer, DumpFormat (..) )
import TcEnv ( tcMetaTy )
import Hooks
import THNames ( quoteExpName, quotePatName, quoteDecName, quoteTypeName
, decsQTyConName, expQTyConName, patQTyConName, typeQTyConName, )
import {-# SOURCE #-} TcExpr ( tcPolyExpr )
import {-# SOURCE #-} TcSplice
( runMetaD
, runMetaE
, runMetaP
, runMetaT
, tcTopSpliceExpr
)
import TcHsSyn
import GHCi.RemoteTypes ( ForeignRef )
import qualified Language.Haskell.TH as TH (Q)
import qualified GHC.LanguageExtensions as LangExt
{-
************************************************************************
* *
Template Haskell brackets
* *
************************************************************************
-}
-- | Rename a Template Haskell bracket: check that quotations are enabled,
-- that typed/untyped brackets are not mixed with the wrong kind of splice
-- and are not nested, then rename the bracket body at the appropriate
-- stage. Untyped brackets additionally collect their pending splices in a
-- mutable variable, returned in the 'HsRnBracketOut' result.
rnBracket :: HsExpr GhcPs -> HsBracket GhcPs -> RnM (HsExpr GhcRn, FreeVars)
rnBracket e br_body
  = addErrCtxt (quotationCtxtDoc br_body) $
    do { -- Check that -XTemplateHaskellQuotes is enabled and available
         thQuotesEnabled <- xoptM LangExt.TemplateHaskellQuotes
       ; unless thQuotesEnabled $
           failWith ( vcat
                      [ text "Syntax error on" <+> ppr e
                      , text ("Perhaps you intended to use TemplateHaskell"
                              ++ " or TemplateHaskellQuotes") ] )

         -- Check for nested brackets
       ; cur_stage <- getStage
       ; case cur_stage of
           { Splice Typed   -> checkTc (isTypedBracket br_body)
                                       illegalUntypedBracket
           ; Splice Untyped -> checkTc (not (isTypedBracket br_body))
                                       illegalTypedBracket
           ; RunSplice _    ->
               -- See Note [RunSplice ThLevel] in "TcRnTypes".
               pprPanic "rnBracket: Renaming bracket when running a splice"
                        (ppr e)
           ; Comp           -> return ()
           ; Brack {}       -> failWithTc illegalBracket
           }

         -- Brackets are desugared to code that mentions the TH package
       ; recordThUse

       ; case isTypedBracket br_body of
            True  -> do { traceRn "Renaming typed TH bracket" empty
                        ; (body', fvs_e) <-
                          setStage (Brack cur_stage RnPendingTyped) $
                                   rn_bracket cur_stage br_body
                        ; return (HsBracket noExtField body', fvs_e) }

            False -> do { traceRn "Renaming untyped TH bracket" empty
                        ; ps_var <- newMutVar []
                        ; (body', fvs_e) <-
                          setStage (Brack cur_stage (RnPendingUntyped ps_var)) $
                                   rn_bracket cur_stage br_body
                        ; pendings <- readMutVar ps_var
                        ; return (HsRnBracketOut noExtField body' pendings, fvs_e) }
       }
-- | Rename the body of a bracket, given the stage in force outside it.
-- Variable quotes get a stage-consistency check (see the VarBr case);
-- expression, pattern, type, typed-expression and declaration quotes are
-- renamed with the corresponding renamer.
rn_bracket :: ThStage -> HsBracket GhcPs -> RnM (HsBracket GhcRn, FreeVars)
rn_bracket outer_stage br@(VarBr x flg rdr_name)
  = do { name <- lookupOccRn rdr_name
       ; this_mod <- getModule

       ; when (flg && nameIsLocalOrFrom this_mod name) $
             -- Type variables can be quoted in TH. See #5721.
         do { mb_bind_lvl <- lookupLocalOccThLvl_maybe name
            ; case mb_bind_lvl of
                { Nothing -> return ()      -- Can happen for data constructors,
                                            -- but nothing needs to be done for them

                ; Just (top_lvl, bind_lvl)  -- See Note [Quoting names]
                     | isTopLevel top_lvl
                     -> when (isExternalName name) (keepAlive name)
                     | otherwise
                     -> do { traceRn "rn_bracket VarBr"
                               (ppr name <+> ppr bind_lvl
                                         <+> ppr outer_stage)
                           ; checkTc (thLevel outer_stage + 1 == bind_lvl)
                                     (quotedNameStageErr br) }
                }
            }
       ; return (VarBr x flg name, unitFV name) }

rn_bracket _ (ExpBr x e) = do { (e', fvs) <- rnLExpr e
                              ; return (ExpBr x e', fvs) }

rn_bracket _ (PatBr x p)
  = rnPat ThPatQuote p $ \ p' -> return (PatBr x p', emptyFVs)

rn_bracket _ (TypBr x t) = do { (t', fvs) <- rnLHsType TypBrCtx t
                              ; return (TypBr x t', fvs) }

rn_bracket _ (DecBrL x decls)
  = do { group <- groupDecls decls
       ; gbl_env <- getGblEnv
       ; let new_gbl_env = gbl_env { tcg_dus = emptyDUs }
                          -- The emptyDUs is so that we just collect uses for this
                          -- group alone in the call to rnSrcDecls below
       ; (tcg_env, group') <- setGblEnv new_gbl_env $
                              rnSrcDecls group
         -- Discard the tcg_env; it contains only extra info about fixity
       ; traceRn "rn_bracket dec" (ppr (tcg_dus tcg_env) $$
                  ppr (duUses (tcg_dus tcg_env)))
       ; return (DecBrG x group', duUses (tcg_dus tcg_env)) }
  where
    -- Split the declaration list at top-level splices, renaming each
    -- segment into an 'HsGroup' and appending the groups back together.
    groupDecls :: [LHsDecl GhcPs] -> RnM (HsGroup GhcPs)
    groupDecls decls
      = do { (group, mb_splice) <- findSplice decls
           ; case mb_splice of
           { Nothing -> return group
           ; Just (splice, rest) ->
               do { group' <- groupDecls rest
                  ; let group'' = appendGroups group group'
                  ; return group'' { hs_splcds = noLoc splice : hs_splcds group' }
                  }
           }}

rn_bracket _ (DecBrG {}) = panic "rn_bracket: unexpected DecBrG"

rn_bracket _ (TExpBr x e) = do { (e', fvs) <- rnLExpr e
                               ; return (TExpBr x e', fvs) }

rn_bracket _ (XBracket nec) = noExtCon nec
-- | Error-message context naming the quotation being renamed.
quotationCtxtDoc :: HsBracket GhcPs -> SDoc
quotationCtxtDoc br_body
  = hang (text "In the Template Haskell quotation")
         2 (ppr br_body)

-- | Error for a bracket occurring inside another bracket.
illegalBracket :: SDoc
illegalBracket =
    text "Template Haskell brackets cannot be nested" <+>
    text "(without intervening splices)"

-- | Error for a typed bracket inside an untyped splice.
illegalTypedBracket :: SDoc
illegalTypedBracket =
    text "Typed brackets may only appear in typed splices."

-- | Error for an untyped bracket inside a typed splice.
illegalUntypedBracket :: SDoc
illegalUntypedBracket =
    text "Untyped brackets may only appear in untyped splices."

-- | Error for quoting a non-top-level name at the wrong stage
-- (see the VarBr case of 'rn_bracket').
quotedNameStageErr :: HsBracket GhcPs -> SDoc
quotedNameStageErr br
  = sep [ text "Stage error: the non-top-level quoted name" <+> ppr br
        , text "must be used at the same stage at which it is bound" ]
{-
*********************************************************
* *
Splices
* *
*********************************************************
Note [Free variables of typed splices]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider renaming this:
f = ...
h = ...$(thing "f")...
where the splice is a *typed* splice. The splice can expand into
literally anything, so when we do dependency analysis we must assume
that it might mention 'f'. So we simply treat all locally-defined
names as mentioned by any splice. This is terribly brutal, but I
don't see what else to do. For example, it'll mean that every
locally-defined thing will appear to be used, so no unused-binding
warnings. But if we miss the dependency, then we might typecheck 'h'
before 'f', and that will crash the type checker because 'f' isn't in
scope.
Currently, I'm not treating a splice as also mentioning every import,
which is a bit inconsistent -- but there are a lot of them. We might
thereby get some bogus unused-import warnings, but we won't crash the
type checker. Not very satisfactory really.
Note [Renamer errors]
~~~~~~~~~~~~~~~~~~~~~
It's important to wrap renamer calls in checkNoErrs, because the
renamer does not fail for out of scope variables etc. Instead it
returns a bogus term/type, so that it can report more than one error.
We don't want the type checker to see these bogus unbound variables.
-}
-- | Rename a splice, dispatching on the current Template Haskell stage:
-- inside a bracket the splice is renamed and recorded as /pending/ (via
-- @pend_splice@); outside brackets it is renamed and actually run (via
-- @run_splice@).
rnSpliceGen :: (HsSplice GhcRn -> RnM (a, FreeVars))
                    -- Outside brackets, run splice
            -> (HsSplice GhcRn -> (PendingRnSplice, a))
                    -- Inside brackets, make it pending
            -> HsSplice GhcPs
            -> RnM (a, FreeVars)
rnSpliceGen run_splice pend_splice splice
  = addErrCtxt (spliceCtxt splice) $ do
    { stage <- getStage
    ; case stage of
        -- Inside a typed bracket: only typed splices are legal here;
        -- rename at the popped stage, discard the pending entry (typed
        -- pending splices are handled later, in the typechecker)
        Brack pop_stage RnPendingTyped
          -> do { checkTc is_typed_splice illegalUntypedSplice
                ; (splice', fvs) <- setStage pop_stage $
                                    rnSplice splice
                ; let (_pending_splice, result) = pend_splice splice'
                ; return (result, fvs) }
        -- Inside an untyped bracket: record the pending splice in ps_var
        Brack pop_stage (RnPendingUntyped ps_var)
          -> do { checkTc (not is_typed_splice) illegalTypedSplice
                ; (splice', fvs) <- setStage pop_stage $
                                    rnSplice splice
                ; let (pending_splice, result) = pend_splice splice'
                ; ps <- readMutVar ps_var
                ; writeMutVar ps_var (pending_splice : ps)
                ; return (result, fvs) }
        -- Not inside a bracket: rename, then actually run the splice
        _ -> do { (splice', fvs1) <- checkNoErrs $
                      setStage (Splice splice_type) $
                      rnSplice splice
                   -- checkNoErrs: don't attempt to run the splice if
                   -- renaming it failed; otherwise we get a cascade of
                   -- errors from e.g. unbound variables
                ; (result, fvs2) <- run_splice splice'
                ; return (result, fvs1 `plusFV` fvs2) } }
  where
    is_typed_splice = isTypedSplice splice
    splice_type = if is_typed_splice
                  then Typed
                  else Untyped
------------------
-- | Returns the result of running a splice and the modFinalizers collected
-- during the execution.
--
-- The splice expression is typechecked against the @Q@-monad type matching
-- the splice flavour, run with a fresh finalizer ref in scope, and the
-- collected finalizers are returned alongside the result.
--
-- See Note [Delaying modFinalizers in untyped splices].
runRnSplice :: UntypedSpliceFlavour
            -> (LHsExpr GhcTc -> TcRn res)
            -> (res -> SDoc)    -- How to pretty-print res
                                -- Usually just ppr, but not for [Decl]
            -> HsSplice GhcRn   -- Always untyped
            -> TcRn (res, [ForeignRef (TH.Q ())])
runRnSplice flavour run_meta ppr_res splice
  = do { -- Give plugins a chance to rewrite the splice first
         splice' <- getHooked runRnSpliceHook return >>= ($ splice)
       ; let the_expr = case splice' of
                HsUntypedSplice _ _ _ e -> e
                HsQuasiQuote _ _ q qs str -> mkQuasiQuoteExpr flavour q qs str
                HsTypedSplice {} -> pprPanic "runRnSplice" (ppr splice)
                HsSpliced {} -> pprPanic "runRnSplice" (ppr splice)
                HsSplicedT {} -> pprPanic "runRnSplice" (ppr splice)
                XSplice nec -> noExtCon nec
             -- Typecheck the expression
       ; meta_exp_ty <- tcMetaTy meta_ty_name
       ; zonked_q_expr <- zonkTopLExpr =<<
                            tcTopSpliceExpr Untyped
                              (tcPolyExpr the_expr meta_exp_ty)
             -- Run the expression, collecting any addModFinalizer calls
       ; mod_finalizers_ref <- newTcRef []
       ; result <- setStage (RunSplice mod_finalizers_ref) $
                     run_meta zonked_q_expr
       ; mod_finalizers <- readTcRef mod_finalizers_ref
       ; traceSplice (SpliceInfo { spliceDescription = what
                                 , spliceIsDecl = is_decl
                                 , spliceSource = Just the_expr
                                 , spliceGenerated = ppr_res result })
       ; return (result, mod_finalizers) }
  where
    -- TH type of the splice expression, per flavour
    meta_ty_name = case flavour of
                     UntypedExpSplice -> expQTyConName
                     UntypedPatSplice -> patQTyConName
                     UntypedTypeSplice -> typeQTyConName
                     UntypedDeclSplice -> decsQTyConName
    -- Description used in -ddump-splices output
    what = case flavour of
             UntypedExpSplice -> "expression"
             UntypedPatSplice -> "pattern"
             UntypedTypeSplice -> "type"
             UntypedDeclSplice -> "declarations"
    is_decl = case flavour of
                UntypedDeclSplice -> True
                _ -> False
------------------
-- | Wrap an already-renamed untyped splice (or quasi-quote) as a
-- 'PendingRnSplice' to be run later.  Quasi-quotes are first desugared
-- into a @(quoter "...")@ application via 'mkQuasiQuoteExpr'.
-- Typed/already-spliced forms cannot occur here.
makePending :: UntypedSpliceFlavour
            -> HsSplice GhcRn
            -> PendingRnSplice
makePending flavour (HsUntypedSplice _ _ n e)
  = PendingRnSplice flavour n e
makePending flavour (HsQuasiQuote _ n quoter q_span quote)
  = PendingRnSplice flavour n (mkQuasiQuoteExpr flavour quoter q_span quote)
makePending _ splice@(HsTypedSplice {})
  = pprPanic "makePending" (ppr splice)
makePending _ splice@(HsSpliced {})
  = pprPanic "makePending" (ppr splice)
makePending _ splice@(HsSplicedT {})
  = pprPanic "makePending" (ppr splice)
makePending _ (XSplice nec)
  = noExtCon nec
------------------
mkQuasiQuoteExpr :: UntypedSpliceFlavour -> Name -> SrcSpan -> FastString
                 -> LHsExpr GhcRn
-- Return the expression (quoteSelector quoter "...quote...")
-- which is what we must run in a quasi-quote; quoteSelector picks the
-- appropriate field (quoteExp/quotePat/quoteType/quoteDec) out of the
-- QuasiQuoter according to the splice flavour.
mkQuasiQuoteExpr flavour quoter q_span quote
  = L q_span $ HsApp noExtField (L q_span
             $ HsApp noExtField (L q_span (HsVar noExtField (L q_span quote_selector)))
                                quoterExpr)
               quoteExpr
  where
    -- ($!) to avoid retaining a thunk on the (potentially long-lived) AST
    quoterExpr = L q_span $! HsVar noExtField $! (L q_span quoter)
    quoteExpr = L q_span $! HsLit noExtField $! HsString NoSourceText quote
    quote_selector = case flavour of
                       UntypedExpSplice -> quoteExpName
                       UntypedPatSplice -> quotePatName
                       UntypedTypeSplice -> quoteTypeName
                       UntypedDeclSplice -> quoteDecName
---------------------
-- | Rename the body of a splice: a fresh local binder for the splice-point
-- name, then the spliced expression (or, for quasi-quotes, the quoter
-- occurrence) is renamed.
rnSplice :: HsSplice GhcPs -> RnM (HsSplice GhcRn, FreeVars)
-- Not exported...used for all
rnSplice (HsTypedSplice x hasParen splice_name expr)
  = do { loc <- getSrcSpanM
       ; n' <- newLocalBndrRn (L loc splice_name)
       ; (expr', fvs) <- rnLExpr expr
       ; return (HsTypedSplice x hasParen n' expr', fvs) }
rnSplice (HsUntypedSplice x hasParen splice_name expr)
  = do { loc <- getSrcSpanM
       ; n' <- newLocalBndrRn (L loc splice_name)
       ; (expr', fvs) <- rnLExpr expr
       ; return (HsUntypedSplice x hasParen n' expr', fvs) }
rnSplice (HsQuasiQuote x splice_name quoter q_loc quote)
  = do { loc <- getSrcSpanM
       ; splice_name' <- newLocalBndrRn (L loc splice_name)
         -- Rename the quoter; akin to the HsVar case of rnExpr
       ; quoter' <- lookupOccRn quoter
       ; this_mod <- getModule
       ; when (nameIsLocalOrFrom this_mod quoter') $
         checkThLocalName quoter'
       ; return (HsQuasiQuote x splice_name' quoter' q_loc quote
                , unitFV quoter') }
rnSplice splice@(HsSpliced {}) = pprPanic "rnSplice" (ppr splice)
rnSplice splice@(HsSplicedT {}) = pprPanic "rnSplice" (ppr splice)
rnSplice (XSplice nec) = noExtCon nec
---------------------
-- | Rename an expression splice.  Typed splices are left for the
-- typechecker to run; untyped splices are run right here (see
-- Note [Running splices in the Renamer]).
rnSpliceExpr :: HsSplice GhcPs -> RnM (HsExpr GhcRn, FreeVars)
rnSpliceExpr splice
  = rnSpliceGen run_expr_splice pend_expr_splice splice
  where
    pend_expr_splice :: HsSplice GhcRn -> (PendingRnSplice, HsExpr GhcRn)
    pend_expr_splice rn_splice
        = (makePending UntypedExpSplice rn_splice, HsSpliceE noExtField rn_splice)
    run_expr_splice :: HsSplice GhcRn -> RnM (HsExpr GhcRn, FreeVars)
    run_expr_splice rn_splice
      | isTypedSplice rn_splice   -- Run it later, in the type checker
      = do { -- Ugh!  See Note [Free variables of typed splices] above:
             -- conservatively treat every locally-defined name as free
             traceRn "rnSpliceExpr: typed expression splice" empty
           ; lcl_rdr <- getLocalRdrEnv
           ; gbl_rdr <- getGlobalRdrEnv
           ; let gbl_names = mkNameSet [gre_name gre | gre <- globalRdrEnvElts gbl_rdr
                                                     , isLocalGRE gre]
                 lcl_names = mkNameSet (localRdrEnvElts lcl_rdr)
           ; return (HsSpliceE noExtField rn_splice, lcl_names `plusFV` gbl_names) }
      | otherwise  -- Run it here, see Note [Running splices in the Renamer]
      = do { traceRn "rnSpliceExpr: untyped expression splice" empty
           ; (rn_expr, mod_finalizers) <-
                runRnSplice UntypedExpSplice runMetaE ppr rn_splice
           ; (lexpr3, fvs) <- checkNoErrs (rnLExpr rn_expr)
             -- See Note [Delaying modFinalizers in untyped splices].
           ; return ( HsPar noExtField $ HsSpliceE noExtField
                              . HsSpliced noExtField (ThModFinalizers mod_finalizers)
                              . HsSplicedExpr <$>
                              lexpr3
                    , fvs)
           }
{- Note [Running splices in the Renamer]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Splices used to be run in the typechecker, which led to (#4364). Since the
renamer must decide which expressions depend on which others, and it cannot
reliably do this for arbitrary splices, we used to conservatively say that
splices depend on all other expressions in scope. Unfortunately, this led to
the problem of cyclic type declarations seen in (#4364). Instead, by
running splices in the renamer, we side-step the problem of determining
dependencies: by the time the dependency analysis happens, any splices have
already been run, and expression dependencies can be determined as usual.
However, see (#9813), for an example where we would like to run splices
*after* performing dependency analysis (that is, after renaming). It would be
desirable to typecheck "non-splicy" expressions (those expressions that do not
contain splices directly or via dependence on an expression that does) before
"splicy" expressions, such that types/expressions within the same declaration
group would be available to `reify` calls, for example consider the following:
> module M where
> data D = C
> f = 1
> g = $(mapM reify ['f, 'D, ''C] ...)
Compilation of this example fails since D/C/f are not in the type environment
and thus cannot be reified as they have not been typechecked by the time the
splice is renamed and thus run.
These requirements are at odds: we do not want to run splices in the renamer as
we wish to first determine dependencies and typecheck certain expressions,
making them available to reify, but cannot accurately determine dependencies
without running splices in the renamer!
Indeed, the conclusion of (#9813) was that it is not worth the complexity
to try and
a) implement and maintain the code for renaming/typechecking non-splicy
expressions before splicy expressions,
b) explain to TH users which expressions are/not available to reify at any
given point.
-}
{- Note [Delaying modFinalizers in untyped splices]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When splices run in the renamer, 'reify' does not have access to the local
type environment (#11832, [1]).
For instance, in
> let x = e in $(reify (mkName "x") >>= runIO . print >> [| return () |])
'reify' cannot find @x@, because the local type environment is not yet
populated. To address this, we allow 'reify' execution to be deferred with
'addModFinalizer'.
> let x = e in $(do addModFinalizer (reify (mkName "x") >>= runIO . print)
[| return () |]
)
The finalizer is run with the local type environment when type checking is
complete.
Since the local type environment is not available in the renamer, we annotate
the tree at the splice point [2] with @HsSpliceE (HsSpliced finalizers e)@ where
@e@ is the result of splicing and @finalizers@ are the finalizers that have been
collected during evaluation of the splice [3]. In our example,
> HsLet
> (x = e)
> (HsSpliceE $ HsSpliced [reify (mkName "x") >>= runIO . print]
> (HsSplicedExpr $ return ())
> )
When the typechecker finds the annotation, it inserts the finalizers in the
global environment and exposes the current local environment to them [4, 5, 6].
> addModFinalizersWithLclEnv [reify (mkName "x") >>= runIO . print]
References:
[1] https://gitlab.haskell.org/ghc/ghc/wikis/template-haskell/reify
[2] 'rnSpliceExpr'
[3] 'TcSplice.qAddModFinalizer'
[4] 'TcExpr.tcExpr' ('HsSpliceE' ('HsSpliced' ...))
[5] 'TcHsType.tc_hs_type' ('HsSpliceTy' ('HsSpliced' ...))
[6] 'TcPat.tc_pat' ('SplicePat' ('HsSpliced' ...))
-}
----------------------
-- | Rename a type splice.  Type splices are always untyped; outside
-- brackets they are run immediately and the resulting type is renamed.
rnSpliceType :: HsSplice GhcPs -> RnM (HsType GhcRn, FreeVars)
rnSpliceType splice
  = rnSpliceGen run_type_splice pend_type_splice splice
  where
    pend_type_splice rn_splice
       = ( makePending UntypedTypeSplice rn_splice
         , HsSpliceTy noExtField rn_splice)
    run_type_splice rn_splice
      = do { traceRn "rnSpliceType: untyped type splice" empty
           ; (hs_ty2, mod_finalizers) <-
                runRnSplice UntypedTypeSplice runMetaT ppr rn_splice
           ; (hs_ty3, fvs) <- do { let doc = SpliceTypeCtx hs_ty2
                                 ; checkNoErrs $ rnLHsType doc hs_ty2 }
                                 -- checkNoErrs: see Note [Renamer errors]
             -- See Note [Delaying modFinalizers in untyped splices].
           ; return ( HsParTy noExtField
                    $ HsSpliceTy noExtField
                      . HsSpliced noExtField (ThModFinalizers mod_finalizers)
                      . HsSplicedTy <$>
                      hs_ty3
                    , fvs
                    ) }
              -- Wrap the result of the splice in parens so that we don't
              -- lose the outermost location set by runQuasiQuote (#7918)
{- Note [Partial Type Splices]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Partial Type Signatures are partially supported in TH type splices: only
anonymous wild cards are allowed.
-- ToDo: SLPJ says: I don't understand all this
Normally, named wild cards are collected before renaming a (partial) type
signature. However, TH type splices are run during renaming, i.e. after the
initial traversal, leading to out of scope errors for named wild cards. We
can't just extend the initial traversal to collect the named wild cards in TH
type splices, as we'd need to expand them, which is supposed to happen only
once, during renaming.
Similarly, the extra-constraints wild card is handled right before renaming
too, and is therefore also not supported in a TH type splice. Another reason
to forbid extra-constraints wild cards in TH type splices is that a single
signature can contain many TH type splices, whereas it mustn't contain more
than one extra-constraints wild card. Enforcing this would be hard the way
things are currently organised.
Anonymous wild cards pose no problem, because they start out without names and
are given names during renaming. These names are collected right after
renaming. The names generated for anonymous wild cards in TH type splices will
thus be collected as well.
For more details about renaming wild cards, see GHC.Rename.Types.rnHsSigWcType
Note that partial type signatures are fully supported in TH declaration
splices, e.g.:
[d| foo :: _ => _
foo x y = x == y |]
This is because in this case, the partial type signature can be treated as a
whole signature, instead of as an arbitrary type.
-}
----------------------
-- | Rename a splice pattern. See Note [rnSplicePat]
--
-- Returns @Left pat@ (a parsed pattern, the result of running a top-level
-- splice, still to be renamed by the caller) or @Right pat@ (a renamed
-- pending splice, when we are inside a bracket).
rnSplicePat :: HsSplice GhcPs -> RnM ( Either (Pat GhcPs) (Pat GhcRn)
                                     , FreeVars)
rnSplicePat splice
  = rnSpliceGen run_pat_splice pend_pat_splice splice
  where
    pend_pat_splice :: HsSplice GhcRn ->
                       (PendingRnSplice, Either b (Pat GhcRn))
    pend_pat_splice rn_splice
      = (makePending UntypedPatSplice rn_splice
        , Right (SplicePat noExtField rn_splice))
    run_pat_splice :: HsSplice GhcRn ->
                      RnM (Either (Pat GhcPs) (Pat GhcRn), FreeVars)
    run_pat_splice rn_splice
      = do { traceRn "rnSplicePat: untyped pattern splice" empty
           ; (pat, mod_finalizers) <-
                runRnSplice UntypedPatSplice runMetaP ppr rn_splice
             -- See Note [Delaying modFinalizers in untyped splices].
           ; return ( Left $ ParPat noExtField $ ((SplicePat noExtField)
                                  . HsSpliced noExtField (ThModFinalizers mod_finalizers)
                                  . HsSplicedPat) `mapLoc`
                                  pat
                    , emptyFVs
                    ) }
              -- Wrap the result of the quasi-quoter in parens so that we don't
              -- lose the outermost location set by runQuasiQuote (#7918)
----------------------
-- | Rename a declaration splice.  A nested (bracketed) declaration splice
-- becomes pending; a top-level one is handled by 'rnTopSpliceDecls'
-- instead, so the run case is unreachable here.
rnSpliceDecl :: SpliceDecl GhcPs -> RnM (SpliceDecl GhcRn, FreeVars)
rnSpliceDecl (SpliceDecl _ (L loc splice) flg)
  = rnSpliceGen run_decl_splice pend_decl_splice splice
  where
    pend_decl_splice rn_splice
       = ( makePending UntypedDeclSplice rn_splice
         , SpliceDecl noExtField (L loc rn_splice) flg)
    run_decl_splice rn_splice = pprPanic "rnSpliceDecl" (ppr rn_splice)
rnSpliceDecl (XSpliceDecl nec) = noExtCon nec
rnTopSpliceDecls :: HsSplice GhcPs -> RnM ([LHsDecl GhcPs], FreeVars)
-- Declaration splice at the very top level of the module
-- The splice is renamed and run immediately; the generated declarations
-- are returned still-parsed, for the caller to rename.
rnTopSpliceDecls splice
   = do { (rn_splice, fvs) <- checkNoErrs $
                              setStage (Splice Untyped) $
                              rnSplice splice
          -- As always, be sure to checkNoErrs above lest we end up with
          -- holes making it to typechecking, hence #12584.
          --
          -- Note that we cannot call checkNoErrs for the whole duration
          -- of rnTopSpliceDecls. The reason is that checkNoErrs changes
          -- the local environment to temporarily contain a new
          -- reference to store errors, and add_mod_finalizers would
          -- cause this reference to be stored after checkNoErrs finishes.
          -- This is checked by test TH_finalizer.
        ; traceRn "rnTopSpliceDecls: untyped declaration splice" empty
        ; (decls, mod_finalizers) <- checkNoErrs $
              runRnSplice UntypedDeclSplice runMetaD ppr_decls rn_splice
        ; add_mod_finalizers_now mod_finalizers
        ; return (decls,fvs) }
   where
     ppr_decls :: [LHsDecl GhcPs] -> SDoc
     ppr_decls ds = vcat (map ppr ds)
     -- Adds finalizers to the global environment instead of delaying them
     -- to the type checker.
     --
     -- Declaration splices do not have an interesting local environment so
     -- there is no point in delaying them.
     --
     -- See Note [Delaying modFinalizers in untyped splices].
     add_mod_finalizers_now :: [ForeignRef (TH.Q ())] -> TcRn ()
     add_mod_finalizers_now [] = return ()
     add_mod_finalizers_now mod_finalizers = do
       th_modfinalizers_var <- fmap tcg_th_modfinalizers getGblEnv
       env <- getLclEnv
       updTcRef th_modfinalizers_var $ \fins ->
         (env, ThModFinalizers mod_finalizers) : fins
{-
Note [rnSplicePat]
~~~~~~~~~~~~~~~~~~
Renaming a pattern splice is a bit tricky, because we need the variables
bound in the pattern to be in scope in the RHS of the pattern. This scope
management is effectively done by using continuation-passing style in
GHC.Rename.Pat, through the CpsRn monad. We don't wish to be in that monad here
(it would create import cycles and generally conflict with renaming other
splices), so we really want to return a (Pat RdrName) -- the result of
running the splice -- which can then be further renamed in GHC.Rename.Pat, in
the CpsRn monad.
The problem is that if we're renaming a splice within a bracket, we
*don't* want to run the splice now. We really do just want to rename
it to an HsSplice Name. Of course, then we can't know what variables
are bound within the splice. So we accept any unbound variables and
rename them again when the bracket is spliced in. If a variable is brought
into scope by a pattern splice all is fine. If it is not then an error is
reported.
In any case, when we're done in rnSplicePat, we'll either have a
Pat RdrName (the result of running a top-level splice) or a Pat Name
(the renamed nested splice). Thus, the awkward return type of
rnSplicePat.
-}
-- | Error-message context describing the splice being renamed.
spliceCtxt :: HsSplice GhcPs -> SDoc
spliceCtxt splice
  = hang (text "In the" <+> what) 2 (ppr splice)
  where
    what = case splice of
             HsUntypedSplice {} -> text "untyped splice:"
             HsTypedSplice {} -> text "typed splice:"
             HsQuasiQuote {} -> text "quasi-quotation:"
             HsSpliced {} -> text "spliced expression:"
             HsSplicedT {} -> text "spliced expression:"
             XSplice {} -> text "spliced expression:"
-- | The splice data to be logged (consumed by 'traceSplice' for the
-- -ddump-splices and -dth-dec-file outputs)
data SpliceInfo
  = SpliceInfo
    { spliceDescription :: String
    , spliceSource :: Maybe (LHsExpr GhcRn) -- Nothing <=> top-level decls
                                            --        added by addTopDecls
    , spliceIsDecl :: Bool    -- True <=> put the generated code in a file
                              --          when -dth-dec-file is on
    , spliceGenerated :: SDoc
    }
        -- Note that 'spliceSource' is *renamed* but not *typechecked*
        -- Reason (a) less typechecking crap
        --        (b) data constructors after type checking have been
        --            changed to their *wrappers*, and that makes them
        --            print always fully qualified
-- | Outputs splice information for 2 flags which have different output
-- formats: `-ddump-splices` and `-dth-dec-file`.
-- The reported source location is the splice's own, when available.
traceSplice :: SpliceInfo -> TcM ()
traceSplice (SpliceInfo { spliceDescription = sd, spliceSource = mb_src
                        , spliceGenerated = gen, spliceIsDecl = is_decl })
  = do { loc <- case mb_src of
                   Nothing -> getSrcSpanM
                   Just (L loc _) -> return loc
       ; traceOptTcRn Opt_D_dump_splices (spliceDebugDoc loc)
       ; when is_decl $ -- Raw material for -dth-dec-file
         do { dflags <- getDynFlags
            ; liftIO $ dumpIfSet_dyn_printer alwaysQualify dflags Opt_D_th_dec_file
                           "" FormatHaskell (spliceCodeDoc loc) } }
  where
    -- `-ddump-splices`: show "source ======> generated"
    spliceDebugDoc :: SrcSpan -> SDoc
    spliceDebugDoc loc
      = let code = case mb_src of
                     Nothing -> ending
                     Just e -> nest 2 (ppr (stripParensHsExpr e)) : ending
            ending = [ text "======>", nest 2 gen ]
        in hang (ppr loc <> colon <+> text "Splicing" <+> text sd)
              2 (sep code)
    -- `-dth-dec-file`: generated code only, with a location comment
    spliceCodeDoc :: SrcSpan -> SDoc
    spliceCodeDoc loc
      = vcat [ text "--" <+> ppr loc <> colon <+> text "Splicing" <+> text sd
             , gen ]
-- | Error: a typed splice @$$(..)@ occurring in an untyped bracket.
illegalTypedSplice :: SDoc
illegalTypedSplice = text "Typed splices may not appear in untyped brackets"
-- | Error: an untyped splice @$(..)@ occurring in a typed bracket.
illegalUntypedSplice :: SDoc
illegalUntypedSplice = text "Untyped splices may not appear in typed brackets"
-- | Check the staging restrictions for a locally-bound name used inside
-- Template Haskell quotes/splices, and perform cross-stage lifting when
-- required.  Names that are not locally bound need no check.
checkThLocalName :: Name -> RnM ()
checkThLocalName name
  | isUnboundName name   -- Do not report two errors for
  = return ()            --   $(not_in_scope args)
  | otherwise
  = do { traceRn "checkThLocalName" (ppr name)
       ; mb_local_use <- getStageAndBindLevel name
       ; case mb_local_use of {
            Nothing -> return () ;  -- Not a locally-bound thing
            Just (top_lvl, bind_lvl, use_stage) ->
    do { let use_lvl = thLevel use_stage
       ; checkWellStaged (quotes (ppr name)) bind_lvl use_lvl
       ; traceRn "checkThLocalName" (ppr name <+> ppr bind_lvl
                                              <+> ppr use_stage
                                              <+> ppr use_lvl)
       ; checkCrossStageLifting top_lvl bind_lvl use_stage use_lvl name } } }
--------------------------------------
checkCrossStageLifting :: TopLevelFlag -> ThLevel -> ThStage -> ThLevel
                       -> Name -> TcM ()
-- We are inside brackets, and (use_lvl > bind_lvl)
-- Now we must check whether there's a cross-stage lift to do
-- Examples   \x -> [| x |]
--            [| map |]
--
-- This code is similar to checkCrossStageLifting in TcExpr, but
-- this is only run on *untyped* brackets.
checkCrossStageLifting top_lvl bind_lvl use_stage use_lvl name
  | Brack _ (RnPendingUntyped ps_var) <- use_stage   -- Only for untyped brackets
  , use_lvl > bind_lvl                               -- Cross-stage condition
  = check_cross_stage_lifting top_lvl name ps_var
  | otherwise                 -- Typed brackets, or no level crossing: nothing to do
  = return ()
-- | Perform the cross-stage lift for a name used at a later stage than
-- its binding: top-level names are merely kept alive, nested names are
-- rewritten to a pending @lift@ splice.
check_cross_stage_lifting :: TopLevelFlag -> Name -> TcRef [PendingRnSplice] -> TcM ()
check_cross_stage_lifting top_lvl name ps_var
  | isTopLevel top_lvl
        -- Top-level identifiers in this module,
        -- (which have External Names)
        -- are just like the imported case:
        -- no need for the 'lifting' treatment
        -- E.g.  this is fine:
        --   f x = x
        --   g y = [| f 3 |]
  = when (isExternalName name) (keepAlive name)
    -- See Note [Keeping things alive for Template Haskell]
  | otherwise
  =     -- Nested identifiers, such as 'x' in
        -- E.g. \x -> [| h x |]
        -- We must behave as if the reference to x was
        --      h $(lift x)
        -- We use 'x' itself as the SplicePointName, used by
        -- the desugarer to stitch it all back together.
        -- If 'x' occurs many times we may get many identical
        -- bindings of the same SplicePointName, but that doesn't
        -- matter, although it's a mite untidy.
    do { traceRn "checkCrossStageLifting" (ppr name)
         -- Construct the (lift x) expression
       ; let lift_expr = nlHsApp (nlHsVar liftName) (nlHsVar name)
             pend_splice = PendingRnSplice UntypedExpSplice name lift_expr
         -- Update the pending splices
       ; ps <- readMutVar ps_var
       ; writeMutVar ps_var (pend_splice : ps) }
{-
Note [Keeping things alive for Template Haskell]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
f x = x+1
g y = [| f 3 |]
Here 'f' is referred to from inside the bracket, which turns into data
and mentions only f's *name*, not 'f' itself. So we need some other
way to keep 'f' alive, lest it get dropped as dead code. That's what
keepAlive does. It puts it in the keep-alive set, which subsequently
ensures that 'f' stays as a top level binding.
This must be done by the renamer, not the type checker (as of old),
because the type checker doesn't typecheck the body of untyped
brackets (#8540).
A thing can have a bind_lvl of outerLevel, but have an internal name:
foo = [d| op = 3
bop = op + 1 |]
Here the bind_lvl of 'op' is (bogusly) outerLevel, even though it is
bound inside a bracket. That is because we don't even record
binding levels for top-level things; the binding levels are in the
LocalRdrEnv.
So the occurrence of 'op' in the rhs of 'bop' looks a bit like a
cross-stage thing, but it isn't really. And in fact we never need
to do anything here for top-level bound things, so all is fine, if
a bit hacky.
For these chaps (which have Internal Names) we don't want to put
them in the keep-alive set.
Note [Quoting names]
~~~~~~~~~~~~~~~~~~~~
A quoted name 'n is a bit like a quoted expression [| n |], except that we
have no cross-stage lifting (c.f. TcExpr.thBrackId). So, after incrementing
the use-level to account for the brackets, the cases are:
bind > use Error
bind = use+1 OK
bind < use
Imported things OK
Top-level things OK
Non-top-level Error
where 'use' is the binding level of the 'n quote. (So inside the implied
bracket the level would be use+1.)
Examples:
f 'map -- OK; also for top-level defns of this module
\x. f 'x -- Not ok (bind = 1, use = 1)
-- (whereas \x. f [| x |] might have been ok, by
-- cross-stage lifting
\y. [| \x. $(f 'y) |] -- Not ok (bind =1, use = 1)
[| \x. $(f 'x) |] -- OK (bind = 2, use = 1)
-}
| sdiehl/ghc | compiler/GHC/Rename/Splice.hs | bsd-3-clause | 37,612 | 8 | 20 | 11,499 | 5,561 | 2,882 | 2,679 | 439 | 13 |
-- This is an alternative wrapper around the main command which
-- switches to a different directory before executing.
-- It can be used with "runhaskell" to run from source.
import qualified Intel.Cnc.Spec.MainExecutable as M
import System.Environment
import System.Directory
-- | Entry point.  The first command-line argument is the directory to
-- switch into; remaining arguments are forwarded to the real CnC
-- executable command.
main :: IO ()
main = do
    putStrLn$ "[CnC] Script wrapper, running from source."
    args <- getArgs
    case args of
      -- Fix: the previous 'head args'/'tail args' crashed with an
      -- unhelpful "Prelude.head: empty list" when no argument was given.
      [] -> error "cnc_wrapper: expected a working directory as the first argument"
      (dir : rest) -> do
        putStrLn$ "[CnC] Working directory: " ++ dir
        setCurrentDirectory dir
        withArgs rest M.mainExecutableCommand
| rrnewton/Haskell-CnC | cnc_wrapper.hs | bsd-3-clause | 523 | 0 | 10 | 100 | 88 | 45 | 43 | 11 | 1 |
{-# LANGUAGE PolyKinds #-}
module FFTRounded where
import ListNumSyntax
import Data.Complex
import Data.Number.IReal.Rounded
-- All functions in this file take a precision argument for (less in-)efficiency.
-- To get d correct decimals in result, slightly higher precision
-- argument should be used.
-- | Basic fast Fourier transform, for power-of-2 lengths only (not
-- checked; the empty list is rejected explicitly).  Radix-2 split:
-- combine the two halves, recurse, and interleave the results.
fft :: (RealFloat a) => [Complex a] -> [Complex a]
fft [] = error "fft: empty list"
fft [a] = [a]
fft [a,b] = [a+b,a-b]
fft as = interleave (fft cs) (fft ds)
  where
   n = length as
   (ls,rs) = splitAt (n `div` 2) as
   -- angular step of the twiddle factors
   a = -2*pi/fromIntegral n
   tws = [ cis (a * fromIntegral k) | k <- [0..n `div` 2 - 1] ]
   -- NOTE: (+), (-), (*) below act elementwise on lists,
   -- via the Num instance imported from ListNumSyntax
   cs = ls + rs
   ds = tws * (ls - rs)
-- | Alternate elements from the two lists, starting with the first list;
-- the roles swap at each step, so if one list runs out the rest of the
-- other is appended.
interleave :: [a] -> [a] -> [a]
interleave xs ys = case xs of
  []         -> ys
  (x : rest) -> x : interleave ys rest
-- | Inverse FFT: reverse all but the first sample (index negation mod n),
-- apply the forward FFT, and divide elementwise by the length
-- (the (/) on lists comes from ListNumSyntax).
ifft :: (RealFloat a) => [Complex a] -> [Complex a]
ifft as = (fft . rev) as / repeat n
   where n = fromIntegral (length as)
         -- NOTE(review): partial on [] — but 'fft []' errors anyway,
         -- so the empty case never succeeds either way
         rev (a : as) = a : reverse as
-- | Type 1 discrete cosine transform (for lengths 2^n + 1): mirror-extend
-- the input to an even sequence, take the FFT, and keep the (halved) real
-- parts of the first @length as@ bins.
dct :: (RealFloat a) => [a] -> [a]
dct as = (map ((/2) . realPart) . take (length as) . fft . map (:+ 0)) as'
   where as' = as ++ tail (reverse (tail as))
-- | Inverse DCT: the type 1 DCT is self-inverse up to a scale factor of
-- (length-1)/2 (the (/) on lists comes from ListNumSyntax).
idct :: (RealFloat a) => [a] -> [a]
idct cs = dct cs / repeat n'
  where n' = fromIntegral ((length cs - 1) `div` 2)
-- | Unscaled type 1 DCT: doubles the two endpoint samples before applying
-- 'dct' (length-2 inputs are handled directly).
dctu [x0,x1] = [x0+x1,x0-x1]
dctu xs = dct (eDouble xs)
   where eDouble (x:xs) = 2*x : t xs
         -- double the last element, keep the interior unchanged
         t [x] = [2*x]
         t (x:xs) = x : t xs
-- | Inverse of 'dctu': halves the two endpoint samples of the 'idct'
-- result.
-- NOTE(review): the first argument @d@ is unused in both equations —
-- presumably kept for interface compatibility; confirm with callers.
idctu d [c0,c1] = [(c0+c1)/2,(c0-c1)/2]
idctu d xs = eHalve (idct xs)
   where eHalve (x:xs) = x/2 : t xs
         -- halve the last element, keep the interior unchanged
         t [x] = [x/2]
         t (x:xs) = x : t xs
-- | Discrete sine transform (for lengths 2^n - 1): odd-extend the input
-- with zero endpoints, take the FFT, and keep the (halved, negated)
-- imaginary parts of bins 1..length as.
dst as = (map ((/2) . negate . imagPart) . tail . take (length as+1) . fft . map (:+ 0)) as'
   where as' = 0 : as ++ 0 : map negate (reverse as)
-- | Inverse DST: the DST is self-inverse up to a scale factor of
-- (length+1)/2 (the (/) on lists comes from ListNumSyntax).
idst cs = dst cs / repeat n'
  where n' = fromIntegral ((length cs + 1) `div` 2)
--instance VarPrec a => VarPrec (Complex a) where
-- precB p (a :+ b) = precB p a :+ precB p b
-- All the instances below are dubious and present only since Haskell requires
-- (for dubious reasons) that inorder to define an instance of Num for Complex a,
-- a must be an instance of RealFloat. And we do want to define the FFT...
-- These instances exist only because Haskell requires 'RealFloat a' for
-- 'Num (Complex a)' (see the comment above).  Every 'RealFloat' method is
-- left 'undefined' and must never be called.
instance Precision p => Real (Rounded p) where
   toRational (R x) = toRational x
instance Precision p => RealFrac (Rounded p) where
   properFraction (R x) =(i,r y)
     where (i,y) = properFraction x
instance Precision p => RealFloat (Rounded p) where
   floatRadix = undefined
   floatDigits = undefined
   floatRange = undefined
   decodeFloat = undefined
   encodeFloat = undefined
   isNaN = undefined
   isInfinite = undefined
   isDenormalized = undefined
   isNegativeZero = undefined
   isIEEE = undefined
| sydow/ireal | applications/FFTRounded.hs | bsd-3-clause | 2,824 | 0 | 15 | 692 | 1,136 | 606 | 530 | 58 | 2 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DataKinds #-}
-- | DRM/KMS Internals
--
-- Bindings with C structures and IOCTLs
module Haskus.System.Linux.Internals.Graphics
(
-- * Mode
ModeType (..)
, ModeTypes
, ModeFlag (..)
, ModeFlags
, Stereo3D (..)
, ModeFlagsStereo3D
, PowerState(..)
, ScalingMode(..)
, AspectMode(..)
, DitheringMode(..)
, DirtyMode(..)
, StructMode (..)
, emptyStructMode
-- * Resources
, StructCardRes (..)
-- * Controller
, StructController (..)
-- * Plane
, ModeFieldPresent (..)
, ModeFieldPresents
, StructSetPlane (..)
, StructGetPlane (..)
, StructGetPlaneRes (..)
-- * Encoder
, EncoderType (..)
, StructGetEncoder (..)
-- * Connector
, SubConnectorType (..)
, ConnectorType (..)
, StructGetConnector (..)
-- * Properties
, PropertyTypeType (..)
, getPropertyTypeType
, isPending
, isImmutable
, isAtomic
, StructPropertyEnum (..)
, StructGetProperty (..)
, StructSetProperty (..)
, StructGetObjectProperties (..)
, StructSetObjectProperty (..)
, StructGetBlob (..)
-- * Framebuffer
, FrameBufferFlag (..)
, FrameBufferFlags
, StructFrameBufferCommand (..)
, DirtyAnnotation (..)
, dirtyMaxClips
, StructFrameBufferDirty (..)
, StructModeCommand (..) -- move
-- * Cursor
, CursorFlag (..)
, CursorFlags
, StructCursor (..)
, StructCursor2 (..)
-- * Gamma look-up table
, StructControllerLut (..)
-- * Page flipping
, PageFlipFlag (..)
, PageFlipFlags
, StructPageFlip (..)
, StructPageFlipTarget (..)
-- * Generic (dumb) buffer
, StructCreateDumb (..)
, StructMapDumb (..)
, StructDestroyDumb (..)
-- * Atomic
, AtomicFlag (..)
, AtomicFlags
, StructAtomic (..)
-- * Blob
, StructCreateBlob (..)
, StructDestroyBlob (..)
-- * Generic
, Clip (..)
-- * Capabilities
, Capability (..)
, StructGetCap (..)
, ClientCapability (..)
, StructSetClientCap (..)
-- * Prime
, StructPrimeHandle (..)
, PrimeFlag (..)
-- * IOCTLs
, ioctlGetCapabilities
, ioctlSetClientCapability
, ioctlGetResources
, ioctlGetController
, ioctlSetController
, ioctlGetGamma
, ioctlSetGamma
, ioctlGetEncoder
, ioctlGetConnector
, ioctlGetProperty
, ioctlSetProperty
, ioctlGetObjectProperties
, ioctlSetObjectProperty
, ioctlGetBlob
, ioctlPageFlip
, ioctlDirtyFrameBuffer
, ioctlCreateHostBuffer
, ioctlMapHostBuffer
, ioctlDestroyHostBuffer
, ioctlGetPlaneResources
, ioctlGetPlane
, ioctlSetPlane
, ioctlAddFrameBuffer
, ioctlRemoveFrameBuffer
, ioctlCursor
, ioctlAtomic
, ioctlCreateBlob
, ioctlDestroyBlob
-- * Events
, DRMEventHeader (..)
, EventType (..)
, toEventType
, DRMEvent (..)
-- * Rotation/reflection
, Rotation (..)
, Reflection (..)
, RotateReflect
-- * SubPixel order
, SubPixel (..)
)
where
import Haskus.System.Linux.Ioctl
import Haskus.System.Linux.Handle
import Haskus.System.Linux.Internals.Error
import Haskus.System.Linux.Graphics.PixelFormat
import Haskus.Format.Binary.BitSet as BitSet
import Haskus.Format.Binary.Vector as Vector
import Haskus.Format.Binary.BitField
import Haskus.Format.Binary.Enum
import Haskus.Format.Binary.FixedPoint
import Haskus.Format.Binary.Word
import Haskus.Format.Binary.Bits
import Haskus.Format.Binary.Storable
import Haskus.Format.String
import Haskus.Utils.Variant.Flow
import Haskus.Utils.Monad
import Haskus.Utils.Types.Generics (Generic)
-- =============================================================
-- From linux/include/uapi/drm/drm_mode.h
-- =============================================================
-----------------------------------------------------------------------------
-- Mode
-----------------------------------------------------------------------------
-- | Mode type
--
-- Bit positions come from the 'Enum'/'CBitSet' derivation, so the
-- constructor order presumably mirrors the kernel's DRM_MODE_TYPE_*
-- flags in drm_mode.h — keep it in sync with that header.
data ModeType
   = ModeTypeBuiltin
   | ModeTypeClockC
   | ModeTypeControllerC
   | ModeTypePreferred
   | ModeTypeDefault
   | ModeTypeUserDef
   | ModeTypeDriver
   deriving (Show,Enum,CBitSet)
-- | Set of mode-type flags packed into a Word32
type ModeTypes = BitSet Word32 ModeType
-- | Video mode flags
--
-- Bit positions come from the 'Enum'/'CBitSet' derivation, so the
-- constructor order presumably mirrors the kernel's DRM_MODE_FLAG_*
-- flags in drm_mode.h — keep it in sync with that header.
data ModeFlag
   = ModeFlagPHSync
   | ModeFlagNHSync
   | ModeFlagPVSync
   | ModeFlagNVSync
   | ModeFlagInterlace
   | ModeFlagDoubleScan
   | ModeFlagCSync
   | ModeFlagPCSync
   | ModeFlagNCSync
   | ModeFlagHSkew
   | ModeFlagBroadCast
   | ModeFlagPixMux
   | ModeFlagDoubleClock
   | ModeFlagClockDiv2
   deriving (Show,Enum,CBitSet)
-- | Set of video mode flags packed into a Word32
type ModeFlags = BitSet Word32 ModeFlag
-- | 3D mode (stereoscopic frame layout)
data Stereo3D
   = Stereo3DNone
   | Stereo3DFramePacking
   | Stereo3DFieldAlternative
   | Stereo3DLineAlternative
   | Stereo3DSideBySideFull
   | Stereo3DLDepth
   | Stereo3DLDepthGFXGFXDepth
   | Stereo3DTopAndBottom
   | Stereo3DSideBySideHalf
   deriving (Show,Enum,CEnum)
-- | Mode flags word: one 32-bit field split into an 18-bit "stereo3d"
-- selector and a 14-bit "flags" part (the plain 'ModeFlags')
type ModeFlagsStereo3D = BitFields Word32
  '[ BitField 18 "stereo3d" (EnumField Word32 Stereo3D)
   , BitField 14 "flags"    ModeFlags
   ]
-- | DPMS flags
data PowerState
= PowerOn
| PowerStandBy
| PowerSuspend
| PowerOff
deriving (Show,Eq,Enum)
-- | Scaling mode
data ScalingMode
= ScaleNone -- ^ Unmodified timing (display or software can still scale)
| ScaleFullScreen -- ^ Full screen, ignore aspect
| ScaleCenter -- ^ Centered, no scaling
| ScaleAspect -- ^ Full screen, preserve aspect
deriving (Show,Eq,Enum)
-- | Aspect mode
data AspectMode
= AspectNone
| Aspect4_3
| Aspect16_9
deriving(Show,Eq,Enum)
-- | Dithering mode
data DitheringMode
= DitheringOff
| DitheringOn
| DitheringAuto
deriving (Show,Eq,Enum)
-- | Dirty mode
data DirtyMode
= DirtyOff
| DirtyOn
| DirtyAnnotate
deriving (Show,Eq,Enum)
-- | drm_mode_modeinfo
data StructMode = StructMode
{ miClock :: {-# UNPACK #-} !Word32
, miHDisplay :: {-# UNPACK #-} !Word16
, miHSyncStart :: {-# UNPACK #-} !Word16
, miHSyncEnd :: {-# UNPACK #-} !Word16
, miHTotal :: {-# UNPACK #-} !Word16
, miHSkew :: {-# UNPACK #-} !Word16
, miVDisplay :: {-# UNPACK #-} !Word16
, miVSyncStart :: {-# UNPACK #-} !Word16
, miVSyncEnd :: {-# UNPACK #-} !Word16
, miVTotal :: {-# UNPACK #-} !Word16
, miVScan :: {-# UNPACK #-} !Word16
, miVRefresh :: {-# UNPACK #-} !Word32
, miFlags :: {-# UNPACK #-} !ModeFlagsStereo3D
, miType :: {-# UNPACK #-} !ModeTypes
, miName :: {-# UNPACK #-} !(CStringBuffer 32)
} deriving (Generic)
instance Storable StructMode
-- | A 'StructMode' with every field zeroed: null timings, no flags, no
-- type bits and an empty name
emptyStructMode :: StructMode
emptyStructMode = StructMode
   { miClock      = 0
   , miHDisplay   = 0
   , miHSyncStart = 0
   , miHSyncEnd   = 0
   , miHTotal     = 0
   , miHSkew      = 0
   , miVDisplay   = 0
   , miVSyncStart = 0
   , miVSyncEnd   = 0
   , miVTotal     = 0
   , miVScan      = 0
   , miVRefresh   = 0
   , miFlags      = BitFields 0
   , miType       = BitSet.empty
   , miName       = emptyCStringBuffer
   }
-----------------------------------------------------------------------------
-- Resources
-----------------------------------------------------------------------------
-- | drm_mode_card_res
data StructCardRes = StructCardRes
   { csFbIdPtr    :: {-# UNPACK #-} !Word64 -- ^ User-space address of the framebuffer ID array
   , csCrtcIdPtr  :: {-# UNPACK #-} !Word64 -- ^ User-space address of the CRTC ID array
   , csConnIdPtr  :: {-# UNPACK #-} !Word64 -- ^ User-space address of the connector ID array
   , csEncIdPtr   :: {-# UNPACK #-} !Word64 -- ^ User-space address of the encoder ID array
   , csCountFbs   :: {-# UNPACK #-} !Word32 -- ^ Number of entries in/behind csFbIdPtr
   , csCountCrtcs :: {-# UNPACK #-} !Word32
   , csCountConns :: {-# UNPACK #-} !Word32
   , csCountEncs  :: {-# UNPACK #-} !Word32
   , csMinWidth   :: {-# UNPACK #-} !Word32
   , csMaxWidth   :: {-# UNPACK #-} !Word32
   , csMinHeight  :: {-# UNPACK #-} !Word32
   , csMaxHeight  :: {-# UNPACK #-} !Word32
   } deriving (Generic,Storable)
-----------------------------------------------------------------------------
-- Controller
-----------------------------------------------------------------------------
-- | drm_mode_crtc
data StructController = StructController
   { contSetConnPtr :: {-# UNPACK #-} !Word64 -- ^ Pointer to an array of connector IDs to attach
   , contConnCount  :: {-# UNPACK #-} !Word32 -- ^ Number of connector IDs behind contSetConnPtr
   , contID         :: {-# UNPACK #-} !Word32 -- ^ CRTC identifier
   , contFbID       :: {-# UNPACK #-} !Word32 -- ^ Attached framebuffer identifier
   , contFbX        :: {-# UNPACK #-} !Word32 -- ^ X position on the framebuffer
   , contFbY        :: {-# UNPACK #-} !Word32 -- ^ Y position on the framebuffer
   , contGammaSize  :: {-# UNPACK #-} !Word32
   , contModeValid  :: {-# UNPACK #-} !Word32 -- ^ Non-zero when contModeInfo is meaningful
   , contModeInfo   :: {-# UNPACK #-} !StructMode
   } deriving (Generic,Storable)
-----------------------------------------------------------------------------
-- Plane
-----------------------------------------------------------------------------
data ModeFieldPresent
   = PresentTopField
   | PresentBottomField
   deriving (Show,Enum,CBitSet)

-- | Set of present fields
type ModeFieldPresents = BitSet Word32 ModeFieldPresent

-- | drm_mode_set_plane
--
-- Planes blend with or override other bits on the CRTC
data StructSetPlane = StructSetPlane
   { spPlaneId :: {-# UNPACK #-} !Word32
   , spCrtcId  :: {-# UNPACK #-} !Word32
   , spFbId    :: {-# UNPACK #-} !Word32 -- ^ Frame buffer contains surface format type
   , spFlags   :: {-# UNPACK #-} !ModeFieldPresents
   , spCrtcX   :: {-# UNPACK #-} !Int32 -- ^ Signed dest location allows it to be partially off screen
   , spCrtcY   :: {-# UNPACK #-} !Int32
   , spCrtcW   :: {-# UNPACK #-} !Word32
   , spCrtcH   :: {-# UNPACK #-} !Word32
   -- source values are 16.16 fixed point
   , spSrcX    :: {-# UNPACK #-} !(FixedPoint Word32 16 16)
   , spSrcY    :: {-# UNPACK #-} !(FixedPoint Word32 16 16)
   , spSrcH    :: {-# UNPACK #-} !(FixedPoint Word32 16 16)
   , spSrcW    :: {-# UNPACK #-} !(FixedPoint Word32 16 16)
   } deriving (Generic,Storable)
-- | drm_mode_get_plane
data StructGetPlane = StructGetPlane
   { gpPlaneId       :: {-# UNPACK #-} !Word32
   , gpCrtcId        :: {-# UNPACK #-} !Word32
   , gpFbId          :: {-# UNPACK #-} !Word32
   , gpPossibleCrtcs :: {-# UNPACK #-} !(BitSet Word32 Int) -- ^ Bitmask of CRTC indexes usable with this plane
   , gpGammaSize     :: {-# UNPACK #-} !Word32
   , gpCountFmtTypes :: {-# UNPACK #-} !Word32 -- ^ Number of supported formats behind gpFormatTypePtr
   , gpFormatTypePtr :: {-# UNPACK #-} !Word64 -- ^ User-space address of the format array
   } deriving (Generic,Storable)

-- | drm_mode_get_plane_res
data StructGetPlaneRes = StructGetPlaneRes
   { gprsPlaneIdPtr  :: {-# UNPACK #-} !Word64 -- ^ User-space address of the plane ID array
   , gprsCountPlanes :: {-# UNPACK #-} !Word32
   } deriving (Generic,Storable)
-----------------------------------------------------------------------------
-- Encoder
-----------------------------------------------------------------------------
-- | Type of the encoder
data EncoderType
   = EncoderTypeNone
   | EncoderTypeDAC     -- ^ for VGA and analog on DVI-I/DVI-A
   | EncoderTypeTMDS    -- ^ for DVI, HDMI and (embedded) DisplayPort
   | EncoderTypeLVDS    -- ^ for display panels
   | EncoderTypeTVDAC   -- ^ for TV output (Composite, S-Video, Component, SCART)
   | EncoderTypeVirtual -- ^ for virtual machine display
   | EncoderTypeDSI
   | EncoderTypeDPMST
   deriving (Eq,Ord,Show,Enum,CEnum)

-- | drm_mode_get_encoder
data StructGetEncoder = StructGetEncoder
   { geEncoderId      :: {-# UNPACK #-} !Word32
   , geEncoderType    :: {-# UNPACK #-} !(EnumField Word32 EncoderType)
   , geCrtcId         :: {-# UNPACK #-} !Word32
   , gePossibleCrtcs  :: {-# UNPACK #-} !(BitSet Word32 Int) -- ^ Valid controller indexes
   , gePossibleClones :: {-# UNPACK #-} !(BitSet Word32 Int) -- ^ Valid clone encoder indexes
   } deriving (Generic,Storable)
-- | This is for connectors with multiple signal types
data SubConnectorType
   = SubConnectorTypeUnknown
   | SubConnectorTypeDVID
   | SubConnectorTypeDVIA
   | SubConnectorTypeComposite
   | SubConnectorTypeSVIDEO
   | SubConnectorTypeComponent
   -- NOTE: no automatic Enum deriving: the C values are sparse (see the
   -- hand-written Enum instance below)
   | SubConnectorTypeSCART
   deriving (Show,Eq)
-- Try to match ConnectorType as closely as possible...
instance Enum SubConnectorType where
   fromEnum t = maybe (error "Unknown sub-connector type") id
                      (lookup t subConnectorTypeValues)
   toEnum n   = maybe (error "Unknown sub-connector type") id
                      (lookup n [ (v,t) | (t,v) <- subConnectorTypeValues ])

-- | C enum value of each sub-connector type (total: one entry per
-- constructor, so 'fromEnum' can never hit the error case)
subConnectorTypeValues :: [(SubConnectorType,Int)]
subConnectorTypeValues =
   [ (SubConnectorTypeUnknown  , 0)
   , (SubConnectorTypeDVID     , 3)
   , (SubConnectorTypeDVIA     , 4)
   , (SubConnectorTypeComposite, 5)
   , (SubConnectorTypeSVIDEO   , 6)
   , (SubConnectorTypeComponent, 8)
   , (SubConnectorTypeSCART    , 9)
   ]
-- | Connector type
data ConnectorType
   = ConnectorTypeUnknown
   | ConnectorTypeVGA
   | ConnectorTypeDVII
   | ConnectorTypeDVID
   | ConnectorTypeDVIA
   | ConnectorTypeComposite
   | ConnectorTypeSVIDEO
   | ConnectorTypeLVDS
   | ConnectorTypeComponent
   | ConnectorType9PinDIN
   | ConnectorTypeDisplayPort
   | ConnectorTypeHDMIA
   | ConnectorTypeHDMIB
   | ConnectorTypeTV
   | ConnectorTypeeDP
   | ConnectorTypeVirtual
   | ConnectorTypeDSI
   deriving (Eq, Ord, Enum, CEnum)

-- Human-readable connector names (hand-written to get the usual
-- hyphenated spellings such as "DVI-I" and "HDMI-A")
instance Show ConnectorType where
   show x = case x of
      ConnectorTypeUnknown     -> "Unknown"
      ConnectorTypeVGA         -> "VGA"
      ConnectorTypeDVII        -> "DVI-I"
      ConnectorTypeDVID        -> "DVI-D"
      ConnectorTypeDVIA        -> "DVI-A"
      ConnectorTypeComposite   -> "Composite"
      ConnectorTypeSVIDEO      -> "SVIDEO"
      ConnectorTypeLVDS        -> "LVDS"
      ConnectorTypeComponent   -> "Component"
      ConnectorType9PinDIN     -> "9PinDIN"
      ConnectorTypeDisplayPort -> "DisplayPort"
      ConnectorTypeHDMIA       -> "HDMI-A"
      ConnectorTypeHDMIB       -> "HDMI-B"
      ConnectorTypeTV          -> "TV"
      ConnectorTypeeDP         -> "eDP"
      ConnectorTypeVirtual     -> "Virtual"
      ConnectorTypeDSI         -> "DSI"
-- | drm_mode_get_connector
data StructGetConnector = StructGetConnector
   { connEncodersPtr      :: {-# UNPACK #-} !Word64 -- ^ User-space address of the encoder ID array
   , connModesPtr         :: {-# UNPACK #-} !Word64 -- ^ User-space address of the mode array
   , connPropsPtr         :: {-# UNPACK #-} !Word64 -- ^ User-space address of the property ID array
   , connPropValuesPtr    :: {-# UNPACK #-} !Word64 -- ^ User-space address of the property value array
   , connModesCount       :: {-# UNPACK #-} !Word32
   , connPropsCount       :: {-# UNPACK #-} !Word32
   , connEncodersCount    :: {-# UNPACK #-} !Word32
   , connEncoderID_       :: {-# UNPACK #-} !Word32 -- ^ current encoder
   , connConnectorID_     :: {-# UNPACK #-} !Word32 -- ^ ID
   , connConnectorType_   :: {-# UNPACK #-} !(EnumField Word32 ConnectorType)
   , connConnectorTypeID_ :: {-# UNPACK #-} !Word32
   , connConnection_      :: {-# UNPACK #-} !Word32
   , connWidth_           :: {-# UNPACK #-} !Word32 -- ^ HxW in millimeters
   , connHeight_          :: {-# UNPACK #-} !Word32
   , connSubPixel_        :: {-# UNPACK #-} !(EnumField Word32 SubPixel)
   } deriving (Generic,Storable)
-----------------------------------------------------------------------------
-- Properties
-----------------------------------------------------------------------------
-- | Type of the property
data PropertyTypeType
   = PropTypeRange
   | PropTypeEnum        -- ^ Enumerated type with text strings
   | PropTypeBlob
   | PropTypeBitmask     -- ^ Bitmask of enumerated types
   | PropTypeObject
   -- no Enum deriving: the C encoding is a sparse bit pattern decoded
   -- by 'getPropertyTypeType'
   | PropTypeSignedRange
   deriving (Eq,Ord,Show)
getPropertyTypeType :: StructGetProperty -> PropertyTypeType
getPropertyTypeType x
      | t == 2    = PropTypeRange
      | t == 8    = PropTypeEnum
      | t == 16   = PropTypeBlob
      | t == 32   = PropTypeBitmask
      | t == 64   = PropTypeObject
      | t == 128  = PropTypeSignedRange
      | otherwise = error "Unknown property type"
   where
      -- the type bits are interleaved with the Pending (bit 0) and
      -- Immutable (bit 2) flags, hence the 0xFA mask
      t = gpsFlags x .&. 0xFA
-- | Is the property pending? (flag bit 0)
isPending :: StructGetProperty -> Bool
isPending = (`testBit` 0) . gpsFlags

-- | Is the property immutable? (flag bit 2)
isImmutable :: StructGetProperty -> Bool
isImmutable = (`testBit` 2) . gpsFlags

-- | Is the property atomic? (flag bit 31)
isAtomic :: StructGetProperty -> Bool
isAtomic = (`testBit` 31) . gpsFlags
-- | drm_mode_property_enum
data StructPropertyEnum = StructPropertyEnum
   { peValue :: {-# UNPACK #-} !Word64
   , peName  :: {-# UNPACK #-} !(CStringBuffer 32)
   } deriving (Generic,Storable)

-- | drm_mode_get_property
data StructGetProperty = StructGetProperty
   { gpsValuesPtr   :: {-# UNPACK #-} !Word64 -- ^ Values or blob lengths
   , gpsEnumBlobPtr :: {-# UNPACK #-} !Word64 -- ^ Enum or blob id ptrs
   , gpsPropId      :: {-# UNPACK #-} !Word32
   , gpsFlags       :: {-# UNPACK #-} !Word32 -- ^ See 'getPropertyTypeType', 'isPending', 'isImmutable', 'isAtomic'
   , gpsName        :: {-# UNPACK #-} !(CStringBuffer 32)
   , gpsCountValues :: {-# UNPACK #-} !Word32
   , gpsCountEnum   :: {-# UNPACK #-} !Word32
   } deriving (Generic,Storable)

-- | drm_mode_set_property
data StructSetProperty = StructSetProperty
   { spsValue  :: {-# UNPACK #-} !Word64
   , spsPropId :: {-# UNPACK #-} !Word32
   , spsConnId :: {-# UNPACK #-} !Word32
   } deriving (Generic,Storable)

-- | drm_mode_obj_get_properties
data StructGetObjectProperties = StructGetObjectProperties
   { gopPropsPtr   :: {-# UNPACK #-} !Word64 -- ^ User-space address of the property ID array
   , gopValuesPtr  :: {-# UNPACK #-} !Word64 -- ^ User-space address of the property value array
   , gopCountProps :: {-# UNPACK #-} !Word32
   , gopObjId      :: {-# UNPACK #-} !Word32
   , gopObjType    :: {-# UNPACK #-} !Word32
   } deriving (Generic,Storable)

-- | drm_mode_obj_set_property
data StructSetObjectProperty = StructSetObjectProperty
   { sopValue   :: {-# UNPACK #-} !Word64
   , sopPropId  :: {-# UNPACK #-} !Word32
   , sopObjId   :: {-# UNPACK #-} !Word32
   , sopObjType :: {-# UNPACK #-} !Word32
   } deriving (Generic,Storable)

-- | drm_mode_get_blob
data StructGetBlob = StructGetBlob
   { gbBlobId :: {-# UNPACK #-} !Word32
   , gbLength :: {-# UNPACK #-} !Word32
   , gbData   :: {-# UNPACK #-} !Word64 -- ^ User-space address the blob contents are copied to
   } deriving (Generic,Storable)
-----------------------------------------------------------------------------
-- Framebuffer
-----------------------------------------------------------------------------
-- we don't use drm_mode_fb_cmd as we have drm_mode_fb_cmd2
-- | Frame buffer flags
data FrameBufferFlag
   = FrameBufferInterlaced    -- ^ Interlaced frame buffer
   | FrameBufferUseModifiers  -- ^ Enable modifiers
   deriving (Show,Eq,Enum,CBitSet)

-- | Set of frame buffer flags
type FrameBufferFlags = BitSet Word32 FrameBufferFlag

-- | Data matching the C structure drm_mode_fb_cmd2
data StructFrameBufferCommand = StructFrameBufferCommand
   { fc2FbId        :: {-# UNPACK #-} !Word32
   , fc2Width       :: {-# UNPACK #-} !Word32
   , fc2Height      :: {-# UNPACK #-} !Word32
   , fc2PixelFormat :: {-# UNPACK #-} !PixelFormat
   , fc2Flags       :: {-# UNPACK #-} !FrameBufferFlags
   -- up to 4 planes per framebuffer (e.g. planar YUV formats)
   , fc2Handles     :: {-# UNPACK #-} !(Vector 4 Word32)
   , fc2Pitches     :: {-# UNPACK #-} !(Vector 4 Word32) -- ^ Pitch for each plane
   , fc2Offsets     :: {-# UNPACK #-} !(Vector 4 Word32) -- ^ Offset of each plane
   , fc2Modifiers   :: {-# UNPACK #-} !(Vector 4 Word64) -- ^ tiling, compressed
   } deriving (Generic,Storable)

-- | Mark a region of a framebuffer as dirty.
--
-- Some hardware does not automatically update display contents as a hardware or
-- software draw to a framebuffer. This ioctl allows userspace to tell the
-- kernel and the hardware what regions of the framebuffer have changed.
--
-- The kernel or hardware is free to update more then just the region specified
-- by the clip rects. The kernel or hardware may also delay and/or coalesce
-- several calls to dirty into a single update.
--
-- Userspace may annotate the updates, the annotates are a promise made by the
-- caller that the change is either a copy of pixels or a fill of a single color
-- in the region specified.
--
-- If the DirtyCopy mode is used then the clip rects are paired as (src,dst).
-- The width and height of each one of the pairs must match.
--
-- If the DirtyFill mode is used the caller promises that the region specified
-- of the clip rects is filled completely with a single color as given in the
-- color argument.
data DirtyAnnotation
   = Dirty     [Clip]
   | DirtyCopy [(Clip,Clip)]
   | DirtyFill Word32 [Clip]
   deriving (Show,Eq)

-- | Maximum number of clip rectangles accepted by the dirty-fb ioctl
dirtyMaxClips :: Word32
dirtyMaxClips = 256

-- | drm_mode_fb_dirty_cmd
data StructFrameBufferDirty = StructFrameBufferDirty
   { fdFbId     :: {-# UNPACK #-} !Word32
   , fdFlags    :: {-# UNPACK #-} !Word32
   , fdColor    :: {-# UNPACK #-} !Word32 -- ^ Fill color (DirtyFill mode)
   , fdNumClips :: {-# UNPACK #-} !Word32
   , fdClipsPtr :: {-# UNPACK #-} !Word64 -- ^ User-space address of the 'Clip' array
   } deriving (Generic,Storable)

-- | drm_mode_mode_cmd
data StructModeCommand = StructModeCommand
   { mcConnId :: {-# UNPACK #-} !Word32
   , mcMode   :: {-# UNPACK #-} !StructMode
   } deriving (Generic,Storable)
-----------------------------------------------------------------------------
-- Cursor
-----------------------------------------------------------------------------
-- | Depending on the value in flags different members are used.
--
-- CursorFlagBO uses
-- crtcId
-- width
-- height
-- handle - if 0 turns the cursor off
--
-- CursorFlagMove uses
-- crtcId
-- x
-- y
data CursorFlag
   = CursorFlagBO
   | CursorFlagMove
   deriving (Eq,Enum,Show,CBitSet)

-- | Set of cursor flags
type CursorFlags = BitSet Word32 CursorFlag

-- | drm_mode_cursor
data StructCursor = StructCursor
   { curFlags  :: {-# UNPACK #-} !CursorFlags
   , curCrtcId :: {-# UNPACK #-} !Word32
   , curX      :: {-# UNPACK #-} !Int32
   , curY      :: {-# UNPACK #-} !Int32
   , curWidth  :: {-# UNPACK #-} !Word32
   , curHeight :: {-# UNPACK #-} !Word32
   , curHandle :: {-# UNPACK #-} !Word32 -- ^ Buffer handle; 0 turns the cursor off
   } deriving (Generic,Storable)

-- | drm_mode_cursor2: like 'StructCursor' plus the cursor hotspot
data StructCursor2 = StructCursor2
   { cur2Flags  :: {-# UNPACK #-} !CursorFlags
   , cur2CrtcId :: {-# UNPACK #-} !Word32
   , cur2X      :: {-# UNPACK #-} !Int32
   , cur2Y      :: {-# UNPACK #-} !Int32
   , cur2Width  :: {-# UNPACK #-} !Word32
   , cur2Height :: {-# UNPACK #-} !Word32
   , cur2Handle :: {-# UNPACK #-} !Word32
   , cur2HotX   :: {-# UNPACK #-} !Int32 -- ^ Hotspot X offset within the cursor image
   , cur2HotY   :: {-# UNPACK #-} !Int32 -- ^ Hotspot Y offset within the cursor image
   } deriving (Generic,Storable)
-----------------------------------------------------------------------------
-- Gamma look-up table
-----------------------------------------------------------------------------
-- | drm_mode_crtc_lut
data StructControllerLut = StructControllerLut
   { clsCrtcId    :: {-# UNPACK #-} !Word32
   , clsGammaSize :: {-# UNPACK #-} !Word32
   -- user-space addresses of the three gamma ramps
   , clsRed       :: {-# UNPACK #-} !Word64
   , clsGreen     :: {-# UNPACK #-} !Word64
   , clsBlue      :: {-# UNPACK #-} !Word64
   } deriving (Generic,Storable)
-----------------------------------------------------------------------------
-- Page flipping
-----------------------------------------------------------------------------
-- | Page flip flags
data PageFlipFlag
   = PageFlipEvent           -- ^ Send back a vblank event when the flip is done
   | PageFlipAsync           -- ^ Flip as soon as possible, not waiting for vblank (may tear)
   | PageFlipTargetAbsolute
   | PageFlipTargetRelative
   deriving (Show,Eq,Enum,CBitSet)

-- | Set of page flip flags
type PageFlipFlags = BitSet Word32 PageFlipFlag
--
-- Request a page flip on the specified crtc.
--
-- This ioctl will ask KMS to schedule a page flip for the specified
-- crtc. Once any pending rendering targeting the specified fb (as of
-- ioctl time) has completed, the crtc will be reprogrammed to display
-- that fb after the next vertical refresh. The ioctl returns
-- immediately, but subsequent rendering to the current fb will block
-- in the execbuffer ioctl until the page flip happens. If a page
-- flip is already pending as the ioctl is called, EBUSY will be
-- returned.
--
-- Flag DRM_MODE_PAGE_FLIP_EVENT requests that drm sends back a vblank
-- event (see drm.h: struct drm_event_vblank) when the page flip is
-- done. The user_data field passed in with this ioctl will be
-- returned as the user_data field in the vblank event struct.
--
-- Flag DRM_MODE_PAGE_FLIP_ASYNC requests that the flip happen
-- 'as soon as possible', meaning that it not delay waiting for vblank.
-- This may cause tearing on the screen.
--
-- The reserved field must be zero.
-- | drm_mode_crtc_page_flip
data StructPageFlip = StructPageFlip
   { pfCrtcId   :: {-# UNPACK #-} !Word32
   , pfFbId     :: {-# UNPACK #-} !Word32
   , pfFlags    :: {-# UNPACK #-} !PageFlipFlags
   , pfReserved :: {-# UNPACK #-} !Word32 -- ^ Must be zero
   , pfUserData :: {-# UNPACK #-} !Word64 -- ^ Returned in the vblank event when PageFlipEvent is set
   } deriving (Generic,Storable)
--
-- Request a page flip on the specified crtc.
--
-- Same as struct drm_mode_crtc_page_flip, but supports new flags and
-- re-purposes the reserved field:
--
-- The sequence field must be zero unless either of the
-- DRM_MODE_PAGE_FLIP_TARGET_ABSOLUTE/RELATIVE flags is specified. When
-- the ABSOLUTE flag is specified, the sequence field denotes the absolute
-- vblank sequence when the flip should take effect. When the RELATIVE
-- flag is specified, the sequence field denotes the relative (to the
-- current one when the ioctl is called) vblank sequence when the flip
-- should take effect. NOTE: DRM_IOCTL_WAIT_VBLANK must still be used to
-- make sure the vblank sequence before the target one has passed before
-- calling this ioctl. The purpose of the
-- DRM_MODE_PAGE_FLIP_TARGET_ABSOLUTE/RELATIVE flags is merely to clarify
-- the target for when code dealing with a page flip runs during a
-- vertical blank period.
-- drm_mode_crtc_page_flip_target
data StructPageFlipTarget = StructPageFlipTarget
   { pftCrtcId   :: {-# UNPACK #-} !Word32
   , pftFbId     :: {-# UNPACK #-} !Word32
   , pftFlags    :: {-# UNPACK #-} !Word32
   , pftSequence :: {-# UNPACK #-} !Word32 -- ^ Target vblank sequence (absolute or relative, see flags)
   , pftUserData :: {-# UNPACK #-} !Word64
   } deriving (Show,Generic,Storable)
-----------------------------------------------------------------------------
-- Generic buffer
-----------------------------------------------------------------------------
-- | drm_mode_create_dumb
data StructCreateDumb = StructCreateDumb
   { cdHeight :: {-# UNPACK #-} !Word32
   , cdWidth  :: {-# UNPACK #-} !Word32
   , cdBPP    :: {-# UNPACK #-} !Word32 -- ^ Bits per pixel
   , cdFlags  :: {-# UNPACK #-} !Word32
   , cdHandle :: {-# UNPACK #-} !Word32 -- ^ Handle, pitch, size will be returned
   , cdPitch  :: {-# UNPACK #-} !Word32
   , cdSize   :: {-# UNPACK #-} !Word64
   } deriving (Show,Generic,Storable)

-- | drm_mode_map_dumb
data StructMapDumb = StructMapDumb
   { mdHandle :: {-# UNPACK #-} !Word32
   , mdPad    :: {-# UNPACK #-} !Word32  -- Padding field: not useful
   , mdOffset :: {-# UNPACK #-} !Word64  -- ^ Fake offset to use for subsequent mmap call
   } deriving (Show,Generic,Storable)

-- | drm_mode_destroy_dumb
newtype StructDestroyDumb = StructDestroyDumb
   { dbHandle :: Word32 -- ^ Dumb buffer handle
   } deriving (Generic,Storable)
-----------------------------------------------------------------------------
-- Atomic
-----------------------------------------------------------------------------
-- | Flags for the atomic state change
data AtomicFlag
   = AtomicFlagPageFlipEvent  -- ^ Generates a page-flip event
   | AtomicFlagPageFlipAsync  -- ^ Asynchronous page-flip, i.e. don't wait for v-blank (may not be supported)
   | AtomicFlagTestOnly       -- ^ Only test the config, don't commit it
   | AtomicFlagNonBlock       -- ^ Schedule an asynchronous commit (may not be supported)
   | AtomicFlagAllowModeset   -- ^ Allow full mode-setting. This flag is useful for devices such as tablets whose screen is often shutdown: we can use a degraded mode (scaled, etc.) for a while to save power and only perform the full modeset when the screen is reactivated.
   deriving (Show,Eq,Enum)

-- Hand-written CBitSet instance: the C flag bits are not contiguous
-- (bits 0-1, then 8-10)
instance CBitSet AtomicFlag where
   toBitOffset x = case x of
      AtomicFlagPageFlipEvent -> 0
      AtomicFlagPageFlipAsync -> 1
      AtomicFlagTestOnly      -> 8
      AtomicFlagNonBlock      -> 9
      AtomicFlagAllowModeset  -> 10
   fromBitOffset x = case x of
      0  -> AtomicFlagPageFlipEvent
      1  -> AtomicFlagPageFlipAsync
      8  -> AtomicFlagTestOnly
      9  -> AtomicFlagNonBlock
      10 -> AtomicFlagAllowModeset
      _  -> error "Unknown atomic flag"
-- | Set of atomic flags
type AtomicFlags = BitSet Word32 AtomicFlag

-- | drm_mode_atomic
data StructAtomic = StructAtomic
   { atomFlags         :: {-# UNPACK #-} !AtomicFlags
   , atomCountObjects  :: {-# UNPACK #-} !Word32
   , atomObjectsPtr    :: {-# UNPACK #-} !Word64 -- ^ User-space address of the object ID array
   , atomCountPropsPtr :: {-# UNPACK #-} !Word64 -- ^ User-space address of the per-object property-count array
   , atomPropsPtr      :: {-# UNPACK #-} !Word64 -- ^ User-space address of the property ID array
   , atomPropValuesPtr :: {-# UNPACK #-} !Word64 -- ^ User-space address of the property value array
   , atomReserved      :: {-# UNPACK #-} !Word64 -- ^ Must be zero
   , atomUserData      :: {-# UNPACK #-} !Word64
   } deriving (Generic,Storable)
-----------------------------------------------------------------------------
-- Blob
-----------------------------------------------------------------------------
-- | Create a new 'blob' data property, copying length bytes from data pointer,
-- and returning new blob ID.
data StructCreateBlob = StructCreateBlob
   { cbData   :: {-# UNPACK #-} !Word64 -- ^ Pointer to data to copy
   , cbLength :: {-# UNPACK #-} !Word32 -- ^ Length of data to copy
   , cbBlobID :: {-# UNPACK #-} !Word32 -- ^ Return: new property ID
   } deriving (Generic,Storable)

-- | Destroy a user-created blob property.
newtype StructDestroyBlob = StructDestroyBlob
   { dbBlobId :: Word32 -- ^ blob identifier
   } deriving (Generic,Storable)
-- =============================================================
-- From linux/include/uapi/drm/drm.h
-- =============================================================
-----------------------------------------------------------------------------
-- Generic
-----------------------------------------------------------------------------
-- | Clip rectangle (drm_clip_rect): corners at (x1,y1) and (x2,y2)
data Clip = Clip
   { clipX1 :: {-# UNPACK #-} !Word16
   , clipY1 :: {-# UNPACK #-} !Word16
   , clipX2 :: {-# UNPACK #-} !Word16
   , clipY2 :: {-# UNPACK #-} !Word16
   } deriving (Show,Eq,Generic,Storable)
-----------------------------------------------------------------------------
-- Capabilities
-----------------------------------------------------------------------------
-- | Capability
data Capability
   = CapHostBuffer               -- ^ Support generic buffers (i.e. not vendor specific)
   | CapVBlankHighController
   | CapGenericPreferredDepth
   | CapGenericPreferShadow
   | CapPrime
   | CapTimestampMonotonic
   | CapAsyncPageFlip            -- ^ Support asynchronous page-flipping
   | CapCursorWidth
   | CapCursorHeight
   | CapAddFrameBufferModifiers
   | CapPageFlipTarget
   deriving (Show,Eq,Enum)

-- Add 1 to the enum number to get the valid value
-- (the C DRM_CAP_* constants start at 1, not 0)
instance CEnum Capability where
   fromCEnum = (+1) . fromIntegral . fromEnum
   toCEnum   = toEnum . (\x -> x-1) . fromIntegral
-- | drm_get_cap
--
-- The CURSOR_WIDTH and CURSOR_HEIGHT capabilities return a valid widthxheight
-- combination for the hardware cursor. The intention is that a hardware
-- agnostic userspace can query a cursor plane size to use.
--
-- Note that the cross-driver contract is to merely return a valid size; drivers
-- are free to attach another meaning on top, eg. i915 returns the maximum plane
-- size.
--
data StructGetCap = StructGetCap
   { gcCapability :: {-# UNPACK #-} !(EnumField Word64 Capability)
   , gcValue      :: {-# UNPACK #-} !Word64 -- ^ Returned capability value
   } deriving (Generic,Storable)

-- | Client capabilities
data ClientCapability
   = ClientCapStereo3D        -- ^ if set, the DRM core will expose the stereo 3D capabilities of the monitor by advertising the supported 3D layouts in the flags of struct drm_mode_modeinfo (cf Stereo3D)
   | ClientCapUniversalPlanes -- ^ If set, the DRM core will expose all planes (overlay, primary, and cursor) to userspace.
   | ClientCapAtomic          -- ^ If set, the DRM core will expose atomic properties to userspace
   deriving (Show,Eq,Enum)

-- Add 1 to the enum number to get the valid value
-- (the C DRM_CLIENT_CAP_* constants start at 1, not 0)
instance CEnum ClientCapability where
   fromCEnum = (+1) . fromIntegral . fromEnum
   toCEnum   = toEnum . (\x -> x-1) . fromIntegral

-- | drm_set_client_cap
data StructSetClientCap = StructSetClientCap
   { sccCapability :: {-# UNPACK #-} !(EnumField Word64 ClientCapability)
   , sccValue      :: {-# UNPACK #-} !Word64
   } deriving (Generic,Storable)
data PrimeFlag
   = PrimeFlagReadWrite
   | PrimeFlagCloseOnExec
   deriving (Show,Eq,CBitSet)

-- Reuse the bit values of the corresponding handle flags
instance Enum PrimeFlag where
   fromEnum PrimeFlagReadWrite   = fromEnum HandleReadWrite
   fromEnum PrimeFlagCloseOnExec = fromEnum HandleCloseOnExec
   toEnum x = case toEnum x of
      HandleReadWrite   -> PrimeFlagReadWrite
      HandleCloseOnExec -> PrimeFlagCloseOnExec
      _                 -> error ("Unknown prime flag: " ++ show x)

-- | struct drm_prime_handle
data StructPrimeHandle = StructPrimeHandle
   { sphHandle :: {-# UNPACK #-} !Word32
   , sphFlags  :: {-# UNPACK #-} !(BitSet Word32 PrimeFlag) -- ^ FD flags: only applciable for handle->fd
   , sphFD     :: {-# UNPACK #-} !Int32 -- ^ Returned DMAbuf file descriptor
   }
   deriving (Generic,Storable)
-----------------------------------------------------------------------------
-- IOCTLs
-----------------------------------------------------------------------------
-- | Build a DRM read/write ioctl: 0x64 is the DRM ioctl magic number and
-- the Word8 argument is the command code from include/uapi/drm/drm.h
drmIoctl :: (MonadInIO m, Storable a) => Word8 -> a -> Handle -> FlowT '[ErrorCode] m a
drmIoctl = ioctlWriteRead 0x64

-- | DRM_IOCTL_GET_CAP
ioctlGetCapabilities :: MonadInIO m => StructGetCap -> Handle -> FlowT '[ErrorCode] m StructGetCap
ioctlGetCapabilities = drmIoctl 0x0C

-- | DRM_IOCTL_SET_CLIENT_CAP
ioctlSetClientCapability :: MonadInIO m => StructSetClientCap -> Handle -> FlowT '[ErrorCode] m StructSetClientCap
ioctlSetClientCapability = drmIoctl 0x0D

-- | DRM_IOCTL_MODE_GETRESOURCES
ioctlGetResources :: MonadInIO m => StructCardRes -> Handle -> FlowT '[ErrorCode] m StructCardRes
ioctlGetResources = drmIoctl 0xA0

-- | DRM_IOCTL_MODE_GETCRTC
ioctlGetController :: MonadInIO m => StructController -> Handle -> FlowT '[ErrorCode] m StructController
ioctlGetController = drmIoctl 0xA1

-- | DRM_IOCTL_MODE_SETCRTC
ioctlSetController :: MonadInIO m => StructController -> Handle -> FlowT '[ErrorCode] m StructController
ioctlSetController = drmIoctl 0xA2

-- | DRM_IOCTL_MODE_GETGAMMA
ioctlGetGamma :: MonadInIO m => StructControllerLut -> Handle -> FlowT '[ErrorCode] m StructControllerLut
ioctlGetGamma = drmIoctl 0xA4

-- | DRM_IOCTL_MODE_SETGAMMA
ioctlSetGamma :: MonadInIO m => StructControllerLut -> Handle -> FlowT '[ErrorCode] m StructControllerLut
ioctlSetGamma = drmIoctl 0xA5

-- | DRM_IOCTL_MODE_GETENCODER
ioctlGetEncoder :: MonadInIO m => StructGetEncoder -> Handle -> FlowT '[ErrorCode] m StructGetEncoder
ioctlGetEncoder = drmIoctl 0xA6

-- | DRM_IOCTL_MODE_GETCONNECTOR
ioctlGetConnector :: MonadInIO m => StructGetConnector -> Handle -> FlowT '[ErrorCode] m StructGetConnector
ioctlGetConnector = drmIoctl 0xA7

-- | DRM_IOCTL_MODE_GETPROPERTY
ioctlGetProperty :: MonadInIO m => StructGetProperty -> Handle -> FlowT '[ErrorCode] m StructGetProperty
ioctlGetProperty = drmIoctl 0xAA

-- | DRM_IOCTL_MODE_SETPROPERTY
ioctlSetProperty :: MonadInIO m => StructSetProperty -> Handle -> FlowT '[ErrorCode] m StructSetProperty
ioctlSetProperty = drmIoctl 0xAB

-- | DRM_IOCTL_MODE_GETPROPBLOB
ioctlGetBlob :: MonadInIO m => StructGetBlob -> Handle -> FlowT '[ErrorCode] m StructGetBlob
ioctlGetBlob = drmIoctl 0xAC

-- | DRM_IOCTL_MODE_RMFB
ioctlRemoveFrameBuffer :: MonadInIO m => Word32 -> Handle -> FlowT '[ErrorCode] m Word32
ioctlRemoveFrameBuffer = drmIoctl 0xAF

-- | DRM_IOCTL_MODE_PAGE_FLIP
ioctlPageFlip :: MonadInIO m => StructPageFlip -> Handle -> FlowT '[ErrorCode] m StructPageFlip
ioctlPageFlip = drmIoctl 0xB0

-- | DRM_IOCTL_MODE_DIRTYFB
ioctlDirtyFrameBuffer :: MonadInIO m => StructFrameBufferDirty -> Handle -> FlowT '[ErrorCode] m StructFrameBufferDirty
ioctlDirtyFrameBuffer = drmIoctl 0xB1

-- | DRM_IOCTL_MODE_CREATE_DUMB ("host buffer" is this API's name for a dumb buffer)
ioctlCreateHostBuffer :: MonadInIO m => StructCreateDumb -> Handle -> FlowT '[ErrorCode] m StructCreateDumb
ioctlCreateHostBuffer = drmIoctl 0xB2

-- | DRM_IOCTL_MODE_MAP_DUMB
ioctlMapHostBuffer :: MonadInIO m => StructMapDumb -> Handle -> FlowT '[ErrorCode] m StructMapDumb
ioctlMapHostBuffer = drmIoctl 0xB3

-- | DRM_IOCTL_MODE_DESTROY_DUMB
ioctlDestroyHostBuffer :: MonadInIO m => StructDestroyDumb -> Handle -> FlowT '[ErrorCode] m StructDestroyDumb
ioctlDestroyHostBuffer = drmIoctl 0xB4

-- | DRM_IOCTL_MODE_GETPLANERESOURCES
ioctlGetPlaneResources :: MonadInIO m => StructGetPlaneRes -> Handle -> FlowT '[ErrorCode] m StructGetPlaneRes
ioctlGetPlaneResources = drmIoctl 0xB5

-- | DRM_IOCTL_MODE_GETPLANE
ioctlGetPlane :: MonadInIO m => StructGetPlane -> Handle -> FlowT '[ErrorCode] m StructGetPlane
ioctlGetPlane = drmIoctl 0xB6

-- | DRM_IOCTL_MODE_SETPLANE
ioctlSetPlane :: MonadInIO m => StructSetPlane -> Handle -> FlowT '[ErrorCode] m StructSetPlane
ioctlSetPlane = drmIoctl 0xB7

-- | DRM_IOCTL_MODE_ADDFB2
ioctlAddFrameBuffer :: MonadInIO m => StructFrameBufferCommand -> Handle -> FlowT '[ErrorCode] m StructFrameBufferCommand
ioctlAddFrameBuffer = drmIoctl 0xB8

-- | DRM_IOCTL_MODE_OBJ_GETPROPERTIES
ioctlGetObjectProperties :: MonadInIO m => StructGetObjectProperties -> Handle -> FlowT '[ErrorCode] m StructGetObjectProperties
ioctlGetObjectProperties = drmIoctl 0xB9

-- | DRM_IOCTL_MODE_OBJ_SETPROPERTY
ioctlSetObjectProperty :: MonadInIO m => StructSetObjectProperty -> Handle -> FlowT '[ErrorCode] m StructSetObjectProperty
ioctlSetObjectProperty = drmIoctl 0xBA

-- | DRM_IOCTL_MODE_CURSOR2
ioctlCursor :: MonadInIO m => StructCursor2 -> Handle -> FlowT '[ErrorCode] m StructCursor2
ioctlCursor = drmIoctl 0xBB

-- | DRM_IOCTL_MODE_ATOMIC
ioctlAtomic :: MonadInIO m => StructAtomic -> Handle -> FlowT '[ErrorCode] m StructAtomic
ioctlAtomic = drmIoctl 0xBC

-- | DRM_IOCTL_MODE_CREATEPROPBLOB
ioctlCreateBlob :: MonadInIO m => StructCreateBlob -> Handle -> FlowT '[ErrorCode] m StructCreateBlob
ioctlCreateBlob = drmIoctl 0xBD

-- | DRM_IOCTL_MODE_DESTROYPROPBLOB
ioctlDestroyBlob :: MonadInIO m => StructDestroyBlob -> Handle -> FlowT '[ErrorCode] m StructDestroyBlob
ioctlDestroyBlob = drmIoctl 0xBE
-----------------------------------------------------------------------------
-- Events
-----------------------------------------------------------------------------
-- Header for events written back to userspace on the drm fd. The
-- type defines the type of event, the length specifies the total
-- length of the event (including the header), and user_data is
-- typically a 64 bit value passed with the ioctl that triggered the
-- event. A read on the drm fd will always only return complete
-- events, that is, if for example the read buffer is 100 bytes, and
-- there are two 64 byte events pending, only one will be returned.
--
-- Event types 0 - 0x7fffffff are generic drm events, 0x80000000 and
-- up are chipset specific.
-- | drm_event
data DRMEventHeader = DRMEventHeader
   { eventType   :: {-# UNPACK #-} !Word32 -- ^ See 'toEventType'
   , eventLength :: {-# UNPACK #-} !Word32 -- ^ Total event length, header included
   } deriving (Generic,Storable)

-- | Event type
data EventType
   = VBlank           -- ^ Beginning of the VBlank period
   | PageFlipComplete -- ^ Page flipping complete
   deriving (Show)
-- | Try to recognize the event type
toEventType :: Word32 -> Maybe EventType
toEventType v = lookup v
   [ (0x01, VBlank          )
   , (0x02, PageFlipComplete)
   ]
-- | drm_event_vblank
data DRMEvent = DRMEvent
   { drmEventType         :: {-# UNPACK #-} !Word32 -- ^ See 'toEventType'
   , drmEventSize         :: {-# UNPACK #-} !Word32
   , drmEventUserData     :: {-# UNPACK #-} !Word64 -- ^ Value passed in the triggering ioctl (e.g. page flip)
   , drmEventSeconds      :: {-# UNPACK #-} !Word32
   , drmEventMicroseconds :: {-# UNPACK #-} !Word32
   , drmEventSequence     :: {-# UNPACK #-} !Word32
   , drmEventReserved     :: {-# UNPACK #-} !Word32
   } deriving (Show,Generic,Storable)
-- =============================================================
-- From linux/include/uapi/drm/drm_crtc.h
-- =============================================================
-----------------------------------------------------------------------------
-- Rotation/reflection
-----------------------------------------------------------------------------
-- | Plane rotation (quarter turns)
data Rotation
   = RotateNone
   | Rotate90
   | Rotate180
   | Rotate270
   deriving (Show,Eq,Enum)

-- | Plane reflection axes
data Reflection
   = ReflectX
   | ReflectY
   deriving (Show,Eq,Enum,CBitSet)

-- | Rotation and reflection packed in a single byte
type RotateReflect = BitFields Word8
   '[ BitField 2 "padding"    Word8
    , BitField 2 "reflection" (BitSet Word8 Reflection)
    , BitField 4 "rotation"   (EnumField Word8 Rotation)
    ]
-----------------------------------------------------------------------------
-- SubPixel order
-----------------------------------------------------------------------------
-- | Indicate how a pixel is physically subdivised in RGB pixel elements
data SubPixel
   = SubPixelUnknown
   | SubPixelHorizontalRGB
   | SubPixelHorizontalBGR
   | SubPixelVerticalRGB
   | SubPixelVerticalBGR
   | SubPixelNone
   deriving (Eq,Ord,Enum,Show,CEnum)
| hsyl20/ViperVM | haskus-system/src/lib/Haskus/System/Linux/Internals/Graphics.hs | bsd-3-clause | 39,672 | 0 | 12 | 8,144 | 6,414 | 3,810 | 2,604 | 728 | 7 |
-- | lexem defination of Cirno.
module Cirno.Lexer where
import Control.Monad (void)
import Text.Megaparsec
import qualified Text.Megaparsec.Lexer as L
import Text.Megaparsec.String
type Name = String
data Expr = EInt Integer
| Var Name
| App Expr Expr
| Lam Pattern Expr
| Let [Pattern] [Expr] Expr
| Letrec [Pattern] [Expr] Expr
| If Expr Expr Expr
| Case Expr [Clause]
| Op Name
deriving (Show)
data AriOp = Add
| Sub
| Mul
| Div
deriving (Show)
data RelOp = GT
| LT
| EQ
| GE
| LE
deriving (Show)
data Op = AriOp | RelOp deriving (Show)
data Pattern = PInt Integer
| PVar Name
| PCons Name [Pattern]
deriving (Show)
data Definition = DVar Name Expr
| DFun Name [Name] Expr
deriving (Show)
data Clause = Clause Pattern Expr deriving (Show)
spaceConsumer :: Parser ()
spaceConsumer = L.space
singleSpace -- how to consum single whitespace
lineComment -- how to consum line comment
blockComment -- how to consum block of comment
where singleSpace = void spaceChar
lineComment = L.skipLineComment "--"
blockComment = L.skipBlockComment "{-" "-}"
lexeme :: Parser a -> Parser a
lexeme = L.lexeme spaceConsumer
symbol :: String -> Parser String
symbol = L.symbol spaceConsumer
parens :: Parser a -> Parser a
parens = between (symbol "(") (symbol ")")
warpParens :: Parser a -> Parser a
warpParens p = parens (warpParens p) <|> p
integer :: Parser Integer
integer = lexeme L.integer
keywords :: [String]
keywords = ["if","else","then","do","case","of","return","let","letrec","in","otherwise","not"]
keyword :: String -> Parser ()
keyword w =
do void $ string w
void $ notFollowedBy alphaNumChar -- | perform longest match to make sure it is a keyword.
spaceConsumer
identifier' :: Parser String -- | an identifier is string start with a lowercase char, followed by
-- | many chars, but not a reserved keyword.
identifier' = name >>= check
where name = (:) <$> lowerChar <*> many alphaNumChar
check x | x `elem` keywords = fail ("keyword" ++ show x ++ "is a reserved word")
| otherwise = return x
identifier :: Parser String
identifier = lexeme identifier'
constructor' :: Parser String -- | same as constructor, but do not comsume whitespace
constructor' = name
where name = (:) <$> upperChar <*> many alphaNumChar
constructor :: Parser String
constructor = lexeme constructor'
| SuperHex/Cirno | src/Cirno/Lexer.hs | bsd-3-clause | 2,896 | 0 | 12 | 996 | 723 | 397 | 326 | 73 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE UndecidableInstances #-}
-------------------------------------------------------------------------------
-- |
-- Module : Database.Bloodhound.Types
-- Copyright : (C) 2014, 2018 Chris Allen
-- License : BSD-style (see the file LICENSE)
-- Maintainer : Chris Allen <cma@bitemyapp.com
-- Stability : provisional
-- Portability : RecordWildCards
--
-- Data types for describing actions and data structures performed to interact
-- with Elasticsearch. The two main buckets your queries against Elasticsearch
-- will fall into are 'Query's and 'Filter's. 'Filter's are more like
-- traditional database constraints and often have preferable performance
-- properties. 'Query's support human-written textual queries, such as fuzzy
-- queries.
-------------------------------------------------------------------------------
module Database.Bloodhound.Types
( defaultCache
, defaultIndexSettings
, defaultIndexDocumentSettings
, mkSort
, showText
, unpackId
, mkMatchQuery
, mkMultiMatchQuery
, mkBoolQuery
, mkRangeQuery
, mkQueryStringQuery
, mkAggregations
, mkTermsAggregation
, mkTermsScriptAggregation
, mkDateHistogram
, mkCardinalityAggregation
, mkDocVersion
, mkStatsAggregation
, mkExtendedStatsAggregation
, docVersionNumber
, toMissing
, toTerms
, toDateHistogram
, toTopHits
, omitNulls
, BH(..)
, runBH
, BHEnv
, bhServer
, bhManager
, bhRequestHook
, mkBHEnv
, MonadBH(..)
, Version(..)
, VersionNumber(..)
, MaybeNA(..)
, BuildHash(..)
, Status(..)
, Existence(..)
, NullValue(..)
, IndexSettings(..)
, UpdatableIndexSetting(..)
, IndexSettingsSummary(..)
, AllocationPolicy(..)
, Compression(..)
, ReplicaBounds(..)
, Bytes(..)
, gigabytes
, megabytes
, kilobytes
, FSType(..)
, InitialShardCount(..)
, NodeAttrFilter(..)
, NodeAttrName(..)
, CompoundFormat(..)
, IndexTemplate(..)
, Server(..)
, Reply
, EsResult(..)
, EsResultFound(..)
, EsError(..)
, EsProtocolException(..)
, IndexAlias(..)
, IndexAliasName(..)
, IndexAliasAction(..)
, IndexAliasCreate(..)
, IndexAliasSummary(..)
, IndexAliasesSummary(..)
, AliasRouting(..)
, SearchAliasRouting(..)
, IndexAliasRouting(..)
, RoutingValue(..)
, DocVersion
, ExternalDocVersion(..)
, VersionControl(..)
, JoinRelation(..)
, IndexDocumentSettings(..)
, Query(..)
, Search(..)
, SearchType(..)
, SearchResult(..)
, ScrollId(..)
, HitsTotalRelation(..)
, HitsTotal(..)
, SearchHits(..)
, TrackSortScores
, From(..)
, Size(..)
, Source(..)
, PatternOrPatterns(..)
, Include(..)
, Exclude(..)
, Pattern(..)
, ShardResult(..)
, Hit(..)
, HitFields(..)
, Filter(..)
, BoolMatch(..)
, Term(..)
, GeoPoint(..)
, GeoBoundingBoxConstraint(..)
, GeoBoundingBox(..)
, GeoFilterType(..)
, Distance(..)
, DistanceUnit(..)
, DistanceType(..)
, DistanceRange(..)
, OptimizeBbox(..)
, LatLon(..)
, RangeValue(..)
, RangeExecution(..)
, LessThan(..)
, LessThanEq(..)
, GreaterThan(..)
, GreaterThanEq(..)
, LessThanD(..)
, LessThanEqD(..)
, GreaterThanD(..)
, GreaterThanEqD(..)
, Regexp(..)
, RegexpFlags(..)
, RegexpFlag(..)
, FieldName(..)
, ScriptFields(..)
, ScriptFieldName
, ScriptFieldValue
, Script(..)
, ScriptLanguage(..)
, ScriptSource(..)
, ScriptParams(..)
, ScriptParamName
, ScriptParamValue
, IndexName(..)
, IndexSelection(..)
, NodeSelection(..)
, NodeSelector(..)
, ForceMergeIndexSettings(..)
, defaultForceMergeIndexSettings
, TemplateName(..)
, IndexPattern(..)
, DocId(..)
, CacheName(..)
, CacheKey(..)
, BulkOperation(..)
, ReplicaCount(..)
, ShardCount(..)
, Sort
, SortMode(..)
, SortOrder(..)
, SortSpec(..)
, DefaultSort(..)
, Missing(..)
, OpenCloseIndex(..)
, Method
, Boost(..)
, MatchQuery(..)
, MultiMatchQuery(..)
, BoolQuery(..)
, BoostingQuery(..)
, CommonTermsQuery(..)
, FunctionScoreQuery(..)
, BoostMode(..)
, ScoreMode(..)
, FunctionScoreFunctions(..)
, ComponentFunctionScoreFunction(..)
, FunctionScoreFunction(..)
, Weight(..)
, Seed(..)
, FieldValueFactor(..)
, Factor(..)
, FactorModifier(..)
, FactorMissingFieldValue(..)
, DisMaxQuery(..)
, FuzzyLikeThisQuery(..)
, FuzzyLikeFieldQuery(..)
, FuzzyQuery(..)
, HasChildQuery(..)
, HasParentQuery(..)
, IndicesQuery(..)
, MoreLikeThisQuery(..)
, MoreLikeThisFieldQuery(..)
, NestedQuery(..)
, PrefixQuery(..)
, QueryStringQuery(..)
, SimpleQueryStringQuery(..)
, RangeQuery(..)
, RegexpQuery(..)
, QueryString(..)
, SearchTemplateId(..)
, SearchTemplateSource(..)
, SearchTemplate(..)
, GetTemplateScript(..)
, TemplateQueryKeyValuePairs(..)
, WildcardQuery(..)
, BooleanOperator(..)
, ZeroTermsQuery(..)
, CutoffFrequency(..)
, Analyzer(..)
, Tokenizer(..)
, TokenFilter(..)
, CharFilter(..)
, MaxExpansions(..)
, Lenient(..)
, MatchQueryType(..)
, MultiMatchQueryType(..)
, Tiebreaker(..)
, MinimumMatch(..)
, DisableCoord(..)
, CommonMinimumMatch(..)
, MinimumMatchHighLow(..)
, PrefixLength(..)
, Fuzziness(..)
, IgnoreTermFrequency(..)
, MaxQueryTerms(..)
, AggregateParentScore(..)
, IgnoreUnmapped(..)
, MinChildren(..)
, MaxChildren(..)
, ScoreType(..)
, Score
, Cache
, RelationName(..)
, BoostTerms(..)
, MaxWordLength(..)
, MinWordLength(..)
, MaxDocFrequency(..)
, MinDocFrequency(..)
, PhraseSlop(..)
, StopWord(..)
, QueryPath(..)
, MinimumTermFrequency(..)
, PercentMatch(..)
, FieldDefinition(..)
, MappingField(..)
, Mapping(..)
, UpsertActionMetadata(..)
, buildUpsertActionMetadata
, UpsertPayload(..)
, AllowLeadingWildcard(..)
, LowercaseExpanded(..)
, GeneratePhraseQueries(..)
, Locale(..)
, AnalyzeWildcard(..)
, EnablePositionIncrements(..)
, SimpleQueryFlag(..)
, FieldOrFields(..)
, Monoid(..)
, ToJSON(..)
, Interval(..)
, TimeInterval(..)
, ExecutionHint(..)
, CollectionMode(..)
, TermOrder(..)
, TermInclusion(..)
, SnapshotRepoSelection(..)
, GenericSnapshotRepo(..)
, SnapshotRepo(..)
, SnapshotRepoConversionError(..)
, SnapshotRepoType(..)
, GenericSnapshotRepoSettings(..)
, SnapshotRepoUpdateSettings(..)
, defaultSnapshotRepoUpdateSettings
, SnapshotRepoName(..)
, SnapshotRepoPattern(..)
, SnapshotVerification(..)
, SnapshotNodeVerification(..)
, FullNodeId(..)
, NodeName(..)
, ClusterName(..)
, NodesInfo(..)
, NodesStats(..)
, NodeStats(..)
, NodeBreakersStats(..)
, NodeBreakerStats(..)
, NodeHTTPStats(..)
, NodeTransportStats(..)
, NodeFSStats(..)
, NodeDataPathStats(..)
, NodeFSTotalStats(..)
, NodeNetworkStats(..)
, NodeThreadPoolStats(..)
, NodeJVMStats(..)
, JVMBufferPoolStats(..)
, JVMGCStats(..)
, JVMPoolStats(..)
, NodeProcessStats(..)
, NodeOSStats(..)
, LoadAvgs(..)
, NodeIndicesStats(..)
, EsAddress(..)
, PluginName(..)
, NodeInfo(..)
, NodePluginInfo(..)
, NodeHTTPInfo(..)
, NodeTransportInfo(..)
, BoundTransportAddress(..)
, NodeNetworkInfo(..)
, MacAddress(..)
, NetworkInterfaceName(..)
, NodeNetworkInterface(..)
, NodeThreadPoolInfo(..)
, ThreadPoolSize(..)
, ThreadPoolType(..)
, NodeJVMInfo(..)
, JVMMemoryPool(..)
, JVMGCCollector(..)
, JVMMemoryInfo(..)
, PID(..)
, NodeOSInfo(..)
, CPUInfo(..)
, NodeProcessInfo(..)
, FsSnapshotRepo(..)
, SnapshotCreateSettings(..)
, defaultSnapshotCreateSettings
, SnapshotSelection(..)
, SnapshotPattern(..)
, SnapshotInfo(..)
, SnapshotShardFailure(..)
, ShardId(..)
, SnapshotName(..)
, SnapshotState(..)
, SnapshotRestoreSettings(..)
, defaultSnapshotRestoreSettings
, RestoreRenamePattern(..)
, RestoreRenameToken(..)
, RRGroupRefNum
, rrGroupRefNum
, mkRRGroupRefNum
, RestoreIndexSettings(..)
, Suggest(..)
, SuggestType(..)
, PhraseSuggester(..)
, PhraseSuggesterHighlighter(..)
, PhraseSuggesterCollate(..)
, mkPhraseSuggester
, SuggestOptions(..)
, SuggestResponse(..)
, NamedSuggestionResponse(..)
, DirectGenerators(..)
, mkDirectGenerators
, DirectGeneratorSuggestModeTypes (..)
, Aggregation(..)
, Aggregations
, AggregationResults
, BucketValue(..)
, Bucket(..)
, BucketAggregation(..)
, TermsAggregation(..)
, MissingAggregation(..)
, ValueCountAggregation(..)
, FilterAggregation(..)
, CardinalityAggregation(..)
, DateHistogramAggregation(..)
, DateRangeAggregation(..)
, DateRangeAggRange(..)
, DateMathExpr(..)
, DateMathAnchor(..)
, DateMathModifier(..)
, DateMathUnit(..)
, TopHitsAggregation(..)
, StatisticsAggregation(..)
, SearchAfterKey
, CountQuery (..)
, CountResponse (..)
, CountShards (..)
, Highlights(..)
, FieldHighlight(..)
, HighlightSettings(..)
, PlainHighlight(..)
, PostingsHighlight(..)
, FastVectorHighlight(..)
, CommonHighlight(..)
, NonPostings(..)
, HighlightEncoder(..)
, HighlightTag(..)
, HitHighlight
, MissingResult(..)
, TermsResult(..)
, DateHistogramResult(..)
, DateRangeResult(..)
, TopHitResult(..)
, EsUsername(..)
, EsPassword(..)
, Analysis(..)
, AnalyzerDefinition(..)
, TokenizerDefinition(..)
, TokenFilterDefinition(..)
, CharFilterDefinition(..)
, Ngram(..)
, TokenChar(..)
, Shingle(..)
, Language(..)
) where
import Bloodhound.Import
import Database.Bloodhound.Internal.Aggregation
import Database.Bloodhound.Internal.Analysis
import Database.Bloodhound.Internal.Client
import Database.Bloodhound.Internal.Count
import Database.Bloodhound.Internal.Highlight
import Database.Bloodhound.Internal.Newtypes
import Database.Bloodhound.Internal.Query
import Database.Bloodhound.Internal.Sort
import Database.Bloodhound.Internal.Suggest
import qualified Data.HashMap.Strict as HM
{-| 'unpackId' is a silly convenience function that gets used once.
-}
unpackId :: DocId -> Text
unpackId (DocId docId) = docId
type TrackSortScores = Bool
data Search = Search { queryBody :: Maybe Query
, filterBody :: Maybe Filter
, sortBody :: Maybe Sort
, aggBody :: Maybe Aggregations
, highlight :: Maybe Highlights
-- default False
, trackSortScores :: TrackSortScores
, from :: From
, size :: Size
, searchType :: SearchType
, searchAfterKey :: Maybe SearchAfterKey
, fields :: Maybe [FieldName]
, scriptFields :: Maybe ScriptFields
, source :: Maybe Source
, suggestBody :: Maybe Suggest -- ^ Only one Suggestion request / response per Search is supported.
} deriving (Eq, Show)
instance ToJSON Search where
toJSON (Search mquery sFilter sort searchAggs
highlight sTrackSortScores sFrom sSize _ sAfter sFields
sScriptFields sSource sSuggest) =
omitNulls [ "query" .= query'
, "sort" .= sort
, "aggregations" .= searchAggs
, "highlight" .= highlight
, "from" .= sFrom
, "size" .= sSize
, "track_scores" .= sTrackSortScores
, "search_after" .= sAfter
, "fields" .= sFields
, "script_fields" .= sScriptFields
, "_source" .= sSource
, "suggest" .= sSuggest]
where query' = case sFilter of
Nothing -> mquery
Just x ->
Just
. QueryBoolQuery
$ mkBoolQuery (maybeToList mquery)
[x] [] []
data SearchType = SearchTypeQueryThenFetch
| SearchTypeDfsQueryThenFetch
deriving (Eq, Show)
instance ToJSON SearchType where
toJSON SearchTypeQueryThenFetch = String "query_then_fetch"
toJSON SearchTypeDfsQueryThenFetch = String "dfs_query_then_fetch"
instance FromJSON SearchType where
parseJSON (String "query_then_fetch") = pure $ SearchTypeQueryThenFetch
parseJSON (String "dfs_query_then_fetch") = pure $ SearchTypeDfsQueryThenFetch
parseJSON _ = empty
data Source =
NoSource
| SourcePatterns PatternOrPatterns
| SourceIncludeExclude Include Exclude
deriving (Eq, Show)
instance ToJSON Source where
toJSON NoSource = toJSON False
toJSON (SourcePatterns patterns) = toJSON patterns
toJSON (SourceIncludeExclude incl excl) = object [ "includes" .= incl, "excludes" .= excl ]
data PatternOrPatterns = PopPattern Pattern
| PopPatterns [Pattern] deriving (Eq, Read, Show)
instance ToJSON PatternOrPatterns where
toJSON (PopPattern pattern) = toJSON pattern
toJSON (PopPatterns patterns) = toJSON patterns
data Include = Include [Pattern] deriving (Eq, Read, Show)
data Exclude = Exclude [Pattern] deriving (Eq, Read, Show)
instance ToJSON Include where
toJSON (Include patterns) = toJSON patterns
instance ToJSON Exclude where
toJSON (Exclude patterns) = toJSON patterns
newtype Pattern = Pattern Text deriving (Eq, Read, Show)
instance ToJSON Pattern where
toJSON (Pattern pattern) = toJSON pattern
data SearchResult a =
SearchResult { took :: Int
, timedOut :: Bool
, shards :: ShardResult
, searchHits :: SearchHits a
, aggregations :: Maybe AggregationResults
, scrollId :: Maybe ScrollId
-- ^ Only one Suggestion request / response per
-- Search is supported.
, suggest :: Maybe NamedSuggestionResponse
}
deriving (Eq, Show)
instance (FromJSON a) => FromJSON (SearchResult a) where
parseJSON (Object v) = SearchResult <$>
v .: "took" <*>
v .: "timed_out" <*>
v .: "_shards" <*>
v .: "hits" <*>
v .:? "aggregations" <*>
v .:? "_scroll_id" <*>
v .:? "suggest"
parseJSON _ = empty
newtype ScrollId =
ScrollId Text
deriving (Eq, Show, Ord, ToJSON, FromJSON)
newtype SearchTemplateId = SearchTemplateId Text deriving (Eq, Show)
instance ToJSON SearchTemplateId where
toJSON (SearchTemplateId x) = toJSON x
newtype SearchTemplateSource = SearchTemplateSource Text deriving (Eq, Show)
instance ToJSON SearchTemplateSource where
toJSON (SearchTemplateSource x) = toJSON x
instance FromJSON SearchTemplateSource where
parseJSON (String s) = pure $ SearchTemplateSource s
parseJSON _ = empty
data ExpandWildcards = ExpandWildcardsAll
| ExpandWildcardsOpen
| ExpandWildcardsClosed
| ExpandWildcardsNone
deriving (Eq, Show)
instance ToJSON ExpandWildcards where
toJSON ExpandWildcardsAll = String "all"
toJSON ExpandWildcardsOpen = String "open"
toJSON ExpandWildcardsClosed = String "closed"
toJSON ExpandWildcardsNone = String "none"
instance FromJSON ExpandWildcards where
parseJSON (String "all") = pure $ ExpandWildcardsAll
parseJSON (String "open") = pure $ ExpandWildcardsOpen
parseJSON (String "closed") = pure $ ExpandWildcardsClosed
parseJSON (String "none") = pure $ ExpandWildcardsNone
parseJSON _ = empty
data TimeUnits = TimeUnitDays
| TimeUnitHours
| TimeUnitMinutes
| TimeUnitSeconds
| TimeUnitMilliseconds
| TimeUnitMicroseconds
| TimeUnitNanoseconds
deriving (Eq, Show)
instance ToJSON TimeUnits where
toJSON TimeUnitDays = String "d"
toJSON TimeUnitHours = String "h"
toJSON TimeUnitMinutes = String "m"
toJSON TimeUnitSeconds = String "s"
toJSON TimeUnitMilliseconds = String "ms"
toJSON TimeUnitMicroseconds = String "micros"
toJSON TimeUnitNanoseconds = String "nanos"
instance FromJSON TimeUnits where
parseJSON (String "d") = pure $ TimeUnitDays
parseJSON (String "h") = pure $ TimeUnitHours
parseJSON ( String "m") = pure $ TimeUnitMinutes
parseJSON (String "s") = pure $ TimeUnitSeconds
parseJSON (String "ms") = pure $ TimeUnitMilliseconds
parseJSON (String "micros") = pure $ TimeUnitMicroseconds
parseJSON (String "nanos") = pure $ TimeUnitNanoseconds
parseJSON _ = empty
data SearchTemplate = SearchTemplate {
searchTemplate :: Either SearchTemplateId SearchTemplateSource
, params :: TemplateQueryKeyValuePairs
, explainSearchTemplate :: Maybe Bool
, profileSearchTemplate :: Maybe Bool
} deriving (Eq, Show)
instance ToJSON SearchTemplate where
toJSON SearchTemplate{..} = omitNulls [
either ("id" .=) ("source" .=) searchTemplate
, "params" .= params
, "explain" .= explainSearchTemplate
, "profile" .= profileSearchTemplate
]
data GetTemplateScript = GetTemplateScript {
getTemplateScriptLang :: Maybe Text
, getTemplateScriptSource :: Maybe SearchTemplateSource
, getTemplateScriptOptions :: Maybe (HM.HashMap Text Text)
, getTemplateScriptId :: Text
, getTemplateScriptFound :: Bool
} deriving (Eq, Show)
instance FromJSON GetTemplateScript where
parseJSON (Object v) = do
script <- v .:? "script"
maybe
(GetTemplateScript Nothing Nothing Nothing <$> v .: "_id" <*> v .: "found")
(\s -> GetTemplateScript <$>
s .:? "lang" <*>
s .:? "source" <*>
s .:? "options" <*>
v .: "_id" <*>
v .: "found"
)
script
parseJSON _ = empty
| bitemyapp/bloodhound | src/Database/Bloodhound/Types.hs | bsd-3-clause | 20,305 | 0 | 20 | 6,425 | 4,483 | 2,848 | 1,635 | 590 | 1 |
module BuildNfa where
import Automata
import Regex
import Sets
build :: Regex -> Nfa Int
build Lambda = NFA
(makeSet [0 .. 1])
(sing (Emove 0 1))
0
(sing 1)
build (Lit c) = NFA
(makeSet [0 .. 1])
(sing (Move 0 c 1))
0
(sing 1)
build (Union r1 r2) = mOr (build r1) (build r2)
build (Concat r1 r2) = mThen (build r1) (build r2)
build (Kleene r) = mStar (build r)
mOr :: Nfa Int -> Nfa Int -> Nfa Int
mOr (NFA states1 moves1 start1 finish1) (NFA states2 moves2 start2 finish2)
= NFA (states1' `union` states2' `union` newstates)
(moves1' `union` moves2' `union` newmoves)
0
(sing (m1+m2+1))
where
m1 = card states1
m2 = card states2
states1' = mapSet (renumber 1) states1
states2' = mapSet (renumber (m1 + 1)) states2
newstates = makeSet [0, m1 + m2 + 1]
moves1' = mapSet (renumberMove 1) moves1
moves2' = mapSet (renumberMove (m1 + 1)) moves2
newmoves = makeSet [Emove 0 1, Emove 0 (m1+1), Emove m1 (m1 + m2+1), Emove (m1+m2) (m1+m2+1)]
-- Até aqui OK!
mThen :: Nfa Int -> Nfa Int -> Nfa Int
mThen (NFA states1 moves1 start1 finish1) (NFA states2 moves2 start2 finish2)
= NFA (states1 `union` states2') (moves1 `union` moves2')
start1
finish2'
where
states2' = mapSet (renumber k) states2
moves2' = mapSet (renumberMove k) moves2
finish2' = mapSet (renumber k) finish2
k = card states1 - 1
mStar :: Nfa Int -> Nfa Int
mStar (NFA states moves start finish)
= NFA
(states' `union` newstates)
(moves' `union` newmoves)
0
(sing (m+1))
where
m = card states
states' = mapSet (renumber 1) states
moves' = mapSet (renumberMove 1) moves
newstates = makeSet [ 0 , m+1 ]
newmoves = makeSet [ Emove 0 1 , Emove m 1 , Emove 0 (m+1) , Emove m (m+1) ]
mJoin :: [Nfa Int] -> (Nfa Int,[Set Int])
mJoin mc
= (NFA (foldl union empty newstates `union` makeSet [0])
(foldl union empty newmoves `union` emoves)
0
(foldl union empty newfinish), newfinish)
where
-- Leitura de dados
starts1 = map startstate mc
moves1 = map moves mc
states1 = map states mc
finish1 = map finishstates mc
-- Numero de estados de cada automato
m1 = map card states1
newm1 = scanl1 (+) (init m1)
transf = map (+1) (0:newm1)
-- Estados reajustados
newstates = zipWith (mapSet . renumber) transf states1
newmoves = zipWith (mapSet . renumberMove) transf moves1
newfinish = zipWith (mapSet . renumber) transf finish1
newstarts = zipWith (+) transf starts1
--
emoves = makeSet [Emove 0 s | s <- newstarts ]
{-mOrFinish :: Nfa Int -> Nfa Int -> (Nfa Int, (Set Int,Set Int))
mOrFinish (NFA states1 moves1 start1 finish1) (NFA states2 moves2 start2 finish2)
= (NFA (states1' `union` states2' `union` newstates)
(moves1' `union` moves2' `union` newmoves)
0
(sing (m1+m2+1)),
(finish1',finish2'))
where
m1 = card states1
m2 = card states2
finish1' = mapSet (renumber (m1 + 1)) finish1
finish2' = mapSet (renumber (m2 + 1)) finish2
states1' = mapSet (renumber 1) states1
states2' = mapSet (renumber (m1 + 1)) states2
moves1' = mapSet (renumberMove 1) moves1
moves2' = mapSet (renumberMove (m1 + 1)) moves2
newstates = makeSet [0, m1 + m2 + 1]
newmoves = makeSet [Emove 0 1, Emove 0 (m1+1), Emove m1 (m1 + m2+1), Emove (m1+m2) (m1+m2+1)]-}
renumber :: Int -> Int -> Int
renumber n st = n + st
renumberMove :: Int -> Move Int -> Move Int
renumberMove k (Move s1 c s2) = Move (renumber k s1) c (renumber k s2)
renumberMove k (Emove s1 s2) = Emove (renumber k s1) (renumber k s2)
| arthurmgo/regex-ftc | src/BuildNfa.hs | bsd-3-clause | 3,980 | 0 | 12 | 1,254 | 1,281 | 667 | 614 | 76 | 1 |
{-# LANGUAGE TupleSections #-}
module States.CreatingPlatform where
#include "Utils.cpp"
import Control.Applicative ((<$>))
import Data.Composition ((.:))
import Gamgine.Control ((?))
import qualified Gamgine.Math.Vect as V
import qualified Gamgine.Math.Box as B
import qualified GameData.Level as LV
import qualified GameData.Data as GD
import qualified GameData.Entity as E
import qualified GameData.Platform as PF
import qualified Entity.Id as EI
import qualified Gamgine.State.State as ST
import qualified States.GameRunning as GR
IMPORT_LENS_AS_LE
data CreatingPlatform = CreatingPlatform {
entityId :: Maybe Int,
startPos :: V.Vect
}
-- | the state for creating a platform during edit mode
mkCreatingPlatformState :: ST.State GD.Data
mkCreatingPlatformState =
mkState $ CreatingPlatform Nothing V.nullVec
where
mkState cp = ST.State {
ST.enter = \mp gd ->
let id = LV.freeEntityId . LE.getL GD.currentLevelL $ gd
bound = B.Box V.nullVec V.nullVec
gd' = LE.modL GD.currentLevelL (LV.addEntity (PF.newPlatform id (Left mp) bound) LV.ToActiveLayer) gd
in Just (gd', mkState cp {entityId = Just id, startPos = mp}),
ST.leave = (, mkState cp {entityId = Nothing, startPos = V.nullVec}),
ST.update = (, mkState cp) . GR.update,
ST.render = ((, mkState cp) <$>) .: GR.render,
ST.keyEvent = (, mkState cp) .: flip const,
ST.mouseEvent = (, mkState cp) .: flip const,
ST.mouseMoved = \mp gd ->
case cp of
CreatingPlatform {entityId = Just id, startPos = sp} ->
(E.eMap (\e -> id == EI.entityId e ? updatePosAndBound mp sp e $ e) gd,
mkState cp)
}
updatePosAndBound :: V.Vect -> V.Vect -> E.Entity -> E.Entity
updatePosAndBound v1 v2 pf@(E.Platform {}) =
let minPt = V.minVec v1 v2
maxPt = V.maxVec v1 v2
diffVec = V.map abs $ maxPt - minPt
in pf {E.platformPosition = Left minPt,
E.platformBound = B.Box V.nullVec diffVec}
updatePosAndBound _ _ e = e
| dan-t/layers | src/States/CreatingPlatform.hs | bsd-3-clause | 2,145 | 0 | 21 | 573 | 676 | 383 | 293 | -1 | -1 |
{-
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[RnNames]{Extracting imported and top-level names in scope}
-}
{-# LANGUAGE CPP, NondecreasingIndentation #-}
module RnNames (
rnImports, getLocalNonValBinders, newRecordSelector,
rnExports, extendGlobalRdrEnvRn,
gresFromAvails,
calculateAvails,
reportUnusedNames,
checkConName
) where
#include "HsVersions.h"
import DynFlags
import HsSyn
import TcEnv
import RnEnv
import RnHsDoc ( rnHsDoc )
import LoadIface ( loadSrcInterface )
import TcRnMonad
import PrelNames
import Module
import Name
import NameEnv
import NameSet
import Avail
import FieldLabel
import HscTypes
import RdrName
import RdrHsSyn ( setRdrNameSpace )
import Outputable
import Maybes
import SrcLoc
import BasicTypes ( TopLevelFlag(..), StringLiteral(..) )
import ErrUtils
import Util
import FastString
import FastStringEnv
import ListSetOps
import Id
import Type
import PatSyn
import qualified GHC.LanguageExtensions as LangExt
import Control.Monad
import Data.Either ( partitionEithers, isRight, rights )
-- import qualified Data.Foldable as Foldable
import Data.Map ( Map )
import qualified Data.Map as Map
import Data.Ord ( comparing )
import Data.List ( partition, (\\), find, sortBy )
-- import qualified Data.Set as Set
import System.FilePath ((</>))
import System.IO
{-
************************************************************************
* *
\subsection{rnImports}
* *
************************************************************************
Note [Tracking Trust Transitively]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we import a package as well as checking that the direct imports are safe
according to the rules outlined in the Note [HscMain . Safe Haskell Trust Check]
we must also check that these rules hold transitively for all dependent modules
and packages. Doing this without caching any trust information would be very
slow as we would need to touch all packages and interface files a module depends
on. To avoid this we make use of the property that if a modules Safe Haskell
mode changes, this triggers a recompilation from that module in the dependency
graph. So we can just worry mostly about direct imports.
There is one trust property that can change for a package though without
recompilation being triggered: package trust. So we must check that all
packages a module transitively depends on to be trusted are still trusted when
we are compiling this module (as due to recompilation avoidance some modules
below may not be considered trusted any more without recompilation being
triggered).
We handle this by augmenting the existing transitive list of packages a module M
depends on with a bool for each package that says if it must be trusted when the
module M is being checked for trust. This list of trust required packages for a
single import is gathered in the rnImportDecl function and stored in an
ImportAvails data structure. The union of these trust required packages for all
imports is done by the rnImports function using the combine function which calls
the plusImportAvails function that is a union operation for the ImportAvails
type. This gives us in an ImportAvails structure all packages required to be
trusted for the module we are currently compiling. Checking that these packages
are still trusted (and that direct imports are trusted) is done in
HscMain.checkSafeImports.
See the note below, [Trust Own Package] for a corner case in this method and
how its handled.
Note [Trust Own Package]
~~~~~~~~~~~~~~~~~~~~~~~~
There is a corner case of package trust checking that the usual transitive check
doesn't cover. (For how the usual check operates see the Note [Tracking Trust
Transitively] below). The case is when you import a -XSafe module M and M
imports a -XTrustworthy module N. If N resides in a different package than M,
then the usual check works as M will record a package dependency on N's package
and mark it as required to be trusted. If N resides in the same package as M
though, then importing M should require its own package be trusted due to N
(since M is -XSafe so doesn't create this requirement by itself). The usual
check fails as a module doesn't record a package dependency of its own package.
So instead we now have a bool field in a modules interface file that simply
states if the module requires its own package to be trusted. This field avoids
us having to load all interface files that the module depends on to see if one
is trustworthy.
Note [Trust Transitive Property]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
So there is an interesting design question in regards to transitive trust
checking. Say I have a module B compiled with -XSafe. B is dependent on a bunch
of modules and packages, some packages it requires to be trusted as its using
-XTrustworthy modules from them. Now if I have a module A that doesn't use safe
haskell at all and simply imports B, should A inherit all the trust
requirements from B? Should A now also require that a package p is trusted since
B required it?
We currently say no but saying yes also makes sense. The difference is, if a
module M that doesn't use Safe Haskell imports a module N that does, should all
the trusted package requirements be dropped since M didn't declare that it cares
about Safe Haskell (so -XSafe is more strongly associated with the module doing
the importing) or should it be done still since the author of the module N that
uses Safe Haskell said they cared (so -XSafe is more strongly associated with
the module that was compiled that used it).
Going with yes is a simpler semantics we think and harder for the user to stuff
up but it does mean that Safe Haskell will affect users who don't care about
Safe Haskell as they might grab a package from Cabal which uses safe haskell (say
network) and that packages imports -XTrustworthy modules from another package
(say bytestring), so requires that package is trusted. The user may now get
compilation errors in code that doesn't do anything with Safe Haskell simply
because they are using the network package. They will have to call 'ghc-pkg
trust network' to get everything working. Due to this invasive nature of going
with yes we have gone with no for now.
-}
-- | Rename a group of import declarations. See 'rnImportDecl' for a
-- description of the four components in the result.
--
-- The non-SOURCE declarations are renamed first, so that a SOURCE
-- import that turns out to be unnecessary gets a helpful warning.
rnImports :: [LImportDecl RdrName]
          -> RnM ([LImportDecl Name], GlobalRdrEnv, ImportAvails, AnyHpcUsage)
rnImports decls = do
    this_mod <- getModule
    let is_boot_import = ideclSource . unLoc
        (boot_imps, normal_imps) = partition is_boot_import decls
    renamed_normal <- mapAndReportM (rnImportDecl this_mod) normal_imps
    renamed_boot   <- mapAndReportM (rnImportDecl this_mod) boot_imps
    -- Safe Haskell: See Note [Tracking Trust Transitively]
    return (merge_all (renamed_normal ++ renamed_boot))
  where
    -- Fold the per-declaration results into a single quadruple:
    -- declarations are collected in order, environments and avails are
    -- unioned, and HPC usage is OR-ed together.
    merge_all :: [(LImportDecl Name, GlobalRdrEnv, ImportAvails, AnyHpcUsage)]
              -> ([LImportDecl Name], GlobalRdrEnv, ImportAvails, AnyHpcUsage)
    merge_all = foldr merge_one ([], emptyGlobalRdrEnv, emptyImportAvails, False)

    merge_one (decl, env1, avails1, hpc1) (decls', env2, avails2, hpc2)
      = ( decl : decls'
        , env1 `plusGlobalRdrEnv` env2
        , avails1 `plusImportAvails` avails2
        , hpc1 || hpc2 )
-- | Given a located import declaration @decl@ from @this_mod@,
-- calculate the following pieces of information:
--
-- 1. An updated 'LImportDecl', where all unresolved 'RdrName' in
-- the entity lists have been resolved into 'Name's,
--
-- 2. A 'GlobalRdrEnv' representing the new identifiers that were
-- brought into scope (taking into account module qualification
-- and hiding),
--
-- 3. 'ImportAvails' summarizing the identifiers that were imported
-- by this declaration, and
--
-- 4. A boolean 'AnyHpcUsage' which is true if the imported module
-- used HPC.
rnImportDecl :: Module -> LImportDecl RdrName
             -> RnM (LImportDecl Name, GlobalRdrEnv, ImportAvails, AnyHpcUsage)
rnImportDecl this_mod
             (L loc decl@(ImportDecl { ideclName = loc_imp_mod_name, ideclPkgQual = mb_pkg
                                     , ideclSource = want_boot, ideclSafe = mod_safe
                                     , ideclQualified = qual_only, ideclImplicit = implicit
                                     , ideclAs = as_mod, ideclHiding = imp_details }))
  = setSrcSpan loc $ do

    -- A package-qualified import (import "pkg" M) is only legal with
    -- the PackageImports extension switched on.
    when (isJust mb_pkg) $ do
        pkg_imports <- xoptM LangExt.PackageImports
        when (not pkg_imports) $ addErr packageImportErr

    -- If there's an error in loadInterface, (e.g. interface
    -- file not found) we get lots of spurious errors from 'filterImports'
    let imp_mod_name = unLoc loc_imp_mod_name
        doc = ppr imp_mod_name <+> text "is directly imported"

    -- Check for self-import, which confuses the typechecker (Trac #9032)
    -- ghc --make rejects self-import cycles already, but batch-mode may not
    -- at least not until TcIface.tcHiBootIface, which is too late to avoid
    -- typechecker crashes.  (Indirect self imports are not caught until
    -- TcIface, see #10337 tracking how to make this error better.)
    --
    -- Originally, we also allowed 'import {-# SOURCE #-} M', but this
    -- caused bug #10182: in one-shot mode, we should never load an hs-boot
    -- file for the module we are compiling into the EPS.  In principle,
    -- it should be possible to support this mode of use, but we would have to
    -- extend Provenance to support a local definition in a qualified location.
    -- For now, we don't support it, but see #10336
    when (imp_mod_name == moduleName this_mod &&
          (case mb_pkg of  -- If we have import "<pkg>" M, then we should
                           -- check that "<pkg>" is "this" (which is magic)
                           -- or the name of this_mod's package.  Yurgh!
                           -- c.f. GHC.findModule, and Trac #9997
             Nothing                        -> True
             Just (StringLiteral _ pkg_fs)  -> pkg_fs == fsLit "this" ||
                              fsToUnitId pkg_fs == moduleUnitId this_mod))
         (addErr (text "A module cannot import itself:" <+> ppr imp_mod_name))

    -- Check for a missing import list (Opt_WarnMissingImportList also
    -- checks for T(..) items but that is done in checkDodgyImport below)
    case imp_details of
        Just (False, _) -> return () -- Explicit import list
        _  | implicit   -> return () -- Do not bleat for implicit imports
           | qual_only  -> return ()
           | otherwise  -> whenWOptM Opt_WarnMissingImportList $
                           addWarn (Reason Opt_WarnMissingImportList)
                                   (missingImportListWarn imp_mod_name)

    iface <- loadSrcInterface doc imp_mod_name want_boot (fmap sl_fs mb_pkg)

    -- Compiler sanity check: if the import didn't say
    -- {-# SOURCE #-} we should not get a hi-boot file
    WARN( not want_boot && mi_boot iface, ppr imp_mod_name ) do

    -- Issue a user warning for a redundant {- SOURCE -} import
    -- NB that we arrange to read all the ordinary imports before
    -- any of the {- SOURCE -} imports.
    --
    -- in --make and GHCi, the compilation manager checks for this,
    -- and indeed we shouldn't do it here because the existence of
    -- the non-boot module depends on the compilation order, which
    -- is not deterministic.  The hs-boot test can show this up.
    dflags <- getDynFlags
    warnIf NoReason
           (want_boot && not (mi_boot iface) && isOneShot (ghcMode dflags))
           (warnRedundantSourceImport imp_mod_name)
    when (mod_safe && not (safeImportsOn dflags)) $
        addErr (text "safe import can't be used as Safe Haskell isn't on!"
                $+$ ptext (sLit $ "please enable Safe Haskell through either "
                                   ++ "Safe, Trustworthy or Unsafe"))

    let
        qual_mod_name = as_mod `orElse` imp_mod_name
        imp_spec  = ImpDeclSpec { is_mod = imp_mod_name, is_qual = qual_only,
                                  is_dloc = loc, is_as = qual_mod_name }

    -- filter the imports according to the import declaration
    (new_imp_details, gres) <- filterImports iface imp_spec imp_details

    -- for certain error messages, we’d like to know what could be imported
    -- here, if everything were imported
    potential_gres <- mkGlobalRdrEnv . snd <$> filterImports iface imp_spec Nothing

    let gbl_env = mkGlobalRdrEnv gres

        is_hiding | Just (True,_) <- imp_details = True
                  | otherwise                    = False

        -- should the import be safe?
        mod_safe' = mod_safe
                    || (not implicit && safeDirectImpsReq dflags)
                    || (implicit && safeImplicitImpsReq dflags)

    let imv = ImportedModsVal
            { imv_name        = qual_mod_name
            , imv_span        = loc
            , imv_is_safe     = mod_safe'
            , imv_is_hiding   = is_hiding
            , imv_all_exports = potential_gres
            , imv_qualified   = qual_only
            }
    -- Safe Haskell: See Note [Tracking Trust Transitively]
    let imports
          = (calculateAvails dflags iface mod_safe' want_boot)
                { imp_mods = unitModuleEnv (mi_module iface) [imv] }

    -- Complain if we import a deprecated module
    whenWOptM Opt_WarnWarningsDeprecations (
       case (mi_warns iface) of
          WarnAll txt -> addWarn (Reason Opt_WarnWarningsDeprecations)
                                 (moduleWarn imp_mod_name txt)
          _           -> return ()
     )

    let new_imp_decl = L loc (decl { ideclSafe = mod_safe'
                                   , ideclHiding = new_imp_details })

    return (new_imp_decl, gbl_env, imports, mi_hpc iface)
-- | Calculate the 'ImportAvails' induced by an import of a particular
-- interface, but without 'imp_mods' (the caller fills that field in
-- afterwards; see 'rnImportDecl').
calculateAvails :: DynFlags
                -> ModIface        -- ^ interface of the imported module
                -> IsSafeImport    -- ^ was this a Safe Haskell @safe@ import?
                -> IsBootInterface -- ^ was this a @{-# SOURCE #-}@ import?
                -> ImportAvails
calculateAvails dflags iface mod_safe' want_boot =
  let imp_mod    = mi_module iface
      orph_iface = mi_orphan iface
      has_finsts = mi_finsts iface
      deps       = mi_deps iface
      trust      = getSafeMode $ mi_trust iface
      trust_pkg  = mi_trust_pkg iface

      -- If the module exports anything defined in this module, just
      -- ignore it.  Reason: otherwise it looks as if there are two
      -- local definition sites for the thing, and an error gets
      -- reported.  Easiest thing is just to filter them out up
      -- front. This situation only arises if a module imports
      -- itself, or another module that imported it.  (Necessarily,
      -- this involves a loop.)
      --
      -- We do this *after* filterImports, so that if you say
      --      module A where
      --         import B( AType )
      --         type AType = ...
      --
      --      module B( AType ) where
      --         import {-# SOURCE #-} A( AType )
      --
      -- then you won't get a 'B does not export AType' message.


      -- Compute new transitive dependencies

      orphans | orph_iface = ASSERT( not (imp_mod `elem` dep_orphs deps) )
                             imp_mod : dep_orphs deps
              | otherwise  = dep_orphs deps

      finsts | has_finsts = ASSERT( not (imp_mod `elem` dep_finsts deps) )
                            imp_mod : dep_finsts deps
             | otherwise  = dep_finsts deps

      pkg = moduleUnitId (mi_module iface)

      -- Does this import mean we now require our own pkg
      -- to be trusted? See Note [Trust Own Package]
      ptrust = trust == Sf_Trustworthy || trust_pkg

      (dependent_mods, dependent_pkgs, pkg_trust_req)
         | pkg == thisPackage dflags =
            -- Imported module is from the home package
            -- Take its dependent modules and add imp_mod itself
            -- Take its dependent packages unchanged
            --
            -- NB: (dep_mods deps) might include a hi-boot file
            -- for the module being compiled, CM. Do *not* filter
            -- this out (as we used to), because when we've
            -- finished dealing with the direct imports we want to
            -- know if any of them depended on CM.hi-boot, in
            -- which case we should do the hi-boot consistency
            -- check.  See LoadIface.loadHiBootInterface
            ((moduleName imp_mod,want_boot):dep_mods deps,dep_pkgs deps,ptrust)

         | otherwise =
            -- Imported module is from another package
            -- Dump the dependent modules
            -- Add the package imp_mod comes from to the dependent packages
            ASSERT2( not (pkg `elem` (map fst $ dep_pkgs deps))
                   , ppr pkg <+> ppr (dep_pkgs deps) )
            ([], (pkg, False) : dep_pkgs deps, False)

  in ImportAvails {
          imp_mods       = emptyModuleEnv, -- this gets filled in later
          imp_orphs      = orphans,
          imp_finsts     = finsts,
          imp_dep_mods   = mkModDeps dependent_mods,
          imp_dep_pkgs   = map fst $ dependent_pkgs,
          -- Add in the imported modules trusted package
          -- requirements. ONLY do this though if we import the
          -- module as a safe import.
          -- See Note [Tracking Trust Transitively]
          -- and Note [Trust Transitive Property]
          imp_trust_pkgs = if mod_safe'
                           then map fst $ filter snd dependent_pkgs
                           else [],
          -- Do we require our own pkg to be trusted?
          -- See Note [Trust Own Package]
          imp_trust_own_pkg = pkg_trust_req
     }
-- | Warning text for a @{-# SOURCE #-}@ import of a module for which no
-- hs-boot interface was actually needed.
warnRedundantSourceImport :: ModuleName -> SDoc
warnRedundantSourceImport mod_name
  = hsep [ text "Unnecessary {-# SOURCE #-} in the import of module"
         , quotes (ppr mod_name) ]
{-
************************************************************************
* *
\subsection{importsFromLocalDecls}
* *
************************************************************************
From the top-level declarations of this module produce
* the lexical environment
* the ImportAvails
created by its bindings.
Note [Top-level Names in Template Haskell decl quotes]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
See also: Note [Interactively-bound Ids in GHCi] in HscTypes
Note [Looking up Exact RdrNames] in RnEnv
Consider a Template Haskell declaration quotation like this:
module M where
f x = h [d| f = 3 |]
When renaming the declarations inside [d| ...|], we treat the
top level binders specially in three ways
1. We give them an Internal Name, not (as usual) an External one.
This is done by RnEnv.newTopSrcBinder.
2. We make them *shadow* the outer bindings.
See Note [GlobalRdrEnv shadowing]
3. We find out whether we are inside a [d| ... |] by testing the TH
stage. This is a slight hack, because the stage field was really
meant for the type checker, and here we are not interested in the
fields of Brack, hence the error thunks in thRnBrack.
-}
-- | Bring a set of 'AvailInfo's (plus any fixity declarations for them)
-- into scope at top level, updating both the 'GlobalRdrEnv' and the
-- 'FixityEnv' of the global environment.
extendGlobalRdrEnvRn :: [AvailInfo]
                     -> MiniFixityEnv
                     -> RnM (TcGblEnv, TcLclEnv)
-- Updates both the GlobalRdrEnv and the FixityEnv
-- We return a new TcLclEnv only because we might have to
--      delete some bindings from it;
--      see Note [Top-level Names in Template Haskell decl quotes]
extendGlobalRdrEnvRn avails new_fixities
  = do  { (gbl_env, lcl_env) <- getEnvs
        ; stage <- getStage
        ; isGHCi <- getIsGHCi
        ; let rdr_env  = tcg_rdr_env gbl_env
              fix_env  = tcg_fix_env gbl_env
              th_bndrs = tcl_th_bndrs lcl_env
              th_lvl   = thLevel stage

              -- Delete new_occs from global and local envs
              -- If we are in a TemplateHaskell decl bracket,
              --    we are going to shadow them
              -- See Note [GlobalRdrEnv shadowing]
              inBracket = isBrackStage stage

              lcl_env_TH = lcl_env { tcl_rdr = delLocalRdrEnvList (tcl_rdr lcl_env) new_occs }
                           -- See Note [GlobalRdrEnv shadowing]

              lcl_env2 | inBracket = lcl_env_TH
                       | otherwise = lcl_env

              -- Deal with shadowing: see Note [GlobalRdrEnv shadowing]
              want_shadowing = isGHCi || inBracket
              rdr_env1 | want_shadowing = shadowNames rdr_env new_names
                       | otherwise     = rdr_env

              -- Record the TH binding level of every new name
              lcl_env3 = lcl_env2 { tcl_th_bndrs = extendNameEnvList th_bndrs
                                                       [ (n, (TopLevel, th_lvl))
                                                       | n <- new_names ] }

        ; rdr_env2 <- foldlM add_gre rdr_env1 new_gres

        ; let fix_env' = foldl extend_fix_env fix_env new_gres
              gbl_env' = gbl_env { tcg_rdr_env = rdr_env2, tcg_fix_env = fix_env' }

        ; traceRn (text "extendGlobalRdrEnvRn 2" <+> (pprGlobalRdrEnv True rdr_env2))
        ; return (gbl_env', lcl_env3) }
  where
    new_names = concatMap availNames avails
    new_occs  = map nameOccName new_names

    -- If there is a fixity decl for the gre, add it to the fixity env
    extend_fix_env fix_env gre
      | Just (L _ fi) <- lookupFsEnv new_fixities (occNameFS occ)
      = extendNameEnv fix_env name (FixItem occ fi)
      | otherwise
      = fix_env
      where
        name = gre_name gre
        occ  = greOccName gre

    new_gres :: [GlobalRdrElt]  -- New LocalDef GREs, derived from avails
    new_gres = concatMap localGREsFromAvail avails

    add_gre :: GlobalRdrEnv -> GlobalRdrElt -> RnM GlobalRdrEnv
    -- Extend the GlobalRdrEnv with a LocalDef GRE
    -- If there is already a LocalDef GRE with the same OccName,
    --    report an error and discard the new GRE
    -- This establishes INVARIANT 1 of GlobalRdrEnvs
    add_gre env gre
      | not (null dups)    -- Same OccName defined twice
      = do { addDupDeclErr (gre : dups); return env }

      | otherwise
      = return (extendGlobalRdrEnv env gre)
      where
        name = gre_name gre
        occ  = nameOccName name
        dups = filter isLocalGRE (lookupGlobalRdrEnv env occ)
{- *********************************************************************
* *
getLocalDeclBinders@ returns the names for an HsDecl
It's used for source code.
*** See Note [The Naming story] in HsDecls ****
* *
********************************************************************* -}
getLocalNonValBinders :: MiniFixityEnv -> HsGroup RdrName
    -> RnM ((TcGblEnv, TcLclEnv), NameSet)
-- Get all the top-level binders bound the group *except*
-- for value bindings, which are treated separately
-- Specifically we return AvailInfo for
-- * type decls (incl constructors and record selectors)
-- * class decls (including class ops)
-- * associated types
-- * foreign imports
-- * value signatures (in hs-boot files only)
getLocalNonValBinders fixity_env
     (HsGroup { hs_valds  = binds,
                hs_tyclds = tycl_decls,
                hs_fords  = foreign_decls })
  = do  { -- Process all type/class decls *except* family instances
        ; let inst_decls = tycl_decls >>= group_instds
        ; overload_ok <- xoptM LangExt.DuplicateRecordFields
        ; (tc_avails, tc_fldss)
            <- fmap unzip $ mapM (new_tc overload_ok)
                                 (tyClGroupTyClDecls tycl_decls)
        ; traceRn (text "getLocalNonValBinders 1" <+> ppr tc_avails)
        ; envs <- extendGlobalRdrEnvRn tc_avails fixity_env
        ; setEnvs envs $ do {
            -- Bring these things into scope first
            -- See Note [Looking up family names in family instances]

          -- Process all family instances
          -- to bring new data constructors into scope
        ; (nti_availss, nti_fldss) <- mapAndUnzipM (new_assoc overload_ok)
                                                   inst_decls

          -- Finish off with value binders:
          --    foreign decls and pattern synonyms for an ordinary module
          --    type sigs in case of a hs-boot file only
        ; is_boot <- tcIsHsBootOrSig
        ; let val_bndrs | is_boot   = hs_boot_sig_bndrs
                        | otherwise = for_hs_bndrs
        ; val_avails <- mapM new_simple val_bndrs

        ; let avails    = concat nti_availss ++ val_avails
              new_bndrs = availsToNameSetWithSelectors avails `unionNameSet`
                          availsToNameSetWithSelectors tc_avails
              flds      = concat nti_fldss ++ concat tc_fldss
        ; traceRn (text "getLocalNonValBinders 2" <+> ppr avails)
        ; (tcg_env, tcl_env) <- extendGlobalRdrEnvRn avails fixity_env

        -- Extend tcg_field_env with new fields (this used to be the
        -- work of extendRecordFieldEnv)
        ; let field_env = extendNameEnvList (tcg_field_env tcg_env) flds
              envs     = (tcg_env { tcg_field_env = field_env }, tcl_env)

        ; traceRn (text "getLocalNonValBinders 3" <+> vcat [ppr flds, ppr field_env])
        ; return (envs, new_bndrs) } }
  where
    ValBindsIn _val_binds val_sigs = binds

    for_hs_bndrs :: [Located RdrName]
    for_hs_bndrs = hsForeignDeclsBinders foreign_decls

    -- In a hs-boot file, the value binders come from the
    --  *signatures*, and there should be no foreign binders
    hs_boot_sig_bndrs = [ L decl_loc (unLoc n)
                        | L decl_loc (TypeSig ns _) <- val_sigs, n <- ns]

      -- the SrcSpan attached to the input should be the span of the
      -- declaration, not just the name
    new_simple :: Located RdrName -> RnM AvailInfo
    new_simple rdr_name = do{ nm <- newTopSrcBinder rdr_name
                            ; return (avail nm) }

    new_tc :: Bool -> LTyClDecl RdrName
           -> RnM (AvailInfo, [(Name, [FieldLabel])])
    new_tc overload_ok tc_decl -- NOT for type/data instances
        = do { let (bndrs, flds) = hsLTyClDeclBinders tc_decl
             ; names@(main_name : sub_names) <- mapM newTopSrcBinder bndrs
             ; flds' <- mapM (newRecordSelector overload_ok sub_names) flds
             ; let fld_env = case unLoc tc_decl of
                     DataDecl { tcdDataDefn = d } -> mk_fld_env d names flds'
                     _                            -> []
             ; return (AvailTC main_name names flds', fld_env) }


    -- Calculate the mapping from constructor names to fields, which
    -- will go in tcg_field_env. It's convenient to do this here where
    -- we are working with a single datatype definition.
    mk_fld_env :: HsDataDefn RdrName -> [Name] -> [FieldLabel] -> [(Name, [FieldLabel])]
    mk_fld_env d names flds = concatMap find_con_flds (dd_cons d)
      where
        find_con_flds (L _ (ConDeclH98 { con_name = L _ rdr
                                       , con_details = RecCon cdflds }))
            = [( find_con_name rdr
               , concatMap find_con_decl_flds (unLoc cdflds) )]
        find_con_flds (L _ (ConDeclGADT
                              { con_names = rdrs
                              , con_type = (HsIB { hsib_body = res_ty})}))
            = map (\ (L _ rdr) -> ( find_con_name rdr
                                  , concatMap find_con_decl_flds cdflds))
                  rdrs
            where
              (_tvs, _cxt, tau) = splitLHsSigmaTy res_ty
              -- Dig the record fields out of the GADT result type;
              -- two shapes can occur depending on how the type parsed.
              cdflds = case tau of
                 L _ (HsFunTy
                      (L _ (HsAppsTy
                        [L _ (HsAppPrefix (L _ (HsRecTy flds)))])) _) -> flds
                 L _ (HsFunTy (L _ (HsRecTy flds)) _) -> flds
                 _ -> []

        find_con_flds _ = []

        find_con_name rdr
          = expectJust "getLocalNonValBinders/find_con_name" $
              find (\ n -> nameOccName n == rdrNameOcc rdr) names
        find_con_decl_flds (L _ x)
          = map find_con_decl_fld (cd_fld_names x)
        find_con_decl_fld  (L _ (FieldOcc (L _ rdr) _))
          = expectJust "getLocalNonValBinders/find_con_decl_fld" $
              find (\ fl -> flLabel fl == lbl) flds
          where lbl = occNameFS (rdrNameOcc rdr)

    new_assoc :: Bool -> LInstDecl RdrName
              -> RnM ([AvailInfo], [(Name, [FieldLabel])])
    new_assoc _ (L _ (TyFamInstD {})) = return ([], [])
      -- type instances don't bind new names

    new_assoc overload_ok (L _ (DataFamInstD d))
      = do { (avail, flds) <- new_di overload_ok Nothing d
           ; return ([avail], flds) }
    new_assoc overload_ok (L _ (ClsInstD (ClsInstDecl { cid_poly_ty = inst_ty
                                                      , cid_datafam_insts = adts })))
      | Just (L loc cls_rdr) <- getLHsInstDeclClass_maybe inst_ty
      = do { cls_nm <- setSrcSpan loc $ lookupGlobalOccRn cls_rdr
           ; (avails, fldss)
                    <- mapAndUnzipM (new_loc_di overload_ok (Just cls_nm)) adts
           ; return (avails, concat fldss) }
      | otherwise
      = return ([], [])    -- Do not crash on ill-formed instances
                           -- Eg   instance !Show Int   Trac #3811c

    new_di :: Bool -> Maybe Name -> DataFamInstDecl RdrName
                   -> RnM (AvailInfo, [(Name, [FieldLabel])])
    new_di overload_ok mb_cls ti_decl
        = do { main_name <- lookupFamInstName mb_cls (dfid_tycon ti_decl)
             ; let (bndrs, flds) = hsDataFamInstBinders ti_decl
             ; sub_names <- mapM newTopSrcBinder bndrs
             ; flds' <- mapM (newRecordSelector overload_ok sub_names) flds
             ; let avail    = AvailTC (unLoc main_name) sub_names flds'
                                  -- main_name is not bound here!
                   fld_env  = mk_fld_env (dfid_defn ti_decl) sub_names flds'
             ; return (avail, fld_env) }

    new_loc_di :: Bool -> Maybe Name -> LDataFamInstDecl RdrName
                   -> RnM (AvailInfo, [(Name, [FieldLabel])])
    new_loc_di overload_ok mb_cls (L _ d) = new_di overload_ok mb_cls d
-- | Make up the 'FieldLabel' (including its freshly-bound selector 'Name')
-- for one record field.  The first constructor name of the datatype is used
-- to build the mangled selector 'OccName' when DuplicateRecordFields is on.
newRecordSelector :: Bool -> [Name] -> LFieldOcc RdrName -> RnM FieldLabel
newRecordSelector _ [] _ = error "newRecordSelector: datatype has no constructors!"
newRecordSelector overload_ok (dc:_) (L loc (FieldOcc (L _ fld) _))
  = do { selName <- newTopSrcBinder $ L loc $ field
       ; return $ qualFieldLbl { flSelector = selName } }
  where
    fieldOccName = occNameFS $ rdrNameOcc fld
    qualFieldLbl = mkFieldLabelOccs fieldOccName (nameOccName dc) overload_ok
    field | isExact fld = fld
              -- use an Exact RdrName as is to preserve the bindings
              -- of an already renamer-resolved field and its use
              -- sites. This is needed to correctly support record
              -- selectors in Template Haskell. See Note [Binders in
              -- Template Haskell] in Convert.hs and Note [Looking up
              -- Exact RdrNames] in RnEnv.hs.
          | otherwise   = mkRdrUnqual (flSelector qualFieldLbl)
{-
Note [Looking up family names in family instances]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
module M where
type family T a :: *
type instance M.T Int = Bool
We might think that we can simply use 'lookupOccRn' when processing the type
instance to look up 'M.T'. Alas, we can't! The type family declaration is in
the *same* HsGroup as the type instance declaration. Hence, as we are
currently collecting the binders declared in that HsGroup, these binders will
not have been added to the global environment yet.
Solution is simple: process the type family declarations first, extend
the environment, and then process the type instances.
************************************************************************
* *
\subsection{Filtering imports}
* *
************************************************************************
@filterImports@ takes the @ExportEnv@ telling what the imported module makes
available, and filters it through the import spec (if any).
Note [Dealing with imports]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
For import M( ies ), we take the mi_exports of M, and make
imp_occ_env :: OccEnv (Name, AvailInfo, Maybe Name)
One entry for each Name that M exports; the AvailInfo is the
AvailInfo exported from M that exports that Name.
The situation is made more complicated by associated types. E.g.
module M where
class C a where { data T a }
instance C Int where { data T Int = T1 | T2 }
instance C Bool where { data T Int = T3 }
Then M's export_avails are (recall the AvailTC invariant from Avails.hs)
C(C,T), T(T,T1,T2,T3)
Notice that T appears *twice*, once as a child and once as a parent. From
this list we construct a raw list including
T -> (T, T( T1, T2, T3 ), Nothing)
T -> (C, C( C, T ), Nothing)
and we combine these (in function 'combine' in 'imp_occ_env' in
'filterImports') to get
T -> (T, T(T,T1,T2,T3), Just C)
So the overall imp_occ_env is
C -> (C, C(C,T), Nothing)
T -> (T, T(T,T1,T2,T3), Just C)
T1 -> (T1, T(T,T1,T2,T3), Nothing) -- similarly T2,T3
If we say
import M( T(T1,T2) )
then we get *two* Avails: C(T), T(T1,T2)
Note that the imp_occ_env will have entries for data constructors too,
although we never look up data constructors.
-}
-- | Resolve the 'RdrName's of an import declaration against the exports
-- of the imported module's 'ModIface', returning the renamed import list
-- (where an explicit one was given) together with the 'GlobalRdrElt's the
-- declaration brings into scope.  See Note [Dealing with imports].
filterImports
    :: ModIface
    -> ImpDeclSpec                  -- The span for the entire import decl
    -> Maybe (Bool, Located [LIE RdrName])    -- Import spec; True => hiding
    -> RnM (Maybe (Bool, Located [LIE Name]), -- Import spec w/ Names
            [GlobalRdrElt])                   -- Same again, but in GRE form
filterImports iface decl_spec Nothing
  = return (Nothing, gresFromAvails (Just imp_spec) (mi_exports iface))
  where
    imp_spec = ImpSpec { is_decl = decl_spec, is_item = ImpAll }

filterImports iface decl_spec (Just (want_hiding, L l import_items))
  = do  -- check for errors, convert RdrNames to Names
        items1 <- mapM lookup_lie import_items

        let items2 :: [(LIE Name, AvailInfo)]
            items2 = concat items1
                -- NB the AvailInfo may have duplicates, and several items
                --    for the same parent; e.g N(x) and N(y)

            names  = availsToNameSet (map snd items2)
            keep n = not (n `elemNameSet` names)
            pruned_avails = filterAvails keep all_avails
            hiding_spec = ImpSpec { is_decl = decl_spec, is_item = ImpAll }

            gres | want_hiding = gresFromAvails (Just hiding_spec) pruned_avails
                 | otherwise   = concatMap (gresFromIE decl_spec) items2

        return (Just (want_hiding, L l (map fst items2)), gres)
  where
    all_avails = mi_exports iface

        -- See Note [Dealing with imports]
    imp_occ_env :: OccEnv (Name,        -- the name
                           AvailInfo,   -- the export item providing the name
                           Maybe Name)  -- the parent of associated types
    imp_occ_env = mkOccEnv_C combine [ (nameOccName n, (n, a, Nothing))
                                     | a <- all_avails, n <- availNames a]
      where
        -- See Note [Dealing with imports]
        -- 'combine' is only called for associated data types which appear
        -- twice in the all_avails. In the example, we combine
        --    T(T,T1,T2,T3) and C(C,T)  to give  (T, T(T,T1,T2,T3), Just C)
        -- NB: the AvailTC can have fields as well as data constructors (Trac #12127)
        combine (name1, a1@(AvailTC p1 _ _), mp1)
                (name2, a2@(AvailTC p2 _ _), mp2)
          = ASSERT( name1 == name2 && isNothing mp1 && isNothing mp2 )
            if p1 == name1 then (name1, a1, Just p2)
                           else (name1, a2, Just p1)
        combine x y = pprPanic "filterImports/combine" (ppr x $$ ppr y)

    lookup_name :: RdrName -> IELookupM (Name, AvailInfo, Maybe Name)
    lookup_name rdr | isQual rdr              = failLookupWith (QualImportError rdr)
                    | Just succ <- mb_success = return succ
                    | otherwise               = failLookupWith BadImport
      where
        mb_success = lookupOccEnv imp_occ_env (rdrNameOcc rdr)

    lookup_lie :: LIE RdrName -> TcRn [(LIE Name, AvailInfo)]
    lookup_lie (L loc ieRdr)
        = do (stuff, warns) <- setSrcSpan loc $
                               liftM (fromMaybe ([],[])) $
                               run_lookup (lookup_ie ieRdr)
             mapM_ emit_warning warns
             return [ (L loc ie, avail) | (ie,avail) <- stuff ]
        where
            -- Warn when importing T(..) if T was exported abstractly
            emit_warning (DodgyImport n) = whenWOptM Opt_WarnDodgyImports $
              addWarn (Reason Opt_WarnDodgyImports) (dodgyImportWarn n)
            emit_warning MissingImportList = whenWOptM Opt_WarnMissingImportList $
              addWarn (Reason Opt_WarnMissingImportList) (missingImportListItem ieRdr)
            emit_warning BadImportW = whenWOptM Opt_WarnDodgyImports $
              addWarn (Reason Opt_WarnDodgyImports) (lookup_err_msg BadImport)

            run_lookup :: IELookupM a -> TcRn (Maybe a)
            run_lookup m = case m of
              Failed err  -> addErr (lookup_err_msg err) >> return Nothing
              Succeeded a -> return (Just a)

            lookup_err_msg err = case err of
              BadImport           -> badImportItemErr iface decl_spec ieRdr all_avails
              IllegalImport       -> illegalImportItemErr
              QualImportError rdr -> qualImportItemErr rdr

        -- For each import item, we convert its RdrNames to Names,
        -- and at the same time construct an AvailInfo corresponding
        -- to what is actually imported by this item.
        -- Returns Nothing on error.
        -- We return a list here, because in the case of an import
        -- item like C, if we are hiding, then C refers to *both* a
        -- type/class and a data constructor.  Moreover, when we import
        -- data constructors of an associated family, we need separate
        -- AvailInfos for the data constructors and the family (as they have
        -- different parents).  See Note [Dealing with imports]
    lookup_ie :: IE RdrName -> IELookupM ([(IE Name, AvailInfo)], [IELookupWarning])
    lookup_ie ie = handle_bad_import $ do
      case ie of
        IEVar (L l n) -> do
            (name, avail, _) <- lookup_name n
            return ([(IEVar (L l name), trimAvail avail name)], [])

        IEThingAll (L l tc) -> do
            (name, avail, mb_parent) <- lookup_name tc
            let warns = case avail of
                          Avail {}                     -- e.g. f(..)
                            -> [DodgyImport tc]

                          AvailTC _ subs fs
                            | null (drop 1 subs) && null fs -- e.g. T(..) where T is a synonym
                            -> [DodgyImport tc]

                            | not (is_qual decl_spec)  -- e.g. import M( T(..) )
                            -> [MissingImportList]

                            | otherwise
                            -> []

                renamed_ie = IEThingAll (L l name)
                sub_avails = case avail of
                               Avail {}              -> []
                               AvailTC name2 subs fs -> [(renamed_ie, AvailTC name2 (subs \\ [name]) fs)]
            case mb_parent of
              Nothing     -> return ([(renamed_ie, avail)], warns)
                             -- non-associated ty/cls
              Just parent -> return ((renamed_ie, AvailTC parent [name] []) : sub_avails, warns)
                             -- associated type

        IEThingAbs (L l tc)
            | want_hiding   -- hiding ( C )
                       -- Here the 'C' can be a data constructor
                       --  *or* a type/class, or even both
            -> let tc_name = lookup_name tc
                   dc_name = lookup_name (setRdrNameSpace tc srcDataName)
               in
               case catIELookupM [ tc_name, dc_name ] of
                 []    -> failLookupWith BadImport
                 names -> return ([mkIEThingAbs l name | name <- names], [])
            | otherwise
            -> do nameAvail <- lookup_name tc
                  return ([mkIEThingAbs l nameAvail], [])

        IEThingWith (L l rdr_tc) wc rdr_ns rdr_fs ->
          ASSERT2(null rdr_fs, ppr rdr_fs) do
           (name, AvailTC _ ns subflds, mb_parent) <- lookup_name rdr_tc

           -- Look up the children in the sub-names of the parent
           let subnames = case ns of   -- The tc is first in ns,
                            [] -> []   -- if it is there at all
                                       -- See the AvailTC Invariant in Avail.hs
                            (n1:ns1) | n1 == name -> ns1
                                     | otherwise  -> ns
           case lookupChildren (map Left subnames ++ map Right subflds) rdr_ns of
             Nothing                      -> failLookupWith BadImport
             Just (childnames, childflds) ->
               case mb_parent of
                 -- non-associated ty/cls
                 Nothing
                   -> return ([(IEThingWith (L l name) wc childnames childflds,
                                AvailTC name (name:map unLoc childnames) (map unLoc childflds))],
                              [])
                 -- associated ty
                 Just parent
                   -> return ([(IEThingWith (L l name) wc childnames childflds,
                                AvailTC name (map unLoc childnames) (map unLoc childflds)),
                               (IEThingWith (L l name) wc childnames childflds,
                                AvailTC parent [name] [])],
                              [])

        _other -> failLookupWith IllegalImport
        -- could be IEModuleContents, IEGroup, IEDoc, IEDocNamed
        -- all errors.

      where
        mkIEThingAbs l (n, av, Nothing    ) = (IEThingAbs (L l n),
                                               trimAvail av n)
        mkIEThingAbs l (n, _,  Just parent) = (IEThingAbs (L l n),
                                               AvailTC parent [n] [])

        -- A failed lookup under "hiding" is only a warning: hiding a
        -- thing that isn't exported is harmless.
        handle_bad_import m = catchIELookup m $ \err -> case err of
          BadImport | want_hiding -> return ([], [BadImportW])
          _                       -> failLookupWith err
-- | Lookup monad for resolving one import item: either a result or
-- an 'IELookupError'.
type IELookupM = MaybeErr IELookupError

-- | Non-fatal problems discovered while resolving an import item.
data IELookupWarning
  = BadImportW
  | MissingImportList
  | DodgyImport RdrName
  -- NB. use the RdrName for reporting a "dodgy" import

-- | Fatal problems discovered while resolving an import item.
data IELookupError
  = QualImportError RdrName
  | BadImport
  | IllegalImport
-- | Abort an import-item lookup with the given error.
failLookupWith :: IELookupError -> IELookupM a
failLookupWith = Failed
-- | Run a lookup, passing any failure to the supplied handler;
-- a successful result is returned unchanged.
catchIELookup :: IELookupM a -> (IELookupError -> IELookupM a) -> IELookupM a
catchIELookup (Succeeded r) _       = Succeeded r
catchIELookup (Failed err)  handler = handler err
-- | Keep the results of the successful lookups, dropping the failures.
catIELookupM :: [IELookupM a] -> [a]
catIELookupM = foldr keep []
  where
    keep (Succeeded a) acc = a : acc
    keep (Failed _)    acc = acc
{-
************************************************************************
* *
\subsection{Import/Export Utils}
* *
************************************************************************
-}
-- | Union two 'AvailInfo's that describe the same entity (their
-- 'availName's must agree).  Preserves the AvailTC invariant that the
-- parent, when present, comes first in the subordinate list.
plusAvail :: AvailInfo -> AvailInfo -> AvailInfo
plusAvail a1 a2
  | debugIsOn && availName a1 /= availName a2
  = pprPanic "RnEnv.plusAvail names differ" (hsep [ppr a1,ppr a2])
plusAvail a1@(Avail {})         (Avail {})        = a1
plusAvail (AvailTC _ [] [])     a2@(AvailTC {})   = a2
plusAvail a1@(AvailTC {})       (AvailTC _ [] []) = a1
plusAvail (AvailTC n1 (s1:ss1) fs1) (AvailTC n2 (s2:ss2) fs2)
  = case (n1==s1, n2==s2) of  -- Maintain invariant the parent is first
       (True,True)   -> AvailTC n1 (s1 : (ss1 `unionLists` ss2))
                                   (fs1 `unionLists` fs2)
       (True,False)  -> AvailTC n1 (s1 : (ss1 `unionLists` (s2:ss2)))
                                   (fs1 `unionLists` fs2)
       (False,True)  -> AvailTC n1 (s2 : ((s1:ss1) `unionLists` ss2))
                                   (fs1 `unionLists` fs2)
       (False,False) -> AvailTC n1 ((s1:ss1) `unionLists` (s2:ss2))
                                   (fs1 `unionLists` fs2)
plusAvail (AvailTC n1 ss1 fs1) (AvailTC _ [] fs2)
  = AvailTC n1 ss1 (fs1 `unionLists` fs2)
plusAvail (AvailTC n1 [] fs1)  (AvailTC _ ss2 fs2)
  = AvailTC n1 ss2 (fs1 `unionLists` fs2)
plusAvail a1 a2 = pprPanic "RnEnv.plusAvail" (hsep [ppr a1,ppr a2])
-- | trims an 'AvailInfo' to keep only a single name.  The name may be
-- either an ordinary subordinate or a record field selector.
trimAvail :: AvailInfo -> Name -> AvailInfo
trimAvail (Avail b n)       _ = Avail b n
trimAvail (AvailTC n ns fs) m = case find ((== m) . flSelector) fs of
    Just x  -> AvailTC n [] [x]
    Nothing -> ASSERT( m `elem` ns ) AvailTC n [m] []
-- | Restrict a list of 'AvailInfo's to the names satisfying the
-- predicate, dropping entries that become empty.
filterAvails :: (Name -> Bool) -> [AvailInfo] -> [AvailInfo]
filterAvails keep = foldr (filterAvail keep) []
-- | Restrict a single 'AvailInfo' to the names satisfying the predicate,
-- consing the surviving (non-empty) avail onto the accumulator.
filterAvail :: (Name -> Bool) -> AvailInfo -> [AvailInfo] -> [AvailInfo]
filterAvail keep av@(Avail _ n) rest
  | keep n    = av : rest
  | otherwise = rest
filterAvail keep (AvailTC tc ns fs) rest
  | null kept_ns && null kept_fs = rest
  | otherwise                    = AvailTC tc kept_ns kept_fs : rest
  where
    kept_ns = filter keep ns
    kept_fs = filter (keep . flSelector) fs
-- | Given an import\/export spec, construct the appropriate 'GlobalRdrElt's.
gresFromIE :: ImpDeclSpec -> (LIE Name, AvailInfo) -> [GlobalRdrElt]
gresFromIE decl_spec (L loc ie, avail)
  = gresFromAvail prov_fn avail
  where
    -- For a T(..) item only T itself counts as explicitly mentioned;
    -- for every other item form, everything it brings in is explicit.
    is_explicit = case ie of
                    IEThingAll (L _ name) -> \n -> n == name
                    _                     -> \_ -> True
    prov_fn name
      = Just (ImpSpec { is_decl = decl_spec, is_item = item_spec })
      where
        item_spec = ImpSome { is_explicit = is_explicit name, is_iloc = loc }
{-
Note [Children for duplicate record fields]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider the module
{-# LANGUAGE DuplicateRecordFields #-}
module M (F(foo, MkFInt, MkFBool)) where
data family F a
data instance F Int = MkFInt { foo :: Int }
data instance F Bool = MkFBool { foo :: Bool }
The `foo` in the export list refers to *both* selectors! For this
reason, lookupChildren builds an environment that maps the FastString
to a list of items, rather than a single item.
-}
-- | Build a map from each parent 'Name' to the GREs of its children.
-- GREs with no parent, and pattern synonyms, are not entered into the map.
-- See Note [Children for duplicate record fields].
mkChildEnv :: [GlobalRdrElt] -> NameEnv [GlobalRdrElt]
mkChildEnv gres = foldr add emptyNameEnv gres
  where
    add gre env = case gre_par gre of
        FldParent p _  -> extendNameEnv_Acc (:) singleton env p gre
        ParentIs  p    -> extendNameEnv_Acc (:) singleton env p gre
        NoParent       -> env
        PatternSynonym -> env
-- | Select just the GREs that are pattern synonyms.
findPatSyns :: [GlobalRdrElt] -> [GlobalRdrElt]
findPatSyns = filter is_pat_syn
  where
    is_pat_syn (GRE { gre_par = PatternSynonym }) = True
    is_pat_syn _                                  = False
-- | Look up the children of a name, returning the empty list when the
-- name has no entry in the environment.
findChildren :: NameEnv [a] -> Name -> [a]
findChildren env n = case lookupNameEnv env n of
                       Just kids -> kids
                       Nothing   -> []
lookupChildren :: [Either Name FieldLabel] -> [Located RdrName]
               -> Maybe ([Located Name], [Located FieldLabel])
-- (lookupChildren all_kids rdr_items) maps each rdr_item to its
-- corresponding Name all_kids, if the former exists
-- The matching is done by FastString, not OccName, so that
--    Cls( meth, AssocTy )
-- will correctly find AssocTy among the all_kids of Cls, even though
-- the RdrName for AssocTy may have a (bogus) DataName namespace
-- (Really the rdr_items should be FastStrings in the first place.)
lookupChildren all_kids rdr_items
  = do xs <- mapM doOne rdr_items
       return (fmap concat (partitionEithers xs))
  where
    -- A FastString can resolve to one plain Name, or (with
    -- DuplicateRecordFields) to several field labels — but never a mix.
    doOne (L l r) = case (lookupFsEnv kid_env . occNameFS . rdrNameOcc) r of
      Just [Left n]            -> Just (Left (L l n))
      Just rs | all isRight rs -> Just (Right (map (L l) (rights rs)))
      _                        -> Nothing

    -- See Note [Children for duplicate record fields]
    kid_env = extendFsEnvList_C (++) emptyFsEnv
              [(either (occNameFS . nameOccName) flLabel x, [x]) | x <- all_kids]
-- | Split a list of GREs into plain names and record-field labels.
classifyGREs :: [GlobalRdrElt] -> ([Name], [FieldLabel])
classifyGREs gres = partitionEithers (map classifyGRE gres)
-- | Classify a GRE as a record-field label (rebuilt from its 'FldParent'
-- info, marking whether the selector name was mangled for
-- DuplicateRecordFields) or as an ordinary 'Name'.
classifyGRE :: GlobalRdrElt -> Either Name FieldLabel
classifyGRE gre = case gre_par gre of
  FldParent _ Nothing    -> Right (FieldLabel (occNameFS (nameOccName n)) False n)
  FldParent _ (Just lbl) -> Right (FieldLabel lbl True n)
  _                      -> Left  n
  where
    n = gre_name gre
-- | Combines 'AvailInfo's from the same family.
-- The input may have several items with the same availName;
-- e.g. @import Ix( Ix(..), index )@ yields Ix(Ix,index,range) and
-- Ix(index).  These are merged pairwise with 'plusAvail'.
nubAvails :: [AvailInfo] -> [AvailInfo]
nubAvails = nameEnvElts . foldl insert emptyNameEnv
  where
    insert env av = extendNameEnv_C plusAvail env (availName av) av
{-
************************************************************************
* *
\subsection{Export list processing}
* *
************************************************************************
Processing the export list.
You might think that we should record things that appear in the export
list as ``occurrences'' (using @addOccurrenceName@), but you'd be
wrong. We do check (here) that they are in scope, but there is no
need to slurp in their actual declaration (which is what
@addOccurrenceName@ forces).
Indeed, doing so would cause big trouble when compiling @PrelBase@, because
it re-exports @GHC@, which includes @takeMVar#@, whose type includes
@ConcBase.StateAndSynchVar#@, and so on...
Note [Exports of data families]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose you see (Trac #5306)
module M where
import X( F )
data instance F Int = FInt
What does M export? AvailTC F [FInt]
or AvailTC F [F,FInt]?
The former is strictly right because F isn't defined in this module.
But then you can never do an explicit import of M, thus
import M( F( FInt ) )
because F isn't exported by M. Nor can you import FInt alone from here
import M( FInt )
because we don't have syntax to support that. (It looks like an import of
the type FInt.)
At one point I implemented a compromise:
* When constructing exports with no export list, or with module M(
module M ), we add the parent to the exports as well.
* But not when you see module M( f ), even if f is a
class method with a parent.
* Nor when you see module M( module N ), with N /= M.
But the compromise seemed too much of a hack, so we backed it out.
You just have to use an explicit export list:
module M( F(..) ) where ...
-}
type ExportAccum -- The type of the accumulating parameter of
                 -- the main worker function in rnExports
  = ([LIE Name], -- Export items with Names
     ExportOccMap, -- Tracks exported occurrence names
     [AvailInfo]) -- The accumulated exported stuff
                  -- Not nub'd!
-- | Starting accumulator: no items, no occurrences, no avails.
emptyExportAccum :: ExportAccum
emptyExportAccum = ([], emptyOccEnv, [])
type ExportOccMap = OccEnv (Name, IE RdrName)
  -- Tracks what a particular exported OccName
  -- in an export list refers to, and which item
  -- it came from. It's illegal to export two distinct things
  -- that have the same occurrence name
rnExports :: Bool -- False => no 'module M(..) where' header at all
          -> Maybe (Located [LIE RdrName]) -- Nothing => no explicit export list
          -> TcGblEnv
          -> RnM (Maybe [LIE Name], TcGblEnv)
-- Complains if two distinct exports have same OccName
-- Warns about identical exports.
-- Complains about exports items not in scope
rnExports explicit_mod exports
          tcg_env@(TcGblEnv { tcg_mod = this_mod,
                              tcg_rdr_env = rdr_env,
                              tcg_imports = imports })
 = unsetWOptM Opt_WarnWarningsDeprecations $
       -- Do not report deprecations arising from the export
       -- list, to avoid bleating about re-exporting a deprecated
       -- thing (especially via 'module Foo' export item)
   do {
        -- If the module header is omitted altogether, then behave
        -- as if the user had written "module Main(main) where..."
        -- EXCEPT in interactive mode, when we behave as if he had
        -- written "module Main where ..."
        -- Reason: don't want to complain about 'main' not in scope
        -- in interactive mode
      ; dflags <- getDynFlags
      ; let real_exports
              | explicit_mod = exports
              | ghcLink dflags == LinkInMemory = Nothing
              | otherwise
                  = Just (noLoc [noLoc (IEVar (noLoc main_RDR_Unqual))])
              -- ToDo: the 'noLoc' here is unhelpful if 'main'
              -- turns out to be out of scope
      ; (rn_exports, avails) <- exports_from_avail real_exports rdr_env imports this_mod
      ; traceRn (ppr avails)
      ; let final_avails = nubAvails avails -- Combine families
            final_ns = availsToNameSetWithSelectors final_avails
      ; traceRn (text "rnExports: Exports:" <+> ppr final_avails)
        -- Record the renamed export list (if one was requested) and the
        -- defs/uses arising from the exports.
      ; let new_tcg_env =
              (tcg_env { tcg_exports = final_avails,
                         tcg_rn_exports = case tcg_rn_exports tcg_env of
                                            Nothing -> Nothing
                                            Just _ -> rn_exports,
                         tcg_dus = tcg_dus tcg_env `plusDU`
                                   usesOnly final_ns })
      ; return (rn_exports, new_tcg_env) }
-- | Turn the (optional) export list into renamed export items plus the
-- 'AvailInfo's actually exported.  The Nothing case handles a missing
-- export list (export everything locally defined); the Just case walks
-- the explicit items, checking each against the global environment.
exports_from_avail :: Maybe (Located [LIE RdrName])
                   -- Nothing => no explicit export list
                   -> GlobalRdrEnv
                   -> ImportAvails
                   -> Module
                   -> RnM (Maybe [LIE Name], [AvailInfo])
exports_from_avail Nothing rdr_env _imports _this_mod
   -- The same as (module M) where M is the current module name,
   -- so that's how we handle it, except we also export the data family
   -- when a data instance is exported.
  = let avails = [ fix_faminst $ availFromGRE gre
                 | gre <- globalRdrEnvElts rdr_env
                 , isLocalGRE gre ]
    in return (Nothing, avails)
  where
    -- #11164: when we define a data instance
    -- but not data family, re-export the family
    -- Even though we don't check whether this is actually a data family
    -- only data families can locally define subordinate things (`ns` here)
    -- without locally defining (and instead importing) the parent (`n`)
    fix_faminst (AvailTC n ns flds)
      | not (n `elem` ns)
      = AvailTC n (n:ns) flds
    fix_faminst avail = avail
exports_from_avail (Just (L _ rdr_items)) rdr_env imports this_mod
  = do (ie_names, _, exports) <- foldlM do_litem emptyExportAccum rdr_items
       return (Just ie_names, exports)
  where
    do_litem :: ExportAccum -> LIE RdrName -> RnM ExportAccum
    do_litem acc lie = setSrcSpan (getLoc lie) (exports_from_item acc lie)
    -- Maps a parent to its in-scope children
    kids_env :: NameEnv [GlobalRdrElt]
    kids_env = mkChildEnv (globalRdrEnvElts rdr_env)
    pat_syns :: [GlobalRdrElt]
    pat_syns = findPatSyns (globalRdrEnvElts rdr_env)
    imported_modules = [ imv_name imv
                       | xs <- moduleEnvElts $ imp_mods imports, imv <- xs ]
    exports_from_item :: ExportAccum -> LIE RdrName -> RnM ExportAccum
    -- 'module M' export item
    exports_from_item acc@(ie_names, occs, exports)
                      (L loc (IEModuleContents (L lm mod)))
        | let earlier_mods = [ mod
                             | (L _ (IEModuleContents (L _ mod))) <- ie_names ]
        , mod `elem` earlier_mods -- Duplicate export of M
        = do { warnIf (Reason Opt_WarnDuplicateExports) True
                      (dupModuleExport mod) ;
               return acc }
        | otherwise
        = do { let { exportValid = (mod `elem` imported_modules)
                                || (moduleName this_mod == mod)
                   ; gre_prs = pickGREsModExp mod (globalRdrEnvElts rdr_env)
                   ; new_exports = map (availFromGRE . fst) gre_prs
                   ; names = map (gre_name . fst) gre_prs
                   ; all_gres = foldr (\(gre1,gre2) gres -> gre1 : gre2 : gres) [] gre_prs
               }
             ; checkErr exportValid (moduleNotImported mod)
             ; warnIf (Reason Opt_WarnDodgyExports)
                      (exportValid && null gre_prs)
                      (nullModuleExport mod)
             ; traceRn (text "efa" <+> (ppr mod $$ ppr all_gres))
             ; addUsedGREs all_gres
             ; occs' <- check_occs (IEModuleContents (noLoc mod)) occs names
                      -- This check_occs not only finds conflicts
                      -- between this item and others, but also
                      -- internally within this item. That is, if
                      -- 'M.x' is in scope in several ways, we'll have
                      -- several members of mod_avails with the same
                      -- OccName.
             ; traceRn (vcat [ text "export mod" <+> ppr mod
                             , ppr new_exports ])
             ; return (L loc (IEModuleContents (L lm mod)) : ie_names,
                       occs', new_exports ++ exports) }
    -- Any other export item (entity, doc, etc.)
    exports_from_item acc@(lie_names, occs, exports) (L loc ie)
        | isDoc ie
        = do new_ie <- lookup_doc_ie ie
             return (L loc new_ie : lie_names, occs, exports)
        | otherwise
        = do (new_ie, avail) <- lookup_ie ie
             if isUnboundName (ieName new_ie)
               then return acc -- Avoid error cascade
               else do
                 occs' <- check_occs ie occs (availNames avail)
                 return (L loc new_ie : lie_names, occs', avail : exports)
    -------------
    -- Rename one (non-doc) export item and compute what it exports
    lookup_ie :: IE RdrName -> RnM (IE Name, AvailInfo)
    lookup_ie (IEVar (L l rdr))
        = do (name, avail) <- lookupGreAvailRn rdr
             return (IEVar (L l name), avail)
    lookup_ie (IEThingAbs (L l rdr))
        = do (name, avail) <- lookupGreAvailRn rdr
             return (IEThingAbs (L l name), avail)
    lookup_ie ie@(IEThingAll n)
        = do
            (n, avail, flds) <- lookup_ie_all ie n
            let name = unLoc n
            return (IEThingAll n, AvailTC name (name:avail) flds)
    lookup_ie ie@(IEThingWith l wc sub_rdrs _)
        = do
            (lname, subs, avails, flds) <- lookup_ie_with ie l sub_rdrs
            (_, all_avail, all_flds) <-
              case wc of
                NoIEWildcard -> return (lname, [], [])
                IEWildcard _ -> lookup_ie_all ie l
            let name = unLoc lname
            return (IEThingWith lname wc subs [],
                    AvailTC name (name : avails ++ all_avail)
                            (flds ++ all_flds))
    lookup_ie _ = panic "lookup_ie" -- Other cases covered earlier
    -- T(a,b,c): look up T then resolve each listed child
    lookup_ie_with :: IE RdrName -> Located RdrName -> [Located RdrName]
                   -> RnM (Located Name, [Located Name], [Name], [FieldLabel])
    lookup_ie_with ie (L l rdr) sub_rdrs
        = do name <- lookupGlobalOccRn rdr
             let gres = findChildren kids_env name
                 mchildren =
                   lookupChildren (map classifyGRE (gres ++ pat_syns)) sub_rdrs
             addUsedKids rdr gres
             if isUnboundName name
                then return (L l name, [], [name], [])
                else
                  case mchildren of
                    Nothing -> do
                       addErr (exportItemErr ie)
                       return (L l name, [], [name], [])
                    Just (non_flds, flds) -> do
                       addUsedKids rdr gres
                       return (L l name, non_flds
                              , map unLoc non_flds
                              , map unLoc flds)
    -- T(..): look up T and take all its in-scope children
    lookup_ie_all :: IE RdrName -> Located RdrName
                  -> RnM (Located Name, [Name], [FieldLabel])
    lookup_ie_all ie (L l rdr) =
          do name <- lookupGlobalOccRn rdr
             let gres = findChildren kids_env name
                 (non_flds, flds) = classifyGREs gres
             addUsedKids rdr gres
             warnDodgyExports <- woptM Opt_WarnDodgyExports
             when (null gres) $
                  if isTyConName name
                  then when warnDodgyExports $
                           addWarn (Reason Opt_WarnDodgyExports)
                                   (dodgyExportWarn name)
                  else -- This occurs when you export T(..), but
                       -- only import T abstractly, or T is a synonym.
                       addErr (exportItemErr ie)
             return (L l name, non_flds, flds)
    -------------
    -- Rename Haddock documentation export items
    lookup_doc_ie :: IE RdrName -> RnM (IE Name)
    lookup_doc_ie (IEGroup lev doc) = do rn_doc <- rnHsDoc doc
                                         return (IEGroup lev rn_doc)
    lookup_doc_ie (IEDoc doc) = do rn_doc <- rnHsDoc doc
                                   return (IEDoc rn_doc)
    lookup_doc_ie (IEDocNamed str) = return (IEDocNamed str)
    lookup_doc_ie _ = panic "lookup_doc_ie" -- Other cases covered earlier
    -- In an export item M.T(A,B,C), we want to treat the uses of
    -- A,B,C as if they were M.A, M.B, M.C
    -- Happily pickGREs does just the right thing
    addUsedKids :: RdrName -> [GlobalRdrElt] -> RnM ()
    addUsedKids parent_rdr kid_gres = addUsedGREs (pickGREs parent_rdr kid_gres)
-- | Is this export item a Haddock documentation item (as opposed to a
-- real exported entity)?
isDoc :: IE RdrName -> Bool
isDoc item = case item of
  IEDoc _      -> True
  IEDocNamed _ -> True
  IEGroup _ _  -> True
  _            -> False
-------------------------------
-- | Check a new export item's names against the occurrence map built
-- so far: warn on duplicate exports of the same Name, error when two
-- *different* Names are exported under the same OccName.
check_occs :: IE RdrName -> ExportOccMap -> [Name] -> RnM ExportOccMap
check_occs ie occs names -- 'names' are the entities specified by 'ie'
  = foldlM check occs names
  where
    check occs name
      = case lookupOccEnv occs name_occ of
          Nothing -> return (extendOccEnv occs name_occ (name, ie))
          Just (name', ie')
             | name == name' -- Duplicate export
             -- But we don't want to warn if the same thing is exported
             -- by two different module exports. See ticket #4478.
             -> do { warnIf (Reason Opt_WarnDuplicateExports)
                            (not (dupExport_ok name ie ie'))
                            (dupExportWarn name_occ ie ie')
                   ; return occs }
             | otherwise -- Same occ name but different names: an error
             -> do { global_env <- getGlobalRdrEnv ;
                     addErr (exportClashErr global_env name' name ie' ie) ;
                     return occs }
      where
        name_occ = nameOccName name
dupExport_ok :: Name -> IE RdrName -> IE RdrName -> Bool
-- The Name is exported by both IEs. Is that ok?
-- "No" iff the name is mentioned explicitly in both IEs
-- or one of the IEs mentions the name *alone*
-- "Yes" otherwise
--
-- Examples of "no": module M( f, f )
-- module M( fmap, Functor(..) )
-- module M( module Data.List, head )
--
-- Example of "yes"
-- module M( module A, module B ) where
-- import A( f )
-- import B( f )
--
-- Example of "yes" (Trac #2436)
-- module M( C(..), T(..) ) where
-- class C a where { data T a }
-- instance C Int where { data T Int = TInt }
--
-- Example of "yes" (Trac #2436)
-- module Foo ( T ) where
-- data family T a
-- module Bar ( T(..), module Foo ) where
-- import Foo
-- data instance T Int = TInt
dupExport_ok n ie1 ie2
  = not ( single ie1 || single ie2
       || (explicit_in ie1 && explicit_in ie2) )
  where
    -- Is n mentioned explicitly by this item (rather than swept in)?
    explicit_in (IEModuleContents _) = False -- module M
    explicit_in (IEThingAll r) = nameOccName n == rdrNameOcc (unLoc r) -- T(..)
    explicit_in _ = True
    -- Does this item export exactly one thing?
    single (IEVar {}) = True
    single (IEThingAbs {}) = True
    single _ = False
{-
*********************************************************
* *
\subsection{Unused names}
* *
*********************************************************
-}
-- | Emit all the "unused thing" warnings: unused imports, unused
-- top-level bindings, and missing type signatures.
reportUnusedNames :: Maybe (Located [LIE RdrName]) -- Export list
                  -> TcGblEnv -> RnM ()
reportUnusedNames _export_decls gbl_env
  = do { traceRn ((text "RUN") <+> (ppr (tcg_dus gbl_env)))
       ; warnUnusedImportDecls gbl_env
       ; warnUnusedTopBinds unused_locals
       ; warnMissingSignatures gbl_env }
  where
    used_names :: NameSet
    used_names = findUses (tcg_dus gbl_env) emptyNameSet
    -- NB: currently, if f x = g, we only treat 'g' as used if 'f' is used
    -- Hence findUses
    -- Collect the defined names from the in-scope environment
    defined_names :: [GlobalRdrElt]
    defined_names = globalRdrEnvElts (tcg_rdr_env gbl_env)
    -- Note that defined_and_used, defined_but_not_used
    -- are both [GRE]; that's why we need defined_and_used
    -- rather than just used_names
    _defined_and_used, defined_but_not_used :: [GlobalRdrElt]
    (_defined_and_used, defined_but_not_used)
        = partition (gre_is_used used_names) defined_names
    kids_env = mkChildEnv defined_names
    -- This is done in mkExports too; duplicated work
    gre_is_used :: NameSet -> GlobalRdrElt -> Bool
    gre_is_used used_names (GRE {gre_name = name})
        = name `elemNameSet` used_names
       || any (\ gre -> gre_name gre `elemNameSet` used_names) (findChildren kids_env name)
                -- A use of C implies a use of T,
                -- if C was brought into scope by T(..) or T(C)
    -- Filter out the ones that are
    -- (a) defined in this module, and
    -- (b) not defined by a 'deriving' clause
    -- The latter have an Internal Name, so we can filter them out easily
    unused_locals :: [GlobalRdrElt]
    unused_locals = filter is_unused_local defined_but_not_used
    is_unused_local :: GlobalRdrElt -> Bool
    is_unused_local gre = isLocalGRE gre && isExternalName (gre_name gre)
{-
*********************************************************
* *
\subsection{Unused imports}
* *
*********************************************************
This code finds which import declarations are unused. The
specification and implementation notes are here:
http://ghc.haskell.org/trac/ghc/wiki/Commentary/Compiler/UnusedImports
-}
type ImportDeclUsage
   = ( LImportDecl Name -- The import declaration
     , [AvailInfo] -- What *is* used (normalised)
     , [Name] ) -- What is imported but *not* used
-- | Warn about unused import declarations and (if requested) dump the
-- minimal import lists computed from actual usage.
warnUnusedImportDecls :: TcGblEnv -> RnM ()
warnUnusedImportDecls gbl_env
  = do { uses <- readMutVar (tcg_used_gres gbl_env)
       ; let user_imports = filterOut (ideclImplicit . unLoc) (tcg_rn_imports gbl_env)
             -- This whole function deals only with *user* imports
             -- both for warning about unnecessary ones, and for
             -- deciding the minimal ones
             rdr_env = tcg_rdr_env gbl_env
             fld_env = mkFieldEnv rdr_env
       ; let usage :: [ImportDeclUsage]
             usage = findImportUsage user_imports uses
       ; traceRn (vcat [ text "Uses:" <+> ppr uses
                       , text "Import usage" <+> ppr usage])
       ; whenWOptM Opt_WarnUnusedImports $
         mapM_ (warnUnusedImport Opt_WarnUnusedImports fld_env) usage
       ; whenGOptM Opt_D_dump_minimal_imports $
         printMinimalImports usage }
-- | Warn the user about top level binders that lack type signatures.
warnMissingSignatures :: TcGblEnv -> RnM ()
warnMissingSignatures gbl_env
  = do { let exports = availsToNameSet (tcg_exports gbl_env)
             sig_ns  = tcg_sigs gbl_env
               -- We use sig_ns to exclude top-level bindings that are generated by GHC
             binds    = collectHsBindsBinders $ tcg_binds gbl_env
             pat_syns = tcg_patsyns gbl_env
         -- Warn about missing signatures
         -- Do this only when we have a type to offer
       ; warn_missing_sigs  <- woptM Opt_WarnMissingSignatures
       ; warn_only_exported <- woptM Opt_WarnMissingExportedSignatures
       ; warn_pat_syns      <- woptM Opt_WarnMissingPatternSynonymSignatures
         -- Pick the most specific enabled flag to attribute the warning to
       ; let add_sig_warns
               | warn_only_exported = add_warns Opt_WarnMissingExportedSignatures
               | warn_missing_sigs  = add_warns Opt_WarnMissingSignatures
               | warn_pat_syns      = add_warns Opt_WarnMissingPatternSynonymSignatures
               | otherwise          = return ()
             add_warns flag
                = when warn_pat_syns
                       (mapM_ add_pat_syn_warn pat_syns) >>
                  when (warn_missing_sigs || warn_only_exported)
                       (mapM_ add_bind_warn binds)
                where
                  add_pat_syn_warn p
                    = add_warn (patSynName p) (pprPatSynType p)
                  add_bind_warn id
                    = do { env <- tcInitTidyEnv -- Why not use emptyTidyEnv?
                         ; let name    = idName id
                               (_, ty) = tidyOpenType env (idType id)
                               ty_msg  = ppr ty
                         ; add_warn name ty_msg }
                  add_warn name ty_msg
                    = when (name `elemNameSet` sig_ns && export_check name)
                           (addWarnAt (Reason flag) (getSrcSpan name)
                                      (get_msg name ty_msg))
                  export_check name
                    = not warn_only_exported || name `elemNameSet` exports
                  get_msg name ty_msg
                    = sep [ text "Top-level binding with no type signature:",
                            nest 2 $ pprPrefixName name <+> dcolon <+> ty_msg ]
       ; add_sig_warns }
{-
Note [The ImportMap]
~~~~~~~~~~~~~~~~~~~~
The ImportMap is a short-lived intermediate data structure that records, for
each import declaration, what stuff brought into scope by that
declaration is actually used in the module.
The SrcLoc is the location of the END of a particular 'import'
declaration. Why *END*? Because we don't want to get confused
by the implicit Prelude import. Consider (Trac #7476) the module
import Foo( foo )
main = print foo
There is an implicit 'import Prelude(print)', and it gets a SrcSpan
of line 1:1 (just the point, not a span). If we use the *START* of
the SrcSpan to identify the import decl, we'll confuse the implicit
import Prelude with the explicit 'import Foo'. So we use the END.
It's just a cheap hack; we could equally well use the Span too.
The AvailInfos are the things imported from that decl (just a list,
not normalised).
-}
type ImportMap = Map SrcLoc [AvailInfo] -- See [The ImportMap]
-- | For each user import declaration, work out which of its imports
-- are actually used and which explicitly-listed items are unused.
findImportUsage :: [LImportDecl Name]
                -> [GlobalRdrElt]
                -> [ImportDeclUsage]
findImportUsage imports used_gres
  = map unused_decl imports
  where
    import_usage :: ImportMap
    import_usage
      = foldr extendImportMap Map.empty used_gres
    unused_decl decl@(L loc (ImportDecl { ideclHiding = imps }))
      = (decl, nubAvails used_avails, nameSetElemsStable unused_imps)
      where
        used_avails = Map.lookup (srcSpanEnd loc) import_usage `orElse` []
                      -- srcSpanEnd: see Note [The ImportMap]
        used_names = availsToNameSetWithSelectors used_avails
        used_parents = mkNameSet [n | AvailTC n _ _ <- used_avails]
        unused_imps -- Not trivial; see eg Trac #7454
          = case imps of
              Just (False, L _ imp_ies) ->
                  foldr (add_unused . unLoc) emptyNameSet imp_ies
              _other -> emptyNameSet -- No explicit import list => no unused-name list
        -- Accumulate the unused names from one explicit import item
        add_unused :: IE Name -> NameSet -> NameSet
        add_unused (IEVar (L _ n)) acc = add_unused_name n acc
        add_unused (IEThingAbs (L _ n)) acc = add_unused_name n acc
        add_unused (IEThingAll (L _ n)) acc = add_unused_all n acc
        add_unused (IEThingWith (L _ p) wc ns fs) acc =
          add_wc_all (add_unused_with p xs acc)
          where xs = map unLoc ns ++ map (flSelector . unLoc) fs
                add_wc_all = case wc of
                            NoIEWildcard -> id
                            IEWildcard _ -> add_unused_all p
        add_unused _ acc = acc
        add_unused_name n acc
          | n `elemNameSet` used_names = acc
          | otherwise = acc `extendNameSet` n
        add_unused_all n acc
          | n `elemNameSet` used_names = acc
          | n `elemNameSet` used_parents = acc
          | otherwise = acc `extendNameSet` n
        add_unused_with p ns acc
          | all (`elemNameSet` acc1) ns = add_unused_name p acc1
          | otherwise = acc1
          where
            acc1 = foldr add_unused_name acc ns
       -- If you use 'signum' from Num, then the user may well have
       -- imported Num(signum). We don't want to complain that
       -- Num is not itself mentioned. Hence the two cases in add_unused_with.
extendImportMap :: GlobalRdrElt -> ImportMap -> ImportMap
-- For each of a list of used GREs, find all the import decls that brought
-- it into scope; choose one of them (bestImport), and record
-- the RdrName in that import decl's entry in the ImportMap
extendImportMap gre imp_map
  = add_imp gre (bestImport (gre_imp gre)) imp_map
  where
    add_imp :: GlobalRdrElt -> ImportSpec -> ImportMap -> ImportMap
    add_imp gre (ImpSpec { is_decl = imp_decl_spec }) imp_map
      = Map.insertWith add decl_loc [avail] imp_map
      where
        add _ avails = avail : avails -- add is really just a specialised (++)
        decl_loc = srcSpanEnd (is_dloc imp_decl_spec)
        -- For srcSpanEnd see Note [The ImportMap]
        avail = availFromGRE gre
-- | Emit the warning for one import declaration with unused imports.
-- The 'fld_env' maps a field selector 'Name' back to its label and
-- parent for prettier messages.
warnUnusedImport :: WarningFlag -> NameEnv (FieldLabelString, Name)
                 -> ImportDeclUsage -> RnM ()
warnUnusedImport flag fld_env (L loc decl, used, unused)
  | Just (False,L _ []) <- ideclHiding decl
                = return () -- Do not warn for 'import M()'
  | Just (True, L _ hides) <- ideclHiding decl
  , not (null hides)
  , pRELUDE_NAME == unLoc (ideclName decl)
                = return () -- Note [Do not warn about Prelude hiding]
  | null used = addWarnAt (Reason flag) loc msg1 -- Nothing used; drop entire decl
  | null unused = return () -- Everything imported is used; nop
  | otherwise = addWarnAt (Reason flag) loc msg2 -- Some imports are unused
  where
    msg1 = vcat [pp_herald <+> quotes pp_mod <+> pp_not_used,
                 nest 2 (text "except perhaps to import instances from"
                         <+> quotes pp_mod),
                 text "To import instances alone, use:"
                 <+> text "import" <+> pp_mod <> parens Outputable.empty ]
    msg2 = sep [pp_herald <+> quotes sort_unused,
                text "from module" <+> quotes pp_mod <+> pp_not_used]
    pp_herald = text "The" <+> pp_qual <+> text "import of"
    pp_qual
      | ideclQualified decl = text "qualified"
      | otherwise = Outputable.empty
    pp_mod = ppr (unLoc (ideclName decl))
    pp_not_used = text "is redundant"
    -- Show field selectors as Parent(field) rather than the selector Name
    ppr_possible_field n = case lookupNameEnv fld_env n of
                               Just (fld, p) -> ppr p <> parens (ppr fld)
                               Nothing -> ppr n
    -- Print unused names in a deterministic (lexicographic) order
    sort_unused = pprWithCommas ppr_possible_field $
                  sortBy (comparing nameOccName) unused
{-
Note [Do not warn about Prelude hiding]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We do not warn about
import Prelude hiding( x, y )
because even if nothing else from Prelude is used, it may be essential to hide
x,y to avoid name-shadowing warnings. Example (Trac #9061)
import Prelude hiding( log )
f x = log where log = ()
Note [Printing minimal imports]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
To print the minimal imports we walk over the user-supplied import
decls, and simply trim their import lists. NB that
* We do *not* change the 'qualified' or 'as' parts!
* We do not discard a decl altogether; we might need instances
from it. Instead we just trim to an empty import list
-}
printMinimalImports :: [ImportDeclUsage] -> RnM ()
-- See Note [Printing minimal imports]
printMinimalImports imports_w_usage
  = do { imports' <- mapM mk_minimal imports_w_usage
       ; this_mod <- getModule
       ; dflags <- getDynFlags
       ; liftIO $
         do { h <- openFile (mkFilename dflags this_mod) WriteMode
            ; printForUser dflags h neverQualify (vcat (map ppr imports')) }
              -- The neverQualify is important. We are printing Names
              -- but they are in the context of an 'import' decl, and
              -- we never qualify things inside there
              -- E.g. import Blag( f, b )
              -- not import Blag( Blag.f, Blag.g )!
       }
  where
    -- <module>.imports, placed in the dump dir when one is set
    mkFilename dflags this_mod
      | Just d <- dumpDir dflags = d </> basefn
      | otherwise = basefn
      where
        basefn = moduleNameString (moduleName this_mod) ++ ".imports"
    -- Trim one import decl's list down to what is actually used
    mk_minimal (L l decl, used, unused)
      | null unused
      , Just (False, _) <- ideclHiding decl
      = return (L l decl)
      | otherwise
      = do { let ImportDecl { ideclName = L _ mod_name
                            , ideclSource = is_boot
                            , ideclPkgQual = mb_pkg } = decl
           ; iface <- loadSrcInterface doc mod_name is_boot (fmap sl_fs mb_pkg)
           ; let lies = map (L l) (concatMap (to_ie iface) used)
           ; return (L l (decl { ideclHiding = Just (False, L l lies) })) }
      where
        doc = text "Compute minimal imports for" <+> ppr decl
    to_ie :: ModIface -> AvailInfo -> [IE Name]
    -- The main trick here is that if we're importing all the constructors
    -- we want to say "T(..)", but if we're importing only a subset we want
    -- to say "T(A,B,C)". So we have to find out what the module exports.
    to_ie _ (Avail _ n)
       = [IEVar (noLoc n)]
    to_ie _ (AvailTC n [m] [])
       | n==m = [IEThingAbs (noLoc n)]
    to_ie iface (AvailTC n ns fs)
      = case [(xs,gs) | AvailTC x xs gs <- mi_exports iface
                 , x == n
                 , x `elem` xs -- Note [Partial export]
                 ] of
           [xs] | all_used xs -> [IEThingAll (noLoc n)]
                | otherwise -> [IEThingWith (noLoc n) NoIEWildcard
                                 (map noLoc (filter (/= n) ns))
                                 (map noLoc fs)]
                                          -- Note [Overloaded field import]
           _other | all_non_overloaded fs
                           -> map (IEVar . noLoc) $ ns ++ map flSelector fs
                  | otherwise -> [IEThingWith (noLoc n) NoIEWildcard
                                   (map noLoc (filter (/= n) ns)) (map noLoc fs)]
        where
          fld_lbls = map flLabel fs
          all_used (avail_occs, avail_flds)
              = all (`elem` ns) avail_occs
             && all (`elem` fld_lbls) (map flLabel avail_flds)
          all_non_overloaded = all (not . flIsOverloaded)
{-
Note [Partial export]
~~~~~~~~~~~~~~~~~~~~~
Suppose we have
module A( op ) where
class C a where
op :: a -> a
module B where
import A
f = ..op...
Then the minimal import for module B is
import A( op )
not
import A( C( op ) )
which we would usually generate if C was exported from B. Hence
the (x `elem` xs) test when deciding what to generate.
Note [Overloaded field import]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
On the other hand, if we have
{-# LANGUAGE DuplicateRecordFields #-}
module A where
data T = MkT { foo :: Int }
module B where
import A
f = ...foo...
then the minimal import for module B must be
import A ( T(foo) )
because when DuplicateRecordFields is enabled, field selectors are
not in scope without their enclosing datatype.
************************************************************************
* *
\subsection{Errors}
* *
************************************************************************
-}
-- | Error: an import item was written with a qualified name.
qualImportItemErr :: RdrName -> SDoc
qualImportItemErr rdr
  = hang (text "Illegal qualified name in import item:")
       2 (ppr rdr)
-- | Generic "module M does not export X" message.
badImportItemErrStd :: ModIface -> ImpDeclSpec -> IE RdrName -> SDoc
badImportItemErrStd iface decl_spec ie
  = sep [text "Module", quotes (ppr (is_mod decl_spec)), source_import,
         text "does not export", quotes (ppr ie)]
  where
    source_import | mi_boot iface = text "(hi-boot interface)"
                  | otherwise = Outputable.empty
-- | More helpful variant when the bad item is actually a data
-- constructor: suggest importing it via its parent type.
badImportItemErrDataCon :: OccName -> ModIface -> ImpDeclSpec -> IE RdrName -> SDoc
badImportItemErrDataCon dataType_occ iface decl_spec ie
  = vcat [ text "In module"
             <+> quotes (ppr (is_mod decl_spec))
             <+> source_import <> colon
         , nest 2 $ quotes datacon
             <+> text "is a data constructor of"
             <+> quotes dataType
         , text "To import it use"
         , nest 2 $ quotes (text "import")
             <+> ppr (is_mod decl_spec)
             <> parens_sp (dataType <> parens_sp datacon)
         , text "or"
         , nest 2 $ quotes (text "import")
             <+> ppr (is_mod decl_spec)
             <> parens_sp (dataType <> text "(..)")
         ]
  where
    datacon_occ = rdrNameOcc $ ieName ie
    datacon = parenSymOcc datacon_occ (ppr datacon_occ)
    dataType = parenSymOcc dataType_occ (ppr dataType_occ)
    source_import | mi_boot iface = text "(hi-boot interface)"
                  | otherwise = Outputable.empty
    parens_sp d = parens (space <> d <> space) -- T( f,g )
-- | Dispatch: use the data-constructor message when the offending item
-- names a constructor of some exported type, else the standard one.
badImportItemErr :: ModIface -> ImpDeclSpec -> IE RdrName -> [AvailInfo] -> SDoc
badImportItemErr iface decl_spec ie avails
  = case find checkIfDataCon avails of
      Just con -> badImportItemErrDataCon (availOccName con) iface decl_spec ie
      Nothing -> badImportItemErrStd iface decl_spec ie
  where
    checkIfDataCon (AvailTC _ ns _) =
      case find (\n -> importedFS == nameOccNameFS n) ns of
        Just n -> isDataConName n
        Nothing -> False
    checkIfDataCon _ = False
    availOccName = nameOccName . availName
    nameOccNameFS = occNameFS . nameOccName
    importedFS = occNameFS . rdrNameOcc $ ieName ie
illegalImportItemErr :: SDoc
illegalImportItemErr = text "Illegal import item"
dodgyImportWarn :: RdrName -> SDoc
dodgyImportWarn item = dodgyMsg (text "import") item
dodgyExportWarn :: Name -> SDoc
dodgyExportWarn item = dodgyMsg (text "export") item
-- | Shared body for the "T(..) but T has no children" warning.
dodgyMsg :: (OutputableBndr n, HasOccName n) => SDoc -> n -> SDoc
dodgyMsg kind tc
  = sep [ text "The" <+> kind <+> ptext (sLit "item")
             <+> quotes (ppr (IEThingAll (noLoc tc)))
             <+> text "suggests that",
          quotes (ppr tc) <+> text "has (in-scope) constructors or class methods,",
          text "but it has none" ]
-- | Error: an export item lists children that are not in scope.
exportItemErr :: IE RdrName -> SDoc
exportItemErr export_item
  = sep [ text "The export item" <+> quotes (ppr export_item),
          text "attempts to export constructors or class methods that are not visible here" ]
-- | Error: two export items export different things under one OccName.
-- The clashing pair is ordered by source location for stable output.
exportClashErr :: GlobalRdrEnv -> Name -> Name -> IE RdrName -> IE RdrName
               -> MsgDoc
exportClashErr global_env name1 name2 ie1 ie2
  = vcat [ text "Conflicting exports for" <+> quotes (ppr occ) <> colon
         , ppr_export ie1' name1'
         , ppr_export ie2' name2' ]
  where
    occ = nameOccName name1
    ppr_export ie name = nest 3 (hang (quotes (ppr ie) <+> text "exports" <+>
                                       quotes (ppr name))
                                    2 (pprNameProvenance (get_gre name)))
    -- get_gre finds a GRE for the Name, so that we can show its provenance
    get_gre name
        = case lookupGRE_Name global_env name of
             Just gre -> gre
             Nothing -> pprPanic "exportClashErr" (ppr name)
    get_loc name = greSrcSpan (get_gre name)
    (name1', ie1', name2', ie2') = if get_loc name1 < get_loc name2
                                   then (name1, ie1, name2, ie2)
                                   else (name2, ie2, name1, ie1)
-- | Error: the same name is declared more than once.  All declaration
-- sites are listed, and the error is attached to the last of them.
addDupDeclErr :: [GlobalRdrElt] -> TcRn ()
addDupDeclErr [] = panic "addDupDeclErr: empty list"
addDupDeclErr gres@(gre : _)
  = addErrAt (getSrcSpan (last sorted_names)) $
    -- Report the error at the later location
    vcat [text "Multiple declarations of" <+>
             quotes (ppr (nameOccName name)),
          -- NB. print the OccName, not the Name, because the
          -- latter might not be in scope in the RdrEnv and so will
          -- be printed qualified.
          text "Declared at:" <+>
             vcat (map (ppr . nameSrcLoc) sorted_names)]
  where
    name = gre_name gre
    sorted_names = sortWith nameSrcLoc (map gre_name gres)
-- | Warning: one occurrence name exported by two export items.
dupExportWarn :: OccName -> IE RdrName -> IE RdrName -> SDoc
dupExportWarn occ_name ie1 ie2
  = hsep [quotes (ppr occ_name),
          text "is exported by", quotes (ppr ie1),
          text "and", quotes (ppr ie2)]
-- | Warning: 'module M' appears twice in the export list.
dupModuleExport :: ModuleName -> SDoc
dupModuleExport mod
  = hsep [text "Duplicate",
          quotes (text "Module" <+> ppr mod),
          text "in export list"]
-- | Error: 'module M' exported but M was never imported.
moduleNotImported :: ModuleName -> SDoc
moduleNotImported mod
  = text "The export item `module" <+> ppr mod <>
    text "' is not imported"
-- | Warning: 'module M' exports nothing.
nullModuleExport :: ModuleName -> SDoc
nullModuleExport mod
  = text "The export item `module" <+> ppr mod <> ptext (sLit "' exports nothing")
missingImportListWarn :: ModuleName -> SDoc
missingImportListWarn mod
  = text "The module" <+> quotes (ppr mod) <+> ptext (sLit "does not have an explicit import list")
missingImportListItem :: IE RdrName -> SDoc
missingImportListItem ie
  = text "The import item" <+> quotes (ppr ie) <+> ptext (sLit "does not have an explicit import list")
-- | Render a module-level WARNING/DEPRECATED pragma message.
moduleWarn :: ModuleName -> WarningTxt -> SDoc
moduleWarn mod (WarningTxt _ txt)
  = sep [ text "Module" <+> quotes (ppr mod) <> ptext (sLit ":"),
          nest 2 (vcat (map (ppr . sl_fs . unLoc) txt)) ]
moduleWarn mod (DeprecatedTxt _ txt)
  = sep [ text "Module" <+> quotes (ppr mod)
            <+> text "is deprecated:",
          nest 2 (vcat (map (ppr . sl_fs . unLoc) txt)) ]
packageImportErr :: SDoc
packageImportErr
  = text "Package-qualified imports are not enabled; use PackageImports"
-- This data decl will parse OK
--      data T = a Int
-- treating "a" as the constructor.
-- It is really hard to make the parser spot this malformation.
-- So the renamer has to check that the constructor is legal
--
-- We can get an operator as the constructor, even in the prefix form:
--      data T = :% Int Int
-- from interface files, which always print in prefix form
checkConName :: RdrName -> TcRn ()
checkConName name = checkErr (isRdrDataCon name) (badDataCon name)
-- | Error: a constructor position holds a non-constructor name.
badDataCon :: RdrName -> SDoc
badDataCon name
   = hsep [text "Illegal data constructor name", quotes (ppr name)]
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Main where
import Ivory.Language
import Ivory.Tower
import Ivory.Tower.Config
import Tower.Mini
-- | A one-component Tower program: a 1000-microsecond periodic handler
-- emits a Uint8 state value on one channel; a second monitor stores the
-- last received value and signals a third channel, whose handler prints
-- the stored value via the imported C printf.
simpleTower :: Component e
simpleTower = component "simple" $ tower $ do
  towerModule towerDepModule
  towerDepends towerDepModule
  (c1in, c1out) <- channel
  (chtx, chrx) <- channel
  per <- period (Microseconds 1000)
  monitor "periodicM" $ do
    s <- state "local_st"
    handler per "tickh" $ do
      e <- emitter c1in 1
      callback $ \_ -> do
        -- Type annotation pins the state to a stored Uint8
        emit e (constRef (s :: Ref 'Global ('Stored Uint8)))
  monitor "withsharedM" $ do
    s <- state "last_m2_chan1_message"
    handler c1out "fromActiveh" $ do
      e <- emitter chtx 1
      callback $ \m -> do
        refCopy s m
        emitV e true
    handler chrx "readStateh" $ do
      callback $ \_m -> do
        s' <- deref s
        call_ printf "rsh: %u\n" s'
--------------------------------------------------------------------------------
-- | Compile the example with the Tower-Mini backend, reading the
-- backend options from the command-line config.
main :: IO ()
main = compileTowerMini id p [simpleTower]
  where
  p topts = getConfig topts $ miniConfigParser defaultMiniConfig
--------------------------------------------------------------------------------
-- Ivory foreign import of C's printf (from stdio.h), specialised to a
-- format string plus one uint8_t argument, used by simpleTower above.
[ivory|
import (stdio.h, printf) void printf(string x, uint8_t y)
|]
-- | Ivory package containing the foreign 'printf' declaration.
-- NOTE(review): registered via both 'towerModule' and 'towerDepends'
-- in 'simpleTower' so the generated C picks up the declaration.
towerDepModule :: Module
towerDepModule = package "towerDeps" $ do
  incl printf
| GaloisInc/tower | tower-mini/test/Simple.hs | bsd-3-clause | 1,663 | 0 | 25 | 332 | 398 | 194 | 204 | 47 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Snap.Snaplet.Muttonchops where
import Data.ByteString (ByteString)
import Data.List
import Data.Map (Map)
import Data.String.Combinators ((<>))
import Data.Text (Text)
import Snap
import Snap.Snaplet
import System.Directory
import System.FilePath
import Text.Templating.Muttonchops
import qualified Data.Map as Map
import qualified Data.Text.IO as T
import qualified Text.Templating.Muttonchops as M
desc :: Text
desc = "Mustache Templating Engine Snaplet as bindings to Crustache"
data Muttonchops = Muttonchops {
rawTemplates :: Map FilePath Text
}
class HasMuttonchops app where
getMuttonchops :: app -> Muttonchops
muttonchopsInit :: FilePath -> SnapletInit app Muttonchops
muttonchopsInit path = makeSnaplet "muttonchops" desc Nothing $ do
snapletPath <- getSnapletFilePath
files <- liftIO $ filter (not . ("." `isPrefixOf`)) <$>
getDirectoryContents (snapletPath </> path)
withTemplates <- liftIO $ forM files $ \p -> do
let fullpath = snapletPath </> path </> p
f <- T.readFile fullpath
-- see https://github.com/ryb/muttonchops/issues/1
return (p, f <> "{{workaroundhack}}")
return $ Muttonchops $ Map.fromList withTemplates
-- | Render the named raw template with the given variable bindings and
-- write the result to the response, after setting the supplied MIME
-- content type.  Looks the template up by file name in the snaplet's
-- in-memory template map.
-- NOTE(review): a missing template calls 'fail' in the 'Handler' monad
-- — confirm this produces the desired HTTP error behavior.
renderAs :: (HasMuttonchops b, HasMuttonchops v) => ByteString -> FilePath
         -> [(Text, Text)] -> Handler b v ()
renderAs mimeType p bindings = do
  Muttonchops rawMap <- gets getMuttonchops
  case Map.lookup p rawMap of
    Nothing -> fail "template not found"
    Just rawTemplate -> do
      modifyResponse (setContentType mimeType)
      writeText $ M.render rawTemplate bindings
| ryb/snaplet-muttonchops | Snap/Snaplet/Muttonchops.hs | bsd-3-clause | 1,663 | 0 | 17 | 330 | 453 | 241 | 212 | 40 | 2 |
{-# LANGUAGE BangPatterns #-}
-----------------------------------------------------------------------------
-- Module : Math.Statistics
-- Copyright : (c) 2008 Marshall Beddoe
-- License : BSD3
--
-- Maintainer : bash@chodify.net
-- Stability : experimental
-- Portability : portable
--
-- Description :
-- A collection of commonly used statistical functions.
-----------------------------------------------------------------------------
module Numeric.Statistics ( -- * Different mean variants
mean
, meanWgh
, average
, harmean
, geomean
-- * Variance, standard deviation and moments
, stddev
, stddevp
, var
, pvar
, centralMoment
, devsq
-- * Skewness and kurtosis
, skew
, pearsonSkew1
, pearsonSkew2
, kurt
-- * Median, mode and quantiles
, median
, modes
, mode
, iqr
, quantile
, quantileAsc
-- * Other parameters
, range
, avgdev
-- * Covariance and corelation
, covar
, covMatrix
, pearson
, correl
-- * Simple regressions
, linreg
) where
import Data.List
import Data.Ord (comparing)
-- | Numerically stable arithmetic mean: a single left fold that keeps a
-- running mean, avoiding the large intermediate sum of @sum xs / n@.
mean :: Fractional a => [a] -> a
mean vs = fst (foldl' step (0, 0) vs)
  where
    -- Incremental update: new mean = old mean + (v - old mean) / new count.
    step (acc, cnt) v =
      let cnt' = cnt + 1
          acc' = acc + (v - acc) / cnt'
      in acc' `seq` cnt' `seq` (acc', cnt')
-- | Weighted mean.  Each pair is @(value, weight)@; the result is the
-- weight-scaled sum of values divided by the total weight.
meanWgh :: Floating a => [(a,a)] -> a
meanWgh ps = weightedSum / totalWeight
  where
    weightedSum = sum (map (uncurry (*)) ps)
    totalWeight = sum (map snd ps)
-- | Alias for 'mean'.
average :: Fractional a => [a] -> a
average xs = mean xs
-- | Harmonic mean: the element count divided by the sum of reciprocals.
harmean :: (Fractional a) => [a] -> a
harmean xs = count / sum (map (1 /) xs)
  where
    count = fromIntegral (length xs)
-- | Geometric mean: the n-th root of the product of the elements.
geomean :: (Floating a) => [a] -> a
geomean xs = total ** (1 / fromIntegral (length xs))
  where
    total = foldr1 (*) xs
-- | Median of an (unsorted) list.  For an odd number of elements this
-- is the middle element of the sorted list; for an even number it is
-- the 'mean' of the two middle elements.  Errors on an empty list
-- (via the out-of-range index).
--
-- NOTE(review): the original ended with an @| even n@ guard, which the
-- compiler cannot prove exhaustive; @otherwise@ makes totality explicit
-- without changing behavior.
median :: (Fractional a, Ord a) => [a] -> a
median x
  | odd n     = sorted !! (n `div` 2)
  | otherwise = mean (take 2 (drop i sorted))
  where
    i      = (n `div` 2) - 1
    sorted = sort x
    n      = length x
-- | All distinct values paired with their occurrence counts, sorted by
-- count in descending order.
modes :: (Ord a) => [a] -> [(Int, a)]
modes xs = sortBy (comparing (negate . fst)) counted
  where
    counted = [ (length run, head run) | run <- group (sort xs) ]
-- | The most frequent value, if any value occurs more than once;
-- otherwise 'Nothing'.
mode :: (Ord a) => [a] -> Maybe a
mode xs =
  case [ v | (c, v) <- modes xs, c > 1 ] of
    []      -> Nothing
    (v : _) -> Just v
-- | The r-th central moment: the average of @(x - mean)^r@.
-- The first central moment is zero by definition.
centralMoment :: (Fractional b, Integral t) => [b] -> t -> b
centralMoment _  1 = 0
centralMoment xs r = total / count
  where
    mu    = mean xs
    total = sum [ (x - mu) ^ r | x <- xs ]
    count = fromIntegral (length xs)
-- | Range: difference between the largest and smallest elements.
range :: (Num a, Ord a) => [a] -> a
range = (-) <$> maximum <*> minimum
-- | Average absolute deviation from the mean.
avgdev :: (Floating a) => [a] -> a
avgdev xs = mean [ abs (x - mu) | x <- xs ]
  where
    mu = mean xs
-- | Unbiased sample standard deviation: square root of 'var'.
stddev :: (Floating a) => [a] -> a
stddev = sqrt . var
-- | Population standard deviation: square root of 'pvar'.
stddevp :: (Floating a) => [a] -> a
stddevp = sqrt . pvar
-- | Population variance: the second central moment.
pvar :: (Fractional a) => [a] -> a
pvar = (`centralMoment` 2)
-- | Unbiased sample variance, computed in a single pass with Welford's
-- online algorithm (running mean plus running sum of squared deltas),
-- then divided by @n - 1@ (Bessel's correction).
var :: (Fractional b) => [b] -> b
var samples = sumSq / fromIntegral (length samples - 1)
  where
    sumSq = go 0 (0 :: Int) 0 samples
    -- go mean count acc rest: fold the remaining samples, updating the
    -- running mean and accumulating delta * (v - newMean).
    go _ _ acc []       = acc
    go m k acc (v : vs) = go m' (k + 1) (acc + d * (v - m')) vs
      where
        d  = v - m
        m' = m + d / fromIntegral (k + 1)
-- | Middle portion of the list with the first and last quartile-sized
-- slices removed.  NOTE(review): assumes the input is already sorted —
-- the function never sorts, it only slices by position.
iqr :: [a] -> [a]
iqr xs = take keep (drop q xs)
  where
    n    = length xs
    q    = (n + 1) `div` 4
    keep = n - 2 * q
-- | Excess kurtosis: fourth central moment over the squared second
-- central moment, minus 3 (so a normal distribution scores 0).
kurt :: (Floating b) => [b] -> b
kurt xs = m4 / (m2 * m2) - 3
  where
    m4 = centralMoment xs 4
    m2 = centralMoment xs 2
-- | Quantile /q/ of an unsorted list: sorts first, then delegates to
-- 'quantileAsc'.  The quantile /q/ of /N/ points is the point whose
-- (zero-based) sorted index is closest to /q(N-1)/.
quantile :: (Fractional b, Ord b) => Double -> [b] -> b
quantile q xs = quantileAsc q (sort xs)
-- | As 'quantile', but for data already sorted in ascending order.
-- Errors on an empty list or when /q/ lies outside @[0, 1]@.
quantileAsc :: (Fractional b, Ord b) => Double -> [b] -> b
quantileAsc _ [] = error "quantile on empty list"
quantileAsc q xs
  | q < 0 || q > 1 = error "quantile out of range"
  | otherwise      = xs !! idx
  where
    n   = length xs
    idx = checked (round (q * (fromIntegral n - 1)))
    -- Guard the rounded index before it is used for indexing.
    checked i
      | i < 0     = error "Quantile index too small"
      | i >= n    = error "Quantile index too large"
      | otherwise = i
-- | Skewness: third central moment normalised by the second central
-- moment raised to the 3/2 power.
skew :: (Floating b) => [b] -> b
skew xs = m3 / m2 ** (3 / 2)
  where
    m3 = centralMoment xs 3
    m2 = centralMoment xs 2
-- | First Pearson (mode) skewness coefficient:
-- @3 * (mean - mode) / stddev@.  Errors on input with no mode
-- (inherited from the partial 'head' on 'modes').
pearsonSkew1 :: (Ord a, Floating a) => [a] -> a
pearsonSkew1 xs = 3 * (mean xs - modalValue) / stddev xs
  where
    modalValue = snd (head (modes xs))
-- | Second Pearson (median) skewness coefficient:
-- @3 * (mean - median) / stddev@.
pearsonSkew2 :: (Ord a, Floating a) => [a] -> a
pearsonSkew2 xs = 3 * centered / stddev xs
  where
    centered = mean xs - median xs
-- | Sample covariance of two equally long lists, with Bessel's
-- correction (divides by @n - 1@, where @n@ is the length of the
-- first list).
covar :: (Floating a) => [a] -> [a] -> a
covar xs ys = total / (n - 1)
  where
    n     = fromIntegral (length xs)
    mx    = mean xs
    my    = mean ys
    total = sum (zipWith (\x y -> (x - mx) * (y - my)) xs ys)
-- | Covariance matrix: pairwise 'covar' of every row against every
-- row, arranged as a square list of lists.
covMatrix :: (Floating a) => [[a]] -> [[a]]
covMatrix xs = chunk (length xs) [ covar a b | a <- xs, b <- xs ]
  where
    -- Split a flat list into rows of n elements each.
    chunk n = unfoldr (\ys -> if null ys then Nothing else Just (splitAt n ys))
-- | Pearson's product-moment correlation coefficient:
-- covariance scaled by the product of the standard deviations.
pearson :: (Floating a) => [a] -> [a] -> a
pearson xs ys = covar xs ys / denom
  where
    denom = stddev xs * stddev ys
-- | Alias for 'pearson'.
correl :: (Floating a) => [a] -> [a] -> a
correl xs ys = pearson xs ys
-- | Least-squares linear regression of /y/ against /x/ for a list of
-- (/x/, /y/) points.  Returns @(b0, b1, r)@ where the fitted line is
-- @y = b0 + b1 * x@ and @r@ is the Pearson correlation coefficient.
linreg :: (Floating b) => [(b, b)] -> (b, b, b)
linreg pts = (intercept, slope, r)
  where
    xs  = map fst pts
    ys  = map snd pts
    n   = fromIntegral (length pts)
    -- Standard power sums of the classic closed-form solution.
    sX  = sum xs
    sY  = sum ys
    sXX = sum (map (^ 2) xs)
    sXY = sum (map (uncurry (*)) pts)
    sYY = sum (map (^ 2) ys)
    intercept = (sY - slope * sX) / n
    slope     = (n * sXY - sX * sY) / (n * sXX - sX * sX)
    r         = (n * sXY - sX * sY)
              / sqrt ((n * sXX - sX ^ 2) * (n * sYY - sY ^ 2))
-- | Sum of squared deviations from the sample mean.
devsq :: (Floating a) => [a] -> a
devsq xs = sum [ (x - mu) ** 2 | x <- xs ]
  where
    mu = mean xs
| haas/hstats | src/Numeric/Statistics.hs | bsd-3-clause | 7,640 | 0 | 16 | 2,821 | 2,573 | 1,380 | 1,193 | 133 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Main
( main
) where
import qualified Control.Monad.Reader as R
import qualified Data.Text as Text
import Control.Monad.IO.Class (liftIO)
import Data.Text (Text)
import Drive
import Drive.Describe
import Drive.Slack
-- | Fetch all Slack users, keep those whose name contains \"palfrey\",
-- and if exactly one matches, send them \"hello\".  The returned text
-- reports whether a message was sent; zero or multiple matches send
-- nothing.
program :: SlackP Text
program = do
  users <- listUsers
  let me = filter (hasName "palfrey") users

  case me of
    [x] -> sendMessage x "hello" >> pure "sent message"
    _ -> pure "no users found"

  where
    -- True iff the target is a 'User' whose name contains the fragment.
    hasName :: Text -> Target -> Bool
    hasName s (User _ x) = s `Text.isInfixOf` x
    hasName _ _ = False
main :: IO ()
main = do
describe program >>= print
withSlackCredentials "credentials/slack.yaml" $
R.runReaderT (run program >>= liftIO . print)
>>= print
where
describe
= slackToDescribeI >---> execDescribe
run :: (CanSlack env m) => SlackP a -> m a
run = identityI >---> execSlack
| palf/free-driver | packages/drive-slack/src/Main.hs | bsd-3-clause | 994 | 0 | 13 | 297 | 294 | 156 | 138 | 30 | 3 |
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
-- | Tests for various constructs
module Imperative where
import Data.Int
import Data.Word
import System.Directory
import System.FilePath
import System.Process
import System.Random
import Language.Embedded.Imperative
import Language.Embedded.Backend.C
import Language.Embedded.CExp
type CMD
= RefCMD
:+: ArrCMD
:+: ControlCMD
:+: PtrCMD
:+: FileCMD
:+: C_CMD
type Prog = Program CMD (Param2 CExp CType)
-- | Minimal smoke test: squares a reference in place and prints the
-- value read /before/ the update (so the printed value is the initial
-- 10, not the squared result).
prog :: Prog ()
prog = do
  r <- initRef (10 :: CExp Int32)
  a <- getRef r
  modifyRef r (*a)
  printf "%d\n" a
-- | Test primitive types
testTypes :: Prog ()
testTypes = do
inp :: CExp Int32 <- fget stdin
a <- unsafeFreezeRef =<< initRef (i2n inp + 0x88 :: CExp Int8)
b <- unsafeFreezeRef =<< initRef (i2n inp + 0x8888 :: CExp Int16)
c <- unsafeFreezeRef =<< initRef (i2n inp + 0x88888888 :: CExp Int32)
d <- unsafeFreezeRef =<< initRef (i2n inp + 0x8888888888888888 :: CExp Int64)
e <- unsafeFreezeRef =<< initRef (i2n inp + 0xEE :: CExp Word8)
f <- unsafeFreezeRef =<< initRef (i2n inp + 0xEEEE :: CExp Word16)
g <- unsafeFreezeRef =<< initRef (i2n inp + 0xEEEEEEEE :: CExp Word32)
h <- unsafeFreezeRef =<< initRef (i2n inp + 0xEEEEEEEEEEEEEEEE :: CExp Word64)
i <- unsafeFreezeRef =<< initRef (i2n inp - 9 :: CExp Float)
j <- unsafeFreezeRef =<< initRef (i2n inp - 10 :: CExp Double)
printf "%d %d %d %ld %u %u %u %lu %.3f %.3f\n" a b c d e f g h i j
k1 <- unsafeFreezeRef =<< initRef true
k2 <- unsafeFreezeRef =<< initRef true
iff ((k1 #&& k2) #|| not_ k1) (printf "true") (printf "false")
testCExp :: Prog ()
testCExp = do
a :: CExp Int32 <- fget stdin
let b = a#==10 ? a*3 $ a-5+8
let c = i2n a/23 :: CExp Double
printf "%d " b
printf "%d " (not_ (a#==10) ? a*3 $ a-5+8)
printf "%d " (a `quot_` b)
printf "%d " (a #% b)
printf "%d " (cond (i2b a) a b)
printf "%d " (b2i (not_ (a#==10)) * a)
printf "%.3f " c
printf "%.3f " (i2n a :: CExp Float)
testRef :: Prog ()
testRef = do
r1 <- newRef
r2 <- initRef (3 :: CExp Int32)
modifyRef r2 (*2)
setRef r1 =<< getRef r2
a <- unsafeFreezeRef r1
b <- unsafeFreezeRef r2
printf "%d %d\n" a b
testCopyArr1 :: Prog ()
testCopyArr1 = do
arr1 :: Arr Word32 Int32 <- newArr (10 :: CExp Word32)
arr2 :: Arr Word32 Int32 <- newArr (10 :: CExp Word32)
sequence_ [setArr arr1 i (i2n i+10) | i' <- [0..9], let i = fromInteger i']
copyArr (arr2,0) (arr1,0) 10
sequence_ [getArr arr2 i >>= printf "%d " . (*3) | i' <- [0..9], let i = fromInteger i']
printf "\n"
testCopyArr2 :: Prog ()
testCopyArr2 = do
arr1 :: Arr Word32 Int32 <- newArr (20 :: CExp Word32)
arr2 :: Arr Word32 Int32 <- newArr (20 :: CExp Word32)
sequence_ [setArr arr1 i (i2n i+10) | i' <- [0..19], let i = fromInteger i']
copyArr (arr2,10) (arr1,5) 10
sequence_ [getArr arr2 i >>= printf "%d " . (*3) | i' <- [10..19], let i = fromInteger i']
printf "\n"
testArr2 :: Prog ()
testArr2 = do
n <- fget stdin
arr :: Arr Word32 Int32 <- newArr n -- Array of dynamic length
sequence_ [setArr arr (i2n i) i | i' <- [0..3], let i = fromInteger i']
sequence_ [getArr arr i >>= printf "%d " . (*3) | i' <- [0..3], let i = fromInteger i']
printf "\n"
return ()
testArr3 :: Prog ()
testArr3 = do
arr :: Arr Word32 Int32 <- constArr [8,7,6,5]
sequence_ [getArr arr i >>= printf "%d " . (*3) | i' <- [0..3], let i = fromInteger i']
printf "\n"
return ()
testArr4 :: Prog ()
testArr4 = do
arr :: Arr Word32 Int32 <- constArr [8,7,6,5]
iarr <- freezeArr arr 4
sequence_ [printf "%d " $ iarr #! i | i' <- [0..3], let i = fromInteger i']
printf "\n"
testArr5 :: Prog ()
testArr5 = do
arr :: Arr Word32 Int32 <- constArr [8,7,6,5]
iarr <- unsafeFreezeArr arr
sequence_ [printf "%d " $ iarr #! i | i' <- [0..3], let i = fromInteger i']
printf "\n"
testArr6 :: Prog ()
testArr6 = do
arr :: Arr Word32 Int32 <- constArr [8,7,6,5]
iarr <- unsafeFreezeArr arr
arr2 <- unsafeThawArr iarr
sequence_ [getArr arr2 i >>= printf "%d " | i <- map fromInteger [0..3]]
printf "\n"
testArr7 :: Prog ()
testArr7 = do
arr :: Arr Word32 Int32 <- constArr [8,7,6,5]
iarr <- freezeArr arr 4
arr2 <- thawArr iarr 4
sequence_ [getArr arr2 i >>= printf "%d " | i <- map fromInteger [0..3]]
printf "\n"
testSwap1 :: Prog ()
testSwap1 = do
arr1 :: Arr Word32 Int32 <- constArr [1,2,3,4]
arr2 :: Arr Word32 Int32 <- constArr [11,12,13,14]
unsafeSwap arr1 arr2
sequence_ [getArr arr1 i >>= printf "%d " | i <- map fromInteger [0..3]]
printf "\n"
testSwap2 :: Prog ()
testSwap2 = do
arr1 :: Arr Word32 Int32 <- constArr [1,2,3,4]
n <- fget stdin
arr2 :: Arr Word32 Int32 <- newArr n
copyArr (arr2,0) (arr1,0) 4
setArr arr2 2 22
unsafeSwap arr1 arr2
sequence_ [getArr arr1 i >>= printf "%d " | i <- map fromInteger [0..3]]
printf "\n"
sequence_ [getArr arr2 i >>= printf "%d " | i <- map fromInteger [0..3]]
printf "\n"
testIf1 :: Prog ()
testIf1 = do
inp :: CExp Int32 <- fget stdin
a <- ifE (inp #== 10) (return (inp+1)) (return (inp*3))
b <- ifE (not_ (inp #== 10)) (return (a+1)) (return (a*3))
printf "%d %d\n" a b
testIf2 :: Prog ()
testIf2 = do
inp :: CExp Int32 <- fget stdin
iff (inp #== 11) (printf "== 11\n") (printf "/= 11\n")
iff (not_ (inp #== 11)) (printf "/= 11\n") (printf "== 11\n")
iff (inp #== 12) (printf "== 12\n") (return ())
iff (not_ (inp #== 12)) (return ()) (printf "== 12\n")
iff (inp #== 13) (printf "== 13\n") (return ())
iff (not_ (inp #== 13)) (return ()) (printf "== 13\n")
iff (inp #== 14) (return ()) (return ())
-- Loop from 0 to 9 in steps of 1
testFor1 :: Prog ()
testFor1 = for (0,1,9) $ \i ->
printf "%d\n" (i :: CExp Int8)
-- Loop from 9 to 0 in steps of 2
testFor2 :: Prog ()
testFor2 = for (9,-2,0) $ \i ->
printf "%d\n" (i :: CExp Int8)
-- Loop from 0 to but excluding 10 in steps of 2
testFor3 :: Prog ()
testFor3 = for (0, 2, Excl 10) $ \i ->
printf "%d\n" (i :: CExp Int8)
-- While loop tested in `sumInput` in Demo.hs.
testAssert :: Prog ()
testAssert = do
inp :: CExp Int32 <- fget stdin
assert (inp #> 0) "input too small"
printf "past assertion\n"
-- This tests that `formatSpecifier` works as it should for different types
testPrintScan :: (Formattable a, CType a) => CExp a -> Prog ()
testPrintScan a = do
i <- fget stdin
fput stdout "" (i `asTypeOf` a) ""
testPtr :: Prog ()
testPtr = do
addInclude "<stdlib.h>"
addInclude "<string.h>"
addInclude "<stdio.h>"
p :: Ptr Int32 <- newPtr
callProcAssign p "malloc" [valArg (100 :: CExp Word32)]
arr :: Arr Word32 Int32 <- constArr [34,45,56,67,78]
callProc "memcpy" [ptrArg p, arrArg arr, valArg (5*4 :: CExp Word32)] -- sizeof(int32_t) = 4
callProc "printf" [strArg "%d\n", deref $ ptrArg p]
iarr :: IArr Word32 Int32 <- unsafeFreezeArr =<< ptrToArr p
printf "sum: %d\n" (iarr#!0 + iarr#!1 + iarr#!2 + iarr#!3 + iarr#!4)
callProc "free" [ptrArg p]
testArgs :: Prog ()
testArgs = do
addInclude "<stdio.h>"
addInclude "<stdbool.h>"
addDefinition setPtr_def
addDefinition ret_def
let v = 55 :: CExp Int32
r <- initRef (66 :: CExp Int32)
a :: Arr Int32 Int32 <- constArr [234..300]
ia <- freezeArr a 10
p :: Ptr Int32 <- newPtr
o <- newObject "int" False
op <- newObject "int" True
callProcAssign p "setPtr" [refArg r]
callProcAssign o "ret" [valArg v]
callProcAssign op "setPtr" [refArg r]
callProc "printf"
[ strArg "%d %d %d %d %d %d %d %d %d %d\n"
, valArg v
, deref (refArg r)
, deref (arrArg a)
, deref (iarrArg ia)
, deref (ptrArg p)
, deref (offset (iarrArg ia) (3 :: CExp Word32))
, deref (offset (ptrArg p) (0 :: CExp Word32))
, objArg o
, deref (objArg op)
, constArg "bool" "true"
]
where
setPtr_def = [cedecl|
int * setPtr (int *a) {
return a;
}
|]
ret_def = [cedecl|
int ret (int a) {
return a;
}
|]
testExternArgs :: Prog ()
testExternArgs = do
addInclude "<stdbool.h>"
let v = 55 :: CExp Int32
externProc "val_proc1" [valArg v]
externProc "val_proc2" [offset3 $ valArg v]
-- Normal integer addition (slight misuse of `offset`)
_ :: CExp Int32 <- externFun "val_fun" [valArg v]
r <- initRef v
externProc "ref_proc1" [refArg r]
externProc "ref_proc2" [deref $ refArg r] -- TODO Simplify
a :: Arr Int32 Int32 <- newArr 10
externProc "arr_proc1" [arrArg a]
externProc "arr_proc2" [addr $ arrArg a]
externProc "arr_proc3" [deref $ arrArg a]
externProc "arr_proc4" [offset3 $ arrArg a]
externProc "arr_proc5" [deref $ offset3 $ arrArg a]
externProc "arr_proc6" [offsetMinus $ arrArg a]
p :: Ptr Int32 <- newPtr
externProc "ptr_proc1" [ptrArg p]
externProc "ptr_proc2" [addr $ ptrArg p]
externProc "ptr_proc3" [deref $ ptrArg p]
o <- newObject "int" False
externProc "obj_proc1" [objArg o]
externProc "obj_proc2" [addr $ objArg o]
op <- newObject "int" True
externProc "obj_proc3" [objArg op]
externProc "obj_proc4" [addr $ objArg op]
externProc "obj_proc5" [deref $ objArg op]
externProc "obj_proc6" [offset3 $ objArg op]
let s = "apa"
externProc "str_proc1" [strArg s]
externProc "str_proc2" [deref $ strArg s]
externProc "const_proc" [constArg "bool" "true"]
return ()
where
offset3 = flip offset (3 :: CExp Int32)
offsetMinus = flip offset (-3 :: CExp Int32) . offset3
testCallFun :: Prog ()
testCallFun = do
addInclude "<math.h>"
i :: CExp Int32 <- fget stdin
a <- callFun "sin" [valArg (i2n i :: CExp Double)]
printf "%.3f\n" (a :: CExp Double)
multiModule :: Prog ()
multiModule = do
addInclude "<stdlib.h>"
addExternProc "func_in_other" []
inModule "other" $ do
addDefinition [cedecl|
void func_in_other(void) {
puts("Hello from the other module!");
} |]
addInclude "<stdio.h>"
callProc "func_in_other" []
testMultiModule :: IO ()
testMultiModule = do
tmp <- getTemporaryDirectory
rand <- randomRIO (1, maxBound :: Int)
let temp = tmp </> "imperative-edsl_" ++ show rand
exists <- doesDirectoryExist temp
when exists $ removeDirectoryRecursive temp
createDirectory temp
let ms = compileAll multiModule
files = [temp </> "imperative-edsl_" ++ m ++ ".c" | (m,_) <- ms]
exe = temp </> "imperative-edsl"
cmd = unwords $ ("cc -o" : exe : files)
zipWithM_ writeFile files (map snd ms)
putStrLn cmd
system cmd
putStrLn exe
system exe
exists <- doesDirectoryExist temp
when exists $ removeDirectoryRecursive temp
----------------------------------------
-- It would be nice to be able to run these tests using Tests.Tasty.HUnit, but
-- I wasn't able to make that work, probably due to the use of `fakeIO` in the
-- tests. First, Tasty wasn't able to silence the output of the tests, and
-- secondly, the tests would always fail when running a second time.
testAll = do
tag "testTypes" >> compareCompiled testTypes (runIO testTypes) "0\n"
tag "testCExp" >> compareCompiledM testCExp (runIO testCExp) "44\n"
tag "testRef" >> compareCompiled testRef (runIO testRef) ""
tag "testCopyArr1" >> compareCompiled testCopyArr1 (runIO testCopyArr1) ""
tag "testCopyArr2" >> compareCompiled testCopyArr2 (runIO testCopyArr2) ""
tag "testArr2" >> compareCompiled testArr2 (runIO testArr2) "20\n"
tag "testArr3" >> compareCompiled testArr3 (runIO testArr3) ""
tag "testArr4" >> compareCompiled testArr4 (runIO testArr4) ""
tag "testArr5" >> compareCompiled testArr5 (runIO testArr5) ""
tag "testArr6" >> compareCompiled testArr6 (runIO testArr6) ""
tag "testArr7" >> compareCompiled testArr7 (runIO testArr6) ""
tag "testArr7" >> compareCompiled testArr7 (runIO testArr7) ""
tag "testSwap1" >> compareCompiled testSwap1 (runIO testSwap1) ""
tag "testSwap2" >> compareCompiled testSwap2 (runIO testSwap2) "45\n"
tag "testIf1" >> compareCompiled testIf1 (runIO testIf1) "12\n"
tag "testIf2" >> compareCompiled testIf2 (runIO testIf2) "12\n"
tag "testFor1" >> compareCompiled testFor1 (runIO testFor1) ""
tag "testFor2" >> compareCompiled testFor2 (runIO testFor2) ""
tag "testFor3" >> compareCompiled testFor3 (runIO testFor3) ""
tag "testAssert" >> compareCompiled testAssert (runIO testAssert) "45"
tag "testPtr" >> compareCompiled testPtr (putStrLn "34" >> putStrLn "sum: 280") ""
tag "testArgs" >> compareCompiled testArgs (putStrLn "55 66 234 234 66 237 66 55 66 1") ""
tag "testPrintScan_Int8" >> compareCompiled (testPrintScan int8) (runIO (testPrintScan int8)) "45"
tag "testPrintScan_Int16" >> compareCompiled (testPrintScan int16) (runIO (testPrintScan int16)) "45"
tag "testPrintScan_Int32" >> compareCompiled (testPrintScan int32) (runIO (testPrintScan int32)) "45"
tag "testPrintScan_Int64" >> compareCompiled (testPrintScan int64) (runIO (testPrintScan int64)) "45"
tag "testPrintScan_Word8" >> compareCompiled (testPrintScan word8) (runIO (testPrintScan word8)) "45"
tag "testPrintScan_Word16" >> compareCompiled (testPrintScan word16) (runIO (testPrintScan word16)) "45"
tag "testPrintScan_Word32" >> compareCompiled (testPrintScan word32) (runIO (testPrintScan word32)) "45"
tag "testPrintScan_Word64" >> compareCompiled (testPrintScan word64) (runIO (testPrintScan word64)) "45"
tag "testPrintScan_Float" >> captureCompiled (testPrintScan float) "45"
tag "testPrintScan_Double" >> captureCompiled (testPrintScan double) "45"
-- `testPrintScan` for floating point types can't be compared to `runIO`,
-- becuase different number of digits are printed
tag "testExternArgs" >> compileAndCheck testExternArgs
tag "testCallFun" >> compareCompiledM testCallFun (putStrLn "-0.757") "4"
tag "multiModule" >> testMultiModule
where
tag str = putStrLn $ "---------------- tests/Imperative.hs/" ++ str ++ "\n"
compareCompiledM = compareCompiled' def {externalFlagsPost = ["-lm"]}
int8 = 0 :: CExp Int8
int16 = 0 :: CExp Int16
int32 = 0 :: CExp Int32
int64 = 0 :: CExp Int64
word8 = 0 :: CExp Word8
word16 = 0 :: CExp Word16
word32 = 0 :: CExp Word32
word64 = 0 :: CExp Word64
float = 0 :: CExp Float
double = 0 :: CExp Double
| kmate/imperative-edsl | tests/Imperative.hs | bsd-3-clause | 15,582 | 0 | 17 | 4,357 | 5,708 | 2,701 | 3,007 | 335 | 1 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE RecordWildCards #-}
-- | Lower-level primitives to drive Shake, which are wrapped into the
-- 'Development.Shake.shake' function. Useful if you want to perform multiple Shake
-- runs in a row without reloading from the database.
-- Sometimes used in conjunction with @'shakeFiles'=\"\/dev\/null\"@.
-- Using these functions you can approximate the 'Development.Shake.shake' experience with:
--
-- @
-- shake opts rules = do
-- (_, after) \<- 'shakeWithDatabase' opts rules $ \\db -> do
-- 'shakeOneShotDatabase' db
-- 'shakeRunDatabase' db []
-- 'shakeRunAfter' opts after
-- @
module Development.Shake.Database(
ShakeDatabase,
shakeOpenDatabase,
shakeWithDatabase,
shakeOneShotDatabase,
shakeRunDatabase,
shakeLiveFilesDatabase,
shakeProfileDatabase,
shakeErrorsDatabase,
shakeRunAfter
) where
import Control.Concurrent.Extra
import Control.Exception
import Control.Monad
import Control.Monad.IO.Class
import Data.IORef
import General.Cleanup
import Development.Shake.Internal.Errors
import Development.Shake.Internal.Options
import Development.Shake.Internal.Core.Rules
import Development.Shake.Internal.Core.Run
import Development.Shake.Internal.Core.Types
import Development.Shake.Internal.Rules.Default
data UseState
= Closed
| Using String
| Open {openOneShot :: Bool, openRequiresReset :: Bool}
-- | The type of an open Shake database. Created with
-- 'shakeOpenDatabase' or 'shakeWithDatabase'. Used with
-- 'shakeRunDatabase'. You may not execute simultaneous calls using 'ShakeDatabase'
-- on separate threads (it will raise an error).
data ShakeDatabase = ShakeDatabase (Var UseState) RunState
-- | Given some options and rules, return a pair. The first component opens the database,
-- the second cleans it up. The creation /does not/ need to be run masked, because the
-- cleanup is able to run at any point. Most users should prefer 'shakeWithDatabase'
-- which handles exceptions duration creation properly.
shakeOpenDatabase :: ShakeOptions -> Rules () -> IO (IO ShakeDatabase, IO ())
shakeOpenDatabase opts rules = do
(cleanup, clean) <- newCleanup
use <- newVar $ Open False False
let alloc =
withOpen use "shakeOpenDatabase" id $ \_ ->
ShakeDatabase use <$> open cleanup opts (rules >> defaultRules)
let free = do
modifyVar_ use $ \case
Using s -> throwM $ errorStructured "Error when calling shakeOpenDatabase close function, currently running" [("Existing call", Just s)] ""
_ -> pure Closed
clean
pure (alloc, free)
-- | Run @act@ while marking the database as in use by the named caller.
-- Throws if the database is already running another call or has been
-- closed; otherwise the state observed on entry is passed to @act@.
-- On completion or exception the state is replaced by @final@ applied
-- to that entry state.  'mask' ensures an asynchronous exception cannot
-- strike between claiming the 'Using' state and installing the
-- 'onException' cleanup, which would leave the database permanently
-- marked as busy.
withOpen :: Var UseState -> String -> (UseState -> UseState) -> (UseState -> IO a) -> IO a
withOpen var name final act = mask $ \restore -> do
    o <- modifyVar var $ \case
        Using s -> throwM $ errorStructured ("Error when calling " ++ name ++ ", currently running") [("Existing call", Just s)] ""
        Closed -> throwM $ errorStructured ("Error when calling " ++ name ++ ", already closed") [] ""
        o@Open{} -> pure (Using name, o)
    let clean = writeVar var $ final o
    res <- restore (act o) `onException` clean
    clean
    pure res
-- | Declare that a just-openned database will be used to call 'shakeRunDatabase' at most once.
-- If so, an optimisation can be applied to retain less memory.
shakeOneShotDatabase :: ShakeDatabase -> IO ()
shakeOneShotDatabase (ShakeDatabase use _) =
withOpen use "shakeOneShotDatabase" (\o -> o{openOneShot=True}) $ \_ -> pure ()
-- | Given some options and rules, create a 'ShakeDatabase' that can be used to run
-- executions.
shakeWithDatabase :: ShakeOptions -> Rules () -> (ShakeDatabase -> IO a) -> IO a
shakeWithDatabase opts rules act = do
(db, clean) <- shakeOpenDatabase opts rules
(act =<< db) `finally` clean
-- | Given a 'ShakeDatabase', what files did the execution ensure were up-to-date
-- in the previous call to 'shakeRunDatabase'. Corresponds to the list of files
-- written out to 'shakeLiveFiles'.
shakeLiveFilesDatabase :: ShakeDatabase -> IO [FilePath]
shakeLiveFilesDatabase (ShakeDatabase use s) =
withOpen use "shakeLiveFilesDatabase" id $ \_ ->
liveFilesState s
-- | Given a 'ShakeDatabase', generate profile information to the given file about the latest run.
-- See 'shakeReport' for the types of file that can be generated.
shakeProfileDatabase :: ShakeDatabase -> FilePath -> IO ()
shakeProfileDatabase (ShakeDatabase use s) file =
withOpen use "shakeProfileDatabase" id $ \_ ->
profileState s file
-- | Given a 'ShakeDatabase', what files did the execution reach an error on last time.
-- Some special considerations when using this function:
--
-- * The presence of an error does not mean the build will fail, specifically if a
-- previously required dependency was run and raised an error, then the thing that previously
-- required it will be run. If the build system has changed in an untracked manner,
-- the build may succeed this time round.
--
-- * If the previous run actually failed then 'shakeRunDatabase' will have thrown an exception.
-- You probably want to catch that exception so you can make the call to 'shakeErrorsDatabase'.
--
-- * You may see a single failure reported multiple times, with increasingly large call stacks, showing
-- the ways in which the error lead to further errors throughout.
--
-- * The 'SomeException' values are highly likely to be of type 'ShakeException'.
--
-- * If you want as many errors as possile in one run set @'shakeStaunch'=True@.
shakeErrorsDatabase :: ShakeDatabase -> IO [(String, SomeException)]
shakeErrorsDatabase (ShakeDatabase use s) =
withOpen use "shakeErrorsDatabase" id $ \_ ->
errorsState s
-- | Given an open 'ShakeDatabase', run both whatever actions were added to the 'Rules',
-- plus the list of 'Action' given here. Returns the results from the explicitly passed
-- actions along with a list of actions to run after the database was closed, as added with
-- 'Development.Shake.runAfter' and 'Development.Shake.removeFilesAfter'.
shakeRunDatabase :: ShakeDatabase -> [Action a] -> IO ([a], [IO ()])
shakeRunDatabase (ShakeDatabase use s) as =
withOpen use "shakeRunDatabase" (\o -> o{openRequiresReset=True}) $ \Open{..} -> do
when openRequiresReset $ do
when openOneShot $
throwM $ errorStructured "Error when calling shakeRunDatabase twice, after calling shakeOneShotDatabase" [] ""
reset s
(refs, as) <- fmap unzip $ forM as $ \a -> do
ref <- newIORef Nothing
pure (ref, liftIO . writeIORef ref . Just =<< a)
after <- run s openOneShot $ map void as
results <- mapM readIORef refs
case sequence results of
Just result -> pure (result, after)
Nothing -> throwM $ errorInternal "Expected all results were written, but some where not"
| ndmitchell/shake | src/Development/Shake/Database.hs | bsd-3-clause | 7,008 | 0 | 20 | 1,454 | 1,230 | 657 | 573 | 86 | 3 |
{-# LANGUAGE BangPatterns, DeriveDataTypeable, FlexibleInstances, MultiParamTypeClasses #-}
module Network.Monitoring.Riemann.Proto.Query (Query(..)) where
import Prelude ((+), (/))
import qualified Prelude as Prelude'
import qualified Data.Typeable as Prelude'
import qualified Data.Data as Prelude'
import qualified Text.ProtocolBuffers.Header as P'
data Query = Query{string :: !(P'.Maybe P'.Utf8)}
deriving (Prelude'.Show, Prelude'.Eq, Prelude'.Ord, Prelude'.Typeable, Prelude'.Data)
instance P'.Mergeable Query where
mergeAppend (Query x'1) (Query y'1) = Query (P'.mergeAppend x'1 y'1)
instance P'.Default Query where
defaultValue = Query P'.defaultValue
instance P'.Wire Query where
wireSize ft' self'@(Query x'1)
= case ft' of
10 -> calc'Size
11 -> P'.prependMessageSize calc'Size
_ -> P'.wireSizeErr ft' self'
where
calc'Size = (P'.wireSizeOpt 1 9 x'1)
wirePut ft' self'@(Query x'1)
= case ft' of
10 -> put'Fields
11 -> do
P'.putSize (P'.wireSize 10 self')
put'Fields
_ -> P'.wirePutErr ft' self'
where
put'Fields
= do
P'.wirePutOpt 10 9 x'1
wireGet ft'
= case ft' of
10 -> P'.getBareMessageWith update'Self
11 -> P'.getMessageWith update'Self
_ -> P'.wireGetErr ft'
where
update'Self wire'Tag old'Self
= case wire'Tag of
10 -> Prelude'.fmap (\ !new'Field -> old'Self{string = Prelude'.Just new'Field}) (P'.wireGet 9)
_ -> let (field'Number, wire'Type) = P'.splitWireTag wire'Tag in P'.unknown field'Number wire'Type old'Self
instance P'.MessageAPI msg' (msg' -> Query) Query where
getVal m' f' = f' m'
instance P'.GPB Query
instance P'.ReflectDescriptor Query where
getMessageInfo _ = P'.GetMessageInfo (P'.fromDistinctAscList []) (P'.fromDistinctAscList [10])
reflectDescriptorInfo _
= Prelude'.read
"DescriptorInfo {descName = ProtoName {protobufName = FIName \".ExtraRiemann.Query\", haskellPrefix = [MName \"Network\",MName \"Monitoring\",MName \"Riemann\"], parentModule = [MName \"Proto\"], baseName = MName \"Query\"}, descFilePath = [\"Network\",\"Monitoring\",\"Riemann\",\"Proto\",\"Query.hs\"], isGroup = False, fields = fromList [FieldInfo {fieldName = ProtoFName {protobufName' = FIName \".ExtraRiemann.Query.string\", haskellPrefix' = [MName \"Network\",MName \"Monitoring\",MName \"Riemann\"], parentModule' = [MName \"Proto\",MName \"Query\"], baseName' = FName \"string\"}, fieldNumber = FieldId {getFieldId = 1}, wireTag = WireTag {getWireTag = 10}, packedTag = Nothing, wireTagLength = 1, isPacked = False, isRequired = False, canRepeat = False, mightPack = False, typeCode = FieldType {getFieldType = 9}, typeName = Nothing, hsRawDefault = Nothing, hsDefault = Nothing}], keys = fromList [], extRanges = [], knownKeys = fromList [], storeUnknown = False, lazyFields = False}" | telser/riemann-hs | src/Network/Monitoring/Riemann/Proto/Query.hs | mit | 2,949 | 0 | 17 | 577 | 603 | 314 | 289 | 49 | 0 |
{-# LANGUAGE Trustworthy #-}
-- |
-- Module : Data.Hash.SL2
-- License : MIT
-- Maintainer : Sam Rijs <srijs@airpost.net>
--
-- An algebraic hash function, inspired by the paper /Hashing with SL2/ by
-- Tillich and Zemor.
--
-- The hash function is based on matrix multiplication in the special linear group
-- of degree 2, over a Galois field of order 2^127, with all computations modulo
-- the polynomial x^127 + x^63 + 1.
--
-- This construction gives some nice properties, which traditional bit-scambling
-- hash functions don't possess, including it being composable. It holds:
--
-- prop> hash (m1 <> m2) == hash m1 <> hash m2
--
-- Following that, the hash function is also parallelisable. If a message @m@ can be divided
-- into a list of chunks @cs@, the hash of the message can be calculated in parallel:
--
-- prop> mconcat (parMap rpar hash cs) == hash m
--
-- All operations in this package are implemented in a very efficient manner using SSE instructions.
--
module Data.Hash.SL2
( Hash
-- ** Hashing
, hash
, append, prepend
, foldAppend, foldPrepend
-- ** Composition
, unit, concat, concatAll
-- ** Parsing
, parse
-- ** Validation
, valid, validate
-- ** Packing
, pack8, pack16, pack32, pack64
-- ** Unpacking
, unpack8, unpack16, unpack32, unpack64
) where
import Prelude hiding (concat)
import Data.Hash.SL2.Internal (Hash)
import Data.Hash.SL2.Unsafe
import qualified Data.Hash.SL2.Mutable as Mutable
import System.IO.Unsafe
import Data.ByteString (ByteString)
import Data.Word
import Data.Foldable (foldl', foldr')
-- NOTE(review): every 'unsafePerformIO' in this module wraps a C routine that
-- only reads the supplied hash buffers (or writes a fresh copy), so the
-- results are referentially transparent — confirm against
-- "Data.Hash.SL2.Mutable" before changing any of these wrappers.
instance Show Hash where
  -- Delegates to the C serializer for the textual representation.
  show h = unsafePerformIO $ unsafeUseAsPtr h Mutable.serialize
instance Eq Hash where
  a == b = unsafePerformIO $ unsafeUseAsPtr2 a b Mutable.eq
instance Ord Hash where
  compare a b = unsafePerformIO $ unsafeUseAsPtr2 a b Mutable.cmp
-- | 'Hash' is a (non-commutative) monoid under concatenation; this is what
-- makes the hash composable and parallelisable.
instance Monoid Hash where
  mempty = unit
  mappend = concat
  mconcat = concatAll
-- | /O(n)/ Calculate the hash of the 'ByteString'. Alias for @('append' 'unit')@.
hash :: ByteString -> Hash
hash = append unit
-- | /O(n)/ Append the hash of the 'ByteString' to the existing 'Hash'.
-- A significantly faster equivalent of @((. 'hash') . 'concat')@.
append :: Hash -> ByteString -> Hash
append h s = fst $ unsafePerformIO $ Mutable.withCopy h $ Mutable.append s
-- The RULES below let GHC rewrite the slow 'hash'-then-'concat' (or fold)
-- pipelines into the fused single-pass variants at optimisation time.
{-# RULES "hash/concat" forall h s . concat h (hash s) = append h s #-}
-- | /O(n)/ Prepend the hash of the 'ByteString' to the existing 'Hash'.
-- A significantly faster equivalent of @('concat' . 'hash')@.
prepend :: ByteString -> Hash -> Hash
prepend s h = fst $ unsafePerformIO $ Mutable.withCopy h $ Mutable.prepend s
{-# RULES "concat/hash" forall s h . concat (hash s) h = prepend s h #-}
-- | /O(n)/ Append the hash of every 'ByteString' to the existing 'Hash', from left to right.
-- A significantly faster equivalent of @('foldl' 'append')@.
foldAppend :: Foldable t => Hash -> t ByteString -> Hash
foldAppend h ss = fst $ unsafePerformIO $ Mutable.withCopy h $ Mutable.foldAppend ss
{-# RULES "foldl/append" forall h ss . foldl append h ss = foldAppend h ss #-}
{-# RULES "foldl'/append" forall h ss . foldl' append h ss = foldAppend h ss #-}
-- | /O(n)/ Prepend the hash of every 'ByteString' to the existing 'Hash', from right to left.
-- A significantly faster equivalent of @('flip' ('foldr' 'prepend'))@.
foldPrepend :: Foldable t => t ByteString -> Hash -> Hash
foldPrepend ss h = fst $ unsafePerformIO $ Mutable.withCopy h $ Mutable.foldPrepend ss
{-# RULES "foldr/prepend" forall ss h . foldr prepend h ss = foldPrepend ss h #-}
{-# RULES "foldr'/prepend" forall ss h . foldr' prepend h ss = foldPrepend ss h #-}
-- | /O(1)/ The unit element for concatenation. Alias for 'mempty'.
unit :: Hash
unit = fst $ unsafePerformIO $ unsafeWithNew Mutable.unit
-- | /O(1)/ Concatenate two hashes. Alias for 'mappend'.
concat :: Hash -> Hash -> Hash
concat a b = fst $ unsafePerformIO $ unsafeWithNew (unsafeUseAsPtr2 a b . Mutable.concat)
-- Inlining is delayed to phase 1 so the "hash/concat" and "concat/hash"
-- rewrite rules above still get a chance to fire.
{-# INLINE[1] concat #-}
-- | /O(n)/ Concatenate a list of hashes. Alias for 'mconcat'.
concatAll :: [Hash] -> Hash
concatAll [] = unit
concatAll [h] = h
-- Accumulates in place into a private copy of the first hash, multiplying
-- each remaining hash onto it.
concatAll (h:hs) = fst $ unsafePerformIO $ Mutable.withCopy h $ \p ->
  mapM_ (flip unsafeUseAsPtr $ Mutable.concat p p) hs
-- | /O(1)/ Parse the representation generated by 'show'.
parse :: String -> Maybe Hash
parse s = uncurry (<$) $ unsafePerformIO $ unsafeWithNew $ Mutable.unserialize s
-- | /O(1)/ Check a hash for bit-level validity.
valid :: Hash -> Bool
valid h = unsafePerformIO $ unsafeUseAsPtr h Mutable.valid
-- | /O(1)/ Validate a hash on the bit-level. From @'valid' h == 'True'@ follows @'validate' h == 'Just' h@.
validate :: Hash -> Maybe Hash
validate h | valid h = Just h
validate _ = Nothing
-- | /O(1)/ Pack a list of 64 8-bit words; fails unless exactly 64 words are
-- given and the resulting hash is bit-level valid.
pack8 :: [Word8] -> Maybe Hash
pack8 ws = if length ws == 64 then validate (unsafePack ws) else Nothing
-- | /O(1)/ Pack a list of 32 16-bit words; fails unless exactly 32 words are
-- given and the resulting hash is bit-level valid.
pack16 :: [Word16] -> Maybe Hash
pack16 ws = if length ws == 32 then validate (unsafePack ws) else Nothing
-- | /O(1)/ Pack a list of 16 32-bit words; fails unless exactly 16 words are
-- given and the resulting hash is bit-level valid.
pack32 :: [Word32] -> Maybe Hash
pack32 ws = if length ws == 16 then validate (unsafePack ws) else Nothing
-- | /O(1)/ Pack a list of 8 64-bit words; fails unless exactly 8 words are
-- given and the resulting hash is bit-level valid.
pack64 :: [Word64] -> Maybe Hash
pack64 ws = if length ws == 8 then validate (unsafePack ws) else Nothing
-- All four unpackers share the raw 'unsafeUnpack'; they are total because a
-- constructed 'Hash' always holds exactly 512 bits.
-- | /O(1)/ Unpack into list of 64 8-bit words.
unpack8 :: Hash -> [Word8]
unpack8 = unsafeUnpack
-- | /O(1)/ Unpack into list of 32 16-bit words.
unpack16 :: Hash -> [Word16]
unpack16 = unsafeUnpack
-- | /O(1)/ Unpack into list of 16 32-bit words.
unpack32 :: Hash -> [Word32]
unpack32 = unsafeUnpack
-- | /O(1)/ Unpack into list of 8 64-bit words.
unpack64 :: Hash -> [Word64]
unpack64 = unsafeUnpack
| srijs/hwsl2-haskell | src/Data/Hash/SL2.hs | mit | 5,752 | 0 | 11 | 1,072 | 1,085 | 592 | 493 | 82 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.Route53Domains.TransferDomain
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | This operation transfers a domain from another registrar to Amazon Route 53.
-- When the transfer is complete, the domain is registered with the AWS
-- registrar partner, Gandi.
--
-- For transfer requirements, a detailed procedure, and information about
-- viewing the status of a domain transfer, see <http://docs.aws.amazon.com/Route53/latest/DeveloperGuide/domain-transfer-to-route-53.html Transferring Registration for aDomain to Amazon Route 53> in the Amazon Route 53 Developer Guide.
--
-- If the registrar for your domain is also the DNS service provider for the
-- domain, we highly recommend that you consider transferring your DNS service
-- to Amazon Route 53 or to another DNS service provider before you transfer
-- your registration. Some registrars provide free DNS service when you purchase
-- a domain registration. When you transfer the registration, the previous
-- registrar will not renew your domain registration and could end your DNS
-- service at any time.
--
-- Caution! If the registrar for your domain is also the DNS service provider
-- for the domain and you don't transfer DNS service to another provider, your
-- website, email, and the web applications associated with the domain might
-- become unavailable. If the transfer is successful, this method returns an
-- operation ID that you can use to track the progress and completion of the
-- action. If the transfer doesn't complete successfully, the domain registrant
-- will be notified by email.
--
-- <http://docs.aws.amazon.com/Route53/latest/APIReference/api-TransferDomain.html>
module Network.AWS.Route53Domains.TransferDomain
(
-- * Request
TransferDomain
-- ** Request constructor
, transferDomain
-- ** Request lenses
, tdAdminContact
, tdAuthCode
, tdAutoRenew
, tdDomainName
, tdDurationInYears
, tdIdnLangCode
, tdNameservers
, tdPrivacyProtectAdminContact
, tdPrivacyProtectRegistrantContact
, tdPrivacyProtectTechContact
, tdRegistrantContact
, tdTechContact
-- * Response
, TransferDomainResponse
-- ** Response constructor
, transferDomainResponse
-- ** Response lenses
, tdrOperationId
) where
import Network.AWS.Data (Object)
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.Route53Domains.Types
import qualified GHC.Exts
-- | Request payload for the TransferDomain operation. Note that the
-- authorization code is wrapped in 'Sensitive' so it is redacted from
-- 'Show' output and logs.
data TransferDomain = TransferDomain
    { _tdAdminContact                    :: ContactDetail
    , _tdAuthCode                        :: Maybe (Sensitive Text)
    , _tdAutoRenew                       :: Maybe Bool
    , _tdDomainName                      :: Text
    , _tdDurationInYears                 :: Nat
    , _tdIdnLangCode                     :: Maybe Text
    , _tdNameservers                     :: List "Nameservers" Nameserver
    , _tdPrivacyProtectAdminContact      :: Maybe Bool
    , _tdPrivacyProtectRegistrantContact :: Maybe Bool
    , _tdPrivacyProtectTechContact       :: Maybe Bool
    , _tdRegistrantContact               :: ContactDetail
    , _tdTechContact                     :: ContactDetail
    } deriving (Eq, Read, Show)
-- | 'TransferDomain' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'tdAdminContact' @::@ 'ContactDetail'
--
-- * 'tdAuthCode' @::@ 'Maybe' 'Text'
--
-- * 'tdAutoRenew' @::@ 'Maybe' 'Bool'
--
-- * 'tdDomainName' @::@ 'Text'
--
-- * 'tdDurationInYears' @::@ 'Natural'
--
-- * 'tdIdnLangCode' @::@ 'Maybe' 'Text'
--
-- * 'tdNameservers' @::@ ['Nameserver']
--
-- * 'tdPrivacyProtectAdminContact' @::@ 'Maybe' 'Bool'
--
-- * 'tdPrivacyProtectRegistrantContact' @::@ 'Maybe' 'Bool'
--
-- * 'tdPrivacyProtectTechContact' @::@ 'Maybe' 'Bool'
--
-- * 'tdRegistrantContact' @::@ 'ContactDetail'
--
-- * 'tdTechContact' @::@ 'ContactDetail'
--
-- Smart constructor: takes only the required fields; every optional field
-- starts as 'Nothing' ('mempty' for the nameserver list) and can be filled
-- in afterwards via the lenses above.
transferDomain :: Text -- ^ 'tdDomainName'
               -> Natural -- ^ 'tdDurationInYears'
               -> ContactDetail -- ^ 'tdAdminContact'
               -> ContactDetail -- ^ 'tdRegistrantContact'
               -> ContactDetail -- ^ 'tdTechContact'
               -> TransferDomain
transferDomain domainName years adminC registrantC techC = TransferDomain
    { _tdAdminContact                    = adminC
    , _tdAuthCode                        = Nothing
    , _tdAutoRenew                       = Nothing
    , _tdDomainName                      = domainName
    , _tdDurationInYears                 = withIso _Nat (const id) years
    , _tdIdnLangCode                     = Nothing
    , _tdNameservers                     = mempty
    , _tdPrivacyProtectAdminContact      = Nothing
    , _tdPrivacyProtectRegistrantContact = Nothing
    , _tdPrivacyProtectTechContact       = Nothing
    , _tdRegistrantContact               = registrantC
    , _tdTechContact                     = techC
    }
-- | Provides detailed contact information.
--
-- Type: Complex
--
-- Children: 'FirstName', 'MiddleName', 'LastName', 'ContactType', 'OrganizationName', 'AddressLine1', 'AddressLine2', 'City', 'State', 'CountryCode', 'ZipCode', 'PhoneNumber', 'Email', 'Fax', 'ExtraParams'
--
-- Required: Yes
tdAdminContact :: Lens' TransferDomain ContactDetail
tdAdminContact = lens _tdAdminContact (\s a -> s { _tdAdminContact = a })
-- | The authorization code for the domain. You get this value from the current
-- registrar.
--
-- Type: String
--
-- Required: Yes
tdAuthCode :: Lens' TransferDomain (Maybe Text)
tdAuthCode = lens _tdAuthCode (\s a -> s { _tdAuthCode = a }) . mapping _Sensitive
-- | Indicates whether the domain will be automatically renewed (true) or not
-- (false). Autorenewal only takes effect after the account is charged.
--
-- Type: Boolean
--
-- Valid values: 'true' | 'false'
--
-- Default: true
--
-- Required: No
tdAutoRenew :: Lens' TransferDomain (Maybe Bool)
tdAutoRenew = lens _tdAutoRenew (\s a -> s { _tdAutoRenew = a })
-- | The name of a domain.
--
-- Type: String
--
-- Default: None
--
-- Constraints: The domain name can contain only the letters a through z, the
-- numbers 0 through 9, and hyphen (-). Internationalized Domain Names are not
-- supported.
--
-- Required: Yes
tdDomainName :: Lens' TransferDomain Text
tdDomainName = lens _tdDomainName (\s a -> s { _tdDomainName = a })
-- | The number of years the domain will be registered. Domains are registered for
-- a minimum of one year. The maximum period depends on the top-level domain.
--
-- Type: Integer
--
-- Default: 1
--
-- Valid values: Integer from 1 to 10
--
-- Required: Yes
tdDurationInYears :: Lens' TransferDomain Natural
tdDurationInYears =
lens _tdDurationInYears (\s a -> s { _tdDurationInYears = a })
. _Nat
-- | Reserved for future use.
tdIdnLangCode :: Lens' TransferDomain (Maybe Text)
tdIdnLangCode = lens _tdIdnLangCode (\s a -> s { _tdIdnLangCode = a })
-- | Contains details for the host and glue IP addresses.
--
-- Type: Complex
--
-- Children: 'GlueIps', 'Name'
--
-- Required: No
tdNameservers :: Lens' TransferDomain [Nameserver]
tdNameservers = lens _tdNameservers (\s a -> s { _tdNameservers = a }) . _List
-- | Whether you want to conceal contact information from WHOIS queries. If you
-- specify true, WHOIS ("who is") queries will return contact information for
-- our registrar partner, Gandi, instead of the contact information that you
-- enter.
--
-- Type: Boolean
--
-- Default: 'true'
--
-- Valid values: 'true' | 'false'
--
-- Required: No
tdPrivacyProtectAdminContact :: Lens' TransferDomain (Maybe Bool)
tdPrivacyProtectAdminContact =
lens _tdPrivacyProtectAdminContact
(\s a -> s { _tdPrivacyProtectAdminContact = a })
-- | Whether you want to conceal contact information from WHOIS queries. If you
-- specify true, WHOIS ("who is") queries will return contact information for
-- our registrar partner, Gandi, instead of the contact information that you
-- enter.
--
-- Type: Boolean
--
-- Default: 'true'
--
-- Valid values: 'true' | 'false'
--
-- Required: No
tdPrivacyProtectRegistrantContact :: Lens' TransferDomain (Maybe Bool)
tdPrivacyProtectRegistrantContact =
lens _tdPrivacyProtectRegistrantContact
(\s a -> s { _tdPrivacyProtectRegistrantContact = a })
-- | Whether you want to conceal contact information from WHOIS queries. If you
-- specify true, WHOIS ("who is") queries will return contact information for
-- our registrar partner, Gandi, instead of the contact information that you
-- enter.
--
-- Type: Boolean
--
-- Default: 'true'
--
-- Valid values: 'true' | 'false'
--
-- Required: No
tdPrivacyProtectTechContact :: Lens' TransferDomain (Maybe Bool)
tdPrivacyProtectTechContact =
lens _tdPrivacyProtectTechContact
(\s a -> s { _tdPrivacyProtectTechContact = a })
-- | Provides detailed contact information.
--
-- Type: Complex
--
-- Children: 'FirstName', 'MiddleName', 'LastName', 'ContactType', 'OrganizationName', 'AddressLine1', 'AddressLine2', 'City', 'State', 'CountryCode', 'ZipCode', 'PhoneNumber', 'Email', 'Fax', 'ExtraParams'
--
-- Required: Yes
tdRegistrantContact :: Lens' TransferDomain ContactDetail
tdRegistrantContact =
lens _tdRegistrantContact (\s a -> s { _tdRegistrantContact = a })
-- | Provides detailed contact information.
--
-- Type: Complex
--
-- Children: 'FirstName', 'MiddleName', 'LastName', 'ContactType', 'OrganizationName', 'AddressLine1', 'AddressLine2', 'City', 'State', 'CountryCode', 'ZipCode', 'PhoneNumber', 'Email', 'Fax', 'ExtraParams'
--
-- Required: Yes
tdTechContact :: Lens' TransferDomain ContactDetail
tdTechContact = lens _tdTechContact (\s a -> s { _tdTechContact = a })
-- | Response payload: just the operation identifier returned by the service.
newtype TransferDomainResponse = TransferDomainResponse
    { _tdrOperationId :: Text
    } deriving (Eq, Ord, Read, Show, Monoid, IsString)
-- | 'TransferDomainResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'tdrOperationId' @::@ 'Text'
--
transferDomainResponse :: Text -- ^ 'tdrOperationId'
                       -> TransferDomainResponse
transferDomainResponse p1 = TransferDomainResponse
    { _tdrOperationId = p1
    }
-- | Identifier for tracking the progress of the request. To use this ID to query
-- the operation status, use GetOperationDetail.
--
-- Type: String
--
-- Default: None
--
-- Constraints: Maximum 255 characters.
tdrOperationId :: Lens' TransferDomainResponse Text
tdrOperationId = lens _tdrOperationId (\s a -> s { _tdrOperationId = a })
instance ToPath TransferDomain where
    toPath = const "/"
instance ToQuery TransferDomain where
    toQuery = const mempty
instance ToHeaders TransferDomain
-- The JSON keys below are part of the Route 53 Domains wire protocol and
-- must match the service API exactly.
instance ToJSON TransferDomain where
    toJSON TransferDomain{..} = object
        [ "DomainName"                      .= _tdDomainName
        , "IdnLangCode"                     .= _tdIdnLangCode
        , "DurationInYears"                 .= _tdDurationInYears
        , "Nameservers"                     .= _tdNameservers
        , "AuthCode"                        .= _tdAuthCode
        , "AutoRenew"                       .= _tdAutoRenew
        , "AdminContact"                    .= _tdAdminContact
        , "RegistrantContact"               .= _tdRegistrantContact
        , "TechContact"                     .= _tdTechContact
        , "PrivacyProtectAdminContact"      .= _tdPrivacyProtectAdminContact
        , "PrivacyProtectRegistrantContact" .= _tdPrivacyProtectRegistrantContact
        , "PrivacyProtectTechContact"       .= _tdPrivacyProtectTechContact
        ]
-- Request is sent as a JSON POST; the response is decoded by the
-- 'FromJSON' instance below.
instance AWSRequest TransferDomain where
    type Sv TransferDomain = Route53Domains
    type Rs TransferDomain = TransferDomainResponse
    request  = post "TransferDomain"
    response = jsonResponse
instance FromJSON TransferDomainResponse where
    parseJSON = withObject "TransferDomainResponse" $ \o -> TransferDomainResponse
        <$> o .: "OperationId"
| romanb/amazonka | amazonka-route53-domains/gen/Network/AWS/Route53Domains/TransferDomain.hs | mpl-2.0 | 12,762 | 0 | 11 | 2,864 | 1,385 | 869 | 516 | 136 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.EC2.CreateImage
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Creates an Amazon EBS-backed AMI from an Amazon EBS-backed instance that is
-- either running or stopped.
--
-- If you customized your instance with instance store volumes or EBS volumes
-- in addition to the root device volume, the new AMI contains block device
-- mapping information for those volumes. When you launch an instance from this
-- new AMI, the instance automatically launches with those additional volumes.
--
-- For more information, see <http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/creating-an-ami-ebs.html Creating Amazon EBS-Backed Linux AMIs> in the /Amazon Elastic Compute Cloud User Guide for Linux/.
--
-- <http://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-CreateImage.html>
module Network.AWS.EC2.CreateImage
(
-- * Request
CreateImage
-- ** Request constructor
, createImage
-- ** Request lenses
, ci1BlockDeviceMappings
, ci1Description
, ci1DryRun
, ci1InstanceId
, ci1Name
, ci1NoReboot
-- * Response
, CreateImageResponse
-- ** Response constructor
, createImageResponse
-- ** Response lenses
, cirImageId
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.EC2.Types
import qualified GHC.Exts
-- | Request payload for the EC2 CreateImage operation; only the instance ID
-- and the image name are required.
data CreateImage = CreateImage
    { _ci1BlockDeviceMappings :: List "BlockDeviceMapping" BlockDeviceMapping
    , _ci1Description         :: Maybe Text
    , _ci1DryRun              :: Maybe Bool
    , _ci1InstanceId          :: Text
    , _ci1Name                :: Text
    , _ci1NoReboot            :: Maybe Bool
    } deriving (Eq, Read, Show)
-- | 'CreateImage' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'ci1BlockDeviceMappings' @::@ ['BlockDeviceMapping']
--
-- * 'ci1Description' @::@ 'Maybe' 'Text'
--
-- * 'ci1DryRun' @::@ 'Maybe' 'Bool'
--
-- * 'ci1InstanceId' @::@ 'Text'
--
-- * 'ci1Name' @::@ 'Text'
--
-- * 'ci1NoReboot' @::@ 'Maybe' 'Bool'
--
-- Smart constructor: the two required fields are taken as arguments; all
-- optional fields default to 'Nothing' (an empty list for the block device
-- mappings) and can be overridden through the lenses below.
createImage :: Text -- ^ 'ci1InstanceId'
            -> Text -- ^ 'ci1Name'
            -> CreateImage
createImage instId imageName = CreateImage
    { _ci1BlockDeviceMappings = mempty
    , _ci1Description         = Nothing
    , _ci1DryRun              = Nothing
    , _ci1InstanceId          = instId
    , _ci1Name                = imageName
    , _ci1NoReboot            = Nothing
    }
-- | Information about one or more block device mappings.
ci1BlockDeviceMappings :: Lens' CreateImage [BlockDeviceMapping]
ci1BlockDeviceMappings =
lens _ci1BlockDeviceMappings (\s a -> s { _ci1BlockDeviceMappings = a })
. _List
-- | A description for the new image.
ci1Description :: Lens' CreateImage (Maybe Text)
ci1Description = lens _ci1Description (\s a -> s { _ci1Description = a })
ci1DryRun :: Lens' CreateImage (Maybe Bool)
ci1DryRun = lens _ci1DryRun (\s a -> s { _ci1DryRun = a })
-- | The ID of the instance.
ci1InstanceId :: Lens' CreateImage Text
ci1InstanceId = lens _ci1InstanceId (\s a -> s { _ci1InstanceId = a })
-- | A name for the new image.
--
-- Constraints: 3-128 alphanumeric characters, parentheses (()), square
-- brackets ([]), spaces ( ), periods (.), slashes (/), dashes (-), single
-- quotes ('), at-signs (@), or underscores(_)
ci1Name :: Lens' CreateImage Text
ci1Name = lens _ci1Name (\s a -> s { _ci1Name = a })
-- | By default, this parameter is set to 'false', which means Amazon EC2 attempts
-- to shut down the instance cleanly before image creation and then reboots the
-- instance. When the parameter is set to 'true', Amazon EC2 doesn't shut down the
-- instance before creating the image. When this option is used, file system
-- integrity on the created image can't be guaranteed.
ci1NoReboot :: Lens' CreateImage (Maybe Bool)
ci1NoReboot = lens _ci1NoReboot (\s a -> s { _ci1NoReboot = a })
-- | Response payload: the ID of the newly created AMI, when present.
newtype CreateImageResponse = CreateImageResponse
    { _cirImageId :: Maybe Text
    } deriving (Eq, Ord, Read, Show, Monoid)
-- | 'CreateImageResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'cirImageId' @::@ 'Maybe' 'Text'
--
createImageResponse :: CreateImageResponse
createImageResponse = CreateImageResponse
    { _cirImageId = Nothing
    }
-- | The ID of the new AMI.
cirImageId :: Lens' CreateImageResponse (Maybe Text)
cirImageId = lens _cirImageId (\s a -> s { _cirImageId = a })
instance ToPath CreateImage where
    toPath = const "/"
-- The query keys below are part of the EC2 query-protocol wire format and
-- must match the service API exactly.
instance ToQuery CreateImage where
    toQuery CreateImage{..} = mconcat
        [ "BlockDeviceMapping" `toQueryList` _ci1BlockDeviceMappings
        , "Description"        =? _ci1Description
        , "DryRun"             =? _ci1DryRun
        , "InstanceId"         =? _ci1InstanceId
        , "Name"               =? _ci1Name
        , "NoReboot"           =? _ci1NoReboot
        ]
instance ToHeaders CreateImage
-- Request is a query-protocol POST; the response is decoded from XML.
instance AWSRequest CreateImage where
    type Sv CreateImage = EC2
    type Rs CreateImage = CreateImageResponse
    request  = post "CreateImage"
    response = xmlResponse
instance FromXML CreateImageResponse where
    parseXML x = CreateImageResponse
        <$> x .@? "imageId"
| kim/amazonka | amazonka-ec2/gen/Network/AWS/EC2/CreateImage.hs | mpl-2.0 | 6,129 | 0 | 10 | 1,427 | 789 | 480 | 309 | 86 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.KMS.EnableKeyRotation
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Enables rotation of the specified customer master key.
--
-- /See:/ <http://docs.aws.amazon.com/kms/latest/APIReference/API_EnableKeyRotation.html AWS API Reference> for EnableKeyRotation.
module Network.AWS.KMS.EnableKeyRotation
(
-- * Creating a Request
enableKeyRotation
, EnableKeyRotation
-- * Request Lenses
, ekrKeyId
-- * Destructuring the Response
, enableKeyRotationResponse
, EnableKeyRotationResponse
) where
import Network.AWS.KMS.Types
import Network.AWS.KMS.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | /See:/ 'enableKeyRotation' smart constructor.
-- | Request payload: carries only the key identifier to enable rotation for.
newtype EnableKeyRotation = EnableKeyRotation'
    { _ekrKeyId :: Text
    } deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'EnableKeyRotation' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ekrKeyId'
enableKeyRotation
    :: Text -- ^ 'ekrKeyId'
    -> EnableKeyRotation
enableKeyRotation pKeyId_ =
    EnableKeyRotation'
    { _ekrKeyId = pKeyId_
    }
-- | A unique identifier for the customer master key. This value can be a
-- globally unique identifier or the fully specified ARN to a key.
--
-- -   Key ARN Example -
--     arn:aws:kms:us-east-1:123456789012:key\/12345678-1234-1234-1234-123456789012
-- -   Globally Unique Key ID Example -
--     12345678-1234-1234-1234-123456789012
ekrKeyId :: Lens' EnableKeyRotation Text
ekrKeyId = lens _ekrKeyId (\ s a -> s{_ekrKeyId = a});
-- The service returns an empty body on success, hence 'receiveNull'.
instance AWSRequest EnableKeyRotation where
        type Rs EnableKeyRotation = EnableKeyRotationResponse
        request = postJSON kMS
        response = receiveNull EnableKeyRotationResponse'
-- The X-Amz-Target header selects the KMS operation on the shared endpoint.
instance ToHeaders EnableKeyRotation where
        toHeaders
          = const
              (mconcat
                 ["X-Amz-Target" =#
                    ("TrentService.EnableKeyRotation" :: ByteString),
                  "Content-Type" =#
                    ("application/x-amz-json-1.1" :: ByteString)])
instance ToJSON EnableKeyRotation where
        toJSON EnableKeyRotation'{..}
          = object (catMaybes [Just ("KeyId" .= _ekrKeyId)])
instance ToPath EnableKeyRotation where
        toPath = const "/"
instance ToQuery EnableKeyRotation where
        toQuery = const mempty
-- | /See:/ 'enableKeyRotationResponse' smart constructor.
data EnableKeyRotationResponse =
    EnableKeyRotationResponse'
    deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'EnableKeyRotationResponse' with the minimum fields required to make a request.
--
enableKeyRotationResponse
    :: EnableKeyRotationResponse
enableKeyRotationResponse = EnableKeyRotationResponse'
| fmapfmapfmap/amazonka | amazonka-kms/gen/Network/AWS/KMS/EnableKeyRotation.hs | mpl-2.0 | 3,475 | 0 | 12 | 720 | 403 | 245 | 158 | 56 | 1 |
module Distribution.Server.Features.StaticFiles (
staticFilesFeature
) where
import Distribution.Server.Framework
-- | The feature to serve the static html files.
--
-- Don't think this is going to be used that much, as it's not too modular, and
-- must be last in order. Top-level handling seems more appropriate.
staticFilesFeature :: HackageFeature
staticFilesFeature =
    HackageFeature
      { featureName = "static files"
      , serverPart  = serveStaticFiles
        -- This feature keeps no persistent state, so there is nothing
        -- to dump or restore when the server is backed up.
      , dumpBackup    = Nothing
      , restoreBackup = Nothing
      }
-- Serve files out of the server's configured static directory, using
-- "hackage.html" as the index page.
serveStaticFiles :: ServerEnv -> ServerPart Response
serveStaticFiles = fileServe ["hackage.html"] . serverStaticDir
| isomorphism/hackage2 | Distribution/Server/Features/StaticFiles.hs | bsd-3-clause | 753 | 0 | 7 | 140 | 95 | 59 | 36 | 12 | 1 |
{-# LANGUAGE CPP #-}
module Distribution.Client.Dependency.Modular.Cycles (
detectCyclesPhase
) where
import Prelude hiding (cycle)
import Data.Graph (SCC)
import qualified Data.Graph as Gr
import qualified Data.Map as Map
import Distribution.Client.Dependency.Modular.Dependency
import Distribution.Client.Dependency.Modular.Package
import Distribution.Client.Dependency.Modular.Tree
import qualified Distribution.Client.Dependency.Modular.ConflictSet as CS
-- | Find and reject any solutions that are cyclic
detectCyclesPhase :: Tree QGoalReason -> Tree QGoalReason
detectCyclesPhase = cata go
  where
    -- The only node of interest is DoneF
    go :: TreeF QGoalReason (Tree QGoalReason) -> Tree QGoalReason
    -- All choice and failure nodes are rebuilt unchanged; the fold only
    -- inspects completed solutions.
    go (PChoiceF qpn gr cs) = PChoice qpn gr cs
    go (FChoiceF qfn gr w m cs) = FChoice qfn gr w m cs
    go (SChoiceF qsn gr w cs) = SChoice qsn gr w cs
    go (GoalChoiceF cs) = GoalChoice cs
    go (FailF cs reason) = Fail cs reason
    -- We check for cycles only if we have actually found a solution
    -- This minimizes the number of cycle checks we do as cycles are rare
    go (DoneF revDeps) = do
      case findCycles revDeps of
        Nothing -> Done revDeps
        Just relSet -> Fail relSet CyclicDependencies
-- | Given the reverse dependency map from a 'Done' node in the tree, as well
-- as the full conflict set containing all decisions that led to that 'Done'
-- node, check if the solution is cyclic. If it is, return the conflict set
-- containing all decisions that could potentially break the cycle.
findCycles :: RevDepMap -> Maybe (ConflictSet QPN)
findCycles revDeps =
    case cycles of
      [] -> Nothing
      -- Only the first cycle found is reported; breaking it and re-solving
      -- will surface any remaining ones.
      c:_ -> Just $ CS.unions $ map (varToConflictSet . P) c
  where
    -- Cycles are exactly the cyclic strongly connected components of the
    -- reverse dependency graph.
    cycles :: [[QPN]]
    cycles = [vs | Gr.CyclicSCC vs <- scc]
    scc :: [SCC QPN]
    scc = Gr.stronglyConnComp . map aux . Map.toList $ revDeps
    -- Adapt a reverse-dependency entry to the (node, key, neighbour keys)
    -- triple expected by 'stronglyConnComp', dropping the component tags.
    aux :: (QPN, [(comp, QPN)]) -> (QPN, QPN, [QPN])
    aux (fr, to) = (fr, fr, map snd to)
| tolysz/prepare-ghcjs | spec-lts8/cabal/cabal-install/Distribution/Client/Dependency/Modular/Cycles.hs | bsd-3-clause | 2,017 | 0 | 12 | 460 | 516 | 285 | 231 | 34 | 7 |
module Settings.Packages.GhcPrim (ghcPrimPackageArgs) where
import Oracles.Flag
import Expression
-- | Extra build arguments that apply only when building the ghc-prim package.
ghcPrimPackageArgs :: Args
ghcPrimPackageArgs = package ghcPrim ? mconcat
    [ builder GhcCabal ? arg "--flag=include-ghc-prim"
      -- Silence -Wsync-nand when compiling cbits/atomic.c, but only for GCC:
      -- Clang does not emit that warning (nor accept the flag).
    , builder (Cc CompileC) ?
      (not <$> flag GccIsClang) ?
      input "//cbits/atomic.c" ? arg "-Wno-sync-nand" ]
| bgamari/shaking-up-ghc | src/Settings/Packages/GhcPrim.hs | bsd-3-clause | 355 | 0 | 13 | 67 | 93 | 48 | 45 | 9 | 1 |
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ViewPatterns #-}
-- | Confirm that a build plan has a consistent set of dependencies.
module Stackage.CheckBuildPlan
( checkBuildPlan
, BadBuildPlan
) where
import Control.Monad.Writer.Strict (Writer, execWriter, tell)
import qualified Data.Map.Strict as M
import qualified Data.Text as T
import Stackage.BuildConstraints
import Stackage.BuildPlan
import Stackage.PackageDescription
import Stackage.Prelude
-- | Check the build plan for missing deps, wrong versions, etc.
checkBuildPlan :: (MonadThrow m) => BuildPlan -> m ()
checkBuildPlan BuildPlan {..}
    | null errs' = return ()
    | otherwise = throwM errs
  where
    -- Core (GHC-shipped) packages are included with an empty dependency
    -- list; planned packages carry the library/executable deps only.
    -- NOTE(review): '++' here appears to be the Map append from the classy
    -- prelude (left-biased union) — confirm against Stackage.Prelude.
    allPackages = map (,mempty) (siCorePackages bpSystemInfo) ++
                  map (ppVersion &&& M.keys . M.filter libAndExe . sdPackages . ppDesc) bpPackages
    -- Collect all dependency errors for all planned packages in one pass.
    errs@(BadBuildPlan errs') =
        execWriter $ mapM_ (checkDeps allPackages) $ mapToList bpPackages
    -- Only looking at libraries and executables, benchmarks and tests
    -- are allowed to create cycles (e.g. test-framework depends on
    -- text, which uses test-framework in its test-suite).
    libAndExe (DepInfo cs _) = any (flip elem [CompLibrary,CompExecutable]) cs
-- | For a given package name and plan, check that its dependencies are:
--
-- 1. Existent (existing in the provided package map)
-- 2. Within version range
-- 3. Check for dependency cycles.
checkDeps :: Map PackageName (Version,[PackageName])
          -> (PackageName, PackagePlan)
          -> Writer BadBuildPlan ()
checkDeps allPackages (user, pb) =
    mapM_ go $ mapToList $ sdPackages $ ppDesc pb
  where
    -- For each declared dependency: report it as missing, report a version
    -- mismatch, or (when it satisfies the range) run the cycle check.
    go (dep, diRange -> range) =
        case lookup dep allPackages of
            Nothing -> tell $ BadBuildPlan $ singletonMap (dep, Nothing) errMap
            Just (version,deps)
                | version `withinRange` range ->
                    occursCheck allPackages
                                (\d v ->
                                      tell $ BadBuildPlan $ singletonMap
                                          (d,v)
                                          errMap)
                                dep
                                deps
                                []
                | otherwise -> tell $ BadBuildPlan $ singletonMap
                    (dep, Just version)
                    errMap
      where
        -- Every error produced here is attributed to the depending
        -- package ('user') together with the range it asked for.
        errMap = singletonMap pu range
        pu = PkgUser
            { puName = user
            , puVersion = ppVersion pb
            , puMaintainer = pcMaintainer $ ppConstraints pb
            , puGithubPings = ppGithubPings pb
            }
-- | Check whether the package(s) occurs within its own dependency
-- tree.
occursCheck
    :: Monad m
    => Map PackageName (Version,[PackageName])
    -- ^ All packages.
    -> (PackageName -> Maybe Version -> m ())
    -- ^ Report an erroneous package.
    -> PackageName
    -- ^ Starting package to check for cycles in.
    -> [PackageName]
    -- ^ Dependencies of the package.
    -> [PackageName]
    -- ^ Previously seen packages up the dependency tree.
    -> m ()
occursCheck allPackages reportError =
    go
  where
    -- Depth-first walk: a cycle exists when some dependency is already on
    -- the path of ancestors ('seen').
    -- NOTE(review): a direct self-dependency of the starting package is not
    -- reported (the 'pkg' /= pkg' guard skips it and 'seen' starts empty) —
    -- confirm whether that case is ruled out upstream.
    go pkg deps seen =
        case find (flip elem seen) deps of
            Just cyclic ->
                reportError cyclic $
                fmap fst (lookup cyclic allPackages)
            Nothing ->
                forM_ deps $
                \pkg' ->
                     case lookup pkg' allPackages of
                         Just (_v,deps')
                             | pkg' /= pkg -> go pkg' deps' seen'
                         _ -> return ()
      where seen' = pkg : seen
-- | Identifies the package (and the people responsible for it) that
-- declared an erroneous dependency.
data PkgUser = PkgUser
    { puName :: PackageName
    , puVersion :: Version
    , puMaintainer :: Maybe Maintainer
    , puGithubPings :: Set Text
    }
    deriving (Eq, Ord)
-- | Render a 'PkgUser' as @name-version@.
pkgUserShow1 :: PkgUser -> Text
pkgUserShow1 PkgUser {..} = concat
    [ display puName
    , "-"
    , display puVersion
    ]
-- | Render the maintainer line for a 'PkgUser', with \@-pings for the
-- associated GitHub accounts.
pkgUserShow2 :: PkgUser -> Text
pkgUserShow2 PkgUser {..} = unwords
    $ (maybe "No maintainer" unMaintainer puMaintainer ++ ".")
    : map (cons '@') (setToList puGithubPings)
-- | Accumulated dependency errors, keyed by the problematic dependency
-- (name plus its actual version, or 'Nothing' when it is missing from the
-- plan); each maps to the packages that require it and their ranges.
newtype BadBuildPlan =
    BadBuildPlan (Map (PackageName, Maybe Version) (Map PkgUser VersionRange))
    deriving Typeable
instance Exception BadBuildPlan
-- The rendering below is Markdown-friendly, as it ends up in issue reports.
instance Show BadBuildPlan where
    show (BadBuildPlan errs) =
        unpack $ concatMap go $ mapToList errs
      where
        go ((dep, mdepVer), users) = unlines
            $ ""
            : showDepVer dep mdepVer
            : map showUser (mapToList users)
        showDepVer :: PackageName -> Maybe Version -> Text
        showDepVer dep Nothing = display dep ++ " (not present) depended on by:"
        showDepVer dep (Just version) = concat
            [ display dep
            , "-"
            , display version
            , " depended on by:"
            ]
        showUser :: (PkgUser, VersionRange) -> Text
        showUser (pu, range) = concat
            [ "- "
            , pkgUserShow1 pu
            , " ("
            -- add a space after < to avoid confusing Markdown processors (like
            -- Github's issue tracker)
            , T.replace "<" "< " $ display range
            , "). "
            , pkgUserShow2 pu
            ]
-- Merging two error sets intersects the ranges recorded for the same
-- (dependency, user) pair.
instance Monoid BadBuildPlan where
    mempty = BadBuildPlan mempty
    mappend (BadBuildPlan x) (BadBuildPlan y) =
        BadBuildPlan $ unionWith (unionWith intersectVersionRanges) x y
| myfreeweb/stackage | Stackage/CheckBuildPlan.hs | mit | 5,796 | 0 | 18 | 2,018 | 1,273 | 672 | 601 | -1 | -1 |
f a = \a -> a + a | mpickering/hlint-refactor | tests/examples/Lambda1.hs | bsd-3-clause | 17 | 1 | 7 | 7 | 22 | 9 | 13 | 1 | 1 |
{-|
Module : IRTS.Portable
Description : Serialise Idris' IR to JSON.
License : BSD3
Maintainer : The Idris Community.
-}
{-# LANGUAGE FlexibleInstances, OverloadedStrings, TypeSynonymInstances #-}
module IRTS.Portable (writePortable) where
import Idris.Core.CaseTree
import Idris.Core.Evaluate
import Idris.Core.TT
import IRTS.Bytecode
import IRTS.CodegenCommon
import IRTS.Defunctionalise
import IRTS.Simplified
import Data.Aeson
import qualified Data.ByteString.Lazy as B
import System.IO
-- | Top-level payload written by 'writePortable': a tagged, versioned
-- wrapper around the compiler's code-generation information.
data CodegenFile = CGFile {
  fileType :: String, -- ^ format tag; always "idris-codegen" (see 'writePortable')
  version :: Int,     -- ^ output format version (see 'formatVersion')
  cgInfo :: CodegenInfo -- ^ the actual code-generation payload
}
-- | Version of the serialised JSON format. Update this whenever the shape
-- of the output changes so that consumers can detect incompatible files.
formatVersion :: Int
formatVersion = 3
-- | Serialise the code-generation info as a versioned JSON document and
-- write it to the given handle.
writePortable :: Handle -> CodegenInfo -> IO ()
writePortable file ci =
    B.hPut file (encode (CGFile "idris-codegen" formatVersion ci))
-- Serialise the wrapper with explicit file-type/version fields so that
-- consumers can sanity-check the input before decoding the payload.
instance ToJSON CodegenFile where
    toJSON (CGFile ft v ci) = object ["file-type" .= ft,
                                      "version" .= v,
                                      "codegen-info" .= toJSON ci]
-- Flatten every field of 'CodegenInfo' into one JSON object. Declarations
-- are emitted at several IR stages (lifted, defunctionalised, simplified,
-- bytecode, TT) so external backends can pick whichever level suits them.
instance ToJSON CodegenInfo where
    toJSON ci = object ["output-file" .= (outputFile ci),
                        "includes" .= (includes ci),
                        "import-dirs" .= (importDirs ci),
                        "compile-objs" .= (compileObjs ci),
                        "compile-libs" .= (compileLibs ci),
                        "compiler-flags" .= (compilerFlags ci),
                        "interfaces" .= (interfaces ci),
                        "exports" .= (exportDecls ci),
                        "lift-decls" .= (liftDecls ci),
                        "defun-decls" .= (defunDecls ci),
                        "simple-decls" .= (simpleDecls ci),
                        "bytecode" .= (map toBC (simpleDecls ci)), -- derived on the fly from the simplified decls
                        "tt-decls" .= (ttDecls ci)]
-- Names are serialised in their code-generator rendering (see 'showCG').
instance ToJSON Name where
    toJSON = toJSON . showCG
instance ToJSON ExportIFace where
toJSON (Export n f xs) = object ["ffi-desc" .= n,
"interface-file" .= f,
"exports" .= xs]
instance ToJSON FDesc where
toJSON (FCon n) = object ["FCon" .= n]
toJSON (FStr s) = object ["FStr" .= s]
toJSON (FUnknown) = object ["FUnknown" .= Null]
toJSON (FIO fd) = object ["FIO" .= fd]
toJSON (FApp n xs) = object ["FApp" .= (n, xs)]
instance ToJSON Export where
toJSON (ExportData fd) = object ["ExportData" .= fd]
toJSON (ExportFun n dsc ret args) = object ["ExportFun" .= (n, dsc, ret, args)]
instance ToJSON LDecl where
toJSON (LFun opts name args def) = object ["LFun" .= (opts, name, args, def)]
toJSON (LConstructor name tag ar) = object ["LConstructor" .= (name, tag, ar)]
instance ToJSON LOpt where
toJSON Inline = String "Inline"
toJSON NoInline = String "NoInline"
instance ToJSON LExp where
toJSON (LV lv) = object ["LV" .= lv]
toJSON (LApp tail exp args) = object ["LApp" .= (tail, exp, args)]
toJSON (LLazyApp name exps) = object ["LLazyApp" .= (name, exps)]
toJSON (LLazyExp exp) = object ["LLazyExp" .= exp]
toJSON (LForce exp) = object ["LForce" .= exp]
toJSON (LLet name a b) = object ["LLet" .= (name, a, b)]
toJSON (LLam args exp) = object ["LLam" .= (args, exp)]
toJSON (LProj exp i) = object ["LProj" .= (exp, i)]
toJSON (LCon lv i n exps) = object ["LCon" .= (lv, i, n, exps)]
toJSON (LCase ct exp alts) = object ["LCase" .= (ct, exp, alts)]
toJSON (LConst c) = object ["LConst" .= c]
toJSON (LForeign fd ret exps) = object ["LForeign" .= (fd, ret, exps)]
toJSON (LOp prim exps) = object ["LOp" .= (prim, exps)]
toJSON LNothing = object ["LNothing" .= Null]
toJSON (LError s) = object ["LError" .= s]
instance ToJSON LVar where
toJSON (Loc i) = object ["Loc" .= i]
toJSON (Glob n) = object ["Glob" .= n]
instance ToJSON CaseType where
toJSON Updatable = String "Updatable"
toJSON Shared = String "Shared"
instance ToJSON LAlt where
toJSON (LConCase i n ns exp) = object ["LConCase" .= (i, n, ns, exp)]
toJSON (LConstCase c exp) = object ["LConstCase" .= (c, exp)]
toJSON (LDefaultCase exp) = object ["LDefaultCase" .= exp]
instance ToJSON Const where
toJSON (I i) = object ["int" .= i]
toJSON (BI i) = object ["bigint" .= (show i)]
toJSON (Fl d) = object ["double" .= d]
toJSON (Ch c) = object ["char" .= (show c)]
toJSON (Str s) = object ["string" .= s]
toJSON (B8 b) = object ["bits8" .= b]
toJSON (B16 b) = object ["bits16" .= b]
toJSON (B32 b) = object ["bits32" .= b]
toJSON (B64 b) = object ["bits64" .= b]
toJSON (AType at) = object ["atype" .= at]
toJSON StrType = object ["strtype" .= Null]
toJSON WorldType = object ["worldtype" .= Null]
toJSON TheWorld = object ["theworld" .= Null]
toJSON VoidType = object ["voidtype" .= Null]
toJSON Forgot = object ["forgot" .= Null]
instance ToJSON ArithTy where
toJSON (ATInt it) = object ["ATInt" .= it]
toJSON ATFloat = object ["ATFloat" .= Null]
instance ToJSON IntTy where
toJSON it = toJSON $ intTyName it
instance ToJSON PrimFn where
toJSON (LPlus aty) = object ["LPlus" .= aty]
toJSON (LMinus aty) = object ["LMinus" .= aty]
toJSON (LTimes aty) = object ["LTimes" .= aty]
toJSON (LUDiv aty) = object ["LUDiv" .= aty]
toJSON (LSDiv aty) = object ["LSDiv" .= aty]
toJSON (LURem ity) = object ["LURem" .= ity]
toJSON (LSRem aty) = object ["LSRem" .= aty]
toJSON (LAnd ity) = object ["LAnd" .= ity]
toJSON (LOr ity) = object ["LOr" .= ity]
toJSON (LXOr ity) = object ["LXOr" .= ity]
toJSON (LCompl ity) = object ["LCompl" .= ity]
toJSON (LSHL ity) = object ["LSHL" .= ity]
toJSON (LLSHR ity) = object ["LLSHR" .= ity]
toJSON (LASHR ity) = object ["LASHR" .= ity]
toJSON (LEq aty) = object ["LEq" .= aty]
toJSON (LLt ity) = object ["LLt" .= ity]
toJSON (LLe ity) = object ["LLe" .= ity]
toJSON (LGt ity) = object ["LGt" .= ity]
toJSON (LGe ity) = object ["LGe" .= ity]
toJSON (LSLt aty) = object ["LSLt" .= aty]
toJSON (LSLe aty) = object ["LSLe" .= aty]
toJSON (LSGt aty) = object ["LSGt" .= aty]
toJSON (LSGe aty) = object ["LSGe" .= aty]
toJSON (LZExt from to) = object ["LZExt" .= (from, to)]
toJSON (LSExt from to) = object ["LSExt" .= (from, to)]
toJSON (LTrunc from to) = object ["LTrunc" .= (from, to)]
toJSON LStrConcat = object ["LStrConcat" .= Null]
toJSON LStrLt = object ["LStrLt" .= Null]
toJSON LStrEq = object ["LStrEq" .= Null]
toJSON LStrLen = object ["LStrLen" .= Null]
toJSON (LIntFloat ity) = object ["LIntFloat" .= ity]
toJSON (LFloatInt ity) = object ["LFloatInt" .= ity]
toJSON (LIntStr ity) = object ["LIntStr" .= ity]
toJSON (LStrInt ity) = object ["LStrInt" .= ity]
toJSON (LIntCh ity) = object ["LIntCh" .= ity]
toJSON (LChInt ity) = object ["LChInt" .= ity]
toJSON LFloatStr = object ["LFloatStr" .= Null]
toJSON LStrFloat = object ["LStrFloat" .= Null]
toJSON (LBitCast from to) = object ["LBitCast" .= (from, to)]
toJSON LFExp = object ["LFExp" .= Null]
toJSON LFLog = object ["LFLog" .= Null]
toJSON LFSin = object ["LFSin" .= Null]
toJSON LFCos = object ["LFCos" .= Null]
toJSON LFTan = object ["LFTan" .= Null]
toJSON LFASin = object ["LFASin" .= Null]
toJSON LFACos = object ["LFACos" .= Null]
toJSON LFATan = object ["LFATan" .= Null]
toJSON LFATan2 = object ["LFATan2" .= Null]
toJSON LFSqrt = object ["LFSqrt" .= Null]
toJSON LFFloor = object ["LFFloor" .= Null]
toJSON LFCeil = object ["LFCeil" .= Null]
toJSON LFNegate = object ["LFNegate" .= Null]
toJSON LStrHead = object ["LStrHead" .= Null]
toJSON LStrTail = object ["LStrTail" .= Null]
toJSON LStrCons = object ["LStrCons" .= Null]
toJSON LStrIndex = object ["LStrIndex" .= Null]
toJSON LStrRev = object ["LStrRev" .= Null]
toJSON LStrSubstr = object ["LStrSubstr" .= Null]
toJSON LReadStr = object ["LReadStr" .= Null]
toJSON LWriteStr = object ["LWriteStr" .= Null]
toJSON LSystemInfo = object ["LSystemInfo" .= Null]
toJSON LFork = object ["LFork" .= Null]
toJSON LPar = object ["LPar" .= Null]
toJSON (LExternal name) = object ["LExternal" .= name]
toJSON LCrash = object ["LCrash" .= Null]
toJSON LNoOp = object ["LNoOp" .= Null]
instance ToJSON DDecl where
toJSON (DFun name args exp) = object ["DFun" .= (name, args, exp)]
toJSON (DConstructor name tag arity) = object ["DConstructor" .= (name, tag, arity)]
instance ToJSON DExp where
toJSON (DV lv) = object ["DV" .= lv]
toJSON (DApp tail name exps) = object ["DApp" .= (tail, name, exps)]
toJSON (DLet name a b) = object ["DLet" .= (name, a, b)]
toJSON (DUpdate name exp) = object ["DUpdate" .= (name,exp)]
toJSON (DProj exp i) = object ["DProj" .= (exp, i)]
toJSON (DC lv i name exp) = object ["DC" .= (lv, i, name, exp)]
toJSON (DCase ct exp alts) = object ["DCase" .= (ct, exp, alts)]
toJSON (DChkCase exp alts) = object ["DChkCase" .= (exp, alts)]
toJSON (DConst c) = object ["DConst" .= c]
toJSON (DForeign fd ret exps) = object ["DForeign" .= (fd, ret, exps)]
toJSON (DOp prim exps) = object ["DOp" .= (prim, exps)]
toJSON DNothing = object ["DNothing" .= Null]
toJSON (DError s) = object ["DError" .= s]
instance ToJSON DAlt where
toJSON (DConCase i n ns exp) = object ["DConCase" .= (i, n, ns, exp)]
toJSON (DConstCase c exp) = object ["DConstCase" .= (c, exp)]
toJSON (DDefaultCase exp) = object ["DDefaultCase" .= exp]
instance ToJSON SDecl where
toJSON (SFun name args i exp) = object ["SFun" .= (name, args, i, exp)]
instance ToJSON SExp where
toJSON (SV lv) = object ["SV" .= lv]
toJSON (SApp tail name exps) = object ["SApp" .= (tail, name, exps)]
toJSON (SLet lv a b) = object ["SLet" .= (lv, a, b)]
toJSON (SUpdate lv exp) = object ["SUpdate" .= (lv, exp)]
toJSON (SProj lv i) = object ["SProj" .= (lv, i)]
toJSON (SCon lv i name vars) = object ["SCon" .= (lv, i, name, vars)]
toJSON (SCase ct lv alts) = object ["SCase" .= (ct, lv, alts)]
toJSON (SChkCase lv alts) = object ["SChkCase" .= (lv, alts)]
toJSON (SConst c) = object ["SConst" .= c]
toJSON (SForeign fd ret exps) = object ["SForeign" .= (fd, ret, exps)]
toJSON (SOp prim vars) = object ["SOp" .= (prim, vars)]
toJSON SNothing = object ["SNothing" .= Null]
toJSON (SError s) = object ["SError" .= s]
instance ToJSON SAlt where
toJSON (SConCase i j n ns exp) = object ["SConCase" .= (i, j, n, ns, exp)]
toJSON (SConstCase c exp) = object ["SConstCase" .= (c, exp)]
toJSON (SDefaultCase exp) = object ["SDefaultCase" .= exp]
instance ToJSON BC where
toJSON (ASSIGN r1 r2) = object ["ASSIGN" .= (r1, r2)]
toJSON (ASSIGNCONST r c) = object ["ASSIGNCONST" .= (r, c)]
toJSON (UPDATE r1 r2) = object ["UPDATE" .= (r1, r2)]
toJSON (MKCON con mr i regs) = object ["MKCON" .= (con, mr, i, regs)]
toJSON (CASE b r alts def) = object ["CASE" .= (b, r, alts, def)]
toJSON (PROJECT r loc arity) = object ["PROJECT" .= (r, loc, arity)]
toJSON (PROJECTINTO r1 r2 loc) = object ["PROJECTINTO" .= (r1, r2, loc)]
toJSON (CONSTCASE r alts def) = object ["CONSTCASE" .= (r, alts, def)]
toJSON (CALL name) = object ["CALL" .= name]
toJSON (TAILCALL name) = object ["TAILCALL" .= name]
toJSON (FOREIGNCALL r fd ret exps) = object ["FOREIGNCALL" .= (r, fd, ret, exps)]
toJSON (SLIDE i) = object ["SLIDE" .= i]
toJSON (RESERVE i) = object ["RESERVE" .= i]
toJSON (RESERVENOALLOC i) = object ["RESERVENOALLOC" .= i]
toJSON (ADDTOP i) = object ["ADDTOP" .= i]
toJSON (TOPBASE i) = object ["TOPBASE" .= i]
toJSON (BASETOP i) = object ["BASETOP" .= i]
toJSON REBASE = object ["REBASE" .= Null]
toJSON STOREOLD = object ["STOREOLD" .= Null]
toJSON (OP r prim args) = object ["OP" .= (r, prim, args)]
toJSON (NULL r) = object ["NULL" .= r]
toJSON (ERROR s) = object ["ERROR" .= s]
instance ToJSON Reg where
toJSON RVal = object ["RVal" .= Null]
toJSON (T i) = object ["T" .= i]
toJSON (L i) = object ["L" .= i]
toJSON Tmp = object ["Tmp" .= Null]
instance ToJSON RigCount where
toJSON r = object ["RigCount" .= show r]
instance ToJSON Totality where
toJSON t = object ["Totality" .= show t]
instance ToJSON MetaInformation where
toJSON m = object ["MetaInformation" .= show m]
instance ToJSON Def where
toJSON (Function ty tm) = object ["Function" .= (ty, tm)]
toJSON (TyDecl nm ty) = object ["TyDecl" .= (nm, ty)]
toJSON (Operator ty n f) = Null -- Operator and CaseOp omits same values as in IBC.hs
toJSON (CaseOp info ty argTy _ _ cdefs) = object ["CaseOp" .= (info, ty, argTy, cdefs)]
instance (ToJSON t) => ToJSON (TT t) where
toJSON (P nt name term) = object ["P" .= (nt, name, term)]
toJSON (V n) = object ["V" .= n]
toJSON (Bind n b tt) = object ["Bind" .= (n, b, tt)]
toJSON (App s t1 t2) = object ["App" .= (s, t1, t2)]
toJSON (Constant c) = object ["Constant" .= c]
toJSON (Proj tt n) = object ["Proj" .= (tt, n)]
toJSON Erased = object ["Erased" .= Null]
toJSON Impossible = object ["Impossible" .= Null]
toJSON (Inferred tt) = object ["Inferred" .= tt]
toJSON (TType u) = object ["TType" .= u]
toJSON (UType u) = object ["UType" .= (show u)]
instance ToJSON UExp where
toJSON (UVar src n) = object ["UVar" .= (src, n)]
toJSON (UVal n) = object ["UVal" .= n]
instance (ToJSON t) => ToJSON (AppStatus t) where
toJSON Complete = object ["Complete" .= Null]
toJSON MaybeHoles = object ["MaybeHoles" .= Null]
toJSON (Holes ns) = object ["Holes" .= ns]
instance (ToJSON t) => ToJSON (Binder t) where
toJSON (Lam rc bty) = object ["Lam" .= (rc, bty)]
toJSON (Pi c i t k) = object ["Pi" .= (c, i, t, k)]
toJSON (Let rc t v) = object ["Let" .= (t, v)]
toJSON (NLet t v) = object ["NLet" .= (t, v)]
toJSON (Hole t) = object ["Hole" .= (t)]
toJSON (GHole l ns t) = object ["GHole" .= (l, ns, t)]
toJSON (Guess t v) = object ["Guess" .= (t, v)]
toJSON (PVar rc t) = object ["PVar" .= (rc, t)]
toJSON (PVTy t) = object ["PVTy" .= (t)]
instance ToJSON ImplicitInfo where
toJSON (Impl a b c) = object ["Impl" .= (a, b, c)]
instance ToJSON NameType where
toJSON Bound = object ["Bound" .= Null]
toJSON Ref = object ["Ref" .= Null]
toJSON (DCon a b c) = object ["DCon" .= (a, b, c)]
toJSON (TCon a b) = object ["TCon" .= (a, b)]
instance ToJSON CaseDefs where
toJSON (CaseDefs rt ct) = object ["Runtime" .= rt, "Compiletime" .= ct]
instance (ToJSON t) => ToJSON (SC' t) where
toJSON (Case ct n alts) = object ["Case" .= (ct, n, alts)]
toJSON (ProjCase t alts) = object ["ProjCase" .= (t, alts)]
toJSON (STerm t) = object ["STerm" .= t]
toJSON (UnmatchedCase s) = object ["UnmatchedCase" .= s]
toJSON ImpossibleCase = object ["ImpossibleCase" .= Null]
instance (ToJSON t) => ToJSON (CaseAlt' t) where
toJSON (ConCase n c ns sc) = object ["ConCase" .= (n, c, ns, sc)]
toJSON (FnCase n ns sc) = object ["FnCase" .= (n, ns, sc)]
toJSON (ConstCase c sc) = object ["ConstCase" .= (c, sc)]
toJSON (SucCase n sc) = object ["SucCase" .= (n, sc)]
toJSON (DefaultCase sc) = object ["DefaultCase" .= sc]
instance ToJSON CaseInfo where
toJSON (CaseInfo a b c) = object ["CaseInfo" .= (a, b, c)]
instance ToJSON Accessibility where
toJSON a = object ["Accessibility" .= show a]
| kojiromike/Idris-dev | src/IRTS/Portable.hs | bsd-3-clause | 15,614 | 0 | 12 | 3,876 | 6,888 | 3,574 | 3,314 | 309 | 1 |
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE DefaultSignatures #-}
{-# LANGUAGE Rank2Types #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Database.Persist.Sql.Orphan.PersistStore
( withRawQuery
, BackendKey(..)
, toSqlKey
, fromSqlKey
, getFieldName
, getTableName
, tableDBName
, fieldDBName
) where
import Database.Persist
import Database.Persist.Sql.Types
import Database.Persist.Sql.Raw
import Database.Persist.Sql.Util (dbIdColumns)
import qualified Data.Conduit as C
import qualified Data.Conduit.List as CL
import qualified Data.Text as T
import Data.Text (Text, unpack)
import Data.Monoid (mappend, (<>))
import Control.Monad.IO.Class
import Data.ByteString.Char8 (readInteger)
import Data.Maybe (isJust)
import Data.List (find)
import Control.Monad.Trans.Reader (ReaderT, ask)
import Data.Acquire (with)
import Data.Int (Int64)
import Web.PathPieces (PathPiece)
import Database.Persist.Sql.Class (PersistFieldSql)
import qualified Data.Aeson as A
import Control.Exception.Lifted (throwIO)
-- | Run a raw SQL query with the given positional parameters and feed the
-- resulting rows into the supplied conduit 'C.Sink'. The underlying
-- statement resource is acquired and released via 'with'.
withRawQuery :: MonadIO m
             => Text                        -- ^ SQL text with @?@ placeholders
             -> [PersistValue]              -- ^ values for the placeholders
             -> C.Sink [PersistValue] IO a  -- ^ consumer of the result rows
             -> ReaderT SqlBackend m a
withRawQuery sql vals sink = do
    srcRes <- rawQueryRes sql vals
    liftIO $ with srcRes (C.$$ sink)
-- | Build a record key directly from a raw 'Int64' database value.
toSqlKey :: ToBackendKey SqlBackend record => Int64 -> Key record
toSqlKey i = fromBackendKey (SqlBackendKey i)
-- | Extract the raw 'Int64' database value from a record key.
fromSqlKey :: ToBackendKey SqlBackend record => Key record -> Int64
fromSqlKey k = unSqlBackendKey (toBackendKey k)
-- | Build the @WHERE@ clause body matching the given key: one @col=?@
-- equality per (possibly composite) id column, joined with @AND@.
whereStmtForKey :: PersistEntity record => SqlBackend -> Key record -> Text
whereStmtForKey conn k =
    T.intercalate " AND " [col <> "=? " | col <- dbIdColumns conn entDef]
  where
    entDef = entityDef (dummyFromKey k)
-- | get the SQL string for the table that a PeristEntity represents
-- Useful for raw SQL queries
--
-- Your backend may provide a more convenient tableName function
-- which does not operate in a Monad
-- | Produce the escaped SQL table name for the given record's entity,
-- using the escaping rules of the current backend connection.
getTableName :: forall record m.
             ( PersistEntity record
             , PersistEntityBackend record ~ SqlBackend
             , Monad m
             ) => record -> ReaderT SqlBackend m Text
getTableName rec = fmap escape ask
  where
    escape conn = connEscapeName conn (tableDBName rec)
-- | useful for a backend to implement tableName by adding escaping
-- | Unescaped database table name for a record's entity; backends may wrap
-- this with their own escaping (see 'getTableName').
tableDBName :: forall record.
            ( PersistEntity record
            , PersistEntityBackend record ~ SqlBackend
            ) => record -> DBName
tableDBName = entityDB . entityDef . Just
-- | get the SQL string for the field that an EntityField represents
-- Useful for raw SQL queries
--
-- Your backend may provide a more convenient fieldName function
-- which does not operate in a Monad
-- | Produce the escaped SQL column name for the given entity field, using
-- the escaping rules of the current backend connection.
getFieldName :: forall record typ m.
             ( PersistEntity record
             , PersistEntityBackend record ~ SqlBackend
             , Monad m
             )
             => EntityField record typ -> ReaderT SqlBackend m Text
getFieldName field = fmap escape ask
  where
    escape conn = connEscapeName conn (fieldDBName field)
-- | useful for a backend to implement fieldName by adding escaping
-- | Unescaped database column name for an entity field; backends may wrap
-- this with their own escaping (see 'getFieldName').
fieldDBName :: forall record typ. (PersistEntity record) => EntityField record typ -> DBName
fieldDBName field = fieldDB (persistFieldDef field)
instance PersistStore SqlBackend where
newtype BackendKey SqlBackend = SqlBackendKey { unSqlBackendKey :: Int64 }
deriving (Show, Read, Eq, Ord, Num, Integral, PersistField, PersistFieldSql, PathPiece, Real, Enum, Bounded, A.ToJSON, A.FromJSON)
update _ [] = return ()
update k upds = do
conn <- ask
let go'' n Assign = n <> "=?"
go'' n Add = T.concat [n, "=", n, "+?"]
go'' n Subtract = T.concat [n, "=", n, "-?"]
go'' n Multiply = T.concat [n, "=", n, "*?"]
go'' n Divide = T.concat [n, "=", n, "/?"]
go'' _ (BackendSpecificUpdate up) = error $ T.unpack $ "BackendSpecificUpdate" `mappend` up `mappend` "not supported"
let go' (x, pu) = go'' (connEscapeName conn x) pu
let wher = whereStmtForKey conn k
let sql = T.concat
[ "UPDATE "
, connEscapeName conn $ tableDBName $ recordTypeFromKey k
, " SET "
, T.intercalate "," $ map (go' . go) upds
, " WHERE "
, wher
]
rawExecute sql $
map updatePersistValue upds `mappend` keyToValues k
where
go x = (fieldDB $ updateFieldDef x, updateUpdate x)
insert val = do
conn <- ask
let esql = connInsertSql conn t vals
key <-
case esql of
ISRSingle sql -> withRawQuery sql vals $ do
x <- CL.head
case x of
Just [PersistInt64 i] -> case keyFromValues [PersistInt64 i] of
Left err -> error $ "SQL insert: keyFromValues: PersistInt64 " `mappend` show i `mappend` " " `mappend` unpack err
Right k -> return k
Nothing -> error $ "SQL insert did not return a result giving the generated ID"
Just vals' -> case keyFromValues vals' of
Left _ -> error $ "Invalid result from a SQL insert, got: " ++ show vals'
Right k -> return k
ISRInsertGet sql1 sql2 -> do
rawExecute sql1 vals
withRawQuery sql2 [] $ do
mm <- CL.head
let m = maybe
(Left $ "No results from ISRInsertGet: " `mappend` tshow (sql1, sql2))
Right mm
-- TODO: figure out something better for MySQL
let convert x =
case x of
[PersistByteString i] -> case readInteger i of -- mssql
Just (ret,"") -> [PersistInt64 $ fromIntegral ret]
_ -> x
_ -> x
-- Yes, it's just <|>. Older bases don't have the
-- instance for Either.
onLeft Left{} x = x
onLeft x _ = x
case m >>= (\x -> keyFromValues x `onLeft` keyFromValues (convert x)) of
Right k -> return k
Left err -> throw $ "ISRInsertGet: keyFromValues failed: " `mappend` err
ISRManyKeys sql fs -> do
rawExecute sql vals
case entityPrimary t of
Nothing -> error $ "ISRManyKeys is used when Primary is defined " ++ show sql
Just pdef ->
let pks = map fieldHaskell $ compositeFields pdef
keyvals = map snd $ filter (\(a, _) -> let ret=isJust (find (== a) pks) in ret) $ zip (map fieldHaskell $ entityFields t) fs
in case keyFromValues keyvals of
Right k -> return k
Left e -> error $ "ISRManyKeys: unexpected keyvals result: " `mappend` unpack e
return key
where
tshow :: Show a => a -> Text
tshow = T.pack . show
throw = liftIO . throwIO . userError . T.unpack
t = entityDef $ Just val
vals = map toPersistValue $ toPersistFields val
insertMany [] = return []
insertMany vals = do
conn <- ask
case connInsertManySql conn of
Nothing -> mapM insert vals
Just insertManyFn -> do
case insertManyFn ent valss of
ISRSingle sql -> rawSql sql (concat valss)
_ -> error "ISRSingle is expected from the connInsertManySql function"
where
ent = entityDef vals
valss = map (map toPersistValue . toPersistFields) vals
insertMany_ [] = return ()
insertMany_ vals = do
conn <- ask
let sql = T.concat
[ "INSERT INTO "
, connEscapeName conn (entityDB t)
, "("
, T.intercalate "," $ map (connEscapeName conn . fieldDB) $ entityFields t
, ") VALUES ("
, T.intercalate "),(" $ replicate (length valss) $ T.intercalate "," $ map (const "?") (entityFields t)
, ")"
]
rawExecute sql (concat valss)
where
t = entityDef vals
valss = map (map toPersistValue . toPersistFields) vals
replace k val = do
conn <- ask
let t = entityDef $ Just val
let wher = whereStmtForKey conn k
let sql = T.concat
[ "UPDATE "
, connEscapeName conn (entityDB t)
, " SET "
, T.intercalate "," (map (go conn . fieldDB) $ entityFields t)
, " WHERE "
, wher
]
vals = map toPersistValue (toPersistFields val) `mappend` keyToValues k
rawExecute sql vals
where
go conn x = connEscapeName conn x `T.append` "=?"
insertKey = insrepHelper "INSERT"
repsert key value = do
mExisting <- get key
case mExisting of
Nothing -> insertKey key value
Just _ -> replace key value
get k = do
conn <- ask
let t = entityDef $ dummyFromKey k
let cols = T.intercalate ","
$ map (connEscapeName conn . fieldDB) $ entityFields t
noColumns :: Bool
noColumns = null $ entityFields t
let wher = whereStmtForKey conn k
let sql = T.concat
[ "SELECT "
, if noColumns then "*" else cols
, " FROM "
, connEscapeName conn $ entityDB t
, " WHERE "
, wher
]
withRawQuery sql (keyToValues k) $ do
res <- CL.head
case res of
Nothing -> return Nothing
Just vals ->
case fromPersistValues $ if noColumns then [] else vals of
Left e -> error $ "get " ++ show k ++ ": " ++ unpack e
Right v -> return $ Just v
delete k = do
conn <- ask
rawExecute (sql conn) (keyToValues k)
where
wher conn = whereStmtForKey conn k
sql conn = T.concat
[ "DELETE FROM "
, connEscapeName conn $ tableDBName $ recordTypeFromKey k
, " WHERE "
, wher conn
]
-- | Type-level helper: produce a @Maybe record@ whose value is never
-- inspected, so 'entityDef' can be called for the record type of a key.
dummyFromKey :: Key record -> Maybe record
dummyFromKey k = Just (recordTypeFromKey k)
-- | Type-level helper: a record value that exists only to fix the record
-- type of a key. The value must never be forced; the error message names
-- this function (the original mistakenly said \"dummyFromKey\", which would
-- mislead anyone debugging an accidental evaluation).
recordTypeFromKey :: Key record -> record
recordTypeFromKey _ = error "recordTypeFromKey: dummy value must never be evaluated"
-- | Shared implementation for INSERT-style statements: builds and executes
-- @\<command\> INTO table(id, cols...) VALUES(?, ?...)@, writing the
-- caller-supplied key alongside every column of the record.
insrepHelper :: (MonadIO m, PersistEntity val)
             => Text      -- ^ SQL verb, e.g. \"INSERT\"
             -> Key val   -- ^ explicit key to store
             -> val       -- ^ record whose fields are stored
             -> ReaderT SqlBackend m ()
insrepHelper command k val = do
    conn <- ask
    rawExecute (sql conn) vals
  where
    t = entityDef $ Just val
    -- The id column comes first, followed by the entity's data columns;
    -- the placeholder list matches that order.
    sql conn = T.concat
        [ command
        , " INTO "
        , connEscapeName conn (entityDB t)
        , "("
        , T.intercalate ","
            $ map (connEscapeName conn)
            $ fieldDB (entityId t) : map fieldDB (entityFields t)
        , ") VALUES("
        , T.intercalate "," ("?" : map (const "?") (entityFields t))
        , ")"
        ]
    vals = keyToValues k ++ map toPersistValue (toPersistFields val)
-- | Field definition targeted by an 'Update'. 'BackendUpdate' carries no
-- field, so it is rejected with an error.
updateFieldDef :: PersistEntity v => Update v -> FieldDef
updateFieldDef (Update f _ _) = persistFieldDef f
updateFieldDef (BackendUpdate {}) = error "updateFieldDef did not expect BackendUpdate"
-- | Value carried by an 'Update', marshalled to a 'PersistValue'.
-- 'BackendUpdate' carries no plain value, so it is rejected with an error.
updatePersistValue :: Update v -> PersistValue
updatePersistValue (Update _ v _) = toPersistValue v
updatePersistValue (BackendUpdate {}) = error "updatePersistValue did not expect BackendUpdate"
| jasonzoladz/persistent | persistent/Database/Persist/Sql/Orphan/PersistStore.hs | mit | 12,241 | 116 | 25 | 4,512 | 3,130 | 1,633 | 1,497 | 259 | 1 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE ScopedTypeVariables #-}
-----------------------------------------------------------------------------
-- |
-- Module : Control.Applicative
-- Copyright : Conor McBride and Ross Paterson 2005
-- License : BSD-style (see the LICENSE file in the distribution)
--
-- Maintainer : libraries@haskell.org
-- Stability : experimental
-- Portability : portable
--
-- This module describes a structure intermediate between a functor and
-- a monad (technically, a strong lax monoidal functor). Compared with
-- monads, this interface lacks the full power of the binding operation
-- '>>=', but
--
-- * it has more instances.
--
-- * it is sufficient for many uses, e.g. context-free parsing, or the
-- 'Data.Traversable.Traversable' class.
--
-- * instances can perform analysis of computations before they are
-- executed, and thus produce shared optimizations.
--
-- This interface was introduced for parsers by Niklas Röjemo, because
-- it admits more sharing than the monadic interface. The names here are
-- mostly based on parsing work by Doaitse Swierstra.
--
-- For more details, see
-- <http://www.soi.city.ac.uk/~ross/papers/Applicative.html Applicative Programming with Effects>,
-- by Conor McBride and Ross Paterson.
module Control.Applicative (
-- * Applicative functors
Applicative(..),
-- * Alternatives
Alternative(..),
-- * Instances
Const(..), WrappedMonad(..), WrappedArrow(..), ZipList(..),
-- * Utility functions
(<$>), (<$), (<**>),
liftA, liftA2, liftA3,
optional,
) where
import Control.Category hiding ((.), id)
import Control.Arrow
import Data.Maybe
import Data.Tuple
import Data.Eq
import Data.Ord
import Data.Foldable (Foldable(..))
import Data.Functor ((<$>))
import GHC.Base
import GHC.Generics
import GHC.List (repeat, zipWith)
import GHC.Read (Read(readsPrec), readParen, lex)
import GHC.Show (Show(showsPrec), showParen, showString)
newtype Const a b = Const { getConst :: a }
deriving (Generic, Generic1, Monoid, Eq, Ord)
instance Read a => Read (Const a b) where
readsPrec d = readParen (d > 10)
$ \r -> [(Const x,t) | ("Const", s) <- lex r, (x, t) <- readsPrec 11 s]
instance Show a => Show (Const a b) where
showsPrec d (Const x) = showParen (d > 10) $
showString "Const " . showsPrec 11 x
instance Foldable (Const m) where
foldMap _ _ = mempty
instance Functor (Const m) where
fmap _ (Const v) = Const v
instance Monoid m => Applicative (Const m) where
pure _ = Const mempty
(<*>) = coerce (mappend :: m -> m -> m)
-- This is pretty much the same as
-- Const f <*> Const v = Const (f `mappend` v)
-- but guarantees that mappend for Const a b will have the same arity
-- as the one for a; it won't create a closure to raise the arity
-- to 2.
newtype WrappedMonad m a = WrapMonad { unwrapMonad :: m a }
deriving (Generic, Generic1, Monad)
instance Monad m => Functor (WrappedMonad m) where
fmap f (WrapMonad v) = WrapMonad (liftM f v)
instance Monad m => Applicative (WrappedMonad m) where
pure = WrapMonad . pure
WrapMonad f <*> WrapMonad v = WrapMonad (f `ap` v)
instance MonadPlus m => Alternative (WrappedMonad m) where
empty = WrapMonad mzero
WrapMonad u <|> WrapMonad v = WrapMonad (u `mplus` v)
newtype WrappedArrow a b c = WrapArrow { unwrapArrow :: a b c }
deriving (Generic, Generic1)
instance Arrow a => Functor (WrappedArrow a b) where
fmap f (WrapArrow a) = WrapArrow (a >>> arr f)
instance Arrow a => Applicative (WrappedArrow a b) where
pure x = WrapArrow (arr (const x))
WrapArrow f <*> WrapArrow v = WrapArrow (f &&& v >>> arr (uncurry id))
instance (ArrowZero a, ArrowPlus a) => Alternative (WrappedArrow a b) where
empty = WrapArrow zeroArrow
WrapArrow u <|> WrapArrow v = WrapArrow (u <+> v)
-- | Lists, but with an 'Applicative' functor based on zipping, so that
--
-- @f '<$>' 'ZipList' xs1 '<*>' ... '<*>' 'ZipList' xsn = 'ZipList' (zipWithn f xs1 ... xsn)@
--
newtype ZipList a = ZipList { getZipList :: [a] }
deriving ( Show, Eq, Ord, Read, Functor
, Foldable, Generic, Generic1)
-- See Data.Traversable for the Traversable instance (defined there due to import loops)
instance Applicative ZipList where
    -- 'pure' must be infinite so it acts as the identity under the
    -- truncating 'zipWith' used by '<*>'.
    pure x = ZipList (repeat x)
    ZipList fs <*> ZipList xs = ZipList (zipWith id fs xs)
-- extra functions
-- | One or none.
--
-- Wrap a successful computation's result in 'Just'; if the computation
-- fails, succeed with 'Nothing' instead.
optional :: Alternative f => f a -> f (Maybe a)
optional v = fmap Just v <|> pure Nothing
| siddhanathan/ghc | libraries/base/Control/Applicative.hs | bsd-3-clause | 4,718 | 0 | 11 | 1,033 | 1,161 | 646 | 515 | 68 | 1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="bs-BA">
  <title>Run Applications | ZAP Extensions</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Sadržaj</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Indeks</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Traži</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favoriti</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | kingthorin/zap-extensions | addOns/invoke/src/main/javahelp/org/zaproxy/zap/extension/invoke/resources/help_bs_BA/helpset_bs_BA.hs | apache-2.0 | 982 | 76 | 55 | 159 | 425 | 213 | 212 | -1 | -1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="az-AZ">
  <title>Run Applications | ZAP Extensions</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>İndeks</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Axtar</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | thc202/zap-extensions | addOns/invoke/src/main/javahelp/org/zaproxy/zap/extension/invoke/resources/help_az_AZ/helpset_az_AZ.hs | apache-2.0 | 983 | 76 | 55 | 159 | 419 | 211 | 208 | -1 | -1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
{- Based on example config yi example-configs/yi-vim.hs -}
module IDE.YiConfig (
defaultYiConfig
, Config
, Control
, ControlM
, YiM
, start
, runControl
, liftYi
) where
#ifdef LEKSAH_WITH_YI
import Data.List (reverse, isPrefixOf)
import Yi
import qualified Yi.Keymap.Vim as V2
import qualified Yi.Keymap.Vim.Common as V2
import qualified Yi.Keymap.Vim.Utils as V2
import qualified Yi.Mode.Haskell as Haskell
import qualified Yi.UI.Pango
import Yi.UI.Pango.Control
import Control.Monad (replicateM_)
import Control.Applicative (Alternative(..))
import qualified Data.Text as T (singleton)
import Data.Monoid ((<>))
import qualified Yi.Rope as R (toText)
start yiConfig f =
startControl yiConfig $ do
yiControl <- getControl
controlIO (f yiControl)
-- Set soft tabs of 4 spaces in width.
prefIndent :: Mode s -> Mode s
prefIndent m = m {
modeIndentSettings = IndentSettings
{
expandTabs = True,
shiftWidth = 4,
tabSize = 4
}}
defaultYiConfig = defaultVimConfig {
modeTable = myModes ++ modeTable defaultVimConfig,
defaultKm = myKeymapSet,
configCheckExternalChangesObsessively = False
}
defaultSearchKeymap :: Keymap
defaultSearchKeymap = do
Event (KASCII c) [] <- anyEvent
write (isearchAddE $ T.singleton c)
myKeymapSet :: KeymapSet
myKeymapSet = V2.mkKeymapSet $ V2.defVimConfig `override` \super this ->
let eval = V2.pureEval this
in super {
-- Here we can add custom bindings.
-- See Yi.Keymap.Vim.Common for datatypes and
-- Yi.Keymap.Vim.Utils for useful functions like mkStringBindingE
-- In case of conflict, that is if there exist multiple bindings
-- whose prereq function returns WholeMatch,
-- the first such binding is used.
-- So it's important to have custom bindings first.
V2.vimBindings = myBindings eval <> V2.vimBindings super
}
-- | Custom Vim bindings layered on top of the defaults.  @nmap@ adds a
-- normal-mode binding, @imap@ an insert-mode one; @eval@ runs an ex-style
-- command string.
--
-- The duplicated @nmap "<C-l>" nextTabE@ entry from the original was
-- removed: bindings are matched first-wins (see the comment in
-- 'myKeymapSet'), so the repeated entry was dead code.
myBindings :: (V2.EventString -> EditorM ()) -> [V2.VimBinding]
myBindings eval =
    let nmap x y = V2.mkStringBindingE V2.Normal V2.Drop (x, y, id)
        imap x y = V2.VimBindingE (\evs state -> case V2.vsMode state of
                                    V2.Insert _ ->
                                        fmap (const (y >> return V2.Continue))
                                             (evs `V2.matchesString` x)
                                    _ -> V2.NoMatch)
    in [ nmap "<C-h>" previousTabE
       , nmap "<C-l>" nextTabE
         -- Press space to clear incremental search highlight
       , nmap " " (eval ":nohlsearch<CR>")
         -- for times when you don't press shift hard enough
       , nmap ";" (eval ":")
       , nmap "<F3>" (withCurrentBuffer deleteTrailingSpaceB)
       , nmap "<F4>" (withCurrentBuffer moveToSol)
       , nmap "<F1>" (withCurrentBuffer readCurrentWordB >>= printMsg . R.toText)
       , imap "<Home>" (withCurrentBuffer moveToSol)
       , imap "<End>" (withCurrentBuffer moveToEol)
       ]
-- | Extra editor modes placed ahead of the stock mode table.
myModes :: [AnyMode]
myModes = [
    AnyMode Haskell.fastMode {
        -- Disable beautification
        modePrettify = const $ return ()
    }
 ]
#else
-- No-op stand-ins used when Leksah is built without Yi support: the types
-- carry no information and the functions do as little as possible.
data Config = Config
data Control = Control
data ControlM a = ControlM
data YiM a = YiM

defaultYiConfig :: Config
defaultYiConfig = Config

-- Immediately hands the dummy 'Control' to the continuation.
start :: Config -> (Control -> IO a) -> IO a
start yiConfig f = f Control

-- Never expected to be called in a non-Yi build; crashes if it is.
runControl :: ControlM a -> Control -> IO a
runControl = undefined

-- Never expected to be called in a non-Yi build; crashes if it is.
liftYi :: YiM a -> ControlM a
liftYi = undefined
#endif
| 573/leksah | src/IDE/YiConfig.hs | gpl-2.0 | 3,656 | 0 | 21 | 1,027 | 746 | 421 | 325 | 23 | 1 |
-- This encapsulates the pattern of programs that gradually (and lazily)
-- build a bytestring. If append is too strict in its second argument then
-- we get a stack overflow for large n.
module Main (main) where
import qualified Data.ByteString.Lazy.Char8 as LC
import System.Environment
-- | Entry point: expects exactly one numeric argument and prints the lazy
-- output of 'foo' built from it.
main :: IO ()
main = do
    args <- getArgs
    case args of
        [arg]
            | [(n, "")] <- reads arg -> LC.putStr (foo n)
            | otherwise              -> error "Bad argument"
        _ -> error "Need exactly 1 argument (number of times to loop)"
-- | Build @n@ copies of the line @\"foo\"@ as a lazily appended ByteString.
--
-- The original used an @(n+1)@ pattern, which was removed from the language
-- in Haskell 2010 and no longer compiles on modern GHC.  The guard keeps the
-- same recursion and also makes negative input total: it now yields the
-- empty string instead of a pattern-match failure.
foo :: Int -> LC.ByteString
foo n
  | n <= 0    = LC.empty
  | otherwise = LC.pack "foo\n" `LC.append` foo (n - 1)
| meiersi/bytestring-builder | tests/lazybuild.hs | bsd-3-clause | 704 | 1 | 14 | 236 | 167 | 90 | 77 | -1 | -1 |
module MutRec where
-- Mutual recursion with different
-- names for the same type variable
-- | Typechecker regression case: a mutually recursive pattern binding whose
-- components carry separate signatures ('x' and 'y') that use different
-- names for what must unify to the same type variable.  Only the
-- typechecking matters; the elements of 'x' diverge if forced
-- (z = head x, x = [y,z], y = z).
f t = x
  where
    x :: [a]
    y :: b
    (x,y,z,r) = ([y,z], z, head x, t)
| urbanslug/ghc | testsuite/tests/typecheck/should_compile/MutRec.hs | bsd-3-clause | 172 | 0 | 8 | 50 | 68 | 42 | 26 | 5 | 1 |
{-# LANGUAGE RankNTypes #-}
module StreamingPlayground
where
import Conduit
import Data.MonoTraversable (Element, MonoFoldable, ofoldMap)
import Control.Monad.IO.Class (liftIO)
import qualified Data.ByteString as B
-- https://haskell-lang.org/library/conduit
-- | Yield every element of any MonoFoldable container downstream.
-- Relies on the conduit's Monoid to sequence one 'yield' per element —
-- NOTE(review): assumes (<>) on ConduitM sequences left-to-right; confirm.
myYieldMany :: (Monad m, MonoFoldable mono) => mono -> Producer m (Element mono)
myYieldMany =
    ofoldMap yield
-- | Yield every element of a list downstream, in order; the list-only
-- counterpart of 'myYieldMany'.
yieldManyList :: Monad m => [a] -> Producer m a
yieldManyList = foldr step (return ())
  where
    -- Emit one element, then continue with the rest.
    step a rest = yield a >> rest
-- | Apply a pure function to every value flowing through the conduit.
myMapC :: Monad m => (i -> o) -> ConduitM i o m ()
myMapC f = go
  where
    -- Pull one input; stop at end-of-stream, otherwise emit and recurse.
    go = await >>= maybe (return ()) (\i -> yield (f i) >> go)
-- | Pass through only the values satisfying the predicate.
myFilterC :: Monad m => (i -> Bool) -> ConduitM i i m ()
myFilterC p = go
  where
    go = do
      next <- await
      case next of
        Nothing -> return ()
        Just i
          | p i       -> yield i >> go
          | otherwise -> go
-- | Apply a monadic function to every value, yielding each result.
myMapMC :: Monad m => (i -> m o) -> ConduitM i o m ()
myMapMC f = go
  where
    go = await >>= maybe (return ()) step
    -- Run the effect in the base monad, emit its result, continue.
    step i = do
      o <- lift (f i)
      yield o
      go
-- | Run a monadic action on every value, discarding results (a sink).
myMapM_C :: Monad m => (i -> m ()) -> ConduitM i o m ()
myMapM_C f = go
  where
    go = await >>= maybe (return ()) (\i -> lift (f i) >> go)
-- | Look at the next upstream value without consuming it: the value is
-- pushed back with 'leftover' before being returned.
myPeek :: Monad m => ConduitM i o m (Maybe i) -- Consumer i m (Maybe i)
myPeek = do
  next <- await
  case next of
    Nothing -> return Nothing
    Just i  -> leftover i >> return (Just i)
-- | Consume and discard the entire upstream.
mySinkNull :: Monad m => ConduitM i o m ()
mySinkNull = await >>= maybe (return ()) (const mySinkNull)
-- | Split the first upstream chunk after @n@ bytes: the prefix is yielded
-- and the remainder pushed back as leftover.  Later chunks are untouched.
myTakeCE :: Monad m => Int -> ConduitM B.ByteString B.ByteString m ()
myTakeCE n = await >>= maybe (return ()) splitChunk
  where
    splitChunk bs =
      let (taken, rest) = B.splitAt n bs
      in yield taken >> leftover rest

{-
Example:
    runConduit $ yield xs
              .| do myTakeCE 1 .| myMapM_C print
                    myMapM_C print
for xs = "foo" this prints:
    "f"
    "oo"
-}
-- | Tag each Int: evens go 'Left', odds go 'Right'.
tagger :: Monad m => ConduitM Int (Either Int Int) m ()
tagger = mapC classify
  where
    classify i
      | even i    = Left i
      | otherwise = Right i
-- | Render even/odd numbers as labelled strings.
evens, odds :: Monad m => ConduitM Int String m ()
evens = mapC (("Even number: " ++) . show)
odds  = mapC (("Odd number: " ++) . show)
-- | Project out the 'Left' payload, if any.
left :: Either l r -> Maybe l
left (Left x) = Just x
left _        = Nothing

-- | Project out the 'Right' payload, if any.
right :: Either l r -> Maybe r
right (Right x) = Just x
right _         = Nothing
-- | Fan the tagged stream out to both 'evens' and 'odds' at once:
-- ZipConduit feeds every input to both legs, each of which keeps only
-- its own side via 'left'/'right'.
inside :: Monad m => ConduitM (Either Int Int) String m ()
inside =
    getZipConduit
        $ ZipConduit (concatMapC left .| evens)
       *> ZipConduit (concatMapC right .| odds)
-- | Demo driver: exercises the hand-rolled conduit combinators above, then
-- the tagger/ZipConduit pipeline, then the stock combinators.
main :: IO ()
main = do
    runConduit $ yieldManyList [1..10]
        .| myMapC (+1)
        .| myFilterC (<10)
        .| do myPeek >>= liftIO . putStrLn . show
              myMapM_C print
        -- this doesn't print anything since everything has been consumed
        .| myMapM_C (print :: Int -> IO ())

    runConduit $ enumFromToC 1 10
        .| tagger
        .| inside
        .| mapM_C putStrLn

    let src = yieldMany [1..3 :: Int]
        conduit1 = mapC (+1)
        conduit2 = concatMapC (replicate 2)
        conduit = getZipConduit $ ZipConduit conduit1 *> ZipConduit conduit2
        sink = mapM_C print
    -- src $$ conduit =$ sink
    runConduit $ src
        .| conduit -- () <$ sequenceConduits [conduit1, conduit2]
        .| sink
| futtetennista/IntroductionToFunctionalProgramming | src/StreamingPlayground.hs | mit | 3,359 | 0 | 17 | 1,025 | 1,306 | 644 | 662 | 111 | 2 |
import Data.Char
import Data.List
-- Exercise 1.1. 'headHunter xss' takes the head of the first of the first
-- three list elements that has one; if none of the first three does (or the
-- outer list is shorter), it is an error.
headHunter :: [[a]] -> a
headHunter xss = case xss of
    (h:_) : _         -> h
    _ : (h:_) : _     -> h
    _ : _ : (h:_) : _ -> h
    _                 -> error "Nope."
-- Exercise 1.2. 'firstColumn m' returns the first column of a matrix
-- (empty rows contribute nothing).
firstColumn :: [[a]] -> [a]
firstColumn = concatMap (take 1)
-- Exercise 1.3. 'shoutOutLoud' repeats three times the initial letter of
-- each word in a string.
shoutOutLoud :: String -> String
shoutOutLoud = unwords . map stretch . words
  where
    stretch []       = []
    stretch (c:rest) = replicate 3 c ++ rest
-- Exercise 2.1. 'pad' pads the shorter of the two strings with trailing
-- spaces and returns both strings capitalized.
--
-- Unlike the original head/tail version this is total: an empty string is
-- padded to the common width instead of crashing with a pattern-match
-- failure.  Results for non-empty inputs are unchanged.
pad :: String -> String -> (String, String)
pad xs ys = (fit xs, fit ys)
  where
    -- Target width: the longer of the two inputs.
    width = max (length xs) (length ys)
    fit s = capitalize s ++ replicate (width - length s) ' '
    capitalize []     = []
    capitalize (c:cs) = toUpper c : cs
-- Exercise 2.2. 'quartiles xs' returns the quartiles (q1,q2,q3) of a list:
-- the medians of the lower half, the whole sorted list, and the upper half
-- (the middle element is excluded from both halves when the length is odd).
quartiles :: [Int] -> (Double,Double,Double)
quartiles xs = (median lowerHalf, median sorted, median upperHalf)
  where
    sorted          = sort xs
    n               = length xs
    (lowerHalf, hi) = splitAt (n `quot` 2) sorted
    upperHalf       = if odd n then tail hi else hi
-- | Median of a list: middle element when the length is odd, mean of the
-- two middle elements when it is even.  Errors on the empty list.
median :: (Integral a, Fractional b) => [a] -> b
median [] = error "median: Empty list"
median xs
    | even n    = realToFrac (sorted !! mid + sorted !! (mid - 1)) / 2
    | otherwise = realToFrac (sorted !! mid)
  where
    n      = length xs
    mid    = n `div` 2
    sorted = sort xs
-- Exercise 3.1 == Exercise 2.1, written with a 'let' instead of 'where'.
-- (Still partial on empty strings, as in the original exercise solution.)
pad' :: String -> String -> (String, String)
pad' (x:xs) (y:ys) =
    let longest = max (length xs) (length ys)
        fill s  = s ++ replicate (longest - length s) ' '
    in (toUpper x : fill xs, toUpper y : fill ys)
-- Exercise 3.2 == Exercise 2.2, written with a 'let' instead of 'where'.
quartiles' :: [Int] -> (Double,Double,Double)
quartiles' xs =
    let sorted      = sort xs
        n           = length xs
        (low, high) = splitAt (n `quot` 2) sorted
        upper       = if odd n then tail high else high
    in (median low, median sorted, median upper)
-- Exercise 4.1. Takes a pair (a,b) and a list [c] and returns the string:
-- "The pair [contains two ones|contains one one|does not contain a single
-- one] and the second element of the list is <x>"
--
-- Fixes vs. the original: the second error message had a typo
-- ("No Secnod element") and the output was missing the space before "and",
-- contradicting the specification quoted above.
profun :: (Show a, Num t1, Num t, Eq t1, Eq t) => (t, t1) -> [a] -> [Char]
profun _ []  = error "No second element"
profun _ [_] = error "No second element"
profun p (_:x:_) =
    "The pair " ++ describe p
        ++ " and the second element of the list is " ++ show x
  where
    describe (1, 1) = "contains two ones"
    describe (1, _) = "contains one one"
    describe (_, 1) = "contains one one"
    describe _      = "does not contain a single one"
| kbiscanic/PUH | hw04/exercises.hs | mit | 2,997 | 9 | 13 | 773 | 1,112 | 556 | 556 | 47 | 4 |
module Unison.Runtime.Lock where
-- | Autoreleasing lock type. `tryAcquire`, if successful, returns a `Lease`
-- which will be valid for some period of time, during which it may be assumed
-- that other calls to `tryAcquire` on the same `Lock` will fail.
-- 'tryAcquire' returns 'Nothing' when the lock cannot be taken.
data Lock = Lock { tryAcquire :: IO (Maybe Lease) }

-- | A `Lease` should be released under normal operation, but in the
-- event of an untimely crash, it will also become invalid on its own
-- at some point.
-- 'valid' reports whether the lease is still live.
data Lease = Lease { valid :: IO Bool, release :: IO () }
| nightscape/platform | node/src/Unison/Runtime/Lock.hs | mit | 524 | 0 | 11 | 100 | 66 | 40 | 26 | 3 | 0 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeFamilies #-}
-- I generally try to avoid modules full of (only) types but these are here
-- so the can be shared in both Technique.Translate and Technique.Builtins.
-- |
-- Error messages from compiling.
module Technique.Failure where
import Core.System.Base
import Core.System.Pretty
import Core.Text.Rope
import Core.Text.Utilities
import qualified Data.List.NonEmpty as NonEmpty
import qualified Data.Set as OrdSet
import qualified Data.Text as T
import Data.Void
import Technique.Formatter
import Technique.Language hiding (Label)
import Text.Megaparsec (PosState (..), SourcePos (..))
import Text.Megaparsec.Error
( ErrorItem (..),
ParseError (..),
ParseErrorBundle (..),
)
import Text.Megaparsec.Pos (unPos)
import Prelude hiding (lines)
data Status = Ok | Failed CompilationError | Reload
instance Render Status where
type Token Status = TechniqueToken
colourize = colourizeTechnique
highlight status = case status of
Ok -> annotate LabelToken "ok"
Failed e -> highlight e
Reload -> annotate MagicToken "Δ"
data Source = Source
{ sourceContents :: Rope,
sourceFilename :: FilePath,
sourceOffset :: !Offset
}
deriving (Eq, Ord, Show)
instance Located Source where
locationOf = sourceOffset
instance Render Source where
type Token Source = TechniqueToken
colourize = colourizeTechnique
highlight source = pretty (sourceFilename source) <+> pretty (sourceOffset source)
emptySource :: Source
emptySource =
Source
{ sourceContents = emptyRope,
sourceFilename = "<undefined>",
sourceOffset = -1
}
data FailureReason
= InvalidSetup -- TODO placeholder
| ParsingFailed [ErrorItem Char] [ErrorItem Char]
| VariableAlreadyInUse Identifier
| ProcedureAlreadyDeclared Identifier
| CallToUnknownProcedure Identifier
| UseOfUnknownIdentifier Identifier
| EncounteredUndefined
deriving (Show, Eq)
instance Enum FailureReason where
fromEnum x = case x of
InvalidSetup -> 1
ParsingFailed _ _ -> 2
VariableAlreadyInUse _ -> 3
ProcedureAlreadyDeclared _ -> 4
CallToUnknownProcedure _ -> 5
UseOfUnknownIdentifier _ -> 6
EncounteredUndefined -> 7
toEnum = undefined
data CompilationError = CompilationError Source FailureReason
deriving (Show)
instance Exception CompilationError
exitCodeFor :: CompilationError -> Int
exitCodeFor (CompilationError _ reason) = fromEnum reason
-- TODO upgrade this to (Doc ann) so we can get prettier error messages.
instance Render FailureReason where
type Token FailureReason = TechniqueToken
colourize = colourizeTechnique
highlight failure = case failure of
InvalidSetup -> "Invalid setup!"
ParsingFailed unexpected expected ->
let un = case unexpected of
[] -> emptyDoc
(item : _) -> "unexpected " <> formatErrorItem FilenameToken item <> hardline
ex = case expected of
[] -> emptyDoc
items -> "expecting " <> fillCat (fancyPunctuate (fmap (formatErrorItem SymbolToken) items)) <> "."
in un <> ex
VariableAlreadyInUse i -> "Variable by the name of '" <> annotate VariableToken (highlight i) <> "' already defined."
ProcedureAlreadyDeclared i -> "Procedure by the name of '" <> annotate ProcedureToken (highlight i) <> "' already declared."
CallToUnknownProcedure i -> "Call to unknown procedure '" <> annotate ApplicationToken (highlight i) <> "'."
UseOfUnknownIdentifier i -> "Variable '" <> annotate VariableToken (highlight i) <> "' not in scope."
EncounteredUndefined -> "Encountered an " <> annotate ErrorToken "undefined" <> " marker."
fancyPunctuate :: [Doc ann] -> [Doc ann]
fancyPunctuate list = case list of
[] -> []
[x] -> [x]
(x1 : x2 : []) -> x1 : ", or " : x2 : []
(x1 : xs) -> x1 : ", " : fancyPunctuate xs
-- |
-- ErrorItem is a bit overbearing, but we handle its /four/ cases by saying
-- single quotes around characters, double quotes around strings, /no/ quotes
-- around labels (descriptive text) and hard code the end of input and newline
-- cases.
formatErrorItem :: TechniqueToken -> ErrorItem Char -> Doc TechniqueToken
formatErrorItem token item = case item of
-- It would appear that **prettyprinter** has a Pretty instance for
-- NonEmpty a. In this case token ~ Char so these are Strings, ish.
-- Previously we converted to Rope, but looks like we can go directly.
Tokens tokens ->
case NonEmpty.uncons tokens of
(ch, Nothing) -> case ch of
'\n' -> annotate token "newline"
_ -> pretty '\'' <> annotate token (pretty ch) <> pretty '\''
_ -> pretty '\"' <> annotate token (pretty tokens) <> pretty '\"'
Label chars ->
annotate token (pretty chars)
EndOfInput ->
"end of input"
numberOfCarots :: FailureReason -> Int
numberOfCarots reason = case reason of
InvalidSetup -> 0
ParsingFailed unexpected _ -> case unexpected of
[] -> 1
(item : _) -> case item of
Tokens tokens -> NonEmpty.length tokens
Label chars -> NonEmpty.length chars
EndOfInput -> 1
VariableAlreadyInUse i -> widthRope (unIdentifier i)
ProcedureAlreadyDeclared i -> widthRope (unIdentifier i)
CallToUnknownProcedure i -> widthRope (unIdentifier i)
UseOfUnknownIdentifier i -> widthRope (unIdentifier i)
EncounteredUndefined -> 1
instance Render CompilationError where
type Token CompilationError = TechniqueToken
colourize = colourizeTechnique
highlight (CompilationError source reason) =
let filename = pretty (sourceFilename source)
contents = intoRope (sourceContents source)
o = sourceOffset source
-- Given an offset point where the error occured, split the input at that
-- point.
(before, _) = splitRope o contents
(l, c) = calculatePositionEnd before
-- Isolate the line on which the error occured. l and c are 1-origin here,
-- so if there's only a single line (or empty file) we take that one single
-- line and then last one is also that line.
lines = breakLines contents
lines' = take l lines
offending =
if nullRope contents
then emptyRope
else last lines'
-- Now prepare for rendering. If the offending line is long trim it. Then
-- create a line with some carets which show where the problem is.
linenum = pretty l
colunum = pretty c
(truncated, _) = splitRope 77 offending
trimmed =
if widthRope offending > 77 && c < 77
then truncated <> "..."
else offending
padding = replicateChar (c - 1) ' '
num = numberOfCarots reason
caroted = replicateChar num '^'
columns =
if num > 1
then colunum <> "-" <> pretty (c + num - 1)
else colunum
in annotate FilenameToken filename <> ":" <> linenum <> ":" <> columns <> hardline
<> hardline
<> pretty trimmed
<> hardline
<> pretty padding
<> annotate ErrorToken (pretty caroted)
<> hardline
<> hardline
<> highlight reason
-- |
-- When we get a failure in the parsing stage **megaparsec** returns a
-- ParseErrorBundle. Extract the first error message therein (later handle
-- more? Yeah nah), and convert it into something we can use.
extractErrorBundle :: Source -> ParseErrorBundle T.Text Void -> CompilationError
extractErrorBundle source bundle =
let errors = bundleErrors bundle
first = NonEmpty.head errors
(o, unexpected, expected) = extractParseError first
pstate = bundlePosState bundle
srcpos = pstateSourcePos pstate
l0 = unPos . sourceLine $ srcpos
c0 = unPos . sourceColumn $ srcpos
-- Do we need these? For all the examples we have seen the values of l0 and c0
-- are `1`. **megaparsec** delays calculation of line and column until
-- error rendering time. Perhaps we need to record this.
l = if l0 > 1 then error "Unexpected line balance" else 0
c = if c0 > 1 then error "Unexpected columns balance" else 0
reason = ParsingFailed unexpected expected
source' =
source
{ sourceOffset = o + l + c
}
in CompilationError source' reason
extractParseError :: ParseError T.Text Void -> (Int, [ErrorItem Char], [ErrorItem Char])
extractParseError e = case e of
TrivialError o unexpected0 expected0 ->
let unexpected = case unexpected0 of
Just item -> item : []
Nothing -> []
expected = OrdSet.toList expected0
in (o, unexpected, expected)
FancyError _ _ -> error "Unexpected parser error"
| oprdyn/technique | lib/Technique/Failure.hs | mit | 8,747 | 7 | 24 | 2,089 | 2,036 | 1,063 | 973 | 185 | 10 |
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE TypeOperators #-}
module Data.Vinyl.Functor where
import Control.Applicative
import Data.Foldable
import Data.Traversable
import Foreign.Storable
-- | A value with no extra structure; operations act on the payload directly.
newtype Identity a
  = Identity { getIdentity :: a }
  deriving ( Functor
           , Foldable
           , Traversable
           , Storable
           )

-- | Like 'Identity' but declared with @data@, so the payload sits behind a
-- lifted (lazy) constructor instead of being representationally equal.
data Thunk a
  = Thunk { getThunk :: a }
  deriving ( Functor
           , Foldable
           , Traversable
           )

-- | Combine two interpretation functors @f@ and @g@ pointwise with a binary
-- type constructor @op@ (e.g. @(,)@ or 'Either').
newtype Lift (op :: l -> l' -> *) (f :: k -> l) (g :: k -> l') (x :: k)
  = Lift { getLift :: op (f x) (g x) }

-- | Functor composition: @Compose f g x@ wraps @f (g x)@.
newtype Compose (f :: l -> *) (g :: k -> l) (x :: k)
  = Compose { getCompose :: f (g x) }
  deriving (Storable)

-- | Infix shorthand for 'Compose'.
type f :. g = Compose f g
infixr 9 :.

-- | A constant functor: stores an @a@ and ignores its index @b@.
newtype Const (a :: *) (b :: k)
  = Const { getConst :: a }
  deriving ( Functor
           , Foldable
           , Traversable
           , Storable
           )
-- Instances for 'Compose': each operation is threaded through both layers.
instance (Functor f, Functor g) => Functor (Compose f g) where
  fmap f (Compose x) = Compose (fmap (fmap f) x)

instance (Foldable f, Foldable g) => Foldable (Compose f g) where
  foldMap f (Compose t) = foldMap (foldMap f) t

instance (Traversable f, Traversable g) => Traversable (Compose f g) where
  traverse f (Compose t) = Compose <$> traverse (traverse f) t

instance (Applicative f, Applicative g) => Applicative (Compose f g) where
  pure x = Compose (pure (pure x))
  Compose f <*> Compose x = Compose ((<*>) <$> f <*> x)

-- 'Identity' is the trivial Applicative/Monad: just apply/bind the payload.
instance Applicative Identity where
  pure = Identity
  Identity f <*> Identity x = Identity (f x)

instance Monad Identity where
  return = Identity
  Identity x >>= f = f x

-- Shows the payload without any constructor noise.
instance Show a => Show (Identity a) where
  show (Identity x) = show x

-- 'Thunk' mirrors the 'Identity' instances, but note the constructor
-- patterns here are strict in the wrapper (Thunk is a data type).
instance Applicative Thunk where
  pure = Thunk
  (Thunk f) <*> (Thunk x) = Thunk (f x)

instance Monad Thunk where
  return = Thunk
  (Thunk x) >>= f = f x

instance Show a => Show (Thunk a) where
  show (Thunk x) = show x

-- 'Lift' instances map over both components of the pair / either branch.
instance (Functor f, Functor g) => Functor (Lift (,) f g) where
  fmap f (Lift (x, y)) = Lift (fmap f x, fmap f y)

instance (Functor f, Functor g) => Functor (Lift Either f g) where
  fmap f (Lift (Left x)) = Lift . Left . fmap f $ x
  fmap f (Lift (Right x)) = Lift . Right . fmap f $ x

instance (Applicative f, Applicative g) => Applicative (Lift (,) f g) where
  pure x = Lift (pure x, pure x)
  Lift (f, g) <*> Lift (x, y) = Lift (f <*> x, g <*> y)
| plow-technologies/Vinyl | Data/Vinyl/Functor.hs | mit | 2,642 | 0 | 10 | 757 | 1,077 | 571 | 506 | 69 | 0 |
{-# OPTIONS_GHC -fno-warn-type-defaults #-}
import Data.Array (listArray)
import Data.Foldable (for_)
import Test.Hspec (Spec, describe, it, shouldBe)
import Test.Hspec.Runner (configFastFail, defaultConfig, hspecWith)
import Matrix (saddlePoints)
-- | Run the suite, stopping at the first failure (fail-fast config).
main :: IO ()
main = hspecWith defaultConfig {configFastFail = True} specs
-- | Each case is (description, matrix rows, expected saddle points); the
-- rows are packed into a 1-origin 'listArray' before calling 'saddlePoints'.
specs :: Spec
specs = describe "saddlePoints" $ for_ cases test
  where
    test (description, xss, expected) = it description assertion
      where
        assertion = saddlePoints matrix `shouldBe` expected
        rows = length xss
        -- NOTE(review): 'head xss' is partial for the empty-matrix case;
        -- the bounds appear lazy enough that it is never forced — confirm.
        columns = length $ head xss
        matrix = listArray ((1, 1), (rows, columns)) (concat xss)
    cases = [ ( "Example from README",
                [ [9, 8, 7]
                , [5, 3, 2]
                , [6, 6, 7] ], [(2, 1)] )
            , ( "empty matrix has none", [], [] )
            , ( "no saddle point",
                [ [1, 2, 3]
                , [3, 1, 2]
                , [2, 3, 1] ], [] )
            , ( "multiple saddle points in a column",
                [ [4, 5, 4]
                , [3, 5, 5]
                , [1, 5, 4] ], [ (1, 2)
                               , (2, 2)
                               , (3, 2) ] )
            , ( "multiple saddle points in a row",
                [ [6, 7, 8]
                , [5, 5, 5]
                , [7, 5, 6] ], [ (2, 1)
                               , (2, 2)
                               , (2, 3) ] )
            , ( "bottom-right corner",
                [ [8, 7, 9]
                , [6, 7, 6]
                , [3, 2, 5] ], [(3, 3)] )
            , ( "non-square matrix",
                [ [3, 1, 3]
                , [3, 2, 4] ], [ (1, 1)
                               , (1, 3) ] )
            , ( "Can identify that saddle points in a single column matrix are those with the minimum value",
                [ [2]
                , [1]
                , [4]
                , [1] ], [ (2, 1)
                         , (4, 1) ] )
            , ( "Can identify that saddle points in a single row matrix are those with the maximum value",
                [ [2, 5, 3, 5] ], [ (1, 2)
                                  , (1, 4) ] )
            ]
-- 8cdbe8cfcba7338c519b07bb29563b7c3ad01a80
| exercism/xhaskell | exercises/practice/saddle-points/test/Tests.hs | mit | 2,271 | 1 | 8 | 1,074 | 710 | 445 | 265 | 53 | 1 |
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.SVGCircleElement
(js_getCx, getCx, js_getCy, getCy, js_getR, getR, SVGCircleElement,
castToSVGCircleElement, gTypeSVGCircleElement)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSRef(..), JSString, castRef)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSRef(..), FromJSRef(..))
import GHCJS.Marshal.Pure (PToJSRef(..), PFromJSRef(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.Enums
-- Auto-generated FFI bindings (ghcjs-dom): each JS property getter is paired
-- with a Haskell wrapper that unwraps the element and marshals the result.
-- Prefer regenerating over hand-editing.
foreign import javascript unsafe "$1[\"cx\"]" js_getCx ::
        JSRef SVGCircleElement -> IO (JSRef SVGAnimatedLength)

-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGCircleElement.cx Mozilla SVGCircleElement.cx documentation>
getCx ::
      (MonadIO m) => SVGCircleElement -> m (Maybe SVGAnimatedLength)
getCx self
  = liftIO ((js_getCx (unSVGCircleElement self)) >>= fromJSRef)

foreign import javascript unsafe "$1[\"cy\"]" js_getCy ::
        JSRef SVGCircleElement -> IO (JSRef SVGAnimatedLength)

-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGCircleElement.cy Mozilla SVGCircleElement.cy documentation>
getCy ::
      (MonadIO m) => SVGCircleElement -> m (Maybe SVGAnimatedLength)
getCy self
  = liftIO ((js_getCy (unSVGCircleElement self)) >>= fromJSRef)

foreign import javascript unsafe "$1[\"r\"]" js_getR ::
        JSRef SVGCircleElement -> IO (JSRef SVGAnimatedLength)

-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGCircleElement.r Mozilla SVGCircleElement.r documentation>
getR ::
(MonadIO m) => SVGCircleElement -> m (Maybe SVGAnimatedLength)
getR self
= liftIO ((js_getR (unSVGCircleElement self)) >>= fromJSRef) | plow-technologies/ghcjs-dom | src/GHCJS/DOM/JSFFI/Generated/SVGCircleElement.hs | mit | 2,187 | 18 | 11 | 281 | 565 | 336 | 229 | 36 | 1 |
{-# LANGUAGE ParallelListComp, TransformListComp, MonadComprehensions, RecordWildCards #-}
import GHC.Exts
import qualified Data.Map as M
import Data.List
import Data.Ord
-- Just one more sql-like query
-- After RecordWildCards
-- Parallel List Comp: zips the lists together, instead of drawing every combination of their elements (as nested generators would)
-- | The three ranges are consumed in lock-step (element by element),
-- exactly as the parallel list comprehension did.
arithmetic :: [Int]
arithmetic = zipWith3 combine [0..10] [10..20] [20..30]
  where
    combine x y z = x + y * z
-- | Lazy Fibonacci via a parallel comprehension: 'fibs' is zipped with its
-- own tail, so each element is the sum of the two before it.
fibs :: [Int]
fibs = 0 : 1 : [ x + y
               | x <- fibs
               | y <- tail fibs
               ]
-- λ> take 10 fibs
-- [0,1,1,2,3,5,8,13,21,34]
-- | Three-way variant: sums consecutive triples drawn from 'fibs' (note:
-- from 'fibs', not from 'fiblikes' itself) after a 0,1,2 prefix.
fiblikes :: [Int]
fiblikes = 0 : 1 : 2 : [ x + y + z
                       | x <- fibs
                       | y <- tail fibs
                       | z <- tail (tail fibs)
                       ]
-- λ> take 10 fiblikes
-- [0,1,2,2,4,6,10,16,26,42]  -- first sum is fibs!!0 + fibs!!1 + fibs!!2 = 2
-- (0.02 secs, 2644856 bytes)
-- SQL-like comprehensions
-- sortWith :: Ord b => (a -> b) -> [a] -> [a]
-- the :: Eq a => [a] -> a
-- groupWith :: Ord b => (a -> b) -> [a] -> [[a]]
-- | A person record used by the SQL-like comprehension demos below.
data Character = Character{ firstName :: String, lastName :: String, birthYear :: Int }
    deriving (Show, Eq)

-- | Sample table.
friends :: [Character]
friends = [ Character "Phoebe" "Buffay" 1963
          , Character "Chandler" "Bing" 1969
          , Character "Rachel" "Green" 1969
          , Character "Joey" "Tribbiani" 1967
          , Character "Ross" "Geller" 1966
          ]
-- | Full names of the @k@ oldest characters (TransformListComp: sort the
-- bindings by birth year, then keep the first @k@; RecordWildCards brings
-- the fields into scope).
oldest :: Int -> [Character] -> [String]
oldest k tbl = [ firstName ++ " " ++ lastName
               | Character{..} <- tbl
               , then sortWith by birthYear
               , then take k ]
-- | Group elements sharing a key (computed by @f@) and order the groups
-- from largest to smallest.
groupByLargest :: Ord b => (a -> b) -> [a] -> [[a]]
groupByLargest f xs = sortBy (flip (comparing length)) (groupWith f xs)
-- | The @k@ most common birth years with the first names born in each
-- ('the' collapses the grouped year column to a single value).
bestBirthYears :: Int -> [Character] -> [(Int, [String])]
bestBirthYears k tbl = [ (the birthYear, firstName)
                       | Character{..} <- tbl
                       , then group by birthYear using groupByLargest
                       , then take k
                       ]
-- ONE more query.
-- Sample (name, department, salary) table; no signature, so the salary
-- column's numeric type is left to defaulting.
employees = [ ("Simon", "MS", 80)
            , ("Erik", "MS", 100)
            , ("Phil", "Ed", 40)
            , ("Gordon", "Ed", 45)
            , ("Paul", "Yale", 60) ]
-- Total salary per department, SQL-style (group by dept, aggregate).
output = [ (the dept, sum salary)
         | (name, dept, salary) <- employees
         , then group by dept using groupWith
         -- , then sortWith by (sum salary)
         , then take 5 ]
-- keep in mind the original list comprehension:
-- [(a, b) | a <- xs, b <- ys]
-- == do a <- xs
-- b <- ys
-- return (a, b)
-- | Map from a perfect square to its integer square root, for roots 1..100.
sqrts :: M.Map Int Int
sqrts = M.fromList [ (root * root, root) | root <- [1..100] ]
-- | The Maybe computation written with do-notation (the desugaring of the
-- original monad comprehension); evaluates to @Just 3@.
monadExample :: Maybe Int
monadExample = do
    x <- Just 1
    y <- Just 2
    return (x + y)
-- | Sum the integer square roots of two perfect squares; 'Nothing' when
-- either argument is not a perfect square in the table.
sumIntSqrts :: Int -> Int -> Maybe Int
sumIntSqrts a b = do
    x <- M.lookup a sqrts
    y <- M.lookup b sqrts
    return (x + y)
-- | Read a name, print a greeting, and hand the name back.
greet :: IO String
greet = do
    name <- getLine
    putStrLn (unwords ["Hello, ", name, "!"])
    return name
-- couple more monad examples could be nice
-- Desugars to:
-- do x <- Just 1
-- y <- Just 2
-- return $ x + y
| 5outh/WeekOfPragmas | ListComprehensions.hs | mit | 3,174 | 10 | 11 | 1,117 | 920 | 502 | 418 | 60 | 1 |
-- Copyright © 2012 Julian Blake Kongslie <jblake@omgwallhack.org>
-- Licensed under the MIT license.
module Database.Local
where
import Database.HDBC
import Database.HDBC.PostgreSQL
-- | A PostgreSQL connection plus the prepared statements the rest of this
-- module runs against it (see 'connectLocal' for the exact SQL of each).
data LocalDB = LocalDB
  { db :: !Connection
  , allBooks :: !Statement -- story ids of all unpruned stories
  , filename :: !Statement -- get_filename(story_id, default)
  , newStory :: !Statement -- add_story_source(source, ref)
  , prune :: !Statement -- del_story(story_id)
  , pruned :: !Statement -- filenames of pruned stories
  , sources :: !Statement -- (source, ref) rows for one story
  }
-- | Open the local fanfiction database (over the /tmp Unix socket) and
-- prepare every statement up front, bundling them into a 'LocalDB'.
connectLocal :: IO LocalDB
connectLocal = do
  db <- connectPostgreSQL "dbname=fanfiction user=fanfiction host=/tmp"
  allBooks <- prepare db "SELECT story_id FROM stories WHERE NOT pruned;"
  filename <- prepare db "SELECT get_filename( ?, ? );"
  newStory <- prepare db "SELECT add_story_source( ?, ? );"
  prune <- prepare db "SELECT del_story( ? );"
  pruned <- prepare db "SELECT filename FROM stories WHERE filename IS NOT NULL AND pruned;"
  sources <- prepare db "SELECT source, ref FROM sources WHERE story_id = ? ORDER BY source ASC;"
  return $ LocalDB
    { db = db
    , allBooks = allBooks
    , filename = filename
    , newStory = newStory
    , prune = prune
    , pruned = pruned
    , sources = sources
    }
-- | Repeatedly run @gen@, feeding each produced value to @iter@, and stop
-- the first time @gen@ yields 'Nothing'.
iterateM :: (Monad m) => m (Maybe a) -> (a -> m ()) -> m ()
iterateM gen iter = loop
  where
    loop = gen >>= maybe (return ()) (\a -> iter a >> loop)
-- | In one transaction, run @act@ for every unpruned story, passing the
-- story id and its (source, ref) pairs.  The single-element list patterns
-- match the one-column / two-column result shapes of the prepared queries.
foreachBook :: LocalDB -> (String -> [(String, String)] -> IO ()) -> IO ()
foreachBook pg act = withTransaction (db pg) $ const $ do
  execute (allBooks pg) []
  iterateM (fetchRow $ allBooks pg) $ \[storyID] -> do
    execute (sources pg) [storyID]
    info <- fetchAllRows' $ sources pg
    act (fromSql storyID) [ (fromSql source, fromSql ref) | [source, ref] <- info ]
-- | Delete each listed story (del_story), all inside one transaction.
pruneBooks :: LocalDB -> [String] -> IO ()
pruneBooks pg books =
    withTransaction (db pg) $ \_ ->
        executeMany (prune pg) (map (\book -> [toSql book]) books)
-- | Filenames of stories that are pruned but still have a file on disk.
deadBooks :: LocalDB -> IO [String]
deadBooks pg = withTransaction (db pg) $ const $ do
  execute (pruned pg) []
  dead <- fetchAllRows' $ pruned pg
  return [ fromSql book | [book] <- dead ]
-- | Ask the database for (or assign) the story's filename via the
-- get_filename() function.  The irrefutable-looking @Just [file]@ relies on
-- the function always returning exactly one row — a failure here means the
-- schema contract was broken.
makeFilename :: LocalDB -> String -> String -> IO String
makeFilename pg storyID defaultName = withTransaction (db pg) $ const $ do
  execute (filename pg) [toSql storyID, toSql defaultName]
  Just [file] <- fetchRow $ filename pg
  return $ fromSql file
-- | Register a (site, ref) source via add_story_source() and return the
-- value the database hands back.  Note the inner @ref@ binding shadows the
-- @ref@ parameter; from here on it is the database's answer, not the input.
addStory :: LocalDB -> String -> String -> IO String
addStory pg site ref = withTransaction (db pg) $ const $ do
  execute (newStory pg) [toSql site, toSql ref]
  Just [ref] <- fetchRow $ newStory pg
  return $ fromSql ref
| jblake/fanfiction | src/Database/Local.hs | mit | 2,511 | 0 | 16 | 545 | 894 | 441 | 453 | 72 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-deployment-revision-githublocation.html
module Stratosphere.ResourceProperties.CodeDeployDeploymentGroupGitHubLocation where
import Stratosphere.ResourceImports
-- | Full data type definition for CodeDeployDeploymentGroupGitHubLocation.
-- See 'codeDeployDeploymentGroupGitHubLocation' for a more convenient
-- constructor.
-- Generated boilerplate: record + ToJSON + smart constructor + lenses.
-- Both fields are required, hence the uniform (Just . (key,) . toJSON)
-- entries under catMaybes (optional fields elsewhere use 'fmap' instead).
data CodeDeployDeploymentGroupGitHubLocation =
  CodeDeployDeploymentGroupGitHubLocation
  { _codeDeployDeploymentGroupGitHubLocationCommitId :: Val Text
  , _codeDeployDeploymentGroupGitHubLocationRepository :: Val Text
  } deriving (Show, Eq)

instance ToJSON CodeDeployDeploymentGroupGitHubLocation where
  toJSON CodeDeployDeploymentGroupGitHubLocation{..} =
    object $
    catMaybes
    [ (Just . ("CommitId",) . toJSON) _codeDeployDeploymentGroupGitHubLocationCommitId
    , (Just . ("Repository",) . toJSON) _codeDeployDeploymentGroupGitHubLocationRepository
    ]

-- | Constructor for 'CodeDeployDeploymentGroupGitHubLocation' containing
-- required fields as arguments.
codeDeployDeploymentGroupGitHubLocation
  :: Val Text -- ^ 'cddgghlCommitId'
  -> Val Text -- ^ 'cddgghlRepository'
  -> CodeDeployDeploymentGroupGitHubLocation
codeDeployDeploymentGroupGitHubLocation commitIdarg repositoryarg =
  CodeDeployDeploymentGroupGitHubLocation
  { _codeDeployDeploymentGroupGitHubLocationCommitId = commitIdarg
  , _codeDeployDeploymentGroupGitHubLocationRepository = repositoryarg
  }

-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-deployment-revision-githublocation.html#cfn-properties-codedeploy-deploymentgroup-deployment-revision-githublocation-commitid
cddgghlCommitId :: Lens' CodeDeployDeploymentGroupGitHubLocation (Val Text)
cddgghlCommitId = lens _codeDeployDeploymentGroupGitHubLocationCommitId (\s a -> s { _codeDeployDeploymentGroupGitHubLocationCommitId = a })

-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-deployment-revision-githublocation.html#cfn-properties-codedeploy-deploymentgroup-deployment-revision-githublocation-repository
cddgghlRepository :: Lens' CodeDeployDeploymentGroupGitHubLocation (Val Text)
cddgghlRepository = lens _codeDeployDeploymentGroupGitHubLocationRepository (\s a -> s { _codeDeployDeploymentGroupGitHubLocationRepository = a })
| frontrowed/stratosphere | library-gen/Stratosphere/ResourceProperties/CodeDeployDeploymentGroupGitHubLocation.hs | mit | 2,576 | 0 | 13 | 221 | 266 | 152 | 114 | 29 | 1 |
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.UI.GLUT.Overlay
-- Copyright : (c) Sven Panne 2002-2005
-- License : BSD-style (see the file libraries/GLUT/LICENSE)
--
-- Maintainer : sven.panne@aedion.de
-- Stability : stable
-- Portability : portable
--
-- When overlay hardware is available, GLUT provides a set of routines for
-- establishing, using, and removing an overlay for GLUT windows. When an
-- overlay is established, a separate OpenGL context is also established. A
-- window\'s overlay OpenGL state is kept distinct from the normal planes\'
-- OpenGL state.
--
--------------------------------------------------------------------------------
module Graphics.UI.GLUT.Overlay (
-- * Overlay creation and destruction
hasOverlay, overlayPossible,
-- * Showing and hiding an overlay
overlayVisible,
-- * Changing the /layer in use/
Layer(..), layerInUse,
-- * Re-displaying
postOverlayRedisplay
) where
import Data.StateVar
import Graphics.Rendering.OpenGL ( GLenum )
import Graphics.UI.GLUT.QueryUtils
import Graphics.UI.GLUT.Raw
import Graphics.UI.GLUT.Types
--------------------------------------------------------------------------------
-- | Controls the overlay for the /current window/. The requested display mode
-- for the overlay is determined by the /initial display mode/.
-- 'overlayPossible' can be used to determine if an overlay is possible for the
-- /current window/ with the current /initial display mode/. Do not attempt to
-- establish an overlay when one is not possible; GLUT will terminate the
-- program.
--
-- When 'hasOverlay' is set to 'True' when an overlay already exists, the
-- existing overlay is first removed, and then a new overlay is established. The
-- state of the old overlay\'s OpenGL context is discarded. Implicitly, the
-- window\'s /layer in use/ changes to the overlay immediately after the overlay
-- is established.
--
-- The initial display state of an overlay is shown, however the overlay is only
-- actually shown if the overlay\'s window is shown.
--
-- Setting 'hasOverlay' to 'False' is safe even if no overlay is currently
-- established, nothing happens in this case. Implicitly, the window\'s /layer
-- in use/ changes to the normal plane immediately once the overlay is removed.
--
-- If the program intends to re-establish the overlay later, it is typically
-- faster and less resource intensive to use 'overlayVisible' to simply change
-- the display status of the overlay.
--
-- /X Implementation Notes:/ GLUT for X uses the @SERVER_OVERLAY_VISUALS@
-- convention to determine if overlay visuals are available. While the
-- convention allows for opaque overlays (no transparency) and overlays with the
-- transparency specified as a bitmask, GLUT overlay management only provides
-- access to transparent pixel overlays.
--
-- Until RGBA overlays are better understood, GLUT only supports color index
-- overlays.
-- | State variable controlling whether the /current window/ has an overlay.
hasOverlay :: StateVar Bool
hasOverlay = makeStateVar getHasOverlay setHasOverlay

-- | Establish ('True') or remove ('False') the overlay of the
-- /current window/.
setHasOverlay :: Bool -> IO ()
setHasOverlay establish
   | establish = glutEstablishOverlay
   | otherwise = glutRemoveOverlay

-- | Query GLUT layer state; a non-zero value means an overlay exists.
getHasOverlay :: IO Bool
getHasOverlay = layerGet isSet glut_HAS_OVERLAY
   where isSet = (/= 0)
--------------------------------------------------------------------------------
-- | Contains 'True' if an overlay could be established for the /current window/
-- given the current /initial display mode/. If it contains 'False',
-- 'setHasOverlay' will fail with a fatal error if called.
-- | 'True' iff an overlay could be established for the /current window/
-- given the current /initial display mode/.
overlayPossible :: GettableStateVar Bool
overlayPossible =
   makeGettableStateVar (layerGet (0 /=) glut_OVERLAY_POSSIBLE)
--------------------------------------------------------------------------------
-- | Controls the visibility of the overlay of the /current window/.
--
-- The effect of showing or hiding an overlay takes place immediately. Note that
-- setting 'overlayVisible' to 'True' will not actually display the overlay
-- unless the window is also shown (and even a shown window may be obscured by
-- other windows, thereby obscuring the overlay). It is typically faster and
-- less resource intensive to use the routines below to control the display
-- status of an overlay as opposed to removing and re-establishing the overlay.
-- | Show or hide the overlay of the /current window/; the effect takes
-- place immediately.
overlayVisible :: SettableStateVar Bool
overlayVisible = makeSettableStateVar setVisibility
   where setVisibility True  = glutShowOverlay
         setVisibility False = glutHideOverlay
--------------------------------------------------------------------------------
-- | The /layer in use/.
data Layer
= Normal -- ^ The normal plane.
| Overlay -- ^ The overlay.
deriving ( Eq, Ord, Show )
-- | Convert a 'Layer' to its GLUT enum value.
marshalLayer :: Layer -> GLenum
marshalLayer Normal  = glut_NORMAL
marshalLayer Overlay = glut_OVERLAY

-- | Convert a GLUT enum value back to a 'Layer'; calls 'error' on any
-- unknown value.
unmarshalLayer :: GLenum -> Layer
unmarshalLayer x =
   if x == glut_NORMAL
      then Normal
      else if x == glut_OVERLAY
              then Overlay
              else error ("unmarshalLayer: illegal value " ++ show x)
--------------------------------------------------------------------------------
-- | Controls the per-window /layer in use/ for the /current window/, which can
-- either be the normal plane or the overlay. Selecting the overlay should only
-- be done if an overlay exists, however windows without an overlay may still
-- set the /layer in use/ to 'Normal'. OpenGL commands for the window are
-- directed to the current /layer in use/.
-- | State variable for the per-window /layer in use/ of the
-- /current window/.
layerInUse :: StateVar Layer
layerInUse = makeStateVar getLayerInUse setLayerInUse

-- | Direct subsequent OpenGL commands to the given layer.
setLayerInUse :: Layer -> IO ()
setLayerInUse layer = glutUseLayer (marshalLayer layer)

-- | Query which layer is currently in use.
getLayerInUse :: IO Layer
getLayerInUse = layerGet convert glut_LAYER_IN_USE
   where convert = unmarshalLayer . fromIntegral
--------------------------------------------------------------------------------
-- | Mark the overlay of the given window (or the /current window/, if none is
-- supplied) as needing to be redisplayed. The next iteration through
-- 'Graphics.UI.GLUT.Begin.mainLoop', the window\'s overlay display callback
-- (or simply the display callback if no overlay display callback is registered)
-- will be called to redisplay the window\'s overlay plane. Multiple calls to
-- 'postOverlayRedisplay' before the next display callback opportunity (or
-- overlay display callback opportunity if one is registered) generate only a
-- single redisplay. 'postOverlayRedisplay' may be called within a window\'s
-- display or overlay display callback to re-mark that window for redisplay.
--
-- Logically, overlay damage notification for a window is treated as a
-- 'postOverlayRedisplay' on the damaged window. Unlike damage reported by the
-- window system, 'postOverlayRedisplay' will not set to true the overlay\'s
-- damaged status (see 'Graphics.UI.GLUT.State.damaged').
--
-- Also, see 'Graphics.UI.GLUT.Window.postRedisplay'.
-- | Mark the overlay of the given window (or the /current window/ when
-- 'Nothing') as needing to be redisplayed.
postOverlayRedisplay :: Maybe Window -> IO ()
postOverlayRedisplay Nothing             = glutPostOverlayRedisplay
postOverlayRedisplay (Just (Window win)) = glutPostWindowOverlayRedisplay win
| ducis/haAni | hs/common/Graphics/UI/GLUT/Overlay.hs | gpl-2.0 | 6,954 | 0 | 9 | 1,066 | 519 | 324 | 195 | 46 | 2 |
module Computability.Languages where
import Notes
import Computability.Languages.Macro
import Computability.Languages.Terms
import Functions.BinaryOperation.Terms
import Sets.Algebra.Union.Terms
import Sets.Basics.Terms
import Computability.Symbols.Macro
import Computability.Symbols.Terms hiding (concatenation,
concatenation',
concatenationDefinitionLabel)
-- | The "Languages" section: all definitions and theorems about formal
-- languages, rendered in this order.
-- NOTE(review): 'languesOverAlphabetDefinition' looks like a typo for
-- "languagesOverAlphabetDefinition"; renaming would also touch its
-- definition site below.
languages :: Note
languages = section "Languages" $ do
    languageDefinition
    languageConcatenationDefinition
    concatenationAssociative
    concatenationNotCommutative
    selfConcatenationDefinition
    kleeneStarDefinition
    languagePlusDefinition
    languesOverAlphabetDefinition
    infiniteLanguagesCountable
    uncountablyManyLanguages
    reverseLanguageDefinition
-- | Definition of a (formal) language over an alphabet.
languageDefinition :: Note
languageDefinition = de $ do
    lab languageDefinitionLabel
    s ["A ", language', " over an ", alphabet, " ", m alph_, " is a ", set, " of finite strings over that ", alphabet]
-- | Definition of the concatenation L1 L2 of two languages.
languageConcatenationDefinition :: Note
languageConcatenationDefinition = de $ do
    lab concatenationDefinitionLabel
    s ["The ", concatenation', " ", m (l 1 <@@> l 2), " of two languages ", m (l 1), and, m (l 2), " is the following ", language]
    ma $ (l 1 <@@> l 2) === setcmpr (ss 1 <@> ss 2) (cs [ss 1 ∈ l 1, ss 2 ∈ l 2])
  where
    l n = lan_ !: n
    ss n = str_ !: n
-- | Theorem: language concatenation is associative (proof still TODO).
concatenationAssociative :: Note
concatenationAssociative = thm $ do
    s [the, concatenation, " of languages is ", associative_]
    toprove
-- | Theorem: language concatenation is /not/ commutative
-- (counterexample still TODO).
concatenationNotCommutative :: Note
concatenationNotCommutative = thm $ do
    s [the, concatenation, " of languages is ", emph "not", " ", commutative]
    cexneeded
-- | Definition of L^n, the n-fold self-concatenation, with L^0 = {ε}.
selfConcatenationDefinition :: Note
selfConcatenationDefinition = de $ do
    s [the, concatenation, " of a ", language, " ", m lan_, " with itself ", m n, " times is denoted as ", m (lan_ ^@: n)]
    s [m (lan_ ^@: 0), " is defined as ", m (setof estr)]
    ma $ lan_ ^@: n === (lan_ <@@> (lan_ ^@: (n - 1)))
  where n = "n"
-- | Definition of the Kleene star: the union of all L^n over natural n.
kleeneStarDefinition :: Note
kleeneStarDefinition = de $ do
    lab kleeneStarDefinitionLabel
    s [the, kleeneStar', " ", m (ks lan_), " of a ", language, " ", m lan_, " is the ", union, " of all the concatenations of ", m lan_, " with itself"]
    ma $ ks lan_ === setuncmp (n ∈ naturals) (lan_ ^@: n)
  where n = "n"
-- | Definition of L+ as L concatenated with L*.
languagePlusDefinition :: Note
languagePlusDefinition = de $ do
    s [m (lp lan_), " is defined as ", m (lan_ <@@> ks lan_)]
-- | Definition of the set of all languages over a given alphabet.
languesOverAlphabetDefinition :: Note
languesOverAlphabetDefinition = de $ do
    s ["The ", set, " of all languages over an ", alphabet, " ", m alph_, " is denoted as follows"]
    ma $ loa_ === powset strsof_
-- | Theorem (unproven here): infinite languages are countable.
infiniteLanguagesCountable :: Note
infiniteLanguagesCountable = thm $ do
    s ["Infinite languages are countable"]
    refneeded "countable"
    toprove
-- | Theorem (unproven here): there are uncountably many languages over
-- a given alphabet.
uncountablyManyLanguages :: Note
uncountablyManyLanguages = thm $ do
    s ["There are uncountably infinitely many languages over a given ", alphabet]
    refneeded "uncountably infinite"
    toprove
-- | Definition of the reverse language: all reversed strings of L.
reverseLanguageDefinition :: Note
reverseLanguageDefinition = de $ do
    lab reverseLanguageDefinitionLabel
    s ["The ", reverseLanguage', " ", m (rlan lan_), " is the ", language, " of all reverse strings of the strings in ", m lan_]
    ma $ rlan lan_ === setcmpr (rstr str_) (str_ ∈ lan_)
| NorfairKing/the-notes | src/Computability/Languages.hs | gpl-2.0 | 3,486 | 0 | 14 | 838 | 975 | 511 | 464 | 77 | 1 |
{- Merch.Race.Ruleset - Ruleset-handling code.
Copyright 2013 Alan Manuel K. Gloria
This file is part of Merchant's Race.
Merchant's Race is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Merchant's Race is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Merchant's Race. If not, see <http://www.gnu.org/licenses/>.
-}
{- This module is intended to be imported qualified. -}
module Merch.Race.Ruleset
( Ruleset
, items
, settlementTypes
, difficulties
, producers
, consumers
, craftsmen
, terrain
, nameGenerator
, settlementGenerator
, cartUpgradeCost
, supplyCost
, innCost
, marketLunchCost
, pubLunchCost
, bandits
, permeability
, interestRate
, centerPrice
, pidSettings
, startingLoan
, describe
) where
import Merch.Race.Data
import qualified Merch.Race.Ruleset.Data as D
import Merch.Race.Ruleset.Data(Ruleset)
import qualified Data.Map as Map
import Data.Map(Map)
-- | All item kinds defined by the ruleset.
items :: Ruleset -> [Item]
items = Map.keys . D.itemmap

-- | All settlement types defined by the ruleset.
settlementTypes :: Ruleset -> [SettlementType]
settlementTypes = Map.keys . D.settlementtypemap

-- | All difficulty settings defined by the ruleset.
difficulties :: Ruleset -> [Difficulty]
difficulties = Map.keys . D.difficultymap
-- | Look up a settlement type in the ruleset and project a field from it.
-- Fix: the original 'case' only matched 'Just', so an unknown settlement
-- type died with an anonymous pattern-match failure; it now fails with a
-- descriptive error instead.
lookupSettlementType func r k =
  case Map.lookup k (D.settlementtypemap r) of
    Just d  -> func d
    Nothing -> error "Merch.Race.Ruleset.lookupSettlementType: unknown settlement type"
-- | Goods produced by settlements of the given type.
producers :: Ruleset -> SettlementType -> [ProdCons]
producers = lookupSettlementType D.stproducers
-- | Goods consumed by settlements of the given type.
consumers :: Ruleset -> SettlementType -> [ProdCons]
consumers = lookupSettlementType D.stconsumers
-- | Craftsmen available in settlements of the given type.
craftsmen :: Ruleset -> SettlementType -> [Craftsman]
craftsmen = lookupSettlementType D.stcraftsmen
-- | Terrain kinds a settlement of the given type may occupy.
terrain :: Ruleset -> SettlementType -> [Terrain]
terrain = lookupSettlementType D.stterrain
-- | The ruleset's name generator; falls back to an empty-string
-- generator when the ruleset does not define one.
nameGenerator :: Ruleset -> NameGenerator
nameGenerator = maybe (NGString "") id . D.namegeneratormaybe

-- | Weighted list used to pick settlement types during generation.
settlementGenerator :: Ruleset -> [(Int, SettlementType)]
settlementGenerator r = D.settlementgeneratorlist r
-- | Look up a difficulty in the ruleset and project a field from it.
-- Fix: the original 'case' only matched 'Just', so an unknown difficulty
-- died with an anonymous pattern-match failure; it now fails with a
-- descriptive error instead.
lookupDifficulty func r k =
  case Map.lookup k (D.difficultymap r) of
    Just d  -> func d
    Nothing -> error "Merch.Race.Ruleset.lookupDifficulty: unknown difficulty"
-- | Cost of upgrading the cart at the given difficulty.
cartUpgradeCost :: Ruleset -> Difficulty -> (Rational, ItemSet)
cartUpgradeCost = lookupDifficulty D.cartupgradecost
-- | Cost of travel supplies at the given difficulty.
supplyCost :: Ruleset -> Difficulty -> (Rational, ItemSet)
supplyCost = lookupDifficulty D.supplycost
-- | Cost of a night at an inn at the given difficulty.
innCost :: Ruleset -> Difficulty -> (Rational, ItemSet)
innCost = lookupDifficulty D.inncost
-- | Cost of lunch at the market at the given difficulty.
marketLunchCost :: Ruleset -> Difficulty -> (Rational, ItemSet)
marketLunchCost = lookupDifficulty D.marketlunchcost
-- | Cost of lunch at the pub at the given difficulty.
pubLunchCost :: Ruleset -> Difficulty -> (Rational, ItemSet)
pubLunchCost = lookupDifficulty D.publunchcost
-- | Per-terrain bandit encounter probability at the given difficulty.
bandits :: Ruleset -> Difficulty -> Map Terrain Rational
bandits = lookupDifficulty D.banditprobability
-- | Price permeability factor at the given difficulty.
permeability :: Ruleset -> Difficulty -> Rational
permeability = lookupDifficulty D.permeability
-- | Loan interest rate at the given difficulty.
interestRate :: Ruleset -> Difficulty -> Rational
interestRate = lookupDifficulty D.interestrate
-- | Center price used by the price model at the given difficulty.
centerPrice :: Ruleset -> Difficulty -> Price
centerPrice = lookupDifficulty D.centerprice
-- | PID-controller coefficients at the given difficulty.
pidSettings :: Ruleset -> Difficulty -> (Rational, Rational, Rational)
pidSettings = lookupDifficulty D.pidsettings
-- | The player's starting loan at the given difficulty.
startingLoan :: Ruleset -> Difficulty -> Price
startingLoan = lookupDifficulty D.startingloan
-- Types that can be described in a ruleset.
-- NOTE(review): each instance below matches only 'Just'; describing a
-- key that is missing from the ruleset crashes with a pattern-match
-- failure instead of a helpful error.
class Describable i where
  describe :: Ruleset -> i -> String
instance Describable Item where
  describe r i =
    case Map.lookup i $ D.itemmap r of
      Just s -> s
instance Describable SettlementType where
  describe r i =
    case Map.lookup i $ D.settlementtypemap r of
      Just d -> D.stdesc d
instance Describable Difficulty where
  describe r i =
    case Map.lookup i $ D.difficultymap r of
      Just d -> D.difficultydesc d
| AmkG/merchants-race | Merch/Race/Ruleset.hs | gpl-3.0 | 4,062 | 0 | 10 | 672 | 940 | 503 | 437 | 91 | 2 |
{-# LANGUAGE GeneralizedNewtypeDeriving, DeriveDataTypeable #-}
-----------------------------------------------------------------------------
-- |
-- Module : HEP.Automation.MadGraph.Type
-- Copyright : (c) 2011-2013,2015 Ian-Woo Kim
--
-- License : BSD3
-- Maintainer : Ian-Woo Kim <ianwookim@gmail.com>
-- Stability : experimental
-- Portability : GHC
--
-- types for madgraph
--
-----------------------------------------------------------------------------
module HEP.Automation.MadGraph.Type where
import Data.Typeable
import Data.Data
--
import HEP.Parser.LHE.Sanitizer.Type
-- | Optional salt value used when hashing ('Nothing' means unsalted).
newtype HashSalt = HashSalt { unHashSalt :: Maybe Int }
  deriving (Show, Typeable, Data)

-- | A MadGraph process specification.
data MGProcess = MGProc
  { mgp_definelines :: [String]  -- ^ MadGraph @define@ statements
  , mgp_processes   :: [String]  -- ^ process declaration lines
  } deriving (Show, Typeable, Data)

-- | Beam polarization, given as a right-handed percentage.
newtype Polarization = RH { rhpol_percent :: Double }
  deriving (Show, Typeable, Data)

-- | Polarization of the two initial-state particles.
data InitPolarization = InitPolarization
  { particle1pol :: Polarization
  , particle2pol :: Polarization
  } deriving (Show, Typeable, Data)

-- | Detector to target.
data Detector = Tevatron | LHC | CMS | ATLAS
  deriving (Show, Typeable, Data)

-- | Collider/beam configuration.
data MachineType
  = TeVatron
  | LHC7 Detector
  | LHC8 Detector
  | LHC10 Detector
  | LHC13 Detector
  | LHC14 Detector
  | Parton Double Detector
  | PolParton Double InitPolarization Detector
  deriving (Show, Typeable, Data)

-- | Renormalization-group running mode.
data RGRunType = Fixed | Auto
  deriving (Show, Typeable, Data)

-- | Cut configuration.
data CutType = NoCut | DefCut | KCut
  deriving (Show, Typeable, Data)

-- | Jet matching configuration.
data MatchType = NoMatch | MLM
  deriving (Show, Typeable, Data)

-- | Parton-shower (PYTHIA) configuration.
data PYTHIAType
  = NoPYTHIA
  | RunPYTHIA
  | RunPYTHIA8
  | RunPYTHIA6Detail
      { isISROn :: Bool  -- ^ initial-state radiation switched on?
      , isFSROn :: Bool  -- ^ final-state radiation switched on?
      }
  deriving (Show, Typeable, Data)

-- | jet algorithm implemented in PGS (Cone, K_T , anti-K_T algorithm)
data PGSJetAlgorithm = Cone Double | KTJet Double | AntiKTJet Double
  deriving (Show, Typeable, Data)

-- | Tau reconstruction marker for PGS.
data PGSTau = WithTau deriving (Show, Typeable, Data)

-- | Jet algorithm paired with the tau setting.
type PGSJetAlgoNTau = (PGSJetAlgorithm, PGSTau)

-- | Detector-simulation (PGS) configuration.
data PGSType = NoPGS | RunPGS PGSJetAlgoNTau
  deriving (Show, Typeable, Data)

-- | Whether the resulting HEP file should be uploaded.
data HEPFileType = NoUploadHEP | UploadHEP
  deriving (Show, Typeable, Data)
| wavewave/madgraph-auto | src/HEP/Automation/MadGraph/Type.hs | gpl-3.0 | 2,737 | 0 | 9 | 921 | 528 | 320 | 208 | 47 | 0 |
module Language.Dockerfile.Syntax where
import Data.ByteString.Char8 (ByteString)
import Data.Maybe
import Data.String
import Text.Read
-- | Name of a Docker image.
type Image = String
-- | Tag of a Docker image.
type Tag = String

-- | Exposed ports: a list of numeric ports, or the raw string when the
-- EXPOSE argument is not purely numeric.
data Ports
  = Ports [Integer]
  | PortStr String
  deriving (Show, Eq, Ord)
-- | Parse a string into 'Ports': first try the whole string as one
-- integer; otherwise, if every whitespace-separated word parses as an
-- integer, collect them; otherwise keep the raw string.
instance IsString Ports where
  fromString p =
    case readMaybe p of
      Just single -> Ports [single]
      Nothing     -> maybe (PortStr p) Ports (mapM readMaybe (words p))
-- | A directory path inside the image.
type Directory = String

-- | The image a Dockerfile builds FROM: untagged, tagged, or pinned to
-- a digest.
data BaseImage
  = UntaggedImage Image
  | TaggedImage Image Tag
  | DigestedImage Image ByteString
  deriving (Eq, Ord, Show)

-- | Type of the Dockerfile AST
type Dockerfile = [InstructionPos]

-- | Source path of ADD/COPY.
type Source = String
-- | Destination path of ADD/COPY.
type Destination = String
-- | Arguments of RUN/CMD/ENTRYPOINT.
type Arguments = [String]
-- | Key/value pairs of LABEL/ENV.
type Pairs = [(String, String)]
-- | All commands available in Dockerfiles
data Instruction
  = From BaseImage             -- ^ @FROM@
  | Add Source Destination     -- ^ @ADD@
  | User String                -- ^ @USER@
  | Label Pairs                -- ^ @LABEL@
  | Stopsignal String          -- ^ @STOPSIGNAL@
  | Copy Source Destination    -- ^ @COPY@
  | Run Arguments              -- ^ @RUN@
  | Cmd Arguments              -- ^ @CMD@
  | Workdir Directory          -- ^ @WORKDIR@
  | Expose Ports               -- ^ @EXPOSE@
  | Volume String              -- ^ @VOLUME@
  | Entrypoint Arguments       -- ^ @ENTRYPOINT@
  | Maintainer String          -- ^ @MAINTAINER@
  | Env Pairs                  -- ^ @ENV@
  | Arg String                 -- ^ @ARG@
  | Comment String             -- ^ a @#@ comment line
  | OnBuild Instruction        -- ^ @ONBUILD@
  | EOL                        -- ^ an empty line
  deriving (Eq, Ord, Show)
-- | Name of the file an instruction came from.
type Filename = String
-- | 1-based line number within that file.
type Linenumber = Int

-- | 'Instruction' with additional location information required for creating
-- good check messages
data InstructionPos = InstructionPos Instruction Filename Linenumber
  deriving (Eq, Ord, Show)

-- | Extract the instruction itself.
instruction :: InstructionPos -> Instruction
instruction ip = case ip of
  InstructionPos i _ _ -> i

-- | Extract the name of the source file the instruction came from.
sourcename :: InstructionPos -> Filename
sourcename ip = case ip of
  InstructionPos _ fn _ -> fn
| beijaflor-io/haskell-language-dockerfile | src/Language/Dockerfile/Syntax.hs | gpl-3.0 | 1,734 | 0 | 15 | 425 | 466 | 265 | 201 | 57 | 1 |
module Jason.Util
(
bsToStr
, strToBS
, bsToText
, textToBS
) where
import Data.ByteString
import Data.Text as T
import Data.Text.Encoding (decodeUtf8, encodeUtf8)
-- | Decode a UTF-8 'ByteString' into a 'String'.
bsToStr :: ByteString -> String
bsToStr bs = T.unpack (decodeUtf8 bs)

-- | Encode a 'String' as a UTF-8 'ByteString'.
strToBS :: String -> ByteString
strToBS s = encodeUtf8 (T.pack s)

-- | Decode a UTF-8 'ByteString' into 'Text'.
bsToText :: ByteString -> Text
bsToText bs = decodeUtf8 bs

-- | Encode 'Text' as a UTF-8 'ByteString'.
textToBS :: Text -> ByteString
textToBS t = encodeUtf8 t
| TOSPIO/jason | src/Jason/Util.hs | gpl-3.0 | 445 | 0 | 6 | 107 | 114 | 68 | 46 | 17 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
-- | FR3D provides a very convenient library of explored RNA structures. We are
-- mostly interested in the "basepairs" files. In contrast to the RNAstrand
-- library or melting experiments, these data sets provide non-canonical RNA
-- pairing.
--
-- NOTE that FR3D entries contain basepairs both in (i,j) as well as (j,i)
-- orientation (with i<j).
module Biobase.FR3D where
import Data.ByteString.Char8 as BS
import Data.List as L
import Data.Tuple.Select (sel1)
import Biobase.Primary
import Biobase.Secondary
-- | Encapsulates all the "basepairs" information.
data FR3D = FR3D
  { pdbid :: ByteString -- ^ PDB identifier of the structure
  , chains :: [(ByteString,ByteString)] -- ^ (chain identifier, chain sequence) pairs
  , basepairs :: [Basepair] -- ^ all annotated basepairs
  } deriving (Show)
-- | A single basepair in a basepair system.
data Basepair = Basepair
  { interaction :: ExtPairAnnotation -- ^ extended pair annotation (e.g. cWW)
  -- nucleotide 1
  , nucleotide1 :: Char
  , pdbnumber1 :: Int
  , chain1 :: ByteString
  , seqpos1 :: Int
  -- nucleotide 2
  , nucleotide2 :: Char
  , pdbnumber2 :: Int
  , chain2 :: ByteString
  , seqpos2 :: Int
  } deriving (Show,Eq,Ord)
-- | Linearized FR3D format.
data LinFR3D = LinFR3D
  { pdbID :: ByteString -- ^ PDB identifier
  , sequence :: ByteString -- ^ all chain sequences joined with "&"
  , pairs :: [TriPair] -- ^ pairs re-indexed into the joined sequence
  } deriving (Show)
type TriPair = (ExtPairIdx,ExtPair,Basepair) -- we keep the ExtPair information as provided by the non-linearized FR3D data
-- | The default format is a bit unwieldy; Linearization assumes that all
-- sequences are in 5'->3' order; then produces one sequence with "&"
-- separating the sequences and pairs reduced to (Int,Int,cWW).
linearizeFR3D :: FR3D -> LinFR3D
linearizeFR3D FR3D{..} = LinFR3D
  { pdbID = pdbid
  , sequence = BS.intercalate "&" $ L.map snd chains
  , pairs = L.map f basepairs
  } where
      -- 'trans' maps each chain id to the offset of that chain's first
      -- character in the joined sequence; "+ 1" accounts for the "&"
      -- separator following each chain.
      trans = snd $ L.mapAccumL ( \acc (x,y) -> (acc + 1 + BS.length y, (x,acc))
                                ) 0 chains
      -- Re-index one basepair into joined-sequence coordinates; a chain
      -- id that is not listed in 'chains' yields position -1.
      f bp@Basepair{..} = (pi,p,bp) where
        pi = ( ( maybe (-1) (\v -> v+seqpos1) $ L.lookup chain1 trans
               , maybe (-1) (\v -> v+seqpos2) $ L.lookup chain2 trans
               )
             , interaction
             )
        p = ( (mkNuc nucleotide1, mkNuc nucleotide2), interaction )
-- | Drop the duplicated (j,i) orientation of each basepair, keeping only
-- the orientation where the first position is the smaller one.
class RemoveDuplicatePairs a where
  removeDuplicatePairs :: a -> a

instance RemoveDuplicatePairs FR3D where
  removeDuplicatePairs fr3d =
    fr3d { basepairs = L.filter keep (basepairs fr3d) }
    where
      keep bp = (chain1 bp, seqpos1 bp) < (chain2 bp, seqpos2 bp)

instance RemoveDuplicatePairs LinFR3D where
  removeDuplicatePairs lin =
    lin { pairs = L.filter keep (pairs lin) }
    where
      keep (((i,j),_),_,_) = i < j
-- ** Checking data structures
-- | Checks an FR3D file for correctness. Returns either a Left on errors or
-- Right FR3D if correct.
--
-- A basepair is flagged when a position is negative, runs past the end of
-- its chain, or disagrees with the nucleotide stored in the chain.
--
-- NOTE(review): the 'let Just c1/c2' bindings below are partial — a
-- basepair that references a chain id missing from 'chains' crashes
-- instead of being reported (hence the TODO).
--
-- TODO chain existence check
checkFR3D fr3d@FR3D{..}
  | L.null xs = Right fr3d
  | otherwise = Left (fr3d,xs)
  where
    xs = [ x
         | x <- basepairs
         , let Just c1 = lookup (chain1 x) chains
         , let Just c2 = lookup (chain2 x) chains
         , seqpos1 x < 0
           || seqpos2 x < 0
           || seqpos1 x >= BS.length c1
           || seqpos2 x >= BS.length c2
           || nucleotide1 x /= c1 `BS.index` seqpos1 x
           || nucleotide2 x /= c2 `BS.index` seqpos2 x
         ]
-- | Checks a linearized FR3D structure: every pair must index inside the
-- joined sequence and agree with the nucleotides stored there.
-- Returns Left with the offending pairs, or Right on success.
checkLinFR3D linfr3d@LinFR3D{..}
  | L.null xs = Right linfr3d
  | otherwise = Left (linfr3d,xs)
  where
    xs = [ x
         | x@(pi,p,_) <- pairs
         , baseL pi < 0
           || baseR pi < 0
           || baseL pi >= BS.length sequence
           || baseR pi >= BS.length sequence
           || mkNuc (sequence `BS.index` baseL pi) /= baseL p
           || mkNuc (sequence `BS.index` baseR pi) /= baseR p
         ]
| choener/BiobaseFR3D | Biobase/FR3D.hs | gpl-3.0 | 3,671 | 0 | 22 | 938 | 1,067 | 586 | 481 | 74 | 1 |
import Joc
import System.Random
-- | Entry point: ask the user (in Catalan) for the number of rounds,
-- seed the RNG and start a game with empty histories and score 0.
-- NOTE(review): 'read nmoves' is partial and crashes on non-numeric
-- input; 'readMaybe' would be safer.
main:: IO ()
main = do
  putStrLn "Escriu el número de rondes:"
  nmoves <- getLine
  g <- getStdGen
  partida1 (read nmoves) [] [] g 0
-- | Play the game: when no rounds remain, report the result; otherwise
-- read the user's move, pick the computer's move via 'estrategia1',
-- show both moves and let 'guanyador' decide the round.
partida1:: Int -> [Moviment] -> [Moviment] -> StdGen -> Int -> IO ()
partida1 nmoves l1 l2 g1 n
  | nmoves < 1 = escriuResultat n
  | otherwise = do
      putStrLn "Escriu el teu Moviment (Llangardaix,Paper,Pedra,Tisores,Spock) seguit d'un salt de línea:"
      m1 <- getLine
      putStrLn ("El teu Moviment és: " ++ (show m1) ++ " El meu Moviment és: " ++ (show m2))
      guanyador nmoves (read m1) m2 l1 l2 g2 n
  where
    (m2,g2) = estrategia1 l1 l2 g1
-- | Print the final result based on the accumulated score:
-- positive means the user won, zero is a draw, negative means the
-- computer won.
escriuResultat :: (Ord a, Num a) => a -> IO ()
escriuResultat n
  | n > 0     = putStrLn "Fi, guanya l'Usuari."
  | n == 0    = putStrLn "Fi, Empat."
  | otherwise = putStrLn "Fi, guanya l'Ordinador."
-- | Decide the winner of a round, update both move histories and the
-- score, and continue with the next round.
--
-- Bug fix: in the "user wins" branch the computer's move was appended to
-- the USER's history, '(m2:l1)', instead of the computer's, '(m2:l2)',
-- corrupting the computer's history; the other two branches already
-- used '(m2:l2)'.
guanyador:: Int -> Moviment -> Moviment -> [Moviment] -> [Moviment] -> StdGen -> Int -> IO()
guanyador nmoves m1 m2 l1 l2 g n
  | m1 > m2 = do
      putStrLn ("Guanya l'Usuari." ++ "\n")
      partida1 (nmoves-1) (m1:l1) (m2:l2) g (n+1)
  | m1 == m2 = do
      putStrLn ("Empat." ++ "\n")
      partida1 (nmoves-1) (m1:l1) (m2:l2) g n
  | otherwise = do
      putStrLn ("Guanya l'Ordinador." ++ "\n")
      partida1 (nmoves-1) (m1:l1) (m2:l2) g (n-1)
| marccomino/Small_Programs_in_Haskell | Practica2/JocUsuari.hs | gpl-3.0 | 1,365 | 20 | 13 | 366 | 586 | 301 | 285 | 26 | 3 |
module Main where
import Waifu.Simple
-- | Program entry point; delegates to 'waifu' from "Waifu.Simple".
main = waifu
| Volafile-sekrit-prokekt/waifu-simulator | src/Main.hs | gpl-3.0 | 56 | 0 | 4 | 13 | 14 | 9 | 5 | 3 | 1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="sk-SK">
<title>TLS Debug | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | secdec/zap-extensions | addOns/tlsdebug/src/main/javahelp/org/zaproxy/zap/extension/tlsdebug/resources/help_sk_SK/helpset_sk_SK.hs | apache-2.0 | 970 | 83 | 52 | 159 | 396 | 209 | 187 | -1 | -1 |
module Aliyun.Auth (
hmacSha1
, base64
, md5
, Secret(..)
, Message(..)
, AkId(..)
) where
import Data.ByteString.Base64 as BB
import Data.ByteString.Char8 as BC
import Data.ByteString as B
import Data.HMAC
import Data.Word (Word8)
import qualified Data.Digest.MD5 as M
-- | A signing secret (the access key secret).
newtype Secret = Secret { unSecret :: String } deriving (Eq)

instance Show Secret where
    show = unSecret

-- | The raw message bytes to be signed.
newtype Message = Message { unMessage :: ByteString } deriving (Eq)

instance Show Message where
    show = BC.unpack . unMessage
-- | HMAC-SHA1 of the message, keyed by the secret.
hmacSha1 :: Secret -> Message -> [Word8]
hmacSha1 (Secret s) (Message m) = hmac_sha1 key body
  where
    key  = B.unpack (BC.pack s)
    body = B.unpack m

-- | Base64-encode raw bytes into a 'String'.
base64 :: [Word8] -> String
base64 ws = BC.unpack (BB.encode (B.pack ws))
-- | An access key identifier.
newtype AkId = AkId { unAkId :: String } deriving (Eq)

instance Show AkId where
    show = unAkId

-- | MD5 digest of a strict 'ByteString'.
md5 :: B.ByteString -> B.ByteString
md5 bs = B.pack (M.hash (B.unpack bs))
| izgzhen/aliyun-haskell-sdk | src/Aliyun/Auth.hs | apache-2.0 | 908 | 0 | 9 | 172 | 340 | 198 | 142 | 28 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE RecordWildCards #-}
module Data.Geometry.GeoJsonToMvt where
import qualified Control.Monad.ST as MonadST
import qualified Data.Aeson as Aeson
import qualified Data.Foldable as Foldable
import qualified Data.Geospatial as Geospatial
import Data.Monoid
import qualified Data.Sequence as Sequence
import qualified Data.STRef as STRef
import Prelude hiding (Left, Right)
import qualified Data.Geometry.Types.GeoJsonFeatures as TypesGeoJsonFeatures
import qualified Data.Geometry.Types.MvtFeatures as TypesMvtFeatures
-- Lib
-- | Fold a sequence of GeoJSON features into MVT feature collections.
-- The 'STRef' counter supplies ids for features that carry none.
-- NOTE(review): 'layer' is handed to every feature and the per-feature
-- results are combined via 'foldMap' — confirm the monoidal combination
-- of 'layer' copies is intended.
geoJsonFeaturesToMvtFeatures :: TypesGeoJsonFeatures.MvtFeatures -> Sequence.Seq (Geospatial.GeoFeature Aeson.Value) -> MonadST.ST s TypesGeoJsonFeatures.MvtFeatures
geoJsonFeaturesToMvtFeatures layer features = do
  ops <- STRef.newSTRef 0
  Foldable.foldMap (convertFeature layer ops) features
-- Feature
-- | Convert one GeoJSON feature, first resolving its id (explicit id or
-- the next value of the shared counter), then converting its geometry.
convertFeature :: TypesGeoJsonFeatures.MvtFeatures -> STRef.STRef s Word -> Geospatial.GeoFeature Aeson.Value -> MonadST.ST s TypesGeoJsonFeatures.MvtFeatures
convertFeature layer ops (Geospatial.GeoFeature _ geom props mfid) =
  fmap (\fid -> convertGeometry layer fid props geom) (convertId mfid ops)
-- Geometry
-- | Dispatch on the geometry kind, adding the converted geometry to the
-- matching field of 'layer'; collections are converted recursively and
-- combined monoidally.
-- NOTE(review): 'NoGeometry' yields 'mempty' (not 'layer') — confirm
-- that dropping the accumulated layer for empty geometries is intended.
convertGeometry :: TypesGeoJsonFeatures.MvtFeatures -> Word -> Aeson.Value -> Geospatial.GeospatialGeometry -> TypesGeoJsonFeatures.MvtFeatures
convertGeometry layer@TypesGeoJsonFeatures.MvtFeatures{..} fid props geom =
  case geom of
    Geospatial.NoGeometry -> mempty
    Geospatial.Point g -> layer { TypesGeoJsonFeatures.mvtPoints = TypesMvtFeatures.mkPoint fid props (TypesGeoJsonFeatures.convertPoint g) mvtPoints }
    Geospatial.MultiPoint g -> layer { TypesGeoJsonFeatures.mvtPoints = TypesMvtFeatures.mkPoint fid props (TypesGeoJsonFeatures.convertMultiPoint g) mvtPoints }
    Geospatial.Line g -> layer { TypesGeoJsonFeatures.mvtLines = TypesMvtFeatures.mkLineString fid props (TypesGeoJsonFeatures.convertLineString g) mvtLines }
    Geospatial.MultiLine g -> layer { TypesGeoJsonFeatures.mvtLines = TypesMvtFeatures.mkLineString fid props (TypesGeoJsonFeatures.convertMultiLineString g) mvtLines }
    Geospatial.Polygon g -> layer { TypesGeoJsonFeatures.mvtPolygons = TypesMvtFeatures.mkPolygon fid props (TypesGeoJsonFeatures.convertPolygon g) mvtPolygons }
    Geospatial.MultiPolygon g -> layer { TypesGeoJsonFeatures.mvtPolygons = TypesMvtFeatures.mkPolygon fid props (TypesGeoJsonFeatures.convertMultiPolygon g) mvtPolygons }
    Geospatial.Collection gs -> Foldable.foldMap (convertGeometry layer fid props) gs
-- FeatureID
-- | Extract a numeric feature id, if one is present.
readFeatureID :: Maybe Geospatial.FeatureID -> Maybe Word
readFeatureID (Just (Geospatial.FeatureIDNumber x)) = Just (fromIntegral x)
readFeatureID _ = Nothing
-- | Use the feature's own numeric id when present; otherwise increment
-- the shared counter and use its new value.
convertId :: Maybe Geospatial.FeatureID -> STRef.STRef s Word -> MonadST.ST s Word
convertId mfid ops =
  case readFeatureID mfid of
    Just val -> pure val
    Nothing -> do
      STRef.modifySTRef ops (+1)
      STRef.readSTRef ops
| sitewisely/zellige | src/Data/Geometry/GeoJsonToMvt.hs | apache-2.0 | 3,221 | 0 | 13 | 646 | 758 | 394 | 364 | 44 | 8 |
{-# LANGUAGE CPP, NondecreasingIndentation, TupleSections, RecordWildCards #-}
{-# OPTIONS_GHC -fno-cse #-}
-- -fno-cse is needed for GLOBAL_VAR's to behave properly
--
-- (c) The University of Glasgow 2002-2006
--
-- | The dynamic linker for GHCi.
--
-- This module deals with the top-level issues of dynamic linking,
-- calling the object-code linker and the byte-code linker where
-- necessary.
module Linker ( getHValue, showLinkerState,
linkExpr, linkDecls, unload, withExtendedLinkEnv,
extendLinkEnv, deleteFromLinkEnv,
extendLoadedPkgs,
linkPackages,initDynLinker,linkModule,
linkCmdLineLibs
) where
#include "HsVersions.h"
import GhcPrelude
import GHCi
import GHCi.RemoteTypes
import LoadIface
import ByteCodeLink
import ByteCodeAsm
import ByteCodeTypes
import TcRnMonad
import Packages
import DriverPhases
import Finder
import HscTypes
import Name
import NameEnv
import Module
import ListSetOps
import DynFlags
import BasicTypes
import Outputable
import Panic
import Util
import ErrUtils
import SrcLoc
import qualified Maybes
import UniqDSet
import FastString
import Platform
import SysTools
import FileCleanup
-- Standard libraries
import Control.Monad
import Data.Char (isSpace)
import Data.IORef
import Data.List
import Data.Maybe
import Control.Concurrent.MVar
import System.FilePath
import System.Directory
import System.IO.Unsafe
import System.Environment (lookupEnv)
#if defined(mingw32_HOST_OS)
import System.Win32.Info (getSystemDirectory)
#endif
import Exception
import Foreign (Ptr) -- needed for 2nd stage
{- **********************************************************************
The Linker's state
********************************************************************* -}
{-
The persistent linker state *must* match the actual state of the
C dynamic linker at all times, so we keep it in a private global variable.
The global IORef used for PersistentLinkerState actually contains another MVar.
The reason for this is that we want to allow another loaded copy of the GHC
library to side-effect the PLS and for those changes to be reflected here.
The PersistentLinkerState maps Names to actual closures (for
interpreted code only), for use during linking.
-}
#if STAGE < 2
GLOBAL_VAR_M(v_PersistentLinkerState, newMVar (panic "Dynamic linker not initialised"), MVar PersistentLinkerState)
GLOBAL_VAR(v_InitLinkerDone, False, Bool) -- Set True when dynamic linker is initialised
#else
SHARED_GLOBAL_VAR_M( v_PersistentLinkerState
, getOrSetLibHSghcPersistentLinkerState
, "getOrSetLibHSghcPersistentLinkerState"
, newMVar (panic "Dynamic linker not initialised")
, MVar PersistentLinkerState)
-- Set True when dynamic linker is initialised
SHARED_GLOBAL_VAR( v_InitLinkerDone
, getOrSetLibHSghcInitLinkerDone
, "getOrSetLibHSghcInitLinkerDone"
, False
, Bool)
#endif
-- | Modify the persistent linker state under its MVar lock.
modifyPLS_ :: (PersistentLinkerState -> IO PersistentLinkerState) -> IO ()
modifyPLS_ f = do
  plsVar <- readIORef v_PersistentLinkerState
  modifyMVar_ plsVar f

-- | Like 'modifyPLS_', but also returns a result.
modifyPLS :: (PersistentLinkerState -> IO (PersistentLinkerState, a)) -> IO a
modifyPLS f = do
  plsVar <- readIORef v_PersistentLinkerState
  modifyMVar plsVar f
-- | The dynamic linker's persistent state; it must mirror the actual
-- state of the C dynamic linker at all times.
data PersistentLinkerState
   = PersistentLinkerState {
        -- | Current global mapping from Names to their true values
        closure_env :: ClosureEnv,

        -- | The current global mapping from RdrNames of DataCons to
        -- info table addresses.  When a new Unlinked is linked into the
        -- running image, or an existing module in the image is replaced,
        -- the itbl_env must be updated appropriately.
        itbl_env :: !ItblEnv,

        -- | The currently loaded interpreted modules (home package)
        bcos_loaded :: ![Linkable],

        -- | And the currently-loaded compiled modules (home package)
        objs_loaded :: ![Linkable],

        -- | The currently-loaded packages; always object code.  Held,
        -- as usual, in dependency order; though I am not sure if that
        -- is really important.
        pkgs_loaded :: ![LinkerUnitId],

        -- | Names of previously created temporary DLL/.so libraries,
        -- remembered so we can link them (see #10322)
        temp_sos :: ![(FilePath, String)] }
-- | The initial linker state: empty environments and nothing loaded,
-- except the packages the interpreter shares with the compiler.
emptyPLS :: DynFlags -> PersistentLinkerState
emptyPLS _ =
  PersistentLinkerState
    { closure_env = emptyNameEnv
    , itbl_env    = emptyNameEnv
    , pkgs_loaded = init_pkgs
    , bcos_loaded = []
    , objs_loaded = []
    , temp_sos    = []
    }
  -- Packages that don't need loading, because the compiler
  -- shares them with the interpreted program.
  --
  -- The linker's symbol table is populated with RTS symbols using an
  -- explicit list.  See rts/Linker.c for details.
  where init_pkgs = map toInstalledUnitId [rtsUnitId]
-- | Record additional packages as already loaded.
extendLoadedPkgs :: [InstalledUnitId] -> IO ()
extendLoadedPkgs pkgs = modifyPLS_ extend
  where
    extend pls = return pls { pkgs_loaded = pkgs ++ pkgs_loaded pls }
-- | Add bindings for interpreted values to the closure environment.
extendLinkEnv :: [(Name,ForeignHValue)] -> IO ()
extendLinkEnv new_bindings =
  modifyPLS_ $ \pls ->
    return pls { closure_env = extendClosureEnv (closure_env pls) new_bindings }
-- | Remove the given Names from the closure environment.
deleteFromLinkEnv :: [Name] -> IO ()
deleteFromLinkEnv to_remove = modifyPLS_ $ \pls ->
  return pls { closure_env = delListFromNameEnv (closure_env pls) to_remove }
-- | Get the 'HValue' associated with the given name.
--
-- May cause loading the module that contains the name.
--
-- Throws a 'ProgramError' if loading fails or the name cannot be found.
getHValue :: HscEnv -> Name -> IO ForeignHValue
getHValue hsc_env name = do
  initDynLinker hsc_env
  -- Take the linker lock; for external names, link the defining module
  -- (and its dependencies) before looking the value up.
  pls <- modifyPLS $ \pls -> do
           if (isExternalName name) then do
             (pls', ok) <- linkDependencies hsc_env pls noSrcSpan
                              [nameModule name]
             if (failed ok) then throwGhcExceptionIO (ProgramError "")
                            else return (pls', pls')
           else
             return (pls, pls)
  case lookupNameEnv (closure_env pls) name of
    Just (_,aa) -> return aa
    Nothing
        -- Not in the closure environment: fall back to looking the
        -- symbol up in the object-code linker by its C label.
        -> ASSERT2(isExternalName name, ppr name)
           do let sym_to_find = nameToCLabel name "closure"
              m <- lookupClosure hsc_env (unpackFS sym_to_find)
              case m of
                Just hvref -> mkFinalizedHValue hsc_env hvref
                Nothing -> linkFail "ByteCodeLink.lookupCE"
                             (unpackFS sym_to_find)
-- | Link the packages and home-package modules that the given modules
-- depend on (transitively), returning the updated linker state and
-- whether linking succeeded.  Assumes the caller holds the linker lock.
linkDependencies :: HscEnv -> PersistentLinkerState
                 -> SrcSpan -> [Module]
                 -> IO (PersistentLinkerState, SuccessFlag)
linkDependencies hsc_env pls span needed_mods = do
--   initDynLinker (hsc_dflags hsc_env)
   let hpt = hsc_HPT hsc_env
       dflags = hsc_dflags hsc_env
   -- The interpreter and dynamic linker can only handle object code built
   -- the "normal" way, i.e. no non-std ways like profiling or ticky-ticky.
   -- So here we check the build tag: if we're building a non-standard way
   -- then we need to find & link object files built the "normal" way.
   maybe_normal_osuf <- checkNonStdWay dflags span
   -- Find what packages and linkables are required
   (lnks, pkgs) <- getLinkDeps hsc_env hpt pls
                               maybe_normal_osuf span needed_mods
   -- Link the packages and modules required
   pls1 <- linkPackages' hsc_env pkgs pls
   linkModules hsc_env pls1 lnks
-- | Temporarily extend the linker state with extra Name bindings for the
-- duration of an action; the bindings are removed afterwards even if the
-- action throws (bracket semantics).
withExtendedLinkEnv :: (ExceptionMonad m) =>
                       [(Name,ForeignHValue)] -> m a -> m a
withExtendedLinkEnv new_env action
    = gbracket (liftIO $ extendLinkEnv new_env)
               (\_ -> reset_old_env)
               (\_ -> action)
  where
    -- Remember that the linker state might be side-effected
    -- during the execution of the IO action, and we don't want to
    -- lose those changes (we might have linked a new module or
    -- package), so the reset action only removes the names we
    -- added earlier.
    reset_old_env = liftIO $ do
        modifyPLS_ $ \pls ->
            let cur = closure_env pls
                new = delListFromNameEnv cur (map fst new_env)
            in return pls{ closure_env = new }
-- | Dump the persistent linker state (loaded packages, object modules
-- and byte-code modules) through the logger as a @SevDump@ message.
showLinkerState :: DynFlags -> IO ()
showLinkerState dflags = do
  pls <- readMVar =<< readIORef v_PersistentLinkerState
  putLogMsg dflags NoReason SevDump noSrcSpan (defaultDumpStyle dflags) $
    vcat [ text "----- Linker state -----"
         , text "Pkgs:" <+> ppr (pkgs_loaded pls)
         , text "Objs:" <+> ppr (objs_loaded pls)
         , text "BCOs:" <+> ppr (bcos_loaded pls) ]
{- **********************************************************************
Initialisation
********************************************************************* -}
-- | Initialise the dynamic linker. This entails
--
-- a) Calling the C initialisation procedure,
--
-- b) Loading any packages specified on the command line,
--
-- c) Loading any packages specified on the command line, now held in the
-- @-l@ options in @v_Opt_l@,
--
-- d) Loading any @.o\/.dll@ files specified on the command line, now held
-- in @ldInputs@,
--
-- e) Loading any MacOS frameworks.
--
-- NOTE: This function is idempotent; if called more than once, it does
-- nothing. This is useful in Template Haskell, where we call it before
-- trying to link.
--
initDynLinker :: HscEnv -> IO ()
initDynLinker hsc_env = modifyPLS_ $ \pls0 -> do
  -- The v_InitLinkerDone flag makes this idempotent: only the first
  -- call performs the real initialisation.
  already_done <- readIORef v_InitLinkerDone
  if already_done
    then return pls0
    else do writeIORef v_InitLinkerDone True
            reallyInitDynLinker hsc_env
-- | Perform the one-time linker initialisation: set up the C object
-- linker, load the command-line packages, then handle command-line
-- libraries, object files and frameworks.
reallyInitDynLinker :: HscEnv -> IO PersistentLinkerState
reallyInitDynLinker hsc_env = do
  let dflags = hsc_dflags hsc_env
  -- (a) initialise the C dynamic linker
  initObjLinker hsc_env
  -- (b) Load packages from the command-line (Note [preload packages])
  pls <- linkPackages' hsc_env (preloadPackages (pkgState dflags))
                       (emptyPLS dflags)
  -- steps (c), (d) and (e)
  linkCmdLineLibs' hsc_env pls
-- | Public entry point for linking command-line libraries: initialises
-- the linker if necessary, then runs 'linkCmdLineLibs'' under the lock.
linkCmdLineLibs :: HscEnv -> IO ()
linkCmdLineLibs hsc_env = do
  initDynLinker hsc_env
  modifyPLS_ (linkCmdLineLibs' hsc_env)
-- | Implement steps (c), (d) and (e) of linker initialisation: link
-- libraries requested with @-l@, object files named directly on the
-- command line, and (on Darwin) any frameworks.  Returns the updated
-- linker state; if nothing was requested the state is returned untouched.
--
-- Fix: the original bound @platform@ twice with identical definitions
-- (once for the pthread special case, once for frameworks); the second
-- binding shadowed the first and triggered -Wname-shadowing.  The
-- duplicate has been removed.
linkCmdLineLibs' :: HscEnv -> PersistentLinkerState -> IO PersistentLinkerState
linkCmdLineLibs' hsc_env pls = do
    let dflags@(DynFlags { ldInputs = cmdline_ld_inputs
                         , libraryPaths = lib_paths_base })
          = hsc_dflags hsc_env
    -- (c) Link libraries from the command-line
    let minus_ls_1 = [ lib | Option ('-':'l':lib) <- cmdline_ld_inputs ]
    -- On Windows we want to add libpthread by default just as GCC would.
    -- However because we don't know the actual name of pthread's dll we
    -- need to defer this to the locateLib call so we can't initialize it
    -- inside of the rts. Instead we do it here to be able to find the
    -- import library for pthreads. See Trac #13210.
    let platform = targetPlatform dflags
        os = platformOS platform
        minus_ls = case os of
                     OSMinGW32 -> "pthread" : minus_ls_1
                     _ -> minus_ls_1
    -- See Note [Fork/Exec Windows]
    gcc_paths <- getGCCPaths dflags os
    lib_paths_env <- addEnvPaths "LIBRARY_PATH" lib_paths_base
    maybePutStrLn dflags "Search directories (user):"
    maybePutStr dflags (unlines $ map (" "++) lib_paths_env)
    maybePutStrLn dflags "Search directories (gcc):"
    maybePutStr dflags (unlines $ map (" "++) gcc_paths)
    libspecs
      <- mapM (locateLib hsc_env False lib_paths_env gcc_paths) minus_ls
    -- (d) Link .o files from the command-line
    classified_ld_inputs <- mapM (classifyLdInput dflags)
                              [ f | FileOption _ f <- cmdline_ld_inputs ]
    -- (e) Link any MacOS frameworks.  NB: uses the 'platform' binding
    -- established above; there is no need to rebind it.
    let (framework_paths, frameworks) =
          if platformUsesFrameworks platform
            then (frameworkPaths dflags, cmdlineFrameworks dflags)
            else ([],[])
    -- Finally do (c),(d),(e)
    let cmdline_lib_specs = catMaybes classified_ld_inputs
                         ++ libspecs
                         ++ map Framework frameworks
    if null cmdline_lib_specs
      then return pls
      else do
        -- Add directories to library search paths, this only has an effect
        -- on Windows. On Unix OSes this function is a NOP.
        let all_paths =
              let paths = takeDirectory (fst $ sPgm_c $ settings dflags)
                        : framework_paths
                       ++ lib_paths_base
                       ++ [ takeDirectory dll | DLLPath dll <- libspecs ]
              in nub $ map normalise paths
        let lib_paths = nub $ lib_paths_base ++ gcc_paths
        all_paths_env <- addEnvPaths "LD_LIBRARY_PATH" all_paths
        pathCache <- mapM (addLibrarySearchPath hsc_env) all_paths_env
        pls1 <- foldM (preloadLib hsc_env lib_paths framework_paths) pls
                      cmdline_lib_specs
        maybePutStr dflags "final link ... "
        ok <- resolveObjs hsc_env
        -- DLLs are loaded, reset the search paths
        mapM_ (removeLibrarySearchPath hsc_env) $ reverse pathCache
        if succeeded ok
          then maybePutStrLn dflags "done"
          else throwGhcExceptionIO
                 (ProgramError "linking extra libraries/objects failed")
        return pls1
{- Note [preload packages]
Why do we need to preload packages from the command line? This is an
explanation copied from #2437:
I tried to implement the suggestion from #3560, thinking it would be
easy, but there are two reasons we link in packages eagerly when they
are mentioned on the command line:
* So that you can link in extra object files or libraries that
depend on the packages. e.g. ghc -package foo -lbar where bar is a
C library that depends on something in foo. So we could link in
foo eagerly if and only if there are extra C libs or objects to
link in, but....
* Haskell code can depend on a C function exported by a package, and
the normal dependency tracking that TH uses can't know about these
dependencies. The test ghcilink004 relies on this, for example.
I conclude that we need two -package flags: one that says "this is a
package I want to make available", and one that says "this is a
package I want to link in eagerly". Would that be too complicated for
users?
-}
-- | Classify a file named on the command line: a static object, a
-- dynamic library, or (with a warning) something we don't know how to
-- load, in which case it is ignored.
classifyLdInput :: DynFlags -> FilePath -> IO (Maybe LibrarySpec)
classifyLdInput dflags f
  | isObjectFilename platform f = pure (Just (Object f))
  | isDynLibFilename platform f = pure (Just (DLLPath f))
  | otherwise = do
      let warning = "Warning: ignoring unrecognised input `" ++ f ++ "'"
      putLogMsg dflags NoReason SevInfo noSrcSpan
        (defaultUserStyle dflags) (text warning)
      pure Nothing
  where
    platform = targetPlatform dflags
-- | Load one command-line-requested library (object file, archive, DLL
-- or framework) into the running linker, reporting progress.  Fails with
-- a 'CmdLineError' if a dynamic library or framework cannot be loaded.
preloadLib
  :: HscEnv -> [String] -> [String] -> PersistentLinkerState
  -> LibrarySpec -> IO PersistentLinkerState
preloadLib hsc_env lib_paths framework_paths pls lib_spec = do
  maybePutStr dflags ("Loading object " ++ showLS lib_spec ++ " ... ")
  case lib_spec of
    Object static_ish -> do
      (b, pls1) <- preload_static lib_paths static_ish
      maybePutStrLn dflags (if b then "done" else "not found")
      return pls1
    Archive static_ish -> do
      b <- preload_static_archive lib_paths static_ish
      maybePutStrLn dflags (if b then "done" else "not found")
      return pls
    DLL dll_unadorned -> do
      maybe_errstr <- loadDLL hsc_env (mkSOName platform dll_unadorned)
      case maybe_errstr of
         Nothing -> maybePutStrLn dflags "done"
         Just mm | platformOS platform /= OSDarwin ->
           preloadFailed mm lib_paths lib_spec
         Just mm | otherwise -> do
           -- As a backup, on Darwin, try to also load a .so file
           -- since (apparently) some things install that way - see
           -- ticket #8770.
           let libfile = ("lib" ++ dll_unadorned) <.> "so"
           err2 <- loadDLL hsc_env libfile
           case err2 of
             Nothing -> maybePutStrLn dflags "done"
             Just _ -> preloadFailed mm lib_paths lib_spec
      return pls
    DLLPath dll_path -> do
      do maybe_errstr <- loadDLL hsc_env dll_path
         case maybe_errstr of
            Nothing -> maybePutStrLn dflags "done"
            Just mm -> preloadFailed mm lib_paths lib_spec
         return pls
    Framework framework ->
      if platformUsesFrameworks (targetPlatform dflags)
      then do maybe_errstr <- loadFramework hsc_env framework_paths framework
              case maybe_errstr of
                 Nothing -> maybePutStrLn dflags "done"
                 Just mm -> preloadFailed mm framework_paths lib_spec
              return pls
      else panic "preloadLib Framework"
  where
    dflags = hsc_dflags hsc_env
    platform = targetPlatform dflags
    -- Report a failed dynamic-library/framework load and abort.
    preloadFailed :: String -> [String] -> LibrarySpec -> IO ()
    preloadFailed sys_errmsg paths spec
       = do maybePutStr dflags "failed.\n"
            throwGhcExceptionIO $
              CmdLineError (
                    "user specified .o/.so/.DLL could not be loaded ("
                    ++ sys_errmsg ++ ")\nWhilst trying to load: "
                    ++ showLS spec ++ "\nAdditional directories searched:"
                    ++ (if null paths then " (none)" else
                          intercalate "\n" (map (" "++) paths)))
    -- Not interested in the paths in the static case.
    -- Dynamic GHC must go through dynLoadObjs (temporary shared object);
    -- static GHC can load the object directly.
    preload_static _paths name
       = do b <- doesFileExist name
            if not b then return (False, pls)
            else if dynamicGhc
                 then do pls1 <- dynLoadObjs hsc_env pls [name]
                         return (True, pls1)
                 else do loadObj hsc_env name
                         return (True, pls)
    preload_static_archive _paths name
       = do b <- doesFileExist name
            if not b then return False
            else do if dynamicGhc
                        then panic "Loading archives not supported"
                        else loadArchive hsc_env name
                    return True
{- **********************************************************************
Link a byte-code expression
********************************************************************* -}
-- | Link a single expression, /including/ first linking packages and
-- modules that this expression depends on.
--
-- Raises an IO exception ('ProgramError') if it can't find a compiled
-- version of the dependents to link.
--
linkExpr :: HscEnv -> SrcSpan -> UnlinkedBCO -> IO ForeignHValue
linkExpr hsc_env span root_ul_bco
  = do {
     -- Initialise the linker (if it's not been done already)
   ; initDynLinker hsc_env
     -- Take lock for the actual work.
   ; modifyPLS $ \pls0 -> do {
     -- Link the packages and modules required
   ; (pls, ok) <- linkDependencies hsc_env pls0 span needed_mods
   ; if failed ok then
        throwGhcExceptionIO (ProgramError "")
     else do {
     -- Link the expression itself
     let ie = itbl_env pls
         ce = closure_env pls
     -- Link the necessary packages and linkables
   ; let nobreakarray = error "no break array"
         bco_ix = mkNameEnv [(unlinkedBCOName root_ul_bco, 0)]
   ; resolved <- linkBCO hsc_env ie ce bco_ix nobreakarray root_ul_bco
   ; [root_hvref] <- createBCOs hsc_env [resolved]
   ; fhv <- mkFinalizedHValue hsc_env root_hvref
   ; return (pls, fhv)
   }}}
   where
     free_names = uniqDSetToList (bcoFreeNames root_ul_bco)
     needed_mods :: [Module]
     needed_mods = [ nameModule n | n <- free_names,
                     isExternalName n,      -- Names from other modules
                     not (isWiredInName n)  -- Exclude wired-in names
                   ] -- (see note below)
     -- Exclude wired-in names because we may not have read
     -- their interface files, so getLinkDeps will fail
     -- All wired-in names are in the base package, which we link
     -- by default, so we can safely ignore them here.
-- | Abort with a fatal 'ProgramError', rendering the message at the
-- given source span.
dieWith :: DynFlags -> SrcSpan -> MsgDoc -> IO a
dieWith dflags span msg =
  throwGhcExceptionIO
    (ProgramError (showSDoc dflags (mkLocMessage SevFatal span msg)))
-- | Decide whether object files built the way this compilation is
-- configured can be loaded by the in-process linker.  Returns @Nothing@
-- if they can be loaded directly, or @Just suffix@ naming the object
-- suffix of "normal"-way objects that must be looked for instead.
checkNonStdWay :: DynFlags -> SrcSpan -> IO (Maybe FilePath)
checkNonStdWay dflags srcspan
  | gopt Opt_ExternalInterpreter dflags = return Nothing
    -- with -fexternal-interpreter we load the .o files, whatever way
    -- they were built.  If they were built for a non-std way, then
    -- we will use the appropriate variant of the iserv binary to load them.
  | interpWays == haskellWays = return Nothing
    -- Only if we are compiling with the same ways as GHC is built
    -- with, can we dynamically load those object files. (see #3604)
  | objectSuf dflags == normalObjectSuffix && not (null haskellWays)
      = failNonStd dflags srcspan
  | otherwise = return (Just (interpTag ++ "o"))
    -- Otherwise, look for objects with the interpreter's way-tagged suffix.
  where
    haskellWays = filter (not . wayRTSOnly) (ways dflags)
    interpTag = case mkBuildTag interpWays of
                  "" -> ""
                  tag -> tag ++ "_"
-- | The object-file suffix produced by a "normal" (vanilla-way) compile;
-- these are the objects the in-process linker can always load.
normalObjectSuffix :: String
normalObjectSuffix = phaseInputExt StopLn
-- | Abort: the requested objects were built a different way than GHC
-- itself, so the in-process linker cannot load them.  The message
-- suggests -fexternal-interpreter or a second, differently-suffixed
-- build as remedies.
failNonStd :: DynFlags -> SrcSpan -> IO (Maybe FilePath)
failNonStd dflags srcspan = dieWith dflags srcspan $
  text "Cannot load" <+> compWay <+>
     text "objects when GHC is built" <+> ghciWay $$
  text "To fix this, either:" $$
  text "  (1) Use -fexternal-interpreter, or" $$
  text "  (2) Build the program twice: once" <+>
                       ghciWay <> text ", and then" $$
  text "      with" <+> compWay <+>
     text "using -osuf to set a different object file suffix."
    where compWay
            | WayDyn `elem` ways dflags = text "-dynamic"
            | WayProf `elem` ways dflags = text "-prof"
            | otherwise = text "normal"
          ghciWay
            | dynamicGhc = text "with -dynamic"
            | rtsIsProfiled = text "with -prof"
            | otherwise = text "the normal way"
-- | Compute which home-package linkables and which packages must be
-- linked before the given modules can be used, excluding anything
-- already loaded in the linker state.
getLinkDeps :: HscEnv -> HomePackageTable
            -> PersistentLinkerState
            -> Maybe FilePath -- replace object suffices?
            -> SrcSpan -- for error messages
            -> [Module] -- If you need these
            -> IO ([Linkable], [InstalledUnitId]) -- ... then link these first
-- Fails with an IO exception if it can't find enough files
getLinkDeps hsc_env hpt pls replace_osuf span mods
-- Find all the packages and linkables that a set of modules depends on
 = do {
        -- 1.  Find the dependent home-pkg-modules/packages from each iface
        -- (omitting modules from the interactive package, which is already linked)
      ; (mods_s, pkgs_s) <- follow_deps (filterOut isInteractiveModule mods)
                                        emptyUniqDSet emptyUniqDSet;
      ; let {
        -- 2.  Exclude ones already linked
        --      Main reason: avoid findModule calls in get_linkable
            mods_needed = mods_s `minusList` linked_mods     ;
            pkgs_needed = pkgs_s `minusList` pkgs_loaded pls ;
            linked_mods = map (moduleName.linkableModule)
                              (objs_loaded pls ++ bcos_loaded pls) }
        -- 3.  For each dependent module, find its linkable
        --     This will either be in the HPT or (in the case of one-shot
        --     compilation) we may need to use maybe_getFileLinkable
      ; let { osuf = objectSuf dflags }
      ; lnks_needed <- mapM (get_linkable osuf) mods_needed
      ; return (lnks_needed, pkgs_needed) }
  where
    dflags = hsc_dflags hsc_env
    this_pkg = thisPackage dflags
        -- The ModIface contains the transitive closure of the module dependencies
        -- within the current package, *except* for boot modules: if we encounter
        -- a boot module, we have to find its real interface and discover the
        -- dependencies of that.  Hence we need to traverse the dependency
        -- tree recursively.  See bug #936, testcase ghci/prog007.
    follow_deps :: [Module]             -- modules to follow
                -> UniqDSet ModuleName         -- accum. module dependencies
                -> UniqDSet InstalledUnitId          -- accum. package dependencies
                -> IO ([ModuleName], [InstalledUnitId]) -- result
    follow_deps []     acc_mods acc_pkgs
        = return (uniqDSetToList acc_mods, uniqDSetToList acc_pkgs)
    follow_deps (mod:mods) acc_mods acc_pkgs
        = do
          mb_iface <- initIfaceCheck (text "getLinkDeps") hsc_env $
                        loadInterface msg mod (ImportByUser False)
          iface <- case mb_iface of
                    Maybes.Failed err -> throwGhcExceptionIO (ProgramError (showSDoc dflags err))
                    Maybes.Succeeded iface -> return iface
          when (mi_boot iface) $ link_boot_mod_error mod
          let
            pkg = moduleUnitId mod
            deps  = mi_deps iface
            pkg_deps = dep_pkgs deps
            (boot_deps, mod_deps) = partitionWith is_boot (dep_mods deps)
                    where is_boot (m,True)  = Left m
                          is_boot (m,False) = Right m
            boot_deps' = filter (not . (`elementOfUniqDSet` acc_mods)) boot_deps
            acc_mods'  = addListToUniqDSet acc_mods (moduleName mod : mod_deps)
            acc_pkgs'  = addListToUniqDSet acc_pkgs $ map fst pkg_deps
          --
          -- External-package modules contribute a package dependency;
          -- home-package modules are followed recursively (via any boot
          -- modules not yet seen).
          if pkg /= this_pkg
             then follow_deps mods acc_mods (addOneToUniqDSet acc_pkgs' (toInstalledUnitId pkg))
             else follow_deps (map (mkModule this_pkg) boot_deps' ++ mods)
                              acc_mods' acc_pkgs'
        where
            msg = text "need to link module" <+> ppr mod <+>
                  text "due to use of Template Haskell"
    link_boot_mod_error mod =
        throwGhcExceptionIO (ProgramError (showSDoc dflags (
            text "module" <+> ppr mod <+>
            text "cannot be linked; it is only available as a boot module")))
    no_obj :: Outputable a => a -> IO b
    no_obj mod = dieWith dflags span $
                     text "cannot find object file for module " <>
                        quotes (ppr mod) $$
                     while_linking_expr
    while_linking_expr = text "while linking an interpreted expression"
        -- This one is a build-system bug
    get_linkable osuf mod_name      -- A home-package module
        | Just mod_info <- lookupHpt hpt mod_name
        = adjust_linkable (Maybes.expectJust "getLinkDeps" (hm_linkable mod_info))
        | otherwise
        = do    -- It's not in the HPT because we are in one shot mode,
                -- so use the Finder to get a ModLocation...
             mb_stuff <- findHomeModule hsc_env mod_name
             case mb_stuff of
                  Found loc mod -> found loc mod
                  _ -> no_obj mod_name
        where
            found loc mod = do {
                -- ...and then find the linkable for it
               mb_lnk <- findObjectLinkableMaybe mod loc ;
               case mb_lnk of {
                  Nothing  -> no_obj mod ;
                  Just lnk -> adjust_linkable lnk
              }}
            -- When linking a non-standard way, swap each object file's
            -- suffix for the "normal"-way suffix supplied by the caller.
            adjust_linkable lnk
                | Just new_osuf <- replace_osuf = do
                        new_uls <- mapM (adjust_ul new_osuf)
                                        (linkableUnlinked lnk)
                        return lnk{ linkableUnlinked=new_uls }
                | otherwise =
                        return lnk
            adjust_ul new_osuf (DotO file) = do
                MASSERT(osuf `isSuffixOf` file)
                let file_base = fromJust (stripExtension osuf file)
                    new_file = file_base <.> new_osuf
                ok <- doesFileExist new_file
                if (not ok)
                   then dieWith dflags span $
                          text "cannot find object file "
                                <> quotes (text new_file) $$ while_linking_expr
                   else return (DotO new_file)
            adjust_ul _ (DotA fp) = panic ("adjust_ul DotA " ++ show fp)
            adjust_ul _ (DotDLL fp) = panic ("adjust_ul DotDLL " ++ show fp)
            adjust_ul _ l@(BCOs {}) = return l
{- **********************************************************************
Loading a Decls statement
********************************************************************* -}
-- | Link a group of byte-code declarations (e.g. from a GHCi @:load@ of
-- declarations), first linking any packages and modules they depend on,
-- then extending the closure and info-table environments with the new
-- bindings.
linkDecls :: HscEnv -> SrcSpan -> CompiledByteCode -> IO ()
linkDecls hsc_env span cbc@CompiledByteCode{..} = do
    -- Initialise the linker (if it's not been done already)
    initDynLinker hsc_env
    -- Take lock for the actual work.
    modifyPLS $ \pls0 -> do
    -- Link the packages and modules required
    (pls, ok) <- linkDependencies hsc_env pls0 span needed_mods
    if failed ok
      then throwGhcExceptionIO (ProgramError "")
      else do
    -- Link the expression itself
    let ie = plusNameEnv (itbl_env pls) bc_itbls
        ce = closure_env pls
    -- Link the necessary packages and linkables
    new_bindings <- linkSomeBCOs hsc_env ie ce [cbc]
    nms_fhvs <- makeForeignNamedHValueRefs hsc_env new_bindings
    let pls2 = pls { closure_env = extendClosureEnv ce nms_fhvs
                   , itbl_env = ie }
    return (pls2, ())
  where
    free_names = uniqDSetToList $
      foldr (unionUniqDSets . bcoFreeNames) emptyUniqDSet bc_bcos
    needed_mods :: [Module]
    needed_mods = [ nameModule n | n <- free_names,
                    isExternalName n,      -- Names from other modules
                    not (isWiredInName n)  -- Exclude wired-in names
                  ] -- (see note below)
    -- Exclude wired-in names because we may not have read
    -- their interface files, so getLinkDeps will fail
    -- All wired-in names are in the base package, which we link
    -- by default, so we can safely ignore them here.
{- **********************************************************************
Loading a single module
********************************************************************* -}
-- | Ensure that a single module, together with everything it depends
-- on, is linked; throws 'ProgramError' on failure.
linkModule :: HscEnv -> Module -> IO ()
linkModule hsc_env mod = do
  initDynLinker hsc_env
  modifyPLS_ $ \pls -> do
    (pls', ok) <- linkDependencies hsc_env pls noSrcSpan [mod]
    when (failed ok) $
      throwGhcExceptionIO (ProgramError "could not link module")
    return pls'
{- **********************************************************************
Link some linkables
The linkables may consist of a mixture of
byte-code modules and object modules
********************************************************************* -}
-- | Link a mixed batch of linkables: all object code first (objects can
-- never depend on byte code), then the byte-code modules.  Runs masked
-- so ^C cannot leave the linker state half-updated.
linkModules :: HscEnv -> PersistentLinkerState -> [Linkable]
            -> IO (PersistentLinkerState, SuccessFlag)
linkModules hsc_env pls linkables
  = mask_ $ do
      let (objs, bcos) =
            partition isObjectLinkable (concatMap partitionLinkable linkables)
      (pls1, ok_flag) <- dynLinkObjs hsc_env pls objs
      if failed ok_flag
        then return (pls1, Failed)
        else do pls2 <- dynLinkBCOs hsc_env pls1 bcos
                return (pls2, Succeeded)
-- HACK to support f-x-dynamic in the interpreter; no other purpose.
-- A linkable holding both object and byte-code parts is split into two
-- linkables, one per kind; anything else passes through unchanged.
partitionLinkable :: Linkable -> [Linkable]
partitionLinkable li
  | not (null objs) && not (null bcos)
  = [ li { linkableUnlinked = objs }
    , li { linkableUnlinked = bcos } ]
  | otherwise
  = [li]
  where
    uls  = linkableUnlinked li
    objs = filter isObject uls
    bcos = filter isInterpretable uls
-- | Find the linkable for a module in a list, if present.  A module may
-- appear at most once; more than one occurrence is a panic.
findModuleLinkable_maybe :: [Linkable] -> Module -> Maybe Linkable
findModuleLinkable_maybe lis mod =
  case filter ((== mod) . linkableModule) lis of
    []   -> Nothing
    [li] -> Just li
    _    -> pprPanic "findModuleLinkable" (ppr mod)
-- | Is this exact linkable (same module AND same timestamp) already in
-- the given set?  A stale linkable for the same module does not count.
linkableInSet :: Linkable -> [Linkable] -> Bool
linkableInSet l objs_loaded =
  case findModuleLinkable_maybe objs_loaded (linkableModule l) of
    Just m  -> linkableTime l == linkableTime m
    Nothing -> False
{- **********************************************************************
The object-code linker
********************************************************************* -}
-- | Load a batch of object-code linkables.  Duplicates of already-loaded
-- objects are skipped; on a resolution failure everything just loaded is
-- unloaded again via 'unload_wkr'.
dynLinkObjs :: HscEnv -> PersistentLinkerState -> [Linkable]
            -> IO (PersistentLinkerState, SuccessFlag)
dynLinkObjs hsc_env pls objs = do
        -- Load the object files and link them
        let (objs_loaded', new_objs) = rmDupLinkables (objs_loaded pls) objs
            pls1                     = pls { objs_loaded = objs_loaded' }
            unlinkeds                = concatMap linkableUnlinked new_objs
            wanted_objs              = map nameOfObject unlinkeds
        if interpreterDynamic (hsc_dflags hsc_env)
            then do pls2 <- dynLoadObjs hsc_env pls1 wanted_objs
                    return (pls2, Succeeded)
            else do mapM_ (loadObj hsc_env) wanted_objs
                    -- Link them all together
                    ok <- resolveObjs hsc_env
                    -- If resolving failed, unload all our
                    -- object modules and carry on
                    if succeeded ok then do
                            return (pls1, Succeeded)
                      else do
                            pls2 <- unload_wkr hsc_env [] pls1
                            return (pls2, Failed)
-- | Load object files when GHC itself is dynamically linked: the objects
-- are first linked into a fresh temporary shared library (so the system
-- dynamic loader can handle them), which is then loaded with 'loadDLL'.
-- The temporary library's path/name is remembered in 'temp_sos' so later
-- calls can link against it.
dynLoadObjs :: HscEnv -> PersistentLinkerState -> [FilePath]
            -> IO PersistentLinkerState
dynLoadObjs _       pls []   = return pls
dynLoadObjs hsc_env pls objs = do
    let dflags = hsc_dflags hsc_env
    let platform = targetPlatform dflags
    let minus_ls = [ lib | Option ('-':'l':lib) <- ldInputs dflags ]
    let minus_big_ls = [ lib | Option ('-':'L':lib) <- ldInputs dflags ]
    (soFile, libPath , libName) <-
      newTempLibName dflags TFL_CurrentModule (soExt platform)
    let
        dflags2 = dflags {
                      -- We don't want the original ldInputs in
                      -- (they're already linked in), but we do want
                      -- to link against previous dynLoadObjs
                      -- libraries if there were any, so that the linker
                      -- can resolve dependencies when it loads this
                      -- library.
                      ldInputs =
                        concatMap
                            (\(lp, l) ->
                                 [ Option ("-L" ++ lp)
                                 , Option "-Xlinker"
                                 , Option "-rpath"
                                 , Option "-Xlinker"
                                 , Option lp
                                 , Option ("-l" ++ l)
                                 ])
                            (temp_sos pls)
                        ++ concatMap
                             (\lp ->
                                 [ Option ("-L" ++ lp)
                                 , Option "-Xlinker"
                                 , Option "-rpath"
                                 , Option "-Xlinker"
                                 , Option lp
                                 ])
                             minus_big_ls
                        -- See Note [-Xlinker -rpath vs -Wl,-rpath]
                        ++ map (\l -> Option ("-l" ++ l)) minus_ls,
                      -- Add -l options and -L options from dflags.
                      --
                      -- When running TH for a non-dynamic way, we still
                      -- need to make -l flags to link against the dynamic
                      -- libraries, so we need to add WayDyn to ways.
                      --
                      -- Even if we're e.g. profiling, we still want
                      -- the vanilla dynamic libraries, so we set the
                      -- ways / build tag to be just WayDyn.
                      ways = [WayDyn],
                      buildTag = mkBuildTag [WayDyn],
                      outputFile = Just soFile
                  }
    -- link all "loaded packages" so symbols in those can be resolved
    -- Note: We are loading packages with local scope, so to see the
    -- symbols in this link we must link all loaded packages again.
    linkDynLib dflags2 objs (pkgs_loaded pls)
    -- if we got this far, extend the lifetime of the library file
    changeTempFilesLifetime dflags TFL_GhcSession [soFile]
    m <- loadDLL hsc_env soFile
    case m of
        Nothing -> return pls { temp_sos = (libPath, libName) : temp_sos pls }
        Just err -> panic ("Loading temp shared object failed: " ++ err)
-- | Partition a batch of requested linkables against the set already
-- loaded: returns the extended loaded set together with only those
-- requested linkables that were genuinely new (duplicates dropped).
rmDupLinkables :: [Linkable] -- Already loaded
               -> [Linkable] -- New linkables
               -> ([Linkable], -- New loaded set (including new ones)
                   [Linkable]) -- New linkables (excluding dups)
rmDupLinkables already ls = walk already [] ls
  where
    walk loaded fresh []       = (loaded, fresh)
    walk loaded fresh (l:rest)
      | l `linkableInSet` loaded = walk loaded fresh rest
      | otherwise                = walk (l:loaded) (l:fresh) rest
{- **********************************************************************
The byte-code linker
********************************************************************* -}
-- | Link a batch of byte-code linkables: resolve their BCOs against the
-- accumulated info-table and closure environments, then extend the
-- closure environment with the resulting (external) bindings.
dynLinkBCOs :: HscEnv -> PersistentLinkerState -> [Linkable]
            -> IO PersistentLinkerState
dynLinkBCOs hsc_env pls bcos = do
        let (bcos_loaded', new_bcos) = rmDupLinkables (bcos_loaded pls) bcos
            pls1                     = pls { bcos_loaded = bcos_loaded' }
            unlinkeds :: [Unlinked]
            unlinkeds                = concatMap linkableUnlinked new_bcos
            cbcs :: [CompiledByteCode]
            cbcs      = map byteCodeOfObject unlinkeds
            ies        = map bc_itbls cbcs
            gce       = closure_env pls
            final_ie  = foldr plusNameEnv (itbl_env pls) ies
        names_and_refs <- linkSomeBCOs hsc_env final_ie gce cbcs
        -- We only want to add the external ones to the ClosureEnv
        let (to_add, to_drop) = partition (isExternalName.fst) names_and_refs
        -- Immediately release any HValueRefs we're not going to add
        freeHValueRefs hsc_env (map snd to_drop)
        -- Wrap finalizers on the ones we want to keep
        new_binds <- makeForeignNamedHValueRefs hsc_env to_add
        return pls1 { closure_env = extendClosureEnv gce new_binds,
                      itbl_env    = final_ie }
-- Link a bunch of BCOs and return references to their values
-- | Link a bunch of BCOs and return references to their values.
-- The foldr over 'fun' is written in continuation-passing style so that
-- each module's break array stays alive (via 'withForeignRef') for the
-- whole of 'do_link'.
linkSomeBCOs :: HscEnv
             -> ItblEnv
             -> ClosureEnv
             -> [CompiledByteCode]
             -> IO [(Name,HValueRef)]
                        -- The returned HValueRefs are associated 1-1 with
                        -- the incoming unlinked BCOs.  Each gives the
                        -- value of the corresponding unlinked BCO
linkSomeBCOs hsc_env ie ce mods = foldr fun do_link mods []
 where
  fun CompiledByteCode{..} inner accum =
    case bc_breaks of
      Nothing -> inner ((panic "linkSomeBCOs: no break array", bc_bcos) : accum)
      Just mb -> withForeignRef (modBreaks_flags mb) $ \breakarray ->
                   inner ((breakarray, bc_bcos) : accum)
  do_link [] = return []
  do_link mods = do
    let flat = [ (breakarray, bco) | (breakarray, bcos) <- mods, bco <- bcos ]
        names = map (unlinkedBCOName . snd) flat
        bco_ix = mkNameEnv (zip names [0..])
    resolved <- sequence [ linkBCO hsc_env ie ce bco_ix breakarray bco
                         | (breakarray, bco) <- flat ]
    hvrefs <- createBCOs hsc_env resolved
    return (zip names hvrefs)
-- | Useful to apply to the result of 'linkSomeBCOs': wrap each raw
-- 'HValueRef' in a finalized 'ForeignHValue', keeping the pairing
-- with its 'Name'.
makeForeignNamedHValueRefs
  :: HscEnv -> [(Name,HValueRef)] -> IO [(Name,ForeignHValue)]
makeForeignNamedHValueRefs hsc_env bindings =
  mapM (\(n, hvref) -> do fhv <- mkFinalizedHValue hsc_env hvref
                          return (n, fhv))
       bindings
{- **********************************************************************
Unload some object modules
********************************************************************* -}
-- ---------------------------------------------------------------------------
-- | Unloading old objects ready for a new compilation sweep.
--
-- The compilation manager provides us with a list of linkables that it
-- considers \"stable\", i.e. won't be recompiled this time around. For
-- each of the modules current linked in memory,
--
-- * if the linkable is stable (and it's the same one -- the user may have
-- recompiled the module on the side), we keep it,
--
-- * otherwise, we unload it.
--
-- * we also implicitly unload all temporary bindings at this point.
--
unload :: HscEnv
       -> [Linkable] -- ^ The linkables to *keep*.
       -> IO ()
unload hsc_env linkables
  = mask_ $ do  -- mask, so we're safe from Ctrl-C in here
      -- Initialise the linker (if it's not been done already)
      initDynLinker hsc_env
      new_pls <- modifyPLS $ \pls -> do
        pls1 <- unload_wkr hsc_env linkables pls
        return (pls1, pls1)
      let dflags = hsc_dflags hsc_env
      debugTraceMsg dflags 3 $
        text "unload: retaining objs" <+> ppr (objs_loaded new_pls)
      debugTraceMsg dflags 3 $
        text "unload: retaining bcos" <+> ppr (bcos_loaded new_pls)
-- | Core unload logic: drop every loaded linkable that is not in the
-- keep-set, purge the symbol cache if any object was unloaded, and
-- shrink the closure/info-table environments to names from retained
-- byte-code modules.
unload_wkr :: HscEnv
           -> [Linkable]                -- stable linkables
           -> PersistentLinkerState
           -> IO PersistentLinkerState
-- Does the core unload business
-- (the wrapper blocks exceptions and deals with the PLS get and put)
unload_wkr hsc_env keep_linkables pls = do
  let (objs_to_keep, bcos_to_keep) = partition isObjectLinkable keep_linkables
      discard keep l = not (linkableInSet l keep)
      (objs_to_unload, remaining_objs_loaded) =
         partition (discard objs_to_keep) (objs_loaded pls)
      (bcos_to_unload, remaining_bcos_loaded) =
         partition (discard bcos_to_keep) (bcos_loaded pls)
  mapM_ unloadObjs objs_to_unload
  mapM_ unloadObjs bcos_to_unload
  -- If we unloaded any object files at all, we need to purge the cache
  -- of lookupSymbol results.
  when (not (null (objs_to_unload ++
                   filter (not . null . linkableObjs) bcos_to_unload))) $
    purgeLookupSymbolCache hsc_env
  let bcos_retained = mkModuleSet $ map linkableModule remaining_bcos_loaded
      -- Note that we want to remove all *local*
      -- (i.e. non-isExternal) names too (these are the
      -- temporary bindings from the command line).
      keep_name (n,_) = isExternalName n &&
                        nameModule n `elemModuleSet` bcos_retained
      itbl_env'     = filterNameEnv keep_name (itbl_env pls)
      closure_env'  = filterNameEnv keep_name (closure_env pls)
      new_pls = pls { itbl_env = itbl_env',
                      closure_env = closure_env',
                      bcos_loaded = remaining_bcos_loaded,
                      objs_loaded = remaining_objs_loaded }
  return new_pls
  where
    unloadObjs :: Linkable -> IO ()
    unloadObjs lnk
      | dynamicGhc = return ()
        -- We don't do any cleanup when linking objects with the
        -- dynamic linker.  Doing so introduces extra complexity for
        -- not much benefit.
      | otherwise
      = mapM_ (unloadObj hsc_env) [f | DotO f <- linkableUnlinked lnk]
        -- The components of a BCO linkable may contain
        -- dot-o files.  Which is very confusing.
        --
        -- But the BCO parts can be unlinked just by
        -- letting go of them (plus of course depopulating
        -- the symbol table which is done in the main body)
{- **********************************************************************
Loading packages
********************************************************************* -}
-- | The kinds of library the preload machinery knows how to link:
-- static objects/archives, dynamic libraries (by unadorned name or by
-- path), and MacOS frameworks.
data LibrarySpec
   = Object FilePath -- Full path name of a .o file, including trailing .o
                     -- For dynamic objects only, try to find the object
                     -- file in all the directories specified in
                     -- v_Library_paths before giving up.
   | Archive FilePath -- Full path name of a .a file, including trailing .a
   | DLL String -- "Unadorned" name of a .DLL/.so
                --  e.g.    On unix     "qt"  denotes "libqt.so"
                --          On Windows  "burble"  denotes "burble.DLL" or "libburble.dll"
                --  loadDLL is platform-specific and adds the lib/.so/.DLL
                --  suffixes platform-dependently
   | DLLPath FilePath -- Absolute or relative pathname to a dynamic library
                      -- (ends with .dll or .so).
   | Framework String -- Only used for darwin, but does no harm
-- Packages whose DLLs ship as part of the GHCi binary itself: for those
-- we already have the right DLLs loaded, so we don't try to load them
-- again.
--
-- But on Win32 we must load them 'again'; doing so is a harmless no-op
-- as far as the loader is concerned, but it does initialise the list
-- of DLL handles that rts/Linker.c maintains, and that in turn is
-- used by lookupSymbol.  So we must call addDLL for each library
-- just to get the DLL handle into the list.
partOfGHCi :: [PackageName]
partOfGHCi =
  if isWindowsHost || isDarwinHost
    then []
    else map (PackageName . mkFastString)
             ["base", "template-haskell", "editline"]
-- | Render a 'LibrarySpec' for progress and diagnostic messages.
showLS :: LibrarySpec -> String
showLS spec = case spec of
  Object    nm -> "(static) " ++ nm
  Archive   nm -> "(static archive) " ++ nm
  DLL       nm -> "(dynamic) " ++ nm
  DLLPath   nm -> "(dynamic) " ++ nm
  Framework nm -> "(framework) " ++ nm
-- TODO: Make this type more precise
-- For now, a "linker unit" is identified by the installed unit id of
-- the package it belongs to.
type LinkerUnitId = InstalledUnitId
-- | Link exactly the specified packages, and their dependents (unless of
-- course they are already linked). The dependents are linked
-- automatically, and it doesn't matter what order you specify the input
-- packages.
--
linkPackages :: HscEnv -> [LinkerUnitId] -> IO ()
-- NOTE: in fact, since each module tracks all the packages it depends on,
-- we don't really need to use the package-config dependencies.
--
-- However we do need the package-config stuff (to find aux libs etc),
-- and following them lets us load libraries in the right order, which
-- perhaps makes the error message a bit more localised if we get a link
-- failure. So the dependency walking code is still here.
linkPackages hsc_env new_pkgs = do
-- It's probably not safe to try to load packages concurrently, so we take
-- a lock.
initDynLinker hsc_env
modifyPLS_ $ \pls -> do
linkPackages' hsc_env new_pkgs pls
linkPackages' :: HscEnv -> [LinkerUnitId] -> PersistentLinkerState
-> IO PersistentLinkerState
linkPackages' hsc_env new_pks pls = do
pkgs' <- link (pkgs_loaded pls) new_pks
return $! pls { pkgs_loaded = pkgs' }
where
dflags = hsc_dflags hsc_env
link :: [LinkerUnitId] -> [LinkerUnitId] -> IO [LinkerUnitId]
link pkgs new_pkgs =
foldM link_one pkgs new_pkgs
link_one pkgs new_pkg
| new_pkg `elem` pkgs -- Already linked
= return pkgs
| Just pkg_cfg <- lookupInstalledPackage dflags new_pkg
= do { -- Link dependents first
pkgs' <- link pkgs (depends pkg_cfg)
-- Now link the package itself
; linkPackage hsc_env pkg_cfg
; return (new_pkg : pkgs') }
| otherwise
= throwGhcExceptionIO (CmdLineError ("unknown package: " ++ unpackFS (installedUnitIdFS new_pkg)))
linkPackage :: HscEnv -> PackageConfig -> IO ()
linkPackage hsc_env pkg
= do
let dflags = hsc_dflags hsc_env
platform = targetPlatform dflags
dirs | interpreterDynamic dflags = Packages.libraryDynDirs pkg
| otherwise = Packages.libraryDirs pkg
let hs_libs = Packages.hsLibraries pkg
-- The FFI GHCi import lib isn't needed as
-- compiler/ghci/Linker.hs + rts/Linker.c link the
-- interpreted references to FFI to the compiled FFI.
-- We therefore filter it out so that we don't get
-- duplicate symbol errors.
hs_libs' = filter ("HSffi" /=) hs_libs
-- Because of slight differences between the GHC dynamic linker and
-- the native system linker some packages have to link with a
-- different list of libraries when using GHCi. Examples include: libs
-- that are actually gnu ld scripts, and the possibility that the .a
-- libs do not exactly match the .so/.dll equivalents. So if the
-- package file provides an "extra-ghci-libraries" field then we use
-- that instead of the "extra-libraries" field.
extra_libs =
(if null (Packages.extraGHCiLibraries pkg)
then Packages.extraLibraries pkg
else Packages.extraGHCiLibraries pkg)
++ [ lib | '-':'l':lib <- Packages.ldOptions pkg ]
-- See Note [Fork/Exec Windows]
gcc_paths <- getGCCPaths dflags (platformOS platform)
dirs_env <- addEnvPaths "LIBRARY_PATH" dirs
hs_classifieds
<- mapM (locateLib hsc_env True dirs_env gcc_paths) hs_libs'
extra_classifieds
<- mapM (locateLib hsc_env False dirs_env gcc_paths) extra_libs
let classifieds = hs_classifieds ++ extra_classifieds
-- Complication: all the .so's must be loaded before any of the .o's.
let known_dlls = [ dll | DLLPath dll <- classifieds ]
dlls = [ dll | DLL dll <- classifieds ]
objs = [ obj | Object obj <- classifieds ]
archs = [ arch | Archive arch <- classifieds ]
-- Add directories to library search paths
let dll_paths = map takeDirectory known_dlls
all_paths = nub $ map normalise $ dll_paths ++ dirs
all_paths_env <- addEnvPaths "LD_LIBRARY_PATH" all_paths
pathCache <- mapM (addLibrarySearchPath hsc_env) all_paths_env
maybePutStr dflags
("Loading package " ++ sourcePackageIdString pkg ++ " ... ")
-- See comments with partOfGHCi
when (packageName pkg `notElem` partOfGHCi) $ do
loadFrameworks hsc_env platform pkg
mapM_ (load_dyn hsc_env)
(known_dlls ++ map (mkSOName platform) dlls)
-- After loading all the DLLs, we can load the static objects.
-- Ordering isn't important here, because we do one final link
-- step to resolve everything.
mapM_ (loadObj hsc_env) objs
mapM_ (loadArchive hsc_env) archs
maybePutStr dflags "linking ... "
ok <- resolveObjs hsc_env
-- DLLs are loaded, reset the search paths
-- Import libraries will be loaded via loadArchive so only
-- reset the DLL search path after all archives are loaded
-- as well.
mapM_ (removeLibrarySearchPath hsc_env) $ reverse pathCache
if succeeded ok
then maybePutStrLn dflags "done."
else let errmsg = "unable to load package `"
++ sourcePackageIdString pkg ++ "'"
in throwGhcExceptionIO (InstallationError errmsg)
-- we have already searched the filesystem; the strings passed to load_dyn
-- can be passed directly to loadDLL. They are either fully-qualified
-- ("/usr/lib/libfoo.so"), or unqualified ("libfoo.so"). In the latter case,
-- loadDLL is going to search the system paths to find the library.
--
load_dyn :: HscEnv -> FilePath -> IO ()
load_dyn hsc_env dll = do
r <- loadDLL hsc_env dll
case r of
Nothing -> return ()
Just err -> throwGhcExceptionIO (CmdLineError ("can't load .so/.DLL for: "
++ dll ++ " (" ++ err ++ ")" ))
loadFrameworks :: HscEnv -> Platform -> PackageConfig -> IO ()
loadFrameworks hsc_env platform pkg
= when (platformUsesFrameworks platform) $ mapM_ load frameworks
where
fw_dirs = Packages.frameworkDirs pkg
frameworks = Packages.frameworks pkg
load fw = do r <- loadFramework hsc_env fw_dirs fw
case r of
Nothing -> return ()
Just err -> throwGhcExceptionIO (CmdLineError ("can't load framework: "
++ fw ++ " (" ++ err ++ ")" ))
-- Try to find an object file for a given library in the given paths.
-- If it isn't present, we assume that addDLL in the RTS can find it,
-- which generally means that it should be a dynamic library in the
-- standard system search path.
-- For GHCi we tend to prefer dynamic libraries over static ones as
-- they are easier to load and manage, have less overhead.
locateLib :: HscEnv -> Bool -> [FilePath] -> [FilePath] -> String
-> IO LibrarySpec
locateLib hsc_env is_hs lib_dirs gcc_dirs lib
| not is_hs
-- For non-Haskell libraries (e.g. gmp, iconv):
-- first look in library-dirs for a dynamic library (on User paths only)
-- (libfoo.so)
-- then try looking for import libraries on Windows (on User paths only)
-- (.dll.a, .lib)
-- first look in library-dirs for a dynamic library (on GCC paths only)
-- (libfoo.so)
-- then check for system dynamic libraries (e.g. kernel32.dll on windows)
-- then try looking for import libraries on Windows (on GCC paths only)
-- (.dll.a, .lib)
-- then look in library-dirs for a static library (libfoo.a)
-- then look in library-dirs and inplace GCC for a dynamic library (libfoo.so)
-- then try looking for import libraries on Windows (.dll.a, .lib)
-- then look in library-dirs and inplace GCC for a static library (libfoo.a)
-- then try "gcc --print-file-name" to search gcc's search path
-- for a dynamic library (#5289)
-- otherwise, assume loadDLL can find it
--
-- The logic is a bit complicated, but the rationale behind it is that
-- loading a shared library for us is O(1) while loading an archive is
-- O(n). Loading an import library is also O(n) so in general we prefer
-- shared libraries because they are simpler and faster.
--
= findDll user `orElse`
tryImpLib user `orElse`
findDll gcc `orElse`
findSysDll `orElse`
tryImpLib gcc `orElse`
findArchive `orElse`
tryGcc `orElse`
assumeDll
| loading_dynamic_hs_libs -- search for .so libraries first.
= findHSDll `orElse`
findDynObject `orElse`
assumeDll
| loading_profiled_hs_libs -- only a libHSfoo_p.a archive will do.
= findArchive `orElse`
assumeDll
| otherwise
-- HSfoo.o is the best, but only works for the normal way
-- libHSfoo.a is the backup option.
= findObject `orElse`
findArchive `orElse`
assumeDll
where
dflags = hsc_dflags hsc_env
dirs = lib_dirs ++ gcc_dirs
gcc = False
user = True
obj_file = lib <.> "o"
dyn_obj_file = lib <.> "dyn_o"
arch_files = [ "lib" ++ lib ++ lib_tag <.> "a"
, lib <.> "a" -- native code has no lib_tag
, "lib" ++ lib, lib
]
lib_tag = if is_hs && loading_profiled_hs_libs then "_p" else ""
loading_profiled_hs_libs = interpreterProfiled dflags
loading_dynamic_hs_libs = interpreterDynamic dflags
import_libs = [ lib <.> "lib" , "lib" ++ lib <.> "lib"
, "lib" ++ lib <.> "dll.a", lib <.> "dll.a"
]
hs_dyn_lib_name = lib ++ '-':programName dflags ++ projectVersion dflags
hs_dyn_lib_file = mkHsSOName platform hs_dyn_lib_name
so_name = mkSOName platform lib
lib_so_name = "lib" ++ so_name
dyn_lib_file = case (arch, os) of
(ArchX86_64, OSSolaris2) -> "64" </> so_name
_ -> so_name
findObject = liftM (fmap Object) $ findFile dirs obj_file
findDynObject = liftM (fmap Object) $ findFile dirs dyn_obj_file
findArchive = let local name = liftM (fmap Archive) $ findFile dirs name
in apply (map local arch_files)
findHSDll = liftM (fmap DLLPath) $ findFile dirs hs_dyn_lib_file
findDll re = let dirs' = if re == user then lib_dirs else gcc_dirs
in liftM (fmap DLLPath) $ findFile dirs' dyn_lib_file
findSysDll = fmap (fmap $ DLL . dropExtension . takeFileName) $
findSystemLibrary hsc_env so_name
tryGcc = let search = searchForLibUsingGcc dflags
dllpath = liftM (fmap DLLPath)
short = dllpath $ search so_name lib_dirs
full = dllpath $ search lib_so_name lib_dirs
gcc name = liftM (fmap Archive) $ search name lib_dirs
files = import_libs ++ arch_files
in apply $ short : full : map gcc files
tryImpLib re = case os of
OSMinGW32 ->
let dirs' = if re == user then lib_dirs else gcc_dirs
implib name = liftM (fmap Archive) $
findFile dirs' name
in apply (map implib import_libs)
_ -> return Nothing
assumeDll = return (DLL lib)
infixr `orElse`
f `orElse` g = f >>= maybe g return
apply [] = return Nothing
apply (x:xs) = do x' <- x
if isJust x'
then return x'
else apply xs
platform = targetPlatform dflags
arch = platformArch platform
os = platformOS platform
searchForLibUsingGcc :: DynFlags -> String -> [FilePath] -> IO (Maybe FilePath)
searchForLibUsingGcc dflags so dirs = do
-- GCC does not seem to extend the library search path (using -L) when using
-- --print-file-name. So instead pass it a new base location.
str <- askLd dflags (map (FileOption "-B") dirs
++ [Option "--print-file-name", Option so])
let file = case lines str of
[] -> ""
l:_ -> l
if (file == so)
then return Nothing
else return (Just file)
-- | Retrieve the list of search directory GCC and the System use to find
-- libraries and components. See Note [Fork/Exec Windows].
getGCCPaths :: DynFlags -> OS -> IO [FilePath]
getGCCPaths dflags os
= case os of
OSMinGW32 ->
do gcc_dirs <- getGccSearchDirectory dflags "libraries"
sys_dirs <- getSystemDirectories
return $ nub $ gcc_dirs ++ sys_dirs
_ -> return []
-- | Cache for the GCC search directories as this can't easily change
-- during an invocation of GHC. (Maybe with some env. variable but we'll)
-- deal with that highly unlikely scenario then.
{-# NOINLINE gccSearchDirCache #-}
gccSearchDirCache :: IORef [(String, [String])]
gccSearchDirCache = unsafePerformIO $ newIORef []
-- Note [Fork/Exec Windows]
-- ~~~~~~~~~~~~~~~~~~~~~~~~
-- fork/exec is expensive on Windows, for each time we ask GCC for a library we
-- have to eat the cost of af least 3 of these: gcc -> real_gcc -> cc1.
-- So instead get a list of location that GCC would search and use findDirs
-- which hopefully is written in an optimized mannor to take advantage of
-- caching. At the very least we remove the overhead of the fork/exec and waits
-- which dominate a large percentage of startup time on Windows.
getGccSearchDirectory :: DynFlags -> String -> IO [FilePath]
getGccSearchDirectory dflags key = do
cache <- readIORef gccSearchDirCache
case lookup key cache of
Just x -> return x
Nothing -> do
str <- askLd dflags [Option "--print-search-dirs"]
let line = dropWhile isSpace str
name = key ++ ": ="
if null line
then return []
else do let val = split $ find name line
dirs <- filterM doesDirectoryExist val
modifyIORef' gccSearchDirCache ((key, dirs):)
return val
where split :: FilePath -> [FilePath]
split r = case break (==';') r of
(s, [] ) -> [s]
(s, (_:xs)) -> s : split xs
find :: String -> String -> String
find r x = let lst = lines x
val = filter (r `isPrefixOf`) lst
in if null val
then []
else case break (=='=') (head val) of
(_ , []) -> []
(_, (_:xs)) -> xs
-- | Get a list of system search directories, this to alleviate pressure on
-- the findSysDll function.
getSystemDirectories :: IO [FilePath]
#if defined(mingw32_HOST_OS)
getSystemDirectories = fmap (:[]) getSystemDirectory
#else
getSystemDirectories = return []
#endif
-- | Merge the given list of paths with those in the environment variable
-- given. If the variable does not exist then just return the identity.
addEnvPaths :: String -> [String] -> IO [String]
addEnvPaths name list
= do -- According to POSIX (chapter 8.3) a zero-length prefix means current
-- working directory. Replace empty strings in the env variable with
-- `working_dir` (see also #14695).
working_dir <- getCurrentDirectory
values <- lookupEnv name
case values of
Nothing -> return list
Just arr -> return $ list ++ splitEnv working_dir arr
where
splitEnv :: FilePath -> String -> [String]
splitEnv working_dir value =
case break (== envListSep) value of
(x, [] ) ->
[if null x then working_dir else x]
(x, (_:xs)) ->
(if null x then working_dir else x) : splitEnv working_dir xs
#if defined(mingw32_HOST_OS)
envListSep = ';'
#else
envListSep = ':'
#endif
-- ----------------------------------------------------------------------------
-- Loading a dynamic library (dlopen()-ish on Unix, LoadLibrary-ish on Win32)
-- Darwin / MacOS X only: load a framework
-- a framework is a dynamic library packaged inside a directory of the same
-- name. They are searched for in different paths than normal libraries.
loadFramework :: HscEnv -> [FilePath] -> FilePath -> IO (Maybe String)
loadFramework hsc_env extraPaths rootname
= do { either_dir <- tryIO getHomeDirectory
; let homeFrameworkPath = case either_dir of
Left _ -> []
Right dir -> [dir </> "Library/Frameworks"]
ps = extraPaths ++ homeFrameworkPath ++ defaultFrameworkPaths
; mb_fwk <- findFile ps fwk_file
; case mb_fwk of
Just fwk_path -> loadDLL hsc_env fwk_path
Nothing -> return (Just "not found") }
-- Tried all our known library paths, but dlopen()
-- has no built-in paths for frameworks: give up
where
fwk_file = rootname <.> "framework" </> rootname
-- sorry for the hardcoded paths, I hope they won't change anytime soon:
defaultFrameworkPaths = ["/Library/Frameworks", "/System/Library/Frameworks"]
{- **********************************************************************
Helper functions
********************************************************************* -}
maybePutStr :: DynFlags -> String -> IO ()
maybePutStr dflags s
= when (verbosity dflags > 1) $
putLogMsg dflags
NoReason
SevInteractive
noSrcSpan
(defaultUserStyle dflags)
(text s)
maybePutStrLn :: DynFlags -> String -> IO ()
maybePutStrLn dflags s = maybePutStr dflags (s ++ "\n")
| shlevy/ghc | compiler/ghci/Linker.hs | bsd-3-clause | 65,205 | 11 | 24 | 20,215 | 11,713 | 5,998 | 5,715 | -1 | -1 |
{-|
Module : Numeric.MixedType.FieldSpec
Description : hspec tests for multiplication and exponentiation
Copyright : (c) Michal Konecny
License : BSD3
Maintainer : mikkonecny@gmail.com
Stability : experimental
Portability : portable
-}
module Numeric.MixedTypes.FieldSpec (spec) where
import MixedTypesNumPrelude
import Test.Hspec
spec :: Spec
spec = do
specCanDivNotMixed tInt
specCanDivNotMixed tInteger
specCanDivNotMixed tRational
specCanDiv tInt tInteger
specCanDiv tRational tInteger
| michalkonecny/mixed-types-num | test/Numeric/MixedTypes/FieldSpec.hs | bsd-3-clause | 554 | 0 | 7 | 120 | 65 | 32 | 33 | 10 | 1 |
{-# language CPP #-}
-- No documentation found for Chapter "Promoted_From_VK_EXT_pipeline_creation_feedback"
module Vulkan.Core13.Promoted_From_VK_EXT_pipeline_creation_feedback ( PipelineCreationFeedback(..)
, PipelineCreationFeedbackCreateInfo(..)
, StructureType(..)
, PipelineCreationFeedbackFlagBits(..)
, PipelineCreationFeedbackFlags
) where
import Foreign.Marshal.Alloc (allocaBytes)
import Foreign.Ptr (nullPtr)
import Foreign.Ptr (plusPtr)
import Vulkan.CStruct (FromCStruct)
import Vulkan.CStruct (FromCStruct(..))
import Vulkan.CStruct (ToCStruct)
import Vulkan.CStruct (ToCStruct(..))
import Vulkan.Zero (Zero(..))
import Data.Typeable (Typeable)
import Foreign.Storable (Storable)
import Foreign.Storable (Storable(peek))
import Foreign.Storable (Storable(poke))
import qualified Foreign.Storable (Storable(..))
import GHC.Generics (Generic)
import Foreign.Ptr (Ptr)
import Data.Word (Word32)
import Data.Word (Word64)
import Data.Kind (Type)
import Vulkan.Core13.Enums.PipelineCreationFeedbackFlagBits (PipelineCreationFeedbackFlags)
import Vulkan.Core10.Enums.StructureType (StructureType)
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO))
import Vulkan.Core13.Enums.PipelineCreationFeedbackFlagBits (PipelineCreationFeedbackFlagBits(..))
import Vulkan.Core13.Enums.PipelineCreationFeedbackFlagBits (PipelineCreationFeedbackFlags)
import Vulkan.Core10.Enums.StructureType (StructureType(..))
-- | VkPipelineCreationFeedback - Feedback about the creation of a pipeline
-- or pipeline stage
--
-- = Description
--
-- If the
-- 'Vulkan.Core13.Enums.PipelineCreationFeedbackFlagBits.PIPELINE_CREATION_FEEDBACK_VALID_BIT'
-- is not set in @flags@, an implementation /must/ not set any other bits
-- in @flags@, and the values of all other 'PipelineCreationFeedback' data
-- members are undefined.
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_EXT_pipeline_creation_feedback VK_EXT_pipeline_creation_feedback>,
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_VERSION_1_3 VK_VERSION_1_3>,
-- 'PipelineCreationFeedbackCreateInfo',
-- 'Vulkan.Core13.Enums.PipelineCreationFeedbackFlagBits.PipelineCreationFeedbackFlagBits',
-- 'Vulkan.Core13.Enums.PipelineCreationFeedbackFlagBits.PipelineCreationFeedbackFlags'
data PipelineCreationFeedback = PipelineCreationFeedback
{ -- | @flags@ is a bitmask of
-- 'Vulkan.Core13.Enums.PipelineCreationFeedbackFlagBits.PipelineCreationFeedbackFlagBits'
-- providing feedback about the creation of a pipeline or of a pipeline
-- stage.
flags :: PipelineCreationFeedbackFlags
, -- | @duration@ is the duration spent creating a pipeline or pipeline stage
-- in nanoseconds.
duration :: Word64
}
deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (PipelineCreationFeedback)
#endif
deriving instance Show PipelineCreationFeedback
instance ToCStruct PipelineCreationFeedback where
withCStruct x f = allocaBytes 16 $ \p -> pokeCStruct p x (f p)
pokeCStruct p PipelineCreationFeedback{..} f = do
poke ((p `plusPtr` 0 :: Ptr PipelineCreationFeedbackFlags)) (flags)
poke ((p `plusPtr` 8 :: Ptr Word64)) (duration)
f
cStructSize = 16
cStructAlignment = 8
pokeZeroCStruct p f = do
poke ((p `plusPtr` 0 :: Ptr PipelineCreationFeedbackFlags)) (zero)
poke ((p `plusPtr` 8 :: Ptr Word64)) (zero)
f
instance FromCStruct PipelineCreationFeedback where
peekCStruct p = do
flags <- peek @PipelineCreationFeedbackFlags ((p `plusPtr` 0 :: Ptr PipelineCreationFeedbackFlags))
duration <- peek @Word64 ((p `plusPtr` 8 :: Ptr Word64))
pure $ PipelineCreationFeedback
flags duration
instance Storable PipelineCreationFeedback where
sizeOf ~_ = 16
alignment ~_ = 8
peek = peekCStruct
poke ptr poked = pokeCStruct ptr poked (pure ())
instance Zero PipelineCreationFeedback where
zero = PipelineCreationFeedback
zero
zero
-- | VkPipelineCreationFeedbackCreateInfo - Request for feedback about the
-- creation of a pipeline
--
-- = Description
--
-- An implementation /should/ write pipeline creation feedback to
-- @pPipelineCreationFeedback@ and /may/ write pipeline stage creation
-- feedback to @pPipelineStageCreationFeedbacks@. An implementation /must/
-- set or clear the
-- 'Vulkan.Core13.Enums.PipelineCreationFeedbackFlagBits.PIPELINE_CREATION_FEEDBACK_VALID_BIT'
-- in 'PipelineCreationFeedback'::@flags@ for @pPipelineCreationFeedback@
-- and every element of @pPipelineStageCreationFeedbacks@.
--
-- Note
--
-- One common scenario for an implementation to skip per-stage feedback is
-- when
-- 'Vulkan.Core13.Enums.PipelineCreationFeedbackFlagBits.PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT'
-- is set in @pPipelineCreationFeedback@.
--
-- When chained to
-- 'Vulkan.Extensions.VK_KHR_ray_tracing_pipeline.RayTracingPipelineCreateInfoKHR',
-- 'Vulkan.Extensions.VK_NV_ray_tracing.RayTracingPipelineCreateInfoNV', or
-- 'Vulkan.Core10.Pipeline.GraphicsPipelineCreateInfo', the @i@ element of
-- @pPipelineStageCreationFeedbacks@ corresponds to the @i@ element of
-- 'Vulkan.Extensions.VK_KHR_ray_tracing_pipeline.RayTracingPipelineCreateInfoKHR'::@pStages@,
-- 'Vulkan.Extensions.VK_NV_ray_tracing.RayTracingPipelineCreateInfoNV'::@pStages@,
-- or 'Vulkan.Core10.Pipeline.GraphicsPipelineCreateInfo'::@pStages@. When
-- chained to 'Vulkan.Core10.Pipeline.ComputePipelineCreateInfo', the first
-- element of @pPipelineStageCreationFeedbacks@ corresponds to
-- 'Vulkan.Core10.Pipeline.ComputePipelineCreateInfo'::@stage@.
--
-- == Valid Usage
--
-- - #VUID-VkPipelineCreationFeedbackCreateInfo-pipelineStageCreationFeedbackCount-02668#
-- When chained to 'Vulkan.Core10.Pipeline.GraphicsPipelineCreateInfo',
-- 'PipelineCreationFeedback'::@pipelineStageCreationFeedbackCount@
-- /must/ equal
-- 'Vulkan.Core10.Pipeline.GraphicsPipelineCreateInfo'::@stageCount@
--
-- - #VUID-VkPipelineCreationFeedbackCreateInfo-pipelineStageCreationFeedbackCount-02669#
-- When chained to 'Vulkan.Core10.Pipeline.ComputePipelineCreateInfo',
-- 'PipelineCreationFeedback'::@pipelineStageCreationFeedbackCount@
-- /must/ equal 1
--
-- - #VUID-VkPipelineCreationFeedbackCreateInfo-pipelineStageCreationFeedbackCount-02670#
-- When chained to
-- 'Vulkan.Extensions.VK_KHR_ray_tracing_pipeline.RayTracingPipelineCreateInfoKHR',
-- 'PipelineCreationFeedback'::@pipelineStageCreationFeedbackCount@
-- /must/ equal
-- 'Vulkan.Extensions.VK_KHR_ray_tracing_pipeline.RayTracingPipelineCreateInfoKHR'::@stageCount@
--
-- - #VUID-VkPipelineCreationFeedbackCreateInfo-pipelineStageCreationFeedbackCount-02969#
-- When chained to
-- 'Vulkan.Extensions.VK_NV_ray_tracing.RayTracingPipelineCreateInfoNV',
-- 'PipelineCreationFeedback'::@pipelineStageCreationFeedbackCount@
-- /must/ equal
-- 'Vulkan.Extensions.VK_NV_ray_tracing.RayTracingPipelineCreateInfoNV'::@stageCount@
--
-- == Valid Usage (Implicit)
--
-- - #VUID-VkPipelineCreationFeedbackCreateInfo-sType-sType# @sType@
-- /must/ be
-- 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO'
--
-- - #VUID-VkPipelineCreationFeedbackCreateInfo-pPipelineCreationFeedback-parameter#
-- @pPipelineCreationFeedback@ /must/ be a valid pointer to a
-- 'PipelineCreationFeedback' structure
--
-- - #VUID-VkPipelineCreationFeedbackCreateInfo-pPipelineStageCreationFeedbacks-parameter#
-- @pPipelineStageCreationFeedbacks@ /must/ be a valid pointer to an
-- array of @pipelineStageCreationFeedbackCount@
-- 'PipelineCreationFeedback' structures
--
-- - #VUID-VkPipelineCreationFeedbackCreateInfo-pipelineStageCreationFeedbackCount-arraylength#
-- @pipelineStageCreationFeedbackCount@ /must/ be greater than @0@
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_EXT_pipeline_creation_feedback VK_EXT_pipeline_creation_feedback>,
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_VERSION_1_3 VK_VERSION_1_3>,
-- 'Vulkan.Core10.Pipeline.ComputePipelineCreateInfo',
-- 'Vulkan.Core10.Pipeline.GraphicsPipelineCreateInfo',
-- 'PipelineCreationFeedback',
-- 'Vulkan.Extensions.VK_KHR_ray_tracing_pipeline.RayTracingPipelineCreateInfoKHR',
-- 'Vulkan.Extensions.VK_NV_ray_tracing.RayTracingPipelineCreateInfoNV',
-- 'Vulkan.Core10.Enums.StructureType.StructureType'
data PipelineCreationFeedbackCreateInfo = PipelineCreationFeedbackCreateInfo
{ -- | @pPipelineCreationFeedback@ is a pointer to a 'PipelineCreationFeedback'
-- structure.
pipelineCreationFeedback :: Ptr PipelineCreationFeedback
, -- | @pipelineStageCreationFeedbackCount@ is the number of elements in
-- @pPipelineStageCreationFeedbacks@.
pipelineStageCreationFeedbackCount :: Word32
, -- | @pPipelineStageCreationFeedbacks@ is a pointer to an array of
-- @pipelineStageCreationFeedbackCount@ 'PipelineCreationFeedback'
-- structures.
pipelineStageCreationFeedbacks :: Ptr PipelineCreationFeedback
}
deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (PipelineCreationFeedbackCreateInfo)
#endif
deriving instance Show PipelineCreationFeedbackCreateInfo
instance ToCStruct PipelineCreationFeedbackCreateInfo where
withCStruct x f = allocaBytes 40 $ \p -> pokeCStruct p x (f p)
pokeCStruct p PipelineCreationFeedbackCreateInfo{..} f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr (Ptr PipelineCreationFeedback))) (pipelineCreationFeedback)
poke ((p `plusPtr` 24 :: Ptr Word32)) (pipelineStageCreationFeedbackCount)
poke ((p `plusPtr` 32 :: Ptr (Ptr PipelineCreationFeedback))) (pipelineStageCreationFeedbacks)
f
cStructSize = 40
cStructAlignment = 8
pokeZeroCStruct p f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr (Ptr PipelineCreationFeedback))) (zero)
poke ((p `plusPtr` 24 :: Ptr Word32)) (zero)
poke ((p `plusPtr` 32 :: Ptr (Ptr PipelineCreationFeedback))) (zero)
f
instance FromCStruct PipelineCreationFeedbackCreateInfo where
peekCStruct p = do
pPipelineCreationFeedback <- peek @(Ptr PipelineCreationFeedback) ((p `plusPtr` 16 :: Ptr (Ptr PipelineCreationFeedback)))
pipelineStageCreationFeedbackCount <- peek @Word32 ((p `plusPtr` 24 :: Ptr Word32))
pPipelineStageCreationFeedbacks <- peek @(Ptr PipelineCreationFeedback) ((p `plusPtr` 32 :: Ptr (Ptr PipelineCreationFeedback)))
pure $ PipelineCreationFeedbackCreateInfo
pPipelineCreationFeedback pipelineStageCreationFeedbackCount pPipelineStageCreationFeedbacks
instance Storable PipelineCreationFeedbackCreateInfo where
sizeOf ~_ = 40
alignment ~_ = 8
peek = peekCStruct
poke ptr poked = pokeCStruct ptr poked (pure ())
instance Zero PipelineCreationFeedbackCreateInfo where
zero = PipelineCreationFeedbackCreateInfo
zero
zero
zero
| expipiplus1/vulkan | src/Vulkan/Core13/Promoted_From_VK_EXT_pipeline_creation_feedback.hs | bsd-3-clause | 11,702 | 0 | 14 | 1,710 | 1,580 | 928 | 652 | -1 | -1 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
module Duckling.Ordinal.VI.Tests
( tests ) where
import Data.String
import Prelude
import Test.Tasty
import Duckling.Dimensions.Types
import Duckling.Ordinal.VI.Corpus
import Duckling.Testing.Asserts
tests :: TestTree
tests = testGroup "VI Tests"
[ makeCorpusTest [Seal Ordinal] corpus
]
| facebookincubator/duckling | tests/Duckling/Ordinal/VI/Tests.hs | bsd-3-clause | 503 | 0 | 9 | 77 | 79 | 50 | 29 | 11 | 1 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE Rank2Types #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Machine.Tee
-- Copyright : (C) 2012 Edward Kmett, Rúnar Bjarnason, Paul Chiusano
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : Edward Kmett <ekmett@gmail.com>
-- Stability : provisional
-- Portability : Rank-2 Types, GADTs
--
----------------------------------------------------------------------------
module Data.Machine.Tee
( -- * Tees
Tee, TeeT
, T(..)
, tee
, addL, addR
, capL, capR
) where
import Data.Machine.Is
import Data.Machine.Process
import Data.Machine.Type
import Data.Machine.Source
import Prelude hiding ((.),id)
-------------------------------------------------------------------------------
-- Tees
-------------------------------------------------------------------------------
-- | The input descriptor for a 'Tee' or 'TeeT'
data T a b c where
L :: T a b a
R :: T a b b
-- | A 'Machine' that can read from two input stream in a deterministic manner.
type Tee a b c = Machine (T a b) c
-- | A 'Machine' that can read from two input stream in a deterministic manner with monadic side-effects.
type TeeT m a b c = MachineT m (T a b) c
-- | Compose a pair of pipes onto the front of a Tee.
tee :: Monad m => ProcessT m a a' -> ProcessT m b b' -> TeeT m a' b' c -> TeeT m a b c
tee ma mb m = MachineT $ runMachineT m >>= \v -> case v of
Stop -> return Stop
Yield o k -> return $ Yield o $ tee ma mb k
Await f L ff -> runMachineT ma >>= \u -> case u of
Stop -> runMachineT $ tee stopped mb ff
Yield a k -> runMachineT $ tee k mb $ f a
Await g Refl fg ->
return $ Await (\a -> tee (g a) mb $ encased v) L $ tee fg mb $ encased v
Await f R ff -> runMachineT mb >>= \u -> case u of
Stop -> runMachineT $ tee ma stopped ff
Yield b k -> runMachineT $ tee ma k $ f b
Await g Refl fg ->
return $ Await (\b -> tee ma (g b) $ encased v) R $ tee ma fg $ encased v
-- | Precompose a pipe onto the left input of a tee.
addL :: Monad m => ProcessT m a b -> TeeT m b c d -> TeeT m a c d
addL p = tee p echo
{-# INLINE addL #-}
-- | Precompose a pipe onto the right input of a tee.
addR :: Monad m => ProcessT m b c -> TeeT m a c d -> TeeT m a b d
addR = tee echo
{-# INLINE addR #-}
-- | Tie off one input of a tee by connecting it to a known source.
capL :: Monad m => SourceT m a -> TeeT m a b c -> ProcessT m b c
capL s t = fit cappedT $ addL s t
{-# INLINE capL #-}
-- | Tie off one input of a tee by connecting it to a known source.
capR :: Monad m => SourceT m b -> TeeT m a b c -> ProcessT m a c
capR s t = fit cappedT $ addR s t
{-# INLINE capR #-}
-- | Natural transformation used by 'capL' and 'capR'.
cappedT :: T a a b -> Is a b
cappedT R = Refl
cappedT L = Refl
{-# INLINE cappedT #-}
| YoEight/machines | src/Data/Machine/Tee.hs | bsd-3-clause | 2,911 | 0 | 24 | 714 | 858 | 443 | 415 | 49 | 8 |
module Main where
import Cataskell.Server.Main
main :: IO ()
main = serverMain
| corajr/cataskell | cataskell-server/Main.hs | bsd-3-clause | 81 | 0 | 6 | 14 | 25 | 15 | 10 | 4 | 1 |
module Paths_minimal_resource_protocol (
version,
getBinDir, getLibDir, getDataDir, getLibexecDir,
getDataFileName
) where
import Data.Version (Version(..))
import System.Environment (getEnv)
version :: Version
version = Version {versionBranch = [0,0,0,1], versionTags = []}
bindir, libdir, datadir, libexecdir :: FilePath
bindir = "/Users/hi5networks/.cabal/bin"
libdir = "/Users/hi5networks/.cabal/lib/minimal-resource-protocol-0.0.0.1/ghc-7.2.2"
datadir = "/Users/hi5networks/.cabal/share/minimal-resource-protocol-0.0.0.1"
libexecdir = "/Users/hi5networks/.cabal/libexec"
getBinDir, getLibDir, getDataDir, getLibexecDir :: IO FilePath
getBinDir = catch (getEnv "minimal_resource_protocol_bindir") (\_ -> return bindir)
getLibDir = catch (getEnv "minimal_resource_protocol_libdir") (\_ -> return libdir)
getDataDir = catch (getEnv "minimal_resource_protocol_datadir") (\_ -> return datadir)
getLibexecDir = catch (getEnv "minimal_resource_protocol_libexecdir") (\_ -> return libexecdir)
getDataFileName :: FilePath -> IO FilePath
getDataFileName name = do
dir <- getDataDir
return (dir ++ "/" ++ name)
| jfischoff/minimal-resource-protocol | dist/build/autogen/Paths_minimal_resource_protocol.hs | bsd-3-clause | 1,140 | 0 | 10 | 144 | 283 | 163 | 120 | 22 | 1 |
-- | Natural numbers union infinity. This version is modelled on 'Int's.
module Math.Misc.NatInfinity where
import qualified Math.Misc.Nat as Nat
-- | A natural number extended with a single point at infinity.
data NInfinity = C Nat.N | Infinity
  deriving Eq

-- | Embed a finite natural.
fromNat :: Nat.N -> NInfinity
fromNat n = C n

-- | Embed an 'Int' via 'Nat.fromInt'.
fromInt :: Int -> NInfinity
fromInt = C . Nat.fromInt

-- | Partial: only finite values can be converted back.
toInt :: NInfinity -> Int
toInt ni =
  case ni of
    Infinity -> error "Attempting to convert Infinity to an Int."
    C m      -> Nat.toInt m

instance Show NInfinity where
    show Infinity = "∞"
    show (C m)    = show m

instance Ord NInfinity where
    -- Infinity is greater than every finite value and equal to itself.
    compare (C m) (C n)       = compare m n
    compare Infinity Infinity = EQ
    compare Infinity _        = GT
    compare _ Infinity        = LT
compare (C m) (C n) = compare m n | michiexile/hplex | pershom/src/Math/Misc/NatInfinity.hs | bsd-3-clause | 676 | 0 | 8 | 152 | 219 | 116 | 103 | 19 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module DTX2MIDI.DTX
( DTX (..),
Line (..),
Object (..),
ObjectValue (..),
Header (..),
Key (..),
Channel (..),
Comment (..),
Note (..),
object,
header,
comment,
isHeader,
isComment,
isObject,
isBPM,
isBaseBPM,
isNoteObject,
headers,
objects,
baseBPM,
)
where
import Data.List (find)
import Data.Maybe (maybeToList)
import Data.Text (Text, pack, singleton, unpack)
import Text.Read (readMaybe)
-- | A whole chart is just the list of parsed lines.
type DTX = [Line]
-- | Header key, e.g. the part before the colon in a @#KEY: value@ line.
type Key = Text
-- | Channel suffix of a key (may be empty).
type Channel = Text
type Comment = Text
-- | A single note token inside an object line.
type Note = Text
-- | A @#KEY: value@ metadata line, split into key, channel and value.
data Header = Header
  { headerKey :: Key,
    headerChannel :: Channel,
    headerValue :: Text
  }
  deriving (Show, Eq)
-- TODO: Add support for tempo change event and measure length ratio change event
-- | The payload of an object line: one drum lane's notes, a measure-length
-- change, or an event this tool does not handle.
data ObjectValue
  = HiHatClose [Note]
  | Snare [Note]
  | BassDrum [Note]
  | HighTom [Note]
  | LowTom [Note]
  | Cymbal [Note]
  | FloorTom [Note]
  | HiHatOpen [Note]
  | RideCymbal [Note]
  | LeftCymbal [Note]
  | LeftPedal [Note]
  | LeftBassDrum [Note]
  | MeasureLengthRatio Double
  | UnsupportedEvent Channel Text
  deriving (Show, Eq)
-- | An object line: its key plus the decoded payload.
data Object = Object
  { objectKey :: Key,
    objectValue :: ObjectValue
  }
  deriving (Show, Eq)
-- | One parsed source line of a DTX file.
data Line
  = LineHeader Header
  | LineComment Comment
  | LineObject Object
  deriving (Show, Eq)
-- | Project a 'Line' to its header payload, if it is a header line.
header :: Line -> Maybe Header
header l =
  case l of
    LineHeader h -> Just h
    _ -> Nothing

-- | Project a 'Line' to its comment payload, if it is a comment line.
comment :: Line -> Maybe Comment
comment l =
  case l of
    LineComment c -> Just c
    _ -> Nothing

-- | Project a 'Line' to its object payload, if it is an object line.
object :: Line -> Maybe Object
object l =
  case l of
    LineObject o -> Just o
    _ -> Nothing

-- | True exactly when the line is a header line.
isHeader :: Line -> Bool
isHeader l =
  case l of
    LineHeader _ -> True
    _ -> False

-- | True exactly when the line is a comment line.
isComment :: Line -> Bool
isComment l =
  case l of
    LineComment _ -> True
    _ -> False

-- | True exactly when the line is an object line.
isObject :: Line -> Bool
isObject l =
  case l of
    LineObject _ -> True
    _ -> False
-- | Does this header set BPM on the given channel?
isBPM :: Header -> Channel -> Bool
isBPM (Header key chan _) chan' = key == "BPM" && chan == chan'

-- | The base BPM header is the @BPM@ header with an empty channel.
-- (Local renamed from @header@ to avoid shadowing the projection above.)
isBaseBPM :: Header -> Bool
isBaseBPM h = isBPM h ""

-- | Every object except an 'UnsupportedEvent' carries note data.
isNoteObject :: Object -> Bool
isNoteObject (Object _ value) =
  case value of
    UnsupportedEvent _ _ -> False
    _ -> True
-- | All header lines of a chart.
headers :: DTX -> [Header]
headers dtx = (maybeToList . header) =<< dtx

-- | All object lines of a chart.
objects :: DTX -> [Object]
objects dtx = (maybeToList . object) =<< dtx

-- | The chart's base BPM, taken from the first base-BPM header.
--
-- Returns 'Nothing' when the header is absent /or/ when its value does
-- not parse as a number.  (The previous 'read'-based version was partial
-- and crashed on a malformed BPM value.)
baseBPM :: DTX -> Maybe Double
baseBPM dtx = readValue =<< find isBaseBPM (headers dtx)
  where
    readValue :: Header -> Maybe Double
    readValue = readMaybe . unpack . headerValue
| akiomik/dtx2midi | src/DTX2MIDI/DTX.hs | bsd-3-clause | 2,411 | 0 | 9 | 557 | 842 | 479 | 363 | 96 | 1 |
{-# LANGUAGE TemplateHaskell #-}
module Main where
import AnimatedDangerzone.Types
import Control.Lens
import Control.Applicative
import NetworkedGame.Server
import qualified Data.Map as Map
import Network (PortID(..))
import System.IO
-- | Whole-server state: the live world plus saved data for players who
-- have disconnected (keyed by name, so they can resume on reconnect).
data ServerState =
  ServerState { _pastPlayers :: Map.Map String Player
              , _serverWorld :: World
              }

makeLenses ''ServerState

-- | Network-server configuration: listen on port 1600, no tick timer,
-- and dispatch events to the handlers below.
callbacks :: NetworkServer ClientMsg ServerState
callbacks = NetworkServer
  { serverPort = PortNumber 1600
  , eventsPerSecond = -1 -- no timer yet
  , onTick = myTick
  , onConnect = myConnect
  , onDisconnect = myDisconnect
  , onCommand = myCommand
  }
-- | Timer callback: currently a no-op (the tick timer is disabled above).
myTick :: Handles -> Float -> ServerState -> IO ServerState
myTick hs elapsed st = return st
-- | Connection callback: nothing happens until the client sends
-- 'ClientHello' (see 'handleUnknownPlayerCommand').
myConnect :: Handles -> ConnectionId -> ServerState -> IO ServerState
myConnect hs con st = return st
-- | On disconnect, remove the player from the live world and stash the
-- record in '_pastPlayers' so a later reconnect can restore it.
myDisconnect :: Handles -> ConnectionId -> ServerState -> IO ServerState
myDisconnect hs c st =
  do case st^.serverWorld.worldPlayers.at c of
       Nothing -> do putStrLn "User disconnected: (unknown)"
                     return st
       Just p -> do
         let st' = st & serverWorld . worldPlayers . at c .~ Nothing
                      & pastPlayers . at (p^.playerName) .~ (Just p)
         announce hs $ QuitPlayer c
         putStrLn $ "User disconnected: " ++ p^.playerName
         return st'
myCommand :: Handles -> ConnectionId -> ClientMsg -> ServerState -> IO ServerState
myCommand hs c msg st =
  -- Depending on whether this connection corresponds to a known player, handle
  -- the message differently.
  let handler = case st^.serverWorld.worldPlayers.at c of
        Nothing -> handleUnknownPlayerCommand
        Just _ -> handleKnownPlayerCommand
  in handler hs c msg st
-- | Messages from a connection that has not yet logged in.  Only
-- 'ClientHello' is meaningful; everything else is ignored.
handleUnknownPlayerCommand :: Handles -> ConnectionId -> ClientMsg -> ServerState -> IO ServerState
handleUnknownPlayerCommand hs c msg st =
  case msg of
    ClientHello name -> do
      -- If the name corresponds to a logged-in user, send a conflict response;
      -- else log the user in.
      case any ((== name) . _playerName) $ Map.elems (st^.serverWorld.worldPlayers) of
        True -> do
          announceOne hs c UsernameConflict
          return st
        False -> do
          -- Use the player data already in the world (if previously connected) or
          -- create a new player record otherwise.
          let Just p = st^.pastPlayers.at name <|> Just (newPlayer name)
              st' = st & serverWorld . worldPlayers . at c ?~ p
          putStrLn $ "User connected: " ++ name
          putStrLn $ "  player info: " ++ show p
          announceOne hs c $ Hello c
          announce hs $ NewPlayer c p
          announceOne hs c $ SetWorld (st'^.serverWorld)
          return st'
    _ -> return st
-- | Messages from a logged-in player.  Movement is broadcast to everyone
-- and mirrored into the server's world state; other messages are ignored.
handleKnownPlayerCommand :: Handles -> ConnectionId -> ClientMsg -> ServerState -> IO ServerState
handleKnownPlayerCommand hs c msg st =
  case msg of
    ClientMove coord -> do putStrLn $ "Player " ++ show c ++ " moved to " ++ show coord
                           announce hs $ MovePlayer c coord
                           let st' = st & serverWorld . worldPlayers . ix c . playerCoord .~ coord
                           return st'
    _ -> return st
-- | Starting world: no players, and the block map built from the literal
-- layout below.  The layout is reversed so that the first row of 'blocks'
-- ends up with the highest row coordinate (drawn on top).
initialWorld :: World
initialWorld = World
  { _worldPlayers = Map.empty
  , _worldBlocks = Map.fromList [ ((r,c), b)
                                | (r, row) <- zip [0..] $ reverse blocks
                                , (c, b) <- zip [0..] row
                                ]
  }
-- | The hand-authored map, one inner list per row.
blocks :: [[Block]]
blocks =
  [ [Air, Air, Air, Rock, Rock, Rock, Rock, Rock]
  , [Air, Rock, Rock, Air, Air, Air, Air, Air, Rock, Rock]
  , [Rock, Lava, Stones,Air, Air, Air, Air, Air, Air, Air, Rock]
  , [Rock, Lava, Stones,Air, Air, Air, Air, Air, Air, Air, Rock]
  , [Rock, Lava, Stones,Air, Air, Air, Air, Air, Air, Air, Rock]
  , [Rock, Stones,Air, Air, Air, Air, Air, Air, Air, Air, Rock]
  , [Rock, Air, Air, Air, Air, Air, Air, Air, Air, Air, Rock]
  , [Rock, Air, Air, Air, Air, Air, Air, Air, Air, Air, Rock]
  , [Rock, Air, Air, Air, Air, Air, Air, Air, Air, Air, Rock]
  , [Rock, Air, Air, Air, Air, Air, Air, Ice, Ice, Air, Rock]
  , [Rock, Air, Air, Air, Air, Air, Air, Air, Ice, Air, Rock]
  , [Rock, Air, Air, Air, Air, Air, Air, Air, Air, Air, Rock]
  , [Air, Rock, Rock, Air, Air, Air, Air, Air, Rock, Rock]
  , [Air, Air, Air, Rock, Rock, Rock, Rock, Rock]
  ]
-- | Fresh player record for a first-time connection, placed at the origin.
newPlayer :: String -> Player
newPlayer name = Player
  { _playerName = name
  , _playerCoord = (0,0)
  }
-- | Server boots with the initial world and no remembered players.
initialState :: ServerState
initialState = ServerState
  { _serverWorld = initialWorld
  , _pastPlayers = Map.empty
  }
main :: IO ()
main = serverMain callbacks initialState
| glguy/animated-dangerzone | src/Server.hs | bsd-3-clause | 4,923 | 12 | 21 | 1,515 | 1,559 | 867 | 692 | -1 | -1 |
{-# LANGUAGE LambdaCase, RecordWildCards, RankNTypes, TemplateHaskell #-}
module AbstractInterpretation.CreatedBy.CodeGenBase where
import Data.Int
import Data.Word
import Data.Set (Set)
import Data.Map (Map)
import Data.Vector (Vector)
import qualified Data.Bimap as Bimap
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Data.Vector as Vec
import Control.Monad.State
import Lens.Micro.Platform
import Grin.Grin
import Grin.TypeEnvDefs
import qualified AbstractInterpretation.IR as IR
import AbstractInterpretation.IR (Instruction(..), Reg(..))
-- | Code-generator state: fresh-name counters, the instructions emitted
-- so far (stored in reverse order; see 'emit' / 'codeGenBlock'), and the
-- various name/tag/register mappings built up during generation.
data CGState
  = CGState
  { _sMemoryCounter :: Word32
  , _sRegisterCounter :: Word32
  , _sInstructions :: [Instruction]
  -- mapping
  , _sRegisterMap :: Map Name Reg
  , _sFunctionArgMap :: Map Name (Reg, [Reg])
  , _sTagMap :: Bimap.Bimap Tag IR.Tag
  , _sProducerMap :: Map.Map Reg Name
  -- internal
  , _sExternalMap :: Map Name External
  }
  deriving (Show)
concat <$> mapM makeLenses [''CGState]
-- | All counters at zero, all maps empty.
emptyCGState :: CGState
emptyCGState = CGState
  { _sMemoryCounter = 0
  , _sRegisterCounter = 0
  , _sInstructions = []
  -- mapping
  , _sRegisterMap = mempty
  , _sFunctionArgMap = mempty
  , _sTagMap = Bimap.empty
  , _sProducerMap = mempty
  , _sExternalMap = mempty
  }
-- | The code-generation monad.
type CG = State CGState
-- | Result of generating one expression: a register, nothing, or a
-- pending case alternative with its continuation.
data Result
  = R IR.Reg
  | Z
  | A CPat (CG Result)
-- | Append an instruction (prepended; the list is kept reversed).
emit :: IR.Instruction -> CG ()
emit inst = modify' $ \s@CGState{..} -> s {_sInstructions = inst : _sInstructions}
-- | Register an external function declaration by name.
addExternal :: External -> CG ()
addExternal e = modify' $ \s@CGState{..} -> s {_sExternalMap = Map.insert (eName e) e _sExternalMap}
-- | Look up a previously added external by name.
getExternal :: Name -> CG (Maybe External)
getExternal name = Map.lookup name <$> gets _sExternalMap
-- | Creates (or reuses) the result and argument registers for a function.
getOrAddFunRegs :: Name -> Int -> CG (IR.Reg, [IR.Reg])
getOrAddFunRegs name arity = do
  funMap <- gets _sFunctionArgMap
  case Map.lookup name funMap of
    Just x -> pure x
    Nothing -> do
      resReg <- newReg
      argRegs <- replicateM arity newReg
      let funRegs = (resReg, argRegs)
      modify' $ \s@CGState{..} -> s {_sFunctionArgMap = Map.insert name funRegs _sFunctionArgMap}
      pure funRegs
-- | Allocate a fresh register.
newReg :: CG IR.Reg
newReg = state $ \s@CGState{..} -> (IR.Reg _sRegisterCounter, s {_sRegisterCounter = succ _sRegisterCounter})
-- | Allocate a fresh abstract memory location.
newMem :: CG IR.Mem
newMem = state $ \s@CGState{..} -> (IR.Mem _sMemoryCounter, s {_sMemoryCounter = succ _sMemoryCounter})
-- | Bind a source-level variable name to a register.
addReg :: Name -> IR.Reg -> CG ()
addReg name reg = modify' $ \s@CGState{..} -> s {_sRegisterMap = Map.insert name reg _sRegisterMap}
-- | Look up a variable's register; it is a code-generator bug for the
-- name to be missing, hence the hard error.
getReg :: Name -> CG IR.Reg
getReg name = do
  regMap <- gets _sRegisterMap
  case Map.lookup name regMap of
    Nothing -> error $ "unknown variable " ++ unpackName name
    Just reg -> pure reg
-- | Intern a GRIN tag as an IR tag, assigning the next numeric id on
-- first sight (the bimap keeps the mapping bidirectional).
getTag :: Tag -> CG IR.Tag
getTag tag = do
  tagMap <- gets _sTagMap
  case Bimap.lookup tag tagMap of
    Just t -> pure t
    Nothing -> do
      let t = IR.Tag . fromIntegral $ Bimap.size tagMap
      modify' $ \s -> s {_sTagMap = Bimap.insert tag t tagMap}
      pure t
-- | Run a generator in an empty instruction buffer and return its result
-- together with the instructions it emitted (in emission order); the
-- previously buffered instructions are restored afterwards.
codeGenBlock :: CG a -> CG (a,[IR.Instruction])
codeGenBlock genM = do
  instructions <- state $ \s@CGState{..} -> (_sInstructions, s {_sInstructions = []})
  ret <- genM
  blockInstructions <- state $ \s@CGState{..} -> (reverse _sInstructions, s {_sInstructions = instructions})
  pure (ret, blockInstructions)
-- | Like 'codeGenBlock' but keeps only the emitted instructions.
codeGenBlock_ :: CG a -> CG [IR.Instruction]
codeGenBlock_ = fmap snd . codeGenBlock
-- | Register holding a simple type; primitive types are encoded as the
-- negative constants below, locations as the (non-negative) heap indices.
codeGenSimpleType :: SimpleType -> CG IR.Reg
codeGenSimpleType = \case
  T_Unit                -> newRegWithSimpleType (-1)
  T_Int64               -> newRegWithSimpleType (-2)
  T_Word64              -> newRegWithSimpleType (-3)
  T_Float               -> newRegWithSimpleType (-4)
  T_Bool                -> newRegWithSimpleType (-5)
  T_String              -> newRegWithSimpleType (-6)
  T_Char                -> newRegWithSimpleType (-7)
  T_UnspecifiedLocation -> newRegWithSimpleType (-8)
  T_Location locs -> do
    r <- newReg
    let locs' = map fromIntegral locs
    mapM_ (`extendSimpleType` r) locs'
    pure r
  t -> newReg
  where
    -- TODO: rename simple type to something more generic,
    newRegWithSimpleType :: IR.SimpleType -> CG IR.Reg
    newRegWithSimpleType irTy = newReg >>= extendSimpleType irTy
    -- TODO: rename simple type to something more generic,
    extendSimpleType :: IR.SimpleType -> IR.Reg -> CG IR.Reg
    extendSimpleType irTy r = do
      emit IR.Set
        { dstReg = r
        , constant = IR.CSimpleType irTy
        }
      pure r
-- | Generate a register for a node set: one register per (tag, fields)
-- entry, all merged into a single destination register.
codeGenNodeSetWith :: (Tag -> Vector SimpleType -> CG IR.Reg) ->
                      NodeSet -> CG IR.Reg
codeGenNodeSetWith cgNodeTy ns = do
  let (tags, argss) = unzip . Map.toList $ ns
  dst <- newReg
  nodeRegs <- zipWithM cgNodeTy tags argss
  forM_ nodeRegs $ \src -> emit IR.Move { srcReg = src, dstReg = dst }
  pure dst
-- Generate a node type from type information,
-- but preserve the first field for tag information
-- (hence the @+ 1@ arity and fields starting at index 1).
codeGenTaggedNodeType :: Tag -> Vector SimpleType -> CG IR.Reg
codeGenTaggedNodeType tag ts = do
  let ts' = Vec.toList ts
  r <- newReg
  irTag <- getTag tag
  argRegs <- mapM codeGenSimpleType ts'
  emit IR.Set {dstReg = r, constant = IR.CNodeType irTag (length argRegs + 1)}
  forM_ (zip [1..] argRegs) $ \(idx, argReg) ->
    emit IR.Extend {srcReg = argReg, dstSelector = IR.NodeItem irTag idx, dstReg = r}
  pure r
-- FIXME: the following type signature is a bad omen ; it's not intuitive ; no-go ; refactor!
codeGenType :: (SimpleType -> CG IR.Reg) ->
               (NodeSet -> CG IR.Reg) ->
               Type -> CG IR.Reg
codeGenType cgSimpleTy cgNodeTy = \case
  T_SimpleType t -> cgSimpleTy t
  T_NodeSet ns -> cgNodeTy ns
-- Pointers are the non-negative values; simple types are negative (see
-- 'codeGenSimpleType'), so these two ranges partition the encoding.
isPointer :: IR.Predicate
isPointer = IR.ValueIn (IR.Range 0 (maxBound :: Int32))
isNotPointer :: IR.Predicate
isNotPointer = IR.ValueIn (IR.Range (minBound :: Int32) 0)
-- For simple types, copies only pointer information
-- For nodes, copies the structure and the pointer information in the fields
copyStructureWithPtrInfo :: IR.Reg -> IR.Reg -> IR.Instruction
copyStructureWithPtrInfo srcReg dstReg = IR.ConditionalMove
  { srcReg = srcReg
  , predicate = isPointer
  , dstReg = dstReg
  }
| andorp/grin | grin/src/AbstractInterpretation/CreatedBy/CodeGenBase.hs | bsd-3-clause | 6,239 | 0 | 17 | 1,385 | 2,024 | 1,061 | 963 | 147 | 10 |
module Data.ISO639.Parse
( parseAlpha3Bibliographic
, parseAlpha3Terminologic
, parseAlpha2
) where
import qualified Data.HashMap.Strict as Map
import Data.ISO639.Types (Language, alpha3_terminologic, alpha3_bibliographic, languages, alpha2)
import Data.Text(Text)
-- | Lookup table keyed by bibliographic alpha-3 code.  Every language
-- has one ('alpha3_bibliographic' is total), so no filtering is needed.
idx_a3b :: Map.HashMap Text Language
idx_a3b = foldr fld Map.empty languages
  where
    fld iso acc = Map.insert (alpha3_bibliographic iso) iso acc

-- | Lookup table keyed by terminologic alpha-3 code (only for languages
-- that define one).
idx_a3t :: Map.HashMap Text Language
idx_a3t = indexByOptional alpha3_terminologic

-- | Lookup table keyed by alpha-2 code (only for languages that define one).
idx_a2 :: Map.HashMap Text Language
idx_a2 = indexByOptional alpha2

-- | Shared builder for indexes whose key is optional: languages lacking
-- the code are skipped.  (Replaces two previously copy-pasted folds and
-- adds the missing top-level type signatures.)
indexByOptional :: (Language -> Maybe Text) -> Map.HashMap Text Language
indexByOptional code = foldr fld Map.empty languages
  where
    fld iso acc = maybe acc (\k -> Map.insert k iso acc) (code iso)
-- | Resolve a bibliographic ISO 639-2 alpha-3 code to its language.
parseAlpha3Bibliographic :: Text -> Maybe Language
parseAlpha3Bibliographic input = Map.lookup input idx_a3b
-- | Resolve a terminologic ISO 639-2 alpha-3 code to its language.
parseAlpha3Terminologic :: Text -> Maybe Language
parseAlpha3Terminologic input = Map.lookup input idx_a3t
-- | Resolve an ISO 639-1 alpha-2 code to its language.
parseAlpha2 :: Text -> Maybe Language
parseAlpha2 input = Map.lookup input idx_a2
| necrobious/iso639 | src/Data/ISO639/Parse.hs | bsd-3-clause | 982 | 0 | 11 | 167 | 302 | 159 | 143 | 19 | 2 |
{-# LANGUAGE FlexibleInstances #-}
-----------------------------------------------------------------------------
-- |
-- Module : Foreign.BLAS.Level1
-- Copyright : Copyright (c) 2010, Patrick Perry <patperry@gmail.com>
-- License : BSD3
-- Maintainer : Patrick Perry <patperry@gmail.com>
-- Stability : experimental
--
-- Vector operations.
--
module Foreign.BLAS.Level1 (
BLAS1(..),
) where
import Foreign( Storable, Ptr, peek, with )
import Foreign.Storable.Complex()
import Data.Complex( Complex(..) )
import Foreign.VMath.VFractional( VFractional )
import Foreign.BLAS.Double
import Foreign.BLAS.Zomplex
-- | Types with vector-vector operations.
-- Each method mirrors the corresponding Level-1 BLAS routine; the 'Int'
-- arguments are the element count and the strides of the operand vectors.
class (Storable a, VFractional a) => BLAS1 a where
    copy :: Int -> Ptr a -> Int -> Ptr a -> Int -> IO ()
    swap :: Int -> Ptr a -> Int -> Ptr a -> Int -> IO ()
    dotc :: Int -> Ptr a -> Int -> Ptr a -> Int -> IO a
    dotu :: Int -> Ptr a -> Int -> Ptr a -> Int -> IO a
    nrm2 :: Int -> Ptr a -> Int -> IO Double
    asum :: Int -> Ptr a -> Int -> IO Double
    iamax :: Int -> Ptr a -> Int -> IO Int
    scal :: Int -> a -> Ptr a -> Int -> IO ()
    axpy :: Int -> a -> Ptr a -> Int -> Ptr a -> Int -> IO ()
    rotg :: Ptr a -> Ptr a -> Ptr a -> Ptr a -> IO ()
    rot :: Int -> Ptr a -> Int -> Ptr a -> Int -> Double -> Double -> IO ()
-- | Marshal an 'Int' as a by-reference argument of any storable 'Enum'
-- type, running the action with a pointer to the converted value.
withEnum :: (Enum a, Storable a) => Int -> (Ptr a -> IO b) -> IO b
withEnum n act = with (toEnum n) act
{-# INLINE withEnum #-}
-- Real (double-precision) instance: each method marshals its scalar
-- arguments by reference with 'withEnum' / 'with' and calls the
-- corresponding Fortran d* routine.
instance BLAS1 Double where
    copy n px incx py incy =
        withEnum n $ \pn ->
        withEnum incx $ \pincx ->
        withEnum incy $ \pincy ->
            dcopy pn px pincx py pincy
    {-# INLINE copy #-}
    swap n px incx py incy =
        withEnum n $ \pn ->
        withEnum incx $ \pincx ->
        withEnum incy $ \pincy ->
            dswap pn px pincx py pincy
    {-# INLINE swap #-}
    dotc n px incx py incy =
        withEnum n $ \pn ->
        withEnum incx $ \pincx ->
        withEnum incy $ \pincy ->
            ddot pn px pincx py pincy
    {-# INLINE dotc #-}
    -- For real vectors, conjugated and unconjugated dot products coincide.
    dotu = dotc
    {-# INLINE dotu #-}
    nrm2 n px incx =
        withEnum n $ \pn ->
        withEnum incx $ \pincx ->
            dnrm2 pn px pincx
    {-# INLINE nrm2 #-}
    asum n px incx =
        withEnum n $ \pn ->
        withEnum incx $ \pincx ->
            dasum pn px pincx
    {-# INLINE asum #-}
    iamax n px incx =
        withEnum n $ \pn ->
        withEnum incx $ \pincx -> do
            i <- idamax pn px pincx
            -- BLAS returns a 1-based index; convert to 0-based.
            return $! fromEnum (i - 1)
    {-# INLINE iamax #-}
    axpy n alpha px incx py incy =
        withEnum n $ \pn ->
        with alpha $ \palpha ->
        withEnum incx $ \pincx ->
        withEnum incy $ \pincy ->
            daxpy pn palpha px pincx py pincy
    {-# INLINE axpy #-}
    scal n alpha px incx =
        withEnum n $ \pn ->
        with alpha $ \palpha ->
        withEnum incx $ \pincx ->
            dscal pn palpha px pincx
    {-# INLINE scal #-}
    rotg = drotg
    {-# INLINE rotg #-}
    rot n px incx py incy c s =
        withEnum n $ \pn ->
        withEnum incx $ \pincx ->
        withEnum incy $ \pincy ->
        with c $ \pc ->
        with s $ \ps ->
            drot pn px pincx py pincy pc ps
    {-# INLINE rot #-}
-- Complex (double-precision) instance, wrapping the Fortran z* routines.
instance BLAS1 (Complex Double) where
    copy n px incx py incy =
        withEnum n $ \pn ->
        withEnum incx $ \pincx ->
        withEnum incy $ \pincy ->
            zcopy pn px pincx py pincy
    {-# INLINE copy #-}
    swap n px incx py incy =
        withEnum n $ \pn ->
        withEnum incx $ \pincx ->
        withEnum incy $ \pincy ->
            zswap pn px pincx py pincy
    {-# INLINE swap #-}
    -- zdotc/zdotu return their complex result through an extra out
    -- parameter (allocated with 'with 0' and read back with 'peek').
    dotc n px incx py incy =
        withEnum n $ \pn ->
        withEnum incx $ \pincx ->
        withEnum incy $ \pincy ->
        with 0 $ \pdotc -> do
            zdotc pdotc pn px pincx py pincy
            peek pdotc
    {-# INLINE dotc #-}
    dotu n px incx py incy =
        withEnum n $ \pn ->
        withEnum incx $ \pincx ->
        withEnum incy $ \pincy ->
        with 0 $ \pdotu -> do
            zdotu pdotu pn px pincx py pincy
            peek pdotu
    {-# INLINE dotu #-}
    nrm2 n px incx =
        withEnum n $ \pn ->
        withEnum incx $ \pincx ->
            znrm2 pn px pincx
    {-# INLINE nrm2 #-}
    asum n px incx =
        withEnum n $ \pn ->
        withEnum incx $ \pincx ->
            zasum pn px pincx
    {-# INLINE asum #-}
    iamax n px incx =
        withEnum n $ \pn ->
        withEnum incx $ \pincx -> do
            i <- izamax pn px pincx
            -- BLAS returns a 1-based index; convert to 0-based.
            return $! fromEnum (i - 1)
    {-# INLINE iamax #-}
    axpy n alpha px incx py incy =
        withEnum n $ \pn ->
        with alpha $ \palpha ->
        withEnum incx $ \pincx ->
        withEnum incy $ \pincy ->
            zaxpy pn palpha px pincx py pincy
    {-# INLINE axpy #-}
    scal n alpha px incx =
        withEnum n $ \pn ->
        with alpha $ \palpha ->
        withEnum incx $ \pincx ->
            zscal pn palpha px pincx
    {-# INLINE scal #-}
    rotg = zrotg
    {-# INLINE rotg #-}
    rot n px incx py incy c s =
        withEnum n $ \pn ->
        withEnum incx $ \pincx ->
        withEnum incy $ \pincy ->
        with c $ \pc ->
        with s $ \ps ->
            zdrot pn px pincx py pincy pc ps
    {-# INLINE rot #-}
| patperry/hs-linear-algebra | lib/Foreign/BLAS/Level1.hs | bsd-3-clause | 5,402 | 0 | 16 | 2,024 | 1,848 | 932 | 916 | 152 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE TemplateHaskell #-}
module Test.IO.Tinfoil.Internal.Sodium where
import Disorder.Core.IO (testIO)
import P
import System.IO
import Test.QuickCheck
import Tinfoil.Internal.Sodium
import X.Control.Monad.Trans.Either (runEitherT)
-- | 'initialiseSodium' must be idempotent: a second call succeeds and
-- yields the same result as the first.
prop_initialiseSodium :: Property
prop_initialiseSodium = once . testIO $ do
  r1 <- runEitherT initialiseSodium
  r2 <- runEitherT initialiseSodium
  pure $ (r1, isRight r1, isRight r2) === (r2, True, True)
-- Template Haskell marker so $forAllProperties below can discover the
-- prop_* declarations above this point.
return []
tests :: IO Bool
tests = $forAllProperties $ quickCheckWithResult (stdArgs { maxSuccess = 1000 } )
| ambiata/tinfoil | test/Test/IO/Tinfoil/Internal/Sodium.hs | bsd-3-clause | 660 | 0 | 11 | 146 | 167 | 94 | 73 | 17 | 1 |
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE FlexibleContexts, CPP #-}
module BinaryHeapSTM (
Entry
, newEntry
, renewEntry
, item
, PriorityQueue(..)
, new
, enqueue
, dequeue
, delete
) where
#if __GLASGOW_HASKELL__ < 709
import Data.Word (Word)
#endif
import Control.Concurrent.STM
import Control.Monad (when, void)
import Data.Array (Array, listArray, (!))
import Data.Array.MArray (newArray_, readArray, writeArray)
----------------------------------------------------------------
type Weight = Int
type Deficit = Word
-- | Abstract data type of entries for priority queues.
-- The mutable 'deficit' determines heap order; 'index' tracks the
-- entry's current slot in the heap array so 'delete' can find it.
data Entry a = Entry {
    weight :: {-# UNPACK #-} !Weight
  , item :: {-# UNPACK #-} !(TVar a) -- ^ Extracting an item from an entry.
  , deficit :: {-# UNPACK #-} !(TVar Deficit)
  , index :: {-# UNPACK #-} !(TVar Index)
  }
-- | A fresh entry starts with the sentinel deficit and index -1
-- (i.e. not yet in any heap).
newEntry :: a -> Weight -> STM (Entry a)
newEntry x w = Entry w <$> newTVar x <*> newTVar magicDeficit <*> newTVar (-1)
-- | Changing the item of an entry.
renewEntry :: Entry a -> a -> STM ()
renewEntry Entry{..} x = writeTVar item x
----------------------------------------------------------------
type Index = Int
type MA a = TArray Index (Entry a)
-- FIXME: The base (Word64) would be overflowed.
-- In that case, the heap must be re-constructed.
-- | Binary heap in an STM array: current deficit base, the index of the
-- first free slot, and the 1-based entry array.
data PriorityQueue a = PriorityQueue (TVar Deficit)
                                     (TVar Index)
                                     (MA a)
----------------------------------------------------------------
-- | Sentinel marking an entry that has never been enqueued.
magicDeficit :: Deficit
magicDeficit = 0
deficitSteps :: Int
deficitSteps = 65536
deficitStepsW :: Word
deficitStepsW = fromIntegral deficitSteps
-- | Precomputed per-weight deficit increments: deficitSteps / weight,
-- rounded, for weights 1..256 (higher weight => smaller increment =>
-- served more often).
deficitList :: [Deficit]
deficitList = map calc idxs
  where
    idxs = [1..256] :: [Double]
    calc w = round (fromIntegral deficitSteps / w)
deficitTable :: Array Index Deficit
deficitTable = listArray (1,256) deficitList
-- | Table lookup; valid for weights 1..256 only.
weightToDeficit :: Weight -> Deficit
weightToDeficit w = deficitTable ! w
----------------------------------------------------------------
-- | Empty queue of capacity @n@: base deficit 0, next free slot 1.
new :: Int -> STM (PriorityQueue a)
new n = PriorityQueue <$> newTVar 0
                      <*> newTVar 1
                      <*> newArray_ (1,n)
-- | Enqueuing an entry. PriorityQueue is updated.
enqueue :: Entry a -> PriorityQueue a -> STM ()
enqueue ent@Entry{..} (PriorityQueue bref idx arr) = do
    i <- readTVar idx
    base <- readTVar bref
    d <- readTVar deficit
    -- A never-enqueued entry (sentinel deficit) starts from the queue's
    -- current base; a re-enqueued one continues from its own deficit.
    let !b = if d == magicDeficit then base else d
        !d' = b + weightToDeficit weight
    writeTVar deficit d'
    write arr i ent
    shiftUp arr i
    let !i' = i + 1
    writeTVar idx i'
    return ()
-- | Dequeuing an entry. PriorityQueue is updated.
dequeue :: PriorityQueue a -> STM (Entry a)
dequeue (PriorityQueue bref idx arr) = do
    ent <- shrink arr 1 idx
    i <- readTVar idx
    shiftDown arr 1 i
    d <- readTVar $ deficit ent
    -- Reset the base when the queue becomes empty; otherwise advance it
    -- to the dequeued entry's deficit.
    writeTVar bref $ if i == 1 then 0 else d
    return ent
-- | Remove the entry at slot @r@ by moving the last entry into its place
-- and shrinking the heap by one; returns the removed entry.
shrink :: MA a -> Index -> TVar Index -> STM (Entry a)
shrink arr r idx = do
    entr <- readArray arr r
    -- fixme: checking if i == 0
    i <- subtract 1 <$> readTVar idx
    xi <- readArray arr i
    write arr r xi
    writeTVar idx i
    return entr
-- | Restore the heap property upwards from slot @c@.
shiftUp :: MA a -> Int -> STM ()
shiftUp _ 1 = return ()
shiftUp arr c = do
    swapped <- swap arr p c
    when swapped $ shiftUp arr p
  where
    p = c `div` 2
-- | Restore the heap property downwards from slot @p@ in a heap of size
-- @n - 1@ (i.e. @n@ is the first free slot).
shiftDown :: MA a -> Int -> Int -> STM ()
shiftDown arr p n
  | c1 > n = return ()
  | c1 == n = void $ swap arr p c1
  | otherwise = do
      let !c2 = c1 + 1
      xc1 <- readArray arr c1
      xc2 <- readArray arr c2
      d1 <- readTVar $ deficit xc1
      d2 <- readTVar $ deficit xc2
      -- Pick the smaller child, comparing modulo wrap-around: a gap
      -- larger than deficitStepsW is treated as a wrapped value.
      let !c = if d1 /= d2 && d2 - d1 <= deficitStepsW then c1 else c2
      swapped <- swap arr p c
      when swapped $ shiftDown arr c n
  where
    c1 = 2 * p
{-# INLINE swap #-}
-- | Swap parent @p@ and child @c@ when the child's deficit is smaller;
-- reports whether a swap happened.
swap :: MA a -> Index -> Index -> STM Bool
swap arr p c = do
    xp <- readArray arr p
    xc <- readArray arr c
    dp <- readTVar $ deficit xp
    dc <- readTVar $ deficit xc
    if dc < dp then do
        write arr c xp
        write arr p xc
        return True
      else
        return False
{-# INLINE write #-}
-- | Store an entry at a slot and record that slot in the entry itself.
write :: MA a -> Index -> Entry a -> STM ()
write arr i ent = do
    writeArray arr i ent
    writeTVar (index ent) i
-- | Remove an arbitrary entry, using its recorded 'index' to locate it;
-- re-heapifies in both directions from the vacated slot.
delete :: Entry a -> PriorityQueue a -> STM ()
delete ent pq@(PriorityQueue _ idx arr) = do
    i <- readTVar $ index ent
    if i == 1 then
        void $ dequeue pq
      else do
        entr <- shrink arr i idx
        r <- readTVar $ index entr
        shiftDown arr r (i - 1)
        shiftUp arr r
| kazu-yamamoto/http2 | bench-priority/BinaryHeapSTM.hs | bsd-3-clause | 4,602 | 0 | 15 | 1,270 | 1,590 | 780 | 810 | 128 | 2 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.