code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE FlexibleInstances, DeriveGeneric, StandaloneDeriving #-}
-- | QuickCheck instances for all of Rainbow. Currently Rainbow does
-- not use these instances itself; they are only here for
-- cut-and-paste for other libraries that may need them. There is an
-- executable in Rainbow that is built solely to make sure this module
-- compiles without any errors.
--
-- To use these instances, just drop them into your own project
-- somewhere. They are not packaged as a library because there are
-- orphan instances.
module Rainbow.QuickCheck where
import qualified Data.Text as X
import Data.Typeable
import Rainbow.Types
import Test.QuickCheck
-- | Generates short lowercase ASCII 'X.Text' values; shrinking goes
-- through the underlying 'String' so QuickCheck's list shrinker applies.
instance Arbitrary X.Text where
  arbitrary = fmap X.pack $ listOf genChar
    where
    genChar = elements ['a'..'z']
  shrink = fmap X.pack . shrink . X.unpack

-- | Perturbs generators by the character list of the text.
instance CoArbitrary X.Text where
  coarbitrary = coarbitrary . X.unpack

-- | Wraps an arbitrary payload in the 'Color' constructor.
-- NOTE(review): the 'Typeable' constraint appears to be required by
-- 'genericShrink' on this type -- confirm against Rainbow.Types.
instance (Typeable a, Arbitrary a) => Arbitrary (Color a) where
  arbitrary = Color <$> arbitrary
  shrink = genericShrink

instance CoArbitrary a => CoArbitrary (Color a) where
  coarbitrary (Color a) = coarbitrary a

-- | 'variant' specialised to 'Int' seeds, used by the hand-written
-- 'CoArbitrary' instances below.
varInt :: Int -> Gen b -> Gen b
varInt = variant
-- | Uniform choice over the eight 'Enum8' constructors.
instance Arbitrary Enum8 where
  arbitrary = elements [E0, E1, E2, E3, E4, E5, E6, E7]
  shrink = genericShrink

-- | Each constructor perturbs the generator with its own distinct
-- 'variant' seed.
instance CoArbitrary Enum8 where
  coarbitrary e = varInt (seedOf e)
    where
      seedOf :: Enum8 -> Int
      seedOf E0 = 0
      seedOf E1 = 1
      seedOf E2 = 2
      seedOf E3 = 3
      seedOf E4 = 4
      seedOf E5 = 5
      seedOf E6 = 6
      seedOf E7 = 7
-- | Builds a 'Format' from eight independently generated fields.
instance Arbitrary Format where
  arbitrary =
    Format <$> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary
           <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary
  shrink = genericShrink

-- | Composes the coarbitrary perturbations of all eight fields, in
-- field order.
instance CoArbitrary Format where
  coarbitrary (Format f0 f1 f2 f3 f4 f5 f6 f7) =
    foldr (.) id (map coarbitrary [f0, f1, f2, f3, f4, f5, f6, f7])
-- | Three independently generated fields; 'Typeable' is presumably
-- needed by 'genericShrink' here -- confirm against Rainbow.Types.
instance (Arbitrary a, Typeable a) => Arbitrary (Style a) where
  arbitrary = Style <$> arbitrary <*> arbitrary <*> arbitrary
  shrink = genericShrink

instance CoArbitrary a => CoArbitrary (Style a) where
  coarbitrary (Style a b c)
    = coarbitrary a
    . coarbitrary b
    . coarbitrary c

instance Arbitrary Scheme where
  arbitrary = Scheme <$> arbitrary <*> arbitrary
  shrink = genericShrink

instance CoArbitrary Scheme where
  coarbitrary (Scheme a b) = coarbitrary a . coarbitrary b

instance Arbitrary Chunk where
  arbitrary = Chunk <$> arbitrary <*> arbitrary
  shrink = genericShrink

instance CoArbitrary Chunk where
  coarbitrary (Chunk a b)
    = coarbitrary a
    . coarbitrary b

instance Arbitrary Radiant where
  arbitrary = Radiant <$> arbitrary <*> arbitrary
  shrink = genericShrink

instance CoArbitrary Radiant where
  coarbitrary (Radiant a b) = coarbitrary a . coarbitrary b
| massysett/rainbow | tests/Rainbow/QuickCheck.hs | bsd-3-clause | 2,851 | 0 | 13 | 618 | 797 | 407 | 390 | 73 | 1 |
{-# LANGUAGE TemplateHaskell #-}
-- |
-- Module : ForecastIO.V2.Types
-- Copyright : (c) 2015 Devan Stormont
--
-- License : BSD-style
-- Maintainer : stormont@gmail.com
-- Stability : experimental
-- Portability : GHC
--
-- This module defines data types for the various JSON types returned
-- by the <https://developer.forecast.io/docs/v2 Forecast.io > service.
--
-- These definitions are generally straight conversions from the
-- original JSON. Use of the Forecast.io service should return
-- JSON that can be directly decoded into a 'Forecast' object:
--
-- > eitherDecode json :: Either String Forecast
--
-- Some of the 'ByteString' libraries seem not to parse certain unicode
-- characters correctly (or maybe it's an 'Aeson' problem; this hasn't
-- yet been determined). If your decoding fails, you may need to filter
-- out certain of these characters before decoding. In particular, the
-- degree symbol (Unicode character @\\176@) has been known to cause
-- decoding errors.
--
-- Another thing to be wary of is that potentially any field is /not/
-- guaranteed to be returned in the JSON. This effectively makes
-- every definition live within a 'Maybe'.
module ForecastIO.V2.Types
( DataPoint(..)
, DataBlock(..)
, Alerts(..)
, Flags(..)
, Forecast(..)
) where
import Control.Applicative ((<*>), (<$>))
import Control.Monad (mzero)
import Data.Aeson
import Data.Aeson.TH
import Data.Text (Text, pack)
-----------------------------------------------------------
-- EXPORTED
-----------------------------------------------------------
-- | Defines a single data point in the weather forecast. For a full
-- explanation of the various records, please consult the
-- <https://developer.forecast.io/docs/v2 official documentation >.
--
-- Every field is a 'Maybe' because the service does not guarantee that
-- any particular key is present in the JSON (see the module notes).
-- The @...Error@ fields appear to carry the service's error estimates
-- for the corresponding measurements -- see the API docs for details.
data DataPoint =
  DataPoint
    { dataPoint_time :: Maybe Int
    , dataPoint_summary :: Maybe Text
    , dataPoint_icon :: Maybe Text
    , dataPoint_sunriseTime :: Maybe Int
    , dataPoint_sunsetTime :: Maybe Int
    , dataPoint_moonPhase :: Maybe Double
    , dataPoint_moonPhaseError :: Maybe Double
    , dataPoint_nearestStormDistance :: Maybe Double
    , dataPoint_nearestStormDistanceError :: Maybe Double
    , dataPoint_nearestStormBearing :: Maybe Double
    , dataPoint_nearestStormBearingError :: Maybe Double
    , dataPoint_precipIntensity :: Maybe Double
    , dataPoint_precipIntensityError :: Maybe Double
    , dataPoint_precipIntensityMax :: Maybe Double
    , dataPoint_precipIntensityMaxError :: Maybe Double
    , dataPoint_precipIntensityMaxTime :: Maybe Int
    , dataPoint_precipProbability :: Maybe Double
    , dataPoint_precipProbabilityError :: Maybe Double
    , dataPoint_precipType :: Maybe Text
    , dataPoint_precipAccumulation :: Maybe Double
    , dataPoint_precipAccumulationError :: Maybe Double
    , dataPoint_temperature :: Maybe Double
    , dataPoint_temperatureError :: Maybe Double
    , dataPoint_temperatureMin :: Maybe Double
    , dataPoint_temperatureMinError :: Maybe Double
    , dataPoint_temperatureMinTime :: Maybe Int
    , dataPoint_temperatureMax :: Maybe Double
    , dataPoint_temperatureMaxError :: Maybe Double
    , dataPoint_temperatureMaxTime :: Maybe Int
    , dataPoint_apparentTemperature :: Maybe Double
    , dataPoint_apparentTemperatureError :: Maybe Double
    , dataPoint_apparentTemperatureMin :: Maybe Double
    , dataPoint_apparentTemperatureMinError :: Maybe Double
    , dataPoint_apparentTemperatureMinTime :: Maybe Int
    , dataPoint_apparentTemperatureMax :: Maybe Double
    , dataPoint_apparentTemperatureMaxError :: Maybe Double
    , dataPoint_apparentTemperatureMaxTime :: Maybe Int
    , dataPoint_dewPoint :: Maybe Double
    , dataPoint_dewPointError :: Maybe Double
    , dataPoint_windSpeed :: Maybe Double
    , dataPoint_windSpeedError :: Maybe Double
    , dataPoint_windBearing :: Maybe Double
    , dataPoint_windBearingError :: Maybe Double
    , dataPoint_cloudCover :: Maybe Double
    , dataPoint_cloudCoverError :: Maybe Double
    , dataPoint_humidity :: Maybe Double
    , dataPoint_humidityError :: Maybe Double
    , dataPoint_pressure :: Maybe Double
    , dataPoint_pressureError :: Maybe Double
    , dataPoint_visibility :: Maybe Double
    , dataPoint_visibilityError :: Maybe Double
    , dataPoint_ozone :: Maybe Double
    , dataPoint_ozoneError :: Maybe Double
    } deriving (Show,Read)
-- | Defines a summary "block" of information that can contain multiple
-- 'DataPoint's.
data DataBlock =
  DataBlock
    { dataBlock_summary :: Maybe Text
    , dataBlock_icon :: Maybe Text
    , dataBlock_data :: Maybe [DataPoint]
    } deriving (Show,Read)

-- | Defines severe weather alerts that may be being broadcast by a
-- variety of weather services.
data Alerts =
  Alerts
    { alerts_title :: Maybe Text
    , alerts_expires :: Maybe Int     -- timestamp; units not specified here
    , alerts_description :: Maybe Text
    , alerts_uri :: Maybe Text
    } deriving (Show,Read)

-- | 'Flags' define general information about the returned data.
--
-- The corresponding JSON keys contain hyphens (see the @key_*@
-- constants below), which is why this type has hand-written
-- 'FromJSON'\/'ToJSON' instances rather than TH-derived ones.
data Flags =
  Flags
    { flags_darksky_unavailable :: Maybe Text
    , flags_darksky_stations :: Maybe [Text]
    , flags_datapoint_stations :: Maybe [Text]
    , flags_isd_stations :: Maybe [Text]
    , flags_lamp_stations :: Maybe [Text]
    , flags_madis_stations :: Maybe [Text]
    , flags_metar_stations :: Maybe [Text]
    , flags_metno_license :: Maybe Text
    , flags_sources :: Maybe [Text]
    , flags_units :: Maybe Text
    } deriving (Show,Read)

-- | This is the container type for the returned data. You /should/
-- be able to just directly take the downloaded JSON and transform
-- it into this data type.
data Forecast =
  Forecast
    { forecast_latitude :: Maybe Double
    , forecast_longitude :: Maybe Double
    , forecast_timezone :: Maybe Text
    , forecast_offset :: Maybe Double
    , forecast_currently :: Maybe DataPoint
    , forecast_minutely :: Maybe DataBlock
    , forecast_hourly :: Maybe DataBlock
    , forecast_daily :: Maybe DataBlock
    , forecast_alerts :: Maybe [Alerts]
    , forecast_flags :: Maybe Flags
    } deriving (Show,Read)
-----------------------------------------------------------
-- INTERNAL
-----------------------------------------------------------
-- JSON keys for the 'Flags' object.  Most of these contain hyphens,
-- which presumably is why the 'Flags' instances are written by hand
-- instead of being derived with a simple prefix-dropping
-- 'fieldLabelModifier'.  Explicit signatures avoid relying on type
-- defaulting for these top-level bindings.
key_sources, key_isd_stations, key_madis_stations, key_metar_stations,
  key_lamp_stations, key_datapoint_stations, key_darksky_stations,
  key_darksky_unavailable, key_metno_license, key_units :: Text
key_sources = pack "sources"
key_isd_stations = pack "isd-stations"
key_madis_stations = pack "madis-stations"
key_metar_stations = pack "metar-stations"
key_lamp_stations = pack "lamp-stations"
key_datapoint_stations = pack "datapoint-stations"
key_darksky_stations = pack "darksky-stations"
key_darksky_unavailable = pack "darksky-unavailable"
key_metno_license = pack "metno-license"
key_units = pack "units"
-- Hand-written because the JSON keys are hyphenated (see the @key_*@
-- constants).  All fields are optional ('.:?'); any non-object JSON
-- value fails the parse via 'mzero'.
instance FromJSON Flags where
  parseJSON (Object x) = Flags
    <$> (x .:? key_darksky_unavailable)
    <*> (x .:? key_darksky_stations)
    <*> (x .:? key_datapoint_stations)
    <*> (x .:? key_isd_stations)
    <*> (x .:? key_lamp_stations)
    <*> (x .:? key_madis_stations)
    <*> (x .:? key_metar_stations)
    <*> (x .:? key_metno_license)
    <*> (x .:? key_sources)
    <*> (x .:? key_units)
  parseJSON _ = mzero
-- Hand-written for the same hyphenated keys.  Every key is always
-- emitted; 'Nothing' fields are encoded by aeson as JSON @null@.
instance ToJSON Flags where
  toJSON x = object
    [ key_darksky_unavailable .= flags_darksky_unavailable x
    , key_darksky_stations .= flags_darksky_stations x
    , key_datapoint_stations .= flags_datapoint_stations x
    , key_isd_stations .= flags_isd_stations x
    , key_lamp_stations .= flags_lamp_stations x
    , key_madis_stations .= flags_madis_stations x
    , key_metar_stations .= flags_metar_stations x
    , key_metno_license .= flags_metno_license x
    , key_sources .= flags_sources x
    , key_units .= flags_units x
    ]
-- Derive To/FromJSON for the remaining types by stripping the record
-- prefix from each field name: drop 10 removes "dataBlock_" /
-- "dataPoint_", drop 7 removes "alerts_", drop 9 removes "forecast_".
$(deriveJSON (defaultOptions { fieldLabelModifier = drop 10 }) ''DataBlock)
$(deriveJSON (defaultOptions { fieldLabelModifier = drop 10 }) ''DataPoint)
$(deriveJSON (defaultOptions { fieldLabelModifier = drop 7 }) ''Alerts)
$(deriveJSON (defaultOptions { fieldLabelModifier = drop 9 }) ''Forecast)
| stormont/forecast-io | ForecastIO/V2/Types.hs | bsd-3-clause | 9,020 | 0 | 17 | 2,553 | 1,439 | 813 | 626 | 146 | 1 |
{-# OPTIONS_GHC -O2 #-}
module Main where
import Control.Monad (when)
import Data.Char (toUpper)
import System.Environment

import Help
import Plotter
import Settings
-- | Fallback command-line values, used when the corresponding flag is
-- not supplied.  (Previously bound via a partial list pattern that
-- left 'defaultConfigPath' without a type signature.)
defaultWavePath, defaultBMPPath, defaultSize, defaultDuration,
  defaultConfigPath :: String
defaultWavePath = "input.wav"
defaultBMPPath = "output.bmp"
defaultSize = "2048"
defaultDuration = "0"
defaultConfigPath = "Vinyl.conf"
-- | Command-line flag letters: @-s@ size, @-i@ input WAV, @-d@
-- duration, @-o@ output BMP, @-c@ config file (see 'defaultArgs').
-- 'Read' is used by 'argParser' to parse the upper-cased flag text.
data CONTROL = S | I | D | O | C deriving (Eq,Show,Read)
-- | Fold flag\/value pairs from the raw argument list into a lookup
-- table.  Arguments are consumed two at a time; a pair whose first
-- element starts with @-@ yields one entry (the flag letter is read
-- case-insensitively via 'toUpper'), any other pair is dropped.  The
-- defaults are appended when fewer than two arguments remain, so
-- 'lookup' on the result prefers explicitly passed flags.
argParser :: [String] -> [(CONTROL, String)] -> [(CONTROL, String)]
argParser (a : r : gs) defl = ret ++ argParser gs defl
  where
    ret = case a of
      '-' : a' -> [(read (map toUpper a') :: CONTROL, r)]
      _ -> []
argParser _ defl = defl
-- | Default flag table; 'argParser' appends this after any parsed
-- flags, so explicit arguments win in 'lookup'.
defaultArgs :: [(CONTROL, String)]
defaultArgs = [(S,defaultSize) , (I,defaultWavePath) , (D,defaultDuration) , (O,defaultBMPPath) , (C,defaultConfigPath)]
-- | Parse command-line flags, load the configuration and render the
-- waveform plot.
main :: IO ()
main = do
  args <- getArgs
  when ("--help" `elem` args) printHelpAndDie
  -- The irrefutable Just pattern is safe: 'defaultArgs' supplies every
  -- key, so each 'lookup' must succeed.
  let table = argParser args defaultArgs
      Just [sz', inp, dur', outp, conf] = mapM (`lookup` table) [S, I, D, O, C]
  let sz = read sz'
      dur = read dur'
  settings' <- loadConfig conf
  -- A zero duration means "use the whole file": only a non-zero value
  -- is pushed into the settings.
  let settings = if dur /= 0 then (Duration, dur) : settings' else settings'
  plotter <- buildPlotter settings inp
  printFigure outp sz $ calcPoints sz plotter
| madbirddiary/Vinyl | src/Vinyl.hs | bsd-3-clause | 1,224 | 0 | 14 | 289 | 444 | 249 | 195 | 26 | 3 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Cardano.WalletClient (
withdraw
, randomAmount
) where
import Cardano.Wallet.API.V1.Types (Payment (..), V1 (..))
import qualified Cardano.Wallet.API.V1.Types as V1
import qualified Control.Concurrent.STM.TBQueue as TBQ
import Control.Concurrent.STM.TMVar (TMVar, newEmptyTMVar, takeTMVar)
import Control.Lens (re, to)
import Control.Monad.IO.Class (liftIO)
import Control.Monad.Reader
import Crypto.Hash (Blake2b_256, Digest)
import qualified Crypto.Hash as CryptoHash
import qualified Data.ByteArray as BA
import Data.ByteString (ByteString)
import Data.List.NonEmpty (NonEmpty (..))
import Data.Text.Strict.Lens (utf8)
import Pos.Client.Txp.Util (InputSelectionPolicy (..))
import Pos.Core (Address (..), Coin (..))
import Pos.Crypto.Signing (PassPhrase)
import System.Random
import System.Wlog (logError, logInfo, withSublogger)
import Universum
import Cardano.Faucet.Types
-- | Pick the amount of ADA (in lovelace) for 'withdraw': the
-- configured base amount plus a uniform random offset in
-- @[-variation, +variation]@.
randomAmount :: (MonadIO m) => PaymentDistribution -> m Int
randomAmount (PaymentDistribution amt var) = do
    scale <- liftIO (randomRIO (-1, 1) :: IO Float)
    pure (round (fromIntegral amt + fromIntegral var * scale))
-- | Client function for the handler for the @/withdraw@ action
--
-- Simply sends a 'randomAmount' of ADA (units in lovelace) to the
-- supplied 'Address', by enqueueing a 'Payment' for the withdrawal
-- processor and waiting for its result.
withdraw :: (MonadFaucet c m) => V1 Address -> m (Either WithdrawalQFull WithdrawalResult)
withdraw addr = withSublogger "WalletClient.withdraw" $ do
    paymentSource <- view (feSourceWallet . to cfgToPaymentSource)
    spendingPassword <- view (feSourceWallet . srcSpendingPassword)
    -- Amount in lovelace, drawn from the configured payment distribution
    coin <- V1 . Coin . fromIntegral
              <$> (randomAmount =<< view (feFaucetConfig . fcPaymentDistribution))
    q <- view feWithdrawalQ
    let paymentDist = (V1.PaymentDistribution addr coin :| [])
        -- The textual password is converted to UTF-8 bytes ('re utf8')
        -- and hashed into the wallet API's expected form
        sp = spendingPassword <&> view (re utf8 . to hashPwd . to V1)
        gp = Just (V1 OptimizeForHighThroughput)
        payment = Payment paymentSource paymentDist gp sp
    eRes <- liftIO $ sendToQueue q payment
    case eRes of
        Left e -> do
            logError "Queue is full"
            return $ Left e
        Right tvar -> do
            logInfo "Waiting for processing result"
            -- Blocks until the withdrawal processor fills the TMVar
            liftIO $ (Right <$> atomically (takeTMVar tvar))
-- | Sends the 'Payment' to the processor queue
--
-- Returns a 'TMVar' to wait on for the response from the node, or
-- 'Left WithdrawalQFull' when the queue has no room.
-- See 'Cardano.Faucet.Init.processWithdrawals'
sendToQueue
    :: TBQ.TBQueue ProcessorPayload
    -> Payment
    -> IO (Either WithdrawalQFull (TMVar WithdrawalResult))
sendToQueue q payment = atomically $ do
    -- Check-then-write is race-free here: both steps run inside one
    -- STM transaction, so the queue cannot fill up in between.
    isFull <- TBQ.isFullTBQueue q
    if isFull
      then return $ Left WithdrawalQFull
      else do
        resTMVar <- newEmptyTMVar
        TBQ.writeTBQueue q (ProcessorPayload payment resTMVar)
        return $ Right resTMVar
-- | Hash a bytestring password into the form expected by the wallet
-- API: a Blake2b-256 digest of the raw bytes.
hashPwd :: ByteString -> PassPhrase
hashPwd pwd = BA.convert digest
  where
    digest :: Digest Blake2b_256
    digest = CryptoHash.hash pwd
| input-output-hk/pos-haskell-prototype | faucet/src/Cardano/WalletClient.hs | mit | 3,414 | 0 | 18 | 827 | 836 | 456 | 380 | 66 | 2 |
-- ghci
-- :load C:\Users\Thomas\Documents\GitHub\algorithms.practice\algorithms.practice\algorithms.haskell.javascript\Haskell\DivideAndConquer\MergeSortCountInversions.hs
-- :r
-- :set +s for times
module MergeSortCountInversions where
x = 2 | Sobieck00/practice | algorithms/visualstudio/algorithms.nonvisualstudio/Haskell/DivideAndConquer/MergeSortCountInversions.hs | mit | 250 | 0 | 4 | 21 | 13 | 10 | 3 | 2 | 1 |
{- |
Module : ./Syntax/ToXml.hs
Description : xml output of Hets specification libaries
Copyright : (c) Ewaryst Schulz, Uni Bremen 2009
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Ewaryst.Schulz@dfki.de
Stability : provisional
Portability : non-portable(Grothendieck)
Xml printing of Hets specification libaries
-}
module Syntax.ToXml (xmlLibDefn) where
import Syntax.AS_Structured
import Syntax.Print_AS_Structured
import Syntax.AS_Library
import Syntax.Print_AS_Library ()
import Common.AS_Annotation
import Common.Id
import Common.IRI
import Common.Item
import Common.LibName
import Common.Result
import Common.DocUtils
import Common.GlobalAnnotations
import Common.ToXml
import Common.XUpdate
import Logic.Logic
import Logic.Grothendieck
import Text.XML.Light
import Data.Maybe
-- | Render an IRI without surrounding angle brackets.
iriToStr :: IRI -> String
iriToStr = iriToStringShortUnsecure . setAngles False

-- | Top-level entry point: render a whole library definition as a
-- @Lib@ element carrying the library name and range attributes,
-- followed by its global annotations and items.
xmlLibDefn :: LogicGraph -> GlobalAnnos -> LIB_DEFN -> Element
xmlLibDefn lg ga (Lib_defn n il rg an) =
  add_attrs (mkNameAttr (show $ setAngles False $ getLibId n) : rgAttrs rg)
  $ unode "Lib" $ annos "Global" ga an ++ libItems lg ga il
-- | Render the library items in order, threading the current logic:
-- a logic declaration switches the logic used for all following items.
libItems :: LogicGraph -> GlobalAnnos -> [Annoted LIB_ITEM] -> [Element]
libItems _ _ [] = []
libItems lg ga (i : rest) =
  annoted libItem lg ga i : libItems nextLg ga rest
  where
    nextLg = case item i of
      Logic_decl aa _ -> setLogicName aa lg
      _ -> lg
-- | Fallback rendering: pretty-print the construct verbatim inside an
-- @Unsupported@ element.
unsupported :: PrettyLG a => LogicGraph -> GlobalAnnos -> a -> Element
unsupported lg ga a =
  unode "Unsupported" (show (useGlobalAnnos ga (prettyLG lg a)))
-- | Render one library item; constructs without a dedicated XML form
-- fall through to 'unsupported'.
libItem :: LogicGraph -> GlobalAnnos -> LIB_ITEM -> Element
libItem lg ga li = case li of
  Spec_defn n g as rg ->
    add_attrs (mkNameAttr (iriToStr n) : rgAttrs rg)
    $ unode "SpecDefn" $ genericity lg ga g ++ [annoted spec lg ga as]
  View_defn n g (View_type from to _) mapping rg ->
    add_attrs (mkNameAttr (iriToStr n) : rgAttrs rg)
    $ unode "ViewDefn" $ genericity lg ga g
      ++ [ unode "Source" $ annoted spec lg ga from
         , unode "Target" $ annoted spec lg ga to ]
      ++ concatMap (gmapping ga) mapping
  Download_items n mapping rg ->
    add_attrs (mkNameAttr (show $ getLibId n) : rgAttrs rg)
    $ unode "Import" $ downloadItems mapping
  Logic_decl n rg ->
    add_attrs (mkNameAttr (showDoc n "") : rgAttrs rg)
    $ unode "Logic" ()
  _ -> unsupported lg ga li

-- | Render the item list of a download (import) declaration.
downloadItems :: DownloadItems -> [Element]
downloadItems d = case d of
  ItemMaps l -> map itemNameOrMap l
  UniqueItem i -> [add_attr (mkAttr "as" $ iriToStr i)
                   $ unode "Item" ()]
-- | Render a structured specification.  Each constructor maps to one
-- element; 'withRg' attaches range attributes where a range is
-- available.  Unknown constructors fall back to 'unsupported'.
spec :: LogicGraph -> GlobalAnnos -> SPEC -> Element
spec lg ga s = case s of
  Basic_spec bs rg -> withRg rg $ gBasicSpec lg ga bs
  EmptySpec rg -> withRg rg $ unode "Empty" ()
  Translation as (Renaming m _) ->
    unode "Translation" $ annoted spec lg ga as : concatMap (gmapping ga) m
  Reduction as m ->
    unode "Restriction" $ annoted spec lg ga as : restriction ga m
  Union asl rg -> withRg rg $ unode "Union"
    $ map (unode "Spec" . annoted spec lg ga) asl
  Extension asl rg -> withRg rg $ unode "Extension"
    $ map (unode "Spec" . annoted spec lg ga) asl
  Free_spec as rg -> withRg rg $ unode "Free" $ annoted spec lg ga as
  Cofree_spec as rg -> withRg rg $ unode "Cofree" $ annoted spec lg ga as
  Minimize_spec as rg -> withRg rg $ unode "Minimize" $ annoted spec lg ga as
  Local_spec as ins rg -> withRg rg $ unode "Local"
    [ unode "Spec" $ annoted spec lg ga as
    , unode "Within" $ annoted spec lg ga ins]
  Closed_spec as rg -> withRg rg $ unode "Closed" $ annoted spec lg ga as
  Group as rg -> withRg rg $ unode "Group" $ annoted spec lg ga as
  Spec_inst n fa _ rg ->
    add_attrs (mkNameAttr (iriToStr n) : rgAttrs rg)
    $ unode "Actuals" $ map (annoted fitArg lg ga) fa
  Qualified_spec ln as rg -> withRg rg $ unode "Qualified"
    [prettyElem "Logic" ga ln, annoted spec (setLogicName ln lg) ga as]
  Data l1 _ s1 s2 rg ->
    -- the data part s1 is rendered in the data logic l1,
    -- the rest s2 stays in the current logic
    add_attrs (mkAttr "data-logic" (show l1) : rgAttrs rg)
    $ unode "Data" [ annoted spec (setCurLogic (show l1) lg) ga s1
                   , annoted spec lg ga s2]
  _ -> unsupported lg ga s

-- | Render an actual parameter of a spec instantiation.
fitArg :: LogicGraph -> GlobalAnnos -> FIT_ARG -> Element
fitArg lg ga fa = case fa of
  Fit_spec as m rg -> withRg rg $ unode "Spec"
    $ annoted spec lg ga as : concatMap (gmapping ga) m
  Fit_view n fargs rg ->
    add_attrs (mkNameAttr (iriToStr n) : rgAttrs rg)
    $ unode "Spec" $ unode "Actuals" $ map (annoted fitArg lg ga) fargs
-- | Render one imported item; an optional rename becomes an @as@
-- attribute.
itemNameOrMap :: ItemNameMap -> Element
itemNameOrMap (ItemNameMap name m) =
  add_attrs (mkNameAttr (iriToStr name) : renameAttr) (unode "Item" ())
  where
    renameAttr = maybe [] (\as -> [mkAttr "as" (iriToStr as)]) m
-- | Render a renaming: either symbol maps or a logic translation.
gmapping :: GlobalAnnos -> G_mapping -> [Element]
gmapping ga gm = case gm of
  G_symb_map l -> subnodes "Mapping" $ gSymbMapItemList ga l
  G_logic_translation lc -> [ add_attrs (logicCode lc)
                              $ unode "Logictranslation" () ]

-- | Render a hiding: either a symbol list or a logic projection.
ghiding :: GlobalAnnos -> G_hiding -> Element
ghiding ga gm = case gm of
  G_symb_list l -> unode "Hiding" $ gSymbItemList ga l
  G_logic_projection lc -> add_attrs (logicCode lc)
    $ unode "Logicprojection" ()

-- | Render a logic-specific basic spec via its generic 'Item' form.
gBasicSpec :: LogicGraph -> GlobalAnnos -> G_basic_spec -> Element
gBasicSpec lg ga (G_basic_spec lid bs) = itemToXml lg ga $ toItem lid bs

-- | Render the parameters and imports of a generic spec.
-- NOTE(review): when there are no parameters, the imports are dropped
-- as well -- presumably intentional, but worth confirming.
genericity :: LogicGraph -> GlobalAnnos -> GENERICITY -> [Element]
genericity lg ga (Genericity (Params pl) (Imported il) rg) =
  if null pl then [] else
    unode "Parameters" (spec lg ga $ Union pl rg)
    : if null il then [] else
        [ unode "Imports" $ spec lg ga $ Union il rg ]
-- | Render a restriction: hidden symbols or revealed symbol maps.
restriction :: GlobalAnnos -> RESTRICTION -> [Element]
restriction ga restr = case restr of
  Hidden m _ -> map (ghiding ga) m
  Revealed m _ -> gSymbMapItemList ga m

-- | Pretty-print symbol items as @SymbItems@ elements.
gSymbItemList :: GlobalAnnos -> G_symb_items_list -> [Element]
gSymbItemList ga (G_symb_items_list _ l) = map (prettyElem "SymbItems" ga) l

-- | Pretty-print symbol map items as @SymbMapItems@ elements.
gSymbMapItemList :: GlobalAnnos -> G_symb_map_items_list -> [Element]
gSymbMapItemList ga (G_symb_map_items_list _ l) =
  map (prettyElem "SymbMapItems" ga) l
-- | Turn a logic code into @encoding@\/@source@\/@target@ attributes,
-- omitting each one that is absent.
logicCode :: Logic_code -> [Attr]
logicCode (Logic_code enc src trg _) =
  concat
    [ maybe [] (\t -> [mkAttr "encoding" t]) enc
    , maybe [] (\l -> [mkAttr "source" (show (pretty l))]) src
    , maybe [] (\l -> [mkAttr "target" (show (pretty l))]) trg
    ]
-- | True when an annotated item carries no information at all: no
-- range attributes, no item attributes, no doc text, no annotations,
-- and (recursively) only empty children.  Used to prune the item tree.
isEmptyItem :: Annoted Item -> Bool
isEmptyItem ai =
  let i = item ai
      IT _ attrs mdoc = itemType i
  in null (rgAttrs $ range i) && null attrs && isNothing mdoc
     && null (l_annos ai) && null (r_annos ai)
     && all isEmptyItem (items i)

-- | Render a generic 'Item' tree: attributes and range become XML
-- attributes, the optional doc becomes text content, and non-empty
-- children are rendered recursively.
itemToXml :: LogicGraph -> GlobalAnnos -> Item -> Element
itemToXml lg ga i =
  let IT name attrs mdoc = itemType i
  in add_attrs (map (uncurry mkAttr) attrs ++ rgAttrs (range i))
     $ unode name $ (case mdoc of
         Nothing -> []
         Just d -> [mkText $ show $ useGlobalAnnos ga d])
       ++ map (Elem . annoted itemToXml lg ga)
          (filter (not . isEmptyItem) $ items i)
-- | Range attributes without the file name (the source name is blanked
-- before printing).
rgAttrs :: Range -> [Attr]
rgAttrs = rangeAttrsF $ show . prettyRange . map (\ p -> p { sourceName = "" })

-- | Wrap the rendered annotations in elements named @str@.
annos :: String -> GlobalAnnos -> [Annotation] -> [Element]
annos str ga = subnodes str
  . map (annotationF rgAttrs ga)

-- | Render an annotated value: left annotations are spliced before and
-- right annotations after the element's own content.
annoted :: (LogicGraph -> GlobalAnnos -> a -> Element) -> LogicGraph
  -> GlobalAnnos -> Annoted a -> Element
annoted f lg ga a = let
  e = f lg ga $ item a
  l = annos "Left" ga $ l_annos a
  r = annos "Right" ga $ r_annos a
  in e { elContent = map Elem l ++ elContent e ++ map Elem r }
-- | Attach range attributes unless the element already carries a
-- @range@ attribute.
withRg :: Range -> Element -> Element
withRg r e
  | isJust (getAttrVal "range" e) = e
  | otherwise = add_attrs (rgAttrs r) e
| spechub/Hets | Syntax/ToXml.hs | gpl-2.0 | 7,670 | 0 | 17 | 1,744 | 3,009 | 1,451 | 1,558 | 169 | 16 |
module FunArity2 where

-- Test fixture for static-error reporting: the equations of 'zipp'
-- deliberately use different arities (one argument vs. two), which is
-- apparently the "function arity" error this file exercises (it lives
-- under the staticerrors tests).  Do not "fix" the arities.
zipp [] = []
zipp [] ys = []
zipp xs [] = []
zipp (x:xs) (y:ys) = (x, y) : zipp xs ys
| roberth/uu-helium | test/staticerrors/FunArity2.hs | gpl-3.0 | 110 | 0 | 7 | 28 | 83 | 44 | 39 | -1 | -1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.Route53.UpdateHostedZoneComment
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- To update the hosted zone comment, send a 'POST' request to the
-- '2013-04-01\/hostedzone\/hosted zone ID' resource. The request body must
-- include an XML document with a 'UpdateHostedZoneCommentRequest' element.
-- The response to this request includes the modified 'HostedZone' element.
--
-- The comment can have a maximum length of 256 characters.
--
-- /See:/ <http://docs.aws.amazon.com/Route53/latest/APIReference/API_UpdateHostedZoneComment.html AWS API Reference> for UpdateHostedZoneComment.
module Network.AWS.Route53.UpdateHostedZoneComment
(
-- * Creating a Request
updateHostedZoneComment
, UpdateHostedZoneComment
-- * Request Lenses
, uhzcComment
, uhzcId
-- * Destructuring the Response
, updateHostedZoneCommentResponse
, UpdateHostedZoneCommentResponse
-- * Response Lenses
, uhzcrsResponseStatus
, uhzcrsHostedZone
) where
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
import Network.AWS.Route53.Types
import Network.AWS.Route53.Types.Product
-- | A complex type that contains information about the request to update a
-- hosted zone comment.
--
-- /See:/ 'updateHostedZoneComment' smart constructor.
data UpdateHostedZoneComment = UpdateHostedZoneComment'
    { _uhzcComment :: !(Maybe Text) -- ^ optional new comment
    , _uhzcId :: !Text              -- ^ hosted zone ID
    } deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Build an 'UpdateHostedZoneComment' request for the given hosted
-- zone ID.  The optional comment ('uhzcComment') starts out as
-- 'Nothing'; set it with the lens before sending.
updateHostedZoneComment
    :: Text -- ^ 'uhzcId'
    -> UpdateHostedZoneComment
updateHostedZoneComment zoneId =
    UpdateHostedZoneComment'
    { _uhzcComment = Nothing
    , _uhzcId = zoneId
    }
-- | A comment about your hosted zone.
uhzcComment :: Lens' UpdateHostedZoneComment (Maybe Text)
uhzcComment = lens _uhzcComment (\ s a -> s{_uhzcComment = a});

-- | The ID of the hosted zone you want to update.
uhzcId :: Lens' UpdateHostedZoneComment Text
uhzcId = lens _uhzcId (\ s a -> s{_uhzcId = a});

-- Request plumbing (generated code): POST the XML body to the
-- hosted-zone resource and decode the status plus the returned
-- @HostedZone@ element.
instance AWSRequest UpdateHostedZoneComment where
        type Rs UpdateHostedZoneComment =
             UpdateHostedZoneCommentResponse
        request = postXML route53
        response
          = receiveXML
              (\ s h x ->
                 UpdateHostedZoneCommentResponse' <$>
                   (pure (fromEnum s)) <*> (x .@ "HostedZone"))

instance ToElement UpdateHostedZoneComment where
        toElement
          = mkElement
              "{https://route53.amazonaws.com/doc/2013-04-01/}UpdateHostedZoneCommentRequest"

instance ToHeaders UpdateHostedZoneComment where
        toHeaders = const mempty

instance ToPath UpdateHostedZoneComment where
        toPath UpdateHostedZoneComment'{..}
          = mconcat ["/2013-04-01/hostedzone/", toBS _uhzcId]

instance ToQuery UpdateHostedZoneComment where
        toQuery = const mempty

-- Only the comment goes into the XML body; the zone ID travels in the
-- request path.
instance ToXML UpdateHostedZoneComment where
        toXML UpdateHostedZoneComment'{..}
          = mconcat ["Comment" @= _uhzcComment]
-- | A complex type containing information about the specified hosted zone
-- after the update.
--
-- /See:/ 'updateHostedZoneCommentResponse' smart constructor.
data UpdateHostedZoneCommentResponse = UpdateHostedZoneCommentResponse'
    { _uhzcrsResponseStatus :: !Int        -- ^ HTTP status of the response
    , _uhzcrsHostedZone :: !HostedZone     -- ^ the modified hosted zone
    } deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Build an 'UpdateHostedZoneCommentResponse' from the HTTP status
-- code and the modified hosted zone returned by Route53.
updateHostedZoneCommentResponse
    :: Int -- ^ 'uhzcrsResponseStatus'
    -> HostedZone -- ^ 'uhzcrsHostedZone'
    -> UpdateHostedZoneCommentResponse
updateHostedZoneCommentResponse status zone =
    UpdateHostedZoneCommentResponse'
    { _uhzcrsResponseStatus = status
    , _uhzcrsHostedZone = zone
    }
-- | The response status code.
uhzcrsResponseStatus :: Lens' UpdateHostedZoneCommentResponse Int
uhzcrsResponseStatus = lens _uhzcrsResponseStatus (\ s a -> s{_uhzcrsResponseStatus = a});

-- | The hosted zone as modified by the request.
uhzcrsHostedZone :: Lens' UpdateHostedZoneCommentResponse HostedZone
uhzcrsHostedZone = lens _uhzcrsHostedZone (\ s a -> s{_uhzcrsHostedZone = a});
| fmapfmapfmap/amazonka | amazonka-route53/gen/Network/AWS/Route53/UpdateHostedZoneComment.hs | mpl-2.0 | 5,157 | 0 | 14 | 987 | 647 | 386 | 261 | 85 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.SWF.CountOpenWorkflowExecutions
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Returns the number of open workflow executions within the given domain
-- that meet the specified filtering criteria.
--
-- This operation is eventually consistent. The results are best effort and
-- may not exactly reflect recent updates and changes.
--
-- __Access Control__
--
-- You can use IAM policies to control this action\'s access to Amazon SWF
-- resources as follows:
--
-- - Use a 'Resource' element with the domain name to limit the action to
-- only specified domains.
-- - Use an 'Action' element to allow or deny permission to call this
-- action.
-- - Constrain the following parameters by using a 'Condition' element
-- with the appropriate keys.
-- - 'tagFilter.tag': String constraint. The key is
-- 'swf:tagFilter.tag'.
-- - 'typeFilter.name': String constraint. The key is
-- 'swf:typeFilter.name'.
-- - 'typeFilter.version': String constraint. The key is
-- 'swf:typeFilter.version'.
--
-- If the caller does not have sufficient permissions to invoke the action,
-- or the parameter values fall outside the specified constraints, the
-- action fails. The associated event attribute\'s __cause__ parameter will
-- be set to OPERATION_NOT_PERMITTED. For details and example IAM policies,
-- see
-- <http://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html Using IAM to Manage Access to Amazon SWF Workflows>.
--
-- /See:/ <http://docs.aws.amazon.com/amazonswf/latest/apireference/API_CountOpenWorkflowExecutions.html AWS API Reference> for CountOpenWorkflowExecutions.
module Network.AWS.SWF.CountOpenWorkflowExecutions
(
-- * Creating a Request
countOpenWorkflowExecutions
, CountOpenWorkflowExecutions
-- * Request Lenses
, coweExecutionFilter
, coweTypeFilter
, coweTagFilter
, coweDomain
, coweStartTimeFilter
-- * Destructuring the Response
, workflowExecutionCount
, WorkflowExecutionCount
-- * Response Lenses
, wecTruncated
, wecCount
) where
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
import Network.AWS.SWF.Types
import Network.AWS.SWF.Types.Product
-- | /See:/ 'countOpenWorkflowExecutions' smart constructor.
data CountOpenWorkflowExecutions = CountOpenWorkflowExecutions'
    { _coweExecutionFilter :: !(Maybe WorkflowExecutionFilter) -- ^ optional; mutually exclusive with the other filters
    , _coweTypeFilter :: !(Maybe WorkflowTypeFilter)           -- ^ optional; mutually exclusive with the other filters
    , _coweTagFilter :: !(Maybe TagFilter)                     -- ^ optional; mutually exclusive with the other filters
    , _coweDomain :: !Text                                     -- ^ required domain name
    , _coweStartTimeFilter :: !ExecutionTimeFilter             -- ^ required start-time criteria
    } deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Build a 'CountOpenWorkflowExecutions' request.
--
-- Only the domain and the start-time filter are mandatory; the three
-- optional filters ('coweExecutionFilter', 'coweTypeFilter',
-- 'coweTagFilter') default to 'Nothing' and are mutually exclusive --
-- set at most one via its lens.
countOpenWorkflowExecutions
    :: Text -- ^ 'coweDomain'
    -> ExecutionTimeFilter -- ^ 'coweStartTimeFilter'
    -> CountOpenWorkflowExecutions
countOpenWorkflowExecutions domain startFilter =
    CountOpenWorkflowExecutions'
    { _coweExecutionFilter = Nothing
    , _coweTypeFilter = Nothing
    , _coweTagFilter = Nothing
    , _coweDomain = domain
    , _coweStartTimeFilter = startFilter
    }
-- | If specified, only workflow executions matching the 'WorkflowId' in the
-- filter are counted.
--
-- 'executionFilter', 'typeFilter' and 'tagFilter' are mutually exclusive.
-- You can specify at most one of these in a request.
coweExecutionFilter :: Lens' CountOpenWorkflowExecutions (Maybe WorkflowExecutionFilter)
coweExecutionFilter = lens _coweExecutionFilter (\ s a -> s{_coweExecutionFilter = a});
-- | Specifies the type of the workflow executions to be counted.
--
-- 'executionFilter', 'typeFilter' and 'tagFilter' are mutually exclusive.
-- You can specify at most one of these in a request.
coweTypeFilter :: Lens' CountOpenWorkflowExecutions (Maybe WorkflowTypeFilter)
coweTypeFilter = lens _coweTypeFilter (\ s a -> s{_coweTypeFilter = a});
-- | If specified, only executions that have a tag that matches the filter
-- are counted.
--
-- 'executionFilter', 'typeFilter' and 'tagFilter' are mutually exclusive.
-- You can specify at most one of these in a request.
coweTagFilter :: Lens' CountOpenWorkflowExecutions (Maybe TagFilter)
coweTagFilter = lens _coweTagFilter (\ s a -> s{_coweTagFilter = a});
-- | The name of the domain containing the workflow executions to count.
coweDomain :: Lens' CountOpenWorkflowExecutions Text
coweDomain = lens _coweDomain (\ s a -> s{_coweDomain = a});
-- | Specifies the start time criteria that workflow executions must meet in
-- order to be counted.
coweStartTimeFilter :: Lens' CountOpenWorkflowExecutions ExecutionTimeFilter
coweStartTimeFilter = lens _coweStartTimeFilter (\ s a -> s{_coweStartTimeFilter = a});
-- | Wire protocol: the request is POSTed as JSON to the SWF service, and
-- the whole JSON response body is decoded into a 'WorkflowExecutionCount'.
instance AWSRequest CountOpenWorkflowExecutions where
        type Rs CountOpenWorkflowExecutions =
             WorkflowExecutionCount
        request = postJSON sWF
        -- Status code and headers are deliberately ignored here; only the
        -- JSON payload carries the result.
        response = receiveJSON (\ s h x -> eitherParseJSON x)
-- | SWF selects the operation via the @X-Amz-Target@ header and uses the
-- AWS JSON 1.0 content type.  Headers are constant ('const') because they
-- never depend on the request payload.
instance ToHeaders CountOpenWorkflowExecutions where
        toHeaders
          = const
              (mconcat
                 ["X-Amz-Target" =#
                    ("SimpleWorkflowService.CountOpenWorkflowExecutions"
                       :: ByteString),
                  "Content-Type" =#
                    ("application/x-amz-json-1.0" :: ByteString)])
-- | Serialise the request body.  The optional filter fields are dropped
-- from the object when 'Nothing' (via 'catMaybes'), while @domain@ and
-- @startTimeFilter@ are always emitted.
instance ToJSON CountOpenWorkflowExecutions where
        toJSON CountOpenWorkflowExecutions'{..}
          = object
              (catMaybes
                 [("executionFilter" .=) <$> _coweExecutionFilter,
                  ("typeFilter" .=) <$> _coweTypeFilter,
                  ("tagFilter" .=) <$> _coweTagFilter,
                  Just ("domain" .= _coweDomain),
                  Just ("startTimeFilter" .= _coweStartTimeFilter)])
-- | All JSON-protocol SWF calls go to the service root; the operation is
-- chosen by the @X-Amz-Target@ header, not by path or query string.
instance ToPath CountOpenWorkflowExecutions where
        toPath = const "/"
-- | No query-string parameters are used for this request.
instance ToQuery CountOpenWorkflowExecutions where
        toQuery = const mempty
| fmapfmapfmap/amazonka | amazonka-swf/gen/Network/AWS/SWF/CountOpenWorkflowExecutions.hs | mpl-2.0 | 6,846 | 0 | 12 | 1,397 | 737 | 456 | 281 | 91 | 1 |
{-|
Module : Modular
Description : Modular Arithmetic module for the MPL DSL
Copyright : (c) Rohit Jha, 2015
License : BSD2
Maintainer : rohit305jha@gmail.com
Stability : Stable
Functionality for:
* Modular addition
* Modular subtraction
* Modular multiplication
* Modular exponentiation
* Checking congruence
* Solving linear congruences
-}
module Modular
(
modAdd,
modSub,
modMult,
modExp,
isCongruent,
findCongruentPair,
findCongruentPair'
)
where
{-|
    Modular addition: @modAdd a b m@ computes @(a + b)@ reduced modulo @m@.
    For example:
    >>> modAdd 17 44 11
    6
    >>> modAdd 126832 1832 11
    8
-}
modAdd :: Integral a => a -> a -> a -> a
modAdd x y modulus = mod (x + y) modulus
{-|
    Modular subtraction: @modSub a b m@ computes @(a - b)@ reduced modulo @m@.
    For example:
    >>> modSub 117 14 11
    4
    >>> modSub 114787 23934 3874
    1751
-}
modSub :: Integral a => a -> a -> a -> a
modSub x y modulus = mod (x - y) modulus
{-|
    Modular multiplication: @modMult a b m@ computes @(a * b)@ reduced
    modulo @m@.  Note that @(a * b) mod m == ((a mod m) * (b mod m)) mod m@.
    For example:
    >>> modMult 117 14 11
    10
    >>> modMult 114787 23934 3874
    2974
-}
modMult :: Integral a => a -> a -> a -> a
modMult x y modulus = mod (x * y) modulus
{-|
    Modular exponentiation, @a ^ b mod m@, via binary (square-and-multiply)
    exponentiation so only @O(log b)@ multiplications are performed.
    For example:
    >>> modExp 12 5 6
    0
    >>> modExp 112 34 546
    532
    >>> modExp 515 5151 1563
    1004
-}
modExp :: (Integral a, Integral a1) => a -> a1 -> a -> a
modExp base ex m = go 1 base ex
  where
    -- go acc x e: invariant acc * x^e == base^ex (mod m)
    go acc _ 0 = acc
    go acc x e
      | even e    = go acc (mod (x * x) m) (div e 2)
      | otherwise = go (mod (acc * x) m) x (e - 1)
{-|
    The 'isCongruent' function checks modular congruency: @isCongruent a b m@
    is 'True' exactly when @a mod m == b@.
    For example:
    >>> isCongruent 132 2 130
    True
    >>> isCongruent 13493 238 234
    False
-}
isCongruent :: Integral a => a -> a -> a -> Bool
isCongruent a b m = b == mod a m
{-|
    Solve the linear congruence @a * x = b (mod m)@ for @x@, returning every
    solution in the range @[0 .. limit]@.
    For example:
    >>> findCongruentPair 5 6 199 100
    [41]
    >>> isCongruent (5 * 41) 6 199
    True
    >>> findCongruentPair 5 6 199 500
    [41,240,439]
-}
findCongruentPair :: Integral t => t -> t -> t -> t -> [t]
findCongruentPair a b m limit =
    filter (\x -> mod (a * x) m == b) [0 .. limit]
{-|
    Solve the additive linear congruence @a + x = b (mod m)@ for @x@,
    returning every solution in the range @[0 .. limit]@.
    For example:
    >>> findCongruentPair' 10 4 5 100
    [4,9,14,19,24,29,34,39,44,49,54,59,64,69,74,79,84,89,94,99]
    >>> findCongruentPair' 113 20 40 100
    [27,67]
    >>> isCongruent (113 + 27) 20 40
    True
    >>> isCongruent (113 + 67) 20 40
    True
-}
findCongruentPair' :: Integral t => t -> t -> t -> t -> [t]
findCongruentPair' a b m limit =
    filter (\x -> mod (a + x) m == b) [0 .. limit]
| rohitjha/DiMPL | src/Modular.hs | bsd-2-clause | 3,325 | 0 | 12 | 1,047 | 529 | 282 | 247 | 28 | 3 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
module SubHask.Compatibility.Cassava
( decode_
, decode
-- * Types
, FromRecord
, ToRecord
, FromField
, ToField
, HasHeader (..)
)
where
import SubHask
import SubHask.Algebra.Array
import SubHask.Algebra.Parallel
import SubHask.Compatibility.ByteString
import qualified Prelude as P
import qualified Data.Csv as C
import Data.Csv (FromRecord, ToRecord, FromField, ToField, HasHeader)
--------------------------------------------------------------------------------
-- instances
-- | Parse a CSV record into a boxed array by reusing cassava's row parser
-- and rebuilding the row with SubHask's 'fromList'.
instance FromField a => FromRecord (BArray a) where
    parseRecord = P.fmap fromList . C.parseRecord
-- | Same as the 'BArray' instance, but for unboxed arrays; the extra
-- 'Constructible'/'Monoid' constraints are what 'fromList' needs here.
instance (Constructible (UArray a), Monoid (UArray a), FromField a) => FromRecord (UArray a) where
    parseRecord = P.fmap fromList . C.parseRecord
--------------------------------------------------------------------------------
-- replacement functions
-- | This is a monoid homomorphism, which means it can be parallelized
--
-- Decode a newline-partitioned chunk of CSV input into a boxed array of
-- records, or return cassava's parse-error string.
decode_ ::
    ( FromRecord a
    ) => HasHeader
      -> PartitionOnNewline (ByteString Char)
      -> Either String (BArray a)
-- The Left case is re-wrapped (not passed through) because SubHask's
-- 'Either' is its own type, distinct from cassava's result type.
decode_ h (PartitionOnNewline (BSLC bs)) = case C.decode h bs of
    Right r -> Right $ BArray r
    Left s -> Left s
-- | Like the "decode" function in Data.Csv, but works in parallel
--
-- Wrapping the input in 'PartitionOnNewline' lets 'parallel' split the work
-- at record boundaries, which is sound because 'decode_' is a monoid
-- homomorphism.
decode ::
    ( NFData a
    , FromRecord a
    , ValidEq a
    ) => HasHeader
      -> ByteString Char
      -> Either String (BArray a)
decode h = parallel (decode_ h) . PartitionOnNewline
| Drezil/subhask | src/SubHask/Compatibility/Cassava.hs | bsd-3-clause | 1,500 | 0 | 10 | 301 | 371 | 203 | 168 | -1 | -1 |
{-# LANGUAGE FlexibleContexts, FlexibleInstances, MultiParamTypeClasses #-}
-- (c) 2002 by Martin Erwig [see file COPYRIGHT]
-- | Static IOArray-based Graphs
module Data.Graph.Inductive.Monad.IOArray(
-- * Graph Representation
SGr(..), GraphRep, Context', USGr,
defaultGraphSize, emptyN,
-- * Utilities
removeDel,
) where
import Data.Graph.Inductive.Graph
import Data.Graph.Inductive.Monad
import Control.Monad
import Data.Array
import Data.Array.IO
import System.IO.Unsafe
----------------------------------------------------------------------
-- GRAPH REPRESENTATION
----------------------------------------------------------------------
-- | A static graph over an IOArray: node count, an immutable array of
-- adjacency contexts, and a mutable array of \"deleted\" marks.
newtype SGr a b = SGr (GraphRep a b)
-- (number of live nodes, contexts indexed by node, deletion marks)
type GraphRep a b = (Int,Array Node (Context' a b),IOArray Node Bool)
-- 'Nothing' means the node slot is unused; otherwise (preds, label, succs).
type Context' a b = Maybe (Adj b,a,Adj b)
-- Unlabelled static graph.
type USGr = SGr () ()
----------------------------------------------------------------------
-- CLASS INSTANCES
----------------------------------------------------------------------
-- Show
--
-- Render every live (non-deleted) node as "\nv:label->succs".
-- NOTE(review): uses 'unsafePerformIO' to peek at the mutable deletion
-- marks from pure code, so the rendering reflects whatever state the
-- array happens to be in when forced.
showGraph :: (Show a,Show b) => GraphRep a b -> String
showGraph (_,a,m) = concatMap showAdj (indices a)
  where showAdj v | unsafePerformIO (readArray m v) = ""
                  | otherwise = case a!v of
                      Nothing -> ""
                      Just (_,l,s) -> '\n':show v++":"++show l++"->"++show s'
                           where s' = unsafePerformIO (removeDel m s)
instance (Show a,Show b) => Show (SGr a b) where
  show (SGr g) = showGraph g
-- Convenience (orphan-style) instance so a graph wrapped in IO can be
-- printed directly; it runs the action with 'unsafePerformIO'.
instance (Show a,Show b) => Show (IO (SGr a b)) where
  show g = unsafePerformIO (do {(SGr g') <- g; return (showGraph g')})
{-
run :: Show (IO a) => IO a -> IO ()
run x = seq x (print x)
-}
-- GraphM
--
-- | Monadic graph operations over the mutable representation.  Deletion is
-- implemented lazily: 'matchM' never rebuilds the context array, it only
-- flips the node's mark in the boolean array and filters marked nodes out
-- of the adjacency lists it returns.
instance GraphM IO SGr where
  emptyM = emptyN defaultGraphSize
  isEmptyM g = do {SGr (n,_,_) <- g; return (n==0)}
  -- Decompose the graph at node v: returns Nothing when v is absent or
  -- already deleted, otherwise its context with deleted neighbours (and the
  -- self-loop back-edge) filtered out, plus the graph with v marked deleted.
  matchM v g = do g'@(SGr (n,a,m)) <- g
                  case a!v of
                    Nothing -> return (Nothing,g')
                    Just (pr,l,su) ->
                       do b <- readArray m v
                          if b then return (Nothing,g') else
                             do s <- removeDel m su
                                p' <- removeDel m pr
                                let p = filter ((/=v).snd) p'
                                writeArray m v True
                                return (Just (p,v,l,s),SGr (n-1,a,m))
  -- Build the context array in two accumulation passes (successors, then
  -- predecessors) and start with every node unmarked.
  mkGraphM vs es = do m <- newArray (1,n) False
                      return (SGr (n,pr,m))
     where nod = array bnds (map (\(v,l)->(v,Just ([],l,[]))) vs)
           su = accum addSuc nod (map (\(v,w,l)->(v,(l,w))) es)
           pr = accum addPre su (map (\(v,w,l)->(w,(l,v))) es)
           bnds = (minimum vs',maximum vs')
           vs' = map fst vs
           n = length vs
           addSuc (Just (p,l',s)) (l,w) = Just (p,l',(l,w):s)
           addSuc Nothing _ = error "mkGraphM (SGr): addSuc Nothing"
           addPre (Just (p,l',s)) (l,w) = Just ((l,w):p,l',s)
           addPre Nothing _ = error "mkGraphM (SGr): addPre Nothing"
  -- List (node, label) pairs for used, not-deleted slots.
  labNodesM g = do (SGr (_,a,m)) <- g
                   let getLNode vs (_,Nothing) = return vs
                       getLNode vs (v,Just (_,l,_)) =
                           do b <- readArray m v
                              return (if b then vs else (v,l):vs)
                   foldM getLNode [] (assocs a)
-- Capacity used by 'emptyM'; node indices must fall in [1 .. this bound].
defaultGraphSize :: Int
defaultGraphSize = 100
-- | An empty graph with capacity for nodes 1..n: all slots unused, no
-- deletion marks set.
emptyN :: Int -> IO (SGr a b)
emptyN n = do m <- newArray (1,n) False
              return (SGr (0,array (1,n) [(i,Nothing) | i <- [1..n]],m))
----------------------------------------------------------------------
-- UTILITIES
----------------------------------------------------------------------
-- | Filter an adjacency list (of successors\/predecessors) through the
-- boolean array of deletion marks, keeping only edges whose endpoint has
-- not been marked deleted.
removeDel :: IOArray Node Bool -> Adj b -> IO (Adj b)
removeDel marks = filterM alive
  where alive (_, v) = do deleted <- readArray marks v
                          return (not deleted)
| scolobb/fgl | Data/Graph/Inductive/Monad/IOArray.hs | bsd-3-clause | 3,974 | 0 | 22 | 1,228 | 1,459 | 792 | 667 | 65 | 2 |
-- |
-- Copyright : (c) Sam Truzjan 2013
-- License : BSD3
-- Maintainer : pxqr.sta@gmail.com
-- Stability : experimental
-- Portability : portable
--
-- BitTorrent uses a \"distributed sloppy hash table\" (DHT) for
-- storing peer contact information for \"trackerless\" torrents. In
-- effect, each peer becomes a tracker.
--
-- Normally you don't need to import other DHT modules.
--
-- For more info see:
-- <http://www.bittorrent.org/beps/bep_0005.html>
--
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeOperators #-}
module Network.BitTorrent.DHT
( -- * Distributed Hash Table
DHT
, Options (..)
, dht
-- * Bootstrapping
-- $bootstrapping-terms
, tNodes
, defaultBootstrapNodes
, resolveHostName
, bootstrap
, isBootstrapped
-- * Initialization
, snapshot
, restore
-- * Operations
, Network.BitTorrent.DHT.lookup
, Network.BitTorrent.DHT.insert
, Network.BitTorrent.DHT.delete
-- * Embedding
-- ** Session
, LogFun
, Node
, defaultHandlers
, newNode
, closeNode
-- ** Monad
, MonadDHT (..)
, runDHT
) where
import Control.Applicative
import Control.Monad.Logger
import Control.Monad.Reader
import Control.Exception
import Data.ByteString as BS
import Data.Conduit as C
import Data.Conduit.List as C
import Network.Socket
import Data.Torrent
import Network.BitTorrent.Address
import Network.BitTorrent.DHT.Query
import Network.BitTorrent.DHT.Session
import Network.BitTorrent.DHT.Routing as T
{-----------------------------------------------------------------------
-- DHT types
-----------------------------------------------------------------------}
-- | Monads into which an IPv4 DHT action can be embedded.
class MonadDHT m where
  liftDHT :: DHT IPv4 a -> m a
-- | The trivial embedding: a DHT action lifts into itself unchanged.
instance MonadDHT (DHT IPv4) where
  liftDHT = id
-- | Run DHT on specified port. <add note about resources>
--
-- The node is created with 'bracket', so its resources are released even
-- if the action throws; logging goes to stderr.
dht :: Address ip
    => Options     -- ^ normally you need to use 'Data.Default.def';
    -> NodeAddr ip -- ^ address to bind this node;
    -> DHT ip a    -- ^ actions to run: 'bootstrap', 'lookup', etc;
    -> IO a        -- ^ result.
dht opts addr action = do
  runStderrLoggingT $ LoggingT $ \ logger -> do
    bracket (newNode defaultHandlers opts addr logger) closeNode $
      \ node -> runDHT node action
{-# INLINE dht #-}
{-----------------------------------------------------------------------
-- Bootstrapping
-----------------------------------------------------------------------}
-- $bootstrapping-terms
--
-- [@Bootstrapping@] DHT @bootstrapping@ is the process of filling
-- routing 'Table' by /good/ nodes.
--
-- [@Bootstrapping time@] Bootstrapping process can take up to 5
-- minutes. Bootstrapping should only happen at first application
-- startup, if possible you should use 'snapshot' & 'restore'
-- mechanism which must work faster.
--
-- [@Bootstrap nodes@] DHT @bootstrap node@ is either:
--
-- * a specialized high performance node maintained by bittorrent
-- software authors\/maintainers, like those listed in
-- 'defaultBootstrapNodes'. /Specialized/ means that those nodes
-- may not support 'insert' queries and is running for the sake of
-- bootstrapping only.
--
-- * an ordinary bittorrent client running DHT node. The list of
-- such bootstrapping nodes usually obtained from
-- 'Data.Torrent.tNodes' field or
-- 'Network.BitTorrent.Exchange.Message.Port' messages.
-- Do not include the following hosts in the default bootstrap nodes list:
--
-- * "dht.aelitis.com" and "dht6.azureusplatform.com" - since
-- Azureus client have a different (and probably incompatible) DHT
-- protocol implementation.
--
-- * "router.utorrent.com" since it is just an alias to
-- "router.bittorrent.com".
-- | List of bootstrap nodes maintained by different bittorrent
-- software authors.  Hostnames still need 'resolveHostName' before use.
defaultBootstrapNodes :: [NodeAddr HostName]
defaultBootstrapNodes =
  [ NodeAddr "router.bittorrent.com" 6881 -- by BitTorrent Inc.

    -- doesn't work at the moment (use git blame) of commit
  , NodeAddr "dht.transmissionbt.com" 6881 -- by Transmission project
  ]
-- TODO Multihomed hosts
-- | Resolve either a numeric network address or a hostname to a
-- numeric IP address of the node. Usually used to resolve
-- 'defaultBootstrapNodes' or 'Data.Torrent.tNodes' lists.
-- Only the first IPv4/datagram address returned by the resolver is used.
resolveHostName :: NodeAddr HostName -> IO (NodeAddr IPv4)
resolveHostName NodeAddr {..} = do
  let hints = defaultHints { addrFamily = AF_INET, addrSocketType = Datagram }
  -- getAddrInfo throws exception on empty list, so the pattern matching never fail
  info : _ <- getAddrInfo (Just hints) (Just nodeHost) (Just (show nodePort))
  case fromSockAddr (addrAddress info) of
    Nothing   -> error "resolveNodeAddr: impossible"
    Just addr -> return addr
-- | One good node may be sufficient.
--
--   This operation does block, use
--   'Control.Concurrent.Async.Lifted.async' if needed.
--
-- Pings all the supplied start nodes in parallel, then fills the routing
-- table by iteratively searching for this node's own id among the nodes
-- that answered.
bootstrap :: Address ip => [NodeAddr ip] -> DHT ip ()
bootstrap startNodes = do
    $(logInfoS) "bootstrap" "Start node bootstrapping"
    nid <- asks thisNodeId
    -- TODO filter duplicated in startNodes list
    -- TODO retransmissions for startNodes
    aliveNodes <- queryParallel (pingQ <$> startNodes)
    -- Searching for our own id walks the table toward ourselves, which
    -- populates the buckets along the way; results are discarded.
    _ <- sourceList [aliveNodes] $= search nid (findNodeQ nid) $$ C.consume
    $(logInfoS) "bootstrap" "Node bootstrapping finished"
-- | Check if this node is already bootstrapped.
--   @bootstrap [good_node] >> isBootstrapped@ should always return 'True'.
--
--   This operation does not block: it merely inspects whether the current
--   routing table is full.
--
isBootstrapped :: DHT ip Bool
isBootstrapped = fmap T.full getTable
{-----------------------------------------------------------------------
-- Initialization
-----------------------------------------------------------------------}
-- | Load previous session. (corrupted - exception/ignore ?)
--
-- This is blocking operation, use
-- 'Control.Concurrent.Async.Lifted.async' if needed.
--
-- NOTE(review): stub — always raises via 'error'.
restore :: ByteString -> IO (Node ip)
restore = error "DHT.restore: not implemented"
-- | Serialize current DHT session to byte string.
--
-- This is blocking operation, use
-- 'Control.Concurrent.Async.Lifted.async' if needed.
--
-- NOTE(review): stub — always raises via 'error'.
snapshot :: DHT ip ByteString
snapshot = error "DHT.snapshot: not implemented"
{-----------------------------------------------------------------------
-- Operations
-----------------------------------------------------------------------}
-- | Get list of peers which downloading this torrent.
--
-- This operation is incremental and do block.
--
-- Seeds the iterative search with the closest nodes from our own routing
-- table, then streams batches of peers as each getPeers round answers.
lookup :: Address ip => InfoHash -> DHT ip `Source` [PeerAddr ip]
lookup topic = do      -- TODO retry getClosest if bucket is empty
  closest <- lift $ getClosest topic
  sourceList [closest] $= search topic (getPeersQ topic)
-- TODO do not republish if the topic is already in announceSet
-- | Announce that /this/ peer may have some pieces of the specified
-- torrent. DHT will reannounce this data periodically using
-- 'optReannounce' interval.
--
-- This operation is synchronous and do block, use
-- 'Control.Concurrent.Async.Lifted.async' if needed.
--
insert :: Address ip => InfoHash -> PortNumber -> DHT ip ()
insert ih p = do
  -- Announce now, then remember the topic so it is re-announced later.
  publish ih p
  insertTopic ih p
-- | Stop announcing /this/ peer for the specified torrent.
--
-- This operation is atomic and may block for a while.
--
-- Only removes the topic from the local announce set; already-published
-- entries on remote nodes expire on their own.
delete :: Address ip => InfoHash -> PortNumber -> DHT ip ()
delete = deleteTopic
{-# INLINE delete #-}
| DavidAlphaFox/bittorrent | src/Network/BitTorrent/DHT.hs | bsd-3-clause | 7,540 | 0 | 14 | 1,447 | 948 | 550 | 398 | -1 | -1 |
{-# LANGUAGE TupleSections, FlexibleInstances, Rank2Types #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Language.Haskell.GhcMod.SrcUtils where
import Control.Applicative ((<$>))
import CoreUtils (exprType)
import Data.Generics
import Data.Maybe (fromMaybe)
import Data.Ord as O
import GHC (LHsExpr, LPat, Id, DynFlags, SrcSpan, Type, Located, ParsedSource, RenamedSource, TypecheckedSource, GenLocated(L))
import qualified GHC as G
import GHC.SYB.Utils (Stage(..), everythingStaged)
import GhcMonad
import qualified Language.Haskell.Exts.Annotated as HE
import Language.Haskell.GhcMod.Doc (showOneLine, getStyle)
import Language.Haskell.GhcMod.DynFlags
import Language.Haskell.GhcMod.Gap (HasType(..), setWarnTypedHoles, setDeferTypeErrors)
import qualified Language.Haskell.GhcMod.Gap as Gap
import Language.Haskell.GhcMod.Monad (IOish, GhcModT)
import Language.Haskell.GhcMod.Target (setTargetFiles)
import OccName (OccName)
import Outputable (PprStyle)
import TcHsSyn (hsPatType)
----------------------------------------------------------------
-- | Typed expressions: desugar the expression against the current GHC
-- session and read the type off the resulting Core expression.  Yields
-- 'Nothing' when desugaring fails.
instance HasType (LHsExpr Id) where
  getType tcm e = do
    hs_env <- G.getSession
    mbe <- liftIO $ Gap.deSugar tcm e hs_env
    return $ (G.getLoc e, ) <$> CoreUtils.exprType <$> mbe
-- | Typed patterns already carry their type, so no desugaring is needed.
instance HasType (LPat Id) where
  getType _ (G.L spn pat) = return $ Just (spn, hsPatType pat)
----------------------------------------------------------------
-- Collect all located nodes of type @a@ whose span covers the given
-- (line, column), from each of the three compiler ASTs.  Only the
-- traversal stage differs between the three.
listifySpans :: Typeable a => TypecheckedSource -> (Int, Int) -> [Located a]
listifySpans tcs lc = listifyStaged TypeChecker p tcs
  where
    p (L spn _) = G.isGoodSrcSpan spn && spn `G.spans` lc
listifyParsedSpans :: Typeable a => ParsedSource -> (Int, Int) -> [Located a]
listifyParsedSpans pcs lc = listifyStaged Parser p pcs
  where
    p (L spn _) = G.isGoodSrcSpan spn && spn `G.spans` lc
listifyRenamedSpans :: Typeable a => RenamedSource -> (Int, Int) -> [Located a]
listifyRenamedSpans pcs lc = listifyStaged Renamer p pcs
  where
    p (L spn _) = G.isGoodSrcSpan spn && spn `G.spans` lc
-- Stage-aware SYB query: collect every node satisfying @p@, skipping the
-- parts of the AST that are not valid at the given compiler stage.
listifyStaged :: Typeable r => Stage -> (r -> Bool) -> GenericQ [r]
listifyStaged s p = everythingStaged s (++) [] ([] `mkQ` (\x -> [x | p x]))
-- | Order two source spans by containment: a span nested inside the other
-- sorts first; unrelated spans compare as equal.
cmp :: SrcSpan -> SrcSpan -> Ordering
cmp x y =
    if x `G.isSubspanOf` y
      then O.LT
      else if y `G.isSubspanOf` x
             then O.GT
             else O.EQ
-- Render a (span, type) pair as (start/end coordinates, pretty type).
toTup :: DynFlags -> PprStyle -> (SrcSpan, Type) -> ((Int,Int,Int,Int),String)
toTup dflag style (spn, typ) = (fourInts spn, pretty dflag style typ)
-- (startLine, startCol, endLine, endCol); (0,0,0,0) for unhelpful spans.
fourInts :: SrcSpan -> (Int,Int,Int,Int)
fourInts = fromMaybe (0,0,0,0) . Gap.getSrcSpan
-- Same, but for haskell-src-exts spans (which are always concrete).
fourIntsHE :: HE.SrcSpan -> (Int,Int,Int,Int)
fourIntsHE loc = ( HE.srcSpanStartLine loc, HE.srcSpanStartColumn loc
                 , HE.srcSpanEndLine loc, HE.srcSpanEndColumn loc)
-- Check whether (line,col) is inside a given SrcSpanInfo.  The position is
-- considered in range for type signatures and for type/data family
-- declarations; every other declaration kind yields False.
typeSigInRangeHE :: Int -> Int -> HE.Decl HE.SrcSpanInfo -> Bool
typeSigInRangeHE lineNo colNo decl = case decl of
    HE.TypeSig (HE.SrcSpanInfo s _) _ _      -> within s
    HE.TypeFamDecl (HE.SrcSpanInfo s _) _ _  -> within s
    HE.DataFamDecl (HE.SrcSpanInfo s _) _ _ _ -> within s
    _                                         -> False
  where
    within s = HE.srcSpanStart s <= (lineNo, colNo)
            && HE.srcSpanEnd s >= (lineNo, colNo)
-- Pretty-print a type on a single line, in user-facing form.
pretty :: DynFlags -> PprStyle -> Type -> String
pretty dflag style = showOneLine dflag style . Gap.typeForUser
----------------------------------------------------------------
-- | Load the given file as the current GHC target (with typed holes and
-- deferred type errors enabled, warnings off) and run the action with the
-- session's DynFlags and pretty-printing style.
inModuleContext :: IOish m
  => FilePath
  -> (DynFlags -> PprStyle -> GhcModT m a)
  -> GhcModT m a
inModuleContext file action =
  withDynFlags (setWarnTypedHoles . setDeferTypeErrors . setNoWarningFlags) $ do
    setTargetFiles [file]
    Gap.withContext $ do
        dflag <- G.getSessionDynFlags
        style <- getStyle
        action dflag style
----------------------------------------------------------------
-- Pretty-print a (possibly qualified) name on a single line.
showName :: DynFlags -> PprStyle -> G.Name -> String
showName dflag style name = showOneLine dflag style $ Gap.nameForUser name
-- Pretty-print an occurrence name (no qualification) on a single line.
showOccName :: DynFlags -> PprStyle -> OccName -> String
showOccName dflag style name = showOneLine dflag style $ Gap.occNameForUser name
| cabrera/ghc-mod | Language/Haskell/GhcMod/SrcUtils.hs | bsd-3-clause | 4,251 | 0 | 13 | 727 | 1,440 | 782 | 658 | 77 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module KAT_HMAC (tests) where
import qualified Crypto.MAC.HMAC as HMAC
import Crypto.Hash (MD5(..), SHA1(..), SHA256(..)
, Keccak_224(..), Keccak_256(..), Keccak_384(..), Keccak_512(..)
, SHA3_224(..), SHA3_256(..), SHA3_384(..), SHA3_512(..)
, HashAlgorithm, digestFromByteString)
import qualified Data.ByteString as B
import Imports
-- | One known-answer test case: key, message, and the expected HMAC.
data MACVector hash = MACVector
    { macKey    :: ByteString        -- HMAC key
    , macSecret :: ByteString        -- message to authenticate
    , macResult :: HMAC.HMAC hash    -- expected MAC value
    }
-- Show the underlying digest so failures print something readable.
instance Show (HMAC.HMAC a) where
    show (HMAC.HMAC d) = show d
-- | Build an expected HMAC from its raw digest bytes; errors out if the
-- byte string has the wrong length for the hash algorithm.
digest :: HashAlgorithm hash => ByteString -> HMAC.HMAC hash
digest = maybe (error "cannot get digest") HMAC.HMAC . digestFromByteString
-- Shared test message (RFC 2202-style).
v1 :: ByteString
v1 = "The quick brown fox jumps over the lazy dog"
-- Known-answer vectors: (empty key, empty message) and ("key", v1).
md5MACVectors :: [MACVector MD5]
md5MACVectors =
    [ MACVector B.empty B.empty $ digest "\x74\xe6\xf7\x29\x8a\x9c\x2d\x16\x89\x35\xf5\x8c\x00\x1b\xad\x88"
    , MACVector "key" v1 $ digest "\x80\x07\x07\x13\x46\x3e\x77\x49\xb9\x0c\x2d\xc2\x49\x11\xe2\x75"
    ]
sha1MACVectors :: [MACVector SHA1]
sha1MACVectors =
    [ MACVector B.empty B.empty $ digest "\xfb\xdb\x1d\x1b\x18\xaa\x6c\x08\x32\x4b\x7d\x64\xb7\x1f\xb7\x63\x70\x69\x0e\x1d"
    , MACVector "key" v1 $ digest "\xde\x7c\x9b\x85\xb8\xb7\x8a\xa6\xbc\x8a\x7a\x36\xf7\x0a\x90\x70\x1c\x9d\xb4\xd9"
    ]
sha256MACVectors :: [MACVector SHA256]
sha256MACVectors =
    [ MACVector B.empty B.empty $ digest "\xb6\x13\x67\x9a\x08\x14\xd9\xec\x77\x2f\x95\xd7\x78\xc3\x5f\xc5\xff\x16\x97\xc4\x93\x71\x56\x53\xc6\xc7\x12\x14\x42\x92\xc5\xad"
    , MACVector "key" v1 $ digest "\xf7\xbc\x83\xf4\x30\x53\x84\x24\xb1\x32\x98\xe6\xaa\x6f\xb1\x43\xef\x4d\x59\xa1\x49\x46\x17\x59\x97\x47\x9d\xbc\x2d\x1a\x3c\xd8"
    ]
-- Key/message pair for the Keccak vectors ("Jefe" / "what do ya want for
-- nothing?", the classic RFC 2202 test case, spelled out in hex).
-- Type signatures added: every other top-level binding in this module is
-- annotated, and their absence trips -Wmissing-signatures.
keccak_key1 :: ByteString
keccak_key1 = "\x4a\x65\x66\x65"
keccak_data1 :: ByteString
keccak_data1 = "\x77\x68\x61\x74\x20\x64\x6f\x20\x79\x61\x20\x77\x61\x6e\x74\x20\x66\x6f\x72\x20\x6e\x6f\x74\x68\x69\x6e\x67\x3f"
-- Known-answer vectors for each Keccak output size.
keccak_224_MAC_Vectors :: [MACVector Keccak_224]
keccak_224_MAC_Vectors =
  [ MACVector keccak_key1 keccak_data1 $ digest "\xe8\x24\xfe\xc9\x6c\x07\x4f\x22\xf9\x92\x35\xbb\x94\x2d\xa1\x98\x26\x64\xab\x69\x2c\xa8\x50\x10\x53\xcb\xd4\x14"
  ]
keccak_256_MAC_Vectors :: [MACVector Keccak_256]
keccak_256_MAC_Vectors =
  [ MACVector keccak_key1 keccak_data1 $ digest "\xaa\x9a\xed\x44\x8c\x7a\xbc\x8b\x5e\x32\x6f\xfa\x6a\x01\xcd\xed\xf7\xb4\xb8\x31\x88\x14\x68\xc0\x44\xba\x8d\xd4\x56\x63\x69\xa1"
  ]
keccak_384_MAC_Vectors :: [MACVector Keccak_384]
keccak_384_MAC_Vectors =
  [ MACVector keccak_key1 keccak_data1 $ digest "\x5a\xf5\xc9\xa7\x7a\x23\xa6\xa9\x3d\x80\x64\x9e\x56\x2a\xb7\x7f\x4f\x35\x52\xe3\xc5\xca\xff\xd9\x3b\xdf\x8b\x3c\xfc\x69\x20\xe3\x02\x3f\xc2\x67\x75\xd9\xdf\x1f\x3c\x94\x61\x31\x46\xad\x2c\x9d"
  ]
keccak_512_MAC_Vectors :: [MACVector Keccak_512]
keccak_512_MAC_Vectors =
  [ MACVector keccak_key1 keccak_data1 $ digest "\xc2\x96\x2e\x5b\xbe\x12\x38\x00\x78\x52\xf7\x9d\x81\x4d\xbb\xec\xd4\x68\x2e\x6f\x09\x7d\x37\xa3\x63\x58\x7c\x03\xbf\xa2\xeb\x08\x59\xd8\xd9\xc7\x01\xe0\x4c\xec\xec\xfd\x3d\xd7\xbf\xd4\x38\xf2\x0b\x8b\x64\x8e\x01\xbf\x8c\x11\xd2\x68\x24\xb9\x6c\xeb\xbd\xcb"
  ]
-- Key/message pair for the SHA-3 vectors (same "Jefe" test case as Keccak).
-- Type signatures added for consistency with the rest of the module and to
-- satisfy -Wmissing-signatures.
sha3_key1 :: ByteString
sha3_key1 = "\x4a\x65\x66\x65"
sha3_data1 :: ByteString
sha3_data1 = "\x77\x68\x61\x74\x20\x64\x6f\x20\x79\x61\x20\x77\x61\x6e\x74\x20\x66\x6f\x72\x20\x6e\x6f\x74\x68\x69\x6e\x67\x3f"
-- Known-answer vectors for each SHA-3 output size (FIPS 202 padding, so
-- the digests differ from the Keccak ones above).
sha3_224_MAC_Vectors :: [MACVector SHA3_224]
sha3_224_MAC_Vectors =
  [ MACVector sha3_key1 sha3_data1 $ digest "\x7f\xdb\x8d\xd8\x8b\xd2\xf6\x0d\x1b\x79\x86\x34\xad\x38\x68\x11\xc2\xcf\xc8\x5b\xfa\xf5\xd5\x2b\xba\xce\x5e\x66"
  ]
sha3_256_MAC_Vectors :: [MACVector SHA3_256]
sha3_256_MAC_Vectors =
  [ MACVector sha3_key1 sha3_data1 $ digest "\xc7\xd4\x07\x2e\x78\x88\x77\xae\x35\x96\xbb\xb0\xda\x73\xb8\x87\xc9\x17\x1f\x93\x09\x5b\x29\x4a\xe8\x57\xfb\xe2\x64\x5e\x1b\xa5"
  ]
sha3_384_MAC_Vectors :: [MACVector SHA3_384]
sha3_384_MAC_Vectors =
  [ MACVector sha3_key1 sha3_data1 $ digest "\xf1\x10\x1f\x8c\xbf\x97\x66\xfd\x67\x64\xd2\xed\x61\x90\x3f\x21\xca\x9b\x18\xf5\x7c\xf3\xe1\xa2\x3c\xa1\x35\x08\xa9\x32\x43\xce\x48\xc0\x45\xdc\x00\x7f\x26\xa2\x1b\x3f\x5e\x0e\x9d\xf4\xc2\x0a"
  ]
sha3_512_MAC_Vectors :: [MACVector SHA3_512]
sha3_512_MAC_Vectors =
  [ MACVector sha3_key1 sha3_data1 $ digest "\x5a\x4b\xfe\xab\x61\x66\x42\x7c\x7a\x36\x47\xb7\x47\x29\x2b\x83\x84\x53\x7c\xdb\x89\xaf\xb3\xbf\x56\x65\xe4\xc5\xe7\x09\x35\x0b\x28\x7b\xae\xc9\x21\xfd\x7c\xa0\xee\x7a\x0c\x31\xd0\x22\xa9\x5e\x1f\xc9\x2b\xa9\xd7\x7d\xf8\x83\x96\x02\x75\xbe\xb4\xe6\x20\x24"
  ]
-- | One test group per hash algorithm; each vector is checked twice: once
-- through the one-shot 'HMAC.hmac' API and once through the incremental
-- initialize/update/finalize API.
macTests :: [TestTree]
macTests =
    [ testGroup "md5" $ concatMap toMACTest $ zip is md5MACVectors
    , testGroup "sha1" $ concatMap toMACTest $ zip is sha1MACVectors
    , testGroup "sha256" $ concatMap toMACTest $ zip is sha256MACVectors
    , testGroup "keccak-224" $ concatMap toMACTest $ zip is keccak_224_MAC_Vectors
    , testGroup "keccak-256" $ concatMap toMACTest $ zip is keccak_256_MAC_Vectors
    , testGroup "keccak-384" $ concatMap toMACTest $ zip is keccak_384_MAC_Vectors
    , testGroup "keccak-512" $ concatMap toMACTest $ zip is keccak_512_MAC_Vectors
    , testGroup "sha3-224" $ concatMap toMACTest $ zip is sha3_224_MAC_Vectors
    , testGroup "sha3-256" $ concatMap toMACTest $ zip is sha3_256_MAC_Vectors
    , testGroup "sha3-384" $ concatMap toMACTest $ zip is sha3_384_MAC_Vectors
    , testGroup "sha3-512" $ concatMap toMACTest $ zip is sha3_512_MAC_Vectors
    ]
  where toMACTest (i, macVector) =
            [ testCase (show i) (macResult macVector @=? HMAC.hmac (macKey macVector) (macSecret macVector))
            , testCase ("incr-" ++ show i) (macResult macVector @=?
                HMAC.finalize (HMAC.update (HMAC.initialize (macKey macVector)) (macSecret macVector)))
            ]
-- Infinite index stream used to number the test cases; zip truncates it.
is :: [Int]
is = [1..]
-- | A random (key, message) pair together with the one-shot HMAC computed
-- for it; properties re-derive the MAC incrementally and compare.
data MacIncremental a = MacIncremental ByteString ByteString (HMAC.HMAC a)
    deriving (Show,Eq)
instance HashAlgorithm a => Arbitrary (MacIncremental a) where
    arbitrary = do
        key <- arbitraryBSof 1 89
        msg <- arbitraryBSof 1 99
        return $ MacIncremental key msg (HMAC.hmac key msg)
-- | Same idea, but the message is split into 1..20 chunks so the property
-- exercises repeated 'HMAC.update' calls.
data MacIncrementalList a = MacIncrementalList ByteString [ByteString] (HMAC.HMAC a)
    deriving (Show,Eq)
instance HashAlgorithm a => Arbitrary (MacIncrementalList a) where
    arbitrary = do
        key <- arbitraryBSof 1 89
        msgs <- choose (1,20) >>= \n -> replicateM n (arbitraryBSof 1 99)
        return $ MacIncrementalList key msgs (HMAC.hmac key (B.concat msgs))
-- | Property groups checking that the incremental HMAC API agrees with the
-- one-shot API for a selection of hash algorithms.
macIncrementalTests :: [TestTree]
macIncrementalTests =
    [ testProperties MD5
    , testProperties SHA1
    , testProperties SHA256
    , testProperties SHA3_224
    , testProperties SHA3_256
    , testProperties SHA3_384
    , testProperties SHA3_512
    ]
  where
    --testProperties :: HashAlgorithm a => a -> [Property]
    testProperties a = testGroup (show a)
        [ testProperty "list-one" (prop_inc0 a)
        , testProperty "list-multi" (prop_inc1 a)
        ]
-- Single-update path must equal the precomputed one-shot MAC.
prop_inc0 :: HashAlgorithm a => a -> MacIncremental a -> Bool
prop_inc0 _ (MacIncremental secret msg result) =
    result `assertEq` HMAC.finalize (HMAC.update (HMAC.initialize secret) msg)
-- Chunked updates (left fold over the pieces) must also equal it.
prop_inc1 :: HashAlgorithm a => a -> MacIncrementalList a -> Bool
prop_inc1 _ (MacIncrementalList secret msgs result) =
    result `assertEq` HMAC.finalize (foldl' HMAC.update (HMAC.initialize secret) msgs)
-- | Top-level HMAC test tree: known-answer tests plus incremental-API
-- consistency properties.  (Type signature added: the module exports this
-- binding and every other top-level binding here is annotated.)
tests :: TestTree
tests = testGroup "HMAC"
    [ testGroup "KATs" macTests
    , testGroup "properties" macIncrementalTests
    ]
| tekul/cryptonite | tests/KAT_HMAC.hs | bsd-3-clause | 7,588 | 0 | 17 | 1,229 | 1,580 | 817 | 763 | 113 | 1 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
TcSplice: Template Haskell splices
-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE InstanceSigs #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE MultiWayIf #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module TcSplice(
tcSpliceExpr, tcTypedBracket, tcUntypedBracket,
-- runQuasiQuoteExpr, runQuasiQuotePat,
-- runQuasiQuoteDecl, runQuasiQuoteType,
runAnnotation,
runMetaE, runMetaP, runMetaT, runMetaD, runQuasi,
tcTopSpliceExpr, lookupThName_maybe,
defaultRunMeta, runMeta', runRemoteModFinalizers,
finishTH
) where
#include "HsVersions.h"
import GhcPrelude
import HsSyn
import Annotations
import Finder
import Name
import TcRnMonad
import TcType
import Outputable
import TcExpr
import SrcLoc
import THNames
import TcUnify
import TcEnv
import Control.Monad
import GHCi.Message
import GHCi.RemoteTypes
import GHCi
import HscMain
-- These imports are the reason that TcSplice
-- is very high up the module hierarchy
import FV
import RnSplice( traceSplice, SpliceInfo(..) )
import RdrName
import HscTypes
import Convert
import RnExpr
import RnEnv
import RnUtils ( HsDocContext(..) )
import RnFixity ( lookupFixityRn_help )
import RnTypes
import TcHsSyn
import TcSimplify
import Type
import Kind
import NameSet
import TcMType
import TcHsType
import TcIface
import TyCoRep
import FamInst
import FamInstEnv
import InstEnv
import Inst
import NameEnv
import PrelNames
import TysWiredIn
import OccName
import Hooks
import Var
import Module
import LoadIface
import Class
import TyCon
import CoAxiom
import PatSyn
import ConLike
import DataCon
import TcEvidence( TcEvBinds(..) )
import Id
import IdInfo
import DsExpr
import DsMonad
import GHC.Serialized
import ErrUtils
import Util
import Unique
import VarSet
import Data.List ( find, mapAccumL )
import Data.Maybe
import FastString
import BasicTypes hiding( SuccessFlag(..) )
import Maybes( MaybeErr(..) )
import DynFlags
import Panic
import Lexeme
import qualified EnumSet
import qualified Language.Haskell.TH as TH
-- THSyntax gives access to internal functions and data types
import qualified Language.Haskell.TH.Syntax as TH
-- Because GHC.Desugar might not be in the base library of the bootstrapping compiler
import GHC.Desugar ( AnnotationWrapper(..) )
import Control.Exception
import Data.Binary
import Data.Binary.Get
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as LB
import Data.Dynamic ( fromDynamic, toDyn )
import qualified Data.Map as Map
import Data.Typeable ( typeOf, Typeable, TypeRep, typeRep )
import Data.Data (Data)
import Data.Proxy ( Proxy (..) )
import GHC.Exts ( unsafeCoerce# )
{-
************************************************************************
* *
\subsection{Main interface + stubs for the non-GHCI case
* *
************************************************************************
-}
-- Forward type signatures for the main entry points defined below.
tcTypedBracket :: HsExpr GhcRn -> HsBracket GhcRn -> ExpRhoType -> TcM (HsExpr GhcTcId)
tcUntypedBracket :: HsExpr GhcRn -> HsBracket GhcRn -> [PendingRnSplice] -> ExpRhoType
                 -> TcM (HsExpr GhcTcId)
tcSpliceExpr :: HsSplice GhcRn -> ExpRhoType -> TcM (HsExpr GhcTcId)
        -- None of these functions add constraints to the LIE

-- runQuasiQuoteExpr :: HsQuasiQuote RdrName -> RnM (LHsExpr RdrName)
-- runQuasiQuotePat :: HsQuasiQuote RdrName -> RnM (LPat RdrName)
-- runQuasiQuoteType :: HsQuasiQuote RdrName -> RnM (LHsType RdrName)
-- runQuasiQuoteDecl :: HsQuasiQuote RdrName -> RnM [LHsDecl RdrName]

runAnnotation :: CoreAnnTarget -> LHsExpr GhcRn -> TcM Annotation
{-
************************************************************************
* *
\subsection{Quoting an expression}
* *
************************************************************************
-}
-- See Note [How brackets and nested splices are handled]
-- tcTypedBracket :: HsBracket Name -> TcRhoType -> TcM (HsExpr TcId)
-- | Typecheck a typed bracket [|| e ||].  The body is typechecked only to
-- establish its type and to collect its nested splices; the elaborated body
-- is thrown away, and the result wraps a 'HsTcBracketOut' in a call to
-- unsafeTExpCoerce so the whole bracket gets type (Q (TExp expr_ty)).
tcTypedBracket rn_expr brack@(TExpBr expr) res_ty
  = addErrCtxt (quotationCtxtDoc brack) $
    do { cur_stage <- getStage
       ; ps_ref <- newMutVar []
       ; lie_var <- getConstraintVar   -- Any constraints arising from nested splices
                                       -- should get thrown into the constraint set
                                       -- from outside the bracket

       -- Typecheck expr to make sure it is valid,
       -- Throw away the typechecked expression but return its type.
       -- We'll typecheck it again when we splice it in somewhere
       ; (_tc_expr, expr_ty) <- setStage (Brack cur_stage (TcPending ps_ref lie_var)) $
                                tcInferRhoNC expr
                                -- NC for no context; tcBracket does that

       ; meta_ty <- tcTExpTy expr_ty
       ; ps' <- readMutVar ps_ref
       ; texpco <- tcLookupId unsafeTExpCoerceName
       ; tcWrapResultO (Shouldn'tHappenOrigin "TExpBr")
                       rn_expr
                       (unLoc (mkHsApp (nlHsTyApp texpco [expr_ty])
                                       (noLoc (HsTcBracketOut brack ps'))))
                       meta_ty res_ty }

-- Only typed (TExpBr) brackets should reach this function; anything else
-- is a compiler bug upstream.
tcTypedBracket _ other_brack _
  = pprPanic "tcTypedBracket" (ppr other_brack)
-- tcUntypedBracket :: HsBracket Name -> [PendingRnSplice] -> ExpRhoType -> TcM (HsExpr TcId)
-- | Typecheck an untyped bracket.  The body is already renamed and its
-- nested splices were collected into 'ps' by the renamer; here we only
-- typecheck those pending splices and check that the context expects the
-- Q-monadic type appropriate to this bracket form (see 'tcBrackTy').
tcUntypedBracket rn_expr brack ps res_ty
  = do { traceTc "tc_bracket untyped" (ppr brack $$ ppr ps)
       ; ps' <- mapM tcPendingSplice ps
       ; meta_ty <- tcBrackTy brack
       ; traceTc "tc_bracket done untyped" (ppr meta_ty)
       ; tcWrapResultO (Shouldn'tHappenOrigin "untyped bracket")
                       rn_expr (HsTcBracketOut brack ps') meta_ty res_ty }
---------------
-- | The result type the context must expect for an untyped bracket of each
-- form, e.g. an expression bracket [| e |] has type ExpQ (= Q Exp).
tcBrackTy :: HsBracket GhcRn -> TcM TcType
tcBrackTy (VarBr _ _) = tcMetaTy nameTyConName  -- Result type is Var (not Q-monadic)
tcBrackTy (ExpBr _)   = tcMetaTy expQTyConName  -- Result type is ExpQ (= Q Exp)
tcBrackTy (TypBr _)   = tcMetaTy typeQTyConName -- Result type is TypeQ (= Q Type)
tcBrackTy (DecBrG _)  = tcMetaTy decsQTyConName -- Result type is Q [Dec]
tcBrackTy (PatBr _)   = tcMetaTy patQTyConName  -- Result type is PatQ (= Q Pat)
-- The remaining forms should not reach here: typed (TExpBr) brackets go
-- through tcTypedBracket instead, and DecBrL is unexpected at this point.
-- Fix: both panic messages now name this function, consistently with the
-- DecBrL case (the TExpBr panic previously said "tcUntypedBracket").
tcBrackTy (DecBrL _)  = panic "tcBrackTy: Unexpected DecBrL"
tcBrackTy (TExpBr _)  = panic "tcBrackTy: Unexpected TExpBr"
---------------
-- | Typecheck one pending (untyped) splice collected by the renamer.
-- The splice body is checked against the Q-monadic type dictated by the
-- splice flavour (ExpQ, PatQ, TypeQ, or Q [Dec]).
tcPendingSplice :: PendingRnSplice -> TcM PendingTcSplice
tcPendingSplice (PendingRnSplice flavour splice_name expr)
  = do { let meta_ty_name = case flavour of
               UntypedExpSplice  -> expQTyConName
               UntypedPatSplice  -> patQTyConName
               UntypedTypeSplice -> typeQTyConName
               UntypedDeclSplice -> decsQTyConName
       ; res_ty  <- tcMetaTy meta_ty_name
       ; tc_expr <- tcMonoExpr expr (mkCheckExpType res_ty)
       ; return (PendingTcSplice splice_name tc_expr) }
---------------
-- Takes a tau and returns the type Q (TExp tau)
-- | Build the type @Q (TExp tau)@ for a given tau.  Reports an error (but
-- still returns the constructed type) when given a polytype, since typed
-- TH expressions must have unquantified types.
tcTExpTy :: TcType -> TcM TcType
tcTExpTy exp_ty
  = do { when (not (isTauTy exp_ty)) $ addErr (poly_err exp_ty)
       ; q_tycon    <- tcLookupTyCon qTyConName
       ; texp_tycon <- tcLookupTyCon tExpTyConName
       ; return (mkTyConApp q_tycon [mkTyConApp texp_tycon [exp_ty]]) }
  where
    poly_err ty = vcat
      [ text "Illegal polytype:" <+> ppr ty
      , text "The type of a Typed Template Haskell expression must" <+>
        text "not have any quantification." ]
-- | Error-context doc shown for failures inside a Template Haskell quotation.
quotationCtxtDoc :: HsBracket GhcRn -> SDoc
quotationCtxtDoc bracket
  = hang (text "In the Template Haskell quotation")
       2 (ppr bracket)
-- The whole of the rest of the file is the else-branch (ie stage2 only)
{-
Note [How top-level splices are handled]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Top-level splices (those not inside a [| .. |] quotation bracket) are handled
very straightforwardly:
1. tcTopSpliceExpr: typecheck the body e of the splice $(e)
2. runMetaT: desugar, compile, run it, and convert result back to
HsSyn RdrName (of the appropriate flavour, eg HsType RdrName,
HsExpr RdrName etc)
3. treat the result as if that's what you saw in the first place
e.g. for HsType, rename and kind-check
for HsExpr, rename and type-check
(The last step is different for decls, because they can *only* be
top-level: we return the result of step 2.)
Note [How brackets and nested splices are handled]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Nested splices (those inside a [| .. |] quotation bracket),
are treated quite differently.
Remember, there are two forms of bracket
typed [|| e ||]
and untyped [| e |]
The life cycle of a typed bracket:
* Starts as HsBracket
* When renaming:
* Set the ThStage to (Brack s RnPendingTyped)
* Rename the body
* Result is still a HsBracket
* When typechecking:
* Set the ThStage to (Brack s (TcPending ps_var lie_var))
* Typecheck the body, and throw away the elaborated result
* Nested splices (which must be typed) are typechecked, and
the results accumulated in ps_var; their constraints
accumulate in lie_var
* Result is a HsTcBracketOut rn_brack pending_splices
where rn_brack is the incoming renamed bracket
The life cycle of a un-typed bracket:
* Starts as HsBracket
* When renaming:
* Set the ThStage to (Brack s (RnPendingUntyped ps_var))
* Rename the body
* Nested splices (which must be untyped) are renamed, and the
results accumulated in ps_var
* Result is still (HsRnBracketOut rn_body pending_splices)
* When typechecking a HsRnBracketOut
* Typecheck the pending_splices individually
* Ignore the body of the bracket; just check that the context
expects a bracket of that type (e.g. a [p| pat |] bracket should
be in a context needing a (Q Pat))
* Result is a HsTcBracketOut rn_brack pending_splices
where rn_brack is the incoming renamed bracket
In both cases, desugaring happens like this:
* HsTcBracketOut is desugared by DsMeta.dsBracket. It
a) Extends the ds_meta environment with the PendingSplices
attached to the bracket
b) Converts the quoted (HsExpr Name) to a CoreExpr that, when
run, will produce a suitable TH expression/type/decl. This
is why we leave the *renamed* expression attached to the bracket:
the quoted expression should not be decorated with all the goop
added by the type checker
* Each splice carries a unique Name, called a "splice point", thus
${n}(e). The name is initialised to an (Unqual "splice") when the
splice is created; the renamer gives it a unique.
* When DsMeta (used to desugar the body of the bracket) comes across
a splice, it looks up the splice's Name, n, in the ds_meta envt,
to find an (HsExpr Id) that should be substituted for the splice;
it just desugars it to get a CoreExpr (DsMeta.repSplice).
Example:
Source: f = [| Just $(g 3) |]
The [| |] part is a HsBracket
Typechecked: f = [| Just ${s7}(g 3) |]{s7 = g Int 3}
The [| |] part is a HsBracketOut, containing *renamed*
(not typechecked) expression
The "s7" is the "splice point"; the (g Int 3) part
is a typechecked expression
Desugared: f = do { s7 <- g Int 3
; return (ConE "Data.Maybe.Just" s7) }
Note [Template Haskell state diagram]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Here are the ThStages, s, their corresponding level numbers
(the result of (thLevel s)), and their state transitions.
The top level of the program is stage Comp:
Start here
|
V
----------- $ ------------ $
| Comp | ---------> | Splice | -----|
| 1 | | 0 | <----|
----------- ------------
^ | ^ |
$ | | [||] $ | | [||]
| v | v
-------------- ----------------
| Brack Comp | | Brack Splice |
| 2 | | 1 |
-------------- ----------------
* Normal top-level declarations start in state Comp
(which has level 1).
Annotations start in state Splice, since they are
treated very like a splice (only without a '$')
* Code compiled in state Splice (and only such code)
will be *run at compile time*, with the result replacing
the splice
* The original paper used level -1 instead of 0, etc.
* The original paper did not allow a splice within a
splice, but there is no reason not to. This is the
$ transition in the top right.
Note [Template Haskell levels]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* Imported things are impLevel (= 0)
* However things at level 0 are not *necessarily* imported.
eg $( \b -> ... ) here b is bound at level 0
* In GHCi, variables bound by a previous command are treated
as impLevel, because we have bytecode for them.
* Variables are bound at the "current level"
* The current level starts off at outerLevel (= 1)
* The level is decremented by splicing $(..)
incremented by brackets [| |]
incremented by name-quoting 'f
When a variable is used, we compare
bind: binding level, and
use: current level at usage site
Generally
bind > use Always error (bound later than used)
[| \x -> $(f x) |]
bind = use Always OK (bound same stage as used)
[| \x -> $(f [| x |]) |]
bind < use Inside brackets, it depends
Inside splice, OK
Inside neither, OK
For (bind < use) inside brackets, there are three cases:
- Imported things OK f = [| map |]
- Top-level things OK g = [| f |]
- Non-top-level Only if there is a liftable instance
h = \(x:Int) -> [| x |]
To track top-level-ness we use the ThBindEnv in TcLclEnv
For example:
f = ...
g1 = $(map ...) is OK
g2 = $(f ...) is not OK; because we haven't compiled f yet
-}
{-
************************************************************************
* *
\subsection{Splicing an expression}
* *
************************************************************************
-}
-- | Typecheck a typed splice $$(e).  At the top level (stages 'Splice' and
-- 'Comp') the splice is run immediately via 'tcTopSplice'; inside a
-- bracket it is deferred via 'tcNestedSplice'.
tcSpliceExpr splice@(HsTypedSplice _ name expr) res_ty
  = addErrCtxt (spliceCtxtDoc splice) $
    setSrcSpan (getLoc expr)    $ do
    { stage <- getStage
    ; case stage of
        Splice {}            -> tcTopSplice expr res_ty
        Brack pop_stage pend -> tcNestedSplice pop_stage pend name expr res_ty
        RunSplice _          ->
          -- See Note [RunSplice ThLevel] in "TcRnTypes".
          pprPanic ("tcSpliceExpr: attempted to typecheck a splice when " ++
                    "running another splice") (ppr splice)
        Comp                 -> tcTopSplice expr res_ty
    }

-- Only typed splices should reach this function in this form.
tcSpliceExpr splice _
  = pprPanic "tcSpliceExpr" (ppr splice)
{- Note [Collecting modFinalizers in typed splices]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
'qAddModFinalizer' of the @Quasi TcM@ instance adds finalizers in the local
environment (see Note [Delaying modFinalizers in untyped splices] in
"RnSplice"). Thus after executing the splice, we move the finalizers to the
finalizer list in the global environment and set them to use the current local
environment (with 'addModFinalizersWithLclEnv').
-}
-- | Typecheck a typed splice occurring inside a bracket: typecheck its
-- body at the popped stage, record the result in the bracket's pending
-- splices, and return a placeholder (the caller ignores the result).
tcNestedSplice :: ThStage -> PendingStuff -> Name
                -> LHsExpr GhcRn -> ExpRhoType -> TcM (HsExpr GhcTc)
    -- See Note [How brackets and nested splices are handled]
    -- A splice inside brackets
tcNestedSplice pop_stage (TcPending ps_var lie_var) splice_name expr res_ty
  = do { res_ty <- expTypeToType res_ty
       ; meta_exp_ty <- tcTExpTy res_ty
         -- Typecheck the splice body at the enclosing (popped) stage, with
         -- its constraints going into the bracket's lie_var
       ; expr' <- setStage pop_stage $
                  setConstraintVar lie_var $
                  tcMonoExpr expr (mkCheckExpType meta_exp_ty)
       ; untypeq <- tcLookupId unTypeQName
       ; let expr'' = mkHsApp (nlHsTyApp untypeq [res_ty]) expr'
       ; ps <- readMutVar ps_var
       ; writeMutVar ps_var (PendingTcSplice splice_name expr'' : ps)

       -- The returned expression is ignored; it's in the pending splices
       ; return (panic "tcSpliceExpr") }

-- Any other PendingStuff means the renamer stage leaked into typechecking.
tcNestedSplice _ _ splice_name _ _
  = pprPanic "tcNestedSplice: rename stage found" (ppr splice_name)
-- | Typecheck and *run* a top-level typed splice, then rename and
-- typecheck the expression the splice produced, checking it against the
-- type the context expects.
tcTopSplice :: LHsExpr GhcRn -> ExpRhoType -> TcM (HsExpr GhcTc)
tcTopSplice expr res_ty
  = do { -- Typecheck the expression,
         -- making sure it has type Q (T res_ty)
         res_ty <- expTypeToType res_ty
       ; meta_exp_ty <- tcTExpTy res_ty
       ; zonked_q_expr <- tcTopSpliceExpr Typed $
                          tcMonoExpr expr (mkCheckExpType meta_exp_ty)

         -- See Note [Collecting modFinalizers in typed splices].
       ; modfinalizers_ref <- newTcRef []
         -- Run the expression
       ; expr2 <- setStage (RunSplice modfinalizers_ref) $
                    runMetaE zonked_q_expr
       ; mod_finalizers <- readTcRef modfinalizers_ref
       ; addModFinalizersWithLclEnv $ ThModFinalizers mod_finalizers
       ; traceSplice (SpliceInfo { spliceDescription = "expression"
                                 , spliceIsDecl      = False
                                 , spliceSource      = Just expr
                                 , spliceGenerated   = ppr expr2 })

         -- Rename and typecheck the spliced-in expression,
         -- making sure it has type res_ty
         -- These steps should never fail; this is a *typed* splice
       ; addErrCtxt (spliceResultDoc expr) $ do
       { (exp3, _fvs) <- rnLExpr expr2
       ; exp4 <- tcMonoExpr exp3 (mkCheckExpType res_ty)
       ; return (unLoc exp4) } }
{-
************************************************************************
* *
\subsection{Error messages}
* *
************************************************************************
-}
-- | Error-context doc naming the splice being typechecked.
spliceCtxtDoc :: HsSplice GhcRn -> SDoc
spliceCtxtDoc the_splice
  = hang (text "In the Template Haskell splice")
       2 (pprSplice the_splice)
-- | Error-context doc for failures while renaming or typechecking the
-- code that a typed splice expanded to.
spliceResultDoc :: LHsExpr GhcRn -> SDoc
spliceResultDoc spliced
  = sep [ text "In the result of the splice:"
        , nest 2 (char '$' <> ppr spliced)
        , text "To see what the splice expanded to, use -ddump-splices" ]
-------------------
-- | Type check an expression that is the body of a top-level splice
-- (the caller will compile and run it).
-- Note [How top-level splices are handled]
-- Note that we set the level to Splice, regardless of the original level,
-- before typechecking the expression.  For example:
--      f x = $( ...$(g 3) ... )
-- The recursive call to tcPolyExpr will simply expand the
-- inner escape before dealing with the outer one
tcTopSpliceExpr :: SpliceType -> TcM (LHsExpr GhcTc) -> TcM (LHsExpr GhcTc)
tcTopSpliceExpr isTypedSplice tc_action
  = checkNoErrs $  -- checkNoErrs: must not try to run the thing
                   -- if the type checker fails!
    unsetGOptM Opt_DeferTypeErrors $
                   -- Don't defer type errors.  Not only are we
                   -- going to run this code, but we do an unsafe
                   -- coerce, so we get a seg-fault if, say we
                   -- splice a type into a place where an expression
                   -- is expected (Trac #7276)
    setStage (Splice isTypedSplice) $
    do {    -- Typecheck the expression
         (expr', wanted) <- captureConstraints tc_action
       ; const_binds     <- simplifyTop wanted

         -- Zonk it and tie the knot of dictionary bindings
       ; zonkTopLExpr (mkHsDictLet (EvBinds const_binds) expr') }
{-
************************************************************************
* *
Annotations
* *
************************************************************************
-}
-- | Typecheck, compile and run the expression of an ANN pragma, producing
-- the resulting 'Annotation' for the given target.
runAnnotation target expr = do
    -- Find the classes we want instances for in order to call toAnnotationWrapper
    loc <- getSrcSpanM
    data_class <- tcLookupClass dataClassName
    to_annotation_wrapper_id <- tcLookupId toAnnotationWrapperName

    -- Check the instances we require live in another module (we want to execute it..)
    -- and check identifiers live in other modules using TH stage checks. tcSimplifyStagedExpr
    -- also resolves the LIE constraints to detect e.g. instance ambiguity
    zonked_wrapped_expr' <- tcTopSpliceExpr Untyped $
           do { (expr', expr_ty) <- tcInferRhoNC expr
                -- We manually wrap the typechecked expression in a call to toAnnotationWrapper
                -- By instantiating the call >here< it gets registered in the
                -- LIE consulted by tcTopSpliceExpr
                -- and hence ensures the appropriate dictionary is bound by const_binds
              ; wrapper <- instCall AnnOrigin [expr_ty] [mkClassPred data_class [expr_ty]]
              ; let specialised_to_annotation_wrapper_expr
                      = L loc (mkHsWrap wrapper
                                 (HsVar (L loc to_annotation_wrapper_id)))
              ; return (L loc (HsApp specialised_to_annotation_wrapper_expr expr')) }

    -- Run the appropriately wrapped expression to get the value of
    -- the annotation and its dictionaries. The return value is of
    -- type AnnotationWrapper by construction, so this conversion is
    -- safe
    serialized <- runMetaAW zonked_wrapped_expr'
    return Annotation {
               ann_target = target,
               ann_value = serialized
           }
-- | Convert a compiled annotation value (of type AnnotationWrapper by
-- construction in 'runAnnotation') into a 'Serialized' payload.  With
-- -fexternal-interpreter the conversion happens on the server via 'runTH';
-- otherwise we fetch the value locally and coerce it.
convertAnnotationWrapper :: ForeignHValue -> TcM (Either MsgDoc Serialized)
convertAnnotationWrapper fhv = do
  dflags <- getDynFlags
  if gopt Opt_ExternalInterpreter dflags
    then do
      Right <$> runTH THAnnWrapper fhv
    else do
      annotation_wrapper <- liftIO $ wormhole dflags fhv
      return $ Right $
        case unsafeCoerce# annotation_wrapper of
           AnnotationWrapper value | let serialized = toSerialized serializeWithData value ->
               -- Got the value and dictionaries: build the serialized value and
               -- call it a day. We ensure that we seq the entire serialized value
               -- in order that any errors in the user-written code for the
               -- annotation are exposed at this point. This is also why we are
               -- doing all this stuff inside the context of runMeta: it has the
               -- facilities to deal with user error in a meta-level expression
               seqSerialized serialized `seq` serialized
-- | Force the contents of the Serialized value so we know it doesn't
-- contain any bottoms
seqSerialized :: Serialized -> ()
seqSerialized (Serialized the_type bytes) = the_type `seq` bytes `seqList` ()
{-
************************************************************************
* *
\subsection{Running an expression}
* *
************************************************************************
-}
-- | Run a Template Haskell computation in the TcM monad, using TcM's
-- 'TH.Quasi' instance (defined below).
runQuasi :: TH.Q a -> TcM a
runQuasi = TH.runQ
-- | Run the module finalizers collected while running splices.  With
-- -fexternal-interpreter the finalizer references live on the iserv side,
-- so we send a RunModFinalizers message; otherwise we localize the refs
-- and run them via 'runQuasi'.
runRemoteModFinalizers :: ThModFinalizers -> TcM ()
runRemoteModFinalizers (ThModFinalizers finRefs) = do
  dflags <- getDynFlags
  -- Bring every foreign ref into scope at once, so they all stay alive
  -- for the duration of the call.
  let withForeignRefs [] f = f []
      withForeignRefs (x : xs) f = withForeignRef x $ \r ->
        withForeignRefs xs $ \rs -> f (r : rs)
  if gopt Opt_ExternalInterpreter dflags then do
    hsc_env <- env_top <$> getEnv
    withIServ hsc_env $ \i -> do
      tcg <- getGblEnv
      th_state <- readTcRef (tcg_th_remote_state tcg)
      case th_state of
        Nothing -> return () -- TH was not started, nothing to do
        Just fhv -> do
          liftIO $ withForeignRef fhv $ \st ->
            withForeignRefs finRefs $ \qrefs ->
              writeIServ i (putMessage (RunModFinalizers st qrefs))
          () <- runRemoteTH i []
          readQResult i
  else do
    qs <- liftIO (withForeignRefs finRefs $ mapM localRef)
    runQuasi $ sequence_ qs
-- | Run a TH computation of type @Q a@ held in a 'ForeignHValue', trace
-- the result, and convert it to HsSyn with the supplied conversion
-- function.
runQResult
  :: (a -> String)            -- ^ how to show the TH result (for tracing)
  -> (SrcSpan -> a -> b)      -- ^ how to convert the TH result to HsSyn
  -> (ForeignHValue -> TcM a) -- ^ how to run the @Q a@ computation
  -> SrcSpan                  -- ^ location of the splice
  -> ForeignHValue {- TH.Q a -}
  -> TcM b
runQResult show_th convert run_q expr_span hval
  = do { th_result <- run_q hval
       ; traceTc "Got TH result:" (text (show_th th_result))
       ; return (convert expr_span th_result) }
-----------------
-- | Run a metaprogram through the 'runMetaHook' hook, falling back to
-- 'defaultRunMeta' when no hook is installed.
runMeta :: (MetaHook TcM -> LHsExpr GhcTc -> TcM hs_syn)
        -> LHsExpr GhcTc
        -> TcM hs_syn
runMeta unwrap e
  = do { hook <- getHooked runMetaHook defaultRunMeta
       ; unwrap hook e }
-- | The default 'MetaHook': for each kind of meta request, pair the
-- appropriate runner (runTHExp etc.) with the matching TH-to-HsSyn
-- converter, and run via 'runMeta''.
defaultRunMeta :: MetaHook TcM
defaultRunMeta (MetaE r)
  = fmap r . runMeta' True ppr (runQResult TH.pprint convertToHsExpr runTHExp)
defaultRunMeta (MetaP r)
  = fmap r . runMeta' True ppr (runQResult TH.pprint convertToPat runTHPat)
defaultRunMeta (MetaT r)
  = fmap r . runMeta' True ppr (runQResult TH.pprint convertToHsType runTHType)
defaultRunMeta (MetaD r)
  = fmap r . runMeta' True ppr (runQResult TH.pprint convertToHsDecls runTHDec)
defaultRunMeta (MetaAW r)
  = fmap r . runMeta' False (const empty) (const convertAnnotationWrapper)
    -- We turn off showing the code in meta-level exceptions because doing so exposes
    -- the toAnnotationWrapper function that we slap around the user's code
----------------
-- | Run a metaprogram producing an annotation payload.
runMetaAW :: LHsExpr GhcTc         -- Of type AnnotationWrapper
          -> TcM Serialized
runMetaAW = runMeta metaRequestAW

-- | Run a metaprogram producing an expression.
runMetaE :: LHsExpr GhcTc          -- Of type (Q Exp)
         -> TcM (LHsExpr GhcPs)
runMetaE = runMeta metaRequestE

-- | Run a metaprogram producing a pattern.
runMetaP :: LHsExpr GhcTc          -- Of type (Q Pat)
         -> TcM (LPat GhcPs)
runMetaP = runMeta metaRequestP

-- | Run a metaprogram producing a type.
runMetaT :: LHsExpr GhcTc          -- Of type (Q Type)
         -> TcM (LHsType GhcPs)
runMetaT = runMeta metaRequestT

-- | Run a metaprogram producing top-level declarations.
runMetaD :: LHsExpr GhcTc          -- Of type Q [Dec]
         -> TcM [LHsDecl GhcPs]
runMetaD = runMeta metaRequestD
---------------
-- | The workhorse for running a metaprogram: check for earlier errors,
-- desugar and compile the splice expression, run the compiled value with
-- the supplied run-and-convert action, and turn any exception raised
-- along the way into an ordinary Tc error message.
runMeta' :: Bool         -- Whether code should be printed in the exception message
         -> (hs_syn -> SDoc)                                    -- how to print the code
         -> (SrcSpan -> ForeignHValue -> TcM (Either MsgDoc hs_syn))        -- How to run x
         -> LHsExpr GhcTc        -- Of type x; typically x = Q TH.Exp, or
                                 -- something like that
         -> TcM hs_syn           -- Of type t
runMeta' show_code ppr_hs run_and_convert expr
  = do  { traceTc "About to run" (ppr expr)
        ; recordThSpliceUse -- seems to be the best place to do this,
                            -- we catch all kinds of splices and annotations.

        -- Check that we've had no errors of any sort so far.
        -- For example, if we found an error in an earlier defn f, but
        -- recovered giving it type f :: forall a.a, it'd be very dodgy
        -- to carry on.  Mind you, the staging restrictions mean we won't
        -- actually run f, but it still seems wrong. And, more concretely,
        -- see Trac #5358 for an example that fell over when trying to
        -- reify a function with a "?" kind in it.  (These don't occur
        -- in type-correct programs.)
        ; failIfErrsM

        -- Desugar
        ; ds_expr <- initDsTc (dsLExpr expr)
        -- Compile and link it; might fail if linking fails
        ; hsc_env <- getTopEnv
        ; src_span <- getSrcSpanM
        ; traceTc "About to run (desugared)" (ppr ds_expr)
        ; either_hval <- tryM $ liftIO $
                         HscMain.hscCompileCoreExpr hsc_env src_span ds_expr
        ; case either_hval of {
            Left exn   -> fail_with_exn "compile and link" exn ;
            Right hval -> do

        {       -- Coerce it to Q t, and run it

                -- Running might fail if it throws an exception of any kind (hence tryAllM)
                -- including, say, a pattern-match exception in the code we are running
                --
                -- We also do the TH -> HS syntax conversion inside the same
                -- exception-catching thing so that if there are any lurking
                -- exceptions in the data structure returned by hval, we'll
                -- encounter them inside the try
                --
                -- See Note [Exceptions in TH]
          let expr_span = getLoc expr
        ; either_tval <- tryAllM $
                         setSrcSpan expr_span $ -- Set the span so that qLocation can
                                                -- see where this splice is
             do { mb_result <- run_and_convert expr_span hval
                ; case mb_result of
                    Left err     -> failWithTc err
                    Right result -> do { traceTc "Got HsSyn result:" (ppr_hs result)
                                       ; return $! result } }

        ; case either_tval of
            Right v -> return v
            Left se -> case fromException se of
                         Just IOEnvFailure -> failM -- Error already in Tc monad
                         _ -> fail_with_exn "run" se -- Exception
        }}}
  where
    -- see Note [Concealed TH exceptions]
    fail_with_exn :: Exception e => String -> e -> TcM a
    fail_with_exn phase exn = do
        exn_msg <- liftIO $ Panic.safeShowException exn
        let msg = vcat [text "Exception when trying to" <+> text phase <+> text "compile-time code:",
                        nest 2 (text exn_msg),
                        if show_code then text "Code:" <+> ppr expr else empty]
        failWithTc msg
{-
Note [Exceptions in TH]
~~~~~~~~~~~~~~~~~~~~~~~
Suppose we have something like this
$( f 4 )
where
f :: Int -> Q [Dec]
f n | n>3 = fail "Too many declarations"
| otherwise = ...
The 'fail' is a user-generated failure, and should be displayed as a
perfectly ordinary compiler error message, not a panic or anything
like that. Here's how it's processed:
* 'fail' is the monad fail. The monad instance for Q in TH.Syntax
effectively transforms (fail s) to
qReport True s >> fail
where 'qReport' comes from the Quasi class and fail from its monad
superclass.
* The TcM monad is an instance of Quasi (see TcSplice), and it implements
(qReport True s) by using addErr to add an error message to the bag of errors.
The 'fail' in TcM raises an IOEnvFailure exception
* 'qReport' forces the message to ensure any exception hidden in unevaluated
thunk doesn't get into the bag of errors. Otherwise the following splice
will trigger a panic (Trac #8987):
$(fail undefined)
See also Note [Concealed TH exceptions]
* So, when running a splice, we catch all exceptions; then for
- an IOEnvFailure exception, we assume the error is already
in the error-bag (above)
- other errors, we add an error to the bag
and then fail
Note [Concealed TH exceptions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When displaying the error message contained in an exception originated from TH
code, we need to make sure that the error message itself does not contain an
exception. For example, when executing the following splice:
$( error ("foo " ++ error "bar") )
the message for the outer exception is a thunk which will throw the inner
exception when evaluated.
For this reason, we display the message of a TH exception using the
'safeShowException' function, which recursively catches any exception thrown
when showing an error message.
To call runQ in the Tc monad, we need to make TcM an instance of Quasi:
-}
-- | TcM is an instance of TH's 'Quasi' class, so that TH computations can
-- be run (via 'TH.runQ') directly in the typechecker monad; see the text
-- above ("To call runQ in the Tc monad ...").
instance TH.Quasi TcM where
  -- Fresh names are built from the typechecker's unique supply.
  qNewName s = do { u <- newUnique
                  ; let i = getKey u
                  ; return (TH.mkNameU s i) }

  -- 'msg' is forced to ensure exceptions don't escape,
  -- see Note [Exceptions in TH]
  qReport True msg  = seqList msg $ addErr (text msg)
  qReport False msg = seqList msg $ addWarn NoReason (text msg)

  -- Report the source location of the splice currently being run.
  qLocation = do { m <- getModule
                 ; l <- getSrcSpanM
                 ; r <- case l of
                          UnhelpfulSpan _ -> pprPanic "qLocation: Unhelpful location"
                                                      (ppr l)
                          RealSrcSpan s -> return s
                 ; return (TH.Loc { TH.loc_filename = unpackFS (srcSpanFile r)
                                  , TH.loc_module   = moduleNameString (moduleName m)
                                  , TH.loc_package  = unitIdString (moduleUnitId m)
                                  , TH.loc_start = (srcSpanStartLine r, srcSpanStartCol r)
                                  , TH.loc_end = (srcSpanEndLine r, srcSpanEndCol r) }) }

  -- Reification queries delegate to the helpers defined elsewhere in this
  -- module / imported from it.
  qLookupName       = lookupName
  qReify            = reify
  qReifyFixity nm   = lookupThName nm >>= reifyFixity
  qReifyInstances   = reifyInstances
  qReifyRoles       = reifyRoles
  qReifyAnnotations = reifyAnnotations
  qReifyModule      = reifyModule
  qReifyConStrictness nm = do { nm' <- lookupThName nm
                              ; dc  <- tcLookupDataCon nm'
                              ; let bangs = dataConImplBangs dc
                              ; return (map reifyDecidedStrictness bangs) }

  -- For qRecover, discard error messages if
  -- the recovery action is chosen.  Otherwise
  -- we'll only fail higher up.
  qRecover recover main = tryTcDiscardingErrs recover main

  -- Prepend the file to the list of dependent files in the global env.
  qAddDependentFile fp = do
    ref <- fmap tcg_dependent_files getGblEnv
    dep_files <- readTcRef ref
    writeTcRef ref (fp:dep_files)

  -- Convert the TH declarations to HsSyn, check each is a permitted
  -- declaration form, and accumulate them in tcg_th_topdecls.
  qAddTopDecls thds = do
      l <- getSrcSpanM
      let either_hval = convertToHsDecls l thds
      ds <- case either_hval of
              Left exn -> pprPanic "qAddTopDecls: can't convert top-level declarations" exn
              Right ds -> return ds
      mapM_ (checkTopDecl . unLoc) ds
      th_topdecls_var <- fmap tcg_th_topdecls getGblEnv
      updTcRef th_topdecls_var (\topds -> ds ++ topds)
    where
      -- Only value, signature, annotation and foreign-import
      -- declarations may be added with addTopDecl.
      checkTopDecl :: HsDecl GhcPs -> TcM ()
      checkTopDecl (ValD binds)
        = mapM_ bindName (collectHsBindBinders binds)
      checkTopDecl (SigD _)
        = return ()
      checkTopDecl (AnnD _)
        = return ()
      checkTopDecl (ForD (ForeignImport { fd_name = L _ name }))
        = bindName name
      checkTopDecl _
        = addErr $ text "Only function, value, annotation, and foreign import declarations may be added with addTopDecl"

      -- Binders must be Exact (newName-generated) names; record them in
      -- tcg_th_topnames.
      bindName :: RdrName -> TcM ()
      bindName (Exact n)
        = do { th_topnames_var <- fmap tcg_th_topnames getGblEnv
             ; updTcRef th_topnames_var (\ns -> extendNameSet ns n)
             }
      bindName name =
          addErr $
          hang (text "The binder" <+> quotes (ppr name) <+> ptext (sLit "is not a NameU."))
             2 (text "Probable cause: you used mkName instead of newName to generate a binding.")

  -- Accumulate foreign files in tcg_th_foreign_files.
  qAddForeignFile lang str = do
    var <- fmap tcg_th_foreign_files getGblEnv
    updTcRef var ((lang, str) :)

  -- Wrap the finalizer in a foreign ref and record it; see
  -- 'addModFinalizerRef' below.
  qAddModFinalizer fin = do
      r <- liftIO $ mkRemoteRef fin
      fref <- liftIO $ mkForeignRef r (freeRemoteRef r)
      addModFinalizerRef fref

  -- Record a core plugin by module name; plugins from the current
  -- (home) package are rejected.
  qAddCorePlugin plugin = do
      hsc_env <- env_top <$> getEnv
      r <- liftIO $ findHomeModule hsc_env (mkModuleName plugin)
      let err = hang
            (text "addCorePlugin: invalid plugin module "
               <+> text (show plugin)
            )
            2
            (text "Plugins in the current package can't be specified.")
      case r of
        Found {} -> addErr err
        FoundMultiple {} -> addErr err
        _ -> return ()
      th_coreplugins_var <- tcg_th_coreplugins <$> getGblEnv
      updTcRef th_coreplugins_var (plugin:)

  -- The TH user state is a Map keyed by TypeRep, held in tcg_th_state.
  qGetQ :: forall a. Typeable a => TcM (Maybe a)
  qGetQ = do
      th_state_var <- fmap tcg_th_state getGblEnv
      th_state <- readTcRef th_state_var
      -- See #10596 for why we use a scoped type variable here.
      return (Map.lookup (typeRep (Proxy :: Proxy a)) th_state >>= fromDynamic)

  qPutQ x = do
      th_state_var <- fmap tcg_th_state getGblEnv
      updTcRef th_state_var (\m -> Map.insert (typeOf x) (toDyn x) m)

  qIsExtEnabled = xoptM
  qExtsEnabled =
    EnumSet.toList . extensionFlags . hsc_dflags <$> getTopEnv
-- | Adds a mod finalizer reference to the local environment.
-- Finalizers are collected in the mutable var carried by the 'RunSplice'
-- stage; calling this in any other stage is a bug in the caller.
addModFinalizerRef :: ForeignRef (TH.Q ()) -> TcM ()
addModFinalizerRef finRef = do
  th_stage <- getStage
  case th_stage of
    RunSplice th_modfinalizers_var -> updTcRef th_modfinalizers_var (finRef :)
    -- This case happens only if a splice is executed and the caller does
    -- not set the 'ThStage' to 'RunSplice' to collect finalizers.
    -- See Note [Delaying modFinalizers in untyped splices] in RnSplice.
    _ ->
      pprPanic "addModFinalizer was called when no finalizers were collected"
               (ppr th_stage)
-- | Releases the external interpreter state: when running with
-- -fexternal-interpreter, clear the cached remote TH state reference.
-- A no-op with the internal interpreter.
finishTH :: TcM ()
finishTH = do
  dflags <- getDynFlags
  when (gopt Opt_ExternalInterpreter dflags) $
    getGblEnv >>= \tcg -> writeTcRef (tcg_th_remote_state tcg) Nothing
-- | Run a compiled @Q Exp@ computation and return the TH expression.
runTHExp :: ForeignHValue -> TcM TH.Exp
runTHExp = runTH THExp

-- | Run a compiled @Q Pat@ computation and return the TH pattern.
runTHPat :: ForeignHValue -> TcM TH.Pat
runTHPat = runTH THPat

-- | Run a compiled @Q Type@ computation and return the TH type.
runTHType :: ForeignHValue -> TcM TH.Type
runTHType = runTH THType

-- | Run a compiled @Q [Dec]@ computation and return the TH declarations.
runTHDec :: ForeignHValue -> TcM [TH.Dec]
runTHDec = runTH THDec
-- | Run a compiled TH computation of type @Q a@ and return its result.
-- With the internal interpreter the computation runs directly in this
-- process; with -fexternal-interpreter a RunTH message is sent to the
-- iserv process and the Binary-encoded result is read back and decoded.
runTH :: Binary a => THResultType -> ForeignHValue -> TcM a
runTH ty fhv = do
  hsc_env <- env_top <$> getEnv
  dflags <- getDynFlags
  if not (gopt Opt_ExternalInterpreter dflags)
    then do
      -- Run it in the local TcM.
      -- (Fix: dropped the redundant "r <- ...; return r" bind-then-return.)
      hv <- liftIO $ wormhole dflags fhv
      runQuasi (unsafeCoerce# hv :: TH.Q a)
    else
      -- Run it on the server. For an overview of how TH works with
      -- Remote GHCi, see Note [Remote Template Haskell] in
      -- libraries/ghci/GHCi/TH.hs.
      withIServ hsc_env $ \i -> do
        rstate <- getTHState i
        loc <- TH.qLocation
        liftIO $
          withForeignRef rstate $ \state_hv ->
            withForeignRef fhv $ \q_hv ->
              writeIServ i (putMessage (RunTH state_hv q_hv ty (Just loc)))
        -- Service the computation's queries until it finishes, then
        -- decode the result strictly.
        runRemoteTH i []
        bs <- readQResult i
        return $! runGet get (LB.fromStrict bs)
-- | communicate with a remotely-running TH computation until it finishes.
-- See Note [Remote Template Haskell] in libraries/ghci/GHCi/TH.hs.
runRemoteTH
  :: IServ
  -> [Messages]   --  saved from nested calls to qRecover
  -> TcM ()
runRemoteTH iserv recovers = do
  THMsg msg <- liftIO $ readIServ iserv getTHMessage
  case msg of
    RunTHDone -> return ()
    StartRecover -> do -- Note [TH recover with -fexternal-interpreter]
      -- Stash the messages accumulated so far and start with an empty
      -- set, so they can be restored/merged when EndRecover arrives.
      v <- getErrsVar
      msgs <- readTcRef v
      writeTcRef v emptyMessages
      runRemoteTH iserv (msgs : recovers)
    EndRecover caught_error -> do
      v <- getErrsVar
      let (prev_msgs, rest) = case recovers of
            [] -> panic "EndRecover"
            a : b -> (a,b)
      -- Discard the attempt's messages if it failed, otherwise merge
      -- them with the saved ones.
      if caught_error
        then writeTcRef v prev_msgs
        else updTcRef v (unionMessages prev_msgs)
      runRemoteTH iserv rest
    _other -> do
      -- A query from the TH computation (reify etc.): answer it and loop.
      r <- handleTHMessage msg
      liftIO $ writeIServ iserv (put r)
      runRemoteTH iserv recovers
-- | Read a value of type QResult from the iserv
readQResult :: Binary a => IServ -> TcM a
readQResult i = do
  qr <- liftIO $ readIServ i get
  case qr of
    QDone a -> return a
    -- Re-raise a remote exception locally as an ErrorCall.
    QException str -> liftIO $ throwIO (ErrorCall str)
    -- A remote 'fail' becomes a local 'fail' in TcM.
    QFail str -> fail str
{- Note [TH recover with -fexternal-interpreter]
Recover is slightly tricky to implement.
The meaning of "recover a b" is
- Do a
- If it finished successfully, then keep the messages it generated
- If it failed, discard any messages it generated, and do b
The messages are managed by GHC in the TcM monad, whereas the
exception-handling is done in the ghc-iserv process, so we have to
coordinate between the two.
On the server:
- emit a StartRecover message
- run "a" inside a catch
- if it finishes, emit EndRecover False
- if it fails, emit EndRecover True, then run "b"
Back in GHC, when we receive:
StartRecover
save the current messages and start with an empty set.
EndRecover caught_error
Restore the previous messages,
and merge in the new messages if caught_error is false.
-}
-- | Retrieve (or create, if it hasn't been created already), the
-- remote TH state.  The TH state is a remote reference to an IORef
-- QState living on the server, and we have to pass this to each RunTH
-- call we make.
--
-- The TH state is stored in tcg_th_remote_state in the TcGblEnv.
--
getTHState :: IServ -> TcM (ForeignRef (IORef QState))
getTHState i = do
  tcg <- getGblEnv
  th_state <- readTcRef (tcg_th_remote_state tcg)
  case th_state of
    Just rhv -> return rhv
    Nothing -> do
      -- First TH request in this module: ask the server to allocate
      -- a fresh QState (StartTH), then cache the finalized reference.
      hsc_env <- env_top <$> getEnv
      fhv <- liftIO $ mkFinalizedHValue hsc_env =<< iservCall i StartTH
      writeTcRef (tcg_th_remote_state tcg) (Just fhv)
      return fhv
-- | Run a TcM action and package its outcome as a 'THResult' suitable
-- for sending back to the server: a 'fail' becomes 'THException',
-- success becomes 'THComplete'.
wrapTHResult :: TcM a -> TcM (THResult a)
wrapTHResult tcm =
  -- only catch 'fail', treat everything else as catastrophic
  either (THException . show) THComplete <$> tryM tcm
-- | Dispatch a query sent by the remote TH computation to the
-- corresponding 'Quasi'-style operation in TcM, wrapping the outcome in
-- a 'THResult' for transmission back to the server.
handleTHMessage :: THMessage a -> TcM a
handleTHMessage msg = case msg of
  NewName a -> wrapTHResult $ TH.qNewName a
  Report b str -> wrapTHResult $ TH.qReport b str
  LookupName b str -> wrapTHResult $ TH.qLookupName b str
  Reify n -> wrapTHResult $ TH.qReify n
  ReifyFixity n -> wrapTHResult $ TH.qReifyFixity n
  ReifyInstances n ts -> wrapTHResult $ TH.qReifyInstances n ts
  ReifyRoles n -> wrapTHResult $ TH.qReifyRoles n
  ReifyAnnotations lookup tyrep ->
    -- Annotations are shipped over the wire as serialised bytes.
    wrapTHResult $ (map B.pack <$> getAnnotationsByTypeRep lookup tyrep)
  ReifyModule m -> wrapTHResult $ TH.qReifyModule m
  ReifyConStrictness nm -> wrapTHResult $ TH.qReifyConStrictness nm
  AddDependentFile f -> wrapTHResult $ TH.qAddDependentFile f
  AddModFinalizer r -> do
    -- The finalizer is a remote value; wrap it in a local ForeignRef
    -- before registering it.
    hsc_env <- env_top <$> getEnv
    wrapTHResult $ liftIO (mkFinalizedHValue hsc_env r) >>= addModFinalizerRef
  AddCorePlugin str -> wrapTHResult $ TH.qAddCorePlugin str
  AddTopDecls decs -> wrapTHResult $ TH.qAddTopDecls decs
  AddForeignFile lang str -> wrapTHResult $ TH.qAddForeignFile lang str
  IsExtEnabled ext -> wrapTHResult $ TH.qIsExtEnabled ext
  ExtsEnabled -> wrapTHResult $ TH.qExtsEnabled
  _ -> panic ("handleTHMessage: unexpected message " ++ show msg)
-- | Look up all annotations attached to the given target that match the
-- given 'TypeRep', returning their serialised payloads.  Consults both
-- the EPS/HPT annotations and the annotations of the module being
-- compiled (tcg_ann_env).
getAnnotationsByTypeRep :: TH.AnnLookup -> TypeRep -> TcM [[Word8]]
getAnnotationsByTypeRep th_name tyrep = do
  name <- lookupThAnnLookup th_name
  topEnv <- getTopEnv
  epsHptAnns <- liftIO $ prepareAnnotations topEnv Nothing
  tcg <- getGblEnv
  let fromEpsHpt = findAnnsByTypeRep epsHptAnns name tyrep
      fromTcgEnv = findAnnsByTypeRep (tcg_ann_env tcg) name tyrep
  return (fromEpsHpt ++ fromTcgEnv)
{-
************************************************************************
* *
Instance Testing
* *
************************************************************************
-}
-- | Implements 'TH.qReifyInstances': convert the TH application
-- @T ty1 .. tyn@ back to an HsType, rename and typecheck it, and then
-- look up the matching class or family instances.
reifyInstances :: TH.Name -> [TH.Type] -> TcM [TH.Dec]
reifyInstances th_nm th_tys
   = addErrCtxt (text "In the argument of reifyInstances:"
                 <+> ppr_th th_nm <+> sep (map ppr_th th_tys)) $
     do { loc <- getSrcSpanM
        ; rdr_ty <- cvt loc (mkThAppTs (TH.ConT th_nm) th_tys)
          -- #9262 says to bring vars into scope, like in HsForAllTy case
          -- of rnHsTyKi
        ; free_vars <- extractHsTyRdrTyVars rdr_ty
        ; let tv_rdrs = freeKiTyVarsAllVars free_vars
        -- Rename to HsType Name
        ; ((tv_names, rn_ty), _fvs)
            <- checkNoErrs $ -- If there are out-of-scope Names here, then we
                             -- must error before proceeding to typecheck the
                             -- renamed type, as that will result in GHC
                             -- internal errors (#13837).
               bindLRdrNames tv_rdrs $ \ tv_names ->
               do { (rn_ty, fvs) <- rnLHsType doc rdr_ty
                  ; return ((tv_names, rn_ty), fvs) }
        ; (_tvs, ty)
            <- solveEqualities $
               tcImplicitTKBndrsType tv_names $
               fst <$> tcLHsType rn_ty
        ; ty <- zonkTcTypeToType emptyZonkEnv ty
                -- Substitute out the meta type variables
                -- In particular, the type might have kind
                -- variables inside it (Trac #7477)

        ; traceTc "reifyInstances" (ppr ty $$ ppr (typeKind ty))
        ; case splitTyConApp_maybe ty of   -- This expands any type synonyms
            Just (tc, tys)                 -- See Trac #7910
               | Just cls <- tyConClass_maybe tc
               -> do { inst_envs <- tcGetInstEnvs
                     ; let (matches, unifies, _) = lookupInstEnv False inst_envs cls tys
                     ; traceTc "reifyInstances1" (ppr matches)
                     ; reifyClassInstances cls (map fst matches ++ unifies) }
               | isOpenFamilyTyCon tc
               -> do { inst_envs <- tcGetFamInstEnvs
                     ; let matches = lookupFamInstEnv inst_envs tc tys
                     ; traceTc "reifyInstances2" (ppr matches)
                     ; reifyFamilyInstances tc (map fim_instance matches) }
            _  -> bale_out (hang (text "reifyInstances:" <+> quotes (ppr ty))
                               2 (text "is not a class constraint or type family application")) }
  where
    doc = ClassInstanceCtx
    bale_out msg = failWithTc msg

    -- Convert a TH.Type back to an HsType; a conversion failure is a
    -- user-visible type error.
    cvt :: SrcSpan -> TH.Type -> TcM (LHsType GhcPs)
    cvt loc th_ty = case convertToHsType loc th_ty of
      Left msg -> failWithTc msg
      Right ty -> return ty
{-
************************************************************************
* *
Reification
* *
************************************************************************
-}
-- | Implements 'TH.qLookupName': look up a string in either the type
-- or the value namespace, trying the local environment before the
-- global one.
lookupName :: Bool      -- True  <=> type namespace
                        -- False <=> value namespace
           -> String -> TcM (Maybe TH.Name)
lookupName is_type_name s
  = do { lcl_env <- getLocalRdrEnv
       ; case lookupLocalRdrEnv lcl_env rdr_name of
           Just n  -> return (Just (reifyName n))
           Nothing -> do { mb_nm <- lookupGlobalOccRn_maybe rdr_name
                         ; return (fmap reifyName mb_nm) } }
  where
    th_name = TH.mkName s       -- Parses M.x into a base of 'x' and a module of 'M'

    occ_fs :: FastString
    occ_fs = mkFastString (TH.nameBase th_name)

    -- Choose the OccName namespace from the requested namespace and the
    -- lexical shape of the string (constructor vs variable vs symbol).
    occ :: OccName
    occ | is_type_name
        = if isLexVarSym occ_fs || isLexCon occ_fs
                             then mkTcOccFS    occ_fs
                             else mkTyVarOccFS occ_fs
        | otherwise
        = if isLexCon occ_fs then mkDataOccFS occ_fs
                             else mkVarOccFS  occ_fs

    rdr_name = case TH.nameModule th_name of
                 Nothing  -> mkRdrUnqual occ
                 Just mod -> mkRdrQual (mkModuleName mod) occ
-- | Resolve a TH name to the thing it denotes in the type environment.
getThing :: TH.Name -> TcM TcTyThing
getThing th_name
  = do  { name <- lookupThName th_name
        ; traceIf (text "reify" <+> text (show th_name) <+> brackets (ppr_ns th_name) <+> ppr name)
        ; tcLookupTh name }
        -- ToDo: this tcLookup could fail, which would give a
        --       rather unhelpful error message
  where
    -- Render the namespace of a global TH name, for the trace output only.
    ppr_ns (TH.Name _ (TH.NameG TH.DataName  _pkg _mod)) = text "data"
    ppr_ns (TH.Name _ (TH.NameG TH.TcClsName _pkg _mod)) = text "tc"
    ppr_ns (TH.Name _ (TH.NameG TH.VarName   _pkg _mod)) = text "var"
    ppr_ns _ = panic "reify/ppr_ns"
-- | Implements 'TH.qReify': resolve the name and convert whatever it
-- denotes into a 'TH.Info'.
reify :: TH.Name -> TcM TH.Info
reify th_name = do
  traceTc "reify 1" (text (TH.showName th_name))
  thing <- getThing th_name
  traceTc "reify 2" (ppr thing)
  reifyThing thing
-- | Resolve a TH name, failing with a "not in scope" error if no
-- binding matches.
lookupThName :: TH.Name -> TcM Name
lookupThName th_name =
  lookupThName_maybe th_name
    >>= maybe (failWithTc (notInScope th_name)) return
-- | Try to resolve a TH name, returning Nothing if no binding matches.
lookupThName_maybe :: TH.Name -> TcM (Maybe Name)
lookupThName_maybe th_name = do
    -- Pick the first guess that resolves.  E.g. reify (mkName "A") will
    -- pick the class A in preference to the data constructor A.
    hits <- mapMaybeM try_guess (thRdrNameGuesses th_name)
    return (listToMaybe hits)
  where
    -- Repeat much of lookupOccRn, because we want to report errors in a
    -- TH-relevant way: consult the local environment first, then the
    -- global one.
    try_guess rdr_name = do
      rdr_env <- getLocalRdrEnv
      case lookupLocalRdrEnv rdr_env rdr_name of
        Just name -> return (Just name)
        Nothing   -> lookupGlobalOccRn_maybe rdr_name
tcLookupTh :: Name -> TcM TcTyThing
-- This is a specialised version of TcEnv.tcLookup; specialised mainly in that
-- it gives a reify-related error message on failure, whereas in the normal
-- tcLookup, failure is a bug.
tcLookupTh name
  = do  { (gbl_env, lcl_env) <- getEnvs
        ; case lookupNameEnv (tcl_env lcl_env) name of {
                Just thing -> return thing;
                Nothing    ->

          -- Not bound locally; try this module's global type env.
          case lookupNameEnv (tcg_type_env gbl_env) name of {
                Just thing -> return (AGlobal thing);
                Nothing    ->

          -- EZY: I don't think this choice matters, no TH in signatures!
          if nameIsLocalOrFrom (tcg_semantic_mod gbl_env) name
          then  -- It's defined in this module
                failWithTc (notInEnv name)

          else
          -- Defined in another module: consult (and possibly load) its
          -- interface.
     do { mb_thing <- tcLookupImported_maybe name
        ; case mb_thing of
            Succeeded thing -> return (AGlobal thing)
            Failed msg      -> failWithTc msg
    }}}}
-- | Error message for a reify of a name with no binding in scope.
notInScope :: TH.Name -> SDoc
notInScope th_name =
  quotes (ppr_th th_name) <+> text "is not in scope at a reify"
  -- Ugh! Rather an indirect way to display the name
-- | Error message for a reify of a locally-defined name that is not
-- (yet) in the type environment.
notInEnv :: Name -> SDoc
notInEnv name =
  hsep [quotes (ppr name), text "is not in the type environment at a reify"]
------------------------------
-- | Implements 'TH.qReifyRoles': the roles of a type constructor's
-- parameters.  Fails for things that are not type constructors.
reifyRoles :: TH.Name -> TcM [TH.Role]
reifyRoles th_name = do
  thing <- getThing th_name
  case thing of
    AGlobal (ATyCon tc) -> return (map reify_role (tyConRoles tc))
    _ -> failWithTc (text "No roles associated with" <+> (ppr thing))
  where
    reify_role role = case role of
      Nominal          -> TH.NominalR
      Representational -> TH.RepresentationalR
      Phantom          -> TH.PhantomR
------------------------------
reifyThing :: TcTyThing -> TcM TH.Info
-- The only reason this is monadic is for error reporting,
-- which in turn is mainly for the case when TH can't express
-- some random GHC extension

-- A global Id: distinguish class methods and record selectors from
-- ordinary variables.
reifyThing (AGlobal (AnId id))
  = do  { ty <- reifyType (idType id)
        ; let v = reifyName id
        ; case idDetails id of
            ClassOpId cls -> return (TH.ClassOpI v ty (reifyName cls))
            RecSelId{sel_tycon=RecSelData tc}
                          -> return (TH.VarI (reifySelector id tc) ty Nothing)
            _             -> return (TH.VarI v ty Nothing)
    }

reifyThing (AGlobal (ATyCon tc))   = reifyTyCon tc
reifyThing (AGlobal (AConLike (RealDataCon dc)))
  = do  { let name = dataConName dc
        ; ty <- reifyType (idType (dataConWrapId dc))
        ; return (TH.DataConI (reifyName name) ty
                              (reifyName (dataConOrigTyCon dc)))
        }
reifyThing (AGlobal (AConLike (PatSynCon ps)))
  = do { let name = reifyName ps
       ; ty <- reifyPatSynType (patSynSig ps)
       ; return (TH.PatSynI name ty) }

reifyThing (ATcId {tct_id = id})
  = do  { ty1 <- zonkTcType (idType id) -- Make use of all the info we have, even
                                        -- though it may be incomplete
        ; ty2 <- reifyType ty1
        ; return (TH.VarI (reifyName id) ty2 Nothing) }

reifyThing (ATyVar tv tv1)
  = do { ty1 <- zonkTcTyVar tv1
       ; ty2 <- reifyType ty1
       ; return (TH.TyVarI (reifyName tv) ty2) }

reifyThing thing = pprPanic "reifyThing" (pprTcTyThingCategory thing)
-------------------------------------------
-- | Reify a single equation of a (closed) type family axiom.
reifyAxBranch :: TyCon -> CoAxBranch -> TcM TH.TySynEqn
reifyAxBranch fam_tc (CoAxBranch { cab_lhs = lhs, cab_rhs = rhs })
            -- remove kind patterns (#8884)
  = do { let lhs_types_only = filterOutInvisibleTypes fam_tc lhs
       ; lhs' <- reifyTypes lhs_types_only
       ; annot_th_lhs <- zipWith3M annotThType (mkIsPolyTvs fam_tvs)
                                   lhs_types_only lhs'
       ; rhs'  <- reifyType rhs
       ; return (TH.TySynEqn annot_th_lhs rhs') }
  where
    fam_tvs = tyConVisibleTyVars fam_tc
-- | Reify a type constructor: dispatches on whether it is a class, a
-- primitive tycon, a type/data family, a type synonym, or an ordinary
-- data/newtype declaration.  Guard order matters: e.g. the class check
-- must precede the others since a class has an associated tycon too.
reifyTyCon :: TyCon -> TcM TH.Info
reifyTyCon tc
  | Just cls <- tyConClass_maybe tc
  = reifyClass cls

  | isFunTyCon tc
  = return (TH.PrimTyConI (reifyName tc) 2 False)

  | isPrimTyCon tc
  = return (TH.PrimTyConI (reifyName tc) (tyConArity tc) (isUnliftedTyCon tc))

  | isTypeFamilyTyCon tc
  = do { let tvs      = tyConTyVars tc
             res_kind = tyConResKind tc
             resVar   = famTcResVar tc

       ; kind' <- reifyKind res_kind
       ; let (resultSig, injectivity) =
                 case resVar of
                   Nothing   -> (TH.KindSig kind', Nothing)
                   Just name ->
                     let thName   = reifyName name
                         injAnnot = tyConInjectivityInfo tc
                         sig = TH.TyVarSig (TH.KindedTV thName kind')
                         inj = case injAnnot of
                                 NotInjective -> Nothing
                                 Injective ms ->
                                     Just (TH.InjectivityAnn thName injRHS)
                                   where
                                     injRHS = map (reifyName . tyVarName)
                                                  (filterByList ms tvs)
                     in (sig, inj)
       ; tvs' <- reifyTyVars (tyConVisibleTyVars tc)
       ; let tfHead =
               TH.TypeFamilyHead (reifyName tc) tvs' resultSig injectivity
       ; if isOpenTypeFamilyTyCon tc
         then do { fam_envs <- tcGetFamInstEnvs
                 ; instances <- reifyFamilyInstances tc
                                  (familyInstances fam_envs tc)
                 ; return (TH.FamilyI (TH.OpenTypeFamilyD tfHead) instances) }
         else do { eqns <-
                     case isClosedSynFamilyTyConWithAxiom_maybe tc of
                       Just ax -> mapM (reifyAxBranch tc) $
                                  fromBranches $ coAxiomBranches ax
                       Nothing -> return []
                 ; return (TH.FamilyI (TH.ClosedTypeFamilyD tfHead eqns)
                                      []) } }

  | isDataFamilyTyCon tc
  = do { let res_kind = tyConResKind tc
       ; kind' <- fmap Just (reifyKind res_kind)
       ; tvs' <- reifyTyVars (tyConVisibleTyVars tc)
       ; fam_envs <- tcGetFamInstEnvs
       ; instances <- reifyFamilyInstances tc (familyInstances fam_envs tc)
       ; return (TH.FamilyI
                       (TH.DataFamilyD (reifyName tc) tvs' kind') instances) }

  | Just (_, rhs) <- synTyConDefn_maybe tc  -- Vanilla type synonym
  = do { rhs' <- reifyType rhs
       ; tvs' <- reifyTyVars (tyConVisibleTyVars tc)
       ; return (TH.TyConI
                   (TH.TySynD (reifyName tc) tvs' rhs'))
       }

  | otherwise
  -- Ordinary data or newtype declaration
  = do { cxt <- reifyCxt (tyConStupidTheta tc)
       ; let tvs      = tyConTyVars tc
             dataCons = tyConDataCons tc
             isGadt   = isGadtSyntaxTyCon tc
       ; cons <- mapM (reifyDataCon isGadt (mkTyVarTys tvs)) dataCons
       ; r_tvs <- reifyTyVars (tyConVisibleTyVars tc)
       ; let name = reifyName tc
             deriv = []        -- Don't know about deriving
             -- A newtype has exactly one constructor, so 'head cons'
             -- cannot fail here.
             decl | isNewTyCon tc =
                      TH.NewtypeD cxt name r_tvs Nothing (head cons) deriv
                  | otherwise =
                      TH.DataD cxt name r_tvs Nothing cons deriv
       ; return (TH.TyConI decl) }
-- | Reify a data constructor.  The Bool says whether the parent tycon
-- was declared with GADT syntax; the H98 and GADT paths compute their
-- existentials/context/argument types differently.
reifyDataCon :: Bool -> [Type] -> DataCon -> TcM TH.Con
reifyDataCon isGadtDataCon tys dc
  = do { let -- used for H98 data constructors
             (ex_tvs, theta, arg_tys)
                 = dataConInstSig dc tys
             -- used for GADTs data constructors
             g_user_tvs' = dataConUserTyVars dc
             (g_univ_tvs, _, g_eq_spec, g_theta', g_arg_tys', g_res_ty')
                 = dataConFullSig dc
             (srcUnpks, srcStricts)
                 = mapAndUnzip reifySourceBang (dataConSrcBangs dc)
             dcdBangs  = zipWith TH.Bang srcUnpks srcStricts
             fields    = dataConFieldLabels dc
             name      = reifyName dc
             -- Universal tvs present in eq_spec need to be filtered out, as
             -- they will not appear anywhere in the type.
             eq_spec_tvs = mkVarSet (map eqSpecTyVar g_eq_spec)

       ; (univ_subst, _)
              -- See Note [Freshen reified GADT constructors' universal tyvars]
           <- freshenTyVarBndrs $
              filterOut (`elemVarSet` eq_spec_tvs) g_univ_tvs
       ; let (tvb_subst, g_user_tvs)
                         = mapAccumL substTyVarBndr univ_subst g_user_tvs'
             g_theta   = substTys tvb_subst g_theta'
             g_arg_tys = substTys tvb_subst g_arg_tys'
             g_res_ty  = substTy  tvb_subst g_res_ty'

       ; r_arg_tys <- reifyTypes (if isGadtDataCon then g_arg_tys else arg_tys)

       -- Pick the TH constructor form: record, record-GADT, infix,
       -- GADT, or plain prefix.
       ; main_con <-
           if | not (null fields) && not isGadtDataCon ->
                  return $ TH.RecC name (zip3 (map reifyFieldLabel fields)
                                         dcdBangs r_arg_tys)
              | not (null fields) -> do
                  { res_ty <- reifyType g_res_ty
                  ; return $ TH.RecGadtC [name]
                                     (zip3 (map (reifyName . flSelector) fields)
                                      dcdBangs r_arg_tys) res_ty }
                -- We need to check not isGadtDataCon here because GADT
                -- constructors can be declared infix.
                -- See Note [Infix GADT constructors] in TcTyClsDecls.
              | dataConIsInfix dc && not isGadtDataCon ->
                  ASSERT( arg_tys `lengthIs` 2 ) do
                  { let [r_a1, r_a2] = r_arg_tys
                        [s1,   s2]   = dcdBangs
                  ; return $ TH.InfixC (s1,r_a1) name (s2,r_a2) }
              | isGadtDataCon -> do
                  { res_ty <- reifyType g_res_ty
                  ; return $ TH.GadtC [name] (dcdBangs `zip` r_arg_tys) res_ty }
              | otherwise ->
                  return $ TH.NormalC name (dcdBangs `zip` r_arg_tys)

       -- Wrap in ForallC only when there really are existentials or a
       -- context, to keep the reified output minimal.
       ; let (ex_tvs', theta') | isGadtDataCon = (g_user_tvs, g_theta)
                               | otherwise     = (ex_tvs, theta)
             ret_con | null ex_tvs' && null theta' = return main_con
                     | otherwise                   = do
                         { cxt <- reifyCxt theta'
                         ; ex_tvs'' <- reifyTyVars ex_tvs'
                         ; return (TH.ForallC ex_tvs'' cxt main_con) }

       ; ASSERT( arg_tys `equalLength` dcdBangs )
         ret_con }
{-
Note [Freshen reified GADT constructors' universal tyvars]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose one were to reify this GADT:
data a :~: b where
Refl :: forall a b. (a ~ b) => a :~: b
We ought to be careful here about the uniques we give to the occurrences of `a`
and `b` in this definition. That is because in the original DataCon, all uses
of `a` and `b` have the same unique, since `a` and `b` are both universally
quantified type variables--that is, they are used in both the (:~:) tycon as
well as in the constructor type signature. But when we turn the DataCon
definition into the reified one, the `a` and `b` in the constructor type
signature becomes differently scoped than the `a` and `b` in `data a :~: b`.
While it wouldn't technically be *wrong* per se to re-use the same uniques for
`a` and `b` across these two different scopes, it's somewhat annoying for end
users of Template Haskell, since they wouldn't be able to rely on the
assumption that all TH names have globally distinct uniques (#13885). For this
reason, we freshen the universally quantified tyvars that go into the reified
GADT constructor type signature to give them distinct uniques from their
counterparts in the tycon.
-}
------------------------------
-- | Reify a class declaration together with its instances: context,
-- fundeps, associated types (with their default instances), and method
-- signatures (with default-method signatures where generic).
reifyClass :: Class -> TcM TH.Info
reifyClass cls
  = do  { cxt <- reifyCxt theta
        ; inst_envs <- tcGetInstEnvs
        ; insts <- reifyClassInstances cls (InstEnv.classInstances inst_envs cls)
        ; assocTys <- concatMapM reifyAT ats
        ; ops <- concatMapM reify_op op_stuff
        ; tvs' <- reifyTyVars (tyConVisibleTyVars (classTyCon cls))
        ; let dec = TH.ClassD cxt (reifyName cls) tvs' fds' (assocTys ++ ops)
        ; return (TH.ClassI dec insts) }
  where
    (_, fds, theta, _, ats, op_stuff) = classExtraBigSig cls
    fds' = map reifyFunDep fds

    -- One method: its signature, plus a default-signature decl when the
    -- default method is generic.
    reify_op (op, def_meth)
      = do { ty <- reifyType (idType op)
           ; let nm' = reifyName op
           ; case def_meth of
                Just (_, GenericDM gdm_ty) ->
                  do { gdm_ty' <- reifyType gdm_ty
                     ; return [TH.SigD nm' ty, TH.DefaultSigD nm' gdm_ty'] }
                _ -> return [TH.SigD nm' ty] }

    -- One associated type: the family head plus its default instance,
    -- if any.
    reifyAT :: ClassATItem -> TcM [TH.Dec]
    reifyAT (ATI tycon def) = do
      tycon' <- reifyTyCon tycon
      case tycon' of
        TH.FamilyI dec _ -> do
          let (tyName, tyArgs) = tfNames dec
          (dec :) <$> maybe (return [])
                            (fmap (:[]) . reifyDefImpl tyName tyArgs . fst)
                            def
        _ -> pprPanic "reifyAT" (text (show tycon'))

    -- Turn an associated-type default into a TySynInstD.
    reifyDefImpl :: TH.Name -> [TH.Name] -> Type -> TcM TH.Dec
    reifyDefImpl n args ty =
      TH.TySynInstD n . TH.TySynEqn (map TH.VarT args) <$> reifyType ty

    tfNames :: TH.Dec -> (TH.Name, [TH.Name])
    tfNames (TH.OpenTypeFamilyD (TH.TypeFamilyHead n args _ _))
      = (n, map bndrName args)
    tfNames d = pprPanic "tfNames" (text (show d))

    bndrName :: TH.TyVarBndr -> TH.Name
    bndrName (TH.PlainTV n)    = n
    bndrName (TH.KindedTV n _) = n
------------------------------
-- | Annotate (with TH.SigT) a type if the first parameter is True
-- and if the type contains a free variable.
-- This is used to annotate type patterns for poly-kinded tyvars in
-- reifying class and type instances. See #8953 and th/T8953.
annotThType :: Bool   -- True <=> annotate
            -> TyCoRep.Type -> TH.Type -> TcM TH.Type
-- tiny optimization: if the type is annotated, don't annotate again.
annotThType _ _ th_ty@(TH.SigT {}) = return th_ty
annotThType True ty th_ty
  | not (isEmptyVarSet (filterVarSet isTyVar (tyCoVarsOfType ty)))
  = do th_ki <- reifyKind (typeKind ty)
       return (TH.SigT th_ty th_ki)
annotThType _ _ th_ty = return th_ty
-- | For every type variable in the input,
-- report whether or not the tv is poly-kinded. This is used to eventually
-- feed into 'annotThType'.
mkIsPolyTvs :: [TyVar] -> [Bool]
mkIsPolyTvs = map is_poly_tv
  where
    -- Poly-kinded <=> the tv's kind mentions a free type variable.
    is_poly_tv tv =
      not (isEmptyVarSet (filterVarSet isTyVar (tyCoVarsOfType (tyVarKind tv))))
------------------------------
-- | Reify a list of class instances, annotating poly-kinded type
-- arguments with kind signatures (see 'annotThType').
reifyClassInstances :: Class -> [ClsInst] -> TcM [TH.Dec]
reifyClassInstances cls insts =
  mapM (reifyClassInstance is_poly_tvs) insts
  where
    is_poly_tvs = mkIsPolyTvs (tyConVisibleTyVars (classTyCon cls))
-- | Reify one class instance as a (body-less) TH.InstanceD, recovering
-- the context and head from the dfun's type.
reifyClassInstance :: [Bool]  -- True <=> the corresponding tv is poly-kinded
                              -- includes only *visible* tvs
                   -> ClsInst -> TcM TH.Dec
reifyClassInstance is_poly_tvs i
  = do { cxt <- reifyCxt theta
       ; let vis_types = filterOutInvisibleTypes cls_tc types
       ; thtypes <- reifyTypes vis_types
       ; annot_thtypes <- zipWith3M annotThType is_poly_tvs vis_types thtypes
       ; let head_ty = mkThAppTs (TH.ConT (reifyName cls)) annot_thtypes
       ; return $ (TH.InstanceD over cxt head_ty []) }
  where
     (_tvs, theta, cls, types) = tcSplitDFunTy (idType dfun)
     cls_tc = classTyCon cls
     dfun   = instanceDFunId i
     -- Translate the instance's overlap pragma, if any.
     over   = case overlapMode (is_flag i) of
                NoOverlap _    -> Nothing
                Overlappable _ -> Just TH.Overlappable
                Overlapping _  -> Just TH.Overlapping
                Overlaps _     -> Just TH.Overlaps
                Incoherent _   -> Just TH.Incoherent
------------------------------
-- | Reify a list of family instances, annotating poly-kinded type
-- patterns with kind signatures (see 'annotThType').
reifyFamilyInstances :: TyCon -> [FamInst] -> TcM [TH.Dec]
reifyFamilyInstances fam_tc fam_insts =
  mapM (reifyFamilyInstance is_poly_tvs) fam_insts
  where
    is_poly_tvs = mkIsPolyTvs (tyConVisibleTyVars fam_tc)
-- | Reify one type- or data-family instance.
reifyFamilyInstance :: [Bool] -- True <=> the corresponding tv is poly-kinded
                              -- includes only *visible* tvs
                    -> FamInst -> TcM TH.Dec
reifyFamilyInstance is_poly_tvs inst@(FamInst { fi_flavor = flavor
                                              , fi_fam = fam
                                              , fi_tvs = fam_tvs
                                              , fi_tys = lhs
                                              , fi_rhs = rhs })
  = case flavor of
      SynFamilyInst ->
               -- remove kind patterns (#8884)
        do { let lhs_types_only = filterOutInvisibleTypes fam_tc lhs
           ; th_lhs <- reifyTypes lhs_types_only
           ; annot_th_lhs <- zipWith3M annotThType is_poly_tvs lhs_types_only
                                       th_lhs
           ; th_rhs <- reifyType rhs
           ; return (TH.TySynInstD (reifyName fam)
                                   (TH.TySynEqn annot_th_lhs th_rhs)) }

      DataFamilyInst rep_tc ->
        do { let rep_tvs = tyConTyVars rep_tc
                 fam' = reifyName fam

                   -- eta-expand lhs types, because sometimes data/newtype
                   -- instances are eta-reduced; See Trac #9692
                   -- See Note [Eta reduction for data family axioms]
                   -- in TcInstDcls
                 (_rep_tc, rep_tc_args) = splitTyConApp rhs
                 etad_tyvars            = dropList rep_tc_args rep_tvs
                 etad_tys               = mkTyVarTys etad_tyvars
                 eta_expanded_tvs = mkTyVarTys fam_tvs `chkAppend` etad_tys
                 eta_expanded_lhs = lhs `chkAppend` etad_tys
                 dataCons         = tyConDataCons rep_tc
                 isGadt           = isGadtSyntaxTyCon rep_tc
           ; cons <- mapM (reifyDataCon isGadt eta_expanded_tvs) dataCons
           ; let types_only = filterOutInvisibleTypes fam_tc eta_expanded_lhs
           ; th_tys <- reifyTypes types_only
           ; annot_th_tys <- zipWith3M annotThType is_poly_tvs types_only th_tys
           -- A newtype instance has exactly one constructor, so
           -- 'head cons' cannot fail in that branch.
           ; return $
               if isNewTyCon rep_tc
               then TH.NewtypeInstD [] fam' annot_th_tys Nothing (head cons) []
               else TH.DataInstD    [] fam' annot_th_tys Nothing cons        []
           }
  where
    fam_tc = famInstTyCon inst
------------------------------
-- | Convert a GHC type to its TH representation.  The first two guards
-- must come before the structural equations: * and Constraint are
-- themselves TyConApps.
reifyType :: TyCoRep.Type -> TcM TH.Type
-- Monadic only because of failure
reifyType ty | isLiftedTypeKind ty = return TH.StarT
             | isConstraintKind ty = return TH.ConstraintT
reifyType ty@(ForAllTy {})  = reify_for_all ty
reifyType (LitTy t)         = do { r <- reifyTyLit t; return (TH.LitT r) }
reifyType (TyVarTy tv)      = return (TH.VarT (reifyName tv))
reifyType (TyConApp tc tys) = reify_tc_app tc tys   -- Do not expand type synonyms here
reifyType (AppTy t1 t2)     = do { [r1,r2] <- reifyTypes [t1,t2] ; return (r1 `TH.AppT` r2) }
reifyType ty@(FunTy t1 t2)
  | isPredTy t1 = reify_for_all ty  -- Types like ((?x::Int) => Char -> Char)
  | otherwise   = do { [r1,r2] <- reifyTypes [t1,t2] ; return (TH.ArrowT `TH.AppT` r1 `TH.AppT` r2) }
reifyType (CastTy t _)      = reifyType t -- Casts are ignored in TH
reifyType ty@(CoercionTy {})= noTH (sLit "coercions in types") (ppr ty)
-- | Reify a sigma type (foralls and/or context) as a TH.ForallT.
reify_for_all :: TyCoRep.Type -> TcM TH.Type
reify_for_all ty = do
    cxt' <- reifyCxt cxt
    tau' <- reifyType tau
    tvs' <- reifyTyVars tvs
    return (TH.ForallT tvs' cxt' tau')
  where
    (tvs, cxt, tau) = tcSplitSigmaTy ty
-- | Reify a type-level literal (numeric or string).
reifyTyLit :: TyCoRep.TyLit -> TcM TH.TyLit
reifyTyLit tylit = return $ case tylit of
  NumTyLit n -> TH.NumTyLit n
  StrTyLit s -> TH.StrTyLit (unpackFS s)
-- | Reify each type in a list.
reifyTypes :: [Type] -> TcM [TH.Type]
reifyTypes = traverse reifyType
reifyPatSynType
  :: ([TyVar], ThetaType, [TyVar], ThetaType, [Type], Type) -> TcM TH.Type
-- reifies a pattern synonym's type and returns its *complete* type
-- signature; see NOTE [Pattern synonym signatures and Template
-- Haskell]
reifyPatSynType (univTyVars, req, exTyVars, prov, argTys, resTy) = do
  univTyVars' <- reifyTyVars univTyVars
  req'        <- reifyCxt req
  exTyVars'   <- reifyTyVars exTyVars
  prov'       <- reifyCxt prov
  tau'        <- reifyType (mkFunTys argTys resTy)
  -- Nested foralls: universals + required context outside, then
  -- existentials + provided context.
  return (TH.ForallT univTyVars' req' (TH.ForallT exTyVars' prov' tau'))
-- | Kinds are types, so reifying a kind is just reifying a type.
reifyKind :: Kind -> TcM TH.Kind
reifyKind = reifyType
-- | Reify each predicate of a context.
reifyCxt :: [PredType] -> TcM [TH.Pred]
reifyCxt = traverse reifyPred
-- | Reify a class functional dependency.
reifyFunDep :: ([TyVar], [TyVar]) -> TH.FunDep
reifyFunDep (xs, ys) = TH.FunDep (names xs) (names ys)
  where
    names = map reifyName
-- | Reify type-variable binders, always with an explicit kind.
reifyTyVars :: [TyVar] -> TcM [TH.TyVarBndr]
reifyTyVars = mapM reify_tv
  where
    -- even if the kind is *, we need to include a kind annotation,
    -- in case a poly-kind would be inferred without the annotation.
    -- See #8953 or test th/T8953
    reify_tv tv = TH.KindedTV (reifyName tv) <$> reifyKind (tyVarKind tv)
{-
Note [Kind annotations on TyConApps]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A poly-kinded tycon sometimes needs a kind annotation to be unambiguous.
For example:
type family F a :: k
type instance F Int = (Proxy :: * -> *)
type instance F Bool = (Proxy :: (* -> *) -> *)
It's hard to figure out where these annotations should appear, so we do this:
Suppose we have a tycon application (T ty1 ... tyn). Assuming that T is not
oversaturated (more on this later), we can assume T's declaration is of the form
T (tvb1 :: s1) ... (tvbn :: sn) :: p. If any kind variable that
is free in p is not free in an injective position in tvb1 ... tvbn,
then we put on a kind annotation, since we would not otherwise be able to infer
the kind of the whole tycon application.
The injective positions in a tyvar binder are the injective positions in the
kind of its tyvar, provided the tyvar binder is either:
* Anonymous. For example, in the promoted data constructor '(:):
'(:) :: forall a. a -> [a] -> [a]
The second and third tyvar binders (of kinds `a` and `[a]`) are both
anonymous, so if we had '(:) 'True '[], then the inferred kinds of 'True and
'[] would contribute to the inferred kind of '(:) 'True '[].
* Has required visibility. For example, in the type family:
type family Wurble k (a :: k) :: k
Wurble :: forall k -> k -> k
The first tyvar binder (of kind `forall k`) has required visibility, so if
we had Wurble (Maybe a) Nothing, then the inferred kind of Maybe a would
contribute to the inferred kind of Wurble (Maybe a) Nothing.
An injective position in a type is one that does not occur as an argument to
a non-injective type constructor (e.g., non-injective type families). See
injectiveVarsOfType.
How can we be sure that this is correct? That is, how can we be sure that in the
event that we leave off a kind annotation, that one could infer the kind of the
tycon application from its arguments? It's essentially a proof by induction: if
we can infer the kinds of every subtree of a type, then the whole tycon
application will have an inferrable kind--unless, of course, the remainder of
the tycon application's kind has uninstantiated kind variables.
An earlier implementation of this algorithm only checked if p contained any
free variables. But this was unsatisfactory, since a datatype like this:
data Foo = Foo (Proxy '[False, True])
Would be reified like this:
data Foo = Foo (Proxy ('(:) False ('(:) True ('[] :: [Bool])
:: [Bool]) :: [Bool]))
Which has a rather excessive amount of kind annotations. With the current
algorithm, we instead reify Foo to this:
data Foo = Foo (Proxy ('(:) False ('(:) True ('[] :: [Bool]))))
Since in the case of '[], the kind p is [a], and there are no arguments in the
kind of '[]. On the other hand, in the case of '(:) True '[], the kind p is
(forall a. [a]), but a occurs free in the first and second arguments of the
full kind of '(:), which is (forall a. a -> [a] -> [a]). (See Trac #14060.)
What happens if T is oversaturated? That is, if T's kind has fewer than n
arguments, in the case that the concrete application instantiates a result
kind variable with an arrow kind? If we run out of arguments, we do not attach
a kind annotation. This should be a rare case, indeed. Here is an example:
data T1 :: k1 -> k2 -> *
data T2 :: k1 -> k2 -> *
type family G (a :: k) :: k
type instance G T1 = T2
type instance F Char = (G T1 Bool :: (* -> *) -> *) -- F from above
Here G's kind is (forall k. k -> k), and the desugared RHS of that last
instance of F is (G (* -> (* -> *) -> *) (T1 * (* -> *)) Bool). According to
the algorithm above, there are 3 arguments to G so we should peel off 3
arguments in G's kind. But G's kind has only two arguments. This is the
rare special case, and we choose not to annotate the application of G with
a kind signature. After all, we needn't do this, since that instance would
be reified as:
type instance F Char = G (T1 :: * -> (* -> *) -> *) Bool
So the kind of G isn't ambiguous anymore due to the explicit kind annotation
on its argument. See #8953 and test th/T8953.
-}
-- | Reify a type-constructor application, translating special tycons
-- (tuples, lists, equality, ...) to their dedicated TH forms and adding
-- a kind signature when required for unambiguity.
-- See Note [Kind annotations on TyConApps].
reify_tc_app :: TyCon -> [Type.Type] -> TcM TH.Type
reify_tc_app tc tys
  = do { tys' <- reifyTypes (filterOutInvisibleTypes tc tys)
       ; maybe_sig_t (mkThAppTs r_tc tys') }
  where
    arity       = tyConArity tc
    tc_binders  = tyConBinders tc
    tc_res_kind = tyConResKind tc

    r_tc | isUnboxedSumTyCon tc           = TH.UnboxedSumT (arity `div` 2)
         | isUnboxedTupleTyCon tc         = TH.UnboxedTupleT (arity `div` 2)
         | isPromotedTupleTyCon tc        = TH.PromotedTupleT (arity `div` 2)
             -- See Note [Unboxed tuple RuntimeRep vars] in TyCon
         | isTupleTyCon tc                = if isPromotedDataCon tc
                                            then TH.PromotedTupleT arity
                                            else TH.TupleT arity
         | tc `hasKey` listTyConKey       = TH.ListT
         | tc `hasKey` nilDataConKey      = TH.PromotedNilT
         | tc `hasKey` consDataConKey     = TH.PromotedConsT
         | tc `hasKey` heqTyConKey        = TH.EqualityT
         | tc `hasKey` eqPrimTyConKey     = TH.EqualityT
         | tc `hasKey` eqReprPrimTyConKey = TH.ConT (reifyName coercibleTyCon)
         | isPromotedDataCon tc           = TH.PromotedT (reifyName tc)
         | otherwise                      = TH.ConT (reifyName tc)

    -- See Note [Kind annotations on TyConApps]
    maybe_sig_t th_type
      | needs_kind_sig
      = do { let full_kind = typeKind (mkTyConApp tc tys)
           ; th_full_kind <- reifyKind full_kind
           ; return (TH.SigT th_type th_full_kind) }
      | otherwise
      = return th_type

    -- The oversaturated case (more tys than binders) never gets a
    -- signature; see the Note.
    needs_kind_sig
      | GT <- compareLength tys tc_binders
      = False
      | otherwise
      = let (dropped_binders, remaining_binders)
              = splitAtList tys tc_binders
            result_kind  = mkTyConKind remaining_binders tc_res_kind
            result_vars  = tyCoVarsOfType result_kind
            dropped_vars = fvVarSet $
                           mapUnionFV injectiveVarsOfBinder dropped_binders
        in not (subVarSet result_vars dropped_vars)
-- | Reify a predicate (constraint) type.
reifyPred :: TyCoRep.PredType -> TcM TH.Pred
reifyPred ty
  -- We could reify the invisible parameter as a class but it seems
  -- nicer to support them properly...
  | isIPPred ty = noTH (sLit "implicit parameters") (ppr ty)
  | otherwise   = reifyType ty
------------------------------
-- | Convert a GHC name to a TH name: global (NameG) for external
-- names, unique-based (NameU) for local ones.
reifyName :: NamedThing n => n -> TH.Name
reifyName thing
  | isExternalName name = mk_varg pkg_str mod_str occ_str
  | otherwise           = TH.mkNameU occ_str (getKey (getUnique name))
        -- Many of the things we reify have local bindings, and
        -- NameL's aren't supposed to appear in binding positions, so
        -- we use NameU.  When/if we start to reify nested things, that
        -- have free variables, we may need to generate NameL's for them.
  where
    name    = getName thing
    mod     = ASSERT( isExternalName name ) nameModule name
    pkg_str = unitIdString (moduleUnitId mod)
    mod_str = moduleNameString (moduleName mod)
    occ_str = occNameString occ
    occ     = nameOccName name
    -- Pick the NameG constructor-function matching the namespace.
    mk_varg | OccName.isDataOcc occ = TH.mkNameG_d
            | OccName.isVarOcc  occ = TH.mkNameG_v
            | OccName.isTcOcc   occ = TH.mkNameG_tc
            | otherwise             = pprPanic "reifyName" (ppr name)
-- See Note [Reifying field labels]
-- | Reify a record-field label.  Overloaded fields (from
-- DuplicateRecordFields) use a qualified NameQ built from the label,
-- not the mangled selector name.
reifyFieldLabel :: FieldLabel -> TH.Name
reifyFieldLabel fl
  | flIsOverloaded fl
              = TH.Name (TH.mkOccName occ_str) (TH.NameQ (TH.mkModName mod_str))
  | otherwise = TH.mkNameG_v pkg_str mod_str occ_str
  where
    name    = flSelector fl
    mod     = ASSERT( isExternalName name ) nameModule name
    pkg_str = unitIdString (moduleUnitId mod)
    mod_str = moduleNameString (moduleName mod)
    occ_str = unpackFS (flLabel fl)
-- | Reify a record selector's field name, given its parent tycon.
reifySelector :: Id -> TyCon -> TH.Name
reifySelector id tc =
  maybe missing reifyFieldLabel
        (find ((idName id ==) . flSelector) (tyConFieldLabels tc))
  where
    missing = pprPanic "reifySelector: missing field" (ppr id $$ ppr tc)
------------------------------
-- | Implements 'TH.qReifyFixity': Nothing when the name has no fixity
-- declaration (so TH can distinguish "no declaration" from defaultFixity).
reifyFixity :: Name -> TcM (Maybe TH.Fixity)
reifyFixity name = do
    (found, fix) <- lookupFixityRn_help name
    return (if found then Just (conv_fix fix) else Nothing)
  where
    conv_fix (BasicTypes.Fixity _ i d) = TH.Fixity i (conv_dir d)
    conv_dir BasicTypes.InfixR = TH.InfixR
    conv_dir BasicTypes.InfixL = TH.InfixL
    conv_dir BasicTypes.InfixN = TH.InfixN
-- | Translate a source-level unpackedness annotation to its TH form.
reifyUnpackedness :: DataCon.SrcUnpackedness -> TH.SourceUnpackedness
reifyUnpackedness su = case su of
  NoSrcUnpack -> TH.NoSourceUnpackedness
  SrcNoUnpack -> TH.SourceNoUnpack
  SrcUnpack   -> TH.SourceUnpack
-- | Translate a source-level strictness annotation to its TH form.
reifyStrictness :: DataCon.SrcStrictness -> TH.SourceStrictness
reifyStrictness ss = case ss of
  NoSrcStrict -> TH.NoSourceStrictness
  SrcStrict   -> TH.SourceStrict
  SrcLazy     -> TH.SourceLazy
-- | Split a source bang into its TH unpackedness and strictness parts.
reifySourceBang :: DataCon.HsSrcBang
                -> (TH.SourceUnpackedness, TH.SourceStrictness)
reifySourceBang (HsSrcBang _ unpk strct) =
  (reifyUnpackedness unpk, reifyStrictness strct)
-- | Translate GHC's decided (post-analysis) strictness to its TH form.
reifyDecidedStrictness :: DataCon.HsImplBang -> TH.DecidedStrictness
reifyDecidedStrictness ib = case ib of
  HsLazy     -> TH.DecidedLazy
  HsStrict   -> TH.DecidedStrict
  HsUnpack{} -> TH.DecidedUnpack
------------------------------
-- | Convert a TH annotation target (name or module) to a Core
-- annotation target, resolving the name if necessary.
lookupThAnnLookup :: TH.AnnLookup -> TcM CoreAnnTarget
lookupThAnnLookup (TH.AnnLookupName th_nm) = fmap NamedTarget (lookupThName th_nm)
lookupThAnnLookup (TH.AnnLookupModule (TH.Module pn mn))
  = return $ ModuleTarget $
    mkModule (stringToUnitId $ TH.pkgString pn) (mkModuleName $ TH.modString mn)
-- | Implements 'TH.qReifyAnnotations': deserialise all annotations on
-- the given target, from both the EPS/HPT and the module being compiled.
reifyAnnotations :: Data a => TH.AnnLookup -> TcM [a]
reifyAnnotations th_name = do
  name <- lookupThAnnLookup th_name
  topEnv <- getTopEnv
  epsHptAnns <- liftIO $ prepareAnnotations topEnv Nothing
  tcg <- getGblEnv
  let fromEpsHpt = findAnns deserializeWithData epsHptAnns name
      fromTcgEnv = findAnns deserializeWithData (tcg_ann_env tcg) name
  return (fromEpsHpt ++ fromTcgEnv)
------------------------------
-- | Convert a GHC 'Module' to a TH 'TH.Module'.
modToTHMod :: Module -> TH.Module
modToTHMod m = TH.Module pkg nm
  where
    pkg = TH.PkgName (unitIdString (moduleUnitId m))
    nm  = TH.ModName (moduleNameString (moduleName m))
-- | Implements 'TH.qReifyModule': report a module's imports (for the
-- module being compiled) or its recorded usages (read from its
-- interface file for any other module).
reifyModule :: TH.Module -> TcM TH.ModuleInfo
reifyModule (TH.Module (TH.PkgName pkgString) (TH.ModName mString)) = do
  this_mod <- getModule
  let reifMod = mkModule (stringToUnitId pkgString) (mkModuleName mString)
  if (reifMod == this_mod) then reifyThisModule else reifyFromIface reifMod
    where
      reifyThisModule = do
        usages <- fmap (map modToTHMod . moduleEnvKeys . imp_mods) getImports
        return $ TH.ModuleInfo usages

      reifyFromIface reifMod = do
        iface <- loadInterfaceForModule (text "reifying module from TH for" <+> ppr reifMod) reifMod
        let usages = [modToTHMod m | usage <- mi_usages iface,
                      Just m <- [usageToModule (moduleUnitId reifMod) usage] ]
        return $ TH.ModuleInfo usages
-- | Turn an interface-file 'Usage' record into the 'Module' it refers to,
-- if any.  Home-module usages are qualified with the given unit id; file
-- usages have no associated module.
usageToModule :: UnitId -> Usage -> Maybe Module
usageToModule this_pkg usage = case usage of
  UsageFile {}                           -> Nothing
  UsageHomeModule { usg_mod_name = mn }  -> Just (mkModule this_pkg mn)
  UsagePackageModule { usg_mod = m }     -> Just m
  UsageMergedRequirement { usg_mod = m } -> Just m
------------------------------
-- | Apply a TH type constructor to a list of argument types,
-- left-associatively (@f [a,b] ==> (f `AppT` a) `AppT` b@).
mkThAppTs :: TH.Type -> [TH.Type] -> TH.Type
mkThAppTs = foldl TH.AppT
-- | Fail with a "Can't represent <thing> in Template Haskell" error,
-- showing the offending construct indented underneath.
noTH :: LitString -> SDoc -> TcM a
noTH s d = failWithTc msg
  where
    msg = hsep [ text "Can't represent" <+> ptext s <+>
                 text "in Template Haskell:"
               , nest 2 d ]
-- | Render a Template Haskell value as an 'SDoc' via TH's own
-- pretty-printer.
ppr_th :: TH.Ppr a => a -> SDoc
ppr_th = text . TH.pprint
{-
Note [Reifying field labels]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When reifying a datatype declared with DuplicateRecordFields enabled, we want
the reified names of the fields to be labels rather than selector functions.
That is, we want (reify ''T) and (reify 'foo) to produce
data T = MkT { foo :: Int }
foo :: T -> Int
rather than
data T = MkT { $sel:foo:MkT :: Int }
$sel:foo:MkT :: T -> Int
because otherwise TH code that uses the field names as strings will silently do
the wrong thing. Thus we use the field label (e.g. foo) as the OccName, rather
than the selector (e.g. $sel:foo:MkT). Since the Orig name M.foo isn't in the
environment, NameG can't be used to represent such fields. Instead,
reifyFieldLabel uses NameQ.
However, this means that extracting the field name from the output of reify, and
trying to reify it again, may fail with an ambiguity error if there are multiple
such fields defined in the module (see the test case
overloadedrecflds/should_fail/T11103.hs). The "proper" fix requires changes to
the TH AST to make it able to represent duplicate record fields.
-}
| shlevy/ghc | compiler/typecheck/TcSplice.hs | bsd-3-clause | 84,211 | 39 | 28 | 24,873 | 15,784 | 7,908 | 7,876 | -1 | -1 |
{-# LANGUAGE Unsafe #-}
-----------------------------------------------------------------------------
-- |
-- Module : Control.Monad.ST.Unsafe
-- Copyright : (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : libraries@haskell.org
-- Stability : experimental
-- Portability : non-portable (requires universal quantification for runST)
--
-- This library provides support for /strict/ state threads, as
-- described in the PLDI \'94 paper by John Launchbury and Simon Peyton
-- Jones /Lazy Functional State Threads/.
--
-- Unsafe API.
--
-----------------------------------------------------------------------------
module Control.Monad.ST.Unsafe (
-- * Unsafe operations
unsafeInterleaveST,
unsafeDupableInterleaveST,
unsafeIOToST,
unsafeSTToIO
) where
import Control.Monad.ST.Imp
| rahulmutt/ghcvm | libraries/base/Control/Monad/ST/Unsafe.hs | bsd-3-clause | 912 | 0 | 4 | 164 | 48 | 39 | 9 | 7 | 0 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE RecursiveDo #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Main where
import Control.Lens
import Control.Monad
import Control.Monad.Fix
import qualified Data.Dependent.Map as DMap
import Data.Functor.Misc
import qualified Data.Map as M
import Data.These
#if defined(MIN_VERSION_these_lens) || (MIN_VERSION_these(0,8,0) && !MIN_VERSION_these(0,9,0))
import Data.These.Lens
#endif
import Reflex
import Reflex.EventWriter.Base
import Test.Run
main :: IO ()
main = do
  -- testOrdering: ten writers installed on one pulse; the collected event
  -- must preserve the order in which the tells were issued (10 down to 1).
  os1@[[Just [10,9,8,7,6,5,4,3,2,1]]] <- runApp' (unwrapApp testOrdering) $
    [ Just ()
    ]
  print os1
  -- testSimultaneous: odd writers fire on 'This', are replaced on 'That';
  -- the 'These' frame exercises replacement and firing simultaneously.
  os2@[[Just [1,3,5,7,9]],[Nothing,Nothing],[Just [2,4,6,8,10]],[Just [2,4,6,8,10],Nothing]]
    <- runApp' (unwrapApp testSimultaneous) $ map Just $
    [ This ()
    , That ()
    , This ()
    , These () ()
    ]
  print os2
  -- A replaced widget's simultaneous tell must not be counted: only the
  -- replacement's value (2) appears.
  os3@[[Nothing, Just [2]]] <- runApp' (unwrapApp testMoribundTellEvent) [Just ()]
  print os3
  -- Same property via traverseDMapWithKeyWithAdjust.
  os4@[[Nothing, Just [2]]] <- runApp' (unwrapApp testMoribundTellEventDMap) [Just ()]
  print os4
  -- A surviving DMap entry (1) still tells in the same frame as the
  -- inserted entry (2).
  os5@[[Nothing, Just [1, 2]]] <- runApp' (unwrapApp testLiveTellEventDMap) [Just ()]
  print os5
  os6 <- runApp' (unwrapApp delayedPulse) [Just ()]
  print os6
  -- delayedPulse is expected to produce no output; the strict pattern
  -- asserts this after printing, so a mismatch still shows the value.
  let ![[Nothing, Nothing]] = os6
  return ()
-- | Run an 'EventWriterT' computation, discarding its unit result and
-- returning only the event carrying the written values.
unwrapApp :: (Reflex t, Monad m) => (a -> EventWriterT t [Int] m ()) -> a -> m (Event t [Int])
unwrapApp mkWidget appIn = do
  (_unit, written) <- runEventWriterT (mkWidget appIn)
  return written
-- | Install ten writers (10 down to 1) that all fire on the same pulse;
-- the collected output must preserve the order of the 'tellEvent' calls.
testOrdering :: (Reflex t, Monad m) => Event t () -> EventWriterT t [Int] m ()
testOrdering pulse = mapM_ (\i -> tellEvent ([i] <$ pulse)) [10,9..1]
-- | Exercise writers under 'runWithReplace' when the replacing event can
-- fire in the same frame as the replaced widget's output ('These' case).
testSimultaneous :: (Reflex t, Adjustable t m, MonadHold t m) => Event t (These () ()) -> EventWriterT t [Int] m ()
testSimultaneous pulse = do
  -- e0 fires on 'This'/'These' occurrences, e1 on 'That'/'These'.
  let e0 = fmapMaybe (^? here) pulse
      e1 = fmapMaybe (^? there) pulse
  -- Odd writers (firing on e0) are installed initially; whenever e1 fires
  -- each is replaced by a writer telling i+1 instead.
  forM_ [1,3..9] $ \i -> runWithReplace (tellEvent ([i] <$ e0)) $ ffor e1 $ \_ -> tellEvent ([i+1] <$ e0)
-- | Test that a widget telling an event which fires at the same time it has been replaced
-- doesn't count along with the new widget.
testMoribundTellEvent
  :: forall t m
   . ( Reflex t
     , Adjustable t m
     , MonadHold t m
     , MonadFix m
     )
  => Event t ()
  -> EventWriterT t [Int] m ()
testMoribundTellEvent pulse = do
  rec let tellIntOnReplace :: Int -> EventWriterT t [Int] m ()
          tellIntOnReplace x = tellEvent $ [x] <$ rwrFinished
      -- rwrFinished is the replacement-completed event produced by the very
      -- 'runWithReplace' whose widgets write on it — hence RecursiveDo.
      -- The original widget (1) and its replacement (2) both try to tell
      -- when the replacement finishes; only the replacement should count.
      (_, rwrFinished) <- runWithReplace (tellIntOnReplace 1) $ tellIntOnReplace 2 <$ pulse
  return ()
-- | The equivalent of 'testMoribundTellEvent' for 'traverseDMapWithKeyWithAdjust'.
testMoribundTellEventDMap
  :: forall t m
   . ( Reflex t
     , Adjustable t m
     , MonadHold t m
     , MonadFix m
     )
  => Event t ()
  -> EventWriterT t [Int] m ()
testMoribundTellEventDMap pulse = do
  rec let tellIntOnReplace :: Int -> EventWriterT t [Int] m ()
          tellIntOnReplace x = tellEvent $ [x] <$ rwrFinished
      -- A single entry at key () holding 1 is patched to 2 when the pulse
      -- fires; only the replacement entry's tell should be observed.
      (_, rwrFinished :: Event t (PatchDMap (Const2 () Int) Identity)) <-
        traverseDMapWithKeyWithAdjust
          (\(Const2 ()) (Identity v) -> Identity . const v <$> tellIntOnReplace v)
          (mapToDMap $ M.singleton () 1)
          ((PatchDMap $ DMap.map (ComposeMaybe . Just) $ mapToDMap $ M.singleton () 2) <$ pulse)
  return ()
-- | Ensures that elements which are _not_ removed can still fire 'tellEvent's
-- during the same frame as other elements are updated.
testLiveTellEventDMap
  :: forall t m
   . ( Reflex t
     , Adjustable t m
     , MonadHold t m
     , MonadFix m
     )
  => Event t ()
  -> EventWriterT t [Int] m ()
testLiveTellEventDMap pulse = do
  rec let tellIntOnReplace :: Int -> EventWriterT t [Int] m ()
          tellIntOnReplace x = tellEvent $ [x] <$ rwrFinished
      -- Key 1 is present from the start and never patched out; key 2 is
      -- inserted when the pulse fires.  Both entries' tells are expected.
      (_, rwrFinished :: Event t (PatchDMap (Const2 Int ()) Identity)) <-
        traverseDMapWithKeyWithAdjust
          (\(Const2 k) (Identity ()) -> Identity <$> tellIntOnReplace k)
          (mapToDMap $ M.singleton 1 ())
          ((PatchDMap $ DMap.map (ComposeMaybe . Just) $ mapToDMap $ M.singleton 2 ()) <$ pulse)
  return ()
-- | A writer whose told event comes from a nested 'runWithReplace' and is
-- therefore delayed relative to the triggering pulse, while the outer
-- 'runWithReplace' replaces the whole widget on that same pulse.
-- NOTE(review): 'main' expects this to produce no output
-- ([[Nothing, Nothing]]); confirm against the EventWriter semantics.
delayedPulse
  :: forall t m
   . ( Reflex t
     , Adjustable t m
     , MonadHold t m
     , MonadFix m
     )
  => Event t ()
  -> EventWriterT t [Int] m ()
delayedPulse pulse = void $ flip runWithReplace (pure () <$ pulse) $ do
  -- This has the effect of delaying pulse' from pulse
  (_, pulse') <- runWithReplace (pure ()) $ pure [1] <$ pulse
  tellEvent pulse'
| ryantrinkle/reflex | test/EventWriterT.hs | bsd-3-clause | 4,504 | 0 | 19 | 1,043 | 1,763 | 917 | 846 | 108 | 1 |
--- Main module for TwoSorts example -----------------------------------------
module Main where
import StrategyLib
import Datatypes
--- Test case: Apply idTP to an integer --------------------------------------
-- The identity strategy leaves the integer untouched.
test1 :: Maybe Integer
test1 = applyTP idTP 1
--- Test case: Increment when faced with an integer --------------------------
-- Identity everywhere, except that Integers are incremented.
strat2 :: Monad m => TP m
strat2 = adhocTP idTP (\i -> return (i + 1 :: Integer))

test2 :: Maybe Integer
test2 = applyTP strat2 1
--- Test case: Negate when faced with a Boolean ------------------------------
-- Identity everywhere, except that Booleans are negated.
strat3 :: Monad m => TP m
strat3 = adhocTP idTP (\b -> return (not b))

test3 :: Maybe Bool
test3 = applyTP strat3 True
--- Test case: Increment all integers one can find ---------------------------
-- A small two-sorted term with the integers 1, 2 and 3 embedded in it.
term4 = SortA1 (SortB 1 (SortA1 (SortB 2 (SortA1 (SortB 3 SortA2)))))

-- Full top-down traversal applying the increment strategy everywhere.
strat4 :: Monad m => TP m
strat4 = full_tdTP strat2

test4 :: Maybe SortA
test4 = applyTP strat4 term4
------------------------------------------------------------------------------
-- Write each test result on its own line to Test.log.
main :: IO ()
main = writeFile "Test.log" $ unlines
  [ show test1
  , show test2
  , show test3
  , show test4
  ]
| forste/haReFork | StrategyLib-4.0-beta/examples/two-sorts-deriving/Main.hs | bsd-3-clause | 1,187 | 0 | 15 | 272 | 281 | 145 | 136 | 19 | 1 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
Loading interface files
-}
{-# LANGUAGE CPP #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module LoadIface (
-- Importing one thing
tcLookupImported_maybe, importDecl,
checkWiredInTyCon, ifCheckWiredInThing,
-- RnM/TcM functions
loadModuleInterface, loadModuleInterfaces,
loadSrcInterface, loadSrcInterface_maybe,
loadInterfaceForName, loadInterfaceForModule,
-- IfM functions
loadInterface, loadWiredInHomeIface,
loadSysInterface, loadUserInterface, loadPluginInterface,
findAndReadIface, readIface, -- Used when reading the module's old interface
loadDecls, -- Should move to TcIface and be renamed
initExternalPackageState,
ifaceStats, pprModIface, showIface
) where
#include "HsVersions.h"
import {-# SOURCE #-} TcIface( tcIfaceDecl, tcIfaceRules, tcIfaceInst,
tcIfaceFamInst, tcIfaceVectInfo, tcIfaceAnnotations )
import DynFlags
import IfaceSyn
import IfaceEnv
import HscTypes
import BasicTypes hiding (SuccessFlag(..))
import TcRnMonad
import Constants
import PrelNames
import PrelInfo
import PrimOp ( allThePrimOps, primOpFixity, primOpOcc )
import MkId ( seqId )
import Rules
import TyCon
import Annotations
import InstEnv
import FamInstEnv
import Name
import NameEnv
import Avail
import Module
import Maybes
import ErrUtils
import Finder
import UniqFM
import SrcLoc
import Outputable
import BinIface
import Panic
import Util
import FastString
import Fingerprint
import Hooks
import Control.Monad
import Data.IORef
import System.FilePath
{-
************************************************************************
* *
* tcImportDecl is the key function for "faulting in" *
* imported things
* *
************************************************************************
The main idea is this. We are chugging along type-checking source code, and
find a reference to GHC.Base.map. We call tcLookupGlobal, which doesn't find
it in the EPS type envt. So it
1 loads GHC.Base.hi
2 gets the decl for GHC.Base.map
3 typechecks it via tcIfaceDecl
4 and adds it to the type env in the EPS
Note that DURING STEP 4, we may find that map's type mentions a type
constructor that also needs to be loaded, in which case the same
faulting-in process repeats for that constructor.
Notice that for imported things we read the current version from the EPS
mutable variable. This is important in situations like
...$(e1)...$(e2)...
where the code that e1 expands to might import some defns that
also turn out to be needed by the code that e2 expands to.
-}
-- | Look up an imported 'Name', first via 'lookupTypeHscEnv' (the type
-- environments known to the 'HscEnv'); if it is not there yet, fault the
-- declaration in from an interface file.
-- Returns (Failed err) if we can't find the interface file for the thing.
tcLookupImported_maybe :: Name -> TcM (MaybeErr MsgDoc TyThing)
tcLookupImported_maybe name = do
  hsc_env  <- getTopEnv
  mb_thing <- liftIO (lookupTypeHscEnv hsc_env name)
  case mb_thing of
    Just thing -> return (Succeeded thing)
    Nothing    -> tcImportDecl_maybe name
-- | Entry point for /source-code/ uses of 'importDecl'.  Wired-in things
-- are returned directly, after ensuring their home interface is loaded
-- when required; everything else is read from an interface file.
tcImportDecl_maybe :: Name -> TcM (MaybeErr MsgDoc TyThing)
tcImportDecl_maybe name =
  case wiredInNameTyThing_maybe name of
    Just thing -> do
      -- See Note [Loading instances for wired-in things]
      when (needWiredInHomeIface thing) $
        initIfaceTcRn (loadWiredInHomeIface name)
      return (Succeeded thing)
    Nothing -> initIfaceTcRn (importDecl name)
importDecl :: Name -> IfM lcl (MaybeErr MsgDoc TyThing)
-- Get the TyThing for this Name from an interface file
-- It's not a wired-in thing -- the caller caught that
importDecl name
  = ASSERT( not (isWiredInName name) )
    do { traceIf nd_doc
       -- Load the interface, which should populate the PTE
       -- (the EPS package type environment) with this decl and its siblings
       ; mb_iface <- ASSERT2( isExternalName name, ppr name )
                     loadInterface nd_doc (nameModule name) ImportBySystem
       ; case mb_iface of {
           Failed err_msg -> return (Failed err_msg) ;
           Succeeded _ -> do
       -- Now look it up again; this time we should find it
       { eps <- getEps
       ; case lookupTypeEnv (eps_PTE eps) name of
           Just thing -> return (Succeeded thing)
           Nothing -> return (Failed not_found_msg)
     }}}
  where
    nd_doc = ptext (sLit "Need decl for") <+> ppr name
    not_found_msg = hang (ptext (sLit "Can't find interface-file declaration for") <+>
                          pprNameSpace (occNameSpace (nameOccName name)) <+> ppr name)
                       2 (vcat [ptext (sLit "Probable cause: bug in .hi-boot file, or inconsistent .hi file"),
                                ptext (sLit "Use -ddump-if-trace to get an idea of which file caused the error")])
{-
************************************************************************
* *
Checks for wired-in things
* *
************************************************************************
Note [Loading instances for wired-in things]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We need to make sure that we have at least *read* the interface files
for any module with an instance decl or RULE that we might want.
* If the instance decl is an orphan, we have a whole separate mechanism
(loadOrphanModules)
* If the instance decl is not an orphan, then the act of looking at the
TyCon or Class will force in the defining module for the
TyCon/Class, and hence the instance decl
* BUT, if the TyCon is a wired-in TyCon, we don't really need its interface;
but we must make sure we read its interface in case it has instances or
rules. That is what LoadIface.loadWiredInHomeInterface does. It's called
from TcIface.{tcImportDecl, checkWiredInTyCon, ifCheckWiredInThing}
* HOWEVER, only do this for TyCons. There are no wired-in Classes. There
are some wired-in Ids, but we don't want to load their interfaces. For
example, Control.Exception.Base.recSelError is wired in, but that module
is compiled late in the base library, and we don't want to force it to
load before it's been compiled!
All of this is done by the type checker. The renamer plays no role.
(It used to, but no longer.)
-}
checkWiredInTyCon :: TyCon -> TcM ()
-- Ensure that the home module of the TyCon (and hence its instances)
-- are loaded. See Note [Loading instances for wired-in things]
-- It might not be a wired-in tycon (see the calls in TcUnify),
-- in which case this is a no-op.
checkWiredInTyCon tc
  | not (isWiredInName tc_name)
  = return ()
  | otherwise
  -- Wired-in: read the home interface purely for its instances/rules.
  = do { mod <- getModule
       ; ASSERT( isExternalName tc_name )
         when (mod /= nameModule tc_name)
              (initIfaceTcRn (loadWiredInHomeIface tc_name))
              -- Don't look for (non-existent) Float.hi when
              -- compiling Float.lhs, which mentions Float of course
              -- A bit yukky to call initIfaceTcRn here
       }
  where
    tc_name = tyConName tc
ifCheckWiredInThing :: TyThing -> IfL ()
-- Even though we are in an interface file, we want to make
-- sure the instances of a wired-in thing are loaded (imagine f :: Double -> Double)
-- Ditto want to ensure that RULES are loaded too
-- See Note [Loading instances for wired-in things]
ifCheckWiredInThing thing
  = do { mod <- getIfModule
                -- Check whether we are typechecking the interface for this
                -- very module.  E.g when compiling the base library in --make mode
                -- we may typecheck GHC.Base.hi. At that point, GHC.Base is not in
                -- the HPT, so without the test we'll demand-load it into the PIT!
                -- C.f. the same test in checkWiredInTyCon above
       ; let name = getName thing
       -- Only TyCons need this treatment; see needWiredInHomeIface.
       ; ASSERT2( isExternalName name, ppr name )
         when (needWiredInHomeIface thing && mod /= nameModule name)
              (loadWiredInHomeIface name) }
-- | Does this wired-in thing require its home interface to be read for
-- instances and rules?  Only type constructors do; see
-- Note [Loading instances for wired-in things].
needWiredInHomeIface :: TyThing -> Bool
needWiredInHomeIface thing = case thing of
  ATyCon {} -> True
  _         -> False
{-
************************************************************************
* *
loadSrcInterface, loadOrphanModules, loadInterfaceForName
These three are called from TcM-land
* *
************************************************************************
-}
-- Note [Un-ambiguous multiple interfaces]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- When a user writes an import statement, this usually causes a *single*
-- interface file to be loaded. However, the game is different when
-- signatures are being imported. Suppose in packages p and q we have
-- signatures:
--
-- module A where
-- foo :: Int
--
-- module A where
-- bar :: Int
--
-- If both packages are exposed and I am importing A, I should see a
-- "unified" signature:
--
-- module A where
-- foo :: Int
-- bar :: Int
--
-- The way we achieve this is having the module lookup for A load and return
-- multiple interface files, which we will then process as if there were
-- "multiple" imports:
--
-- import "p" A
-- import "q" A
--
-- Doing so does not cause any ambiguity, because any overlapping identifiers
-- are guaranteed to have the same name if the backing implementations of the
-- two signatures are the same (a condition which is checked by 'Packages'.)
-- | Load the interfaces corresponding to an @import@ directive in
-- source code, failing in the monad with an error message if none can
-- be found.  See Note [Un-ambiguous multiple interfaces] for why the
-- result is @[ModIface]@.
loadSrcInterface :: SDoc
                 -> ModuleName
                 -> IsBootInterface     -- {-# SOURCE #-} ?
                 -> Maybe FastString    -- "package", if any
                 -> RnM [ModIface]
loadSrcInterface doc mod want_boot maybe_pkg = do
  mb_ifaces <- loadSrcInterface_maybe doc mod want_boot maybe_pkg
  case mb_ifaces of
    Failed err       -> failWithTc err
    Succeeded ifaces -> return ifaces
-- | Like 'loadSrcInterface', but returns a 'MaybeErr'. See also
-- Note [Un-ambiguous multiple interfaces]
loadSrcInterface_maybe :: SDoc
                       -> ModuleName
                       -> IsBootInterface      -- {-# SOURCE #-} ?
                       -> Maybe FastString     -- "package", if any
                       -> RnM (MaybeErr MsgDoc [ModIface])
loadSrcInterface_maybe doc mod want_boot maybe_pkg
  -- We must first find which Module this import refers to. This involves
  -- calling the Finder, which as a side effect will search the filesystem
  -- and create a ModLocation. If successful, loadIface will read the
  -- interface; it will call the Finder again, but the ModLocation will be
  -- cached from the first search.
  = do { hsc_env <- getTopEnv
       -- ToDo: findImportedModule should return a list of interfaces
       ; res <- liftIO $ findImportedModule hsc_env mod maybe_pkg
       ; case res of
           -- NB: this 'mod' shadows the ModuleName argument; it is the
           -- fully-resolved Module returned by the Finder.
           Found _ mod -> fmap (fmap (:[]))
                        . initIfaceTcRn
                        $ loadInterface doc mod (ImportByUser want_boot)
           err -> return (Failed (cannotFindInterface (hsc_dflags hsc_env) mod err)) }
-- | Load the interface for a fully qualified 'Module'.  (This is a fairly
-- rare operation, but in particular it is used to load orphan modules
-- in order to pull their instances into the global package table and to
-- handle some operations in GHCi.)
loadModuleInterface :: SDoc -> Module -> TcM ModIface
loadModuleInterface doc mod = initIfaceTcRn $ loadSysInterface doc mod
-- | Load interfaces for a collection of modules; a no-op for the
-- empty list.
loadModuleInterfaces :: SDoc -> [Module] -> TcM ()
loadModuleInterfaces doc mods = case mods of
  [] -> return ()
  _  -> initIfaceTcRn (mapM_ load_one mods)
  where
    -- Tag the reason with the particular module being loaded.
    load_one mod = loadSysInterface (doc <+> parens (ppr mod)) mod
-- | Loads the interface for a given Name.
-- Should only be called for an imported name;
-- otherwise loadSysInterface may not find the interface
loadInterfaceForName :: SDoc -> Name -> TcRn ModIface
loadInterfaceForName doc name
  = do { when debugIsOn $  -- Check pre-condition
         do { this_mod <- getModule
            -- Debug builds assert the name is not from the module being
            -- compiled (that module has no interface yet).
            ; MASSERT2( not (nameIsLocalOrFrom this_mod name), ppr name <+> parens doc ) }
      ; ASSERT2( isExternalName name, ppr name )
        initIfaceTcRn $ loadSysInterface doc (nameModule name) }
-- | Loads the interface for a given Module.
loadInterfaceForModule :: SDoc -> Module -> TcRn ModIface
loadInterfaceForModule doc m
  = do
    -- Should not be called with this module: the module being compiled
    -- has no interface file yet (debug-build assertion).
    when debugIsOn $ do
      this_mod <- getModule
      MASSERT2( this_mod /= m, ppr m <+> parens doc )
    initIfaceTcRn $ loadSysInterface doc m
{-
*********************************************************
* *
loadInterface
The main function to load an interface
for an imported module, and put it in
the External Package State
* *
*********************************************************
-}
-- | An 'IfM' function to load the home interface for a wired-in thing,
-- so that we're sure that we see its instance declarations and rules
-- See Note [Loading instances for wired-in things] in TcIface
loadWiredInHomeIface :: Name -> IfM lcl ()
loadWiredInHomeIface name
  = ASSERT( isWiredInName name )
    -- The interface itself is discarded; loading it is enough to pull
    -- its instances/rules into the EPS.
    do _ <- loadSysInterface doc (nameModule name); return ()
  where
    doc = ptext (sLit "Need home interface for wired-in thing") <+> ppr name
------------------
-- | Loads a system interface (one demanded by the compiler itself rather
-- than directly by user code) and throws an exception if it fails.
loadSysInterface :: SDoc -> Module -> IfM lcl ModIface
loadSysInterface doc mod = loadInterfaceWithException doc mod ImportBySystem
------------------
-- | Loads a user interface and throws an exception if it fails.  The first
-- argument indicates whether we should import the boot variant of the module.
loadUserInterface :: Bool -> SDoc -> Module -> IfM lcl ModIface
loadUserInterface is_boot doc mod =
  loadInterfaceWithException doc mod (ImportByUser is_boot)
-- | Loads the interface of a compiler-plugin module, throwing an
-- exception on failure; see Note [Care with plugin imports] for why
-- plugin loads are tagged specially.
loadPluginInterface :: SDoc -> Module -> IfM lcl ModIface
loadPluginInterface doc mod =
  loadInterfaceWithException doc mod ImportByPlugin
------------------
-- | A wrapper for 'loadInterface' that turns a load failure into a
-- 'ProgramError' exception.
loadInterfaceWithException :: SDoc -> Module -> WhereFrom -> IfM lcl ModIface
loadInterfaceWithException doc mod_name where_from = do
  mb_iface <- loadInterface doc mod_name where_from
  dflags   <- getDynFlags
  case mb_iface of
    Failed err      -> liftIO (throwGhcExceptionIO (ProgramError (showSDoc dflags err)))
    Succeeded iface -> return iface
------------------
loadInterface :: SDoc -> Module -> WhereFrom
              -> IfM lcl (MaybeErr MsgDoc ModIface)
-- loadInterface looks in both the HPT and PIT for the required interface
-- If not found, it loads it, and puts it in the PIT (always).
-- If it can't find a suitable interface file, we
--      a) modify the PackageIfaceTable to have an empty entry
--         (to avoid repeated complaints)
--      b) return (Left message)
--
-- It's not necessarily an error for there not to be an interface
-- file -- perhaps the module has changed, and that interface
-- is no longer used
loadInterface doc_str mod from
  = do {        -- Read the state
          (eps,hpt) <- getEpsAndHpt
        ; traceIf (text "Considering whether to load" <+> ppr mod <+> ppr from)
                -- Check whether we have the interface already
        ; dflags <- getDynFlags
        ; case lookupIfaceByModule dflags hpt (eps_PIT eps) mod of {
            Just iface
                -> return (Succeeded iface) ;   -- Already loaded
                        -- The (src_imp == mi_boot iface) test checks that the already-loaded
                        -- interface isn't a boot iface.  This can conceivably happen,
                        -- if an earlier import had a {-# SOURCE #-} import before we got
                        -- to the real imports.  I think.
            _ -> do {
        -- READ THE MODULE IN
        ; read_result <- case (wantHiBootFile dflags eps mod from) of
                           Failed err -> return (Failed err)
                           Succeeded hi_boot_file -> findAndReadIface doc_str mod hi_boot_file
        ; case read_result of {
            Failed err -> do
                { let fake_iface = emptyModIface mod
                ; updateEps_ $ \eps ->
                    eps { eps_PIT = extendModuleEnv (eps_PIT eps) (mi_module fake_iface) fake_iface }
                    -- Not found, so add an empty iface to
                    -- the EPS map so that we don't look again
                ; return (Failed err) } ;
        -- Found and parsed!
        -- We used to have a sanity check here that looked for:
        --  * System importing ..
        --  * a home package module ..
        --  * that we know nothing about (mb_dep == Nothing)!
        --
        -- But this is no longer valid because thNameToGhcName allows users to
        -- cause the system to load arbitrary interfaces (by supplying an appropriate
        -- Template Haskell original-name).
            Succeeded (iface, file_path) ->
        let
            loc_doc = text file_path
        in
        initIfaceLcl mod loc_doc $ do
        -- Load the new ModIface into the External Package State
        -- Even home-package interfaces loaded by loadInterface
        -- (which only happens in OneShot mode; in Batch/Interactive
        -- mode, home-package modules are loaded one by one into the HPT)
        -- are put in the EPS.
        --
        -- The main thing is to add the ModIface to the PIT, but
        -- we also take the
        --      IfaceDecls, IfaceClsInst, IfaceFamInst, IfaceRules, IfaceVectInfo
        -- out of the ModIface and put them into the big EPS pools
        -- NB: *first* we do loadDecl, so that the provenance of all the locally-defined
        --- names is done correctly (notably, whether this is an .hi file or .hi-boot file).
        --     If we do loadExport first the wrong info gets into the cache (unless we
        --     explicitly tag each export which seems a bit of a bore)
        ; ignore_prags      <- goptM Opt_IgnoreInterfacePragmas
        ; new_eps_decls     <- loadDecls ignore_prags (mi_decls iface)
        ; new_eps_insts     <- mapM tcIfaceInst (mi_insts iface)
        ; new_eps_fam_insts <- mapM tcIfaceFamInst (mi_fam_insts iface)
        ; new_eps_rules     <- tcIfaceRules ignore_prags (mi_rules iface)
        ; new_eps_anns      <- tcIfaceAnnotations (mi_anns iface)
        ; new_eps_vect_info <- tcIfaceVectInfo mod (mkNameEnv new_eps_decls) (mi_vect_info iface)
        -- The PIT copy of the interface must never be consulted for the
        -- payloads just moved into the EPS pools; panic if anyone tries.
        ; let { final_iface = iface {
                                mi_decls     = panic "No mi_decls in PIT",
                                mi_insts     = panic "No mi_insts in PIT",
                                mi_fam_insts = panic "No mi_fam_insts in PIT",
                                mi_rules     = panic "No mi_rules in PIT",
                                mi_anns      = panic "No mi_anns in PIT"
                              }
               }
        ; updateEps_ $ \ eps ->
           if elemModuleEnv mod (eps_PIT eps) then eps else
            case from of  -- See Note [Care with plugin imports]
              ImportByPlugin -> eps {
                eps_PIT          = extendModuleEnv (eps_PIT eps) mod final_iface,
                eps_PTE          = addDeclsToPTE   (eps_PTE eps) new_eps_decls}
              _              -> eps {
                eps_PIT          = extendModuleEnv (eps_PIT eps) mod final_iface,
                eps_PTE          = addDeclsToPTE   (eps_PTE eps) new_eps_decls,
                eps_rule_base    = extendRuleBaseList (eps_rule_base eps)
                                                      new_eps_rules,
                eps_inst_env     = extendInstEnvList (eps_inst_env eps)
                                                     new_eps_insts,
                eps_fam_inst_env = extendFamInstEnvList (eps_fam_inst_env eps)
                                                        new_eps_fam_insts,
                eps_vect_info    = plusVectInfo (eps_vect_info eps)
                                                new_eps_vect_info,
                eps_ann_env      = extendAnnEnvList (eps_ann_env eps)
                                                    new_eps_anns,
                eps_mod_fam_inst_env
                                 = let
                                     fam_inst_env =
                                       extendFamInstEnvList emptyFamInstEnv
                                                            new_eps_fam_insts
                                   in
                                   extendModuleEnv (eps_mod_fam_inst_env eps)
                                                   mod
                                                   fam_inst_env,
                eps_stats        = addEpsInStats (eps_stats eps)
                                                 (length new_eps_decls)
                                                 (length new_eps_insts)
                                                 (length new_eps_rules) }
        ; return (Succeeded final_iface)
    }}}}
wantHiBootFile :: DynFlags -> ExternalPackageState -> Module -> WhereFrom
               -> MaybeErr MsgDoc IsBootInterface
-- Figure out whether we want Foo.hi or Foo.hi-boot
wantHiBootFile dflags eps mod from
  = case from of
       ImportByUser usr_boot
          -- A {-# SOURCE #-} import of an external-package module is
          -- always an error; boot files only exist for home modules.
          | usr_boot && not this_package
          -> Failed (badSourceImport mod)
          | otherwise -> Succeeded usr_boot
       -- Plugin imports never use hi-boot files.
       ImportByPlugin
          -> Succeeded False
       ImportBySystem
          | not this_package   -- If the module to be imported is not from this package
          -> Succeeded False   -- don't look it up in eps_is_boot, because that is keyed
                               -- on the ModuleName of *home-package* modules only.
                               -- We never import boot modules from other packages!
          | otherwise
          -> case lookupUFM (eps_is_boot eps) (moduleName mod) of
                Just (_, is_boot) -> Succeeded is_boot
                Nothing -> Succeeded False
                     -- The boot-ness of the requested interface,
                     -- based on the dependencies in directly-imported modules
  where
    this_package = thisPackage dflags == modulePackageKey mod
-- | Error message for a @{-# SOURCE #-}@ import that targets a module in
-- another package (boot interfaces exist only for home-package modules).
badSourceImport :: Module -> SDoc
badSourceImport mod =
  hang (text "You cannot {-# SOURCE #-} import a module from another package")
     2 (text "but" <+> quotes (ppr mod) <+> text "is from package"
        <+> quotes (ppr (modulePackageKey mod)))
{-
Note [Care with plugin imports]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When dynamically loading a plugin (via loadPluginInterface) we
populate the same External Package State (EPS), even though plugin
modules are to link with the compiler itself, and not with the
compiled program. That's fine: mostly the EPS is just a cache for
the interface files on disk.
But it's NOT ok for the RULES or instance environment. We do not want
to fire a RULE from the plugin on the code we are compiling, otherwise
the code we are compiling will have a reference to a RHS of the rule
that exists only in the compiler! This actually happened to Daniel,
via a RULE arising from a specialisation of (^) in the plugin.
Solution: when loading plugins, do not extend the rule and instance
environments. We are only interested in the type environment, so that
we can check that the plugin exports a function with the type that the
compiler expects.
-}
-----------------------------------------------------
-- Loading type/class/value decls
-- We pass the full Module name here, replete with
-- its package info, so that we can build a Name for
-- each binder with the right package info in it
-- All subsequent lookups, including crucially lookups during typechecking
-- the declaration itself, will find the fully-glorious Name
--
-- We handle ATs specially. They are not main declarations, but also not
-- implicit things (in particular, adding them to `implicitTyThings' would mess
-- things up in the renaming/type checking of source programs).
-----------------------------------------------------
-- | Extend the package type environment with a batch of freshly loaded
-- (Name, TyThing) pairs.
addDeclsToPTE :: PackageTypeEnv -> [(Name,TyThing)] -> PackageTypeEnv
addDeclsToPTE = extendNameEnvList
-- | Load every fingerprinted declaration from the interface of the module
-- currently being read, flattening the per-declaration results into one
-- list of (Name, TyThing) pairs.
loadDecls :: Bool
          -> [(Fingerprint, IfaceDecl)]
          -> IfL [(Name,TyThing)]
loadDecls ignore_prags ver_decls = do
  mod <- getIfModule
  fmap concat (mapM (loadDecl ignore_prags mod) ver_decls)
loadDecl :: Bool         -- Don't load pragmas into the decl pool
         -> Module
         -> (Fingerprint, IfaceDecl)
         -> IfL [(Name,TyThing)]   -- The list can be poked eagerly, but the
                                   -- TyThings are forkM'd thunks
-- Returns the main binder plus all of its implicit TyThings, each keyed
-- by its Name.
loadDecl ignore_prags mod (_version, decl)
  = do {        -- Populate the name cache with final versions of all
                -- the names associated with the decl
          main_name <- lookupOrig mod (ifName decl)
        -- Typecheck the thing, lazily
        -- NB. Firstly, the laziness is there in case we never need the
        -- declaration (in one-shot mode), and secondly it is there so that
        -- we don't look up the occurrence of a name before calling mk_new_bndr
        -- on the binder.  This is important because we must get the right name
        -- which includes its nameParent.
        ; thing <- forkM doc $ do { bumpDeclStats main_name
                                  ; tcIfaceDecl ignore_prags decl }
        -- Populate the type environment with the implicitTyThings too.
        --
        -- Note [Tricky iface loop]
        -- ~~~~~~~~~~~~~~~~~~~~~~~~
        -- Summary: The delicate point here is that 'mini-env' must be
        -- buildable from 'thing' without demanding any of the things
        -- 'forkM'd by tcIfaceDecl.
        --
        -- In more detail: Consider the example
        --      data T a = MkT { x :: T a }
        -- The implicitTyThings of T are:  [ <datacon MkT>, <selector x>]
        -- (plus their workers, wrappers, coercions etc etc)
        --
        -- We want to return an environment
        --      [ "MkT" -> <datacon MkT>, "x" -> <selector x>, ... ]
        -- (where the "MkT" is the *Name* associated with MkT, etc.)
        --
        -- We do this by mapping the implicit_names to the associated
        -- TyThings.  By the invariant on ifaceDeclImplicitBndrs and
        -- implicitTyThings, we can use getOccName on the implicit
        -- TyThings to make this association: each Name's OccName should
        -- be the OccName of exactly one implicitTyThing.  So the key is
        -- to define a "mini-env"
        --
        -- [ 'MkT' -> <datacon MkT>, 'x' -> <selector x>, ... ]
        -- where the 'MkT' here is the *OccName* associated with MkT.
        --
        -- However, there is a subtlety: due to how type checking needs
        -- to be staged, we can't poke on the forkM'd thunks inside the
        -- implicitTyThings while building this mini-env.
        -- If we poke these thunks too early, two problems could happen:
        --    (1) When processing mutually recursive modules across
        --        hs-boot boundaries, poking too early will do the
        --        type-checking before the recursive knot has been tied,
        --        so things will be type-checked in the wrong
        --        environment, and necessary variables won't be in
        --        scope.
        --
        --    (2) Looking up one OccName in the mini_env will cause
        --        others to be looked up, which might cause that
        --        original one to be looked up again, and hence loop.
        --
        -- The code below works because of the following invariant:
        -- getOccName on a TyThing does not force the suspended type
        -- checks in order to extract the name. For example, we don't
        -- poke on the "T a" type of <selector x> on the way to
        -- extracting <selector x>'s OccName. Of course, there is no
        -- reason in principle why getting the OccName should force the
        -- thunks, but this means we need to be careful in
        -- implicitTyThings and its helper functions.
        --
        -- All a bit too finely-balanced for my liking.
        -- This mini-env and lookup function mediates between the
        --'Name's n and the map from 'OccName's to the implicit TyThings
        ; let mini_env = mkOccEnv [(getOccName t, t) | t <- implicitTyThings thing]
              lookup n = case lookupOccEnv mini_env (getOccName n) of
                           Just thing -> thing
                           Nothing ->
                             pprPanic "loadDecl" (ppr main_name <+> ppr n $$ ppr (decl))
        ; implicit_names <- mapM (lookupOrig mod) (ifaceDeclImplicitBndrs decl)
        -- ; traceIf (text "Loading decl for " <> ppr main_name $$ ppr implicit_names)
        ; return $ (main_name, thing) :
              -- uses the invariant that implicit_names and
              -- implicitTyThings are bijective
              [(n, lookup n) | n <- implicit_names]
        }
  where
    doc = ptext (sLit "Declaration for") <+> ppr (ifName decl)
-- | Record that one more interface declaration has actually been used,
-- incrementing the EPS statistics counter.
bumpDeclStats :: Name -> IfL ()
bumpDeclStats name = do
  traceIf (text "Loading decl for" <+> ppr name)
  updateEps_ $ \eps ->
    let stats = eps_stats eps
    in eps { eps_stats = stats { n_decls_out = n_decls_out stats + 1 } }
{-
*********************************************************
* *
\subsection{Reading an interface file}
* *
*********************************************************
Note [Home module load error]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If the sought-for interface is in the current package (as determined
by -package-name flag) then it jolly well should already be in the HPT
because we process home-package modules in dependency order. (Except
in one-shot mode; see notes with hsc_HPT decl in HscTypes).
It is possible (though hard) to get this error through user behaviour.
* Suppose package P (modules P1, P2) depends on package Q (modules Q1,
Q2, with Q2 importing Q1)
* We compile both packages.
* Now we edit package Q so that it somehow depends on P
* Now recompile Q with --make (without recompiling P).
* Then Q1 imports, say, P1, which in turn depends on Q2. So Q2
is a home-package module which is not yet in the HPT! Disaster.
This actually happened with P=base, Q=ghc-prim, via the AMP warnings.
See Trac #8320.
-}
findAndReadIface :: SDoc -> Module
                 -> IsBootInterface     -- True  <=> Look for a .hi-boot file
                                        -- False <=> Look for .hi file
                 -> TcRnIf gbl lcl (MaybeErr MsgDoc (ModIface, FilePath))
        -- Nothing <=> file not found, or unreadable, or illegible
        -- Just x  <=> successfully found and parsed
        -- It *doesn't* add an error to the monad, because
        -- sometimes it's ok to fail... see notes with loadInterface
findAndReadIface doc_str mod hi_boot_file
  = do traceIf (sep [hsep [ptext (sLit "Reading"),
                           if hi_boot_file
                             then ptext (sLit "[boot]")
                             else Outputable.empty,
                           ptext (sLit "interface for"),
                           ppr mod <> semi],
                     nest 4 (ptext (sLit "reason:") <+> doc_str)])
       -- Check for GHC.Prim, and return its static interface
       -- (GHC.Prim has no interface file on disk; a hook may override it)
       if mod == gHC_PRIM
           then do
               iface <- getHooked ghcPrimIfaceHook ghcPrimIface
               return (Succeeded (iface,
                                  "<built in interface for GHC.Prim>"))
           else do
               dflags <- getDynFlags
               -- Look for the file
               hsc_env <- getTopEnv
               mb_found <- liftIO (findExactModule hsc_env mod)
               case mb_found of
                   Found loc mod -> do
                       -- Found file, so read it
                       let file_path = addBootSuffix_maybe hi_boot_file
                                                           (ml_hi_file loc)
                       -- See Note [Home module load error]
                       if thisPackage dflags == modulePackageKey mod &&
                          not (isOneShot (ghcMode dflags))
                           then return (Failed (homeModError mod loc))
                           else do r <- read_file file_path
                                   checkBuildDynamicToo r
                                   return r
                   err -> do
                       traceIf (ptext (sLit "...not found"))
                       dflags <- getDynFlags
                       return (Failed (cannotFindInterface dflags
                                           (moduleName mod) err))
    where read_file file_path = do
              traceIf (ptext (sLit "readIFace") <+> text file_path)
              read_result <- readIface mod file_path
              case read_result of
                Failed err -> return (Failed (badIfaceFile file_path err))
                Succeeded iface
                    | mi_module iface /= mod ->
                      return (Failed (wrongIfaceModErr iface mod file_path))
                    | otherwise ->
                      return (Succeeded (iface, file_path))
                             -- Don't forget to fill in the package name...
          -- With -dynamic-too, also read the dynamic interface; if its
          -- module hash disagrees with (or it can't be loaded alongside)
          -- the static one, switch off dynamic-too generation.
          checkBuildDynamicToo (Succeeded (iface, filePath)) = do
              dflags <- getDynFlags
              whenGeneratingDynamicToo dflags $ withDoDynamicToo $ do
                  let ref = canGenerateDynamicToo dflags
                      dynFilePath = addBootSuffix_maybe hi_boot_file
                                  $ replaceExtension filePath (dynHiSuf dflags)
                  r <- read_file dynFilePath
                  case r of
                      Succeeded (dynIface, _)
                       | mi_mod_hash iface == mi_mod_hash dynIface ->
                          return ()
                       | otherwise ->
                          do traceIf (text "Dynamic hash doesn't match")
                             liftIO $ writeIORef ref False
                      Failed err ->
                          do traceIf (text "Failed to load dynamic interface file:" $$ err)
                             liftIO $ writeIORef ref False
          checkBuildDynamicToo _ = return ()
-- @readIface@ tries just the one file.
-- | Read a single binary interface file and check its module name
-- matches the one requested.  Exceptions raised while reading are
-- converted into a 'Failed' result (via tryMostM), not rethrown.
readIface :: Module -> FilePath
          -> TcRnIf gbl lcl (MaybeErr MsgDoc ModIface)
        -- Failed err      <=> file not found, or unreadable, or illegible
        -- Succeeded iface <=> successfully found and parsed
readIface wanted_mod file_path
  = do { res <- tryMostM $
                readBinIface CheckHiWay QuietBinIFaceReading file_path
       ; case res of
           Right iface
               | wanted_mod == actual_mod -> return (Succeeded iface)
               | otherwise -> return (Failed err)
               where
                 actual_mod = mi_module iface
                 err = hiModuleNameMismatchWarn wanted_mod actual_mod
           Left exn -> return (Failed (text (showException exn)))
    }
{-
*********************************************************
*                                                    *
        Initialising the external package state
*                                                    *
*********************************************************
-}
-- | The initial, empty 'ExternalPackageState': no interfaces loaded yet,
-- with the rule base pre-seeded with GHC's built-in rewrite rules.
initExternalPackageState :: ExternalPackageState
initExternalPackageState
  = EPS {
      eps_is_boot      = emptyUFM,
      eps_PIT          = emptyPackageIfaceTable,
      eps_PTE          = emptyTypeEnv,
      eps_inst_env     = emptyInstEnv,
      eps_fam_inst_env = emptyFamInstEnv,
      eps_rule_base    = mkRuleBase builtinRules,
        -- Initialise the EPS rule pool with the built-in rules
      eps_mod_fam_inst_env
                       = emptyModuleEnv,
      eps_vect_info    = noVectInfo,
      eps_ann_env      = emptyAnnEnv,
      -- Statistics start at zero, except that the built-in rules count
      -- as rules already read in.
      eps_stats = EpsStats { n_ifaces_in = 0, n_decls_in = 0, n_decls_out = 0
                           , n_insts_in = 0, n_insts_out = 0
                           , n_rules_in = length builtinRules, n_rules_out = 0 }
    }
{-
*********************************************************
* *
Wired-in interface for GHC.Prim
* *
*********************************************************
-}
-- | The wired-in interface for GHC.Prim: no declarations of its own,
-- just the export list plus fixities for seq and the primops.
ghcPrimIface :: ModIface
ghcPrimIface
  = (emptyModIface gHC_PRIM) {
        mi_exports  = ghcPrimExports,
        mi_decls    = [],
        mi_fixities = fixities,
        mi_fix_fn   = mkIfaceFixCache fixities
    }
  where
    fixities = (getOccName seqId, Fixity 0 InfixR)  -- seq is infixr 0
             : mapMaybe mkFixity allThePrimOps
    -- Only primops that declare a fixity contribute an entry.
    mkFixity op = (,) (primOpOcc op) <$> primOpFixity op
{-
*********************************************************
* *
\subsection{Statistics}
* *
*********************************************************
-}
-- | Render the renamer's interface-loading statistics (counts of
-- interfaces read, and decls/instances/rules imported vs. read).
ifaceStats :: ExternalPackageState -> SDoc
ifaceStats eps
  = hcat [text "Renamer stats: ", msg]
  where
    stats = eps_stats eps
    msg = vcat
        [int (n_ifaces_in stats) <+> text "interfaces read",
         hsep [ int (n_decls_out stats), text "type/class/variable imported, out of",
                int (n_decls_in stats), text "read"],
         hsep [ int (n_insts_out stats), text "instance decls imported, out of",
                int (n_insts_in stats), text "read"],
         hsep [ int (n_rules_out stats), text "rule decls imported, out of",
                int (n_rules_in stats), text "read"]
        ]
{-
************************************************************************
* *
Printing interfaces
* *
************************************************************************
-}
-- | Read binary interface, and print it out
showIface :: HscEnv -> FilePath -> IO ()
showIface hsc_env filename = do
   -- skip the hi way check; we don't want to worry about profiled vs.
   -- non-profiled interfaces, for example.
   iface <- initTcRnIf 's' hsc_env () () $
       readBinIface IgnoreHiWay TraceBinIFaceReading filename
   -- Dump the pretty-printed interface through the session's log action.
   let dflags = hsc_dflags hsc_env
   log_action dflags dflags SevDump noSrcSpan defaultDumpStyle (pprModIface iface)
pprModIface :: ModIface -> SDoc
-- Show a ModIface: header line (module, flags, version), then the
-- hashes, exports, dependencies, usages, decls, instances and rules.
pprModIface iface
 = vcat [ ptext (sLit "interface")
                <+> ppr (mi_module iface) <+> pp_boot
                <+> (if mi_orphan iface then ptext (sLit "[orphan module]") else Outputable.empty)
                <+> (if mi_finsts iface then ptext (sLit "[family instance module]") else Outputable.empty)
                <+> (if mi_hpc iface then ptext (sLit "[hpc]") else Outputable.empty)
                <+> integer hiVersion
        , nest 2 (text "interface hash:" <+> ppr (mi_iface_hash iface))
        , nest 2 (text "ABI hash:" <+> ppr (mi_mod_hash iface))
        , nest 2 (text "export-list hash:" <+> ppr (mi_exp_hash iface))
        , nest 2 (text "orphan hash:" <+> ppr (mi_orphan_hash iface))
        , nest 2 (text "flag hash:" <+> ppr (mi_flag_hash iface))
        , nest 2 (text "sig of:" <+> ppr (mi_sig_of iface))
        , nest 2 (text "used TH splices:" <+> ppr (mi_used_th iface))
        , nest 2 (ptext (sLit "where"))
        , ptext (sLit "exports:")
        , nest 2 (vcat (map pprExport (mi_exports iface)))
        , pprDeps (mi_deps iface)
        , vcat (map pprUsage (mi_usages iface))
        , vcat (map pprIfaceAnnotation (mi_anns iface))
        , pprFixities (mi_fixities iface)
        , vcat [ppr ver $$ nest 2 (ppr decl) | (ver,decl) <- mi_decls iface]
        , vcat (map ppr (mi_insts iface))
        , vcat (map ppr (mi_fam_insts iface))
        , vcat (map ppr (mi_rules iface))
        , pprVectInfo (mi_vect_info iface)
        , ppr (mi_warns iface)
        , pprTrustInfo (mi_trust iface)
        , pprTrustPkg (mi_trust_pkg iface)
        ]
  where
    pp_boot | mi_boot iface = ptext (sLit "[boot]")
            | otherwise     = Outputable.empty
{-
When printing export lists, we print like this:
Avail f f
AvailTC C [C, x, y] C(x,y)
AvailTC C [x, y] C!(x,y) -- Exporting x, y but not C
-}
-- | Print one export item; see the comment block above for the
-- rendering of Avail vs. AvailTC (the @|@ marks a TC whose own name is
-- not exported).
pprExport :: IfaceExport -> SDoc
pprExport (Avail n)      = ppr n
pprExport (AvailTC _ []) = Outputable.empty
pprExport (AvailTC n (n':ns))
  | n==n'     = ppr n <> pp_export ns
  | otherwise = ppr n <> char '|' <> pp_export (n':ns)
  where
    pp_export []    = Outputable.empty
    pp_export names = braces (hsep (map ppr names))
-- | Print one recorded usage (a package module, a home module with its
-- per-entity hashes, or a file added via addDependentFile).
pprUsage :: Usage -> SDoc
pprUsage usage@UsagePackageModule{}
  = pprUsageImport usage usg_mod
pprUsage usage@UsageHomeModule{}
  = pprUsageImport usage usg_mod_name $$
    nest 2 (
        maybe Outputable.empty (\v -> text "exports: " <> ppr v) (usg_exports usage) $$
        vcat [ ppr n <+> ppr v | (n,v) <- usg_entities usage ]
        )
pprUsage usage@UsageFile{}
  = hsep [ptext (sLit "addDependentFile"),
          doubleQuotes (text (usg_file_path usage))]

-- | Shared layout for module-usage lines: import keyword, safety
-- marker, module, and the module hash.
pprUsageImport :: Outputable a => Usage -> (Usage -> a) -> SDoc
pprUsageImport usage usg_mod'
  = hsep [ptext (sLit "import"), safe, ppr (usg_mod' usage),
          ppr (usg_mod_hash usage)]
  where
    safe | usg_safe usage = ptext $ sLit "safe"
         | otherwise      = ptext $ sLit " -/ "
-- | Print the dependency summary: home modules (with boot markers),
-- packages (with trust markers), orphan and family-instance modules.
pprDeps :: Dependencies -> SDoc
pprDeps (Deps { dep_mods = mods, dep_pkgs = pkgs, dep_orphs = orphs,
                dep_finsts = finsts })
  = vcat [ptext (sLit "module dependencies:") <+> fsep (map ppr_mod mods),
          ptext (sLit "package dependencies:") <+> fsep (map ppr_pkg pkgs),
          ptext (sLit "orphans:") <+> fsep (map ppr orphs),
          ptext (sLit "family instance modules:") <+> fsep (map ppr finsts)
         ]
  where
    ppr_mod (mod_name, boot) = ppr mod_name <+> ppr_boot boot
    -- A trailing * marks a package that must be trusted.
    ppr_pkg (pkg,trust_req) = ppr pkg <>
                              (if trust_req then text "*" else Outputable.empty)
    ppr_boot True  = text "[boot]"
    ppr_boot False = Outputable.empty

-- | Print the fixity declarations recorded in the interface, if any.
pprFixities :: [(OccName, Fixity)] -> SDoc
pprFixities []    = Outputable.empty
pprFixities fixes = ptext (sLit "fixities") <+> pprWithCommas pprFix fixes
  where
    pprFix (occ,fix) = ppr fix <+> ppr occ
-- | Print the vectorisation info sections of an interface.
pprVectInfo :: IfaceVectInfo -> SDoc
pprVectInfo (IfaceVectInfo { ifaceVectInfoVar            = vars
                           , ifaceVectInfoTyCon          = tycons
                           , ifaceVectInfoTyConReuse     = tyconsReuse
                           , ifaceVectInfoParallelVars   = parallelVars
                           , ifaceVectInfoParallelTyCons = parallelTyCons
                           }) =
  vcat
  [ ptext (sLit "vectorised variables:") <+> hsep (map ppr vars)
  , ptext (sLit "vectorised tycons:") <+> hsep (map ppr tycons)
  , ptext (sLit "vectorised reused tycons:") <+> hsep (map ppr tyconsReuse)
  , ptext (sLit "parallel variables:") <+> hsep (map ppr parallelVars)
  , ptext (sLit "parallel tycons:") <+> hsep (map ppr parallelTyCons)
  ]

-- | Print the Safe Haskell trust field.
pprTrustInfo :: IfaceTrustInfo -> SDoc
pprTrustInfo trust = ptext (sLit "trusted:") <+> ppr trust

-- | Print whether this module requires its own package to be trusted.
pprTrustPkg :: Bool -> SDoc
pprTrustPkg tpkg = ptext (sLit "require own pkg trusted:") <+> ppr tpkg
instance Outputable Warnings where
    ppr = pprWarns

-- | Print the deprecation/warning section of an interface.
pprWarns :: Warnings -> SDoc
pprWarns NoWarnings     = Outputable.empty
pprWarns (WarnAll txt)  = ptext (sLit "Warn all") <+> ppr txt
pprWarns (WarnSome prs) = ptext (sLit "Warnings")
                        <+> vcat (map pprWarning prs)
    where pprWarning (name, txt) = ppr name <+> ppr txt

-- | Print one ANN pragma payload together with its target.
pprIfaceAnnotation :: IfaceAnnotation -> SDoc
pprIfaceAnnotation (IfaceAnnotation { ifAnnotatedTarget = target, ifAnnotatedValue = serialized })
  = ppr target <+> ptext (sLit "annotated by") <+> ppr serialized
{-
*********************************************************
* *
\subsection{Errors}
* *
*********************************************************
-}
-- | Wrap a lower-level parse error with the offending file name.
badIfaceFile :: String -> SDoc -> SDoc
badIfaceFile file err
  = vcat [ptext (sLit "Bad interface file:") <+> text file,
          nest 4 err]

-- | Error for an interface whose recorded module differs from the one
-- we asked 'readIface' for.
hiModuleNameMismatchWarn :: Module -> Module -> MsgDoc
hiModuleNameMismatchWarn requested_mod read_mod =
  -- ToDo: This will fail to have enough qualification when the package IDs
  -- are the same
  withPprStyle (mkUserStyle alwaysQualify AllTheWay) $
    -- we want the Modules below to be qualified with package names,
    -- so reset the PrintUnqualified setting.
    hsep [ ptext (sLit "Something is amiss; requested module ")
         , ppr requested_mod
         , ptext (sLit "differs from name found in the interface file")
         , ppr read_mod
         ]
-- | Error for a file found on disk whose interface names a different
-- module than the file path implied.
wrongIfaceModErr :: ModIface -> Module -> String -> SDoc
wrongIfaceModErr iface mod_name file_path
  = sep [ptext (sLit "Interface file") <+> iface_file,
         ptext (sLit "contains module") <+> quotes (ppr (mi_module iface)) <> comma,
         ptext (sLit "but we were expecting module") <+> quotes (ppr mod_name),
         sep [ptext (sLit "Probable cause: the source code which generated"),
             nest 2 iface_file,
             ptext (sLit "has an incompatible module name")
            ]
        ]
  where iface_file = doubleQuotes (text file_path)

homeModError :: Module -> ModLocation -> SDoc
-- See Note [Home module load error]
homeModError mod location
  = ptext (sLit "attempting to use module ") <> quotes (ppr mod)
    <> (case ml_hs_file location of
           Just file -> space <> parens (text file)
           Nothing   -> Outputable.empty)
    <+> ptext (sLit "which is not loaded")
| DavidAlphaFox/ghc | compiler/iface/LoadIface.hs | bsd-3-clause | 47,290 | 377 | 20 | 15,124 | 7,196 | 3,840 | 3,356 | -1 | -1 |
-- |
-- Module: Data.Aeson.Functions
-- Copyright: (c) 2011-2016 Bryan O'Sullivan
-- (c) 2011 MailRank, Inc.
-- License: BSD3
-- Maintainer: Bryan O'Sullivan <bos@serpentine.com>
-- Stability: experimental
-- Portability: portable
module Data.Aeson.Internal.Functions
(
mapHashKeyVal
, mapKeyVal
, mapKey
) where
import Prelude ()
import Prelude.Compat
import Data.Hashable (Hashable)
import qualified Data.HashMap.Strict as H
import qualified Data.Map as M
-- | Transform a 'M.Map' into a 'H.HashMap', applying the given
-- functions to every key and every value along the way.
mapHashKeyVal :: (Eq k2, Hashable k2) => (k1 -> k2) -> (v1 -> v2)
              -> M.Map k1 v1 -> H.HashMap k2 v2
mapHashKeyVal fk kv m =
    M.foldrWithKey (\k v acc -> H.insert (fk k) (kv v) acc) H.empty m
{-# INLINE mapHashKeyVal #-}
-- | Transform the keys and values of a 'H.HashMap' by folding the old
-- map into a fresh one.
mapKeyVal :: (Eq k2, Hashable k2) => (k1 -> k2) -> (v1 -> v2)
          -> H.HashMap k1 v1 -> H.HashMap k2 v2
mapKeyVal fk kv hm =
    H.foldrWithKey (\k v acc -> H.insert (fk k) (kv v) acc) H.empty hm
{-# INLINE mapKeyVal #-}
-- | Transform only the keys of a 'H.HashMap', leaving values untouched.
mapKey :: (Eq k2, Hashable k2) => (k1 -> k2) -> H.HashMap k1 v -> H.HashMap k2 v
mapKey fk hm = H.foldrWithKey (\k v acc -> H.insert (fk k) v acc) H.empty hm
{-# INLINE mapKey #-}
| tolysz/prepare-ghcjs | spec-lts8/aeson/Data/Aeson/Internal/Functions.hs | bsd-3-clause | 1,242 | 0 | 10 | 275 | 349 | 194 | 155 | 21 | 1 |
{-# LANGUAGE OverloadedStrings #-}
-- | Downloads page view.
module HL.View.Downloads where
import Data.Monoid
import HL.Types
import HL.View
import HL.View.Template
-- | Haskell Platform section: heading, blurb, and per-OS download
-- links rooted at the platform site.
hpSection :: Html ()
hpSection = do
  let hpRoot = "http://www.haskell.org/platform/"
  h2_ "Haskell Platform"
  p_ $ "The Haskell Platform is a convenient way to install the Haskell development tools and"
    <> " a collection of commonly used Haskell packages from Hackage."
  p_ $ "Get the Haskell Platform for:"
  ul_ $ do li_ $ a_ [href_ $ hpRoot <> "windows.html"] "Windows"
           li_ $ a_ [href_ $ hpRoot <> "mac.html"] "OS X"
           li_ $ a_ [href_ $ hpRoot <> "linux.html"] "Linux"
  hr_ [style_ "height: 1px; background-color: black;"]
-- | Downloads view.
-- Renders the platform section, the compiler blurb, one list entry per
-- 'OS' value, and the third-party-libraries section.
downloadsV :: FromLucid App
downloadsV =
  template [] "Downloads"
  (\url ->
     container_
       (row_
          (span12_ [class_ "col-md-12"]
                   (do h1_ "Downloads"
                       hpSection
                       h2_ "Compiler and base libraries"
                       p_ "Many now recommend just using the compiler and base libraries combined with package sandboxing, especially for new users interested in using frameworks with complex dependency structures."
                       p_ "Downloads are available on a per operating system basis:"
                       -- One link per OS constructor, routed via DownloadsForR.
                       ul_ (forM_ [minBound .. maxBound]
                                  (\os ->
                                     li_ (a_ [href_ (url (DownloadsForR os))]
                                             (toHtml (toHuman os)))))
                       thirdParty))))
-- | OS-specific downloads view.
-- The manual-install fragment is only shown for Linux.
downloadsForV :: OS -> Html () -> Html () -> FromLucid App
downloadsForV os autoInstall manualInstall =
  template
    [DownloadsR
    ,DownloadsForR os]
    ("Downloads for " <> toHuman os)
    (\_ ->
       container_
         (row_
            (span12_ [class_ "col-md-12"]
                     (do h1_ (toHtml ("Downloads for " <> toHuman os))
                         autoInstall
                         when (os == Linux)
                              (do h2_ "Manual install"
                                  p_ "To install GHC and Cabal manually, follow these steps."
                                  manualInstall)))))
-- | "Third party libraries" section: intro paragraph followed by the
-- Hackage, LTS Haskell, Stackage and GitHub sub-sections.
thirdParty :: Html ()
thirdParty =
  do h2_ "Third party libraries"
     p_ (do "In Haskell, packages are managed with the Cabal package system built into GHC (and other compilers). "
            "For more specific details, see "
            (a_ [href_ "https://www.haskell.org/cabal/users-guide/"] "The Cabal User Guide")
            ".")
     hackage
     ltsHaskell
     stackage
     github
-- | Hackage sub-section with a cabal-install usage example.
hackage :: Html ()
hackage =
  do h3_ "Hackage"
     p_ (do "Hackage is a repository of packages to which anyone can freely \
            \upload at any time. The packages are available immediately and \
            \documentation will be generated and hosted there. It can be used by "
            code_ "cabal install"
            ".")
     p_ "You can install a package by merely running: "
     pre_ "$ cabal update \n\
          \$ cabal install the-package"
     p_ (a_ [href_ "https://hackage.haskell.org/packages/"] $ "Go to Hackage →")
-- | LTS Haskell sub-section.
ltsHaskell :: Html ()
ltsHaskell =
  do h3_ "LTS Haskell"
     p_ "LTS Haskell is a stackage-based long-term support set of packages \
        \which build and pass tests together, with backported bug fixes."
     p_ (a_ [href_ "http://www.stackage.org/lts"] $ "Get LTS Haskell →")

-- | Stackage Nightly sub-section.
stackage :: Html ()
stackage =
  do h3_ "Stackage Nightly"
     p_ "Stackage is a nightly generated stable repository of snapshots of package sets in \
        \which only packages which build and pass tests together are bundled \
        \together into a snapshot."
     p_ (a_ [href_ "http://www.stackage.org/nightly"] $ "Get Stackage Nightly →")
-- | "From source control" sub-section with git + cabal examples.
github :: Html ()
github =
  do h3_ "From source control repositories"
     p_ "Installing from a source repository is also possible. For example, \
        \to clone and install the network package from source, you would run:"
     pre_ "$ git clone git@github.com:haskell/network.git\n\
          \$ cabal install network/"
     p_ "Or:"
     pre_ "$ git clone git@github.com:haskell/network.git\n\
          \$ cd network\n\
          \$ cabal install"
     p_ (a_ [href_ "https://github.com/trending?l=haskell&since=monthly"] $
         "Browse Github by Haskell repositories →")
| imalsogreg/hl | src/HL/View/Downloads.hs | bsd-3-clause | 4,380 | 0 | 29 | 1,309 | 792 | 368 | 424 | 89 | 1 |
module Foundation where
import Prelude
import Yesod
import Yesod.Static
import Yesod.Auth
import Yesod.Auth.BrowserId
import Yesod.Auth.GoogleEmail
import Yesod.Default.Config
import Yesod.Default.Util (addStaticContentExternal)
import Network.HTTP.Conduit (Manager)
import qualified Settings
import Settings.Development (development)
import qualified Database.Persist
import Database.Persist.Sql (SqlPersistT)
import Settings.StaticFiles
import Settings (widgetFile, Extra (..))
import Model
import Text.Jasmine (minifym)
import Text.Hamlet (hamletFile)
import System.Log.FastLogger (Logger)
-- | The site argument for your application. This can be a good place to
-- keep settings and values requiring initialization before your application
-- starts running, such as database connections. Every handler will have
-- access to the data present here.
data App = App
    { settings      :: AppConfig DefaultEnv Extra
    , getStatic     :: Static -- ^ Settings for static file serving.
    , connPool      :: Database.Persist.PersistConfigPool Settings.PersistConf -- ^ Database connection pool.
    , httpManager   :: Manager -- ^ Shared HTTP connection manager.
    , persistConfig :: Settings.PersistConf
    , appLogger     :: Logger -- ^ Destination for runtime log messages.
    }
-- Set up i18n messages. See the message folder.
mkMessage "App" "messages" "en"
-- This is where we define all of the routes in our application. For a full
-- explanation of the syntax, please see:
-- http://www.yesodweb.com/book/handler
--
-- This function does three things:
--
-- * Creates the route datatype AppRoute. Every valid URL in your
-- application can be represented as a value of this type.
-- * Creates the associated type:
-- type instance Route App = AppRoute
-- * Creates the value resourcesApp which contains information on the
-- resources declared below. This is used in Handler.hs by the call to
-- mkYesodDispatch
--
-- What this function does *not* do is create a YesodSite instance for
-- App. Creating that instance requires all of the handler functions
-- for our application to be in scope. However, the handler functions
-- usually require access to the AppRoute datatype. Therefore, we
-- split these actions into two functions and place them in separate files.
mkYesodData "App" $(parseRoutesFile "config/routes")

-- | Shorthand for an applicative form rendered in a Handler of this site.
type Form x = Html -> MForm (HandlerT App IO) (FormResult x, Widget)
-- Please see the documentation for the Yesod typeclass. There are a number
-- of settings which can be configured by overriding methods here.
instance Yesod App where
    approot = ApprootMaster $ appRoot . settings

    -- Store session data on the client in encrypted cookies,
    -- default session idle timeout is 120 minutes
    makeSessionBackend _ = fmap Just $ defaultClientSessionBackend
        (120 * 60) -- 120 minutes
        "config/client_session_key.aes"

    defaultLayout widget = do
        master <- getYesod
        mmsg <- getMessage

        -- We break up the default layout into two components:
        -- default-layout is the contents of the body tag, and
        -- default-layout-wrapper is the entire page. Since the final
        -- value passed to hamletToRepHtml cannot be a widget, this allows
        -- you to use normal widget features in default-layout.

        pc <- widgetToPageContent $ do
            $(combineStylesheets 'StaticR
                [ css_normalize_css
                , css_bootstrap_css
                ])
            $(widgetFile "default-layout")
        giveUrlRenderer $(hamletFile "templates/default-layout-wrapper.hamlet")

    -- This is done to provide an optimization for serving static files from
    -- a separate domain. Please see the staticRoot setting in Settings.hs
    urlRenderOverride y (StaticR s) =
        Just $ uncurry (joinPath y (Settings.staticRoot $ settings y)) $ renderRoute s
    urlRenderOverride _ _ = Nothing

    -- The page to be redirected to when authentication is required.
    authRoute _ = Just $ AuthR LoginR

    -- This function creates static content files in the static folder
    -- and names them based on a hash of their content. This allows
    -- expiration dates to be set far in the future without worry of
    -- users receiving stale content.
    addStaticContent =
        addStaticContentExternal minifym genFileName Settings.staticDir (StaticR . flip StaticRoute [])
      where
        -- Generate a unique filename based on the content itself
        genFileName lbs
            | development = "autogen-" ++ base64md5 lbs
            | otherwise   = base64md5 lbs

    -- Place Javascript at bottom of the body tag so the rest of the page loads first
    jsLoader _ = BottomOfBody

    -- What messages should be logged. The following includes all messages when
    -- in development, and warnings and errors in production.
    shouldLog _ _source level =
        development || level == LevelWarn || level == LevelError

    makeLogger = return . appLogger
-- How to run database actions.
instance YesodPersist App where
    type YesodPersistBackend App = SqlPersistT
    runDB = defaultRunDB persistConfig connPool
-- Streaming database access uses the same shared connection pool.
instance YesodPersistRunner App where
    getDBRunner = defaultGetDBRunner connPool
instance YesodAuth App where
    type AuthId App = UserId

    -- Where to send a user after successful login
    loginDest _ = HomeR
    -- Where to send a user after logout
    logoutDest _ = HomeR

    -- Look the credentials up in the users table; create the user record
    -- on first login.
    getAuthId creds = runDB $ do
        x <- getBy $ UniqueUser $ credsIdent creds
        case x of
            Just (Entity uid _) -> return $ Just uid
            Nothing -> do
                fmap Just $ insert $ User (credsIdent creds) Nothing

    -- You can add other plugins like BrowserID, email or OAuth here
    authPlugins _ = [authBrowserId def, authGoogleEmail]

    authHttpManager = httpManager
-- This instance is required to use forms. You can modify renderMessage to
-- achieve customized and internationalized form validation messages.
instance RenderMessage App FormMessage where
    renderMessage _ _ = defaultFormMessage

-- | Get the 'Extra' value, used to hold data from the settings.yml file.
getExtra :: Handler Extra
getExtra = fmap (appExtra . settings) getYesod
-- Note: previous versions of the scaffolding included a deliver function to
-- send emails. Unfortunately, there are too many different options for us to
-- give a reasonable default. Instead, the information is available on the
-- wiki:
--
-- https://github.com/yesodweb/yesod/wiki/Sending-email
| k-bx/web_shelf | WebShelf/Foundation.hs | gpl-3.0 | 6,438 | 0 | 17 | 1,383 | 866 | 480 | 386 | -1 | -1 |
{-# LANGUAGE GADTs, ExplicitForAll #-}
module Main (main) where
import GHC.Exts
-- GADT-syntax newtype: note the type variables are quantified in the
-- order @b a@, the reverse of their order in the head @Age a b@.
newtype Age a b where
  Age :: forall b a. Int -> Age a b
data T a = MkT a
-- NOINLINE keeps 'foo' around so the rewrite rule below has a chance
-- to fire on the call site in 'main'.
{-# NOINLINE foo #-}
foo :: (Int -> Age Bool Char) -> String
foo _ = "bad (RULE should have fired)"
-- Regression check (T16208): @foo Age@ must rewrite via this rule,
-- i.e. the Age constructor must be seen as 'coerce'.
{-# RULES "foo/coerce" [1] foo coerce = "good" #-}
main = putStrLn (foo Age)
| sdiehl/ghc | testsuite/tests/simplCore/should_run/T16208.hs | bsd-3-clause | 338 | 0 | 8 | 75 | 96 | 56 | 40 | 11 | 1 |
module GCD () where
import Prelude hiding (gcd, mod)
import Language.Haskell.Liquid.Prelude
{-@ mod :: a:Nat -> b:{v:Nat| ((v < a) && (v > 0))} -> {v:Nat | v < b} @-}
-- | Remainder by repeated subtraction.  Per the LiquidHaskell
-- refinement above, callers must supply 0 < b < a; outside that range
-- the result is meaningless (though the guards remain exhaustive).
mod :: Int -> Int -> Int
mod a b | a - b >  b = mod (a - b) b
        | a - b <  b = a - b
        | a - b == b = 0
{-@ gcd :: a:Nat -> b:{v:Nat | v < a} -> Int @-}
-- | Euclid's algorithm, using the local subtraction-based 'mod' above
-- (Prelude's gcd/mod are hidden by the module's import list).
gcd :: Int -> Int -> Int
gcd a 0 = a
gcd a b = gcd b (a `mod` b)
{-@ gcd' :: a:Nat -> b:Nat -> Nat / [a, b] @-}
-- | Subtraction-based GCD on naturals; the termination measure @[a, b]@
-- above shrinks on every recursive call.
gcd' :: Int -> Int -> Int
gcd' a b
  | a == 0    = b
  | b == 0    = a
  | otherwise = case compare a b of
      EQ -> a
      GT -> gcd' (a - b) b
      LT -> gcd' a (b - a)
| mightymoose/liquidhaskell | tests/pos/GCD.hs | bsd-3-clause | 611 | 0 | 9 | 226 | 272 | 140 | 132 | 16 | 1 |
{-# LANGUAGE TypeFamilies #-}
module Main where
type family Elem c
class Col c where
isEmpty :: c -> Bool
add :: c -> Elem c -> c
headTail :: c -> (Elem c,c)
-- LIST
instance Col [a] where
isEmpty = null
add = flip (:)
headTail (x:xs) = (x,xs)
type instance Elem [a] = a
-- SEQUENCE
data Sequence a = Nil | Snoc (Sequence a) a deriving Show
instance Col (Sequence a) where
isEmpty Nil = True
isEmpty _ = False
add s x = Snoc s x
headTail (Snoc s x) = (x,s)
type instance Elem (Sequence a) = a
--
addAll c1 c2
| isEmpty c1
= c2
| otherwise
= let (x,c1') = headTail c1
in addAll c1' (add c2 x)
--
main = print $ addAll c1 c2
where c1 = ['a','b','c']
c2 = (Snoc (Snoc (Snoc Nil 'd') 'e') 'f')
| ezyang/ghc | testsuite/tests/indexed-types/should_compile/ColInference3.hs | bsd-3-clause | 817 | 0 | 12 | 275 | 355 | 191 | 164 | 28 | 1 |
import System.IO
import System.IO.Error
-- Regression test: writing to a handle after hClose must raise an
-- illegal-operation IOError, which we catch and report as "Okay".
main = do
  h <- openFile "hClose001.tmp" WriteMode
  hPutStr h "junk"
  hClose h
  hPutStr h "junk" `catchIOError` \ err -> if isIllegalOperation err then putStr "Okay\n" else error "Not okay\n"
| urbanslug/ghc | libraries/base/tests/IO/hClose001.hs | bsd-3-clause | 238 | 1 | 10 | 44 | 82 | 38 | 44 | 7 | 2 |
import Sieve
-- Print the 10001st prime from the 'Sieve' stream (Project Euler #7).
main = print (last (take 10001 sieve))
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Program.UserAction
( QEventTag (..)
, ScId (..)
) where
import Commons.NoReflex
import Model.Scenario.Properties (PropName)
import Model.Scenario.Object (ObjectId)
-- | Represents a scenario id.
-- A plain Int wrapper used to identify a stored scenario.
-- TODO: this datatype should be in luci module;
newtype ScId = ScId Int
  deriving (Eq, Show, Ord)
-- | Event types fired by user actions
-- Each constructor is a GADT tag pairing an action with its payload type.
data instance QEventTag UserAction evArg where
    -- | User wants to save scenario with this name.
    AskSaveScenario   :: QEventTag UserAction Text
    -- | User selects a scenario in the scenario list.
    AskSelectScenario :: QEventTag UserAction ScId
    -- | User wants to clear all geometry.
    AskClearGeometry  :: QEventTag UserAction ()
    -- | User wants to reset camera to its default position.
    AskResetCamera    :: QEventTag UserAction ()
    -- | User selected a property so that we can colorize all objects according to prop value
    PropertyClicked   :: QEventTag UserAction PropName
    -- | Programmatically select or unselect an object.
    --   This event does not fire when a user click on an object!
    --   If you want to listen to object seletion events, use global dynamic `selectedObjectIdD`.
    AskSelectObject   :: QEventTag UserAction (Maybe ObjectId)

-- Template Haskell: generate the event boilerplate for UserAction.
deriveEvent ''UserAction
| achirkin/qua-view | src/Program/UserAction.hs | mit | 1,437 | 0 | 8 | 293 | 177 | 105 | 72 | 21 | 0 |
{-# LANGUAGE InstanceSigs #-}
module Ch25.Compose where
-- | Composition of two type constructors: wrapping @f (g a)@ behaves
-- as a single functor over @a@.
newtype Compose f g a =
  Compose { getCompose :: f (g a) }
  deriving (Eq, Show)

-- Mapping reaches through both layers by lifting the function twice.
instance (Functor f, Functor g) => Functor (Compose f g) where
  fmap h (Compose layers) = Compose (fmap (fmap h) layers)
-- | A single wrapped functor layer; 'fmap' delegates straight through.
newtype One f a =
  One (f a)
  deriving (Eq, Show)

instance Functor f => Functor (One f) where
  fmap g (One inner) = One (g <$> inner)
-- | Three nested functor layers; 'fmap' lifts the function three times.
newtype Three f g h a =
  Three (f (g (h a)))
  deriving (Eq, Show)

instance (Functor f, Functor g, Functor h) => Functor (Three f g h) where
  fmap k (Three nested) = Three (fmap (fmap (fmap k)) nested)
-- Applicative lifts 'pure' through both layers; application pairs the
-- inner applicatives pointwise.
instance (Applicative f, Applicative g) => Applicative (Compose f g) where
  pure :: a -> Compose f g a
  pure = Compose . pure . pure

  (<*>) :: Compose f g (a -> b) -> Compose f g a -> Compose f g b
  Compose h <*> Compose x = Compose (fmap (<*>) h <*> x)

-- Folding and traversal likewise act layer by layer.
instance (Foldable f, Foldable g) => Foldable (Compose f g) where
  foldMap k (Compose x) = foldMap (foldMap k) x

instance (Traversable f, Traversable g) => Traversable (Compose f g) where
  traverse k (Compose x) = fmap Compose (traverse (traverse k) x)
-- | A functor over the last two type arguments of @p@.  Instances must
-- define either 'bimap' or both 'first' and 'second'; the defaults
-- define each side in terms of the other.
class Bifunctor p where
  {-# MINIMAL bimap | first, second #-}

  -- | Map over both type arguments at once.
  bimap :: (a -> b) -> (c -> d) -> p a c -> p b d
  bimap f g = first f . second g

  -- | Map over the first of the two arguments only.
  first :: (a -> b) -> p a c -> p b c
  first f = bimap f id

  -- | Map over the second of the two arguments only.
  second :: (b -> c) -> p a b -> p a c
  second = bimap id
-- | A plain pair; both slots are mapped independently.
data Deux a b = Deux a b

instance Bifunctor Deux where
  bimap g h (Deux x y) = Deux (g x) (h y)
  first g (Deux x y) = Deux (g x) y
  second h (Deux x y) = Deux x (h y)
-- | Holds only the first argument; @b@ is phantom, so 'second' is a no-op.
data Const a b = Const a

instance Bifunctor Const where
  bimap g _ (Const x) = Const (g x)
  first g (Const x) = Const (g x)
  second _ (Const x) = Const x
-- | Three slots; the Bifunctor instance maps the last two.
data Drei a b c = Drei a b c

instance Bifunctor (Drei a) where
  bimap g h (Drei x y z) = Drei x (g y) (h z)
  first g (Drei x y z) = Drei x (g y) z
  second h (Drei x y z) = Drei x y (h z)
-- | Stores @a@ and @b@ only; @c@ is phantom, so 'second' is a no-op.
data SuperDrei a b c = SuperDrei a b

instance Bifunctor (SuperDrei a) where
  bimap g _ (SuperDrei x y) = SuperDrei x (g y)
  first g (SuperDrei x y) = SuperDrei x (g y)
  second _ (SuperDrei x y) = SuperDrei x y
-- | Stores only @a@; both mapped arguments are phantom, so every
-- operation rebuilds the value unchanged.
data SemiDrei a b c = SemiDrei a

instance Bifunctor (SemiDrei a) where
  bimap _ _ (SemiDrei x) = SemiDrei x
  first _ (SemiDrei x) = SemiDrei x
  second _ (SemiDrei x) = SemiDrei x
-- | Four slots; the Bifunctor instance maps the last two.
data Quadriceps a b c d = Quadzzz a b c d

instance Bifunctor (Quadriceps a b) where
  bimap g h (Quadzzz w x y z) = Quadzzz w x (g y) (h z)
  first g (Quadzzz w x y z) = Quadzzz w x (g y) z
  second h (Quadzzz w x y z) = Quadzzz w x y (h z)
-- | A re-implementation of 'Either'; each side maps its own payload.
data Either' a b =
    Left' a
  | Right' b

instance Bifunctor Either' where
  bimap g _ (Left' x)  = Left' (g x)
  bimap _ h (Right' y) = Right' (h y)
  first g = bimap g id
  second = bimap id
| andrewMacmurray/haskell-book-solutions | src/ch25/Compose.hs | mit | 2,863 | 0 | 11 | 776 | 1,565 | 799 | 766 | 77 | 0 |
{-# LANGUAGE TemplateHaskell #-}
module Station.Types.Card
( module Station.Types.Card
, module STC
) where
import Import
import qualified Data.Hashable as HA
import Lens.Micro.TH
import Station.Types.Card.Hardcoded as STC
import Station.Types.Card.Hash as STC
import Station.Types.Card.Id as STC
import Station.Types.Card.Link as STC
import Station.Types.Card.Time as STC
import Station.Types.Card.URI as STC
-- | A link addressing a schema by blob or version hash; used as the
-- required "schema" field of every 'Card'.
newtype SchemaLink
  = SchemaLink { _unSchemaLink :: Link BlobOrVersionHash }
  deriving (Eq, Show, Generic, FromJSON, ToJSON)

instance HA.Hashable SchemaLink
-- | A card whose instance payload is raw bytes.
type CardBytes = Card ByteString

-- | A schema-tagged payload with an optional human-readable name.
data Card a = Card
  -- Keys required.
  { _cardSchema   :: SchemaLink
  , _cardInstance :: a
  -- Keys optional.
  , _cardName     :: Maybe Text
  } deriving (Eq, Show, Functor)
-- JSON shape: "schema" and "instance" are required, "name" optional
-- (.:! tolerates an absent key on parse).
instance FromJSON a => FromJSON (Card a) where
  parseJSON = withObject "Card" $ \o -> Card
    <$> o .: "schema"
    <*> o .: "instance"
    <*> o .:! "name"

-- Serialization mirrors parsing; catMaybes drops the "name" pair
-- entirely when the card has no name.
instance ToJSON a => ToJSON (Card a) where
  toJSON a = object $
    [ "schema" .= _cardSchema a
    , "instance" .= _cardInstance a
    ]
    <> catMaybes
    [ "name" `optionalPair` _cardName a
    ]
-- | Replace a card's raw byte payload with the hash of that payload.
magFromCard :: CardBytes -> Card BlobHash
magFromCard card = card { _cardInstance = hashFront card }
-- | Resolve a hash-carrying card back into a byte-carrying card using
-- the supplied lookup; 'Nothing' when the payload cannot be found.
cardFromMag
  :: (BlobHash -> Maybe ByteString)
  -> Card BlobHash
  -> Maybe CardBytes
cardFromMag resolve card =
  (\bytes -> card { _cardInstance = bytes }) <$> resolve (_cardInstance card)
-- | Hash a card's byte payload into a 'BlobHash'.
hashFront :: CardBytes -> BlobHash
hashFront card = BlobHash (STC.hashProper (_cardInstance card))
-- * Lenses

-- | Hand-written lens for '_unSchemaLink'; 'makeLenses' below only
-- covers the 'Card' record.
unSchemaLink :: Lens' SchemaLink (Link BlobOrVersionHash)
unSchemaLink f a = (\b -> a { _unSchemaLink = b }) <$> f (_unSchemaLink a)

makeLenses ''Card
| seagreen/station | src/Station/Types/Card.hs | mit | 1,950 | 0 | 13 | 594 | 522 | 293 | 229 | 48 | 1 |
{-# LANGUAGE RecordWildCards #-}
module Lambency.Sprite (
changeSpriteFrameColor,
changeSpriteColor,
loadStaticSprite,
loadStaticSpriteWithTexture,
loadStaticSpriteWithMask,
loadAnimatedSprite,
loadAnimatedSpriteWithTexture,
loadAnimatedSpriteWithMask,
loadFixedSizeAnimatedSprite,
renderSprite,
renderSpriteWithAlpha,
renderUISprite,
renderUISpriteWithSize,
SpriteAnimationType(..),
animatedWire,
) where
--------------------------------------------------------------------------------
import Control.Comonad
import Control.Monad.Reader
import Control.Wire
import Data.List (nub)
import Lambency.Material
import Lambency.Mesh
import Lambency.Renderer
import Lambency.Texture
import Lambency.Transform
import Lambency.Types
import Lambency.Utils
import Linear hiding (trace, identity)
import qualified Linear
import Prelude hiding ((.), id)
--------------------------------------------------------------------------------
-- | Set the mask color of a masked sprite material. Any other material
-- type is a programmer error and crashes loudly.
updateColor :: V4 Float -> Material -> Material
updateColor c mat@(MaskedSpriteMaterial {..}) =
  mat { spriteMaskColor = updateMaterialVar4vf c spriteMaskColor }
updateColor _ m =
  error $ "Lambency.Sprite (updateColor): Unsupported material type: " ++ show m
-- | Set only the alpha channel of a sprite material, preserving any
-- RGB already stored in the mask color. Masked materials with an unset
-- mask color get an opaque white base; textured materials store alpha
-- separately. Other material types are a programmer error.
updateAlpha :: Float -> Material -> Material
updateAlpha a' mat@(MaskedSpriteMaterial {..}) =
  case spriteMaskColor of
    MaterialVar (_, Nothing) ->
      mat { spriteMaskColor = updateMaterialVar4vf (V4 1 1 1 a') spriteMaskColor }
    MaterialVar (_, Just (Vector4Val (V4 r g b _))) ->
      mat { spriteMaskColor = updateMaterialVar4vf (V4 r g b a') spriteMaskColor }
    MaterialVar (_, Just _) ->
      error $ "Lambency.Sprite (updateAlpha): Internal error -- " <>
        "spriteMaskColor is not a V4 value??"
updateAlpha a mat@(TexturedSpriteMaterial {..}) =
  mat { spriteAlpha = updateMaterialVarf a spriteAlpha }
updateAlpha _ m =
  -- Bug fix: this error previously reported "updateColor", making the
  -- crash message point at the wrong function.
  error $ "Lambency.Sprite (updateAlpha): Unsupported material type: " ++ show m
-- | Overwrite the scale portion of a 3x3 homogeneous transform,
-- keeping its translation row intact.
updateMatrixScale :: V2 Float -> M33 Float -> M33 Float
updateMatrixScale (V2 sx sy) (V3 _ _ translationRow) =
  V3 (V3 sx 0 0) (V3 0 sy 0) translationRow
-- | Overwrite the translation row of a 3x3 homogeneous transform,
-- keeping its first two rows intact.
updateMatrixTranslation :: V2 Float -> M33 Float -> M33 Float
updateMatrixTranslation (V2 tx ty) (V3 row0 row1 _) =
  V3 row0 row1 (V3 tx ty 1)
-- | Extract a matrix from a material variable, falling back to the
-- identity matrix when no matrix value is bound.
getShaderVarMatrix :: MaterialVar Mat3f -> Mat3f
getShaderVarMatrix mv = case mv of
  MaterialVar (_, Just (Matrix3Val mat)) -> mat
  _                                      -> Linear.identity
-- | Set the scale part of a sprite material's texture-coordinate
-- matrix, preserving whatever translation is already stored there.
updateScale :: V2 Float -> Material -> Material
updateScale s mat@(MaskedSpriteMaterial {..}) =
  let newMatrix = updateMatrixScale s $ getShaderVarMatrix spriteMaskMatrix
  in mat { spriteMaskMatrix = updateMaterialVar3mf newMatrix spriteMaskMatrix }
updateScale s mat@(TexturedSpriteMaterial {..}) =
  let newMatrix = updateMatrixScale s $ getShaderVarMatrix spriteTextureMatrix
  in mat { spriteTextureMatrix = updateMaterialVar3mf newMatrix spriteTextureMatrix }
updateScale _ m =
  error $ "Lambency.Sprite (updateScale): Unsupported material type: " ++ show m
-- | Set the translation part of a sprite material's texture-coordinate
-- matrix, preserving whatever scale is already stored there.
updateTranslation :: V2 Float -> Material -> Material
updateTranslation t mat@(MaskedSpriteMaterial {..}) =
  let newMatrix = updateMatrixTranslation t $ getShaderVarMatrix spriteMaskMatrix
  in mat { spriteMaskMatrix = updateMaterialVar3mf newMatrix spriteMaskMatrix }
updateTranslation t mat@(TexturedSpriteMaterial {..}) =
  let newMatrix = updateMatrixTranslation t $ getShaderVarMatrix spriteTextureMatrix
  in mat { spriteTextureMatrix = updateMaterialVar3mf newMatrix spriteTextureMatrix }
updateTranslation _ m =
  error $ "Lambency.Sprite (updateTranslation): Unsupported material type: " ++ show m
-- !FIXME! These functions shouldn't be here and we should really be using lenses
-- | Apply a function to a render object's material.
mapROMaterial :: (Material -> Material) -> RenderObject -> RenderObject
mapROMaterial fn ro =
  let mat' = fn (material ro)
  in ro { material = mat' }
-- | Apply a function to the render object stored in a sprite frame.
mapFrameRO :: (RenderObject -> RenderObject) -> SpriteFrame -> SpriteFrame
mapFrameRO fn sf =
  let ro' = fn (frameRO sf)
  in sf { frameRO = ro' }
-- | Tag a frame's render object with the 'Text' render flag.
addTextFlag :: SpriteFrame -> SpriteFrame
addTextFlag = mapFrameRO tagText
  where
    tagText ro = ro { flags = nub (Text : flags ro) }
-- | Set the mask color of a single sprite frame.
changeSpriteFrameColor :: V4 Float -> SpriteFrame -> SpriteFrame
changeSpriteFrameColor = mapFrameRO . mapROMaterial . updateColor
-- | Apply a function to every frame of a sprite.
mapSpriteFrames :: (SpriteFrame -> SpriteFrame) -> Sprite -> Sprite
mapSpriteFrames f (Sprite frames) = Sprite (f <$> frames)
-- | Set the mask color of every frame of a sprite.
changeSpriteColor :: V4 Float -> Sprite -> Sprite
changeSpriteColor = mapSpriteFrames . changeSpriteFrameColor
-- | Prepend a render flag onto a render object, keeping flags unique.
addRenderFlag :: RenderFlag -> RenderObject -> RenderObject
addRenderFlag flag ro = ro { flags = nub (flag : flags ro) }
-- | Build a single-frame sprite from a texture. When @isMask@ is set
-- the sprite uses the masked (alpha) material and is flagged
-- 'Transparent' so it sorts into the translucent render pass.
initStaticSprite :: Bool -> Texture -> ResourceLoader Sprite
initStaticSprite isMask tex = do
  let mkMaterial | isMask    = maskedSpriteMaterial
                 | otherwise = texturedSpriteMaterial
      tagFlags   | isMask    = addRenderFlag Transparent
                 | otherwise = id
  ro <- tagFlags <$> createRenderObject quad (mkMaterial tex)
  let frame = SpriteFrame
        { offset = zero
        , spriteSize = textureSize tex
        , frameRO = ro
        }
  return $ Sprite { spriteFrames = cycleSingleton frame }
-- | Build a multi-frame sprite from one texture atlas. @frameSzs@ and
-- @offsets@ are pixel sizes/positions of each frame within the atlas;
-- they are zipped, so the shorter list bounds the frame count. All
-- frames share one render object and differ only in the material's
-- texture-coordinate matrix.
initAnimatedSprite :: Bool -> [V2 Int] -> [V2 Int] -> Texture
                   -> ResourceLoader Sprite
initAnimatedSprite isMask frameSzs offsets tex = do
  let mat = if isMask
            then maskedSpriteMaterial tex
            else texturedSpriteMaterial tex
  ro <- createRenderObject quad mat
  return $ Sprite $ cyclicFromList $ map (genFrame ro) (zip frameSzs offsets)
  where
    -- Bake one frame's UV scale/translation into a copy of the material.
    genFrame :: RenderObject -> (V2 Int, V2 Int) -> SpriteFrame
    genFrame ro (sz, off) =
      let texOff = changeRange off
      in SpriteFrame {
        offset = texOff,
        spriteSize = sz,
        frameRO = ro { material =
                          updateScale (changeRange sz) $
                          updateTranslation texOff $
                          material ro }
        }
    -- Convert pixel coordinates to normalized [0,1] texture coordinates.
    changeRange :: V2 Int -> V2 Float
    changeRange (V2 ox oy) =
      let (V2 tx ty) = textureSize tex
      in V2
         (newRange (fromIntegral ox) (0, fromIntegral tx) (0, 1))
         (newRange (fromIntegral oy) (0, fromIntegral ty) (0, 1))
-- | Load a texture from a file and hand it to the given sprite
-- initializer. Returns 'Nothing' when the file fails to load or loads
-- as something other than a plain 'Texture'.
loadSpriteWith :: FilePath
               -> (Texture -> ResourceLoader Sprite)
               -> ResourceLoader (Maybe Sprite)
loadSpriteWith f initFn = do
  tex <- loadTexture f
  case tex of
    -- Idiom fix: fmap instead of `>>= return . Just`.
    Just t@(Texture _ _) -> Just <$> initFn t
    _ -> return Nothing
-- | Wrap an existing texture as a single-frame textured sprite.
loadStaticSpriteWithTexture :: Texture -> ResourceLoader Sprite
loadStaticSpriteWithTexture = initStaticSprite False
-- | Wrap an existing texture as a single-frame masked (alpha) sprite.
loadStaticSpriteWithMask :: Texture -> ResourceLoader Sprite
loadStaticSpriteWithMask = initStaticSprite True
-- | Load an image file as a single-frame textured sprite; 'Nothing'
-- when the file cannot be loaded as a plain texture.
loadStaticSprite :: FilePath -> ResourceLoader (Maybe Sprite)
loadStaticSprite f = loadSpriteWith f (initStaticSprite False)
-- | Load an image file as a multi-frame textured sprite; 'Nothing' on
-- texture load failure.
loadAnimatedSprite :: FilePath -> [V2 Int] -> [V2 Int]
                   -> ResourceLoader (Maybe Sprite)
loadAnimatedSprite f frameSzs offsets =
  loadSpriteWith f $ initAnimatedSprite False frameSzs offsets
-- | Build a multi-frame textured sprite from an existing texture.
-- (Always succeeds; the Maybe mirrors the file-loading variants.)
loadAnimatedSpriteWithTexture :: Texture -> [V2 Int] -> [V2 Int]
                              -> ResourceLoader (Maybe Sprite)
loadAnimatedSpriteWithTexture t frameSzs offsets =
  -- Idiom fix: fmap instead of `>>= return . Just`.
  Just <$> initAnimatedSprite False frameSzs offsets t
-- | Build a multi-frame masked sprite from an existing texture, tagging
-- every frame with the 'Text' render flag.
loadAnimatedSpriteWithMask :: Texture -> [V2 Int] -> [V2 Int]
                           -> ResourceLoader (Maybe Sprite)
loadAnimatedSpriteWithMask t frameSzs offsets =
  -- !HACK! Not all animated (multi-frame) mask sprites are fonts...
  Just . mapSpriteFrames addTextFlag <$> initAnimatedSprite True frameSzs offsets t
-- | Animated sprite where every frame shares one size.
loadFixedSizeAnimatedSprite :: FilePath -> V2 Int -> [V2 Int]
                            -> ResourceLoader (Maybe Sprite)
loadFixedSizeAnimatedSprite f frameSz = loadAnimatedSprite f (repeat frameSz)
-- | Render the sprite's current frame as a UI element stretched to the
-- given size. The position is divided by the scale because the scale
-- transform is applied around the whole render action.
renderUISpriteWithSize :: Sprite -> V2 Float -> V2 Float -> GameMonad ()
renderUISpriteWithSize sprite pos (V2 sx sy)
  = let (V2 px py) = pos
    in addTransformedRenderAction (nonuniformScale (V3 sx sy 1) identity)
       $ addRenderUIAction (V2 (px / sx) (py / sy))
         (frameRO . extract $ spriteFrames sprite)
-- | Render the sprite's current frame as a UI element at its natural
-- (pixel) size.
renderUISprite :: Sprite -> V2 Float -> GameMonad ()
renderUISprite s@(Sprite frames) pos =
  renderUISpriteWithSize s pos $ fromIntegral <$> (spriteSize $ extract frames)
-- | Queue one frame's render object at the given pixel scale, depth and
-- world position.
renderFrameAt :: RenderObject -> V2 Int -> Float -> V2 Float -> GameMonad ()
renderFrameAt ro sc depth (V2 x y) = addRenderAction xf ro
  where
    (V2 sx sy) = fmap fromIntegral sc
    xf = translate (V3 x y depth) $
         nonuniformScale (V3 sx sy 1) identity
-- Renders an opaque sprite at the given scale, depth, and position
renderSprite :: Sprite -> V2 Int -> Float -> V2 Float -> GameMonad ()
renderSprite s = renderSpriteWithAlpha s 1.0
-- Renders a sprite for the given alpha, scale, depth, and position
renderSpriteWithAlpha :: Sprite -> Float -> V2 Int -> Float -> V2 Float ->
                         GameMonad ()
renderSpriteWithAlpha (Sprite frames) a
  -- Exact-Float fast paths: fully invisible renders nothing, fully
  -- opaque skips the material rewrite. Anything else clones the frame's
  -- material with the new alpha and marks it Transparent.
  | a == 0.0 = \_ _ _ -> return ()
  | a == 1.0 = renderFrameAt (frameRO $ extract frames)
  | otherwise = renderFrameAt (setAlpha . frameRO . extract $ frames)
  where
    setAlpha ro = ro { material = updateAlpha a (material ro),
                       flags = nub $ Transparent : (flags ro) }
-- | Playback modes understood by 'animatedWire': play once forward or
-- backward, loop in either direction, or bounce back and forth.
data SpriteAnimationType
  = SpriteAnimationType'Forward
  | SpriteAnimationType'Backward
  | SpriteAnimationType'Loop
  | SpriteAnimationType'LoopBack
  | SpriteAnimationType'PingPong
  deriving (Eq, Ord, Show, Enum, Bounded)
-- | A wire that emits the sprite, advancing one frame per step.
-- Forward/Backward inhibit after one full cycle; Loop/LoopBack restart
-- via (-->); PingPong alternates directions.
animatedWire :: Sprite -> SpriteAnimationType -> GameWire a Sprite
-- A sprite with no frames can never produce output.
animatedWire (Sprite (CyclicList _ _ [])) SpriteAnimationType'Forward = mkEmpty
animatedWire s@(Sprite frames) SpriteAnimationType'Forward =
  mkGenN $ \ _ -> do
    let nextSprite = Sprite (advance frames)
    return (Right s, animatedWire nextSprite SpriteAnimationType'Forward)
-- Backward playback reuses the forward logic on a reversed cyclic list.
animatedWire (Sprite (CyclicList p c n)) SpriteAnimationType'Backward =
  animatedWire (Sprite (CyclicList n c p)) SpriteAnimationType'Forward
animatedWire s SpriteAnimationType'Loop =
  let w = animatedWire s SpriteAnimationType'Forward
  in w --> w
animatedWire s SpriteAnimationType'LoopBack =
  let w = animatedWire s SpriteAnimationType'Backward
  in w --> w
animatedWire s SpriteAnimationType'PingPong =
  let f = animatedWire s SpriteAnimationType'Forward
      b = animatedWire s SpriteAnimationType'Backward
  in
   f --> b --> (animatedWire s SpriteAnimationType'PingPong)
| Mokosha/Lambency | lib/Lambency/Sprite.hs | mit | 10,313 | 0 | 17 | 2,069 | 3,023 | 1,535 | 1,488 | 205 | 3 |
{-# LANGUAGE OverloadedStrings,DeriveGeneric #-}
{-# OPTIONS_GHC -fno-warn-unused-binds -fno-warn-unused-imports #-}
module Data.TZworld.Internal.Types where
import Data.Aeson
import qualified Data.Binary as DB
import Control.Monad
import GHC.Generics
import qualified Data.ByteString.Lazy as BL
import Database.SQLite.Simple.FromRow
import Database.SQLite.Simple.ToRow
import Control.Applicative
-- Geographic coordinates in decimal degrees.
type Longitude = Double
type Latitude = Double
-- | One row of the tzworld SQLite table: an integer key plus a binary
-- blob payload.
data TZWorldField = TZWorldField Int BL.ByteString deriving (Show)
instance FromRow TZWorldField where
  fromRow = TZWorldField <$> field <*> field
instance ToRow TZWorldField where
  toRow (TZWorldField id_ blob) = toRow (id_, blob)
{- Defines a TZ World structure -}
-- | Top-level GeoJSON document: a feature collection with a CRS header.
data TZWorld = TZWorld {
  tztype::String
  , crs::CRS
  , features::[TZFeature]
  }deriving (Show)
instance FromJSON TZWorld where
  parseJSON (Object v) = TZWorld <$>
                         v .: "type" <*>
                         v .: "crs" <*>
                         v .: "features"
  -- Anything other than a JSON object fails the parse.
  parseJSON _ = mzero
-- | Coordinate reference system header of the GeoJSON document.
data CRS = CRS {
  crsType::String
  , crsProperties::CRSProperty
  }deriving (Show)
instance FromJSON CRS where
  parseJSON (Object v) = CRS <$>
                         v .: "type" <*>
                         v .: "properties"
  parseJSON _ = mzero
data CRSProperty = CRSProperty {
  name::String
  } deriving (Show)
instance FromJSON CRSProperty where
  parseJSON (Object v) = CRSProperty <$>
                         v .: "name"
  parseJSON _ = mzero
{- Represents a polygon associated with a time zone. A time zone region can have many polygons -}
--data TZPoly = TZPoly {
--            tzname::String
--          , tzcoords::[(Double,Double)]
--           } deriving (Show,Read,Eq, Ord,Generic)
--instance DB.Binary TZPoly
{- A polygon that has a collection of indexed polygon coordinates -}
-- | Time-zone polygon stored as coordinate-pair segments plus its
-- longitude bounds (used for fast rejection when searching).
data TZPoly = TZPoly {
    tzbinname::String
  , tzbincoords::[((Double,Double),(Double,Double))]
  , tzbinminlong::Double
  , tzbinmaxlong::Double
  } deriving (Show, Read,Generic,Eq,Ord)
instance DB.Binary TZPoly
{- Collection of indexed polygons -}
data TZPolys = TZPolys { tzbins::[TZPoly]} deriving (Show,Read, Generic)
instance DB.Binary TZPolys
-- | One GeoJSON feature: its type tag, the time-zone id properties, and
-- the polygon geometry.
data TZFeature = TZFeature {
  fType::String
  , fproperties::TZID
  , geometry::Geometry
  }deriving (Show)
instance FromJSON TZFeature where
  parseJSON (Object v) = TZFeature <$>
                         v .: "type" <*>
                         v .: "properties" <*>
                         v .: "geometry"
  parseJSON _ = mzero
-- | The "TZID" property, e.g. \"America/New_York\".
data TZID = TZID {
  tzid::String
  } deriving (Show)
instance FromJSON TZID where
  parseJSON (Object v) = TZID <$>
                         v .: "TZID"
  parseJSON _ = mzero
-- | GeoJSON geometry: nested coordinate arrays as emitted by the source
-- data set.
data Geometry = Geometry {
  gType::String
  , coordinates::[[[Double]]]
  } deriving (Show)
instance FromJSON Geometry where
  parseJSON (Object v) = Geometry <$>
                         v .: "type" <*>
                         v .: "coordinates"
  parseJSON _ = mzero
| danplubell/tzworld-builder | library/Data/TZworld/Internal/Types.hs | mit | 3,046 | 0 | 11 | 849 | 740 | 421 | 319 | 81 | 0 |
module Manip where
-- | Append an exclamation mark.
addBang :: String -> String
addBang = (++ "!")
-- | Index into the reference sentence "Curry is awesome!".
-- Partial: out-of-range indices raise an exception (exercise code).
letterIndex :: Int -> Char
letterIndex = ("Curry is awesome!" !!)
-- | Rearrange "Curry is awesome" into "awesome is Curry" by splicing
-- fixed character ranges of the input.
rvrs :: String -> String
rvrs x = concat [drop 9 x, take 4 (drop 5 x), take 5 x]
-- | Drop the first nine characters.
dropNine :: String -> String
dropNine = drop 9
module SkelBNF where
-- Haskell module generated by the BNF converter
import AbsBNF
import ErrM
-- BNFC-generated skeleton: every case falls through to 'failure'.
type Result = Err String
-- | Placeholder result for a not-yet-implemented syntax case.
failure :: Show a => a -> Result
failure x = Bad $ "Undefined case: " ++ show x
-- Skeleton traversals over the BNF AST, one per syntactic category.
-- Each constructor case is a stub to be filled in by the implementer.
transNonTerminal :: NonTerminal -> Result
transNonTerminal x = case x of
  NonTerminal str  -> failure x
transSyntax :: Syntax -> Result
transSyntax x = case x of
  SyntaxNil  -> failure x
  SyntaxCons rule syntax  -> failure x
transRule :: Rule -> Result
transRule x = case x of
  Rule nonterminal expression  -> failure x
transExpression :: Expression -> Result
transExpression x = case x of
  ExpressionList list  -> failure x
  ExpressionOpt list expression  -> failure x
transList :: List -> Result
transList x = case x of
  ListOne term  -> failure x
  ListCons term list  -> failure x
transTerm :: Term -> Result
transTerm x = case x of
  Literal str  -> failure x
  NonTerm nonterminal  -> failure x
| athanclark/bnfm | bnfc-constructions/BNF/SkelBNF.hs | mit | 926 | 0 | 8 | 205 | 306 | 149 | 157 | 28 | 2 |
module Main where
import Graphics.UI.GLUT
import Graphics.Rendering.OpenGL
--import Control.Monad.Trans ( lift )
--import Control.Monad.IO.Class ( liftIO )
import FRP.Sodium ( Reactive, Behaviour, sync, sample, listen, value, newBehaviour )
-- | Initialize GLUT, create the window, and connect the reshape and
-- display callbacks through a Sodium behaviour holding the current
-- window dimensions (Nothing until the first reshape).
main :: IO ()
main = do
  (sb, sf) <- sync $ newBehaviour Nothing :: IO (Behaviour (Maybe (Int, Int)), Maybe (Int, Int) -> Reactive ())
  getArgsAndInitialize
  createWindow "Main"
  reshapeCallback $= Just (reshape sf)
  displayCallback $= display sb
  mainLoop
-- Small monomorphic wrappers to pin GLfloat types at call sites.
color3 :: GLfloat -> GLfloat -> GLfloat -> IO ()
color3 r g b = color $ Color3 r g b
vertex2 :: GLfloat -> GLfloat -> IO ()
vertex2 x y = vertex $ Vertex2 x y
vertex3 :: GLfloat -> GLfloat -> GLfloat -> IO ()
vertex3 x y z = vertex $ Vertex3 x y z
type GlColor = Color3 GLfloat
-- | Emit one horizontal row of points at height @y@, interpolating the
-- color from @lColor@ (x=0) to @rColor@ (x=w-1). Must be called inside
-- a renderPrimitive Points block.
makeGradientRow :: Int -> GLfloat -> GlColor -> GlColor -> IO ()
makeGradientRow iw y lColor rColor =
  let w = fromIntegral iw
  in mapM_ (renderPoint lColor rColor y w . fromIntegral) (take iw [0..])
  where
    -- NOTE: the inner y/w bindings shadow the outer ones (same values
    -- are passed in, so behavior is unaffected).
    renderPoint :: GlColor -> GlColor -> GLfloat -> GLfloat -> GLfloat -> IO ()
    renderPoint lc rc y w n = do
      let c = k `onRange` (lc,rc)
          k = x/w
          x = n
      color c
      vertex2 x y
-- | Fill a @iw@-by-@ih@ region with a horizontal gradient, one row at a
-- time; every row interpolates from @lColor@ (left) to @rColor@ (right).
--
-- Cleanup: the previous version ran @iw@/@ih@ through no-op
-- 'fromIntegral' conversions (Int -> Int) before use, which suggested a
-- float conversion that never happened. The values are now used directly.
makeLinearGradient :: Int -> Int -> GlColor -> GlColor -> IO ()
makeLinearGradient iw ih lColor rColor =
  -- The row index list is [GLfloat]: makeGradientRow's second argument
  -- fixes the element type of [0..].
  mapM_ (\row -> makeGradientRow iw row lColor rColor) (take ih [0..])
-- NOTE(review): the binder names below (blc trc brc tlc) do not match
-- the haddock parameter order (top-left, top-right, bottom-left,
-- bottom-right). Positionally, the 3rd argument (documented "top left")
-- is bound to blc and the 6th ("bottom right") to tlc -- confirm the
-- intended corner assignment against a rendered window before renaming.
makeQuadraGradient :: Int
                   -- ^ width
                   -> Int
                   -- ^ height
                   -> GlColor
                   -- ^ top left corner's color
                   -> GlColor
                   -- ^ top right corner's color
                   -> GlColor
                   -- ^ bottom left corner's color
                   -> GlColor
                   -- ^ bottom right corner's color
                   -> IO ()
makeQuadraGradient iw ih blc trc brc tlc = do
  -- w is computed but unused; renderRow receives iw directly.
  let h = fromIntegral ih
      w = fromIntegral iw
  mapM_ (renderRow h iw tlc trc brc blc) (take ih [0..])
  where
    -- For each row, interpolate a left and right edge color between the
    -- corner colors, then draw a horizontal gradient row between them.
    renderRow h w tlc trc brc blc n = do
      let yLeftCol = k `onRange` (tlc, blc)
          yRightCol = k `onRange` (trc, brc)
          y = n
          k = n/h
      makeGradientRow w y yLeftCol yRightCol
-- | Linearly interpolate between two colors; @k@ in [0,1] picks a point
-- on the segment from the first color (k=0) to the second (k=1).
onRange :: GLfloat -> (GlColor , GlColor) -> GlColor
k `onRange` (Color3 ar ag ab, Color3 br bg bb) =
  Color3 (lerp ar br) (lerp ag bg) (lerp ab bb)
  where
    lerp a b = a + (b - a) * k
-- | Display callback: sample the current window dimensions from the
-- behaviour and, if a reshape has occurred, paint a four-corner
-- gradient point field.
display :: Behaviour (Maybe (Int,Int)) -> DisplayCallback
display b = do
  clear [ ColorBuffer ]
  dimPairMay <- sync $ sample b
  case dimPairMay of
    -- No reshape yet: nothing to draw.
    Nothing -> return ()
    Just (w,h) -> do
      renderPrimitive Points $ do
        makeQuadraGradient w h (Color3 0.5 0.25 0.75) (Color3 0.75 0.25 0.25) (Color3 0.25 0.5 0.75) (Color3 0.75 0.5 0.5)
  flush
-- | Reshape callback: reset viewport and projection to the new window
-- size, then push the size into the FRP behaviour so 'display' can use
-- it.
--
-- Cleanup: the previous version destructured the size twice (once as
-- GLsizei for ortho2D, once as CInt for the behaviour); both paths now
-- convert from the single pattern match.
reshape :: (Maybe (Int,Int) -> Reactive ()) -> ReshapeCallback
reshape f size@(Size iw ih) = do
  viewport $= (Position 0 0, size)
  matrixMode $= Projection
  loadIdentity
  ortho2D 0 (fromIntegral iw) 0 (fromIntegral ih)
  matrixMode $= Modelview 0
  loadIdentity
  -- Publish the new dimensions for the display callback.
  sync $ f $ Just (fromIntegral iw, fromIntegral ih)
  print size
  postRedisplay Nothing
| geraldus/OpenGLSodium1 | src/QuadGrad/Main.hs | mit | 3,523 | 0 | 17 | 1,222 | 1,252 | 630 | 622 | 84 | 2 |
-- Remove Duplicates
-- http://www.codewars.com/kata/53e30ec0116393fe1a00060b
module RemoveDuplicates where
-- | Drop every later occurrence of each element, keeping first
-- appearances in their original order (like 'Data.List.nub'; O(n^2),
-- fine for kata-sized inputs).
unique :: Eq a => [a] -> [a]
unique []       = []
unique (y : ys) = y : unique [ z | z <- ys, z /= y ]
| gafiatulin/codewars | src/7 kyu/RemoveDuplicates.hs | mit | 200 | 0 | 9 | 32 | 74 | 41 | 33 | 4 | 1 |
-- Tutorials/10 Days of Statistics/Day 5
{-# language TypeApplications #-}
module HackerRank.Tutorials.Statistics.Day5 where
import qualified Text.Printf as P
import qualified Sandbox.Statistics as S
import qualified Sandbox.Util.IO as U
-- | Poisson Distribution 1: read the distribution mean and the event
-- count from stdin, then print the Poisson probability to three
-- decimal places.
main1 :: IO ()
main1 = do
  mean <- U.readLine @Double
  count <- U.readLine @Int
  P.printf "%.3f\n" (S.poisson count mean)
| 4e6/sandbox | haskell/HackerRank/Tutorials/Statistics/Day5.hs | mit | 396 | 0 | 11 | 70 | 105 | 59 | 46 | 12 | 1 |
module IsPrime where
-- | Trial-division primality test.
--
-- Bug fix: numbers below 2 (0, 1 and negatives) previously came back
-- True because the divisor comprehension was empty and @and [] = True@.
-- Also stops enumerating candidates at sqrt n via takeWhile instead of
-- filtering the full range [2..n-1].
isPrime :: Integer -> Bool
isPrime n = n >= 2 && all indivisible candidates
  where
    candidates    = takeWhile (\i -> i * i <= n) [2 ..]
    indivisible i = n `mod` i /= 0
{-# LANGUAGE TemplateHaskell #-}
module Base.Geometry where
import Control.Lens
-- | 2D shapes on an integer grid: axis-aligned rectangles, line
-- segments (start/end points), and single points.
data Shape = Rectangle { _rectX :: Int, _rectY :: Int, _rectW :: Int, _rectH :: Int } | Line { _start :: (Int,Int), _end :: (Int,Int) }
           | Point { _ptX :: Int, _ptY :: Int } deriving (Eq,Show)
-- | Lens onto a shape's x coordinate. For a Line this is the x of the
-- start point; setting it moves only the start point.
x :: Lens' Shape Int
x = lens getX setX
  where
    getX (Rectangle x _ _ _) = x
    getX (Point x _) = x
    getX (Line (x,_) _) = x
    setX (Rectangle x y z w) x' = (Rectangle x' y z w)
    setX (Point x y) x' = (Point x' y)
    setX (Line (x,y) e) x' = (Line (x',y) e)
-- | Lens onto a shape's y coordinate. For a Line this is the y of the
-- start point; setting it moves only the start point (mirroring 'x').
y :: Lens' Shape Int
y = lens getY setY
  where
    getY (Rectangle _ y _ _) = y
    getY (Point _ y) = y
    getY (Line (_,y) _) = y
    setY (Rectangle x y z w) y' = (Rectangle x y' z w)
    setY (Point x y) y' = (Point x y')
    -- Bug fix: this case was missing, so setting y on a Line crashed
    -- with an inexhaustive-pattern error (setX handles Line).
    setY (Line (x,y) e) y' = (Line (x,y') e)
-- | Lens onto a shape's width. NOTE(review): the setter is a no-op for
-- Line and Point (and a Point's width reads as 1), so the lens laws do
-- not hold for those constructors -- confirm this is intentional.
width :: Lens' Shape Int
width = lens getW setW
  where
    getW (Rectangle _ _ w _) = w
    getW (Line (x1,_) (x2,_)) = abs(x1-x2)
    getW (Point _ _) = 1
    setW (Rectangle x y w h) w' = (Rectangle x y w' h)
    setW (Line s e) _ = (Line s e)
    setW (Point x y) _ = (Point x y)
-- | Lens onto a shape's height; same law caveats as 'width'.
height :: Lens' Shape Int
height = lens getH setH
  where
    getH (Rectangle _ _ _ h) = h
    getH (Line (_,y1) (_,y2)) = abs(y1-y2)
    getH (Point _ _) = 1
    setH (Rectangle x y w h) h' = (Rectangle x y w h')
    setH (Line s e) _ = (Line s e)
    setH (Point x y) _ = (Point x y)
-- | Intersection test between every pair of shape constructors. The
-- relation is kept symmetric by delegating the mirrored cases.
collides :: Shape -> Shape -> Bool
-- Rectangles overlap when their x extents and y extents both overlap.
collides (Rectangle x y w h) (Rectangle x1 y1 w1 h1) = (inRange x w x1 w1) && (inRange y h y1 h1)
  where
    inRange p1 l1 p2 l2 = (p1 <= p2) && (p2 <= p1 + l1) || (p2 <= p1) && (p1 <= p2 +l2)
collides (Point x y) (Point x1 y1) = (x == x1) && (y == y1)
-- Point-in-rectangle: inclusive on both edges of each axis.
collides (Rectangle x y w h) (Point x1 y1) = (inRange x1 x w) && (inRange y1 y h)
  where
    inRange p1 p2 l2 = (p2 <= p1) && (p1 <= p2 + l2)
collides p@(Point _ _) r@(Rectangle _ _ _ _) = collides r p
-- Segment/segment: solve the parametric intersection for (t, u); the
-- segments cross when both parameters lie in [0,1]. crossDir == 0 means
-- parallel (reported as no collision unless the segments are equal).
collides l1@(Line (x',y') (x1',y1')) l2@(Line (x2',y2') (x3',y3')) = if l1 == l2 then True else (if (crossDir == 0) then False else (0 <= t) && (t <= 1) && (0 <= u) && (u <= 1))
  where
    crossDir = dx*dy1 - dx1*dy
    t = ((x2-x)*dy1 - (y2-y)*dx1) / (dx*dy1 - dx1*dy)
    u = ((x2-x)*dy - (y2-y)*dx) / (dx*dy1 - dx1*dy)
    dx = x1 - x
    dy = y1 - y
    dx1 = x3 - x2
    dy1 = y3 - y2
    x = fromIntegral x'
    x1 = fromIntegral x1'
    x2 = fromIntegral x2'
    x3 = fromIntegral x3'
    y = fromIntegral y'
    y1 = fromIntegral y1'
    y2 = fromIntegral y2'
    y3 = fromIntegral y3'
-- Point-on-line: approximate collinearity check against the line's
-- slope with a 0.1 tolerance. NOTE(review): divides by (lx1 - lx), so a
-- vertical line yields a division by zero / NaN comparison -- confirm
-- vertical segments are out of scope for callers.
collides (Point x' y') (Line (lx',ly') (lx1',ly1')) = abs((y - ly1) - ((ly1 - ly)/(lx1 - lx)) * (x - lx1)) < 0.1
  where
    x = fromIntegral x'
    y = fromIntegral y'
    lx = fromIntegral lx'
    ly = fromIntegral ly'
    lx1 = fromIntegral lx1'
    ly1 = fromIntegral ly1'
collides l@(Line _ _) p@(Point _ _) = collides p l
-- Rectangle/segment: the segment hits the rectangle iff it crosses any
-- of the four edges. NOTE(review): a segment wholly inside the
-- rectangle crosses no edge and reports False -- confirm intended.
collides (Rectangle x y w h) l@(Line _ _) = (collides (Line (x,y) (x+w,y)) l) ||
                                            (collides (Line (x,y) (x,y+h)) l) ||
                                            (collides (Line (x+w,y) (x+w,y+h)) l) ||
                                            (collides (Line (x,y+h) (x+w,y+h)) l)
collides l@(Line _ _) r@(Rectangle _ _ _ _) = collides r l
| mdietz94/haskellgame | src/Base/Geometry.hs | mit | 2,969 | 6 | 13 | 778 | 1,863 | 990 | 873 | 72 | 5 |
module Y2016.M09.D06.Exercise where
import Data.Aeson
import Network.HTTP.Conduit
-- available from the @1HaskellADay git repository
import qualified Data.BlockChain.Block.Summary as Smy
import Data.BlockChain.Block.Transactions (Transaction)
import qualified Data.BlockChain.Block.Transactions as Txn
import Data.BlockChain.Block.Types
{--
There is a reason the summary is called the summary, for, when we download the
entire block we get:
-rw-r--r-- 1 geophf staff 4020041 Sep 6 00:55 lateblk.json
-rw-r--r-- 1 geophf staff 12763 Sep 6 00:40 latesum.json
or, put another way, the latest block summary is 12k, but the entire block,
with all its transactions is 4M!
Eep!
Let's just deal with blocks over the wire for now, instead of putting 4 meg
into this git repository with one push.
From the above Summary import, we know how to load the latest block summary.
Do that, get the hash for the block, and from the hash, download the entire
block into memory and extract the list of transactions.
--}
-- | A full blockchain block as returned by the rawblock endpoint:
-- header fields plus the complete transaction list (which is what makes
-- the full block ~4MB vs the ~12KB summary).
data Block = Block { blockhash :: Hash, ver :: Integer, prevBlock :: String,
                     merkleRoot :: String, time, bits, fee, nonce :: Integer,
                     nTx, size :: Int, blockIdx :: Integer, mainChain :: Bool,
                     height, receivedTime :: Integer, relayedBy :: String,
                     tx :: [Transaction] }
   deriving (Eq, Ord, Show)
instance FromJSON Block where
   -- Exercise stub: to be implemented by the solver.
   parseJSON = undefined
-- | Base URL; append a block hash to fetch that block's full JSON.
rawBlockURL :: FilePath
rawBlockURL = "https://blockchain.info/rawblock/"
-- | Exercise stub: fetch and decode the block named by the hash.
readBlock :: FilePath -> Hash -> IO Block
readBlock = undefined
-- hint look at above Summary import on how to read in the latest summary, then
-- look at how to extract the block-hash from the summary report.
-- How many transactions are there in this block? What is the average size of
-- the transactions?
-- We should be able to link the transaction in this full block with the
-- summary block transaction index ... we'll get to doing that another day.
| geophf/1HaskellADay | exercises/HAD/Y2016/M09/D06/Exercise.hs | mit | 1,992 | 0 | 9 | 411 | 213 | 143 | 70 | 19 | 1 |
module Types.API where
import Haste.App (Client, Remote, Server)
-- | Remote endpoints exposed to the Haste client: poll for a request
-- chunk, and send an action string back to be performed in Vim.
data API = API {
    getRequestChunk :: Remote (Server String)
  , performActionInVim :: Remote (String -> Server ())
  }
-- | Actions the client can request; currently only opening a file.
data Action = OpenFile{
    filename :: String
  } deriving Show
| ababkin/railoscopy | src/Types/API.hs | mit | 270 | 0 | 13 | 69 | 85 | 49 | 36 | 8 | 0 |
module JSONSchema.Validator.Draft4.Any where
import Import hiding ((<>))
import Data.Aeson.TH (constructorTagModifier)
import Data.Char (toLower)
import qualified Data.HashMap.Strict as HM
import Data.List.NonEmpty (NonEmpty((:|)))
import qualified Data.List.NonEmpty as NE
import qualified Data.Scientific as SCI
import Data.Semigroup
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Text.Encoding.Error (UnicodeException)
import qualified JSONPointer as JP
import Network.HTTP.Types.URI (urlDecode)
import JSONSchema.Validator.Reference (BaseURI(..), Scope(..),
URIAndFragment,
resolveReference)
import qualified JSONSchema.Validator.Utils as UT
--------------------------------------------------
-- * $ref
--------------------------------------------------
-- | The value of a schema's \"$ref\" keyword: a URI reference to the
-- schema that should be used for validation instead.
newtype Ref
    = Ref { _unRef :: Text }
    deriving (Eq, Show)
instance FromJSON Ref where
    parseJSON = withObject "Ref" $ \o ->
        Ref <$> o .: "$ref"
data RefInvalid err
    = RefResolution Text
      -- ^ Indicates a reference that failed to resolve.
      --
      -- NOTE: The language agnostic test suite doesn't specify if this should
      -- cause a validation error or should allow data to pass. We choose to
      -- return a validation error.
      --
      -- Also note that ideally we would enforce in the type system that any
      -- failing references be dealt with before valididation. Then this could
      -- be removed entirely.
    | RefPointerResolution JSONPointerError
    | RefLoop Text VisitedSchemas URIAndFragment
    | RefInvalid Text Value (NonEmpty err)
      -- ^ 'Text' is the URI and 'Value' is the linked schema.
    deriving (Eq, Show)
-- | The (URI, fragment) pairs already entered while following $refs;
-- used to detect reference cycles.
newtype VisitedSchemas
    = VisitedSchemas { _unVisited :: [URIAndFragment] }
    deriving (Eq, Show, Semigroup, Monoid)
-- | Validate data against the schema a \"$ref\" points to: resolve the
-- reference against the current scope, detect loops, fetch the target
-- document and subschema, and run the supplied validator on it.
refVal
  :: forall err schema. (FromJSON schema, ToJSON schema)
  => (Text -> Maybe schema)
  -- ^ Look up a schema.
  -> (BaseURI -> schema -> BaseURI)
  -- ^ Update scope (needed after moving deeper into nested schemas).
  -> (VisitedSchemas -> Scope schema -> schema -> Value -> [err])
  -- ^ Validate data.
  -> VisitedSchemas
  -> Scope schema
  -> Ref
  -> Value
  -> Maybe (RefInvalid err)
refVal getRef updateScope val visited scope (Ref reference) x
    -- Re-entering a (URI, fragment) pair we've already visited means the
    -- $ref chain is circular.
    | (mURI, mFragment) `elem` _unVisited visited =
        Just (RefLoop reference visited (mURI, mFragment))
    | otherwise = leftToMaybe $ do
        -- Get the referenced document
        (newScope, doc) <- first RefResolution
                         $ getDocument getRef updateScope scope mURI reference
        -- Get the correct subschema within that document.
        res <- case mFragment of
                   Nothing -> Right (newScope, doc)
                   Just fragment -> first RefPointerResolution
                                  $ resolveFragment updateScope newScope fragment
        let (finalScope, schema) = res
        -- Check if that schema is valid.
        let newVisited = VisitedSchemas [(_documentURI newScope, mFragment)]
                      <> visited
            failures = val newVisited finalScope schema x
        first (RefInvalid reference (toJSON schema))
            . maybeToLeft ()
            $ NE.nonEmpty failures
  where
    mURI :: Maybe Text
    mFragment :: Maybe Text
    (mURI, mFragment) = resolveReference (_currentBaseURI scope) reference
-- | Fetch the document a reference targets: either the current
-- top-level document (when the reference has no URI part) or an
-- external one via the lookup function.
getDocument
  :: forall schema. (Text -> Maybe schema)
  -> (BaseURI -> schema -> BaseURI)
  -> Scope schema
  -> Maybe Text
  -> Text
  -> Either Text (Scope schema, schema)
  -- ^ 'Left' is the URI of the document we failed to resolve.
getDocument getRef updateScope scope mURI reference =
    case mURI <* fst (resolveReference (BaseURI Nothing) reference) of
        Nothing  -> Right topOfThisDoc
        Just uri ->
            case getRef uri of
                Nothing -> Left uri
                Just s  -> Right ( Scope s mURI (updateScope (BaseURI mURI) s)
                                 , s
                                 )
  where
    -- Restart scope at the top of the current document, re-applying its
    -- own "id" (if any) to the base URI.
    topOfThisDoc :: (Scope schema, schema)
    topOfThisDoc =
        ( scope { _currentBaseURI =
                      updateScope (BaseURI (_documentURI scope))
                                  (_topLevelDocument scope)
                }
        , _topLevelDocument scope
        )
-- | Everything that can go wrong while resolving a JSON Pointer
-- fragment against a schema document.
data JSONPointerError
    = URLDecodingError UnicodeException
      -- ^ Aspirationally internal.
    | FormatError JP.FormatError
    | ResolutionError JP.ResolutionError
    | SubschemaDecodingError Text
      -- ^ Aspirationally internal.
    deriving (Eq, Show)
-- | Resolve a URL-encoded JSON Pointer fragment against the scope's
-- top-level document, token by token, updating the base URI whenever an
-- intermediate object carries an \"id\".
resolveFragment
    :: (FromJSON schema, ToJSON schema)
    => (BaseURI -> schema -> BaseURI)
    -> Scope schema
    -> Text
    -> Either JSONPointerError (Scope schema, schema)
resolveFragment updateScope scope fragment = do
    urlDecoded <- first URLDecodingError
                . decodeUtf8'
                . urlDecode True
                . encodeUtf8
                $ fragment
    JP.Pointer tokens <- first FormatError (JP.unescape urlDecoded)
    let acc = (toJSON (_topLevelDocument scope), _currentBaseURI scope)
    (schemaVal, base) <- foldM go acc tokens
    schema <- first SubschemaDecodingError (fromJSONEither schemaVal)
    pure (scope { _currentBaseURI = base }, schema)
  where
    -- We have to step through the document JSON Pointer token
    -- by JSON Pointer token so that we can update the scope
    -- based on each @"id"@ we encounter.
    --
    -- TODO: Do we need specialized code to skip @"id"@s such
    -- as property keys that aren't meant to change scope?
    -- Perhaps this should be added to the language agnostic
    -- test suite as well.
    --
    -- In the meantime 'newBaseURIFromFragment' drops all keys
    -- from JSON objects except "id", which at least prevents
    -- SubschemaDecodingError in a situation where one of the
    -- values we step through isn't a valid schema.
    go :: (Value, BaseURI)
       -> JP.Token
       -> Either JSONPointerError (Value, BaseURI)
    go (lastVal, baseURI) tok = do
        v <- first ResolutionError (JP.resolveToken tok lastVal)
        newBase <- newBaseURIFromFragment updateScope baseURI v
        Right (v, newBase)
-- | Update the 'BaseURI' (the store of the current "id" value)
-- after resolving one token of a JSON Pointer and stepping into
-- a new 'Value'.
newBaseURIFromFragment
    :: FromJSON schema
    => (BaseURI -> schema -> BaseURI)
    -> BaseURI
    -> Value
    -> Either JSONPointerError BaseURI
newBaseURIFromFragment updateScope baseURI v =
    case v of
        Object hm -> do
            -- Keep only the "id" key so that intermediate values which
            -- aren't valid schemas still decode (see resolveFragment).
            let hmWithOnlyId = case HM.lookup idKey hm of
                                   Nothing    -> mempty
                                   Just idVal -> HM.singleton idKey idVal
            schema <- first SubschemaDecodingError (fromJSONEither (Object hmWithOnlyId))
            Right (updateScope baseURI schema)
        -- Non-objects can't carry an "id"; scope is unchanged.
        _ -> Right baseURI
  where
    idKey :: Text
    idKey = "id"
--------------------------------------------------
-- * enum
--------------------------------------------------
-- | From the spec:
-- <http://json-schema.org/latest/json-schema-validation.html#anchor76>
--
-- > The value of this keyword MUST be an array.
-- > This array MUST have at least one element.
-- > Elements in the array MUST be unique.
--
-- NOTE: We don't enforce the uniqueness constraint in the haskell code,
-- but we do in the 'FromJSON' instance.
newtype EnumValidator
    = EnumValidator { _unEnumValidator :: NonEmpty Value }
    -- Given a choice, we'd prefer to enforce uniqueness through the type
    -- system over having at least one element. To use a 'Set' though we'd
    -- have to use 'OrdValue' here (there's no 'Ord' instance for plain Values)
    -- and we'd rather not make users mess with 'OrdValue'.
    deriving (Eq, Show)
instance FromJSON EnumValidator where
    parseJSON = withObject "EnumValidator" $ \o ->
        EnumValidator <$> o .: "enum"
-- | Generates a non-empty list of unique 'Value's. If deduplication
-- empties the generated list entirely, falls back to a single
-- arbitrary element.
instance Arbitrary EnumValidator where
    arbitrary = do
        xs <- (fmap.fmap) UT._unArbitraryValue arbitrary
        case NE.nonEmpty (toUnique xs) of
            Nothing -> EnumValidator . pure . UT._unArbitraryValue <$> arbitrary
            Just ne -> pure (EnumValidator ne)
      where
        -- Deduplicate via 'OrdValue', which supplies the missing
        -- 'Ord' instance for 'Value'. Note: this does not preserve
        -- the original ordering.
        toUnique :: [Value] -> [Value]
        toUnique = fmap UT._unOrdValue
                 . Set.toList
                 . Set.fromList
                 . fmap UT.OrdValue
-- | Failure report for 'enumVal': the validator plus the offending value.
data EnumInvalid
    = EnumInvalid EnumValidator Value
    deriving (Eq, Show)
-- | Validate @"enum"@: succeeds when the value is one of the listed
-- alternatives. A validator whose elements are not unique is treated
-- as vacuously successful (uniqueness is enforced by the 'FromJSON'
-- instance, not here).
enumVal :: EnumValidator -> Value -> Maybe EnumInvalid
enumVal validator@(EnumValidator allowed) value =
    if not (UT.allUniqueValues allowed) || value `elem` allowed
        then Nothing
        else Just (EnumInvalid validator value)
--------------------------------------------------
-- * type
--------------------------------------------------
-- | This is separate from 'TypeValidator' so that 'TypeValidator' can
-- be used to write 'JSONSchema.Draft4.Schema.Schema' without
-- messing up the 'FromJSON' instance of that data type.
newtype TypeContext
    = TypeContext { _unTypeContext :: TypeValidator }
    deriving (Eq, Show)
instance FromJSON TypeContext where
    parseJSON = withObject "TypeContext" $ \o ->
        TypeContext <$> o .: "type"
-- | The @"type"@ keyword accepts either a single type name or an
-- array of type names; both forms are represented here.
data TypeValidator
    = TypeValidatorString SchemaType
    | TypeValidatorArray (Set SchemaType)
    deriving (Eq, Show)
-- | Combining validators unions their type sets, with empty arrays
-- acting as absorbing elements (an empty set can never match).
instance Semigroup TypeValidator where
    (<>) x y
        | isEmpty x = x
        | isEmpty y = y
        | x == y = x
        | otherwise = TypeValidatorArray (setFromTypeValidator x
                                          `Set.union`
                                          setFromTypeValidator y)
      where
        isEmpty :: TypeValidator -> Bool
        isEmpty (TypeValidatorString _) = False
        isEmpty (TypeValidatorArray ts) = Set.null ts
    -- (<>) is idempotent (x <> x == x), so repeated application
    -- collapses to a single one.
    stimes = stimesIdempotent
instance FromJSON TypeValidator where
    -- Try the single-string form first, then the array form.
    parseJSON v = fmap TypeValidatorString (parseJSON v)
              <|> fmap TypeValidatorArray (parseJSON v)
instance ToJSON TypeValidator where
    toJSON (TypeValidatorString t) = toJSON t
    toJSON (TypeValidatorArray ts) = toJSON ts
instance Arbitrary TypeValidator where
    arbitrary = oneof [ TypeValidatorString <$> arbitrary
                      , TypeValidatorArray <$> arbitrary
                      ]
-- | The seven primitive types of JSON Schema Draft 4.
data SchemaType
    = SchemaObject
    | SchemaArray
    | SchemaString
    | SchemaNumber
    | SchemaInteger
    | SchemaBoolean
    | SchemaNull
    deriving (Eq, Ord, Show, Bounded, Enum, Generic)
-- Constructor tags are serialized by dropping the 6-character
-- "Schema" prefix and lowercasing, e.g. SchemaObject <-> "object".
instance FromJSON SchemaType where
    parseJSON = genericParseJSON
                    defaultOptions
                    { constructorTagModifier = fmap toLower . drop 6 }
instance ToJSON SchemaType where
    toJSON = genericToJSON
                 defaultOptions
                 { constructorTagModifier = fmap toLower . drop 6 }
instance Arbitrary SchemaType where
    arbitrary = arbitraryBoundedEnum
-- | Failure report for 'typeVal': the validator plus the offending value.
data TypeValidatorInvalid
    = TypeValidatorInvalid TypeValidator Value
    deriving (Eq, Show)
-- | Validate @"type"@: the value's JSON type must appear in the
-- validator's set of accepted types.
typeVal :: TypeContext -> Value -> Maybe TypeValidatorInvalid
typeVal (TypeContext tv) x
    | Set.null matches = Just (TypeValidatorInvalid tv x)
    | otherwise = Nothing
  where
    -- There can be more than one match because a 'Value' can be both a
    -- @"number"@ and an @"integer"@.
    matches :: Set SchemaType
    matches = Set.intersection okTypes (setFromTypeValidator tv)
    -- The set of 'SchemaType's the value qualifies as.
    okTypes :: Set SchemaType
    okTypes =
        case x of
            Null -> Set.singleton SchemaNull
            (Array _) -> Set.singleton SchemaArray
            (Bool _) -> Set.singleton SchemaBoolean
            (Object _) -> Set.singleton SchemaObject
            (String _) -> Set.singleton SchemaString
            (Number y) ->
                -- Integral numbers count as both "number" and "integer".
                if SCI.isInteger y
                    then Set.fromList [SchemaNumber, SchemaInteger]
                    else Set.singleton SchemaNumber
-- | Internal. Normalize both forms of 'TypeValidator' to a 'Set'
-- of accepted 'SchemaType's.
setFromTypeValidator :: TypeValidator -> Set SchemaType
setFromTypeValidator validator =
    case validator of
        TypeValidatorString single -> Set.singleton single
        TypeValidatorArray many    -> many
--------------------------------------------------
-- * allOf
--------------------------------------------------
newtype AllOf schema
    = AllOf { _unAllOf :: NonEmpty schema }
    deriving (Eq, Show)
instance FromJSON schema => FromJSON (AllOf schema) where
    parseJSON = withObject "AllOf" $ \o ->
        AllOf <$> o .: "allOf"
-- | Failures keyed by the (zero-based) index of the failing subschema.
newtype AllOfInvalid err
    = AllOfInvalid (NonEmpty (JP.Index, NonEmpty err))
    deriving (Eq, Show)
-- | Validate @"allOf"@: every subschema must validate the data.
-- Returns 'Nothing' (success) only when no subschema produced errors.
allOfVal
    :: forall err schema.
       (schema -> Value -> [err])
    -> AllOf schema
    -> Value
    -> Maybe (AllOfInvalid err)
allOfVal f (AllOf subSchemas) x = AllOfInvalid <$> NE.nonEmpty failures
  where
    -- Every subschema's error list, paired with its index.
    perhapsFailures :: [(JP.Index, [err])]
    perhapsFailures = zip (JP.Index <$> [0..])
                          (flip f x <$> NE.toList subSchemas)
    -- Keep only the entries that actually have errors.
    failures :: [(JP.Index, NonEmpty err)]
    failures = mapMaybe (traverse NE.nonEmpty) perhapsFailures
--------------------------------------------------
-- * anyOf
--------------------------------------------------
newtype AnyOf schema
    = AnyOf { _unAnyOf :: NonEmpty schema }
    deriving (Eq, Show)
instance FromJSON schema => FromJSON (AnyOf schema) where
    parseJSON = withObject "AnyOf" $ \o ->
        AnyOf <$> o .: "anyOf"
-- | Failures keyed by the (zero-based) index of the failing subschema.
newtype AnyOfInvalid err
    = AnyOfInvalid (NonEmpty (JP.Index, NonEmpty err))
    deriving (Eq, Show)
-- | Validate @"anyOf"@: at least one subschema must validate the data.
-- Success is detected by any subschema producing an empty error list.
anyOfVal
    :: forall err schema.
       (schema -> Value -> [err])
    -> AnyOf schema
    -> Value
    -> Maybe (AnyOfInvalid err)
anyOfVal f (AnyOf subSchemas) x
    -- An empty error list for any subschema means overall success.
    | any (null . snd) perhapsFailures = Nothing
    | otherwise = AnyOfInvalid <$> NE.nonEmpty failures
  where
    perhapsFailures :: [(JP.Index, [err])]
    perhapsFailures = zip (JP.Index <$> [0..])
                          (flip f x <$> NE.toList subSchemas)
    failures :: [(JP.Index, NonEmpty err)]
    failures = mapMaybe (traverse NE.nonEmpty) perhapsFailures
--------------------------------------------------
-- * oneOf
--------------------------------------------------
newtype OneOf schema
    = OneOf { _unOneOf :: NonEmpty schema }
    deriving (Eq, Show)
instance FromJSON schema => FromJSON (OneOf schema) where
    parseJSON = withObject "OneOf" $ \o ->
        OneOf <$> o .: "oneOf"
-- | @"oneOf"@ can fail in two directions: too many subschemas
-- matched, or none did.
data OneOfInvalid err
    = TooManySuccesses (NonEmpty (JP.Index, Value)) Value
      -- ^ The NonEmpty lists contains tuples whose contents
      -- are the index of a schema that validated the data
      -- and the contents of that schema.
    | NoSuccesses (NonEmpty (JP.Index, NonEmpty err)) Value
      -- ^ The NonEmpty lists contains tuples whose contents
      -- are the index of a schema that failed to validate the data
      -- and the failures it produced.
    deriving (Eq, Show)
-- | Validate @"oneOf"@: exactly one subschema must validate the data.
--
-- The first subschema is split off from the rest so the error
-- constructors can be fed 'NonEmpty' lists.
oneOfVal
    :: forall err schema. ToJSON schema
    => (schema -> Value -> [err])
    -> OneOf schema
    -> Value
    -> Maybe (OneOfInvalid err)
oneOfVal f (OneOf (firstSubSchema :| otherSubSchemas)) x =
    -- Producing the NonEmpty lists needed by the error constructors
    -- is a little tricky. If we had a partition function like this
    -- it might help:
    -- @
    -- (a -> Either b c) -> NonEmpty a -> Either (NonEmpty b, [c])
    --                                           ([b], NonEmpty c)
    -- @
    case (firstSuccess, otherSuccesses) of
        -- Only the first subschema matched: valid.
        (Right _, Nothing) -> Nothing
        -- The first subschema matched along with at least one other.
        (Right a, Just successes) -> Just (TooManySuccesses
                                              (a NE.<| successes) x)
        -- No subschema matched at all.
        (Left e, Nothing) -> Just (NoSuccesses (e :| otherFailures) x)
        -- Exactly one of the later subschemas matched: valid.
        (Left _, Just (_ :| [])) -> Nothing
        -- Several of the later subschemas matched.
        (Left _, Just successes) -> Just (TooManySuccesses successes x)
  where
    firstSuccess :: Either (JP.Index, NonEmpty err) (JP.Index, Value)
    firstSuccess =
        case NE.nonEmpty (f firstSubSchema x) of
            Nothing -> Right (JP.Index 0, toJSON firstSubSchema)
            Just errs -> Left (JP.Index 0, errs)
    otherPerhapsFailures :: [(JP.Index, Value, [err])]
    otherPerhapsFailures =
        zipWith
            (\index schema -> (index, toJSON schema, f schema x))
            -- BUGFIX: indices must start at 1 here — index 0 belongs to
            -- 'firstSubSchema' (see 'firstSuccess'). The previous @[0..]@
            -- reported wrong, duplicated indices in error values.
            (JP.Index <$> [1..])
            otherSubSchemas
    otherSuccesses :: Maybe (NonEmpty (JP.Index, Value))
    otherSuccesses = NE.nonEmpty
        $ mapMaybe (\(index,val,errs) ->
                        case errs of
                            [] -> Just (index,val)
                            _ -> Nothing
                   ) otherPerhapsFailures
    otherFailures :: [(JP.Index, NonEmpty err)]
    otherFailures = mapMaybe (traverse NE.nonEmpty . mid) otherPerhapsFailures
    -- Drop the middle element of a triple.
    mid :: (a,b,c) -> (a,c)
    mid (a,_,c) = (a,c)
--------------------------------------------------
-- * not
--------------------------------------------------
newtype NotValidator schema
    = NotValidator { _unNotValidator :: schema }
    deriving (Eq, Show)
instance FromJSON schema => FromJSON (NotValidator schema) where
    parseJSON = withObject "NotValidator" $ \o ->
        NotValidator <$> o .: "not"
-- | Failure report for 'notVal': the serialized subschema plus the value.
data NotValidatorInvalid
    = NotValidatorInvalid Value Value
    deriving (Eq, Show)
-- | Validate @"not"@: the data must /fail/ the wrapped subschema.
-- An empty error list from the subschema therefore means "not" failed.
notVal
    :: ToJSON schema =>
       (schema -> Value -> [err])
    -> NotValidator schema
    -> Value
    -> Maybe NotValidatorInvalid
notVal validate (NotValidator schema) value
    | null (validate schema value) = Just (NotValidatorInvalid (toJSON schema) value)
    | otherwise                    = Nothing
| seagreen/hjsonschema | src/JSONSchema/Validator/Draft4/Any.hs | mit | 17,818 | 0 | 17 | 5,070 | 4,136 | 2,208 | 1,928 | -1 | -1 |
module Main where
import Fass.Compiler
-- | Compile the hard-coded sample file and print the result.
main :: IO ()
main =
    -- TODO - parse ARGV + print help
    readFile "sample.scss" >>= compile >>= print
-- | Convenience entry point: compile the sample file and print the
-- compiler output without Show-quoting.
test :: IO ()
test = do
    contents <- readFile "sample.scss"
    rendered <- debugOutput contents
    putStrLn rendered

-- | Alias for 'compile' (kept as a separate name for GHCi use).
debugOutput :: String -> IO String
debugOutput = compile
| darthdeus/fass | src/Main.hs | mit | 297 | 0 | 8 | 65 | 88 | 45 | 43 | 10 | 1 |
{- Keyboard events handling -}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveTraversable #-}
module Lesson04 where
--
import qualified SDL
--
import Data.Maybe
--
import PixelPen
import Data.ListZipper
--
-- | Abstract input actions the lesson reacts to.
data Input = IDefault
           | IDown
           | IUp
           | IRight
           | ILeft
           deriving (Show, Eq, Ord, Enum)
-- | An 'Input' paired with some associated payload.
type Mapping a = (Input, a)
type MapKeycode = Mapping SDL.Keycode
type MapSurface = Mapping SDL.Surface
-- | A zipper over input mappings, wrapped for derived traversals.
newtype ZipperMap a = ZM (Zipper (Mapping a))
        deriving (Functor, Foldable, Traversable)
-- | Bitmap path for each input, with the default image as the
-- zipper's focus.
mapPicPath :: ZipperMap String
mapPicPath = ZM
           $ Z [] (IDefault, "./img/press.bmp")
                  [ (IDown, "./img/down.bmp")
                  , (IUp, "./img/up.bmp")
                  , (IRight, "./img/right.bmp")
                  , (ILeft, "./img/left.bmp")
                  ]
-- | Binding from abstract inputs to concrete SDL keycodes.
mapping :: [ MapKeycode ]
mapping = [ (IDefault, SDL.KeycodeEscape)
          , (IDown, SDL.KeycodeDown)
          , (IUp, SDL.KeycodeUp)
          , (IRight, SDL.KeycodeRight)
          , (ILeft, SDL.KeycodeLeft)
          ]
-- | Look up the abstract 'Input' bound to a concrete SDL keycode.
-- Returns the first matching binding, or 'Nothing' when the keycode
-- is unbound.
--
-- The previous hand-rolled recursion ended in a @k /= mk@ guard,
-- which GHC cannot see as exhaustive; Prelude 'lookup' over the
-- swapped pairs is total and behaves identically.
toInput :: [ MapKeycode ] -> SDL.Keycode -> Maybe Input
toInput bindings keycode = lookup keycode [ (k, i) | (i, k) <- bindings ]
-- to decide a surface to blit from given event info.
--
-- Walks the zipper left/right until the focused mapping's 'Input'
-- matches the input decoded from the event. NOTE(review): this
-- relies on the zipper being ordered by 'Input' and on 'left'/
-- 'right' behaving sanely at the ends — confirm against
-- Data.ListZipper before reuse.
eventToSurface :: Zipper MapSurface
               -> SDL.Event
               -> Maybe SDL.Surface
eventToSurface s@(Z xs (i,sur) zs) event =
  case input of
    Just LT -> eventToSurface (right s) event
    Just GT -> eventToSurface (left s) event
    Just EQ -> Just sur
    Nothing -> Nothing
  -- 'Nothing' when the event carries no keycode or the key is unbound.
  where input = compare i <$> (getKeycode event >>= toInput mapping)
-- | Lesson entry point: initialize SDL, open a window, load all
-- bitmaps from 'mapPicPath', then run the event loop, blitting the
-- surface selected by 'eventToSurface' on each event.
lesson04 :: IO ()
lesson04
  = (^.^) sdlInitVideo () -- initialize SDL
  $ \() -> (^.^) defaultWindow "Lesson04" -- create window
  $ \w -> (^.^) surface w -- get surface from given window
  $ \s -> (^.^/) bmpSurface mapPicPath -- load every bitmap in the zipper
  $ \(ZM l@(Z xs (k, sur) zs)) -> update_ sur $ eventHandle_ checkDefaultQuit
      (\f t -> do
         -- pick the surface for this event, keeping the old one
         -- if the event doesn't map to anything
         let newS = eventToSurface l f
         return $ fromMaybe t newS
      )
      (\f t -> do
         -- blit the chosen surface and refresh the window
         SDL.surfaceBlit t Nothing s Nothing
         SDL.updateWindowSurface w
      )
| rueshyna/sdl2-examples | src/Lesson04.hs | mit | 2,348 | 0 | 22 | 772 | 697 | 383 | 314 | 61 | 4 |
module Main where
import Solidran.Hamm.Detail
-- | Read two lines from stdin and print their Hamming distance.
--
-- The previous irrefutable @let [a, b] = lines c@ crashed with an
-- opaque pattern-match failure on malformed input; now we report a
-- readable error instead.
main :: IO ()
main = do
    c <- getContents
    case lines c of
        [a, b] -> putStr . show $ hammingDist a b
        _      -> error "Main: expected exactly two input lines"
| Jefffrey/Solidran | src/Solidran/Hamm/Main.hs | mit | 154 | 0 | 10 | 42 | 67 | 34 | 33 | 7 | 1 |
{- |
Module : $Header$
Description : XML processing for the CMDL interface
Copyright : uni-bremen and DFKI
License : GPLv2 or higher, see LICENSE.txt
Maintainer : r.pascanu@jacobs-university.de
Stability : provisional
Portability : portable
PGIP.XMLparsing contains commands for parsing or creating XML messages
-}
module PGIP.XMLparsing where
import PGIP.XMLstate
import CMDL.DataTypes
import CMDL.DataTypesUtils
import CMDL.DgCommands (cUse)
import CMDL.ProcessScript
import CMDL.Interface (cmdlRunShell)
import Interfaces.DataTypes
import Interfaces.Command
import Interfaces.Utils (emptyIntIState)
import Driver.Options
import Driver.ReadFn
import qualified Static.ToXml as ToXml
import Static.DevGraph
import Common.LibName
import Common.ToXml
import Text.XML.Light as XML
import Network (connectTo, PortID (PortNumber), accept, listenOn)
import System.IO
import Data.List (isInfixOf)
{- | Generates the XML packet that contains information about what
commands can the interface respond to -}
-- In XML mode this appends an @acceptedpgipelems@ element listing
-- every PGIP command name we handle; in plain-text mode the state is
-- returned unchanged.
addPGIPHandshake :: CmdlPgipState -> CmdlPgipState
addPGIPHandshake pgipData = if useXML pgipData
  then addPGIPElement pgipData
    $ add_attr (mkAttr "version" "2.0")
    $ unode "acceptedpgipelems" $ map genPgipElem
    [ "askpgip"
    , "askpgml"
    , "askprefs"
    , "getprefs"
    , "setprefs"
    , "proverinit"
    , "proverexit"
    , "startquiet"
    , "stopquiet"
    , "pgmlsymbolon"
    , "pgmlsymboloff"
    , "dostep"
    , "undostep"
    , "redostep"
    , "abortgoal"
    , "forget"
    , "restoregoal"
    , "askids"
    , "showid"
    , "askguise"
    , "parsescript"
    , "showproofstate"
    , "showctxt"
    , "searchtheorems"
    , "setlinewidth"
    , "viewdoc"
    , "doitem"
    , "undoitem"
    , "redoitem"
    , "aborttheory"
    , "retracttheory"
    , "loadfile"
    , "openfile"
    , "closefile"
    , "abortfile"
    , "changecwd"
    , "systemcmd"]
  else pgipData
{- | The function executes a communication step, i.e. waits for input,
processes the message and outputs the answer -}
-- Returns the updated PGIP state (with @stop = True@ on EOF) together
-- with the updated command-line state.
communicationStep :: CmdlPgipState -> CmdlState -> IO (CmdlPgipState, CmdlState)
communicationStep pgD st = do
  -- tries to read a packet from the input
  b <- hIsEOF (hin pgD)
  if b then return (pgD { stop = True }, st) else do
    tmp <- timeoutReadPacket (maxWaitTime pgD) pgD
    case tmp of
      Nothing -> if resendMsgIfTimeout pgD
        {- if the interface receives nothing in the given timeframe
        described by maxWaitTime and the flag resendMsgIfTimeout is
        set, that the interface resends last packet assuming that last
        send was a fail -}
                   then do
                          nwpgD <- sendPGIPData (hetsOpts st) pgD
                          communicationStep nwpgD st
        {- if the flag is not set, that the network waits some more for the
        broker to respond or give a new command -}
                   else communicationStep pgD st
      {- if something is received, that the commands are parsed and executed
      and a response is generated -}
      Just smtxt ->
        do
          let cmds = parseMsg pgD smtxt
              refseqNb = getRefseqNb smtxt
          (nwSt, nwPgD) <- processCmds cmds st $ resetPGIPData $
            pgD { refSeqNb = refseqNb }
          -- reply format depends on whether the session is XML or plain text
          if useXML pgD then do
              nwPgipSt <- sendPGIPData (hetsOpts nwSt) nwPgD
              return (nwPgipSt, nwSt)
            else do
              nwPgD' <- sendMSGData (hetsOpts nwSt) nwPgD
              return (nwPgD', nwSt)
-- | Comunicate over a port: either listen on the port given by the
-- @listen@ option or connect to the host/port given by @connectH@/
-- @connectP@, then enter the PGIP loop on that socket handle.
cmdlListenOrConnect2Port :: HetcatsOpts -> CmdlState -> IO CmdlState
cmdlListenOrConnect2Port opts state = do
  let portNb = listen opts
      conPN = connectP opts
      hostName = connectH opts
      swXML = xmlFlag opts
  -- a value of -1 means the respective option was not supplied
  servH <- if portNb /= -1 then do
        putIfVerbose opts 1 $ "Starting hets. Listen to port " ++ show portNb
        servSock <- listenOn $ PortNumber $ fromIntegral portNb
        (servH, _, _) <- accept servSock
        return servH
      else if conPN /= -1 then do
        putIfVerbose opts 1 $ "Starting hets. Connecting to port "
          ++ show conPN ++ " on host " ++ hostName
        connectTo hostName $ PortNumber $ fromIntegral conPN
      else error "cmdlListenOrConnect2Port: missing port number"
  -- the same handle serves as both input and output
  cmdlStartLoop swXML servH servH 1000 state
{- | Reads from a handle, it waits only for a certain amount of time,
if no input comes it will return Nothing -}
-- In XML mode a whole @</pgip>@-terminated packet is read; otherwise
-- a single line.
timeoutReadPacket :: Int -> CmdlPgipState -> IO (Maybe String)
timeoutReadPacket untilTimeout st = do
  let h = hin st
  smtmp <- hWaitForInput h untilTimeout
  if smtmp then do
      ms <- if useXML st
              then readPacket [] h
              else hGetLine h
      return $ Just ms
    else return Nothing
-- | Waits until an entire XML packet has been read: keeps appending
-- lines (newline-terminated) to the accumulator until a line
-- containing the closing @</pgip>@ tag arrives.
readPacket :: String -> Handle -> IO String
readPacket sofar h = do
  line <- hGetLine h
  let collected = sofar ++ line ++ "\n"
  if "</pgip>" `isInfixOf` line
    then return collected
    else readPacket collected h
-- | Build the initial PGIP state for the given handles, send the
-- handshake (command list + ready marker), then enter 'waitLoop'.
cmdlStartLoop :: Bool -> Handle -> Handle -> Int -> CmdlState
              -> IO CmdlState
cmdlStartLoop swXML h_in h_out timeOut state = do
  pgData <- genCMDLPgipState swXML h_in h_out timeOut
  let pgD = addPGIPReady $ addPGIPHandshake $ resetPGIPData pgData
  pgD' <- sendPGIPData (hetsOpts state) pgD
  waitLoop pgD' state
-- | Run 'communicationStep' repeatedly until the PGIP state's @stop@
-- flag is set, then return the final command-line state.
waitLoop :: CmdlPgipState -> CmdlState -> IO CmdlState
waitLoop pgData st = communicationStep pgData st >>= continue
  where
    continue (pgData', st')
      | stop pgData' = return st'
      | otherwise    = waitLoop pgData' st'
{- | Runs a shell in which the communication is expected to be
through XML packets -}
-- Uses stdin/stdout with no read timeout (-1 = wait forever).
cmdlRunXMLShell :: CmdlState -> IO CmdlState
cmdlRunXMLShell = cmdlStartLoop True stdin stdout (-1)
-- | Processes a list of input files
-- Each file is either run as a proof script or loaded as a library
-- ('cUse'), depending on what 'guess' reports.
processInput :: HetcatsOpts -> [FilePath] -> CmdlState -> IO CmdlState
processInput opts ls state = case ls of
    [] -> return state
    l : ll -> (case guess l GuessIn of
                 ProofCommand -> cmdlProcessScriptFile
                 _ -> cUse) l state >>= processInput opts ll
-- | Top-level dispatcher: process the input files, then pick the
-- interaction mode from the options (remote socket, interactive XML
-- shell, interactive text shell, or batch/no shell).
cmdlRun :: HetcatsOpts -> IO CmdlState
cmdlRun opts =
  processInput opts (infiles opts) (emptyCmdlState opts) >>=
  if isRemote opts
    then cmdlListenOrConnect2Port opts
    else if interactive opts
           then if xmlFlag opts
                  then cmdlRunXMLShell
                  else cmdlRunShell
           else return
-- | Run a raw command string through the interpreter, then hand the
-- result to 'postProcessCmd' for the remaining queued commands.
processString :: [CmdlXMLcommands] -> String -> CmdlState -> CmdlPgipState
  -> IO (CmdlState, CmdlPgipState)
processString pl str st pgSt = do
  (nwSt, mCmd) <- cmdlProcessString "" 0 str st
  postProcessCmd pl nwSt pgSt mCmd
-- copy messages to pgip state
-- Returns the updated PGIP state together with the error text (empty
-- when the command succeeded).
processMsgs :: CmdlState -> CmdlPgipState -> (CmdlPgipState, String)
processMsgs nwSt pgSt =
  let o = output nwSt
      ms = outputMsg o
      ws = warningMsg o
      es = errorMsg o
  -- there should be at most one
  in (if null es then addPGIPAnswer ms ws pgSt else addPGIPError es pgSt, es)
-- | Run a single parsed 'Command' and post-process like 'processString'.
processCommand :: [CmdlXMLcommands] -> Command -> CmdlState -> CmdlPgipState
  -> IO (CmdlState, CmdlPgipState)
processCommand pl cmd st pgSt = do
  nwSt <- cmdlProcessCmd cmd st
  postProcessCmd pl nwSt pgSt (Just cmd)
-- postprocess a previously run command and recurse
-- On success: emit informational elements for the command and continue
-- with the remaining queued commands. On error: stop and mark ready.
postProcessCmd :: [CmdlXMLcommands] -> CmdlState -> CmdlPgipState
  -> Maybe Command -> IO (CmdlState, CmdlPgipState)
postProcessCmd pl nwSt0 pgSt mCmd = let
  (pgSt1, es) = processMsgs nwSt0 pgSt
  nwSt = nwSt0 { output = emptyCmdlMessage } -- remove messages form cmdl state
  in if null es then processCmds pl nwSt $ informCmd nwSt mCmd pgSt1 else
         return (nwSt, addPGIPReady pgSt1)
-- | After a library was loaded or a global (non-prove) command ran,
-- attach @informfileloaded@ / development-graph elements so the broker
-- can track state.
informCmd :: CmdlState -> Maybe Command -> CmdlPgipState -> CmdlPgipState
informCmd nwSt mCmd pgSt1 = case (getMaybeLib $ intState nwSt, mCmd) of
    (Just (lN, lEnv), Just cmd) -> case cmd of
      SelectCmd LibFile _ ->
          informDGraph lN lEnv $ addPGIPElement pgSt1
            $ add_attr (mkAttr "url" $ libNameToFile lN)
            $ unode "informfileloaded" ()
      GlobCmd g | g < ProveCurrent ->
          informDGraph lN lEnv pgSt1
      _ -> pgSt1
    _ -> pgSt1
-- | Serialize the named development graph as XML and append it to
-- the pending PGIP response.
informDGraph :: LibName -> LibEnv -> CmdlPgipState -> CmdlPgipState
informDGraph lN lEnv pgSt =
  addPGIPElement pgSt $ unode "informdevelopmentgraph"
    $ ToXml.dGraph lEnv lN
    $ lookupDGraph lN lEnv
-- | Executes given commands and returns output message and the new state
-- Dispatches each parsed PGIP command in order; most branches
-- translate the PGIP verb into the corresponding CMDL command/string.
processCmds :: [CmdlXMLcommands] -> CmdlState -> CmdlPgipState
  -> IO (CmdlState, CmdlPgipState)
processCmds cmds state pgipSt = do
  let opts = hetsOpts state
  case cmds of
    [] -> return (state, addPGIPReady pgipSt)
      {- ensures that the response is ended with a ready element
      such that the broker does wait for more input -}
    XmlExecute str : l -> processString l str state (resetPGIPData pgipSt)
    XmlExit : l -> processCmds l state $
      addPGIPAnswer "Exiting prover" [] pgipSt { stop = True }
    XmlAskpgip : l -> processCmds l state $ addPGIPHandshake pgipSt
    XmlProverInit : l -> processCmds l (emptyCmdlState opts) $
      addPGIPAnswer "Prover state was reset" [] pgipSt
    XmlStartQuiet : l -> do
      {- To inform that quiet mode is enabled we need to send this with the
      old options. -}
      let pgD = addPGIPReady $ addPGIPAnswer "Quiet mode enabled" [] pgipSt
      pgipSt' <- if useXML pgD
                   then sendPGIPData opts pgD
                   else sendMSGData opts pgD
      processCmds l (state { hetsOpts = opts { verbose = 0 } }) pgipSt'
    XmlStopQuiet : l ->
      processCmds l (state { hetsOpts = opts { verbose = 1 } }) $
        addPGIPAnswer "Quiet mode disabled" [] pgipSt
    XmlOpenGoal str : l -> processCommand l (SelectCmd Goal str) state pgipSt
    -- closing a goal proves it first, then drops it
    XmlCloseGoal str : l -> processCommand (XmlGiveUpGoal str : l)
      (GlobCmd ProveCurrent) state pgipSt
    XmlGiveUpGoal str : l -> processString l ("del goals " ++ str) state pgipSt
    XmlUnknown str : l -> processCmds l state $
      addPGIPAnswer [] ("Unknown command: " ++ str) pgipSt
    XmlUndo : l -> processCommand l (GlobCmd UndoCmd) state pgipSt
    XmlRedo : l -> processCommand l (GlobCmd RedoCmd) state pgipSt
    XmlForget str : l -> processString l ("del axioms " ++ str) state pgipSt
    XmlOpenTheory str : l -> processString l str state pgipSt
    -- closing a theory resets the intermediate state but keeps the
    -- library environment, recording the old state for undo
    XmlCloseTheory _ : l -> let
        nwSt = case i_state $ intState state of
          Nothing -> state
          Just ist -> add2hist [IStateChange $ Just ist] $ state
            { intState = (intState state)
                { i_state = Just $ emptyIntIState (i_libEnv ist)
                    $ i_ln ist }}
        in processCmds l nwSt $ addPGIPAnswer "Theory closed" [] pgipSt
    XmlCloseFile _ : l -> processCmds l (emptyCmdlState opts)
      (addPGIPAnswer "File closed" [] pgipSt)
    XmlParseScript str : _ ->
      processCmds [] state . addPGIPElement pgipSt $ addPGIPMarkup str
    XmlLoadFile str : l ->
      processCommand l (SelectCmd LibFile str) state pgipSt
  {- deleting axioms or goals should be implemented via a select command after
  inspecting the current axioms or goals. The current strings do not work. -}
| nevrenato/Hets_Fork | PGIP/XMLparsing.hs | gpl-2.0 | 11,678 | 0 | 26 | 3,390 | 2,817 | 1,415 | 1,402 | 236 | 21 |
{-# LANGUAGE RecordWildCards #-}
module Main (main) where
import Passman.Core.PassList
import Passman.Core.Hash
import Passman.Core.Config
import qualified Passman.Core.Config.Optional as OC
import Data.Maybe (fromMaybe)
import Control.Applicative (pure, (<$>), (<*>))
import Control.Monad (unless)
import Control.Monad.Trans (liftIO)
import Control.Monad.Trans.Maybe (runMaybeT, MaybeT(..))
import System.FilePath (splitFileName)
import Graphics.UI.WX
( (.+.), Align(..), Button, Frame, ListView, ListView(..), Prop(..)
, Size2D(..), Var, Window, activate, button, column, columns, command
, errorDialog, fileOpenDialog, fileSaveDialog, fill, frame, hfill
, infoDialog, listCtrlEx, listViewCtrl, listViewItems, listViewSetHandler
, listViewSetItems, minsize, on, passwordDialog, row, set, start, text
, varCreate, varCreate, varGet, varSet, varUpdate, when, widget
, windowReLayout, windowSetLayout
)
import Graphics.UI.WXCore
( Clipboard, EventList(..), clipboardCreate, clipboardSetData
, execClipBoardData, textDataObjectCreate, wxLC_REPORT, wxLC_SINGLE_SEL
, wxLC_REPORT
)
-- | All widgets and mutable state of the main window, bundled so
-- event handlers can be passed a single value (filled via
-- RecordWildCards in 'gui').
data GUI = GUI { gWin :: Frame ()
               , gListView :: ListView PassListEntry
               , gGetPassword :: Button ()
               , gOpenFile :: Button ()
               , gSaveFile :: Button ()
               , gAddEntry :: Button ()
               , gModifyEntry :: Button ()
               , gRemoveEntry :: Button ()
               , gConfig :: Var Config          -- mutable app configuration
               , gSelectedItem :: Var Int       -- index of the selected row, -1 for none
               }
-- Main block

-- | Application entry point: hand control to wxWidgets.
main :: IO ()
main = start gui

-- | Build the main window, wire up the event handlers, and lay out
-- the widgets.
gui :: IO ()
gui = do
    gWin <- frame [ text := "Passman" ]
    -- single-selection report-style list with three columns
    gListView <- ListView <$> listCtrlEx gWin
        (wxLC_SINGLE_SEL .+. wxLC_REPORT)
        [ columns := [ ("Info", AlignLeft, 220)
                     , ("Length", AlignRight, -1)
                     , ("Mode", AlignRight, -1)
                     ] ] <*> varCreate [] <*> pure entryToStrings
    gGetPassword <- button gWin [ text := "Get Password" ]
    gOpenFile <- button gWin [ text := "Open File" ]
    gSaveFile <- button gWin [ text := "Save File" ]
    gAddEntry <- button gWin [ text := "Add Entry" ]
    gModifyEntry <- button gWin [ text := "Modify Entry" ]
    gRemoveEntry <- button gWin [ text := "Remove Entry" ]
    -- loads (or interactively creates) the config before the UI shows
    gConfig <- varCreate =<< initConfig gWin
    gSelectedItem <- varCreate (-1)
    let g = GUI {..}
    set gWin [ on activate := flip when (windowReLayout gWin) ]
    set gOpenFile [ on command := _FileEvent g False]
    set gSaveFile [ on command := _FileEvent g True]
    set gGetPassword [ on command := getPasswordEvent g]
    listViewSetHandler gListView (listViewEvent g)
    -- NOTE(review): gAddEntry/gModifyEntry/gRemoveEntry have no
    -- command handlers wired up here.
    windowSetLayout gWin $ column 5 $ map (row 5)
        [ [fill $ minsize (Size 400 100) $ widget $ listViewCtrl gListView]
        , map (hfill . widget) [gGetPassword]
        , map (hfill . widget) [gOpenFile, gSaveFile]
        , map (hfill . widget) [gAddEntry, gModifyEntry, gRemoveEntry]
        ]
-- Event handlers

-- | Handler for the "Get Password" button: generate the password for
-- the currently selected list entry, or complain if nothing is selected.
getPasswordEvent :: GUI -> IO ()
getPasswordEvent g@GUI{..} = do
    selectedItem <- varGet gSelectedItem
    if selectedItem < 0 then
        errorDialog gWin "No item selected" "No item selected"
    else do
        entries <- varGet $ listViewItems gListView
        getPasswordForEntry g (entries !! selectedItem)
-- | Shared handler for "Open File" / "Save File". The 'Bool' selects
-- save (True) or load (False). On failure an error dialog is shown
-- and the file dialog is re-opened.
_FileEvent :: GUI -> Bool -> IO ()
_FileEvent g@GUI{..} save = helper =<< passListDialog g save
  where
    _File = if save then saveFile else loadFile
    helper :: Maybe FilePath -> IO ()
    helper Nothing = return ()        -- user cancelled the dialog
    helper (Just path) = _File gListView path >>= errHandler
    errHandler Nothing = return ()
    errHandler (Just err) = errorDialog' gWin err >> _FileEvent g save
-- | Track the list selection in 'gSelectedItem' (-1 when cleared).
listViewEvent :: GUI -> EventList -> IO ()
listViewEvent GUI{..} event = case event of
    ListItemSelected i -> varSet gSelectedItem i
    ListDeleteAllItems -> varSet gSelectedItem (-1)
    _ -> return ()
-- Helper Functions

-- | Ask for the master password, verify it against the stored hash,
-- and put the generated password for the entry on the clipboard until
-- the user confirms. Re-prompts on a wrong password; an empty input
-- cancels silently.
getPasswordForEntry :: GUI -> PassListEntry -> IO ()
getPasswordForEntry g@GUI{..} entry = do
    let errorLoop = errorDialog' gWin "Incorrect password" >>
                    getPasswordForEntry g entry
    config <- varGet gConfig
    let hash = masterPasswordHash config
    passwd <- passwordDialog' gWin "Please enter your password" ""
    unless (null passwd) $ case masterPassword passwd of
        Nothing -> errorLoop
        Just mpass -> if checkMasterPassword hash mpass then do
            setClipboardText $ generatePassword entry mpass
            infoDialog' gWin ( "The password for "
                            ++ passListEntryInfo entry
                            ++ " is in the clipboard. Press OK when done"
                             )
            -- wipe the clipboard once the user has pasted the password
            setClipboardText ""
          else errorLoop
-- | Place the given text on the system clipboard.
setClipboardText :: String -> IO ()
setClipboardText t = clipboardCreate >>= flip execClipBoardData helper
  where
    helper :: Clipboard () -> IO ()
    helper cl = textDataObjectCreate t >>= clipboardSetData cl >> return ()
-- | Show an open/save file dialog starting at the last-used passlist
-- location, remembering the chosen path in the optional config.
-- Returns 'Nothing' when the user cancels.
passListDialog :: GUI -> Bool -> IO (Maybe FilePath)
passListDialog GUI{..} save = runMaybeT $ do
    config <- liftIO $ varGet gConfig
    let (p1,p2) = splitPassListPath $ getPassListPath config
        dialog = if save then fileSaveDialog else fileOpenDialog
    path <- MaybeT $ dialog gWin True True "Open file..."
                        [("Text Files (*.txt)", ["*.txt"])
                        ,("All Files (*.*)",["*"])] p1 p2
    liftIO $ updateConfig gConfig (setPassListPath path)
    return path
-- | Read the remembered passlist path from the optional config section.
getPassListPath :: Config -> Maybe FilePath
getPassListPath = OC.lookup "passlist path" . optionalConfig
-- | Store the passlist path in the optional config section.
setPassListPath :: FilePath -> Config -> Config
setPassListPath path config = config
    { optionalConfig = OC.insert "passlist path" path (optionalConfig config)
    }
-- | Apply a pure update to the config var and persist it to disk.
updateConfig :: Var Config -> (Config -> Config) -> IO ()
updateConfig vc f = varUpdate vc f >> varGet vc >>= saveConfig
-- | Split a remembered passlist path into (directory, filename) for
-- seeding the file dialog; 'Nothing' yields two empty strings.
splitPassListPath :: Maybe FilePath -> (String,String)
splitPassListPath mPath =
    case mPath of
        Nothing   -> ("", "")
        Just path -> splitFileName path
-- | Load a passlist file into the list view. Returns an error string
-- on failure, 'Nothing' on success.
loadFile :: ListView PassListEntry -> FilePath -> IO (Maybe String)
loadFile lc filename = fileToEntries filename >>= errHandler
  where
    errHandler (Right entries) = listViewSetItems lc entries >> return Nothing
    errHandler (Left err) = return $ Just $ show err
-- | Write the list view's entries back to a passlist file; error
-- string on failure, 'Nothing' on success.
saveFile :: ListView PassListEntry -> FilePath -> IO (Maybe String)
saveFile lc fn = fmap show <$> (varGet (listViewItems lc) >>= entriesToFile fn)
-- | Render an entry as the three list-view columns: info, length
-- ("Max" when unspecified), and mode.
entryToStrings :: PassListEntry -> [String]
entryToStrings (PassListEntry x y z) = [x, fromMaybe "Max" $ show <$> y, show z]
-- | Load the application config. If none exists yet, interactively
-- create one (asking for a master password); an unreadable config is
-- fatal.
initConfig :: Frame () -> IO Config
initConfig f = do
    c <- loadConfig
    case c of
        Right config -> return config
        Left ConfigFileNotFound -> do
            -- first run: ask for a master password and persist a
            -- fresh config
            hash <- initMasterPassword f
            let config = Config { masterPasswordHash = hash
                                , optionalConfig = OC.empty
                                }
            saveConfig config
            return config
        Left (InvalidConfig fp) ->
            crashWithError f $ "Invalid config file. Please delete " ++ fp
-- | Prompt for a master password until a valid one is entered, then
-- return its hash for storage in the config.
initMasterPassword :: Frame () -> IO String
initMasterPassword f = do
    spass <- passwordDialog' f "Please enter a master password" ""
    case masterPassword spass of
        Nothing -> initMasterPassword f
        Just mpass -> hashMasterPassword mpass
-- | 'passwordDialog' with the caption derived from the prompt text.
passwordDialog' :: Window a -> String -> String -> IO String
passwordDialog' f s = passwordDialog f (s ++ ":") (s ++ ".")
-- | 'errorDialog' with a fixed "Error" caption.
errorDialog' :: Window a -> String -> IO ()
errorDialog' f = errorDialog f "Error"
-- | 'infoDialog' with a fixed "Info" caption.
infoDialog' :: Window a -> String -> IO ()
infoDialog' f = infoDialog f "Info"
-- | Show the message in an error dialog, then abort the program.
crashWithError :: Frame () -> String -> IO a
crashWithError f m = do
    errorDialog' f m
    error m
| PasswordManager/passman-wx | Main.hs | gpl-2.0 | 7,921 | 0 | 18 | 2,180 | 2,453 | 1,254 | 1,199 | 161 | 4 |
{-# LANGUAGE FlexibleContexts #-}
module Level.Command where
import Control.Lens ((^.))
import Control.Lens.Cons (_tail)
import Control.Monad.Error
import Data.Monoid
import qualified Data.Foldable as DF
import Actor
import Coords
import Level
import Level.Transformation
import Path(pathCoords)
import StaticElement
import Unfold
import qualified Level.Transformation as T
-- | A stream of level transformations produced step by step.
type Command = Unfold LevelTrans
-- | Lift a single transformation into a one-step command.
command :: Monad m => LevelTrans -> CommandT m
command = commandT . return
-- | A level transformation that is used by the command scheduler
type CommandT m = UnfoldT m LevelTrans
-- | Lift a pure 'Command' into the transformer.
commandT :: Monad m => Command -> CommandT m
commandT = UnfoldT . return
-- | Unwrap the transformer back to its underlying 'Command'.
runCommandT :: CommandT m -> m Command
runCommandT = runUnfoldT
-- | find a way to the destination and move the actor to it
--
-- If the destination itself is not walkable, any walkable neighbor
-- is accepted as a target instead. When no path exists the command
-- fails with 'PathNotFound'.
approach :: Coord -> Actor -> Level -> Command
approach dest actor lvl =
  case maybePath of
    Just path -> DF.foldMap (return . T.move actor) (path ^. pathCoords . _tail)
    Nothing -> return (const . throwError $ PathNotFound fromCoord dest)
  where
    fromCoord = lvl ^. coordOf actor
    maybePath = findArea fromCoord destCoords lvl
    -- fall back to walkable neighbors when the target itself is blocked
    destCoords = if isWalkable dest lvl
                 then [dest]
                 else filter (`isWalkable` lvl) (neighbors2d dest)
-- | Walk to the item's location and pick it up. The pickup step
-- first re-checks that the item is still there (it may have been
-- taken while the actor was walking).
pickup :: StaticElement -> Actor -> Level -> Command
pickup item actor lvl =
  mconcat
  [ approach itemCoord actor lvl
  , return $ failOnMissingItem actor item itemCoord
          >> T.pickup actor item
  ]
  where itemCoord = lvl ^. coordOf item
-- | Walk to the block's location and mine it, re-checking that the
-- block still exists before the mining step.
mine :: StaticElement -> Actor -> Level -> Command
mine block actor lvl =
  mconcat
  [ approach blockCoord actor lvl
  , return $ failOnMissingItem actor block blockCoord
          >> T.mine actor block
  ]
  where blockCoord = lvl ^. coordOf block
| svenkeidel/gnome-citadel | src/Level/Command.hs | gpl-3.0 | 1,796 | 0 | 12 | 395 | 507 | 272 | 235 | -1 | -1 |
{-# OPTIONS_GHC -XTypeSynonymInstances -XFlexibleInstances #-}
module Folsolver.TPTP
( wrapF
, parse
, parseFormula
, transformOnInput
, true, isTrue
, false, isFalse
, stripDoubleNeg, noDoubleNeg
, HasPretty(..), Formula(..)
, rnd, rndIO
, HasPretty(..)
) where
import Folsolver.HasPretty
import Codec.TPTP
import Data.Functor.Identity
import System.Random
import System.IO.Unsafe
import Data.Maybe (fromMaybe)
import Data.List (intercalate)
import qualified Data.Set as Set
import qualified Data.Map as Map
import Data.Set (Set)
import Data.Map (Map)
import Text.PrettyPrint.HughesPJ as Pretty
-- | Wrap a bare 'Formula0' into the 'F' newtype via the 'Identity' functor.
-- Inverse of @unwrapF@ on 'Identity'.
wrapF :: Formula0 (T Identity) (F Identity) -> F Identity
wrapF = F . Identity
-- pretty print of a term
-- ('toTPTP' is applied to "" — apparently a difference-list/ShowS style
-- renderer; the result is wrapped as a plain Doc)
instance HasPretty Term where
  pretty f = Pretty.text $ (toTPTP f) ""
-- pretty print of a formula
instance HasPretty Formula where
  pretty f = Pretty.text $ (toTPTP f) ""
-- pretty to print TPTP
instance HasPretty TPTP_Input where
  pretty f = Pretty.text $ (toTPTP f) ""
-- | Variables render as @V(name)@.
instance HasPretty V where
  pretty (V name) = Pretty.text "V(" <> Pretty.text name <> Pretty.text ")"
-- | Atomic words render as their bare text.
instance HasPretty AtomicWord where
  pretty (AtomicWord w) = Pretty.text w
-- | Apply a formula transformation to an annotated-formula input.
--
-- Non-formula inputs (e.g. TPTP comments) are passed through unchanged
-- instead of crashing on a non-exhaustive pattern match.
transformOnInput :: (Formula -> Formula) -> TPTP_Input -> TPTP_Input
transformOnInput fun (AFormula name role form anno) = AFormula name role (fun form) anno
transformOnInput _ other = other
-- | True and False represented in our system
-- (encoded as nullary predicate applications named \"true\" / \"false\")
true , false :: Formula
true = wrapF $ PredApp (AtomicWord "true") []
false = wrapF $ PredApp (AtomicWord "false") []
-- | Checks for True and False
-- A negated formula is true iff its body is false (and vice versa), so the
-- two checks recurse into each other through any stack of negations.
isTrue , isFalse :: Formula -> Bool
isTrue x = case unwrapF x of
  (:~:) x0 -> isFalse x0
  _ -> x == true
isFalse x = case unwrapF x of
  (:~:) x0 -> isTrue x0
  _ -> x == false
-- | Parse a set of axioms and conjectures and extract the formulas from them
-- ('parse' is the Codec.TPTP parser; 'formula' presumably projects the
-- formula out of each annotated input — confirm against Codec.TPTP)
parseFormula :: String -> [Formula]
parseFormula = map formula . parse
-- | strip a double negation for a formula
-- | if the formula is not double negated, Nothing is returned.
-- Only the outermost @~~@ pair is removed; deeper negations are untouched.
stripDoubleNeg :: Formula -> Maybe Formula
stripDoubleNeg x = case unwrapF x of
  (:~:) x0 -> case unwrapF x0 of
    (:~:) x1 -> Just x1
    _ -> Nothing
  _ -> Nothing
-- | Remove one double negation from a formula when present:
-- @~~x@ becomes @x@; any other formula is returned unchanged.
noDoubleNeg :: Formula -> Formula
noDoubleNeg f = maybe f id (stripDoubleNeg f)
-- | returns a random value uniformly distributed in the closed interval
-- [lo,hi], in an IO-Monad
-- (parameters renamed so Prelude's 'min'/'max' are not shadowed)
rndIO :: Random a => a -> a -> IO a
rndIO lo hi = getStdRandom (randomR (lo, hi))
-- | returns a random value uniformly distributed in the closed interval [min,max]
-- | (uses unsafePerformIO, so be careful!)
-- NOTE(review): without a {-# NOINLINE #-} pragma GHC may share or float
-- this call, yielding the same "random" value at several use sites —
-- treat results as impure and non-reproducible.
rnd :: Random a => a -> a -> a
rnd min max = unsafePerformIO $ rndIO min max
| traeger/fol-solver | Folsolver/TPTP.hs | gpl-3.0 | 2,889 | 0 | 11 | 607 | 795 | 431 | 364 | 62 | 3 |
{- |
Module : $Header$
Description : Lists. dealwithit.jpeg.
Copyright : (c) plaimi 2015
License : GPL-3
Maintainer : plailude@plaimi.net
-} module Plailude.Control.List where
-- | Safe indexing of lists: 'Just' the element at the (0-based) index,
-- 'Nothing' for negative or out-of-range indices.
(!?) :: [a] -> Int -> Maybe a
xs !? n
  | n < 0     = Nothing
  | otherwise = walk xs n
  where
    walk []     _ = Nothing
    walk (y:_)  0 = Just y
    walk (_:ys) k = walk ys (k - 1)
| plaimi/plailude | src/Plailude/Control/List.hs | gpl-3.0 | 388 | 0 | 9 | 111 | 121 | 65 | 56 | 6 | 4 |
-- Utility module
-- By Gregory W. Schwartz
--
-- Collects utility functions for the main files
{-# LANGUAGE BangPatterns, OverloadedStrings, ViewPatterns #-}
module Utility ( addLengthHeader
, addMutationsHeader
, addFillerGermlines
, replaceChars
, fromEither
) where
-- Built-in
import qualified Data.Map as M
import Data.Monoid
-- Cabal
import qualified Data.Text as T
import Data.Fasta.Text
import TextShow
-- Local
import Types
-- | Append the sequence's length to its header, separated by a \'|\'.
addLengthHeader :: FastaSequence -> FastaSequence
addLengthHeader fs =
    fs { fastaHeader = T.concat [ fastaHeader fs
                                , "|"
                                , showt (T.length (fastaSeq fs))
                                ]
       }
-- | Adds the mutations of a sequence to the header of that sequence
-- The reference ("germline") sequence is read out of header field @field@
-- (fields are '|'-separated); when @aaFlag@ is set the reference is first
-- translated (frame 1) before the comparison.
addMutationsHeader :: Bool -> Field -> FastaSequence -> FastaSequence
addMutationsHeader aaFlag field fSeq =
    fSeq { fastaHeader = fastaHeader fSeq
                      <> "|"
                      <> ( printMutations
                         . getMutations (fastaSeq germline)
                         . fastaSeq
                         $ fSeq
                         )
         }
  where
    germline = if aaFlag then fromEither (translate 1 otherSeq) else otherSeq
    -- wrap the extracted header field as a headerless sequence
    otherSeq =
        FastaSequence { fastaHeader = "", fastaSeq = getField field '|' fSeq }
-- | Render mutations as @pos-from-to@ entries joined with \'/\'.
printMutations :: [(Position, (Char, Char))] -> T.Text
printMutations ms = T.intercalate "/" (map render ms)
  where
    render (p, (from, to)) =
        p `seq` from `seq` to `seq` (showt p <> T.pack ['-', from, '-', to])
-- | Filter for the true mutations
-- Keeps only aligned positions where the two characters differ and neither
-- side is a gap character ('-', '.' or '~').
getMutations :: T.Text -> T.Text -> [(Position, (Char, Char))]
getMutations xs = filter (\x -> isDiff x && noGaps x) . getDiff xs
  where
    isDiff (_, (!x, !y)) = x /= y
    noGaps (_, !x) = not . any (flip inTuple x) $ ("-.~" :: String)
-- | Returns the difference between two texts
-- (actually pairs every aligned position, 1-based, with both characters;
-- filtering down to real differences happens in 'getMutations')
getDiff :: T.Text -> T.Text -> [(Position, (Char, Char))]
getDiff xs = zip [1..] . T.zip xs
-- | Sees if an element equals either component of a pair.
-- ('seq' keeps the original bang-pattern strictness on both components.)
inTuple :: (Eq a) => a -> (a, a) -> Bool
inTuple needle (a, b) = a `seq` b `seq` (needle == a || needle == b)
-- | Adds filler germlines to normal fasta files
-- Each input sequence becomes its own clone entry: keys pair a running
-- index with a dummy "filler" germline, producing the 'CloneMap' shape
-- downstream code expects.
addFillerGermlines :: [FastaSequence] -> CloneMap
addFillerGermlines = M.fromList . labelGermlines . map insertDummy
  where
    labelGermlines = map (\(x, (y, z)) -> ((x, y), z)) . zip [0..]
    insertDummy x = (dummy, [x])
    dummy = FastaSequence {fastaHeader = "filler", fastaSeq = "---"}
-- | Like 'zipWith', but when one list is longer than the other its leftover
-- suffix is kept verbatim — hence both lists must share one element type.
zipWithRetain :: (a -> a -> a) -> [a] -> [a] -> [a]
zipWithRetain f = go
  where
    go []     ys     = ys
    go xs     []     = xs
    go (x:xs) (y:ys) = f x y : go xs ys
-- | Like zipWithRetain, but for text
-- Implemented with view patterns on 'T.uncons': empty/empty yields empty,
-- an exhausted side yields the other side unchanged, otherwise the heads
-- are combined and the tails recursed on.
zipWithRetainText :: (Char -> Char -> Char) -> T.Text -> T.Text -> T.Text
zipWithRetainText _ (T.uncons -> Nothing) (T.uncons -> Nothing) = T.empty
zipWithRetainText _ xs (T.uncons -> Nothing) = xs
zipWithRetainText _ (T.uncons -> Nothing) ys = ys
zipWithRetainText f (T.uncons -> Just (x, xs)) (T.uncons -> Just (y, ys))
    = f x y `T.cons` zipWithRetainText f xs ys
-- | Replace characters in the first string with another in the second string
-- if they are equal to a certain character and they aren't replaced with
-- a gap.
-- I.e. a character @a@ is swapped for its counterpart @b@ only when @a@
-- equals the marker @c@ and @b@ is not one of the gap characters '.'/'-'.
replaceChars :: Char -> T.Text -> T.Text -> T.Text
replaceChars c = zipWithRetainText changeChar
  where
    changeChar a b = if a == c && (not . T.isInfixOf (T.singleton b)) ".-"
                       then b
                       else a
-- | Extract the 'Right' value; a 'Left' aborts with its message as a
-- fatal 'error'.
fromEither :: Either T.Text b -> b
fromEither (Right r)  = r
fromEither (Left msg) = error (T.unpack msg)
| GregorySchwartz/modify-fasta | src/Utility.hs | gpl-3.0 | 3,830 | 0 | 15 | 1,085 | 1,150 | 633 | 517 | 62 | 2 |
{- This file is part of PhoneDirectory.
Copyright (C) 2009 Michael Steele
PhoneDirectory is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
PhoneDirectory is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with PhoneDirectory. If not, see <http://www.gnu.org/licenses/>.
-}
module GUIConstants
( ctrlPadding
, lblPadding
, winPadding
, scale
) where
-- Measurement aliases: 'DPI' is the monitor's dots-per-inch setting,
-- 'RelPix' a pixel count relative to the 96-DPI baseline, and 'AbsPix'
-- an actual on-screen pixel count.
type DPI = Int
type RelPix = Int
type AbsPix = Int
-- |Pixels separating controls that belong to the same group.
ctrlPadding :: DPI -> AbsPix
ctrlPadding dpi = scale 4 dpi
-- |Pixels separating a control from its label.
lblPadding :: DPI -> AbsPix
lblPadding dpi = scale 3 dpi
-- |Pixels between the window border (or vertical\/horizontal spacers) and
-- the controls inside.
winPadding :: DPI -> AbsPix
winPadding dpi = scale 7 dpi
-- |Convert a 96-DPI-relative pixel count to absolute pixels for a monitor.
scale :: RelPix -> DPI -> AbsPix
scale rel dpi = (rel * dpi) `div` 96
| mikesteele81/Phone-Directory | src/GUIConstants.hs | gpl-3.0 | 1,336 | 0 | 6 | 277 | 125 | 74 | 51 | 16 | 1 |
module Debug.Vampire.Data (ExprStruct(..), ExprStruct'(..), resolve) where
import Data.IORef
import Data.Functor
-- | A fully materialised expression tree: the expression text, its
-- optional evaluated value, and the sub-expressions.
data ExprStruct = ExprStruct
  { expr     :: String
  , value    :: Maybe String
  , children :: [ExprStruct]
  } deriving Show

-- | Mutable counterpart of 'ExprStruct': children sit behind 'IORef's so
-- they can be filled in incrementally.
data ExprStruct' = ExprStruct'
  { expr'     :: String
  , value'    :: Maybe String
  , children' :: [IORef ExprStruct']
  }

-- | Snapshot a mutable 'ExprStruct'' into a pure 'ExprStruct' by reading
-- every child reference and resolving it recursively.
resolve :: ExprStruct' -> IO ExprStruct
resolve (ExprStruct' e v childRefs) = do
  snapshots <- mapM readIORef childRefs
  resolved  <- mapM resolve snapshots
  return (ExprStruct e v resolved)
| benzrf/vampire | src/Debug/Vampire/Data.hs | gpl-3.0 | 548 | 0 | 10 | 129 | 162 | 93 | 69 | 14 | 1 |
module Carbon.Data.Logic.ParseDiamond(
Answer
, DRParser
, answers
)where
import Control.Monad
import Text.Parsec as P
import Text.ParserCombinators.Parsec as PC
import qualified Data.Functor.Identity as Identity
import Carbon.Common
import Carbon.Data.Id
import Carbon.Data.Logic.Diamond
import Carbon.Data.Logic.Parse
-- | One answer set from the diamond solver, partitioning statement names
-- into true\/undecided\/false sets.
type Answer = DiamondResult String
-- | A parser that threads a 'DiamondResult' as its user state while it
-- accumulates the pieces of one answer line.
type DRParser a = MyParser (DiamondResult String) a
-- | Parse one answer line: space-separated @t(name)@ \/ @u(name)@ \/
-- @f(name)@ pieces followed by an end of line.  Each piece pushes its name
-- into the matching set of the parser state, which is the result.
answer :: DRParser Answer
answer = do
  setState startState
  (pieces `sepBy` char ' ') >> eol
  getState
  where
    -- parse @start(name)@ and fold @name@ into the state via @update@
    parsePiece :: String -> (String -> DiamondResult String -> DiamondResult String) -> DRParser ()
    parsePiece start update = between (string start >> char '(') (char ')') $ do
      name <- many1 $ noneOf ")"
      modifyState $ update name
    parseIn   = parsePiece "t" $ \s dr -> dr{inSet = s:inSet dr}
    parseUdec = parsePiece "u" $ \s dr -> dr{udecSet = s:udecSet dr}
    parseOut  = parsePiece "f" $ \s dr -> dr{outSet = s:outSet dr}
    pieces = choice [parseIn, parseUdec, parseOut]
-- | Parse a whole solver output: lines that parse as an 'answer' are
-- collected, any other line is skipped, and end of input ends the list.
answers :: DRParser [Answer]
answers = choice [found, dropLine, finished]
  where
    found = P.try $ do
      a <- answer
      as <- answers
      return $ a:as
    dropLine = do
      many $ noneOf "\r\n"
      eol >> answers
    finished = eof >> return []
-- | Ad-hoc debugging helper (not exported): parse a file of solver output
-- and print either the parse error or the parsed answers.
testAnswers :: FilePath -> IO ()
testAnswers path = do
  foo <- readFile path
  either putStrLn print $ execParser answers path foo
| runjak/carbon-adf | Carbon/Data/Logic/ParseDiamond.hs | gpl-3.0 | 1,433 | 0 | 12 | 321 | 512 | 270 | 242 | 41 | 1 |
module Main where
import Control.Monad
import Control.Monad.Catch
import Control.Monad.IO.Class
import Media.FFMpeg
import System.Directory
import System.Environment
import System.Exit
-- | Print all format-level metadata key\/value pairs of a media file,
-- mirroring FFmpeg's metadata example program.
main :: IO ()
main = do
    -- register all codecs/formats with FFmpeg before any other call
    registerAll
    pname <- getProgName
    args <- getArgs
    case args of
        [fname] -> do
            fexists <- doesFileExist fname
            when (not fexists) $ do
                putStrLn $ "File not found " ++ fname
                exitFailure
            -- open the container and dump its metadata; any HSFFError
            -- raised in this block is caught by the handler below
            do
                (ctx, dict) <- openInput fname Nothing Nothing
                -- NOTE(review): 'dict' is bound but never used here
                vals <- dictGetAll =<< getDictField ctx format_metadata
                if (null vals)
                    then liftIO . putStrLn $ "No metadata"
                    else forM_ vals $ \(key, val) -> do
                        liftIO . putStrLn $ key ++ "=" ++ val
              `catch` \e -> do
                putStrLn $ show (e :: HSFFError)
                exitFailure
        _ -> do
            putStrLn $ "usage: " ++ pname ++ " <input_file>"
            putStrLn $ "example program to demonstrate the use of the libavformat metadata API.\n"
            exitFailure
| CLowcay/hs-ffmpeg-examples | src/Metadata.hs | gpl-3.0 | 917 | 6 | 24 | 209 | 299 | 149 | 150 | 33 | 3 |
-- This Source Code Form is subject to the terms of the Mozilla Public
-- License, v. 2.0. If a copy of the MPL was not distributed with this
-- file, You can obtain one at http://mozilla.org/MPL/2.0/.
{-# LANGUAGE ExtendedDefaultRules #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-type-defaults #-}
module Main (main) where
import Control.Applicative
import Control.Monad
import Criterion
import Criterion.Main
import Data.ByteString.Lazy
import Data.Monoid
import Database.Redis.IO
import qualified Database.Redis as Hedis
import qualified System.Logger as Logger
default (ByteString, Int)
-- | Criterion benchmark comparing redis-io against hedis for PING and
-- SET\/GET pipelines of increasing depth (1, 4, 10, 100 commands).
main :: IO ()
main = do
    g <- Logger.new (Logger.setLogLevel Logger.Error Logger.defSettings)
    -- one stripe, up to 50 connections, for the redis-io pool
    p <- mkPool g (setMaxConnections 50 . setPoolStripes 1 $ defSettings)
    h <- Hedis.connect Hedis.defaultConnectInfo
    defaultMain
        [ bgroup "ping"
            [ bench "hedis 1" $ nfIO (runPingH 1 h)
            , bench "redis-io 1" $ nfIO (runPing 1 p)
            , bench "hedis 4" $ nfIO (runPingH 4 h)
            , bench "redis-io 4" $ nfIO (runPing 4 p)
            , bench "hedis 10" $ nfIO (runPingH 10 h)
            , bench "redis-io 10" $ nfIO (runPing 10 p)
            , bench "hedis 100" $ nfIO (runPingH 100 h)
            , bench "redis-io 100" $ nfIO (runPing 100 p)
            ]
        , bgroup "get-and-set"
            [ bench "hedis 1" $ nfIO (runGetSetH 1 h)
            , bench "redis-io 1" $ nfIO (runSetGet 1 p)
            , bench "hedis 4" $ nfIO (runGetSetH 4 h)
            , bench "redis-io 4" $ nfIO (runSetGet 4 p)
            , bench "hedis 10" $ nfIO (runGetSetH 10 h)
            , bench "redis-io 10" $ nfIO (runSetGet 10 p)
            , bench "hedis 100" $ nfIO (runGetSetH 100 h)
            , bench "redis-io 100" $ nfIO (runSetGet 100 p)
            ]
        ]
    shutdown p
    Logger.close g
-- | Pipeline @n@ PINGs through redis-io and force the final reply.
runPing :: Int -> Pool -> IO ()
runPing n pool = do
    reply <- runRedis pool . pipelined $ Prelude.last <$> replicateM n ping
    reply `seq` return ()
-- | Pipeline @n@ PINGs through hedis and force the final reply.
runPingH :: Int -> Hedis.Connection -> IO ()
runPingH n conn = do
    reply <- Hedis.runRedis conn (Prelude.last <$> replicateM n Hedis.ping)
    reply `seq` return ()
-- | Pipeline @n@ SETs of @hello=world@ followed by one GET through
-- redis-io, forcing the GET reply.
runSetGet :: Int -> Pool -> IO ()
runSetGet n pool = do
    reply <- runRedis pool . pipelined $ do
        replicateM_ n $ set "hello" "world" mempty
        get "hello" :: Redis IO (Maybe ByteString)
    reply `seq` return ()
-- | Pipeline @n@ SETs followed by one GET through hedis, forcing the
-- GET reply.  NOTE(review): this variant uses key \"world\" while the
-- redis-io variant uses \"hello\" — preserved as written.
runGetSetH :: Int -> Hedis.Connection -> IO ()
runGetSetH n conn = do
    reply <- Hedis.runRedis conn $ do
        replicateM_ n $ Hedis.set "world" "hello"
        Hedis.get "world"
    reply `seq` return ()
| twittner/redis-io | bench/Bench.hs | mpl-2.0 | 2,639 | 0 | 14 | 794 | 857 | 419 | 438 | 60 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.IAP.Projects.Brands.IdentityAwareProxyClients.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves an Identity Aware Proxy (IAP) OAuth client. Requires that the
-- client is owned by IAP.
--
-- /See:/ <https://cloud.google.com/iap Cloud Identity-Aware Proxy API Reference> for @iap.projects.brands.identityAwareProxyClients.get@.
module Network.Google.Resource.IAP.Projects.Brands.IdentityAwareProxyClients.Get
(
-- * REST Resource
ProjectsBrandsIdentityAwareProxyClientsGetResource
-- * Creating a Request
, projectsBrandsIdentityAwareProxyClientsGet
, ProjectsBrandsIdentityAwareProxyClientsGet
-- * Request Lenses
, pbiapcgXgafv
, pbiapcgUploadProtocol
, pbiapcgAccessToken
, pbiapcgUploadType
, pbiapcgName
, pbiapcgCallback
) where
import Network.Google.IAP.Types
import Network.Google.Prelude
-- | A resource alias for @iap.projects.brands.identityAwareProxyClients.get@ method which the
-- 'ProjectsBrandsIdentityAwareProxyClientsGet' request conforms to.
-- (Servant-style route: GET @v1\/{name}@ plus the standard Google query
-- parameters.  NOTE(review): this module appears to be gogol-generated —
-- prefer regenerating over hand edits.)
type ProjectsBrandsIdentityAwareProxyClientsGetResource
     =
     "v1" :>
       Capture "name" Text :>
         QueryParam "$.xgafv" Xgafv :>
           QueryParam "upload_protocol" Text :>
             QueryParam "access_token" Text :>
               QueryParam "uploadType" Text :>
                 QueryParam "callback" Text :>
                   QueryParam "alt" AltJSON :>
                     Get '[JSON] IdentityAwareProxyClient
-- | Retrieves an Identity Aware Proxy (IAP) OAuth client. Requires that the
-- client is owned by IAP.
--
-- /See:/ 'projectsBrandsIdentityAwareProxyClientsGet' smart constructor.
data ProjectsBrandsIdentityAwareProxyClientsGet =
  ProjectsBrandsIdentityAwareProxyClientsGet'
    { _pbiapcgXgafv :: !(Maybe Xgafv) -- ^ V1 error format.
    , _pbiapcgUploadProtocol :: !(Maybe Text) -- ^ Upload protocol for media.
    , _pbiapcgAccessToken :: !(Maybe Text) -- ^ OAuth access token.
    , _pbiapcgUploadType :: !(Maybe Text) -- ^ Legacy upload protocol.
    , _pbiapcgName :: !Text -- ^ Required resource name of the client.
    , _pbiapcgCallback :: !(Maybe Text) -- ^ JSONP callback.
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsBrandsIdentityAwareProxyClientsGet' with the minimum fields required to make a request.
-- (All optional fields start as 'Nothing'; only the resource name is set.)
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pbiapcgXgafv'
--
-- * 'pbiapcgUploadProtocol'
--
-- * 'pbiapcgAccessToken'
--
-- * 'pbiapcgUploadType'
--
-- * 'pbiapcgName'
--
-- * 'pbiapcgCallback'
projectsBrandsIdentityAwareProxyClientsGet
    :: Text -- ^ 'pbiapcgName'
    -> ProjectsBrandsIdentityAwareProxyClientsGet
projectsBrandsIdentityAwareProxyClientsGet pPbiapcgName_ =
  ProjectsBrandsIdentityAwareProxyClientsGet'
    { _pbiapcgXgafv = Nothing
    , _pbiapcgUploadProtocol = Nothing
    , _pbiapcgAccessToken = Nothing
    , _pbiapcgUploadType = Nothing
    , _pbiapcgName = pPbiapcgName_
    , _pbiapcgCallback = Nothing
    }
-- Lenses over the request record's fields (one per field, generated).
-- | V1 error format.
pbiapcgXgafv :: Lens' ProjectsBrandsIdentityAwareProxyClientsGet (Maybe Xgafv)
pbiapcgXgafv
  = lens _pbiapcgXgafv (\ s a -> s{_pbiapcgXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
pbiapcgUploadProtocol :: Lens' ProjectsBrandsIdentityAwareProxyClientsGet (Maybe Text)
pbiapcgUploadProtocol
  = lens _pbiapcgUploadProtocol
      (\ s a -> s{_pbiapcgUploadProtocol = a})
-- | OAuth access token.
pbiapcgAccessToken :: Lens' ProjectsBrandsIdentityAwareProxyClientsGet (Maybe Text)
pbiapcgAccessToken
  = lens _pbiapcgAccessToken
      (\ s a -> s{_pbiapcgAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
pbiapcgUploadType :: Lens' ProjectsBrandsIdentityAwareProxyClientsGet (Maybe Text)
pbiapcgUploadType
  = lens _pbiapcgUploadType
      (\ s a -> s{_pbiapcgUploadType = a})
-- | Required. Name of the Identity Aware Proxy client to be fetched. In the
-- following format:
-- projects\/{project_number\/id}\/brands\/{brand}\/identityAwareProxyClients\/{client_id}.
pbiapcgName :: Lens' ProjectsBrandsIdentityAwareProxyClientsGet Text
pbiapcgName
  = lens _pbiapcgName (\ s a -> s{_pbiapcgName = a})
-- | JSONP
pbiapcgCallback :: Lens' ProjectsBrandsIdentityAwareProxyClientsGet (Maybe Text)
pbiapcgCallback
  = lens _pbiapcgCallback
      (\ s a -> s{_pbiapcgCallback = a})
-- Wires the request record to the servant-style route above; the response
-- type and required OAuth scope are fixed by the API definition.
instance GoogleRequest
           ProjectsBrandsIdentityAwareProxyClientsGet
         where
    type Rs ProjectsBrandsIdentityAwareProxyClientsGet =
         IdentityAwareProxyClient
    type Scopes
           ProjectsBrandsIdentityAwareProxyClientsGet
         = '["https://www.googleapis.com/auth/cloud-platform"]
    requestClient
      ProjectsBrandsIdentityAwareProxyClientsGet'{..}
      = go _pbiapcgName _pbiapcgXgafv
          _pbiapcgUploadProtocol
          _pbiapcgAccessToken
          _pbiapcgUploadType
          _pbiapcgCallback
          (Just AltJSON)
          iAPService
      where go
              = buildClient
                  (Proxy ::
                     Proxy
                       ProjectsBrandsIdentityAwareProxyClientsGetResource)
                  mempty
| brendanhay/gogol | gogol-iap/gen/Network/Google/Resource/IAP/Projects/Brands/IdentityAwareProxyClients/Get.hs | mpl-2.0 | 5,779 | 0 | 15 | 1,219 | 700 | 411 | 289 | 111 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.SecurityCenter.Organizations.NotificationConfigs.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists notification configs.
--
-- /See:/ <https://console.cloud.google.com/apis/api/securitycenter.googleapis.com/overview Security Command Center API Reference> for @securitycenter.organizations.notificationConfigs.list@.
module Network.Google.Resource.SecurityCenter.Organizations.NotificationConfigs.List
(
-- * REST Resource
OrganizationsNotificationConfigsListResource
-- * Creating a Request
, organizationsNotificationConfigsList
, OrganizationsNotificationConfigsList
-- * Request Lenses
, onclParent
, onclXgafv
, onclUploadProtocol
, onclAccessToken
, onclUploadType
, onclPageToken
, onclPageSize
, onclCallback
) where
import Network.Google.Prelude
import Network.Google.SecurityCenter.Types
-- | A resource alias for @securitycenter.organizations.notificationConfigs.list@ method which the
-- 'OrganizationsNotificationConfigsList' request conforms to.
-- (Servant-style route: GET @v1p1beta1\/{parent}\/notificationConfigs@
-- with paging and the standard Google query parameters.  NOTE(review):
-- gogol-generated module — prefer regenerating over hand edits.)
type OrganizationsNotificationConfigsListResource =
     "v1p1beta1" :>
       Capture "parent" Text :>
         "notificationConfigs" :>
           QueryParam "$.xgafv" Xgafv :>
             QueryParam "upload_protocol" Text :>
               QueryParam "access_token" Text :>
                 QueryParam "uploadType" Text :>
                   QueryParam "pageToken" Text :>
                     QueryParam "pageSize" (Textual Int32) :>
                       QueryParam "callback" Text :>
                         QueryParam "alt" AltJSON :>
                           Get '[JSON] ListNotificationConfigsResponse
-- | Lists notification configs.
--
-- /See:/ 'organizationsNotificationConfigsList' smart constructor.
data OrganizationsNotificationConfigsList =
  OrganizationsNotificationConfigsList'
    { _onclParent :: !Text -- ^ Required parent organization name.
    , _onclXgafv :: !(Maybe Xgafv) -- ^ V1 error format.
    , _onclUploadProtocol :: !(Maybe Text) -- ^ Upload protocol for media.
    , _onclAccessToken :: !(Maybe Text) -- ^ OAuth access token.
    , _onclUploadType :: !(Maybe Text) -- ^ Legacy upload protocol.
    , _onclPageToken :: !(Maybe Text) -- ^ Continuation token.
    , _onclPageSize :: !(Maybe (Textual Int32)) -- ^ Max results per page.
    , _onclCallback :: !(Maybe Text) -- ^ JSONP callback.
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'OrganizationsNotificationConfigsList' with the minimum fields required to make a request.
-- (All optional fields start as 'Nothing'; only the parent is set.)
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'onclParent'
--
-- * 'onclXgafv'
--
-- * 'onclUploadProtocol'
--
-- * 'onclAccessToken'
--
-- * 'onclUploadType'
--
-- * 'onclPageToken'
--
-- * 'onclPageSize'
--
-- * 'onclCallback'
organizationsNotificationConfigsList
    :: Text -- ^ 'onclParent'
    -> OrganizationsNotificationConfigsList
organizationsNotificationConfigsList pOnclParent_ =
  OrganizationsNotificationConfigsList'
    { _onclParent = pOnclParent_
    , _onclXgafv = Nothing
    , _onclUploadProtocol = Nothing
    , _onclAccessToken = Nothing
    , _onclUploadType = Nothing
    , _onclPageToken = Nothing
    , _onclPageSize = Nothing
    , _onclCallback = Nothing
    }
-- Lenses over the request record's fields (one per field, generated).
-- | Required. Name of the organization to list notification configs. Its
-- format is \"organizations\/[organization_id]\".
onclParent :: Lens' OrganizationsNotificationConfigsList Text
onclParent
  = lens _onclParent (\ s a -> s{_onclParent = a})
-- | V1 error format.
onclXgafv :: Lens' OrganizationsNotificationConfigsList (Maybe Xgafv)
onclXgafv
  = lens _onclXgafv (\ s a -> s{_onclXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
onclUploadProtocol :: Lens' OrganizationsNotificationConfigsList (Maybe Text)
onclUploadProtocol
  = lens _onclUploadProtocol
      (\ s a -> s{_onclUploadProtocol = a})
-- | OAuth access token.
onclAccessToken :: Lens' OrganizationsNotificationConfigsList (Maybe Text)
onclAccessToken
  = lens _onclAccessToken
      (\ s a -> s{_onclAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
onclUploadType :: Lens' OrganizationsNotificationConfigsList (Maybe Text)
onclUploadType
  = lens _onclUploadType
      (\ s a -> s{_onclUploadType = a})
-- | The value returned by the last \`ListNotificationConfigsResponse\`;
-- indicates that this is a continuation of a prior
-- \`ListNotificationConfigs\` call, and that the system should return the
-- next page of data.
onclPageToken :: Lens' OrganizationsNotificationConfigsList (Maybe Text)
onclPageToken
  = lens _onclPageToken
      (\ s a -> s{_onclPageToken = a})
-- | The maximum number of results to return in a single response. Default is
-- 10, minimum is 1, maximum is 1000.
onclPageSize :: Lens' OrganizationsNotificationConfigsList (Maybe Int32)
onclPageSize
  = lens _onclPageSize (\ s a -> s{_onclPageSize = a})
      . mapping _Coerce
-- | JSONP
onclCallback :: Lens' OrganizationsNotificationConfigsList (Maybe Text)
onclCallback
  = lens _onclCallback (\ s a -> s{_onclCallback = a})
-- Wires the request record to the servant-style route above; the response
-- type and required OAuth scope are fixed by the API definition.
instance GoogleRequest
           OrganizationsNotificationConfigsList
         where
    type Rs OrganizationsNotificationConfigsList =
         ListNotificationConfigsResponse
    type Scopes OrganizationsNotificationConfigsList =
         '["https://www.googleapis.com/auth/cloud-platform"]
    requestClient
      OrganizationsNotificationConfigsList'{..}
      = go _onclParent _onclXgafv _onclUploadProtocol
          _onclAccessToken
          _onclUploadType
          _onclPageToken
          _onclPageSize
          _onclCallback
          (Just AltJSON)
          securityCenterService
      where go
              = buildClient
                  (Proxy ::
                     Proxy OrganizationsNotificationConfigsListResource)
                  mempty
| brendanhay/gogol | gogol-securitycenter/gen/Network/Google/Resource/SecurityCenter/Organizations/NotificationConfigs/List.hs | mpl-2.0 | 6,441 | 0 | 18 | 1,403 | 883 | 512 | 371 | 130 | 1 |
module Last where
import Prelude hiding (last)
-- | Return the final element of a non-empty list.  Deliberately partial:
-- like 'Prelude.last' it has no clause for the empty list.
last :: [a] -> a
last [x]    = x
last (_:xs) = last xs
| ice1000/OI-codes | codewars/101-200/last.hs | agpl-3.0 | 98 | 0 | 7 | 22 | 54 | 31 | 23 | 5 | 1 |
{-# LANGUAGE TupleSections, LambdaCase #-}
import ViperVM.Parsing.Lisp
import ViperVM.Graph.Graph
import ViperVM.Graph.ParallelReducer
import ViperVM.Graph.Builtins
import Control.Applicative ( (<$>) )
import Control.Monad (replicateM)
import Data.Map as Map
import Data.Dynamic
import Paths_ViperVM
-- | Demo of granularity adaptation: registers mock BLAS-like "kernels"
-- that either execute directly on small data or, for potrf on data larger
-- than 100, rewrite themselves into a recursive blocked (split\/unsplit)
-- Lisp expression that is reduced in parallel.
main :: IO ()
main = do
   let file = "apps/samples/lisp/Sample.lisp"
       kernels = Map.fromList [
          -- Cholesky factorisation: large inputs are rewritten into a
          -- blocked recursive expression instead of being "executed"
          ("potrf", Builtin [True] $ \case
             ([m],[arg]) -> do
                let sz = readData m
                if sz > 100
                   then do
                      splt <- readExpr "(lambda (x) (unsplit (deepseq (cholRec (triangularize (split 2 2 x))))))"
                      return (App splt arg)
                   else do
                      putStrLn ("potrf (" ++ show sz ++ ")")
                      return m
             _ -> error "Invalid parameters"
          ),
          -- the remaining kernels just log and return their first argument
          ("trsm", Builtin [True,True] $ \case
             (args,_) -> do
                let sz = readData (head args)
                putStrLn ("trsm (" ++ show sz ++ ")")
                return (head args)
          ),
          ("syrk", Builtin [True,True] $ \case
             (args,_) -> do
                let sz = readData (head args)
                putStrLn ("syrk (" ++ show sz ++ ")")
                return (head args)
          ),
          ("sgemm", Builtin [True,True,True] $ \case
             (args,_) -> do
                let sz = readData (head args)
                putStrLn ("sgemm (" ++ show sz ++ ")")
                return (head args)
          ),
          -- recombine a list-of-lists of blocks into one datum whose size
          -- is the sum of one row's block sizes
          ("unsplit", Builtin [True] $ \case
             ([List xs],_) -> do
                List ys <- getNodeExprIO (head xs)
                sz <- readData <$> getNodeExprIO (head ys)
                let sz' = sz * (length xs) :: Int
                putStrLn ("unsplit: " ++ show sz ++ " -> " ++ show sz')
                return (Data (toDyn sz'))
             _ -> error "Invalid parameters"
          ),
          -- split a datum into an h x w grid of equally sized blocks
          ("split", Builtin [True,True,True] $ \case
             ([ConstInteger h, ConstInteger w, m],_) -> do
                let sz = readData m :: Int
                    h' = fromIntegral h
                    w' = fromIntegral w
                    sz' = sz `div` w'
                    d' = Data (toDyn sz')
                putStrLn ("split: " ++ show sz ++ " -> " ++ show sz')
                List <$> replicateM h' (newNodeIO . List =<< replicateM w' (newNodeIO d'))
             _ -> error "Invalid parameters"
          )
          ]
   ctx <- readModule =<< readFile =<< getDataFileName file
   let ch = check builtins ctx
       -- a single mock matrix "m" of size 500 (> 100, so potrf rewrites)
       datas = registerData [("m", 500 :: Int)]
       builtins = Map.unions [defaultBuiltins, kernels, datas]
   ch "(potrf m)"
   --ch "(unsplit (split 2 2 m))"
-- | Parse @expr@, reduce it with the given builtins\/context and print the
-- evaluated expression alongside its reduction result.
check :: Map String Builtin -> Map String Node -> String -> IO ()
check builtins ctx expr = do
   r <- readExpr expr
   putStrLn ("Evaluating: " ++ show expr)
   f <- run builtins ctx r
   putStrLn ("Reduction result: " ++ show f)
-- | Wrap plain Haskell values as data 'Builtin's, keyed by their names.
registerData :: Typeable a => [(String,a)] -> Map String Builtin
registerData entries =
   Map.fromList [ (name, asBuiltin v) | (name, v) <- entries ]
  where
   asBuiltin = Builtin [] . const . return . Data . toDyn
-- | Extract the 'Int' payload of a 'Data' node; any other expression is a
-- fatal error (as is a 'Data' node holding a non-Int dynamic).
readData :: Expr -> Int
readData e = case e of
   Data d -> fromDyn d (error "Invalid data")
   _      -> error ("Invalid data parameter: " ++ show e)
| hsyl20/HViperVM | apps/GranularityAdaptation.hs | lgpl-3.0 | 3,313 | 0 | 26 | 1,238 | 1,130 | 568 | 562 | 75 | 5 |
import Data.Char
-- | Consonant row for each digit (0 = w-row, 1 = bare vowel, 2 = k-row, ...).
s :: Int -> String
s n = ["w", "", "k", "s", "t", "n", "h", "m", "y", "r"] !! n

-- | Vowel for each marker letter.
b :: Char -> String
b c = case c of
   'T' -> "a"
   'L' -> "i"
   'U' -> "u"
   'R' -> "e"
   'D' -> "o"
-- |
-- >>> ans1 "0U"
-- ["n","n"]
-- Decode digit\/letter pairs into syllables; the special pair "0U" stands
-- for a double "n".  'ans1' expects a digit next, 'ans2' a vowel letter.
ans1 :: String -> [String]
ans1 [] = []
ans1 ('0':'U':rest) = "n" : "n" : ans1 rest
ans1 (c:rest) = s (digitToInt c) : ans2 rest

ans2 :: String -> [String]
ans2 (c:rest) = b c : ans1 rest
-- | Read one encoded line, decode it and print the concatenated syllables.
main :: IO ()
main = do
   line <- getLine
   putStrLn (concat (ans1 line))
| a143753/AOJ | 2417.hs | apache-2.0 | 395 | 1 | 11 | 111 | 257 | 130 | 127 | 17 | 1 |
module CoinsInARow.A276164Spec (main, spec) where
import Test.Hspec
import CoinsInARow.A276164 (a276164)
-- | Run the A276164 spec standalone.
main :: IO ()
main = hspec spec

-- | Pins the first five values of the sequence against OEIS A276164.
spec :: Spec
spec = describe "A276164" $
    it "correctly computes the first 5 elements" $
      map a276164 [1..5] `shouldBe` expectedValue
  where
    expectedValue = [1, 2, 4, 7, 10]
| peterokagey/haskellOEIS | test/CoinsInARow/A276164Spec.hs | apache-2.0 | 326 | 0 | 8 | 65 | 109 | 62 | 47 | 10 | 1 |
{-
- Copyright (c) 2017 The Agile Monkeys S.L. <hackers@theam.io>
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-}
module HaskellDo.View where
import Prelude hiding (id, div)
import GHCJS.HPlay.View
import Transient.Internals ((**>))
import qualified Ulmus
import HaskellDo.Types
import qualified HaskellDo.Materialize.View as Materialize
import qualified HaskellDo.CodeMirror.View as CodeMirror
import qualified HaskellDo.Compilation.View as Compilation
import qualified HaskellDo.Toolbar.View as Toolbar
import qualified HaskellDo.Toolbar.Types as Toolbar
import qualified HaskellDo.Toolbar.FileSystemTree as FileSystemTree
-- | Top-level layout: one row with the editor column (left) and the output
-- column plus loader overlay (right), then an error-placeholder row and the
-- error display widget.
view :: AppState -> Widget Action
view appState = Ulmus.withWidgets (widgets appState) $
    div ! atr "class" "editor-container" $ do
        Materialize.row $ do
            Materialize.col "s" 6 $
                Ulmus.widgetPlaceholder "editor"
            Materialize.col "s" 6 ! id "outputdiv" $ do
                Ulmus.widgetPlaceholder "outputDisplay"
                loaderOverlay
        Materialize.row $
            Materialize.col "s" 12 $ div ! atr "class" "error-placeholder" $ noHtml
        Ulmus.widgetPlaceholder "errorDisplay"
-- | Dimmed full-screen overlay with a Materialize circular spinner and a
-- "Downloading dependencies" status message (the nested circle\/gap divs
-- are the markup Materialize's preloader requires).
loaderOverlay :: Perch
loaderOverlay =
    div ! atr "class" "dimmedBackground" $
        div ! atr "class" "loader-align center-align" $
            div ! atr "class" "loader-align-inner" $ do
                div ! atr "class" "preloader-wrapper big active" $
                    div ! atr "class" "spinner-layer spinner-blue-only" $ do
                        div ! atr "class" "circle-clipper left" $
                            div ! atr "class" "circle" $ noHtml
                        div ! atr "class" "gap-patch" $
                            div ! atr "class" "circle" $ noHtml
                        div ! atr "class" "circle-clipper right" $
                            div ! atr "class" "circle" $ noHtml
                p ! atr "class" "grey-text center-align" ! atr "id" "dependencyMessage" $ ("Downloading dependencies" :: String)
-- | Assemble every interactive widget of the application.  The toolbar and
-- displays render first; the remaining widgets are composed with '**>' so
-- all of them stay active while events from any of them produce 'Action's.
-- Each helper below wraps a sub-component's widget and lifts its action
-- type into the top-level 'Action'.
widgets :: AppState -> Widget Action
widgets state = do
    Toolbar.toolbar
    Toolbar.creationDisplay (toolbarState state)
    showDisplays state
    codeMirrorWidget
        **> packageTextAreaWidget
        **> openProjectButtonWidget
        **> packageEditorButtonWidget
        **> toggleEditorButtonWidget
        **> toggleErrorButtonWidget
        **> convertToPDFButtonWidget
        **> compileButtonWidget
        **> pathInputWidget
        **> closeModalButtonWidget
        **> closePackageEditorButtonWidget
        **> cancelPackageEditorButtonWidget
        **> fsTreeWidget
        **> modalPrompt "newDirectoryModal" Toolbar.NewDirectory Toolbar.CreateNewDirectory
  where
    modalPrompt id' inputAction buttonAction = Ulmus.mapAction ToolbarAction $
        Toolbar.modalPrompt id' inputAction buttonAction (toolbarState state)
    codeMirrorWidget = Ulmus.newWidget "editor" $
        Ulmus.mapAction CodeMirrorAction $
            CodeMirror.view $ codeMirrorState state
    openProjectButtonWidget = Ulmus.mapAction ToolbarAction $
        Toolbar.openProjectButton (toolbarState state)
    packageEditorButtonWidget = Ulmus.mapAction ToolbarAction $
        Toolbar.packageEditorButton (toolbarState state)
    compileButtonWidget = Ulmus.mapAction ToolbarAction $
        Toolbar.compileButton (toolbarState state)
    toggleEditorButtonWidget = Ulmus.mapAction ToolbarAction $
        Toolbar.toggleEditorButton (toolbarState state)
    toggleErrorButtonWidget = Ulmus.mapAction ToolbarAction $
        Toolbar.toggleErrorButton (toolbarState state)
    convertToPDFButtonWidget = Ulmus.mapAction ToolbarAction $
        Toolbar.convertToPDFButton (toolbarState state)
    pathInputWidget = Ulmus.mapAction ToolbarAction $
        Toolbar.pathInput (toolbarState state)
    packageTextAreaWidget = Ulmus.mapAction ToolbarAction $
        Toolbar.packageTextArea (toolbarState state)
    fsTreeWidget = Ulmus.mapAction ToolbarAction $
        FileSystemTree.widget (toolbarState state)
    closeModalButtonWidget = Ulmus.mapAction ToolbarAction $
        Toolbar.closeModalButton (toolbarState state)
    closePackageEditorButtonWidget = Ulmus.mapAction ToolbarAction $
        Toolbar.closePackageEditorButton (toolbarState state)
    cancelPackageEditorButtonWidget = Ulmus.mapAction ToolbarAction $
        Toolbar.cancelPackageEditorButton (toolbarState state)
-- | Render the compilation output and error displays into their named
-- widget placeholders.
showDisplays :: AppState -> Widget ()
showDisplays state = do
    Ulmus.newWidget "outputDisplay" $ Compilation.outputDisplay (compilationState state)
    Ulmus.newWidget "errorDisplay" $ Compilation.errorDisplay (compilationState state)
-- | Refresh the compilation displays and the toolbar's displays after a
-- state change.
updateDisplays :: AppState -> Widget Action
updateDisplays state = do
    Compilation.updateDisplays (compilationState state)
    Ulmus.mapAction ToolbarAction $
        Toolbar.updateDisplays (toolbarState state)
| J2RGEZ/haskell-do | src/common/HaskellDo/View.hs | apache-2.0 | 5,341 | 0 | 22 | 1,158 | 1,053 | 517 | 536 | 95 | 1 |
module Data.Rational(Euclidean, module Data.Rational) where
import Prelude hiding ((+), (-), negate, (*), (^), (/), gcd, Rational)
import Domains.Euclidean
import Domains.Field
-- | A fraction or ratio between two elements of a Euclidean Ring,
-- e.g. Integer or Polynomial.  Both fields are strict.  Values are kept
-- in canonical form (common factor divided out) by the smart constructor
-- 'rational'; the raw constructor does no normalisation.
data Rational a = Rational !a !a deriving (Eq, Read)

-- | Normalise a numerator/denominator pair and feed the canonical pair to
-- @f@.  The common divisor @g@ carries @sign d@, which normalises the
-- denominator's sign (assuming 'sign'/'gcd' follow the usual Euclidean
-- conventions — defined in "Domains.Euclidean").
canonical :: (Ring a, Euclidean a) => (a -> a -> b) -> a -> a -> b
canonical f n d = let g = sign d * gcd n d in f (divideOrFail n g) (divideOrFail d g)

-- | Smart constructor: build a canonical 'Rational' from numerator and
-- denominator.
rational :: (Ring a, Euclidean a) => a -> a -> Rational a
rational = canonical Rational -- Curried constructor

-- | Embed a ring element as a rational with denominator 'one'.
rational1 :: (Ring a, Euclidean a) => a -> Rational a
rational1 n = rational n one

-- The denominator is hidden when it equals 'one'.
instance (Eq a, Show a, Multiplicative a) => Show (Rational a) where
  show (Rational n d) = if d == one then show n else show n ++ "/" ++ show d

-- Addition by cross-multiplication; 'rational' re-canonicalises the result.
instance (Ring a, Euclidean a) => Additive (Rational a) where
  (Rational n1 d1) + (Rational n2 d2) = rational (n1 * d2 + n2 * d1) (d1 * d2)
  zero = Rational zero one

instance (Ring a, Euclidean a) => Multiplicative (Rational a) where
  (Rational n1 d1) * (Rational n2 d2) = rational (n1 * n2) (d1 * d2)
  one = Rational one one

instance (Ring a, Euclidean a) => Negatable (Rational a) where
  neg (Rational n d) = Rational (neg n) d

-- The empty instances rely on the classes' default method definitions.
instance (Ring a, Euclidean a) => Subtractive (Rational a) where

instance (Ring a, Euclidean a) => Ring (Rational a) where

instance (Ring a, Euclidean a) => Reciprocative (Rational a) where
  reciprocal (Rational n d) = rational d n -- this could be optimised; there is no need for a gcd in this case -- just signum

instance (Ring a, Euclidean a) => Field (Rational a) where
| pmilne/algebra | src/Data/Rational.hs | bsd-3-clause | 1,712 | 2 | 10 | 405 | 724 | 380 | 344 | 30 | 1 |
module Main where
import Protolude
import Database.Alkali
-- | Program entry point; delegates to 'someFunc' from "Database.Alkali".
main :: IO ()
main = someFunc
| nvladimiroff/alkali | app/Main.hs | bsd-3-clause | 90 | 0 | 6 | 16 | 27 | 16 | 11 | 5 | 1 |
{-# LANGUAGE OverloadedStrings #-}
-- ^
-- Constants
module Util.Constants where
import Data.Bson
-- | BSON field label for a record's creation timestamp.
createdAtLabel :: Label
createdAtLabel = "createdAt"

-- | BSON field label for a record's last-update timestamp.
updatedAtLabel :: Label
updatedAtLabel = "updatedAt"

-- | BSON field label for the MongoDB document id.
idLabel :: Label
idLabel = "_id"
-- ^
-- Maximum number of results returned from a search or query
-- For elastic search the default for an index is 10000
-- The index max_result_window value must be increased to allow for
-- higher values
-- curl -XPUT "http://localhost:9200/kapi-xandar/_settings" -d '{ "index" : { "max_result_window" : 500000 } }'
maxResultsSize :: Int
maxResultsSize = 10000 | gabesoft/kapi | src/Util/Constants.hs | bsd-3-clause | 606 | 0 | 4 | 94 | 60 | 40 | 20 | 11 | 1 |
module Main where
import System.Environment
import MarketUtil
import Thera
import SDEDrill
-- | Dispatch on an (action, target) pair taken from the command line.
-- Exactly two arguments are expected; any other arity makes the list
-- pattern bind fail (MonadFail in IO).
main :: IO ()
main = do
  [action, target] <- getArgs
  case (action, target) of
    ("report", "orders") -> reportSellAndBuyOrderStatus
    ("report", "sigs") -> reportUnscannedSigID
    ("report", _) -> putStrLn "Unknown target"
    ("gen", "allSystemsMap") -> genAllSolarSystemsMap
    ("gen", _) -> putStrLn "Unknown target"
    _ -> putStrLn "Unknown action"
| Frefreak/Gideon | app/Main.hs | bsd-3-clause | 528 | 0 | 13 | 173 | 118 | 61 | 57 | 17 | 6 |
module Aws.Swf.Commands.ListDomains (
DomainInfo (..),
ListDomains (..),
ListDomainsResponse (..)
) where
import Data.ByteString as B
import Control.Monad (mzero)
import Control.Applicative ((<*>), (<$>))
import Data.Aeson (Value (Object) , ToJSON (..), FromJSON (..),
object, (.=), (.:), (.:?))
import Aws.Core (Transaction, ResponseConsumer (..), AsMemoryResponse (..))
import Aws.Core.SignClass (SignQuery (..))
import qualified Aws.Core.Sign as S (ServiceConfiguration)
import qualified Aws.Core.Sign3 as SIG3 (signRequest)
import Aws.Swf.Sign (swfRequest)
import Aws.Swf.Response (SwfMetadata, swfResponseConsumer, jsonConsumer)
import qualified Data.Text as T
target :: B.ByteString
target = "com.amazonaws.swf.service.model.SimpleWorkflowService.ListDomains"
data ListDomains = ListDomains { registrationStatus :: T.Text,
maximumPageSize :: Int,
reverseOrder :: Bool,
nextPageTokenReq :: Maybe T.Text }
deriving (Show, Eq)
data DomainInfo = DomainInfo {
description :: T.Text,
name :: T.Text,
status :: T.Text
} deriving (Show, Eq)
data ListDomainsResponse =
ListDomainsResponse { domainInfo :: [DomainInfo],
nextPageToken :: Maybe T.Text
} deriving Show
instance ToJSON ListDomains where
toJSON (ListDomains registrationStatus maximumPageSize reverseOrder nextPageToken) =
object [ "maximumPageSize" .= maximumPageSize,
"nextPageToken" .= nextPageToken,
"registrationStatus" .= registrationStatus,
"reverseOrder" .= reverseOrder ]
instance FromJSON DomainInfo where
parseJSON (Object o) =
DomainInfo <$>
o .: "description" <*>
o .: "name" <*>
o .: "status"
parseJSON _ = mzero
instance FromJSON ListDomainsResponse where
parseJSON (Object o) = ListDomainsResponse <$>
o .: "domainInfos" <*>
o .:? "nextPageToken"
parseJSON _ = mzero
instance SignQuery ListDomains where
type ServiceConfiguration ListDomains = S.ServiceConfiguration
signQuery lstdoms = SIG3.signRequest $ swfRequest target $ toJSON lstdoms
instance ResponseConsumer ListDomains ListDomainsResponse where
type ResponseMetadata ListDomainsResponse = SwfMetadata
responseConsumer _ mref = swfResponseConsumer mref $ \rsp -> jsonConsumer rsp
instance Transaction ListDomains ListDomainsResponse
instance AsMemoryResponse ListDomainsResponse where
type MemoryResponse ListDomainsResponse = ListDomainsResponse
loadToMemory = return
| RayRacine/aws | Aws/Swf/Commands/ListDomains.hs | bsd-3-clause | 2,833 | 0 | 11 | 791 | 630 | 370 | 260 | -1 | -1 |
module Astro.DefaultData where
import Astro
import Astro.Place.ReferenceEllipsoid
import Astro.Time hiding (taiToUT1, ut1ToTAI)
import qualified Astro.Time.Barycentric.Kaplan2005 as TDB
import IAU2000.Nutation
import qualified IAU2000.Equations as Eq
import Data.Time.Clock.AnnouncedLeapSeconds (lst)
import Data.Default
defaultTimeData :: Floating a => TimeData a
defaultTimeData = TimeData
{ leapSecondMap = lst
, taiToUT1 = coerceE -- Astro.Time.taiToUT1 (const 0) ??
, ut1ToTAI = coerceE
, ttToTDB = TDB.ttToTDB
, tdbToTT = TDB.tdbToTT
}
defaultNutationModel :: Floating a => NutationModel a
defaultNutationModel = NutationModel
{ angles = nutationAngles2000A
, equationOfEquinoxes = Eq.equationOfEquinoxes
}
defaultAstroData :: Floating a => AstroData a
defaultAstroData = AstroData
{ time = defaultTimeData
, nutation = defaultNutationModel
, refEllipsoid = iers2003
}
-- TODO: How do I best avoid these being orphan instances?
instance Floating a => Default (TimeData a) where def = defaultTimeData
instance Floating a => Default (NutationModel a) where def = defaultNutationModel
instance Floating a => Default (AstroData a) where def = defaultAstroData
| bjornbm/astro | src/Astro/DefaultData.hs | bsd-3-clause | 1,222 | 0 | 7 | 211 | 281 | 164 | 117 | 28 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Network.Linode where
import Control.Applicative ((<$>), (<*>))
import Control.Monad (mzero)
import Data.Aeson
import Data.Attoparsec.Lazy (parse, Result(..))
import qualified Data.ByteString.Char8 as B
import Data.Text (Text)
import Network.HTTP.Enumerator
-- | Build a POST request for the Linode API from an API key and parameters.
-- The API key can be obtained from a Linode user's profile
-- (https://manager.linode.com/profile/).
apiRequest :: String -> [(B.ByteString, B.ByteString)] -> IO (Request IO)
apiRequest apiKey parameters = do
  request <- parseUrl "https://api.linode.com/"
  -- All calls go to the single API endpoint; the api_key is sent as just
  -- another form field alongside the caller-supplied parameters.
  let request' = flip urlEncodedBody request $
        ("api_key", B.pack apiKey) : parameters
  return request'

-- | Post a request built with `apiRequest` to Linode and return an Aeson
-- value.  Returns Nothing when the HTTP body is not valid JSON or does
-- not parse as an 'ApiResponse' envelope.
apiCall' :: String -> [(B.ByteString, B.ByteString)] -> IO (Maybe ApiResponse)
apiCall' apiKey parameters = do
  request <- apiRequest apiKey parameters
  Response{..} <- withManager $ httpLbs request
  case parse json responseBody of
    Done _ value -> do
      case fromJSON value of
        Success apiResponse -> do
          return $ Just apiResponse
        _ -> return Nothing
    _ -> return Nothing

-- | Similar to `apiCall'` but extracts the `apiResponseData`,
-- parses it (using `fromJSON`), and returns the result.
apiCall :: FromJSON a =>
  String -> [(B.ByteString, B.ByteString)] -> IO (Maybe a)
apiCall apiKey parameters = do
  mApiResponse <- apiCall' apiKey parameters
  -- print mApiResponse
  case (fromJSON . apiResponseData) <$> mApiResponse of
    Just (Success x) -> return $ Just x
    _ -> do
      return Nothing
-- | List domains the API key has access to.
domainList :: String -> IO (Maybe [Domain])
domainList apiKey = apiCall apiKey [("api_action", "domain.list")]

-- | List the resources (DNS records) associated to a domain.
domainResourceList :: String -> Int -> IO (Maybe [Resource])
domainResourceList apiKey domainId = apiCall apiKey
  [ ("api_action", "domain.resource.list")
  , ("DomainID", B.pack $ show domainId)
  ]

-- | Create an A record pointing @fqdn@ at @target@.
domainResourceCreateA :: String -> Int -> String -> String -> IO (Maybe ResourceId)
domainResourceCreateA apiKey domainId fqdn target = apiCall apiKey
  [ ("api_action", "domain.resource.create")
  , ("DomainID", B.pack $ show domainId)
  , ("Type", "A")
  , ("Name", B.pack fqdn)
  , ("Target", B.pack target)
  ]

-- | Create a CNAME record aliasing @fqdn@ to @target@.
domainResourceCreateCNAME :: String -> Int -> String -> String -> IO (Maybe ResourceId)
domainResourceCreateCNAME apiKey domainId fqdn target = apiCall apiKey
  [ ("api_action", "domain.resource.create")
  , ("DomainID", B.pack $ show domainId)
  , ("Type", "CNAME")
  , ("Name", B.pack fqdn)
  , ("Target", B.pack target)
  ]

-- | Delete a domain record by domain and resource id.
domainResourceDelete :: String -> Int -> Int -> IO (Maybe ResourceId)
domainResourceDelete apiKey domainId resourceId = apiCall apiKey
  [ ("api_action", "domain.resource.delete")
  , ("DomainID", B.pack $ show domainId)
  , ("ResourceID", B.pack $ show resourceId)
  ]
-- | Represent a domain, mirroring the JSON object returned by the Linode
-- API; each record field corresponds to the upper-case JSON key parsed in
-- the 'FromJSON' instance below.
data Domain = Domain
  { domainId :: Int
  , domainDescription :: Text
  , domainType :: Text
  , domainStatus :: Int
  , domainSoaEmail :: Text
  , domainDomain :: Text
  , domainRetrySec :: Int
  , domainMasterIps :: Text
  , domainExpireSec :: Int
  , domainRefreshSec :: Int
  , domainTtlSec :: Int
  }
  deriving Show

instance FromJSON Domain where
  parseJSON (Object v) = Domain <$>
    v .: "DOMAINID" <*>
    v .: "DESCRIPTION" <*>
    v .: "TYPE" <*>
    v .: "STATUS" <*>
    v .: "SOA_EMAIL" <*>
    v .: "DOMAIN" <*>
    v .: "RETRY_SEC" <*>
    v .: "MASTER_IPS" <*>
    v .: "EXPIRE_SEC" <*>
    v .: "REFRESH_SEC" <*>
    v .: "TTL_SEC"
  parseJSON _ = mzero

-- | Represent a domain resource (a single DNS record).
data Resource = Resource
  { resourceProtocol :: Text
  , resourceTtlSec :: Int
  , resourcePriority :: Int
  , resourceType :: Text
  , resourceTarget :: Text
  , resourceWeight :: Int
  , resourceId :: Int
  , resourcePort :: Int
  , resourceDomainId :: Int
  , resourceName :: Text
  }
  deriving Show

instance FromJSON Resource where
  parseJSON (Object v) = Resource <$>
    v .: "PROTOCOL" <*>
    v .: "TTL_SEC" <*>
    v .: "PRIORITY" <*>
    v .: "TYPE" <*>
    v .: "TARGET" <*>
    v .: "WEIGHT" <*>
    v .: "RESOURCEID" <*>
    v .: "PORT" <*>
    v .: "DOMAINID" <*>
    v .: "NAME"
  parseJSON _ = mzero

-- | Represent a domain resource ID, e.g. the result of
-- domain.resource.create.
data ResourceId = ResourceId Int
  deriving Show

instance FromJSON ResourceId where
  parseJSON (Object v) = ResourceId <$>
    v .: "ResourceID"
  parseJSON _ = mzero

-- | Represent a Linode response in a slightly more structured data type than
-- just JSON.  The payload under "DATA" is kept as raw JSON and decoded
-- later by 'apiCall'.
data ApiResponse = ApiResponse
  { apiResponseErrors :: [Value]
  , apiResponseAction :: Text
  , apiResponseData :: Value
  }
  deriving Show

instance FromJSON ApiResponse where
  parseJSON (Object v) = ApiResponse <$>
    v .: "ERRORARRAY" <*>
    v .: "ACTION" <*>
    v .: "DATA"
  parseJSON _ = mzero
| noteed/hlinode | Network/Linode.hs | bsd-3-clause | 5,162 | 0 | 27 | 1,066 | 1,398 | 761 | 637 | 129 | 3 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module Feldspar.Core.Constructs.Elements where
import Language.Syntactic
import Feldspar.Lattice (universal)
import Feldspar.Core.Types
import Feldspar.Core.Interpretation
import Feldspar.Core.Constructs.Binding
import Data.List (genericTake, sortBy)
import Data.Function (on)
-- | Symbols of Feldspar's @Elements@ write/parallel construct language:
-- computations that emit (index, value) pairs, later materialized into an
-- array.
data ElementsFeat a
  where
    EMaterialize :: Type a => ElementsFeat (Length :-> Elements a :-> Full [a])
    EWrite :: Type a => ElementsFeat (Index :-> a :-> Full (Elements a))
    ESkip :: Type a => ElementsFeat (Full (Elements a))
    EPar :: Type a => ElementsFeat (Elements a :-> Elements a :-> Full (Elements a))
    EparFor :: Type a => ElementsFeat (Length :-> (Index -> Elements a) :-> Full (Elements a))

-- Names and evaluation semantics of each construct: a write is a singleton
-- list of (index, value), skip is empty, par concatenates.
instance Semantic ElementsFeat
  where
    semantics EMaterialize = Sem "materialize" ematerialize
    semantics EWrite = Sem "write" (\ix e -> Elements [(ix, e)])
    semantics ESkip = Sem "skip" (Elements [])
    semantics EPar = Sem "par" (\(Elements l) (Elements r) -> Elements (l ++ r))
    semantics EparFor = Sem "parFor" eparFor

instance Typed ElementsFeat
  where
    typeDictSym _ = Nothing
-- | Turn a bag of writes into a plain list: order the (index, value)
-- pairs by index, keep the first @len@ of them, and drop the indices.
ematerialize :: Length -> Elements a -> [a]
ematerialize len (Elements writes) =
    [ v | (_, v) <- genericTake len (sortBy (compare `on` fst) writes) ]
-- | Run the body at indices @0, 1, ...@ (the first @len@ of them) and
-- merge all produced writes into a single 'Elements' value.
eparFor :: Length -> (Index -> Elements a) -> Elements a
eparFor len body = Elements (concat [ ws | Elements ws <- results ])
    where results = map body (genericTake len [0..])
-- Template Haskell: derive the standard instances implied by 'Semantic'.
semanticInstances ''ElementsFeat

instance EvalBind ElementsFeat where evalBindSym = evalBindSymDefault

instance AlphaEq dom dom dom env => AlphaEq ElementsFeat ElementsFeat dom env
  where
    alphaEqSym = alphaEqSymDefault

instance Sharable ElementsFeat

instance Cumulative ElementsFeat

-- Size propagation: only materialize has a known size (that of the
-- element computation); every other construct is unbounded.
instance SizeProp ElementsFeat
  where
    sizeProp EMaterialize (WrapFull len :* WrapFull arr :* Nil) = infoSize arr
    sizeProp EWrite _ = universal
    sizeProp ESkip _ = universal
    sizeProp EPar (WrapFull p1 :* WrapFull p2 :* Nil) = universal -- TODO: p1 U p2
    sizeProp EparFor _ = universal

-- Optimisation: @par@ with a @skip@ on either side collapses to the other
-- side; everything else falls through to the default construction.
instance ( ElementsFeat :<: dom
         , OptimizeSuper dom
         )
      => Optimize ElementsFeat dom
  where
    constructFeatOpt _ EPar (a :* b :* Nil)
        | Just ESkip <- prj b = return a
        | Just ESkip <- prj a = return b
    constructFeatOpt opts a args = constructFeatUnOpt opts a args

    constructFeatUnOpt opts EMaterialize = constructFeatUnOptDefaultTyp opts typeRep EMaterialize
    constructFeatUnOpt opts EWrite = constructFeatUnOptDefaultTyp opts typeRep EWrite
    constructFeatUnOpt opts ESkip = constructFeatUnOptDefaultTyp opts typeRep ESkip
    constructFeatUnOpt opts EPar = constructFeatUnOptDefaultTyp opts typeRep EPar
    constructFeatUnOpt opts EparFor = constructFeatUnOptDefaultTyp opts typeRep EparFor
| emwap/feldspar-language | src/Feldspar/Core/Constructs/Elements.hs | bsd-3-clause | 3,219 | 0 | 12 | 775 | 919 | 464 | 455 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
-- | GetStarted page controller.
module HL.Controller.GetStarted where
import HL.Controller
import HL.View.GetStarted
import HL.View
-- | GetStarted controller: render the generic page (no OS preselected).
getGetStartedR :: C (Html ())
getGetStartedR = lucid (getStarted Nothing)

-- | GetStarted controller, tailored to the given operating system.
getGetStartedOSR :: OS -> C (Html ())
getGetStartedOSR os = lucid (getStarted (Just os))
| haskell-lang/haskell-lang | src/HL/Controller/GetStarted.hs | bsd-3-clause | 392 | 0 | 9 | 55 | 100 | 55 | 45 | 9 | 1 |
{-# LANGUAGE MultiParamTypeClasses, FunctionalDependencies #-}
{-# LANGUAGE FlexibleContexts, FlexibleInstances #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE OverlappingInstances #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE EmptyDataDecls #-}
{-# LANGUAGE CPP #-}
module Narradar.Constraints.UsableRules where
import Control.Applicative
import Control.Exception
import Control.Monad
import Data.Foldable as F (toList)
import Data.Monoid
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Traversable as T (Traversable, mapM)
import Data.Term
import Data.Term.Rules
import Narradar.Constraints.ICap
import Narradar.Framework
import Narradar.Types.Term
import Narradar.Types.Var
import Narradar.Types.ArgumentFiltering as AF (AF_, ApplyAF(..))
-- | Computation of the usable rules of a TRS w.r.t. a set of pairs.
-- 'iUsableRulesM' restricts the rules needed to evaluate the given terms;
-- 'iUsableRulesVarM' gives the rules demanded by a single variable.
class (Rename v, Monoid trs) => IUsableRules t v typ trs where
    iUsableRulesM :: MonadVariant v m => typ -> trs -> trs -> [Term t v] -> m trs
    iUsableRulesVarM :: MonadVariant v m => typ -> trs -> trs -> v -> m(Set (Rule t v))

-- | Type-level tag used only to select an instance; never inhabited.
data Proxy a
proxy = undefined

-- | Adapt an instance working on a TRS type @trs@ to plain rule lists;
-- the 'Proxy' fixes which @trs@ instance is used.
deriveUsableRulesFromTRS :: forall t v typ trs m.
                            (IUsableRules t v typ trs, IsTRS t v trs, MonadVariant v m) =>
                            Proxy trs -> typ -> [Rule t v] -> [Rule t v] -> [Term t v] -> m [Rule t v]
deriveUsableRulesFromTRS _ typ r p = liftM rules . iUsableRulesM typ (tRS r :: trs) (tRS p :: trs)

-- | Variable-position analogue of 'deriveUsableRulesFromTRS'.
deriveUsableRulesVarFromTRS :: forall t v typ trs m.
                               (IUsableRules t v typ trs, IsTRS t v trs, MonadVariant v m) =>
                               Proxy trs -> typ -> [Rule t v] -> [Rule t v] -> v -> m (Set(Rule t v))
deriveUsableRulesVarFromTRS _ typ r p = iUsableRulesVarM typ (tRS r :: trs) (tRS p :: trs)

-- | Pure wrapper: replace the rules of a DP problem by the ones usable
-- for the given terms ('runIcap' supplies the fresh-variable context).
iUsableRules :: ( p ~ Problem typ
                , Ord (Term t v), Enum v, Rename v
                , MkProblem typ trs, IsDPProblem typ, Traversable p
                , IsTRS t v trs, GetVars v trs, IUsableRules t v typ trs
                ) =>
                p trs -> [Term t v] -> p trs
iUsableRules p = runIcap p . iUsableRulesMp p

-- | Pure wrapper for the single-variable case.
iUsableRulesVar :: ( p ~ Problem typ
                   , Ord (Term t v), Enum v, Rename v
                   , IsDPProblem typ, Traversable p
                   , IsTRS t v trs, GetVars v trs, IUsableRules t v typ trs
                   ) =>
                   p trs -> v -> Set(Rule t v)
iUsableRulesVar p = runIcap p . iUsableRulesVarMp p

-- Variant taking the rules and pairs separately instead of a problem.
iUsableRules3 typ trs dps = runIcap (getVars trs `mappend` getVars dps) . iUsableRulesM typ trs dps

-- | Monadic version on problems: computes the usable rules and stores
-- them back into the problem with 'setR'.
iUsableRulesMp ::
    (MkProblem typ trs,
     IsDPProblem typ,
     IUsableRules t v typ trs,
     MonadVariant v m) =>
    Problem typ trs -> [Data.Term.Term t v] -> m (Problem typ trs)
iUsableRulesMp p tt = do { trs' <- iUsableRulesM (getFramework p) (getR p) (getP p) tt
                         ; return $ setR trs' p}

iUsableRulesVarMp p = iUsableRulesVarM (getFramework p) (getR p) (getP p)

-- Delegate to the base framework of an extended problem type.
liftUsableRulesM typ trs dps = iUsableRulesM (getBaseFramework typ) trs dps
liftUsableRulesVarM typ trs dps = iUsableRulesVarM (getBaseFramework typ) trs dps
-- ----------------------
-- Implementations
-- ----------------

-- | ICap-based usable rules: repeatedly cap the subterms being evaluated,
-- collect every rule whose left-hand side unifies with a capped term, and
-- recurse into fresh copies of the collected right-hand sides.  @vk@
-- supplies the rules demanded by a variable position.
f_UsableRules :: forall term vk acc t v trs typ problem m.
                 ( Ord (Term t v), Unify t, Ord v
                 , problem ~ (typ, trs)
                 , term ~ Term t v
                 , vk ~ (v -> m acc)
                 , acc ~ Set (Rule t v)
                 , HasRules t v trs, GetVars v trs
                 , ICap t v problem
                 , MonadVariant v m
                 ) =>
                 problem -> vk -> [term] -> m acc
-- Precondition (checked with 'assert'): the input terms share no
-- variables with the TRS.
f_UsableRules p@(_,trs) _ tt | assert (Set.null (getVars trs `Set.intersection` getVars tt)) False = undefined
f_UsableRules p@(_,trs) vk tt = go mempty tt where
  go acc [] = return acc
  -- Variables are handled by the caller-supplied continuation @vk@.
  go acc (t:rest) = evalTerm (\v -> vk v >>= \vacc -> go (vacc `mappend` acc) rest) tk t where
    tk :: t (Term t v) -> m acc
    tk in_t = do
      -- Cap the arguments, then pick the rules whose lhs unifies with
      -- the capped term; only the not-yet-seen ones are explored.
      t' <- wrap `liftM` (icap p `T.mapM` in_t)
      let rr = [ rule | rule@(l:->r) <- rules trs, not(isVar l), l `unifies` t']
          new = Set.difference (Set.fromList rr) acc
      rhsSubterms <- getFresh (rhs <$> F.toList new)
      go (new `mappend` acc) (mconcat [rhsSubterms, directSubterms t, rest])
-- | Usable rules modulo an argument filtering @pi@: rules and right-hand
-- sides are filtered with 'AF.apply' before capping and unification, so
-- only the argument positions kept by the filtering count.
f_UsableRulesAF :: forall term vk acc t id v trs typ problem m.
                   ( problem ~ (typ,trs)
                   , term ~ Term t v
                   , vk ~ (v -> m acc)
                   , acc ~ Set (Rule t v)
                   , id ~ AFId trs, AFId term ~ id, Ord id
                   , Ord (Term t v), Unify t, Ord v, ApplyAF term
                   , HasRules t v trs, ApplyAF trs, GetVars v trs
                   , ICap t v problem
                   , MonadVariant v m
                   ) =>
                   problem -> AF_ id -> vk -> [term] -> m acc
-- Precondition (checked with 'assert'): the input terms share no
-- variables with the TRS.
f_UsableRulesAF p@(typ,trs) _ _ tt | assert (Set.null (getVars trs `Set.intersection` getVars tt)) False = undefined
f_UsableRulesAF p@(typ,trs) pi vk tt = go mempty tt where
  -- Filtered lhs paired with the original (unfiltered) rule, so the
  -- result set contains the original rules.
  pi_rules = [(AF.apply pi r, r) | r <- rules trs]
  pi_trs = AF.apply pi trs
  --go acc (t:_) | trace ("usableRules acc=" ++ show acc ++ ", t=" ++ show t) False = undefined
  go acc [] = return acc
  go acc (t:rest) = evalTerm (\v -> vk v >>= \vacc -> go (vacc `mappend` acc) rest) tk t where
    tk in_t = do
      t' <- wrap `liftM` (icap (typ, pi_trs) `T.mapM` in_t)
      let rr = Set.fromList
                [rule | (l:->r, rule) <- pi_rules, not (isVar l), t' `unifies` l]
          new = Set.difference rr acc
      rhsSubterms <- getFresh (AF.apply pi . rhs <$> F.toList new)
      go (new `mappend` acc) (mconcat [rhsSubterms, directSubterms t, rest])
-- ----------------
-- Needed Rules
-- ----------------

-- | Needed-rules analogue of 'IUsableRules'.
class (Rename v, Monoid trs) => NeededRules t v typ trs | trs -> t v where
    neededRulesM :: MonadVariant v m => typ -> trs -> trs -> [Term t v] -> m trs

-- We lift the needed rules automatically to extended framework types.
instance (FrameworkExtension ext, NeededRules t v base trs) => NeededRules t v (ext base) trs
  where neededRulesM typ trs dps = neededRulesM (getBaseFramework typ) trs dps

-- | Pure wrapper: replace a problem's rules by the ones needed for the
-- given terms.
neededRules :: ( p ~ Problem typ
               , Ord (Term t v), Enum v, Rename v
               , MkProblem typ trs, IsDPProblem typ, Traversable p
               , IsTRS t v trs, GetVars v trs, NeededRules t v typ trs
               ) =>
               p trs -> [Term t v] -> p trs
neededRules p tt = runIcap p $ do
    trs' <- neededRulesM (getFramework p) (getR p) (getP p) tt
    return $ setR trs' p
| pepeiborra/narradar | src/Narradar/Constraints/UsableRules.hs | bsd-3-clause | 6,617 | 0 | 19 | 2,064 | 2,522 | 1,311 | 1,211 | -1 | -1 |
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FunctionalDependencies #-}
{- This module contains some recursion schemes designed for use with mutually
recursive ADT bifunctors.
Taking ideas from the recursion-shemes package which defines recursion schemes
for recursive functors we define cata and cataM for mutually
recursive bifunctors.
The approach is based on ideas from:
1. 'Generic Programming with Fixed Points for Mutually Recursive Datatypes' available at
http://users.eecs.northwestern.edu/~clk800/rand-test-study/_gpwfpfmrd/gpwfpfmrd-2009-10-8-12-02-00.pdf
2. 'Designing and Implementing Combinator Languages' available at
http://www.staff.science.uu.nl/~swier101/Papers/1999/AFP3.pdf -}
module Data.Bifunctor.Foldable where
import Data.Bifunctor hiding (second)
import Data.Bitraversable
import Control.Monad ((<=<))
-- | Fixed point of a pair of mutually recursive bifunctors: a value of
-- @Fix2 f g@ is an @f@ layer whose left positions recurse at @Fix2 f g@
-- and whose right positions recurse at the other fixed point @Fix2 g f@.
newtype Fix2 f g = Fix2 { unFix :: (f (Fix2 f g) (Fix2 g f)) }

-- The base functor of two mutually recursive fixed points
type family Base t q :: (* -> * -> *)
type instance Base (Fix2 f g) (Fix2 g f) = f

instance Show (f (Fix2 f g) (Fix2 g f)) => Show (Fix2 f g) where
    showsPrec n x = showsPrec 11 (unFix x)
    -- NOTE: For readablity the Fix2 constructor is intentionally not shown.

instance Eq (f (Fix2 f g) (Fix2 g f)) => Eq (Fix2 f g) where
    a == b = (unFix a) == (unFix b)

-- | Types that can be unrolled one layer into their base bifunctor.
class (Bifunctor (Base t q)) => Birecursive t q | t -> q where
    project :: t -> (Base t q) t q

instance (Bifunctor f) => Birecursive (Fix2 f g) (Fix2 g f) where
    project = unFix
-- | Mutually recursive catamorphism: fold a pair of mutually recursive
-- fixed points with one algebra per base bifunctor.  Each step unrolls
-- one layer with 'project', folds the children (swapping roles on the
-- right positions), and applies the matching algebra.
bicata :: (Birecursive x z, Birecursive z x)
       => ((Base x z) a b -> a)
       -> ((Base z x) b a -> b)
       -> x
       -> a
bicata fAlg gAlg = goF
  where
    goF t = fAlg (bimap goF goG (project t))
    goG t = gAlg (bimap goG goF (project t))
-- | 'bicata' threaded with an inherited attribute @p@: @ftop@/@gtop@
-- update the attribute at each node before its children are folded.
bicataP :: (Birecursive x z, Birecursive z x)
        => ((p -> (Base x z) a b -> a), x -> p -> p)
        -> ((p -> (Base z x) b a -> b), z -> p -> p)
        -> p
        -> x
        -> a
bicataP (falgP, ftop) (galgP, gtop) =
  fcataP
  where fcataP p fp =
          let p' = ftop fp p
          in falgP p' $ bimap (fcataP p') (gcataP p') $ project fp
        gcataP p fp =
          let p' = gtop fp p
          in galgP p' $ bimap (gcataP p') (fcataP p') $ project fp

-- | Monadic mutually recursive catamorphism: children are folded with
-- 'bimapM' and the result is fed to the monadic algebra.
bicataM :: (Birecursive x z, Birecursive z x)
        => (Bitraversable (Base x z), Bitraversable (Base z x))
        => (Monad m)
        => ((Base x z) a b -> m a)
        -> ((Base z x) b a -> m b)
        -> x
        -> m a
bicataM falgM galgM =
  fcataM
  where fcataM = falgM <=< (bimapM fcataM gcataM) . project
        gcataM = galgM <=< (bimapM gcataM fcataM) . project

-- | Monadic 'bicataP': inherited attribute plus monadic algebras.
bicataPM :: (Birecursive x z, Birecursive z x)
         => (Bitraversable (Base x z), Bitraversable (Base z x))
         => (Monad m)
         => ((p -> (Base x z) a b -> m a), x -> p -> p)
         -> ((p -> (Base z x) b a -> m b), z -> p -> p)
         -> p
         -> x
         -> m a
bicataPM (falgPM, ftop) (galgPM, gtop) =
  fcataPM
  where fcataPM p fp =
          let p' = ftop fp p
          in (bimapM (fcataPM p') (gcataPM p') $ project fp) >>= falgPM p'
        gcataPM p fp =
          let p' = gtop fp p
          in (bimapM (gcataPM p') (fcataPM p') $ project fp) >>= galgPM p'

-- | Mutually recursive paramorphism: the algebras see each original
-- subterm paired with its folded result (via @(,) <*> fpara@, i.e.
-- @\t -> (t, fpara t)@).
bipara :: (Birecursive x z, Birecursive z x)
       => ((Base x z) (x, a) (z, b) -> a)
       -> ((Base z x) (z, b) (x, a) -> b)
       -> x
       -> a
bipara falg galg =
  fpara
  where fpara =
          falg . (bimap ((,) <*> fpara) ((,) <*> gpara)) . project
        gpara =
          galg . (bimap ((,) <*> gpara) ((,) <*> fpara)) . project
-- | Pair an argument with the functorial result of applying @f@ to it:
-- @applyLeft f x == fmap (\y -> (x, y)) (f x)@.
applyLeft :: Functor f => (a -> f b) -> a -> f (a, b)
applyLeft f x = fmap (\y -> (x, y)) (f x)
-- | 'bipara' threaded with an inherited attribute (see 'bicataP').
biparaP :: (Birecursive x z, Birecursive z x)
        => ((p -> (Base x z) (x, a) (z, b) -> a), x -> p -> p)
        -> ((p -> (Base z x) (z, b) (x, a) -> b), z -> p -> p)
        -> p
        -> x
        -> a
biparaP (falgP, ftop) (galgP, gtop) =
  fparaP
  where fparaP p fp =
          let p' = ftop fp p
          in falgP p' $ bimap ((,) <*> (fparaP p')) ((,) <*> (gparaP p')) $ project fp
        gparaP p fp =
          let p' = gtop fp p
          in galgP p' $ bimap ((,) <*> (gparaP p')) ((,) <*> (fparaP p')) $ project fp

-- | Monadic mutually recursive paramorphism; 'applyLeft' keeps the
-- original subterm alongside its (monadically) folded result.
biparaM :: (Birecursive x z, Birecursive z x)
        => (Bitraversable (Base x z), Bitraversable (Base z x))
        => (Monad m)
        => ((Base x z) (x, a) (z, b) -> m a)
        -> ((Base z x) (z, b) (x, a) -> m b)
        -> x
        -> m a
biparaM falgM galgM =
  fparaM
  where fparaM = falgM <=< (bimapM (applyLeft fparaM) (applyLeft gparaM)) . project
        gparaM = galgM <=< (bimapM (applyLeft gparaM) (applyLeft fparaM)) . project

-- | Monadic 'biparaP': inherited attribute plus monadic paramorphic
-- algebras.  (The local names say "cata" but this is the paramorphism.)
biparaPM :: (Birecursive x z, Birecursive z x)
         => (Bitraversable (Base x z), Bitraversable (Base z x))
         => (Monad m)
         => ((p -> (Base x z) (x, a) (z, b) -> m a), x -> p -> p)
         -> ((p -> (Base z x) (z, b) (x, a) -> m b), z -> p -> p)
         -> p
         -> x
         -> m a
biparaPM (falgPM, ftop) (galgPM, gtop) =
  fcataPM
  where fcataPM p fp =
          let p' = ftop fp p
          in ((bimapM (applyLeft $ fcataPM p') (applyLeft $ gcataPM p')) $ project fp) >>= falgPM p'
        gcataPM p fp =
          let p' = gtop fp p
          in ((bimapM (applyLeft $ gcataPM p') (applyLeft $ fcataPM p')) $ project fp) >>= galgPM p'
| dorchard/gram_lang | frontend/src/Data/Bifunctor/Foldable.hs | bsd-3-clause | 5,527 | 0 | 16 | 1,744 | 2,397 | 1,248 | 1,149 | -1 | -1 |
-- Part of `Typing Haskell in Haskell', version of November 23, 2000
-- Copyright (c) Mark P Jones and the Oregon Graduate Institute
-- of Science and Technology, 1999-2000
--
-- This program is distributed as Free Software under the terms
-- in the file "License" that is included in the distribution
-- of this software, copies of which may be obtained from:
-- http://www.cse.ogi.edu/~mpj/thih/
--
-- modified by irori <irorin@gmail.com>
module Type where
import Data.List(nub, (\\), intersect, union, partition)
import Control.Monad(msum)
import Syntax
-- | Name of the @n@-th machine-generated type variable.
enumId :: Int -> Id
enumId n = "v" ++ show n

-- Substitutions

-- | The empty substitution.
nullSubst :: Subst
nullSubst = []

-- | Singleton substitution mapping variable @u@ to type @t@.
(+->) :: Tyvar -> Type -> Subst
u +-> t = [(u, t)]

infixr 4 @@
-- | Composition: applying @s1 \@\@ s2@ is applying @s2@ then @s1@.
(@@) :: Subst -> Subst -> Subst
s1 @@ s2 = [ (u, apply s1 t) | (u,t) <- s2 ] ++ s1

-- | Symmetric merge of two substitutions; fails unless they agree on all
-- variables they have in common.
merge :: Monad m => Subst -> Subst -> m Subst
merge s1 s2 = if agree then return (s1++s2) else fail "merge fails"
  where agree = all (\v -> apply s1 (TVar v) == apply s2 (TVar v))
                    (map fst s1 `intersect` map fst s2)

-- Unification

-- | Most general unifier of two types; type synonyms are expanded on
-- demand with 'unsynonym'.
mgu :: Monad m => Type -> Type -> m Subst
varBind :: Monad m => Tyvar -> Type -> m Subst

mgu (TAp l r) (TAp l' r') = do s1 <- mgu l l'
                               s2 <- mgu (apply s1 r) (apply s1 r')
                               return (s2 @@ s1)
mgu (TVar u) t = varBind u t
mgu t (TVar u) = varBind u t
mgu (TSynonym s ts) u = mgu (unsynonym s ts) u
mgu t (TSynonym s ts) = mgu t (unsynonym s ts)
mgu (TCon tc1) (TCon tc2)
  | tc1==tc2 = return nullSubst
mgu t1 t2 = fail ("types do not unify: "
                  ++ show t1 ++ " " ++ show t2)

-- | Bind a variable to a type, guarding against trivial bindings, the
-- occurs check, and kind mismatches.
varBind u t | t == TVar u = return nullSubst
            | u `elem` tv t = fail "occurs check fails"
            | kind u /= kind t = fail "kinds do not match"
            | otherwise = return (u +-> t)

-- | One-way matching: find @s@ such that @apply s t1 == t2@, without
-- instantiating variables of @t2@.
match :: Monad m => Type -> Type -> m Subst
match (TAp l r) (TAp l' r') = do sl <- match l l'
                                 sr <- match r r'
                                 merge sl sr
match (TVar u) t | kind u == kind t = return (u +-> t)
match (TSynonym s ts) u = match (unsynonym s ts) u
match t (TSynonym s ts) = match t (unsynonym s ts)
match (TCon tc1) (TCon tc2)
  | tc1==tc2 = return nullSubst
match t1 t2 = fail "types do not match"

-----------------------------------------------------------------------------
-- Pred: Predicates
-----------------------------------------------------------------------------

-- | Unification / matching lifted to class predicates: the class names
-- must coincide, then the type arguments are unified / matched.
mguPred, matchPred :: Pred -> Pred -> Maybe Subst
mguPred = lift mgu
matchPred = lift match

lift m (IsIn i t) (IsIn i' t')
  | i == i' = m t t'
  | otherwise = fail "classes differ"
-----------------------------------------------------------------------------
-- | Superclass names of class @i@; errors with the class name when the
-- class is not defined (use 'defined' to check first).
super :: ClassEnv -> Id -> [Id]
super ce i = case classes ce i of
               Just (is, _, _) -> is
               Nothing -> error ("super " ++ i)

-- | Instances of class @i@.  Previously a partial pattern match that
-- died with an uninformative pattern-match failure for unknown classes;
-- now reports an explicit error, consistent with 'super'.
insts :: ClassEnv -> Id -> [Inst]
insts ce i = case classes ce i of
               Just (_, its, _) -> its
               Nothing -> error ("insts " ++ i)

-- | Member signatures of class @i@; same error convention as 'super'.
methods :: ClassEnv -> Id -> [Assump]
methods ce i = case classes ce i of
                 Just (_, _, ms) -> ms
                 Nothing -> error ("methods " ++ i)

-- | Did a class-environment lookup succeed?
defined :: Maybe a -> Bool
defined (Just x) = True
defined Nothing = False

-- | Replace the definition of class @i@ in the environment (the lookup
-- function is shadowed for that one name).
modify :: ClassEnv -> Id -> Class -> ClassEnv
modify ce i c = ce{classes = \j -> if i==j then Just c
                                           else classes ce j}
-- | The empty class environment: no classes, bindings or defaults.
initialEnv :: ClassEnv
initialEnv = ClassEnv { classes = \i -> fail "class not defined",
                        defaults = [],
                        impls = [],
                        expls = [],
                        assumps = [] }

-- | Add a class with its superclass names and member signatures; fails if
-- the class already exists or a superclass is missing.
addClass :: Id -> [Id] -> [Assump] -> EnvTransformer
addClass i is ms ce
  | defined (classes ce i) = fail "class already defined"
  | any (not . defined . classes ce) is = fail "superclass not defined"
  | otherwise = return (modify (ce{assumps = assumps ce ++ ms}) i (is, [], ms))

-- | Add an instance (context, head and dictionary expression) to an
-- existing class, rejecting overlapping instance heads.
addInst :: [Pred] -> Pred -> Expr -> EnvTransformer
addInst ps p@(IsIn i _) dict ce
  | not (defined (classes ce i)) = error ("no class for instance " ++ i)
  | any (overlap p) qs = error ("overlapping instance " ++ i)
  | otherwise = return (modify ce i c)
  where its = insts ce i
        qs = [ q | (_ :=> q, _) <- its ]
        c = (super ce i, (ps:=>p, dict) : its, methods ce i)

-- | Append implicitly-typed binding groups to the environment.
addImpls :: [Impl] -> EnvTransformer
addImpls is ce = return (ce { impls = impls ce ++ is })

-- | Append explicitly-typed bindings to the environment.
addExpls :: [Expl] -> EnvTransformer
addExpls es ce = return (ce { expls = expls ce ++ es })

-- | Append assumptions to the environment.
addAssumps :: [Assump] -> EnvTransformer
addAssumps is ce = return (ce { assumps = assumps ce ++ is })

-- | Two instance heads overlap when they unify.
overlap :: Pred -> Pred -> Bool
overlap p q = defined (mguPred p q)
{-
exampleInsts :: EnvTransformer
exampleInsts = addPreludeClasses
<:> addInst [] (IsIn "Ord" tUnit)
<:> addInst [] (IsIn "Ord" tChar)
<:> addInst [] (IsIn "Ord" tInt)
<:> addInst [IsIn "Ord" (TVar (Tyvar "a" Star)),
IsIn "Ord" (TVar (Tyvar "b" Star))]
(IsIn "Ord" (pair (TVar (Tyvar "a" Star))
(TVar (Tyvar "b" Star))))
-}
-----------------------------------------------------------------------------
-- | All predicates implied by @p@ through the superclass hierarchy
-- (including @p@ itself).
bySuper :: ClassEnv -> Pred -> [Pred]
bySuper ce p@(IsIn i t)
  = p : concat [ bySuper ce (IsIn i' t) | i' <- super ce i ]

-- | Find an instance whose head matches @p@; returns the instantiated
-- instance context together with its dictionary expression.
byInst :: ClassEnv -> Pred -> Maybe ([Pred], Expr)
byInst ce p@(IsIn i t) = msum [ tryInst it | it <- insts ce i ]
  where tryInst (ps :=> h, dict) = do u <- matchPred h p
                                      Just (map (apply u) ps, dict)

-- | Does @p@ follow from the assumptions @ps@, either by superclasses or
-- by an instance whose context is itself entailed?
entail :: ClassEnv -> [Pred] -> Pred -> Bool
entail ce ps p = any (p `elem`) (map (bySuper ce) ps) ||
                 case byInst ce p of
                   Nothing -> False
                   Just (qs, _) -> all (entail ce ps) qs

-----------------------------------------------------------------------------

-- | Head-normal form: the predicate's type argument is headed by a type
-- variable (synonyms are expanded before deciding).
inHnf :: Pred -> Bool
inHnf (IsIn c t) = hnf t
  where hnf (TVar v) = True
        hnf (TCon tc) = False
        hnf (TAp t _) = hnf t
        hnf (TSynonym s ts) = hnf (unsynonym s ts)

-- | Reduce a list of predicates to head-normal form.
toHnfs :: Monad m => ClassEnv -> [Pred] -> m [Pred]
toHnfs ce ps = do pss <- mapM (toHnf ce) ps
                  return (concat pss)

-- | Reduce one predicate to head-normal form by repeatedly applying
-- instances; fails when no instance applies (context reduction error).
toHnf :: Monad m => ClassEnv -> Pred -> m [Pred]
toHnf ce p | inHnf p = return [p]
           | otherwise = case byInst ce p of
                           Nothing -> fail ("context reduction " ++ show p)
                           Just (ps, _) -> toHnfs ce ps

-- | Drop predicates already entailed by the remaining ones.
simplify :: ClassEnv -> [Pred] -> [Pred]
simplify ce = loop []
  where loop rs [] = rs
        loop rs (p:ps) | entail ce (rs++ps) p = loop rs ps
                       | otherwise = loop (p:rs) ps

-- | Full context reduction: head-normalise, then simplify.
reduce :: Monad m => ClassEnv -> [Pred] -> m [Pred]
reduce ce ps = do qs <- toHnfs ce ps
                  return (simplify ce qs)

-- | Entailment using superclasses only (no instances).
scEntail :: ClassEnv -> [Pred] -> Pred -> Bool
scEntail ce ps p = any (p `elem`) (map (bySuper ce) ps)
-- Type inference monad
newtype TI a = TI (Subst -> Int -> (Subst, Int, a))
instance Monad TI where
return x = TI (\s n -> (s,n,x))
TI f >>= g = TI (\s n -> case f s n of
(s',m,x) -> let TI gx = g x
in gx s' m)
runTI :: TI a -> a
runTI (TI f) = x where (s,n,x) = f nullSubst 0
getSubst :: TI Subst
getSubst = TI (\s n -> (s,n,s))
unify :: Type -> Type -> TI ()
unify t1 t2 = do s <- getSubst
u <- mgu (apply s t1) (apply s t2)
extSubst u
extSubst :: Subst -> TI ()
extSubst s' = TI (\s n -> (s'@@s, n, ()))
newTVar :: Kind -> TI Type
newTVar k = TI (\s n -> let v = Tyvar (enumId n) k
in (s, n+1, TVar v))
freshInst :: Scheme -> TI (Qual Type)
freshInst (Forall ks qt) = do ts <- mapM newTVar ks
return (inst ts qt)
-- | Replace quantified variables ('TGen' n) by the n-th type in the
-- supplied list.
class Instantiate t where
  inst :: [Type] -> t -> t

instance Instantiate Type where
  inst ts (TAp l r) = TAp (inst ts l) (inst ts r)
  -- Indexing is safe only when the list covers every TGen index in the
  -- scheme being instantiated (as arranged by 'freshInst').
  inst ts (TGen n)  = ts !! n
  inst ts t         = t

instance Instantiate a => Instantiate [a] where
  inst ts = map (inst ts)

instance Instantiate t => Instantiate (Qual t) where
  inst ts (ps :=> t) = inst ts ps :=> inst ts t

instance Instantiate Pred where
  inst ts (IsIn c t) = IsIn c (inst ts t)
-----------------------------------------------------------------------------
-- TIMain: Type Inference Algorithm
-----------------------------------------------------------------------------
-- | A monomorphic assumption for a recursive binding under inference.
type RecAssump = (Id, Type)

-- | Typing environment: polymorphic assumptions plus monomorphic
-- assumptions for the recursive bindings currently being inferred.
data Env = Env [Assump] [RecAssump]

instance Types Env where
  apply s (Env as ras) = Env (apply s as) [(i, apply s t) | (i, t) <- ras]
  tv (Env as ras) = tv as `union` tv (map snd ras)

-- | Build an environment with no recursive assumptions.
makeEnv :: [Assump] -> Env
makeEnv as = Env as []

-- | Add polymorphic assumptions; new entries shadow older ones.
extend :: Env -> [Assump] -> Env
extend (Env as ras) as' = Env (as' ++ as) ras

-- | Add monomorphic recursive assumptions; new entries shadow older ones.
extendRec :: Env -> [RecAssump] -> Env
extendRec (Env as ras) ras' = Env as (ras' ++ ras)
-- | Look up an identifier: recursive (monomorphic) assumptions take
-- precedence and yield a 'Right' type; ordinary assumptions yield a
-- 'Left' scheme; otherwise fail with an unbound-identifier error.
lookupEnv :: Monad m => Env -> Id -> m (Either Scheme Type)
lookupEnv (Env as ras) i =
  case lookup i ras of
    Just t  -> return (Right t)
    Nothing -> find as
  where find [] = fail ("unbound identifier: " ++ i)
        find ((i':>:sc):as) = if i==i' then return (Left sc) else find as
-- Basic definitions for type inference
-- | An inference function for syntax @e@: given class and typing
-- environments, produce predicates, a result of type @t@, and the
-- (possibly elaborated) syntax.
type Infer e t = ClassEnv -> Env -> e -> TI ([Pred], t, e)

-- Lit: Literals
-- | Literals have fixed, predicate-free types.
tiLit :: Literal -> TI ([Pred], Type)
tiLit (LitChar _) = return ([], tChar)
tiLit (LitInt _)  = return ([], tInt)
tiLit (LitStr _)  = return ([], tString)
-- Pat: Patterns
-- | Infer the type of a pattern, also returning the predicates it
-- introduces and the assumptions for the variables it binds.
tiPat :: Pat -> TI ([Pred], [Assump], Type)
tiPat (PVar i) = do v <- newTVar Star
                    return ([], [i :>: toScheme v], v)
tiPat PWildcard = do v <- newTVar Star
                     return ([], [], v)
-- An as-pattern binds i at the same type as the inner pattern.
tiPat (PAs i pat) = do (ps, as, t) <- tiPat pat
                       return (ps, (i:>:toScheme t):as, t)
tiPat (PLit l) = do (ps, t) <- tiLit l
                    return (ps, [], t)
-- A constructor pattern: unify the constructor's scheme with a function
-- from the sub-pattern types to a fresh result type.
tiPat (PCon con pats)
  = do (ps, as, ts) <- tiPats pats
       t' <- newTVar Star
       (qs :=> t) <- freshInst (conScheme con)
       unify t (foldr fn t' ts)
       return (ps ++ qs, as, t')
-- | Infer a list of patterns, concatenating their predicates and
-- assumptions and collecting their types in order.
tiPats :: [Pat] -> TI ([Pred], [Assump], [Type])
tiPats pats = do psasts <- mapM tiPat pats
                 let ps = concat [ ps' | (ps',_,_) <- psasts ]
                     as = concat [ as' | (_,as',_) <- psasts ]
                     ts = [ t | (_,_,t) <- psasts ]
                 return (ps, as, ts)
-----------------------------------------------------------------------------
-- | Infer the type of an expression.  The elaborated expression marks
-- class-dictionary arguments with 'ClassPH' and references to recursive
-- bindings with 'RecPH'; both are rewritten later by 'resolve'.
tiExpr :: Infer Expr Type
tiExpr ce env e@(Var i) =
  do sc_or_t <- lookupEnv env i
     case sc_or_t of
       -- A scheme: instantiate it and apply the variable to one
       -- dictionary placeholder per predicate of its context.
       Left sc -> do (ps :=> t) <- freshInst sc
                     return (ps, t, foldl Ap e (map ClassPH ps))
       -- A recursive binding still being inferred: monomorphic type.
       Right t -> return ([], t, RecPH i)
tiExpr ce env e@(Con con) = do (ps :=> t) <- freshInst (conScheme con)
                               return (ps, t, e)
tiExpr ce env e@(Lit l) = do (ps, t) <- tiLit l
                             return (ps, t, e)
tiExpr ce env (Ap e f) = do (ps, te, e') <- tiExpr ce env e
                            (qs, tf, f') <- tiExpr ce env f
                            t <- newTVar Star
                            unify (tf `fn` t) te
                            return (ps ++ qs, t, Ap e' f')
tiExpr ce env (Let bg e) = do (ps, as, bg') <- tiBindGroup ce env bg
                              (qs, t, e') <- tiExpr ce (extend env as) e
                              return (ps ++ qs, t, Let bg' e')
tiExpr ce env (Case e pses) = do (ps, te, e') <- tiExpr ce env e
                                 tf <- newTVar Star
                                 t <- newTVar Star
                                 unify (te `fn` t) tf
                                 (qs, alts') <- tiAlts ce env alts tf
                                 let pses' = zip (map fst pses) (map snd alts')
                                 return (ps ++ qs, t, Case e' pses')
  -- Each case branch is checked as a one-pattern alternative.
  where alts = [([p], e) | (p, e) <- pses]
tiExpr ce env (Lambda alt) = do (ps, t, alt') <- tiAlt ce env alt
                                return (ps, t, Lambda alt')
-- Is this the same as translating  e :: sc  into  let v :: sc; v = e in v ?
tiExpr ce env (ESign e sc) =
  do (qs :=> t) <- freshInst sc
     (ps, te, e') <- tiExpr ce env e
     unify te t
     s <- getSubst
     let qs' = apply s qs
         t'  = apply s t
         fs  = tv (apply s env)
         gs  = tv t' \\ fs
         sc' = quantify gs (qs' :=> t')
         ps' = filter (not . entail ce qs') (apply s ps)
     (ds, rs) <- split ce fs gs ps'
     if sc /= sc'
       then fail "signature too general"
       else if not (null rs)
              then fail "context too weak"
              else return (ds, te, e')
-----------------------------------------------------------------------------
-- | Infer one alternative: patterns extend the environment for the
-- right-hand side; the result type is a function from the pattern types
-- to the RHS type.
tiAlt :: Infer Alt Type
tiAlt ce env (pats, rhs) =
  do (ps, as, ts) <- tiPats pats
     (qs, t, rhs') <- tiRhs ce (extend env as) rhs
     return (ps ++ qs, foldr fn t ts, (pats, rhs'))
-- | Infer all alternatives of a binding and unify each inferred type
-- with the expected type @t@, returning the accumulated predicates and
-- the elaborated alternatives.
tiAlts :: ClassEnv -> Env -> [Alt] -> Type -> TI ([Pred], [Alt])
tiAlts ce env alts t =
  do r <- mapM (tiAlt ce env) alts
     -- mapM_ : the unification results are units, so do not build and
     -- discard a result list (the original used mapM here).
     mapM_ (unify t) [t' | (_, t', _) <- r]
     return (concat [p | (p, _, _) <- r],
             [a | (_, _, a) <- r])
-----------------------------------------------------------------------------
-- | Infer a right-hand side: a plain expression, a where-bound group,
-- or a list of guarded expressions (all guards share one result type).
tiRhs :: Infer Rhs Type
tiRhs ce env (Rhs e) =
  do (ps, t, e') <- tiExpr ce env e
     return (ps, t, Rhs e')
tiRhs ce env (Where bg rhs) =
  do (ps, as, bg') <- tiBindGroup ce env bg
     (qs, t, rhs') <- tiRhs ce (extend env as) rhs
     return (ps ++ qs, t, Where bg' rhs')
tiRhs ce env (Guarded guards) =
  do t <- newTVar Star
     r <- mapM (tiGuard ce env) guards
     -- mapM_ : unification results are units; the original built and
     -- discarded a list with mapM.
     mapM_ (unify t) [t' | (_,t',_) <- r]
     return (concat [p | (p,_,_) <- r], t, Guarded [g | (_,_,g) <- r])
-- | Infer one guarded expression: the guard must be Bool; the result
-- type is that of the guarded expression.
tiGuard :: Infer (Expr, Expr) Type
tiGuard ce env (cond, e) =
  do (ps, tcond, cond') <- tiExpr ce env cond
     unify tcond tBool
     (qs, te, e') <- tiExpr ce env e
     return (ps ++ qs, te, (cond', e'))
-----------------------------------------------------------------------------
-- | Split reduced predicates into deferred predicates (mentioning only
-- the fixed variables @fs@) and retained predicates, after defaulting
-- away any ambiguous ones.
split :: Monad m => ClassEnv -> [Tyvar] -> [Tyvar] -> [Pred]
      -> m ([Pred], [Pred])
split ce fs gs ps = do ps' <- reduce ce ps
                       let (ds, rs) = partition (all (`elem` fs) . tv) ps'
                       rs' <- defaultedPreds ce (fs++gs) rs
                       return (ds, rs \\ rs')
-- | An ambiguous type variable together with the predicates mentioning it.
type Ambiguity = (Tyvar, [Pred])

-- | Variables in @ps@ that do not appear in @vs@ are ambiguous.
ambiguities :: ClassEnv -> [Tyvar] -> [Pred] -> [Ambiguity]
ambiguities ce vs ps = [ (v, filter (elem v . tv) ps) | v <- tv ps \\ vs ]
-- | Numeric classes eligible for defaulting (Haskell report rules).
numClasses :: [Id]
numClasses = ["Num", "Integral", "Floating", "Fractional",
              "Real", "RealFloat", "RealFrac"]

-- | Standard classes; defaulting requires all involved classes to be
-- in this list.
stdClasses :: [Id]
stdClasses = ["Eq", "Ord", "Show", "Read", "Bounded", "Enum", "Ix",
              "Functor", "Monad", "MonadPlus"] ++ numClasses
-- | Candidate default types for an ambiguous variable, per the Haskell
-- defaulting rules: every predicate mentions exactly the variable, at
-- least one class is numeric, all classes are standard, and a default
-- type qualifies only if it satisfies every predicate.
candidates :: ClassEnv -> Ambiguity -> [Type]
candidates ce (v, qs) = [ t' | let is = [ i | IsIn i t <- qs ]
                                   ts = [ t | IsIn i t <- qs ],
                               all ((TVar v)==) ts,
                               any (`elem` numClasses) is,
                               all (`elem` stdClasses) is,
                               t' <- defaults ce,
                               all (entail ce []) [ IsIn i t' | i <- is ] ]
-- | Common driver for defaulting: compute the ambiguities and their
-- candidate types, failing if any ambiguity has no candidate; otherwise
-- combine them with @f@ (each ambiguity paired with its first candidate).
withDefaults :: Monad m => ([Ambiguity] -> [Type] -> a)
             -> ClassEnv -> [Tyvar] -> [Pred] -> m a
withDefaults f ce vs ps
  | any null tss = fail "cannot resolve ambiguity"
  | otherwise    = return (f vps (map head tss))
  where vps = ambiguities ce vs ps
        tss = map (candidates ce) vps
-- | The predicates eliminated by defaulting (the candidate types
-- themselves are not needed, hence the ignored second argument).
defaultedPreds :: Monad m => ClassEnv -> [Tyvar] -> [Pred] -> m [Pred]
defaultedPreds = withDefaults (\vps _ -> concatMap snd vps)
  -- concatMap replaces the original's concat (map snd vps).

-- | A substitution mapping each ambiguous variable to its chosen
-- default type.
defaultSubst :: Monad m => ClassEnv -> [Tyvar] -> [Pred] -> m Subst
defaultSubst = withDefaults (\vps ts -> zip (map fst vps) ts)
-----------------------------------------------------------------------------
-- Resolving Placeholders
-- | Environment used while rewriting placeholders into explicit
-- dictionary-passing code.
data ResolveEnv = ResolveEnv { reParam :: [(Pred, Expr)], -- dictionaries available as parameters
                               reRec   :: [(Id, Expr)],   -- expansions for recursive references
                               reSubst :: Subst,
                               reClass :: ClassEnv }

-- | Rewrite each alternative to take one explicit dictionary parameter
-- per predicate in @ps@ and resolve all placeholders in its RHS.
resolve :: ClassEnv -> Subst -> [(Id, [Pred])] -> [Pred] -> [Alt] -> [Alt]
resolve ce s recs ps alts = map resolveAlt alts
        -- Dictionary parameter names are "Class#var".
  where dictVars = [c ++ '#' : v | IsIn c (TVar (Tyvar v _)) <- ps]
        env = ResolveEnv { reParam = zip ps (map Var dictVars),
                           reRec = [(i, foldl Ap (Var i) (map ClassPH ps))
                                   | (i, ps) <- recs],
                           reSubst = s,
                           reClass = ce }
        dictParams = map PVar dictVars
        resolveAlt (pats, rhs) = (dictParams ++ pats, resolveRhs env rhs)
-- | Resolve placeholders in a right-hand side.
resolveRhs :: ResolveEnv -> Rhs -> Rhs
resolveRhs re (Rhs e) = Rhs (resolveExpr re e)
resolveRhs re (Where bg rhs) =
  Where (resolveBindGroup re bg) (resolveRhs re rhs)
resolveRhs re (Guarded guards) =
  Guarded [(resolveExpr re cond, resolveExpr re e) | (cond, e) <- guards]
-- | Resolve placeholders inside an expression.
resolveExpr :: ResolveEnv -> Expr -> Expr
resolveExpr re e@(Var _) = e
resolveExpr re e@(Lit _) = e
resolveExpr re e@(Con _) = e
resolveExpr re (Ap e f) = Ap (resolveExpr re e) (resolveExpr re f)
resolveExpr re (Let bg e) = Let (resolveBindGroup re bg) (resolveExpr re e)
resolveExpr re (Case e pairs) =
  Case (resolveExpr re e) [(p, resolveRhs re rhs) | (p, rhs) <- pairs]
resolveExpr re (Lambda (pats, rhs)) = Lambda (pats, resolveRhs re rhs)
resolveExpr re (ESign e sc) = ESign (resolveExpr re e) sc
-- A recursive reference expands to its recorded application, which may
-- itself contain placeholders, so the expansion is resolved again.
resolveExpr re e@(RecPH i) = case lookup i (reRec re) of
                               Just e' -> resolveExpr re e'
                               Nothing -> e
-- A dictionary placeholder is satisfied, in order, by: a dictionary
-- parameter, an instance dictionary (recursively resolving its
-- context), or a superclass selection from a parameter on the same
-- type variable.  Otherwise it is left in place.
resolveExpr re e@(ClassPH p@(IsIn _ v)) =
  case lookup p' (reParam re) of
    Just e' -> e'
    Nothing ->
      case byInst (reClass re) p' of
        Just (ps, e') -> foldl Ap e' (map (resolveExpr re . ClassPH) ps)
        Nothing ->
          case resolveSuper re pes p' of
            Just e' -> e'
            Nothing -> e
  where p' = apply (reSubst re) p
        pes = [pe | pe@(IsIn _ v', _) <- reParam re, v == v']
-- | Search for @p@ among the superclasses of the available dictionary
-- parameters, wrapping each candidate in a "Class>>Super" selector
-- application; recurses one superclass level at a time.
resolveSuper :: ResolveEnv -> [(Pred, Expr)] -> Pred -> Maybe Expr
resolveSuper re [] p = Nothing
resolveSuper re pes@(_:_) p =
  case lookup p pes' of
    Just e' -> Just e'
    Nothing -> resolveSuper re pes' p
  where pes' = [(IsIn sup v, Var (cls ++ ">>" ++ sup) `Ap` e)
               | (IsIn cls v, e) <- pes, sup <- super (reClass re) cls]
-- | Resolve placeholders in every binding of a bind group.
-- NOTE(review): no type signature in the original; presumably
-- ResolveEnv -> BindGroup -> BindGroup — confirm before adding one.
resolveBindGroup re (es, iss) = (es', iss')
  where es' = [(i, sc, resolveAlts alts) | (i, sc, alts) <- es]
        iss' = map (\is -> [(i, resolveAlts alts) | (i, alts) <- is]) iss
        resolveAlts alts = [(pats, resolveRhs re rhs) | (pats, rhs) <- alts]
-----------------------------------------------------------------------------
-- | Check an explicitly typed binding against its declared scheme:
-- infer the alternatives, compare the inferred scheme with the
-- declaration, and reject signatures that are too general or contexts
-- that are too weak.  Placeholders are resolved using the declared
-- context.
tiExpl :: ClassEnv -> Env -> Expl -> TI ([Pred], Expl)
tiExpl ce env (i, sc, alts)
  = do (qs :=> t) <- freshInst sc
       (ps, alts') <- tiAlts ce env alts t
       s <- getSubst
       let qs' = apply s qs
           t'  = apply s t
           fs  = tv (apply s env)
           gs  = tv t' \\ fs
           sc' = quantify gs (qs' :=> t')
           -- Keep only predicates not already entailed by the context.
           ps' = filter (not . entail ce qs') (apply s ps)
           alts'' = resolve ce s [] qs' alts'
       (ds, rs) <- split ce fs gs ps'
       if sc /= sc' then
           fail ("signature too general: expected" ++ show sc
                 ++ ", but inferred " ++ show sc')
         else if not (null rs) then
           fail "context too weak"
         else
           return (ds, (i, sc, alts''))
-----------------------------------------------------------------------------
-- | Monomorphism restriction test: a binding group is restricted when
-- any of its implicit bindings has an alternative with no argument
-- patterns (i.e. a simple pattern binding).
restricted :: [Impl] -> Bool
restricted = any isSimple
  where
    isSimple (_, alts) = any (null . fst) alts
-- | Infer a group of mutually recursive, implicitly typed bindings,
-- applying the monomorphism restriction when 'restricted' holds.
tiImpls :: Infer [Impl] [Assump]
tiImpls ce env [] = return ([], [], [])
tiImpls ce env bs =
  do ts <- mapM (\_ -> newTVar Star) bs
     let is    = map fst bs
         -- Members of the group see each other monomorphically.
         env'  = extendRec env (zip is ts)
         altss = map snd bs
     pssass <- sequence (zipWith (tiAlts ce env') altss ts)
     s <- getSubst
     let ps' = apply s (concat (map fst pssass))
         ts' = apply s ts
         fs  = tv (apply s env)
         vss = map tv ts'
         gs  = foldr1 union vss \\ fs
     (ds, rs) <- split ce fs (foldr1 intersect vss) ps'
     if restricted bs then
         -- Restricted: retained predicates are deferred rather than
         -- quantified, and their variables are excluded from
         -- generalisation; no dictionary parameters are added.
         let gs'    = gs \\ tv rs
             scs    = map (quantify gs' . ([] :=>)) ts'
             recenv = zip is (repeat [])
             altss' = map (resolve ce s recenv [] . snd) pssass
             bs'    = zip is altss'
         in return (ds ++ rs, zipWith (:>:) is scs, bs')
       else
         -- Unrestricted: quantify over the retained predicates and
         -- thread them as dictionary parameters through the group.
         let scs    = map (quantify gs . (rs :=>)) ts'
             recenv = zip is (repeat rs)
             altss' = map (resolve ce s recenv rs . snd) pssass
             bs'    = zip is altss'
         in return (ds, zipWith (:>:) is scs, bs')
-----------------------------------------------------------------------------
-- | Infer a bind group: implicit bindings first (under the assumptions
-- declared by the explicit signatures), then check each explicitly
-- typed binding.
tiBindGroup :: Infer BindGroup [Assump]
tiBindGroup ce env (es,iss) =
  do let as = [ v:>:sc | (v,sc,alts) <- es ]
     (ps, as', iss') <- tiSeq tiImpls ce (extend env as) iss
     qses_s <- mapM (tiExpl ce (extend env (as'++as))) es
     -- concatMap replaces the original's concat (map fst qses_s).
     return (ps ++ concatMap fst qses_s, as' ++ as, (map snd qses_s, iss'))
-- | Infer a dependency-ordered sequence of binding groups, each seeing
-- the assumptions produced by the previous ones.
tiSeq :: Infer bg [Assump] -> Infer [bg] [Assump]
tiSeq ti ce env [] = return ([], [], [])
tiSeq ti ce env (bs:bss) = do (ps, as, bs') <- ti ce env bs
                              (qs, as', bss') <- tiSeq ti ce (extend env as) bss
                              return (ps ++ qs, as' ++ as, bs':bss')
-- Type Inference for Whole Programs
-- | Type-check a whole program: infer all bind groups, reduce the
-- residual predicates, default the ambiguous ones, and apply the final
-- substitution to the resulting assumptions.
tiProgram :: ClassEnv -> [Assump] -> Program -> ([Assump], Program)
tiProgram ce as bgs = runTI $
  do (ps, as', bgs') <- tiSeq tiBindGroup ce (makeEnv as) bgs
     s <- getSubst
     rs <- reduce ce (apply s ps)
     s' <- defaultSubst ce [] rs
     return (apply (s'@@s) as', bgs')
-----------------------------------------------------------------------------
-- | Built-in assumptions for the primitive prelude operations
-- (monomorphic Int/Bool/Char primitives plus a few polymorphic ones).
preludeAssumptions :: [Assump]
preludeAssumptions = [
  "+" :>: (toScheme (tInt `fn` tInt `fn` tInt)),
  "-" :>: (toScheme (tInt `fn` tInt `fn` tInt)),
  "*" :>: (toScheme (tInt `fn` tInt `fn` tInt)),
  -- "/" :>: (toScheme (tInt `fn` tInt `fn` tInt)),
  "div":>: (toScheme (tInt `fn` tInt `fn` tInt)),
  "mod":>: (toScheme (tInt `fn` tInt `fn` tInt)),
  -- "==" :>: (toScheme (tInt `fn` tInt `fn` tBool)),
  -- "eql" :>: (quantifyAll ([IsIn "Eq" a] :=> (a `fn` a `fn` tBool))),
  -- "/=" :>: (toScheme (tInt `fn` tInt `fn` tBool)),
  "<" :>: (toScheme (tInt `fn` tInt `fn` tBool)),
  ">" :>: (toScheme (tInt `fn` tInt `fn` tBool)),
  "<=" :>: (toScheme (tInt `fn` tInt `fn` tBool)),
  ">=" :>: (toScheme (tInt `fn` tInt `fn` tBool)),
  "&&" :>: (toScheme (tBool `fn` tBool `fn` tBool)),
  "||" :>: (toScheme (tBool `fn` tBool `fn` tBool)),
  "ord":>: (toScheme (tChar `fn` tInt)),
  "chr":>: (toScheme (tInt `fn` tChar)),
  "++" :>: (quantifyAll' (list a `fn` list a `fn` list a)),
  "." :>: (quantifyAll' ((b `fn` c) `fn` (a `fn` b) `fn` a `fn` c)),
  "error" :>: (quantifyAll' (list tChar `fn` a)),
  "hGetContents" :>: (toScheme (tInt `fn` list tChar)),
  "IF" :>: (quantifyAll' (tBool `fn` a `fn` a `fn` a)),
  "SEL" :>: (quantifyAll' (a `fn` b))]
  where a = TVar (Tyvar "a" Star)
        b = TVar (Tyvar "b" Star)
        c = TVar (Tyvar "c" Star)
-- | Install the core classes: currently Eq with its methods (as
-- tuple-selector dictionaries) and its Int and Char instances.
addCoreClasses :: EnvTransformer
addCoreClasses = foldl1 (<:>) [
  addClass "Eq" [] [
    "==" :>: (quantifyAll ([IsIn "Eq" a] :=> (a `fn` a `fn` tBool))),
    "/=" :>: (quantifyAll ([IsIn "Eq" a] :=> (a `fn` a `fn` tBool)))],
  -- Methods select the 0th/1st component of a 2-field dictionary.
  addImpls [tupleSelector "==" 0 2, tupleSelector "/=" 1 2],
  -- addClass "Ord" ["Eq"] [],
  -- addClass "Show" [] [],
  -- addClass "Read" [] [],
  -- addClass "Bounded" [] [],
  -- addClass "Enum" [] [],
  -- addClass "Functor" [] [],
  -- addClass "Monad" [] [],
  addInst [] (IsIn "Eq" tInt) (Var "EqInt"),
  addImpls [("EqInt", [([], Rhs $ tuple [Var "primEq", Var "primNeq"])])],
  addInst [] (IsIn "Eq" tChar) (Var "EqChar"),
  addImpls [("EqChar", [([], Rhs $ tuple [Var "primEq", Var "primNeq"])])]
  ]
  where a = TVar (Tyvar "a" Star)
{-
addNumClasses :: EnvTransformer
addNumClasses = addClass "Num" ["Eq", "Show"]
<:> addClass "Real" ["Num", "Ord"]
<:> addClass "Fractional" ["Num"]
<:> addClass "Integral" ["Real", "Enum"]
<:> addClass "RealFrac" ["Real", "Fractional"]
<:> addClass "Floating" ["Fractional"]
<:> addClass "RealFloat" ["RealFrac", "Floating"]
addPreludeClasses :: EnvTransformer
addPreludeClasses = addCoreClasses <:> addNumClasses
-}
| tromp/hs2blc | Type.hs | bsd-3-clause | 25,514 | 317 | 30 | 8,600 | 9,883 | 5,269 | 4,614 | 473 | 5 |
{-# language QuasiQuotes #-}
module Khronos.Versions.OpenXR
( specVersions
) where
import Data.Vector ( Vector )
import Data.Version
import Foreign.Storable ( Storable )
import Polysemy
import Polysemy.Input
import Relude
import Text.InterpolatedString.Perl6.Unindented
import Data.Bits
import Error
import Haskell.Name
import Render.Element
import Spec.Parse
-- | One render element per OpenXR version artefact: the current header
-- version pattern, the XrVersion type, the version-construction
-- helpers, and one pattern per feature (Core) version in the spec.
specVersions
  :: forall r
   . (HasErr r, HasRenderParams r)
  => Spec SpecXr
  -> Vector (Sem r RenderElement)
specVersions Spec {..} = fromList
  ( currentVersion specHeaderVersion
  : versionTypeElem
  : versionConstruction
  : (featureVersion <$> toList specFeatures)
  )
-- | Render the XR_CURRENT_API_VERSION pattern from the spec header's
-- (major, minor, patch) triple.
currentVersion
  :: (HasRenderParams r, HasErr r)
  => SpecHeaderVersion SpecXr
  -> Sem r RenderElement
currentVersion (XrVersion ma mi pa) = genRe "current version" $ do
  RenderParams {..} <- input
  tellExplicitModule =<< mkModuleName ["Version"]
  let pat = mkPatternName "XR_CURRENT_API_VERSION"
      makeVersion = mkPatternName "XR_MAKE_VERSION"
      ver = mkTyName "XrVersion"
  tellImport makeVersion
  tellImport ver
  tellExport (EPat pat)
  tellDoc [qqi|
pattern {pat} :: {ver}
pattern {pat} = {makeVersion} {ma} {mi} {pa}
|]
-- | Render the XR_API_VERSION_<major>_<minor> pattern for one feature,
-- placed in the matching Core<major><minor> module.
featureVersion
  :: (HasErr r, HasRenderParams r) => Feature -> Sem r RenderElement
featureVersion Feature {..} = genRe "feature version" $ do
  RenderParams {..} <- input
  -- NOTE(review): partial pattern; assumes every feature version has at
  -- least a major and a minor component — confirm against the spec data.
  let major : minor : _ = versionBranch fVersion
      pat = mkPatternName
        (CName $ "XR_API_VERSION_" <> show major <> "_" <> show minor)
      make = mkPatternName "XR_MAKE_VERSION"
      ver = mkTyName "XrVersion"
  tellExport (EPat pat)
  tellImport ''Word32
  tellImport make
  tellImport ver
  tellExplicitModule =<< mkModuleName ["Core" <> show major <> show minor]
  tellDoc [qqi|
pattern {pat} :: {ver}
pattern {pat} = {make} {major} {minor} 0
|]
-- | Render the XrVersion newtype (a Word64 wrapper) with its derived
-- instances; the Generic instance is gated behind GENERIC_INSTANCES.
versionTypeElem :: (HasErr r, HasRenderParams r) => Sem r RenderElement
versionTypeElem = genRe "version type" $ do
  RenderParams {..} <- input
  tellExplicitModule =<< mkModuleName ["Version"]
  tellImport ''Word64
  tellImport ''Storable
  tellImport ''Generic
  tellImport ''Typeable
  tellImport (TyConName "Zero")
  let t = mkTyName "XrVersion"
      c = mkConName "XrVersion" "XrVersion"
  tellDataExport t
  tellDocWithHaddock $ \getDoc -> [qqi|
{getDoc (TopLevel "XrVersion")}
newtype {t} = {c} \{ unVersion :: Word64 }
  deriving stock (Typeable, Eq, Ord, Show, Read)
  deriving newtype (Storable, Zero)
#if defined(GENERIC_INSTANCES)
deriving instance Generic {t}
#endif
|]
-- // OpenXR current version number.
-- #define XR_CURRENT_API_VERSION XR_MAKE_VERSION(1, 0, 12)
-- #define XR_MAKE_VERSION(major, minor, patch) \
-- ((((major) & 0xffffULL) << 48) | (((minor) & 0xffffULL) << 32) | ((patch) & 0xffffffffULL))
-- #define XR_VERSION_MAJOR(version) (uint16_t)(((uint64_t)(version) >> 48)& 0xffffULL)
-- #define XR_VERSION_MINOR(version) (uint16_t)(((uint64_t)(version) >> 32) & 0xffffULL)
-- #define XR_VERSION_PATCH(version) (uint32_t)((uint64_t)(version) & 0xffffffffULL)
-- | Render XR_MAKE_VERSION as a bidirectional pattern synonym plus the
-- major/minor/patch accessor functions, mirroring the C macros quoted
-- above.
versionConstruction :: (HasErr r, HasRenderParams r) => Sem r RenderElement
versionConstruction = genRe "version construction" $ do
  RenderParams {..} <- input
  tellExplicitModule =<< mkModuleName ["Version"]
  tellImport ''Word16
  tellImport ''Word32
  tellImport ''Word64
  tellImport '(.&.)
  tellImport '(.|.)
  tellImport 'shiftL
  tellImport 'shiftR
  let p = mkPatternName "XR_MAKE_VERSION"
  tellExport (EPat p)
  -- Accessor names are the C macro names prefixed with "_" so they are
  -- rendered as plain functions rather than pattern synonyms.
  let patMajor = TermName ("_" <> unName (mkPatternName "XR_VERSION_MAJOR"))
      patMinor = TermName ("_" <> unName (mkPatternName "XR_VERSION_MINOR"))
      patPatch = TermName ("_" <> unName (mkPatternName "XR_VERSION_PATCH"))
  tellExport (ETerm patMajor)
  tellExport (ETerm patMinor)
  tellExport (ETerm patPatch)
  tellDoc [qqi|
pattern {p} :: Word16 -> Word16 -> Word32 -> Version
pattern {p} major minor patch <-
  (\\v -> ({patMajor} v, {patMinor} v, {patPatch} v) -> (major, minor, patch))
  where {p} major minor patch = Version
          $ fromIntegral major `shiftL` 48
          .|. fromIntegral minor `shiftL` 32
          .|. fromIntegral patch
\{-# complete {p} #-}
{patMajor} :: Version -> Word16
{patMajor} (Version v) = fromIntegral $ (v `shiftR` 48) .&. 0xffff
{patMinor} :: Version -> Word16
{patMinor} (Version v) = fromIntegral $ (v `shiftR` 32) .&. 0xffff
{patPatch} :: Version -> Word32
{patPatch} (Version v) = fromIntegral $ v .&. 0xffffffff
|]
| expipiplus1/vulkan | generate-new/khronos-spec/Khronos/Versions/OpenXR.hs | bsd-3-clause | 4,755 | 0 | 16 | 1,110 | 937 | 457 | 480 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TypeSynonymInstances #-}
module TransactionServer where
import System.Random
import Control.Monad.Trans.Except
import Control.Monad.Trans.Resource
import Control.Monad.IO.Class
import Data.Aeson
import Data.Aeson.TH
import Data.Bson.Generic
import GHC.Generics
import Network.Wai hiding(Response)
import Network.Wai.Handler.Warp
import Network.Wai.Logger
import Servant
import Servant.API
import Servant.Client
import System.IO
import System.Directory
import System.Environment (getArgs, getProgName, lookupEnv)
import System.Log.Formatter
import System.Log.Handler (setFormatter)
import System.Log.Handler.Simple
import System.Log.Handler.Syslog
import System.Log.Logger
import Data.Bson.Generic
import qualified Data.List as DL
import Data.Maybe (catMaybes)
import Data.Text (pack, unpack)
import Data.Time.Clock (UTCTime, getCurrentTime)
import Data.Time.Format (defaultTimeLocale, formatTime)
import Database.MongoDB
import Control.Monad (when)
import Network.HTTP.Client (newManager, defaultManagerSettings)
import CommonResources
import MongodbHelpers
-- | Handler monad used by every endpoint of this service.
type ApiHandler = ExceptT ServantErr IO

-- | Proxy value for the transaction API type.
transactionApi :: Proxy TransactionApi
transactionApi = Proxy
-- | Route each endpoint of the transaction API to its handler, in the
-- same order as the API type.
server :: Server TransactionApi
server =
  beginTrans :<|>
  downloadTrans :<|>
  uploadTrans :<|>
  commitTrans

-- | WAI application serving the transaction API.
transactionApp :: Application
transactionApp = serve transactionApi server
-- | Proxy and generated client functions for the directory service
-- this server talks to.
directoryApi :: Proxy DirectoryApi
directoryApi = Proxy

join :: FileServer -> ClientM Response
open :: FileName -> ClientM File
close :: FileUpload -> ClientM Response
allfiles :: Ticket -> ClientM [String]
remove :: FileName -> ClientM Response
join :<|> open :<|> close :<|> allfiles :<|> remove = client directoryApi
-- | Start the transaction server on the configured port.
runApp :: IO()
runApp = do
  run (read (transserverport) ::Int) transactionApp
-- | Begin a transaction for a ticketed client: validate the session
-- timeout, then record the client's session key as the transaction id.
beginTrans :: Ticket -> ApiHandler Response
beginTrans (Ticket ticket encryptedTimeout) = liftIO $ do
  let sessionKey = encryptDecrypt sharedSecret ticket
  let decryptedTimeout = decryptTime sharedSecret encryptedTimeout
  putStrLn ("Checking Client Credentials...")
  currentTime <- getCurrentTime
  if (currentTime > decryptedTimeout) then do
    putStrLn "Client session timeout"
    return (Response (encryptDecrypt sessionKey "Failed"))
  else do
    putStrLn "Starting transaction"
    putStrLn "Storing client sessionKey as transaction ID"
    withMongoDbConnection $ upsert (select ["transactionID" =: sessionKey] "TRANSACTION_ID_RECORD") $ toBSON sessionKey
    -- BUG FIX: the success branch must also wrap the message in
    -- 'Response' to match the declared result type (cf. the failure
    -- branch above); the original returned a bare String.
    return (Response (encryptDecrypt sessionKey "Successful"))
-- | Fetch a file from the directory service inside a transaction and
-- record it in the transaction's file list for later commit.
downloadTrans :: FileName -> ApiHandler File
downloadTrans fileName@(FileName ticket encryptedTimeout encryptedFN) = liftIO $ do
  let sessionKey = encryptDecrypt sharedSecret ticket
  let decryptedTimeout = decryptTime sharedSecret encryptedTimeout
  let decryptedFN = encryptDecrypt sessionKey encryptedFN
  putStrLn ("Checking Client Credentials...")
  currentTime <- getCurrentTime
  if (currentTime > decryptedTimeout) then do
    putStrLn "Client session timeout"
    return (File "Failed" "Failed")
  else do
    manager <- newManager defaultManagerSettings
    res <- runClientM (open fileName) (ClientEnv manager (BaseUrl Http dirserverhost (read (dirserverport) :: Int) ""))
    case res of
      Left err -> do
        -- BUG FIX: err is a ServantError, not a String; the original
        -- `putStrLn err` did not type-check.
        print err
        return (File "Failed" "Failed")
      Right file -> do
        putStrLn "Storing file transaction data"
        -- BUG FIX: store the decrypted file name (a String), which is
        -- what 'commitfile' later destructures from TransactionFile;
        -- the original stored the whole FileName value.
        withMongoDbConnection $ upsert (select ["transactionID" =: sessionKey, "fileName" =: decryptedFN] "TRANSACTION_FILE_RECORD") $ toBSON (TransactionFile decryptedFN sessionKey)
        return file
-- | Upload a file inside a transaction: the content is written under a
-- temporary "TMP~"-prefixed name on the directory service and only
-- promoted to its real name when the transaction commits.
uploadTrans :: FileUpload -> ApiHandler Response
uploadTrans fileUpload@(FileUpload ticket encryptedTimeout encryptedFN encryptedFC) = liftIO $ do
  let sessionKey = encryptDecrypt sharedSecret ticket
  let decryptedTimeout = decryptTime sharedSecret encryptedTimeout
  let decryptedFN = encryptDecrypt sessionKey encryptedFN
  putStrLn ("Checking Client Credentials...")
  currentTime <- getCurrentTime
  if (currentTime > decryptedTimeout) then do
    putStrLn "Client session timeout"
    return (Response (encryptDecrypt sessionKey "Failed"))
  else do
    manager <- newManager defaultManagerSettings
    let tempFileName = encryptDecrypt sessionKey ("TMP~"++decryptedFN)
    let fupload = FileUpload ticket encryptedTimeout tempFileName encryptedFC
    res <- runClientM (close fupload) (ClientEnv manager (BaseUrl Http dirserverhost (read (dirserverport) :: Int) ""))
    case res of
      Left err -> do
        -- BUG FIX: err is a ServantError, not a String.
        print err
        return (Response (encryptDecrypt sessionKey "Failed"))
      Right (Response response) -> do
        let decryptedres = encryptDecrypt sessionKey response
        -- BUG FIX: parenthesised; the original parsed as
        -- (putStrLn "...") ++ decryptedres, which did not type-check.
        putStrLn ("Uploaded temp file - " ++ decryptedres)
        return (Response response)
-- | Commit a transaction: replay every temp file recorded for this
-- session onto its real name via 'commitfile'.
commitTrans :: Ticket -> ApiHandler Response
commitTrans tic@(Ticket ticket encryptedTimeout) = liftIO $ do
  let sessionKey = encryptDecrypt sharedSecret ticket
  let decryptedTimeout = decryptTime sharedSecret encryptedTimeout
  putStrLn ("Checking Client Credentials...")
  currentTime <- getCurrentTime
  if (currentTime > decryptedTimeout) then do
    putStrLn "Client session timeout"
    return (Response (encryptDecrypt sessionKey "Failed"))
  else do
    -- The session key doubles as the transaction id (see beginTrans).
    transactions <- liftIO $ withMongoDbConnection $ do
      docs <- find (select ["transactionID" =: sessionKey] "TRANSACTION_FILE_RECORD") >>= drainCursor
      return $ catMaybes $ DL.map (\ b -> fromBSON b :: Maybe TransactionFile) docs
    mapM_ (commitfile tic) transactions
    return (Response (encryptDecrypt sessionKey "Successful"))
-- | Promote one transaction temp file to its real name: read the
-- "TMP~" copy back from the directory service, re-upload it under the
-- real name, and delete the temp copy on success.
commitfile :: Ticket -> TransactionFile -> IO()
commitfile (Ticket ticket encryptedTimeout) (TransactionFile decryptedFN sessionKey) = liftIO $ do
  -- BUG FIX: parenthesised; the original parsed as
  -- (putStrLn "...") ++ decryptedFN, which did not type-check.
  putStrLn ("Commiting file: " ++ decryptedFN)
  manager <- newManager defaultManagerSettings
  let temp_file = encryptDecrypt sessionKey ("TMP~"++ decryptedFN)
  let fileName = (FileName ticket encryptedTimeout temp_file)
  res <- runClientM (open fileName) (ClientEnv manager (BaseUrl Http dirserverhost (read (dirserverport) :: Int) ""))
  case res of
    -- BUG FIX (here and below): err is a ServantError, not a String.
    Left err -> print err
    Right (File encryptedFN encryptedFC) -> do
      case (temp_file == encryptedFN) of
        False -> putStrLn "Commit Failed"
        True -> do
          let fileupload = (FileUpload ticket encryptedTimeout (encryptDecrypt sessionKey decryptedFN) encryptedFC)
          res <- runClientM (TransactionServer.close fileupload) (ClientEnv manager (BaseUrl Http dirserverhost (read (dirserverport) :: Int) ""))
          case res of
            Left err -> print err
            Right (Response response) -> do
              case response of
                "Successful" -> do
                  res <- runClientM (remove (FileName ticket encryptedTimeout temp_file)) (ClientEnv manager (BaseUrl Http dirserverhost (read (dirserverport) :: Int) ""))
                  case res of
                    Left err -> print err
                    Right (Response response) -> putStrLn (encryptDecrypt sessionKey response)
                -- ROBUSTNESS: the original case was partial and crashed
                -- on any non-"Successful" reply.
                _ -> putStrLn (encryptDecrypt sessionKey response)
| Garygunn94/DFS | TransactionServer/.stack-work/intero/intero6744AxA.hs | bsd-3-clause | 7,910 | 30 | 40 | 1,895 | 1,980 | 990 | 990 | 163 | 5 |
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.PreProcess
-- Copyright : (c) 2003-2005, Isaac Jones, Malcolm Wallace
-- License : BSD3
--
-- Maintainer : cabal-devel@haskell.org
-- Portability : portable
--
-- This defines a 'PreProcessor' abstraction which represents a pre-processor
-- that can transform one kind of file into another. There is also a
-- 'PPSuffixHandler' which is a combination of a file extension and a function
-- for configuring a 'PreProcessor'. It defines a bunch of known built-in
-- preprocessors like @cpp@, @cpphs@, @c2hs@, @hsc2hs@, @happy@, @alex@ etc and
-- lists them in 'knownSuffixHandlers'. On top of this it provides a function
-- for actually preprocessing some sources given a bunch of known suffix
-- handlers. This module is not as good as it could be, it could really do with
-- a rewrite to address some of the problems we have with pre-processors.
module Distribution.Simple.PreProcess (preprocessComponent, preprocessExtras,
knownSuffixHandlers, ppSuffixes,
PPSuffixHandler, PreProcessor(..),
mkSimplePreProcessor, runSimplePreProcessor,
ppCpp, ppCpp', ppGreenCard, ppC2hs, ppHsc2hs,
ppHappy, ppAlex, ppUnlit, platformDefines
)
where
import Prelude ()
import Distribution.Compat.Prelude
import Distribution.Compat.Stack
import Distribution.Simple.PreProcess.Unlit
import Distribution.Package
import qualified Distribution.ModuleName as ModuleName
import Distribution.PackageDescription as PD
import qualified Distribution.InstalledPackageInfo as Installed
import qualified Distribution.Simple.PackageIndex as PackageIndex
import Distribution.Simple.CCompiler
import Distribution.Simple.Compiler
import Distribution.Simple.LocalBuildInfo
import Distribution.Simple.BuildPaths
import Distribution.Simple.Utils
import Distribution.Simple.Program
import Distribution.Simple.Test.LibV09
import Distribution.System
import Distribution.Text
import Distribution.Version
import Distribution.Verbosity
import System.Directory (doesFileExist)
import System.Info (os, arch)
import System.FilePath (splitExtension, dropExtensions, (</>), (<.>),
takeDirectory, normalise, replaceExtension,
takeExtensions)
-- |The interface to a preprocessor, which may be implemented using an
-- external program, but need not be. The arguments are the name of
-- the input file, the name of the output file and a verbosity level.
-- Here is a simple example that merely prepends a comment to the given
-- source file:
--
-- > ppTestHandler :: PreProcessor
-- > ppTestHandler =
-- > PreProcessor {
-- > platformIndependent = True,
-- > runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity ->
-- > do info verbosity (inFile++" has been preprocessed to "++outFile)
-- > stuff <- readFile inFile
-- > writeFile outFile ("-- preprocessed as a test\n\n" ++ stuff)
-- > return ExitSuccess
--
-- We split the input and output file names into a base directory and the
-- rest of the file name. The input base dir is the path in the list of search
-- dirs that this file was found in. The output base dir is the build dir where
-- all the generated source files are put.
--
-- The reason for splitting it up this way is that some pre-processors don't
-- simply generate one output .hs file from one input file but have
-- dependencies on other generated files (notably c2hs, where building one
-- .hs file may require reading other .chi files, and then compiling the .hs
-- file may require reading a generated .h file). In these cases the generated
-- files need to embed relative path names to each other (eg the generated .hs
-- file mentions the .h file in the FFI imports). This path must be relative to
-- the base directory where the generated files are located, it cannot be
-- relative to the top level of the build tree because the compilers do not
-- look for .h files relative to there, ie we do not use \"-I .\", instead we
-- use \"-I dist\/build\" (or whatever dist dir has been set by the user)
--
-- Most pre-processors do not care of course, so mkSimplePreProcessor and
-- runSimplePreProcessor functions handle the simple case.
--
data PreProcessor = PreProcessor {
  -- Is the output of the pre-processor platform independent? eg happy output
  -- is portable haskell but c2hs's output is platform dependent.
  -- This matters since only platform independent generated code can be
  -- included into a source tarball.
  platformIndependent :: Bool,
  -- TODO: deal with pre-processors that have implementation dependent output
  -- eg alex and happy have --ghc flags. However we can't really include
  -- ghc-specific code into supposedly portable source tarballs.
  runPreProcessor :: (FilePath, FilePath) -- Location of the source file relative to a base dir
                  -> (FilePath, FilePath) -- Output file name, relative to an output base dir
                  -> Verbosity -- verbosity
                  -> IO ()     -- Should exit if the preprocessor fails
  }

-- | Function to determine paths to possible extra C sources for a
-- preprocessor: just takes the path to the build directory and uses
-- this to search for C sources with names that match the
-- preprocessor's output name format.
type PreProcessorExtras = FilePath -> IO [FilePath]
-- | Lift a simple @inFile -> outFile -> verbosity@ action into the
-- (baseDir, relativeFile) interface used by 'PreProcessor', joining
-- and normalising each path pair.
mkSimplePreProcessor :: (FilePath -> FilePath -> Verbosity -> IO ())
                     -> (FilePath, FilePath)
                     -> (FilePath, FilePath) -> Verbosity -> IO ()
mkSimplePreProcessor simplePP (inBaseDir, inRelativeFile)
                              (outBaseDir, outRelativeFile) verbosity =
    simplePP inputPath outputPath verbosity
  where
    inputPath  = normalise (inBaseDir </> inRelativeFile)
    outputPath = normalise (outBaseDir </> outRelativeFile)
-- | Run a 'PreProcessor' on files addressed relative to the current
-- directory.
runSimplePreProcessor :: PreProcessor -> FilePath -> FilePath -> Verbosity
                      -> IO ()
runSimplePreProcessor pp inFile outFile verbosity =
  runPreProcessor pp (".", inFile) (".", outFile) verbosity
-- |A preprocessor for turning non-Haskell files with the given extension
-- into plain Haskell source files: the file extension paired with a
-- builder for the corresponding 'PreProcessor'.
type PPSuffixHandler
    = (String, BuildInfo -> LocalBuildInfo -> ComponentLocalBuildInfo -> PreProcessor)
-- | Apply preprocessors to the sources from 'hsSourceDirs' for a given
-- component (lib, exe, or test suite).
--
-- In every case the search path is the component's 'hsSourceDirs' plus the
-- per-component and per-package autogen module directories.  The Bool flag
-- is passed on to 'preprocessFile' as its "preprocess for sdist" switch.
preprocessComponent :: PackageDescription
                    -> Component
                    -> LocalBuildInfo
                    -> ComponentLocalBuildInfo
                    -> Bool
                    -> Verbosity
                    -> [PPSuffixHandler]
                    -> IO ()
preprocessComponent pd comp lbi clbi isSrcDist verbosity handlers = case comp of
  (CLib lib@Library{ libBuildInfo = bi }) -> do
    let dirs = hsSourceDirs bi ++ [autogenComponentModulesDir lbi clbi
                                  ,autogenPackageModulesDir lbi]
    setupMessage verbosity "Preprocessing library" (packageId pd)
    for_ (map ModuleName.toFilePath $ libModules lib) $
      pre dirs (componentBuildDir lbi clbi) (localHandlers bi)
  (CExe exe@Executable { buildInfo = bi, exeName = nm }) -> do
    let exeDir = buildDir lbi </> nm </> nm ++ "-tmp"
        dirs = hsSourceDirs bi ++ [autogenComponentModulesDir lbi clbi
                                  ,autogenPackageModulesDir lbi]
    setupMessage verbosity ("Preprocessing executable '" ++ nm ++ "' for") (packageId pd)
    for_ (map ModuleName.toFilePath $ otherModules bi) $
      pre dirs exeDir (localHandlers bi)
    -- the executable's main module is looked up in the raw hsSourceDirs
    -- only (no autogen dirs)
    pre (hsSourceDirs bi) exeDir (localHandlers bi) $
      dropExtensions (modulePath exe)
  CTest test@TestSuite{ testName = nm } -> do
    setupMessage verbosity ("Preprocessing test suite '" ++ nm ++ "' for") (packageId pd)
    case testInterface test of
      TestSuiteExeV10 _ f ->
        preProcessTest test f $ buildDir lbi </> testName test
          </> testName test ++ "-tmp"
      TestSuiteLibV09 _ _ -> do
        -- lib-style suites are driven by a generated stub executable
        let testDir = buildDir lbi </> stubName test
              </> stubName test ++ "-tmp"
        writeSimpleTestStub test testDir
        preProcessTest test (stubFilePath test) testDir
      TestSuiteUnsupported tt -> die $ "No support for preprocessing test "
                                    ++ "suite type " ++ display tt
  CBench bm@Benchmark{ benchmarkName = nm } -> do
    setupMessage verbosity ("Preprocessing benchmark '" ++ nm ++ "' for") (packageId pd)
    case benchmarkInterface bm of
      BenchmarkExeV10 _ f ->
        preProcessBench bm f $ buildDir lbi </> benchmarkName bm
          </> benchmarkName bm ++ "-tmp"
      BenchmarkUnsupported tt -> die $ "No support for preprocessing benchmark "
                                    ++ "type " ++ display tt
  where
    builtinHaskellSuffixes = ["hs", "lhs", "hsig", "lhsig"]
    builtinCSuffixes = cSourceExtensions
    builtinSuffixes = builtinHaskellSuffixes ++ builtinCSuffixes
    -- instantiate each suffix handler for this component
    localHandlers bi = [(ext, h bi lbi clbi) | (ext, h) <- handlers]
    pre dirs dir lhndlrs fp =
      preprocessFile dirs dir isSrcDist fp verbosity builtinSuffixes lhndlrs
    preProcessTest test = preProcessComponent (testBuildInfo test)
                          (testModules test)
    preProcessBench bm = preProcessComponent (benchmarkBuildInfo bm)
                         (benchmarkModules bm)
    -- shared worker for tests and benchmarks: preprocess the listed
    -- modules, then the main (exe) module itself
    preProcessComponent bi modules exePath dir = do
      let biHandlers = localHandlers bi
          sourceDirs = hsSourceDirs bi ++ [ autogenComponentModulesDir lbi clbi
                                          , autogenPackageModulesDir lbi ]
      sequence_ [ preprocessFile sourceDirs dir isSrcDist
                  (ModuleName.toFilePath modu) verbosity builtinSuffixes
                  biHandlers
                | modu <- modules ]
      preprocessFile (dir : (hsSourceDirs bi)) dir isSrcDist
        (dropExtensions $ exePath) verbosity
        builtinSuffixes biHandlers
--TODO: try to list all the modules that could not be found
-- not just the first one. It's annoying and slow due to the need
-- to reconfigure after editing the .cabal file each time.
-- |Find the first extension of the file that exists, and preprocess it
-- if required.
preprocessFile
  :: [FilePath]               -- ^source directories
  -> FilePath                 -- ^build directory
  -> Bool                     -- ^preprocess for sdist
  -> FilePath                 -- ^module file name
  -> Verbosity                -- ^verbosity
  -> [String]                 -- ^builtin suffixes
  -> [(String, PreProcessor)] -- ^possible preprocessors
  -> IO ()
preprocessFile searchLoc buildLoc forSDist baseFile verbosity builtinSuffixes handlers = do
  -- look for files in the various source dirs with this module name
  -- and a file extension of a known preprocessor
  psrcFiles <- findFileWithExtension' (map fst handlers) searchLoc baseFile
  case psrcFiles of
    -- no preprocessor file exists, look for an ordinary source file
    -- just to make sure one actually exists at all for this module.
    -- Note: by looking in the target/output build dir too, we allow
    -- source files to appear magically in the target build dir without
    -- any corresponding "real" source file. This lets custom Setup.hs
    -- files generate source modules directly into the build dir without
    -- the rest of the build system being aware of it (somewhat dodgy)
    Nothing -> do
      bsrcFiles <- findFileWithExtension builtinSuffixes (buildLoc : searchLoc) baseFile
      case bsrcFiles of
        Nothing -> die $ "can't find source for " ++ baseFile
                      ++ " in " ++ intercalate ", " searchLoc
        _ -> return ()
    -- found a pre-processable file in one of the source dirs
    Just (psrcLoc, psrcRelFile) -> do
      let (srcStem, ext) = splitExtension psrcRelFile
          psrcFile = psrcLoc </> psrcRelFile
          -- lookup cannot fail here: the extension came from the
          -- handlers list in the first place
          pp = fromMaybe (error "Distribution.Simple.PreProcess: Just expected")
                         (lookup (tailNotNull ext) handlers)
      -- Preprocessing files for 'sdist' is different from preprocessing
      -- for 'build'. When preprocessing for sdist we preprocess to
      -- avoid that the user has to have the preprocessors available.
      -- ATM, we don't have a way to specify which files are to be
      -- preprocessed and which not, so for sdist we only process
      -- platform independent files and put them into the 'buildLoc'
      -- (which we assume is set to the temp. directory that will become
      -- the tarball).
      --TODO: eliminate sdist variant, just supply different handlers
      when (not forSDist || forSDist && platformIndependent pp) $ do
        -- look for existing pre-processed source file in the dest dir to
        -- see if we really have to re-run the preprocessor.
        ppsrcFiles <- findFileWithExtension builtinSuffixes [buildLoc] baseFile
        recomp <- case ppsrcFiles of
                    Nothing -> return True
                    Just ppsrcFile ->
                      psrcFile `moreRecentFile` ppsrcFile
        when recomp $ do
          let destDir = buildLoc </> dirName srcStem
          createDirectoryIfMissingVerbose verbosity True destDir
          runPreProcessorWithHsBootHack pp
            (psrcLoc, psrcRelFile)
            (buildLoc, srcStem <.> "hs")
  where
    dirName = takeDirectory
    -- drop the leading '.' of a file extension; empty input stays empty
    tailNotNull [] = []
    tailNotNull x = tail x
    -- FIXME: This is a somewhat nasty hack. GHC requires that hs-boot files
    -- be in the same place as the hs files, so if we put the hs file in dist/
    -- then we need to copy the hs-boot file there too. This should probably be
    -- done another way. Possibly we should also be looking for .lhs-boot
    -- files, but I think that preprocessors only produce .hs files.
    runPreProcessorWithHsBootHack pp
      (inBaseDir, inRelativeFile)
      (outBaseDir, outRelativeFile) = do
        runPreProcessor pp
          (inBaseDir, inRelativeFile)
          (outBaseDir, outRelativeFile) verbosity
        exists <- doesFileExist inBoot
        when exists $ copyFileVerbose verbosity inBoot outBoot
      where
        inBoot  = replaceExtension inFile "hs-boot"
        outBoot = replaceExtension outFile "hs-boot"
        inFile  = normalise (inBaseDir </> inRelativeFile)
        outFile = normalise (outBaseDir </> outRelativeFile)
-- ------------------------------------------------------------
-- * known preprocessors
-- ------------------------------------------------------------
-- | Preprocessor for GreenCard (@.gc@) files: runs the @greencard@ program
-- with FFI output (@-tffi@).  Marked platform dependent.
ppGreenCard :: BuildInfo -> LocalBuildInfo -> ComponentLocalBuildInfo -> PreProcessor
ppGreenCard _ lbi _
  = PreProcessor {
      platformIndependent = False,
      runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity ->
        runDbProgram verbosity greencardProgram (withPrograms lbi)
          (["-tffi", "-o" ++ outFile, inFile])
    }
-- This one is useful for preprocessors that can't handle literate source.
-- We also need a way to chain preprocessors.
-- | Preprocessor that de-literates a @.lhs@-style file (UTF-8 aware).
ppUnlit :: PreProcessor
ppUnlit =
  PreProcessor {
    platformIndependent = True,
    runPreProcessor = mkSimplePreProcessor $ \inFile outFile _verbosity ->
      withUTF8FileContents inFile $ \contents ->
        -- NOTE(review): this writes on Left and dies on Right, i.e. it
        -- treats 'unlit''s Left as success and Right as the error
        -- message — confirm against 'unlit''s type before changing.
        either (writeUTF8File outFile) die (unlit inFile contents)
  }
-- | CPP preprocessor with no extra arguments; see 'ppCpp''.
ppCpp :: BuildInfo -> LocalBuildInfo -> ComponentLocalBuildInfo -> PreProcessor
ppCpp = ppCpp' []
-- | CPP preprocessor: uses the compiler's own CPP for GHC (>= 6.6) and
-- GHCJS, and falls back to the @cpphs@ program otherwise.  The extra
-- arguments are appended after the flags from 'getCppOptions'.
ppCpp' :: [String] -> BuildInfo -> LocalBuildInfo -> ComponentLocalBuildInfo -> PreProcessor
ppCpp' extraArgs bi lbi clbi =
  case compilerFlavor (compiler lbi) of
    GHC   -> ppGhcCpp ghcProgram (>= Version [6,6] []) args bi lbi clbi
    GHCJS -> ppGhcCpp ghcjsProgram (const True) args bi lbi clbi
    _     -> ppCpphs args bi lbi clbi
  where cppArgs = getCppOptions bi lbi
        args = cppArgs ++ extraArgs
-- | Run a GHC-like compiler as a C preprocessor (@-E -cpp@).  The
-- @xHs@ predicate decides, from the compiler version, whether to pass
-- @-x hs@ so the file is not treated as literate (see comment below).
ppGhcCpp :: Program -> (Version -> Bool)
         -> [String] -> BuildInfo -> LocalBuildInfo -> ComponentLocalBuildInfo -> PreProcessor
ppGhcCpp program xHs extraArgs _bi lbi clbi =
  PreProcessor {
    platformIndependent = False,
    runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity -> do
      (prog, version, _) <- requireProgramVersion verbosity
                              program anyVersion (withPrograms lbi)
      runProgram verbosity prog $
          ["-E", "-cpp"]
          -- This is a bit of an ugly hack. We're going to
          -- unlit the file ourselves later on if appropriate,
          -- so we need GHC not to unlit it now or it'll get
          -- double-unlitted. In the future we might switch to
          -- using cpphs --unlit instead.
       ++ (if xHs version then ["-x", "hs"] else [])
       ++ [ "-optP-include", "-optP"++ (autogenComponentModulesDir lbi clbi </> cppHeaderName) ]
       ++ ["-o", outFile, inFile]
       ++ extraArgs
  }
-- | Run the standalone @cpphs@ program as the C preprocessor.  From
-- cpphs 1.6 onwards the generated CPP header is passed via @--include@.
ppCpphs :: [String] -> BuildInfo -> LocalBuildInfo -> ComponentLocalBuildInfo -> PreProcessor
ppCpphs extraArgs _bi lbi clbi =
  PreProcessor {
    platformIndependent = False,
    runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity -> do
      (cpphsProg, cpphsVersion, _) <- requireProgramVersion verbosity
                                        cpphsProgram anyVersion (withPrograms lbi)
      runProgram verbosity cpphsProg $
          ("-O" ++ outFile) : inFile
        : "--noline" : "--strip"
        : (if cpphsVersion >= Version [1,6] []
             then ["--include="++ (autogenComponentModulesDir lbi clbi </> cppHeaderName)]
             else [])
        ++ extraArgs
  }
-- | Preprocessor for @.hsc@ files: drives @hsc2hs@, assembling the full
-- set of C compiler and linker flags from the current package's build
-- info, the platform defines, and every dependent package.
ppHsc2hs :: BuildInfo -> LocalBuildInfo -> ComponentLocalBuildInfo -> PreProcessor
ppHsc2hs bi lbi clbi =
  PreProcessor {
    platformIndependent = False,
    runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity -> do
      (gccProg, _) <- requireProgram verbosity gccProgram (withPrograms lbi)
      runDbProgram verbosity hsc2hsProgram (withPrograms lbi) $
          [ "--cc=" ++ programPath gccProg
          , "--ld=" ++ programPath gccProg ]
          -- Additional gcc options
       ++ [ "--cflag=" ++ opt | opt <- programDefaultArgs gccProg
                                    ++ programOverrideArgs gccProg ]
       ++ [ "--lflag=" ++ opt | opt <- programDefaultArgs gccProg
                                    ++ programOverrideArgs gccProg ]
          -- OSX frameworks:
       ++ [ what ++ "=-F" ++ opt
          | isOSX
          , opt <- nub (concatMap Installed.frameworkDirs pkgs)
          , what <- ["--cflag", "--lflag"] ]
       ++ [ "--lflag=" ++ arg
          | isOSX
          , opt <- PD.frameworks bi ++ concatMap Installed.frameworks pkgs
          , arg <- ["-framework", opt] ]
          -- Note that on ELF systems, wherever we use -L, we must also use -R
          -- because presumably that -L dir is not on the normal path for the
          -- system's dynamic linker. This is needed because hsc2hs works by
          -- compiling a C program and then running it.
       ++ [ "--cflag=" ++ opt | opt <- platformDefines lbi ]
          -- Options from the current package:
       ++ [ "--cflag=-I" ++ dir | dir <- PD.includeDirs bi ]
       ++ [ "--cflag=" ++ opt | opt <- PD.ccOptions bi
                                    ++ PD.cppOptions bi ]
       ++ [ "--cflag=" ++ opt | opt <-
              [ "-I" ++ autogenComponentModulesDir lbi clbi,
                "-I" ++ autogenPackageModulesDir lbi,
                "-include", autogenComponentModulesDir lbi clbi </> cppHeaderName ] ]
       ++ [ "--lflag=-L" ++ opt | opt <- PD.extraLibDirs bi ]
       ++ [ "--lflag=-Wl,-R," ++ opt | isELF
                                     , opt <- PD.extraLibDirs bi ]
       ++ [ "--lflag=-l" ++ opt | opt <- PD.extraLibs bi ]
       ++ [ "--lflag=" ++ opt | opt <- PD.ldOptions bi ]
          -- Options from dependent packages
       ++ [ "--cflag=" ++ opt
          | pkg <- pkgs
          , opt <- [ "-I" ++ opt | opt <- Installed.includeDirs pkg ]
                ++ [ opt | opt <- Installed.ccOptions pkg ] ]
       ++ [ "--lflag=" ++ opt
          | pkg <- pkgs
          , opt <- [ "-L" ++ opt | opt <- Installed.libraryDirs pkg ]
                ++ [ "-Wl,-R," ++ opt | isELF
                                      , opt <- Installed.libraryDirs pkg ]
                ++ [ "-l" ++ opt | opt <- Installed.extraLibraries pkg ]
                ++ [ opt | opt <- Installed.ldOptions pkg ] ]
       ++ ["-o", outFile, inFile]
  }
  where
    -- TODO: installedPkgs contains ALL dependencies associated with
    -- the package, but we really only want to look at packages for the
    -- *current* dependency. We should use PackageIndex.dependencyClosure
    -- on the direct depends of the component. The signature of this
    -- function was recently refactored, so this should be fixable
    -- now. Tracked with #2971 (which has a test case.)
    pkgs = PackageIndex.topologicalOrder (packageHacks (installedPkgs lbi))
    isOSX = case buildOS of OSX -> True; _ -> False
    isELF = case buildOS of OSX -> False; Windows -> False; AIX -> False; _ -> True;
    packageHacks = case compilerFlavor (compiler lbi) of
      GHC   -> hackRtsPackage
      GHCJS -> hackRtsPackage
      _     -> id
    -- We don't link in the actual Haskell libraries of our dependencies, so
    -- the -u flags in the ldOptions of the rts package mean linking fails on
    -- OS X (it's ld is a tad stricter than gnu ld). Thus we remove the
    -- ldOptions for GHC's rts package:
    hackRtsPackage index =
      case PackageIndex.lookupPackageName index (PackageName "rts") of
        [(_, [rts])]
          -> PackageIndex.insert rts { Installed.ldOptions = [] } index
        _ -> error "No (or multiple) ghc rts package is registered!!"
-- | Collect the @_hsc.c@ helper C sources that hsc2hs leaves behind in
-- the build directory (searched recursively).
ppHsc2hsExtras :: PreProcessorExtras
ppHsc2hsExtras buildBaseDir =
  fmap (filter ("_hsc.c" `isSuffixOf`))
       (getDirectoryContentsRecursive buildBaseDir)
-- | Preprocessor for @.chs@ files: drives @c2hs@ (>= 0.15) with gcc as
-- the C preprocessor, passing CPP options for this package and its
-- dependencies.  Uses the raw two-part path interface rather than
-- 'mkSimplePreProcessor' because c2hs wants an output dir + relative file.
ppC2hs :: BuildInfo -> LocalBuildInfo -> ComponentLocalBuildInfo -> PreProcessor
ppC2hs bi lbi clbi =
  PreProcessor {
    platformIndependent = False,
    runPreProcessor = \(inBaseDir, inRelativeFile)
                       (outBaseDir, outRelativeFile) verbosity -> do
      (c2hsProg, _, _) <- requireProgramVersion verbosity
                            c2hsProgram (orLaterVersion (Version [0,15] []))
                            (withPrograms lbi)
      (gccProg, _) <- requireProgram verbosity gccProgram (withPrograms lbi)
      runProgram verbosity c2hsProg $
          -- Options from the current package:
          [ "--cpp=" ++ programPath gccProg, "--cppopts=-E" ]
       ++ [ "--cppopts=" ++ opt | opt <- getCppOptions bi lbi ]
       ++ [ "--cppopts=-include" ++ (autogenComponentModulesDir lbi clbi </> cppHeaderName) ]
       ++ [ "--include=" ++ outBaseDir ]
          -- Options from dependent packages
       ++ [ "--cppopts=" ++ opt
          | pkg <- pkgs
          , opt <- [ "-I" ++ opt | opt <- Installed.includeDirs pkg ]
                ++ [ opt | opt@('-':c:_) <- Installed.ccOptions pkg
                         -- only forward -D/-I/-U flags to the cpp
                         , c `elem` "DIU" ] ]
          --TODO: install .chi files for packages, so we can --include
          -- those dirs here, for the dependencies
          -- input and output files
       ++ [ "--output-dir=" ++ outBaseDir
          , "--output=" ++ outRelativeFile
          , inBaseDir </> inRelativeFile ]
  }
  where
    pkgs = PackageIndex.topologicalOrder (installedPkgs lbi)
-- | Collect the @.chs.c@ helper C sources that c2hs leaves behind in
-- the given directory (searched recursively).
ppC2hsExtras :: PreProcessorExtras
ppC2hsExtras d =
  fmap (filter ((== ".chs.c") . takeExtensions))
       (getDirectoryContentsRecursive d)
--TODO: perhaps use this with hsc2hs too
--TODO: remove cc-options from cpphs for cabal-version: >= 1.10
-- | Collect CPP flags for a component: platform defines, the package's
-- cpp-options, its include dirs, and any @-D@\/@-I@\/@-U@ flags found in
-- its cc-options.
getCppOptions :: BuildInfo -> LocalBuildInfo -> [String]
getCppOptions bi lbi
  = platformDefines lbi
    ++ cppOptions bi
    ++ ["-I" ++ dir | dir <- PD.includeDirs bi]
    ++ [opt | opt@('-':c:_) <- PD.ccOptions bi, c `elem` "DIU"]
-- | The set of @-D@ defines describing the compiler, build OS\/arch and
-- host OS\/arch, in the style GHC's own driver uses
-- (e.g. @-D__GLASGOW_HASKELL__=800@, @-Dlinux_HOST_OS=1@).
platformDefines :: LocalBuildInfo -> [String]
platformDefines lbi =
  case compilerFlavor comp of
    GHC ->
      ["-D__GLASGOW_HASKELL__=" ++ versionInt version] ++
      ["-D" ++ os ++ "_BUILD_OS=1"] ++
      ["-D" ++ arch ++ "_BUILD_ARCH=1"] ++
      map (\os' -> "-D" ++ os' ++ "_HOST_OS=1") osStr ++
      map (\arch' -> "-D" ++ arch' ++ "_HOST_ARCH=1") archStr
    GHCJS ->
      compatGlasgowHaskell ++
      ["-D__GHCJS__=" ++ versionInt version] ++
      ["-D" ++ os ++ "_BUILD_OS=1"] ++
      ["-D" ++ arch ++ "_BUILD_ARCH=1"] ++
      map (\os' -> "-D" ++ os' ++ "_HOST_OS=1") osStr ++
      map (\arch' -> "-D" ++ arch' ++ "_HOST_ARCH=1") archStr
    JHC -> ["-D__JHC__=" ++ versionInt version]
    HaskellSuite {} ->
      ["-D__HASKELL_SUITE__"] ++
      map (\os' -> "-D" ++ os' ++ "_HOST_OS=1") osStr ++
      map (\arch' -> "-D" ++ arch' ++ "_HOST_ARCH=1") archStr
    _ -> []
  where
    comp = compiler lbi
    Platform hostArch hostOS = hostPlatform lbi
    version = compilerVersion comp
    -- for GHCJS, also advertise the GHC version it is compatible with
    compatGlasgowHaskell =
      maybe [] (\v -> ["-D__GLASGOW_HASKELL__=" ++ versionInt v])
        (compilerCompatVersion GHC comp)
    -- TODO: move this into the compiler abstraction
    -- FIXME: this forces GHC's crazy 4.8.2 -> 408 convention on all
    -- the other compilers. Check if that's really what they want.
    versionInt :: Version -> String
    versionInt (Version { versionBranch = [] }) = "1"
    versionInt (Version { versionBranch = [n] }) = show n
    versionInt (Version { versionBranch = n1:n2:_ })
      = -- 6.8.x -> 608
        -- 6.10.x -> 610
        let s1 = show n1
            s2 = show n2
            -- pad the minor component to two digits
            middle = case s2 of
                       _ : _ : _ -> ""
                       _         -> "0"
        in s1 ++ middle ++ s2
    -- the CPP spelling of each host OS (empty list = no define)
    osStr = case hostOS of
      Linux     -> ["linux"]
      Windows   -> ["mingw32"]
      OSX       -> ["darwin"]
      FreeBSD   -> ["freebsd"]
      OpenBSD   -> ["openbsd"]
      NetBSD    -> ["netbsd"]
      DragonFly -> ["dragonfly"]
      Solaris   -> ["solaris2"]
      AIX       -> ["aix"]
      HPUX      -> ["hpux"]
      IRIX      -> ["irix"]
      HaLVM     -> []
      IOS       -> ["ios"]
      Android   -> ["android"]
      Ghcjs     -> ["ghcjs"]
      Hurd      -> ["hurd"]
      OtherOS _ -> []
    -- the CPP spelling of each host architecture
    archStr = case hostArch of
      I386        -> ["i386"]
      X86_64      -> ["x86_64"]
      PPC         -> ["powerpc"]
      PPC64       -> ["powerpc64"]
      Sparc       -> ["sparc"]
      Arm         -> ["arm"]
      Mips        -> ["mips"]
      SH          -> []
      IA64        -> ["ia64"]
      S390        -> ["s390"]
      Alpha       -> ["alpha"]
      Hppa        -> ["hppa"]
      Rs6000      -> ["rs6000"]
      M68k        -> ["m68k"]
      Vax         -> ["vax"]
      JavaScript  -> ["javascript"]
      OtherArch _ -> []
-- | Preprocessor for Happy grammars (@.y@ \/ @.ly@).  For GHC(JS) the
-- @-agc@ flags are passed; the result is marked platform independent.
ppHappy :: BuildInfo -> LocalBuildInfo -> ComponentLocalBuildInfo -> PreProcessor
ppHappy _ lbi _ =
  (standardPP lbi happyProgram flags) { platformIndependent = True }
  where
    flags = case compilerFlavor (compiler lbi) of
              GHC   -> ["-agc"]
              GHCJS -> ["-agc"]
              _     -> []
-- | Preprocessor for Alex lexers (@.x@).  For GHC(JS) the @-g@ flag is
-- passed; the result is marked platform independent.
ppAlex :: BuildInfo -> LocalBuildInfo -> ComponentLocalBuildInfo -> PreProcessor
ppAlex _ lbi _ =
  (standardPP lbi alexProgram flags) { platformIndependent = True }
  where
    flags = case compilerFlavor (compiler lbi) of
              GHC   -> ["-g"]
              GHCJS -> ["-g"]
              _     -> []
-- | Build a 'PreProcessor' that just runs the given program with the given
-- flags plus @-o outFile inFile@.  Platform dependent by default; callers
-- such as 'ppHappy' override the flag via record update.
standardPP :: LocalBuildInfo -> Program -> [String] -> PreProcessor
standardPP lbi prog args =
  PreProcessor {
    platformIndependent = False,
    runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity ->
      runDbProgram verbosity prog (withPrograms lbi)
        (args ++ ["-o", outFile, inFile])
  }
-- |Convenience function; get the suffixes of these preprocessors.
ppSuffixes :: [ PPSuffixHandler ] -> [String]
ppSuffixes handlers = [ ext | (ext, _) <- handlers ]
-- |Standard preprocessors: GreenCard, c2hs, hsc2hs, happy, alex and cpphs.
-- The extension (without dot) selects the handler during 'preprocessFile'.
knownSuffixHandlers :: [ PPSuffixHandler ]
knownSuffixHandlers =
  [ ("gc", ppGreenCard)
  , ("chs", ppC2hs)
  , ("hsc", ppHsc2hs)
  , ("x", ppAlex)
  , ("y", ppHappy)
  , ("ly", ppHappy)
  , ("cpphs", ppCpp)
  ]
-- |Standard preprocessors with possible extra C sources: c2hs, hsc2hs.
-- Consulted by 'preprocessExtras' to pick up generated C files.
knownExtrasHandlers :: [ PreProcessorExtras ]
knownExtrasHandlers = [ ppC2hsExtras, ppHsc2hsExtras ]
-- | Find any extra C sources generated by preprocessing that need to
-- be added to the component (addresses issue #238).
--
-- Each component kind maps to the build directory in which its
-- preprocessor output lives; the known extras handlers are then run over
-- that directory.
preprocessExtras :: Component
                 -> LocalBuildInfo
                 -> IO [FilePath]
preprocessExtras comp lbi = case comp of
  CLib _ -> pp $ buildDir lbi
  (CExe Executable { exeName = nm }) ->
    pp $ buildDir lbi </> nm </> nm ++ "-tmp"
  CTest test -> do
    case testInterface test of
      TestSuiteExeV10 _ _ ->
        pp $ buildDir lbi </> testName test </> testName test ++ "-tmp"
      TestSuiteLibV09 _ _ ->
        pp $ buildDir lbi </> stubName test </> stubName test ++ "-tmp"
      TestSuiteUnsupported tt -> die $ "No support for preprocessing test "
                                    ++ "suite type " ++ display tt
  CBench bm -> do
    case benchmarkInterface bm of
      BenchmarkExeV10 _ _ ->
        pp $ buildDir lbi </> benchmarkName bm </> benchmarkName bm ++ "-tmp"
      BenchmarkUnsupported tt -> die $ "No support for preprocessing benchmark "
                                    ++ "type " ++ display tt
  where
    pp :: FilePath -> IO [FilePath]
    pp dir = (map (dir </>) . filter not_sub . concat)
         <$> for knownExtrasHandlers
               (withLexicalCallStack (\f -> f dir))
    -- TODO: This is a terrible hack to work around #3545 while we don't
    -- reorganize the directory layout. Basically, for the main
    -- library, we might accidentally pick up autogenerated sources for
    -- our subcomponents, because they are all stored as subdirectories
    -- in dist/build. This is a cheap and cheerful check to prevent
    -- this from happening. It is not particularly correct; for example
    -- if a user has a test suite named foobar and puts their C file in
    -- foobar/foo.c, this test will incorrectly exclude it. But I
    -- didn't want to break BC...
    not_sub p = and [ not (pre `isPrefixOf` p) | pre <- component_dirs ]
    component_dirs = component_names (localPkgDescr lbi)
    -- TODO: libify me
    component_names pkg_descr =
      mapMaybe libName (subLibraries pkg_descr) ++
      map exeName (executables pkg_descr) ++
      map testName (testSuites pkg_descr) ++
      map benchmarkName (benchmarks pkg_descr)
| sopvop/cabal | Cabal/Distribution/Simple/PreProcess.hs | bsd-3-clause | 31,208 | 0 | 30 | 8,872 | 6,081 | 3,248 | 2,833 | 455 | 40 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module Main where
import Lib
import Database.PostgreSQL.Simple
import Opaleye
import Data.Profunctor.Product.TH (makeAdaptorAndInstance)
-- Generate the record type(s) for the "users" table.
-- NOTE(review): these TH splices appear to connect to the database at
-- *compile time* using the hard-coded credentials below — confirm this is
-- intended before building outside the dev environment.
$(makeOpaleyeModel defaultConnectInfo { connectPassword = "postgres", connectDatabase = "scratch"} "users" "Users")
-- Product-profunctor adaptor + instances for the generated poly record.
$(makeAdaptorAndInstance ("pUsers") ''UsersPoly)
-- Generate the Opaleye Table definition for "users".
$(makeOpaleyeTable defaultConnectInfo { connectPassword = "postgres", connectDatabase = "scratch"} "users" "Users")
-- | No runtime behaviour: this executable exists to exercise the splices.
main :: IO ()
main = return ()
| vacationlabs/opaleye-helpers | app/Main.hs | bsd-3-clause | 561 | 0 | 9 | 62 | 124 | 70 | 54 | 13 | 1 |
import Control.Applicative
import Control.Monad
import Data.List
-- | Read two lines — @[n, x]@ and a list of @n@ prices — then print the
-- dot product of the binary digits of @x@ (LSB first) with the prices.
main :: IO ()
main = do
  [[n, x], prices] <- replicateM 2 readNumStrings
  let ws = weights n x
    in print . sum . zipWith (*) ws $ prices
-- | Read one line from stdin and parse it as whitespace-separated Ints.
readNumStrings :: IO [Int]
readNumStrings = fmap (map read . words) getLine
-- | Binary digits of a number, least significant digit first.
-- @toRevBinary 0 == [0]@.
toRevBinary :: Int -> [Int]
toRevBinary n
  | q == 0    = [r]
  | otherwise = r : toRevBinary q
  where
    (q, r) = n `divMod` 2
-- | The first @n@ binary digits of @x@ (LSB first), zero-padded to
-- length @n@ when @x@ has fewer digits.
weights :: Int -> Int -> [Int]
weights n x = take n (toRevBinary x ++ repeat 0)
-- | This module pretty prints all the exif fields with rational values.
module Graphics.Hexif.PrettyPrintRat
( ppRationalValues
) where
import Graphics.Hexif.Types
import Text.Printf (printf)
import GHC.Float
-- | Pretty printer for exif tags with multiple rational values.
--
-- NOTE(review): the single-element equation below matches *before* the
-- tag-specific GPS equations, so a GPS tag whose value list has exactly
-- one element is formatted generically — confirm this is intended.
ppRationalValues :: ExifTag -> [(Int,Int)] -> String
ppRationalValues _ [] = "No values"
ppRationalValues tg (r : []) = ppRationalValue tg r
ppRationalValues TagGPSLatitude rs = ppGPSLongLatt rs
ppRationalValues TagGPSLongitude rs = ppGPSLongLatt rs
ppRationalValues TagGPSDestLatitude rs = ppGPSLongLatt rs
ppRationalValues TagGPSDestLongitude rs = ppGPSLongLatt rs
ppRationalValues TagGPSTimeStamp rs = ppGPSTimeStamp $ map rat2Double rs
ppRationalValues TagGPS0a _ = "GPS Tag 0a"
ppRationalValues TagGPS0b _ = "GPS Tag 0b"
ppRationalValues TagGPS0f _ = "GPS Tag 0f"
-- fallback: space-separated rendering of each rational
ppRationalValues _ rs = concatMap fmtRat' rs
  where fmtRat' r = fmtRat r ++ " "
-- | Pretty printer for exif tags with a single rational value.
-- Dispatches on the tag to a unit-aware formatter; unknown tags fall
-- through to the plain rational rendering.
ppRationalValue :: ExifTag -> (Int,Int) -> String
ppRationalValue t r
  | t == TagExposureTime = fmtRatWithSlash r ++ " sec."
  | t == TagFNumber = "f/" ++ fmtRatFloat r
  | t == TagCompressedBitsPerPixel = ' ' : fmtRat r
  | t == TagExposureBiasValue = ppExposureBiasValue r
  | t == TagFocalLength = ppFocalLength r
  | t == TagApertureValue = ppApertureValue f
  | t == TagMaxApertureValue = ppApertureValue f
  | t == TagShutterSpeedValue = ppShutterSpeedValue f
  | t == TagDigitalZoomRatio = printf "%.4f" f
  | t == TagBrightnessValue = ppBrightnessValue f
  | otherwise = fmtRat r
  where f = rat2Float r
-- | Helper function: Convert an (Int, Int) rational pair to a Float.
rat2Float :: (Int,Int) -> Float
rat2Float (num, den) = fromIntegral num / fromIntegral den
-- | Helper function: Convert an (Int, Int) rational pair to a Double.
rat2Double :: (Int,Int) -> Double
rat2Double (num, den) = fromIntegral num / fromIntegral den
-- | Helper function: Format a rational pair in reduced "num/den" form.
fmtRatWithSlash :: (Int, Int) -> String
fmtRatWithSlash (num, den) =
  let g = gcd num den
  in show (num `div` g) ++ "/" ++ show (den `div` g)
-- | Format a rational pair: as an integer when it divides evenly,
-- otherwise as a float.
fmtRat :: (Int, Int) -> String
fmtRat r@(num, den)
  | num `mod` den == 0 = fmtRatInt r
  | otherwise          = fmtRatFloat r
-- | Format a rational pair as an integer (truncating division).
fmtRatInt :: (Int, Int) -> String
fmtRatInt (num, den) = show (num `div` den)
-- | Format a rational pair as a float via 'rat2Float'.
fmtRatFloat :: (Int, Int) -> String
fmtRatFloat r = show (rat2Float r)
-- | Pretty print the value of the tag ExposureBiasValue in EV.
ppExposureBiasValue :: (Int, Int) -> String
ppExposureBiasValue = printf "%.2f EV" . rat2Float
-- | Pretty print the value of the tag FocalLength in millimetres.
ppFocalLength :: (Int, Int) -> String
ppFocalLength = printf "%.1f mm" . rat2Float
-- | Pretty print the tags ApertureValue and MaxApertureValue: shows the
-- APEX value and the corresponding f-number, f = 2 ^ (APEX / 2).
ppApertureValue :: Float -> String
ppApertureValue ev = printf "%.2f EV (f/%.1f)" ev fnum
  where
    fnum = 2 ** (ev / 2)
-- | Pretty print the tag ShutterSpeedValue: shows the APEX value and the
-- exposure time as a fraction of a second, 1 / floor(2 ^ APEX).
ppShutterSpeedValue :: Float -> String
ppShutterSpeedValue ev = printf "%.02f EV (1/%d sec.)" ev denom
  where
    denom :: Int
    denom = floor ((2 :: Float) ** ev)
-- | Pretty print the tag BrightnessValue: shows the APEX value and the
-- derived luminance in cd/m^2 (scaled by 1 / (pi * 0.3048^2)).
ppBrightnessValue :: Float -> String
ppBrightnessValue ev = printf "%.2f EV (%.2f cd/m^2)" ev luminance
  where
    luminance = 1 / (pi * 0.3048 * 0.3048) * 2 ** ev
-- | Pretty print a latitude/longitude GPS field: expects exactly three
-- rationals (degrees, minutes, seconds), normalised via 'degNorm'.
ppGPSLongLatt :: [(Int,Int)] -> String
ppGPSLongLatt rats =
  case map rat2Double rats of
    [d', m', s'] ->
      let (d, m, s) = degNorm d' m' s'
      in printf "%2d, %2d, %.4f" d m s
    _ -> "verify data format"
-- | Support function for ppGPSLongLatt: normalise a (deg, min, sec)
-- triple so minutes and seconds are in range, carrying overflow upwards.
degNorm :: Double -> Double -> Double -> (Int, Int, Float)
degNorm dd mm ss = (degs, mins, secs)
  where
    totalSecs = dd * 3600 + mm * 60 + ss
    hours     = totalSecs / 3600
    degs      = floor hours
    minsFrac  = (hours - fromIntegral degs) * 60
    mins      = floor minsFrac
    secs      = double2Float (minsFrac - fromIntegral mins) * 60
-- | Pretty print a GPS time stamp given as [hours, minutes, seconds];
-- anything other than a three-element list is rejected.
ppGPSTimeStamp :: [Double] -> String
ppGPSTimeStamp ts = case ts of
  [h, m, s] -> printf "%02.0f:%02.0f:%05.2f" h m s
  _         -> "Invalid date format"
| hansroland/hexif | src/Graphics/Hexif/PrettyPrintRat.hs | bsd-3-clause | 4,351 | 0 | 12 | 937 | 1,255 | 659 | 596 | 77 | 2 |
{-# LANGUAGE DeriveGeneric, DeriveAnyClass #-}
{-# LANGUAGE OverloadedStrings #-}
module Apps.Juno.Ledger (
dirtyPickOutAccount50a
,runQuery
,convertQuery
,Transaction(..)
,SwiftAPI(..)
,AcctRole(..)
,LedgerQuery(..)
,QueryResult(..)
) where
import Data.Either ()
import Control.Lens
import Control.Applicative ((<|>))
import Data.Aeson
import GHC.Natural
import Data.Text (Text, intercalate)
import qualified Data.Text as Text
import Data.Set (Set)
import qualified Data.Set as Set
import GHC.Generics
import Data.Map.Strict (Map)
import Data.Ratio
import qualified Data.Map.Strict as Map
import Juno.Hoplite.Eval (TransactionId(..), OpId(..), OrderedOp, Cmd(..))
import Schwifty.Swift.M105.Types
-- | JSON-serialisable view of a single ledger operation
-- (built from a Hoplite 'Cmd' in 'convertTrans').
data Transaction = Transaction {
  transId :: Int    -- ^ id of the enclosing transaction
  ,opId :: Int      -- ^ operation index within the transaction
  ,from :: Text     -- ^ first 'Cmd' account field — presumably the source; see 'convertTrans'
  ,to :: Text       -- ^ second 'Cmd' account field — presumably the destination
  ,amount :: Double -- ^ transferred amount (from a Rational)
  } deriving (Eq, Show, Generic, ToJSON, FromJSON)
-- | Flattened, JSON-friendly projection of a SWIFT MT105 message;
-- populated field-by-field in 'convertSWIFT'.
data SwiftAPI = SwiftAPI {
  ref :: Text                         -- ^ sender's reference (field 20)
  ,opCode :: Text                     -- ^ rendered operation code (field 23B)
  ,orderingAcct :: Text               -- ^ ordering account from whichever 50a variant is set
  ,orderingAcctDescription :: Text    -- ^ free-text remainder of the 50a block
  ,beneficiaryAcct :: Text            -- ^ beneficiary account (field 59a)
  ,beneficiaryAcctDescription :: Text -- ^ beneficiary details (field 59a)
  ,settled :: Double                  -- ^ settlement amount, whole units + fraction
  ,currency :: Text                   -- ^ settlement currency (field 32A)
  ,valueDate :: Text                  -- ^ value date (field 32A)
  ,details :: Text                    -- ^ rendered charge details (field 71A)
  } deriving (Show, Eq, Generic, ToJSON, FromJSON)
-- | Which side of a transfer an account query should match: the sending
-- side, the receiving side, or either ('Both').  See 'acctInTrans' for
-- exactly which 'Cmd' field each role inspects.
data AcctRole = Sender | Receiver | Both deriving (Show, Eq, Generic, ToJSON, FromJSON)
-- | A query against the ledger log: by SWIFT transaction id, by account
-- name in a given role, or a conjunction of sub-queries.
data LedgerQuery = BySwiftId Integer
                 | ByAcctName AcctRole Text
                 | And [LedgerQuery]
                 -- | Or [LedgerQuery] -- If we need this, we'll add it
                 deriving (Show, Eq, Generic, ToJSON, FromJSON)
-- | Aggregated, JSON-serialisable answer to a 'LedgerQuery'; map keys are
-- stringified transaction ids (see 'convertQuery').
data QueryResult = QueryResult {
  swifts :: Map String SwiftAPI -- ^ matching SWIFT messages by transaction id
  ,trans :: [Transaction]       -- ^ matching transfer operations
  ,inputs :: Map String Text    -- ^ per-transaction text from the log's third component — TODO confirm semantics
  } deriving (Show, Eq, Generic, ToJSON)
-- these are here solely for convenience
type HopperLog = ([(TransactionId, [OrderedOp])], Map TransactionId SWIFT,Map TransactionId Text)
type TransLog = [(TransactionId, [OrderedOp])]
--type SwiftLog = Map TransactionId SWIFT
-- | Execute a query against the log and convert the surviving entries
-- into the JSON-friendly 'QueryResult'.
runQuery :: LedgerQuery -> HopperLog -> QueryResult
runQuery q = convertQuery . execQuery q
-- | Narrow the log according to a query.  'And' is folded left-to-right,
-- each sub-query filtering the previous result.
execQuery :: LedgerQuery -> HopperLog -> HopperLog
execQuery (BySwiftId i) lss = filterById (fromInteger i) lss
execQuery (ByAcctName r i) lss = filterByAcct r i lss
execQuery (And []) _ = ([],Map.empty,mempty) -- if there's nothing to query, return nothing... technically an error
execQuery (And [x]) lss = execQuery x lss
execQuery (And (x:xs)) lss = execQuery (And xs) (execQuery x lss)
-- | Keep only the log entries belonging to the given transaction id,
-- restricting the op list and both maps alike.
filterById :: Natural -> HopperLog -> HopperLog
filterById targetId (ops, swiftMap, textMap) =
  (matchingOps, keepKey swiftMap, keepKey textMap)
  where
    wanted      = TransactionId targetId
    matchingOps = filter ((== wanted) . fst) ops
    keepKey     = Map.filterWithKey (\k _ -> k == wanted)
-- | Keep only transactions in which the given account plays the given
-- role, then restrict the SWIFT and text maps to the surviving ids.
filterByAcct :: AcctRole -> Text -> HopperLog -> HopperLog
filterByAcct r a (l, ss, ts) = (l', ss', ts')
  where
    l' = filter (acctInvolved r a . snd) l
    ss' = Map.filterWithKey (\k _ -> Set.member k (associatedSwiftIds l')) ss
    ts' = Map.filterWithKey (\k _ -> Set.member k (associatedSwiftIds l')) ts
-- | Does this single op involve the account in the given role?
-- NOTE(review): the pattern variable names look swapped relative to
-- 'convertTrans' (which reads @Cmd from' to' ...@): here 'Sender'
-- matches the *first* 'Cmd' field but names it @to'@, and 'Receiver'
-- names the second field @from'@.  Behaviour may still be right if the
-- first field is the sender — confirm against 'Cmd' in Juno.Hoplite.Eval.
acctInTrans :: AcctRole -> Text -> OrderedOp -> Bool
acctInTrans Sender a (_, Cmd to' _ _ _) = to' == a
acctInTrans Receiver a (_, Cmd _ from' _ _) = from' == a
acctInTrans Both a (_, Cmd to' from' _ _) = from' == a || to' == a
-- | Does the account participate (in the given role) in any op of the list?
acctInvolved :: AcctRole -> Text -> [OrderedOp] -> Bool
acctInvolved role acct ops = any (acctInTrans role acct) ops
-- | The set of transaction ids appearing in a transaction log.
associatedSwiftIds :: TransLog -> Set TransactionId
associatedSwiftIds entries = Set.fromList (map fst entries)
-- | Convert the internal log triple into the JSON-friendly 'QueryResult',
-- stringifying the 'TransactionId' map keys.
convertQuery :: HopperLog -> QueryResult
convertQuery (l, ss, ts) = QueryResult ss' l' ts'
  where
    ss' = Map.map convertSWIFT $ Map.mapKeys (\(TransactionId i) -> show i) ss
    l' = convertTrans l
    ts' = Map.mapKeys (\(TransactionId i) -> show i) ts
-- | Project a full 'SWIFT' message onto the flat 'SwiftAPI' record.
-- Positional construction: each argument is annotated with the target
-- field it populates.
convertSWIFT :: SWIFT -> SwiftAPI
convertSWIFT m = SwiftAPI
  (m ^. sCode20 . unSendersRef)                        -- ref :: Text
  (Text.pack $ show $ m ^. sCode23B)                   -- opCode :: Text
  (dirtyPickOutAccount50a m)                           -- orderingAcct :: Text
  (orderingAcctFreetext m)                             -- orderingAcctDescription :: Text
  (m ^. sCode59a . bcAccount)                          -- beneficiaryAcct :: Text
  (beneficiaryAcctFreetext m)                          -- beneficiaryAcctDescription :: Text
  (convertAmount m)                                    -- settled :: Double
  (m ^. sCode32A . vcsCurrency)                        -- currency :: Text
  (Text.pack $ m ^. sCode32A . vcsValueDate . unTime ) -- valueDate :: Text
  (Text.pack $ show $ m ^. sCode71A)                   -- details :: Text
-- | Settlement amount as a Double: whole currency units plus the
-- fractional part from field 32A.
convertAmount :: SWIFT -> Double
convertAmount m = fromRational $ wholeDollars + cents
  where
    wholeDollars :: Rational
    wholeDollars = fromIntegral $ m ^. sCode32A . vcsSettlementAmount . vWhole
    stupidCents :: Ratio Int
    stupidCents = m ^. sCode32A . vcsSettlementAmount . vPart
    cents :: Rational
    -- rebuild the Ratio Int as a Rational (equivalent to 'toRational')
    cents = (fromIntegral $ numerator stupidCents) % (fromIntegral $ denominator stupidCents)
-- | Extract the ordering-customer account from whichever 50a variant is
-- present, trying option A, then K, then the two F sub-fields in order;
-- errors out if none of them is set.
dirtyPickOutAccount50a :: SWIFT -> Text
dirtyPickOutAccount50a s =
  maybe (error "Invariant Error: invalid swift detected, no Code50a account") id $
        s ^? (sCode50a . ocA_Account . unAccount)
    <|> s ^? (sCode50a . ocK_Account . unAccount)
    <|> s ^? (sCode50a . ocF_Account . unF50F_Account . unAccount)
    <|> s ^? (sCode50a . ocF_Account . unF50F_PartyId . piIdentifier)
-- | Free-text remainder lines of whichever 50a variant carries them
-- (tried in the order A, F, K), joined with newlines; empty if none.
orderingAcctFreetext :: SWIFT -> Text
orderingAcctFreetext s = maybe "" (intercalate "\n") $
  (s ^? sCode50a . ocA_remainder . _Just) <|>
  (s ^? sCode50a . ocF_remainder . _Just) <|>
  (s ^? sCode50a . ocK_remainder . _Just)
-- | Beneficiary details text from field 59a.
beneficiaryAcctFreetext :: SWIFT -> Text
beneficiaryAcctFreetext s = view (sCode59a . bcDetails) s
-- | Flatten a transaction log into one 'Transaction' per operation.
convertTrans :: TransLog -> [Transaction]
convertTrans entries = concatMap expand entries
  where
    -- one API record per op, stamped with the enclosing transaction id
    expand (TransactionId tId, ops) = map toTrans ops
      where
        toTrans (OpId oId, Cmd from' to' amt' _) =
          Transaction (fromIntegral tId) (fromIntegral oId)
                      from' to' (fromRational amt')
| buckie/juno | src/Apps/Juno/Ledger.hs | bsd-3-clause | 6,006 | 0 | 19 | 1,188 | 2,009 | 1,106 | 903 | -1 | -1 |
{-# LANGUAGE DeriveGeneric, DeriveAnyClass #-}
module Base (
Context (..)
, Token (..)
, Term (..)
, TermType (..)
) where
import GHC.Generics (Generic)
import Control.DeepSeq (NFData)
-- | Typing context: a list of (variable name, type) bindings.
newtype Context = Context [(String, TermType)]
  deriving (Show)
-- | Lexical tokens of the calculus (booleans, conditionals, typed lambda).
data Token
  = TokenIf | TokenThen | TokenElse
  | TokenTrue | TokenFalse
  | TokenArrow | TokenBool
  | TokenColon
  | TokenLambda | TokenVar String | TokenDot
  | TokenLParen | TokenRParen
  deriving (Show, Generic, NFData)
-- | Terms.  'TermVar' carries an Int — presumably a de Bruijn index —
-- while 'TermAbs' keeps the bound variable's name alongside its type.
data Term
  = TermIfThenElse Term Term Term
  | TermTrue | TermFalse
  | TermVar Int
  | TermAbs String TermType Term
  | TermApp Term Term
  deriving (Eq, Show, Generic, NFData)
-- | Types: Bool and arrow (function) types.
data TermType
  = TypeBool
  | TypeArrow TermType TermType
  deriving (Eq, Show, Generic, NFData)
| foreverbell/unlimited-plt-toys | tapl/simplebool/Base.hs | bsd-3-clause | 761 | 0 | 7 | 151 | 231 | 140 | 91 | 29 | 0 |
{-# LANGUAGE RecursiveDo #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TupleSections #-}
module Main where
import Control.Lens
import Control.Monad
import Control.Monad.Writer
import Control.Monad.Trans
import Control.Monad.Random
import Reflex
import Reflex.Dom
import Text.Read
import Data.Maybe
import qualified Data.Map as M
import GameResult
import Lib hiding (main)
-- | Entry point: two deck selectors, a "run" button, and a
-- (win, loss, played) readout of the simulated match.
main :: IO ()
main = mainWidget $ el "div" $ do
  rec
    el "h1" $ text "GRE - prototype 1 v0.1"
    text "Deck 1"
    deckOne <- deckSelection
    text "Deck2"
    deckTwo <- deckSelection
    -- On each click of the "run" button (bound below, hence 'rec'),
    -- sample the current card counts of a deck.
    let sampleDeck d = fmap (const $ M.elems d) runClick
    deckOneEvent <- sampleMap sampleDeck deckOne
    deckTwoEvent <- sampleMap sampleDeck deckTwo
    let bothDecks = combineEvents deckOneEvent deckTwoEvent
    -- Run the IO-based game simulation whenever both decks fire.
    gameResult <- performArg (\(d1, d2) -> runGameIO $ runGame d1 d2 cards) bothDecks
    let resultTriple = holdDyn (-1, -1, 0) ((matchResultOutput . fst) <$> gameResult)
    text "Win | Loss | Played "
    display =<< resultTriple
    runClick <- button "run"
  return ()
-- | Pair two events via 'coincidence': whenever the second event fires,
-- the simultaneous occurrence of the first (if any) is extracted, so
-- the result fires only when both fire in the same frame.
combineEvents :: (Reflex t) => Event t a -> Event t b -> Event t (a, b)
combineEvents ea eb = coincidence (pairUp <$> fmap (ea,) eb)
  where
    pairUp (innerEvent, b) = fmap (,b) innerEvent
-- | Map an event-producing function over a 'Dynamic' and flatten the
-- nested event with 'switchPromptlyDyn'.
sampleMap :: (MonadHold t m, Reflex t) => (a -> Event t b) -> Dynamic t a -> m (Event t b)
sampleMap f dyn = switchPromptlyDyn <$> mapDyn f dyn
-- | Render one card-count input per card (as list items) and collect
-- the counts into a single dynamic deck map.
deckSelection :: (MonadWidget t m) => m (Dynamic t (M.Map Card Int))
deckSelection = fmap joinDynThroughMap perCardCounts
  where
    -- One 'cardAddButton' per entry of 'cardList', each inside <li>.
    perCardCounts :: (MonadWidget t m) => m (Dynamic t (M.Map Card (Dynamic t Int)))
    perCardCounts = listWithKey (constDyn cardList) $ \k _ -> el "li" $ cardAddButton k
-- | A labelled numeric text input for one card.  Displays the parsed
-- count next to the raw input; unparseable text counts as 0.
cardAddButton :: (MonadWidget t m, Show a) => a -> m (Dynamic t Int)
cardAddButton cardKey = do
  rec
    amountInput <- textInput $ def & textInputConfig_initialValue .~ "0"
    text " - "
    -- 'rec' lets us display the parsed value that is bound below.
    display parsedAmount
    text (" - " ++ show cardKey)
    parsedAmount <- mapDyn readOr0 (_textInput_value amountInput)
  return parsedAmount
-- | Parse a number from a string, defaulting to 0 on parse failure.
readOr0 :: (Read a, Num a) => String -> a
readOr0 = fromMaybe 0 . readMaybe
-- | Unpack a 'MatchResult' into a tuple; 'main' labels the fields as
-- "Win | Loss | Played" (field order assumed from that label — confirm
-- against 'GameResult').
matchResultOutput :: MatchResult -> (Int, Int, Int)
matchResultOutput (MatchResult wins losses played) = (wins, losses, played)
-- | Assemble both decks from per-card counts and play a match.
-- NOTE(review): the literal 5 is passed straight to 'playMatch' —
-- confirm its meaning (rounds?) in 'Lib'.  The 'cards' parameter
-- shadows the top-level 'cards' list.
runGame :: (MonadWriter [GameLog] m, MonadRandom m) => [Int] -> [Int] -> [Card] -> m MatchResult
runGame d1 d2 cards = playMatch (assemble (zip cards d1)) (assemble (zip cards d2)) 5
-- | Run an IO action on each occurrence of an event and fire the result.
performArg :: MonadWidget t m => (b -> IO a) -> Event t b -> m (Event t a)
performArg f = performEvent . fmap (liftIO . f)
-- | A deck of 1000 copies of 'card1'.
-- NOTE(review): not referenced in this module — candidate for removal.
otherDeck :: [Card]
otherDeck = replicate 1000 card1
-- | Expand (card, count) pairs into a flat deck list.
assemble :: [(Card, Int)] -> [Card]
assemble = concatMap (\(c, n) -> replicate n c)
-- | All playable cards keyed to a 0-based display index.
cardList :: M.Map Card Int
cardList = M.fromList (zip cards [0..])
-- | The pool of selectable cards; commented-out entries are disabled.
cards :: [Card]
cards = [card1
        , card2
        , card3
        , card4
        , card5
        , card6
        , card7
        , card8
        , card9
        --, card10
        , card11
        , card12
        , card13
        , card14
        --, card15
        --, card16
        , card17
        , card18
        --, card19
        ]
-- | Sample data (unused in this module).  A type signature is added to
-- silence -Wmissing-signatures and pin down the intended type.
listx :: [String]
listx = ["1", "2", "3"]
-- | A "+1" button whose Dynamic counts its clicks.
-- NOTE(review): not referenced in this module.
countingBtn :: MonadWidget t m => m (Dynamic t Int)
countingBtn = count =<< button "+1"
| rubenpieters/gre-project | reflex/app/Main.hs | bsd-3-clause | 3,519 | 0 | 17 | 848 | 1,279 | 661 | 618 | 92 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Game where
import Control.Exception.Base
import Control.DeepSeq
import System.CPUTime
import Control.Concurrent
import Control.Concurrent.MVar
import qualified Data.Matrix as M
import Cell
import qualified Control.Concurrent.Thread.Delay as Delay
--
import Data.Text (Text)
import Foreign.C.Types
import SDL.Vect
import SDL.Video.Renderer
import SDL
import Linear (V2(..), V4(..))
import Control.Monad (when, unless, forM_)
--
import Data.Word
import qualified Grid as G
import qualified Render as R
-- | The renderer's palette.
data Color = White | Black | Red | Green | Blue
-- | Translate a 'Color' to an RGBA value (full alpha).
encodeColor :: Color -> V4 Word8
encodeColor White = V4 255 255 255 255
encodeColor Black = V4 0 0 0 255
encodeColor Red = V4 255 0 0 255
encodeColor Green = V4 0 255 0 255
encodeColor Blue = V4 0 0 255 255
{- Game params. -}
-- Pixel size of one cell.
cellHeight, cellWidth :: CInt
cellHeight = 60
cellWidth = 60
-- Pixel size of the whole board, derived from the cell counts.
gridHeight, gridWidth :: CInt
gridHeight = cellHeight * fromIntegral verticalCellsNum
gridWidth = cellWidth * fromIntegral horizontalCellsNum
-- Board dimensions in cells.
verticalCellsNum, horizontalCellsNum :: Int
verticalCellsNum = 6
horizontalCellsNum = 6
-- Default colours.  NOTE(review): 'initGrid' builds its own
-- 'GridColor' and does not use these three bindings.
lineColor, aliveCellColor, deadCellColor :: Color
lineColor = Red
aliveCellColor = Green
deadCellColor = Blue
-- Upper-left corner of the board, in window pixels.
upperLeftX, upperLeftY :: CInt
upperLeftX = 10
upperLeftY = 10
-- Delay used by the (currently commented-out) frame delay in 'gameLoop'.
delayTimeout :: Integer
delayTimeout = 100000
{- Game main. -}
-- Type of state.
type GridState = G.Grid
-- Type of geometry.
type GridShape = R.Grid
-- Colour scheme: grid-line colour plus alive/dead cell fills.
data GridColor
  = GridColor
  { gridColorLine :: Color
  , gridColorCellAlive :: Color
  , gridColorCellDead :: Color }
-- Wraps all grid related data together.
{- Note.
   Packing all necessary data makes a blob of stuff
   such that not every function it works with needs
   it. It does some encapsulation but also hides
   relationships between data. So that's a minus of
   the 'Grid' data type.
-}
data Grid
  = Grid
  { gridState :: GridState
  , gridShape :: GridShape
  , gridColor :: GridColor }
-- | Build a 'Grid' from an initial alive/dead matrix, a
-- (cell height, cell width) pair in pixels, the board's upper-left
-- corner and a colour scheme.
--
-- BUG FIX: the width component of the size pair was bound to a
-- misspelled name (@cellWidht@), so the shape silently used the
-- top-level 'cellWidth' constant instead of the parameter.  The
-- parameter is now honoured ('initGrid' passes the same constant, so
-- existing behaviour is unchanged there).
makeGrid
  :: M.Matrix Bool
  -> (CInt, CInt)
  -> Point V2 CInt
  -> GridColor
  -> Grid
makeGrid stateMatrix (cellH, cellW) upperLeftCorner color =
  Grid state shape color
  where
    state = G.fromMatrix stateMatrix
    shape =
      R.Grid
        cellH
        cellW
        (M.nrows stateMatrix)
        (M.ncols stateMatrix)
        upperLeftCorner
-- | Advance the board one generation; geometry and colours are kept.
updateGrid :: Grid -> Grid
updateGrid grid = grid { gridState = G.update (gridState grid) }
-- | Board coordinates of all currently-alive cells.
aliveCells :: Grid -> [(Int, Int)]
aliveCells = G.aliveCoords . gridState
-- | The pixel rectangle covered by the whole grid.
-- (Width/height could be computed once at construction time.)
gridRect :: Grid -> Rectangle CInt
gridRect (Grid _ shape _) = Rectangle corner (V2 width height)
  where
    corner = R.gridULCorner shape
    height = R.gridCellHeight shape * fromIntegral (R.gridCellVerNum shape)
    width  = R.gridCellWidth shape * fromIntegral (R.gridCellHorNum shape)
-- | Draw the grid lines in the configured line colour.
renderGridLines :: Grid -> Renderer -> IO ()
renderGridLines (Grid _ shape color) renderer =
  R.drawGrid shape (encodeColor (gridColorLine color)) renderer
-- | Fill the grid area with the dead-cell colour.
renderGridBackground :: Grid -> Renderer -> IO ()
renderGridBackground grid renderer = do
  rendererDrawColor renderer $= encodeColor (gridColorCellDead (gridColor grid))
  fillRect renderer (Just (gridRect grid))
-- | Fill every alive cell with the alive-cell colour.
renderGridAliveCells :: Grid -> Renderer -> IO ()
renderGridAliveCells grid renderer = do
  rendererDrawColor renderer $= encodeColor cellColor
  mapM_ (\coords -> R.fillCell coords (gridShape grid) renderer)
        (aliveCells grid)
  where
    cellColor = gridColorCellAlive (gridColor grid)
-- | Render one full frame: background, then grid lines, then alive
-- cells (order matters — later passes draw on top).
renderGrid :: Grid -> Renderer -> IO ()
renderGrid grid renderer =
  mapM_ (\pass -> pass grid renderer)
        [renderGridBackground, renderGridLines, renderGridAliveCells]
{- Running. -}
-- | Initial 6x6 alive/dead matrix used for testing.
initGrid' :: M.Matrix Bool
initGrid' = M.fromLists
  [[True, False, False, False, False, True]
  ,[False, True, False, True, False, True]
  ,[False, False, False, False, True, True]
  ,[False, False, True, True, False, False]
  ,[True, True, False, True, True, False]
  ,[False, True, True, False, False, True]]
-- | Initial grid for the test game: black lines, black alive cells,
-- white dead cells (overriding the top-level colour constants).
initGrid :: Grid
initGrid =
  makeGrid
    initGrid'
    (cellHeight, cellWidth)
    (P (V2 upperLeftX upperLeftY))
    (GridColor Black Black White)
-- | Initialise SDL, open the window/renderer and enter the game loop
-- with the timer preloaded so the first frame renders immediately.
runGame :: IO ()
runGame = do
  initializeAll
  window <- createWindow "Game of Life" defaultWindow
  renderer <- createRenderer window (-1) defaultRenderer
  -- Render at first iteration.
  gameLoop initGrid renderer 0 renderTimeout
{-
NOTE:
It's not possible to collect events in separate thread.
It's better to present a renderer until some timer and
to collect events the rest time.
-}
-- | Poll SDL events forever, publishing the latest batch into the
-- shared 'MVar' between short sleeps.  NOTE(review): not referenced in
-- this module; the note above explains why a separate thread is not
-- usable for event collection.
collectEvents :: MVar [Event] -> IO ()
collectEvents eventListVar = do
  putStrLn "Collecting events..."
  putStrLn "pollEvents"
  newEventList <- pollEvents
  print newEventList
  putStrLn "putMVar"
  -- Replace the previous batch (swapMVar cannot block on a full MVar).
  _ <- swapMVar eventListVar newEventList
  putStrLn "delay"
  -- Sleep before polling again.
  Delay.delay pollInterval
  putStrLn "Collecting events... finished."
  collectEvents eventListVar
  where
    -- Delay between polls (microseconds, per Thread.Delay).
    pollInterval = 100
-- Threshold before a frame is rendered; compared against an accumulator
-- of 'getCPUTime' readings (picoseconds) in 'gameLoop'.  NOTE(review):
-- the extreme magnitude only makes sense because absolute CPU times are
-- summed rather than per-frame deltas — confirm the intended frame rate.
renderTimeout :: Integer
renderTimeout = 500000000000000000
-- Per-frame budget (picoseconds), used by 'gameLoop' bookkeeping.
renderingTime :: Integer
renderingTime = 10000000000000
-- | Main loop: poll for a @q@ keypress, and once the accumulated timer
-- passes 'renderTimeout' render a frame, advance one generation, and
-- recurse with the timer reset.
--
-- NOTE(review): the timer accumulates absolute 'getCPUTime' readings
-- (picoseconds since process start) rather than per-frame deltas, which
-- is why 'renderTimeout' has such an extreme magnitude — confirm intent.
--
-- Changes: @not (null (filter p xs))@ replaced by the idiomatic
-- @any p xs@; a dead @when@ block (bound an unused @timeLeft@ and then
-- @return ()@ — its delay was commented out) has been removed.
gameLoop :: Grid -> Renderer -> Int -> Integer -> IO ()
gameLoop g r counter timer = do
  -- Quit when 'q' is pressed.
  events <- pollEvents
  let eventIsQPress event =
        case eventPayload event of
          KeyboardEvent keyboardEvent ->
            keyboardEventKeyMotion keyboardEvent == Pressed &&
            keysymKeycode (keyboardEventKeysym keyboardEvent) == KeycodeQ
          _ -> False
      qPressed = any eventIsQPress events
  unless qPressed $ do
    -- Accumulate the timer (see NOTE above).
    timerInc <- getCPUTime
    let timer' = timer + timerInc
    -- Render once the timer passes the threshold, then reset it.
    if timer' >= renderTimeout
      then do
        -- Time the rendering pass.
        timeStart <- getCPUTime >>= evaluate
        -- White background.
        rendererDrawColor r $= V4 255 255 255 255
        clear r >>= evaluate . rnf
        renderGrid g r >>= evaluate . rnf
        present r >>= evaluate . rnf
        -- Advance one generation.
        let g' = updateGrid g
        timeStop <- getCPUTime >>= evaluate
        let timeTotal = timeStop - timeStart
        -- Show the relation between renderingTime and timeTotal.
        putStr "renderingTime : timeTotal | "
        putStr $ show renderingTime
        putStr " : "
        putStr $ show timeTotal
        putStrLn ""
        -- Continue with the timer reset to zero.
        gameLoop g' r (counter + 1) 0
      else gameLoop g r (counter + 1) timer'
| wowofbob/gol | src/Game.hs | bsd-3-clause | 9,785 | 0 | 19 | 3,377 | 1,920 | 1,022 | 898 | 198 | 3 |
module ASCII.Wide.Parser where
import PXM
import Util.Parser
import Control.Exception (assert)
import Data.Attoparsec.Char8 (Parser, decimal)
import Data.Word (Word16)
-- | Parse one grayscale sample.  'assert' checks it does not exceed
-- the image depth (active only in non-optimised builds).
gray :: Depth -> Parser Word16
gray d = fmap check decimal
  where
    check g = assert (g <= d) g
-- | Parse an RGB pixel: three decimal samples, each followed by
-- whitespace, each asserted to be within the image depth.
pixel :: Depth -> Parser (Word16, Word16, Word16)
pixel d = (,,) <$> sample <*> sample <*> sample
  where
    sample = do
      v <- decimal
      _ <- whiteSpaces
      return (assert (v <= d) v)
| epsilonhalbe/Sammelsurium | PXM-Parser/ASCII/Wide/Parser.hs | bsd-3-clause | 642 | 0 | 11 | 229 | 217 | 113 | 104 | 19 | 1 |
module Data.Text.Coarbitrary where
import Test.QuickCheck
import Data.Text (Text)
import qualified Data.Text as X
-- | Perturb a generator from a 'Text' value, coarbitrary-style:
-- fold in each character until the text is exhausted.
text :: Text -> Gen b -> Gen b
text t = maybe onEmpty onCons (X.uncons t)
  where
    onEmpty = variant (0 :: Int)
    onCons (c, rest) = variant (1 :: Int) . coarbitrary c . text rest
| massysett/barecheck | lib/Data/Text/Coarbitrary.hs | bsd-3-clause | 270 | 0 | 11 | 55 | 119 | 64 | 55 | 8 | 2 |
module Section_2_3_3 where
import Numeric.LinearAlgebra
-- | Perceptron computing logical AND on {0,1} inputs:
-- weights (0.5, 0.5), bias -0.7.
and' :: Double -> Double -> Double
and' x1 x2
  | activation <= 0 = 0
  | otherwise       = 1
  where
    activation = vector [0.5, 0.5] <.> vector [x1, x2] + (-0.7)
-- | Perceptron computing logical NAND on {0,1} inputs:
-- weights (-0.5, -0.5), bias 0.7.
nand :: Double -> Double -> Double
nand x1 x2
  | activation <= 0 = 0
  | otherwise       = 1
  where
    activation = vector [(-0.5), (-0.5)] <.> vector [x1, x2] + 0.7
-- | Perceptron computing logical OR on {0,1} inputs:
-- weights (0.5, 0.5), bias -0.2.
or' :: Double -> Double -> Double
or' x1 x2
  | activation <= 0 = 0
  | otherwise       = 1
  where
    activation = vector [0.5, 0.5] <.> vector [x1, x2] + (-0.2)
| lotz84/deep-learning-from-scratch | src/Section_2_3_3.hs | bsd-3-clause | 641 | 0 | 12 | 256 | 290 | 159 | 131 | 29 | 2 |
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module KMC.SSTCompiler.Classes where
import Control.Applicative
import Control.Monad.Reader
import Data.Functor.Identity (Identity(runIdentity))
import qualified Data.Map as M
import KMC.Program.IL
import qualified KMC.RangeSet as RS
import KMC.SSTCompiler.Env
import KMC.Theories
import KMC.Util.Coding
import KMC.SymbolicFST.ActionMachine (CodeInputLab(..),DecodeFunc(..))
import KMC.SymbolicFST.OracleMachine (CodeFunc(..))
import KMC.SymbolicFST.Transducer (CopyFunc(..))
import KMC.SymbolicSST.ActionSST (ConstOrAnyLab(..))
import Prelude
----------
-- Classes
----------
-- | Things whose predicate lists (one predicate per consecutive input
-- symbol, starting at a given offset) compile to a boolean 'Expr'.
class PredicateListToExpr p where
  predListToExpr :: [p] -> Int -> Expr
-- | Functions that can be compiled into the IL program representation.
-- (Haddock markers fixed: these were written as @-- ^@, which attaches
-- to the *preceding* entity; @-- |@ is the correct leading marker.)
class Function f => CompilableFunction f tid | f -> tid where
  -- | Lookup table generation
  tables :: f -> M.Map tid Table
  -- | Function constants
  funcConstants :: f -> [[Int]]
  -- | Code generation
  compileFuncAppend :: (Ord st, Ord var, Ord tid) => BufferId -> f -> EnvReader st var tid Block
{-
-- | Tabulate a function. It is assumed that the codomain is a set of
-- bit-vectors with pairwise equal length.
tabulate :: (Function t, Rng t ~ [delta], Enum delta) => t -> Table
tabulate f = Table bitTable bitSize
where
bitTable = map (map fromEnum . eval f) (domain f)
bitSize = foldr max 0 (map length bitTable)
-}
------------
-- Instances
------------
-- | Range-set predicates over consecutive input symbols.  A leading run
-- of singleton sets is packed into one 'CompareE' over their code
-- points; the following run of multi-element sets gets per-symbol range
-- tests; the remainder is compiled recursively.
-- NOTE(review): an empty range set at the head matches neither span and
-- would recurse forever — presumed impossible upstream; confirm.
instance (Eq a, Enum a) => PredicateListToExpr (RS.RangeSet a) where
  predListToExpr [] _ = TrueE
  predListToExpr preds idx = foldr1 AndE (eqExprs ++ rangeExprs ++ [restExpr])
    where
      (singletons, afterSingles) = span (\rs -> RS.size rs == 1) preds
      (multis, rest) = span (\rs -> RS.size rs > 1) afterSingles

      eqExprs = case singletons of
        []    -> []
        [one] -> [predToExpr one idx]
        _     -> [CompareE idx (map (fromEnum . fromSingleton) singletons)]

      rangeExprs = zipWith predToExpr multis [idx + length singletons ..]

      restExpr = predListToExpr rest (idx + length singletons + length multis)

      fromSingleton rs
        | [(l, h)] <- RS.ranges rs, l == h = l
        | otherwise = error "not a singleton rangeset"

      predToExpr rs j = case map rangeTest (RS.ranges rs) of
          []    -> FalseE
          tests -> foldr1 OrE tests
        where
          rangeTest (l, h)
            | l == h = EqE (SymE j) (ConstE (fromEnum l))
            | otherwise = AndE (LteE (ConstE (fromEnum l)) (SymE j))
                               (LteE (SymE j) (ConstE (fromEnum h)))
-- | Coded input labels must be normalized beforehand: either every
-- label reads one arbitrary digit (no constraint), or every label is a
-- single constant digit (compiled into one 'CompareE' run).
instance (Eq a, Enum a) => PredicateListToExpr (CodeInputLab a) where
  predListToExpr xs i
    | all (== InputAny 1) xs = TrueE
    | all isSingleConst xs = CompareE i [ fromEnum b | InputConst [b] <- xs ]
    | otherwise = error "input labels not normalized"
    where
      isSingleConst (InputConst [_]) = True
      isSingleConst _ = False
-- | Constant-or-wildcard labels: wildcards constrain nothing; each
-- constant becomes an equality test on the corresponding input symbol.
instance (Enum digit) => PredicateListToExpr (ConstOrAnyLab digit) where
  predListToExpr labels i =
    case concat (zipWith toTest labels [i ..]) of
      []    -> TrueE
      tests -> foldr1 AndE tests
    where
      toTest AnyLab _ = []
      toTest (ConstLab c) j = [EqE (SymE j) (ConstE (fromEnum c))]
-- | Decode-function compilation: constants are emitted through the
-- constant pool; argument decoding goes through one lookup table per
-- enumeration element (table rows are single decoded symbols).
instance (Enum digit, Enum sigma, Bounded digit, Ord enum, Enumerable enum sigma)
         => CompilableFunction (DecodeFunc enum digit (Identity sigma)) enum where
  tables (DecodeConst _) = M.empty
  tables (DecodeArg es) = M.fromList
    [ (e, Table [ [fromEnum (lookupIndex i e)] | i <- [0 .. size e - 1] ] 1)
    | e <- es ]

  funcConstants (DecodeConst cs) = [map (fromEnum . runIdentity) cs]
  funcConstants (DecodeArg _) = []

  compileFuncAppend bid (DecodeConst cs) =
    (:[]) . AppendI bid <$> ((M.!) <$> asks cmap <*> pure (map (fromEnum . runIdentity) cs))
  compileFuncAppend bid (DecodeArg es) = zipWithM emit [0..] es
    where
      emit i e = AppendTblI bid <$> ((M.!) <$> asks tmap <*> pure e) <*> pure i
-- | Code-function compilation over epsilon-or-code labels.  Each
-- 'CodeArg' enumeration gets a table mapping every domain value to its
-- fixed-width digit code (values outside the set map to zero padding);
-- 'CodeConst' digits go through the constant pool; epsilon emits nothing.
instance (Bounded dom, Enum dom, Bounded digit, Enum digit, Enumerable enum dom, Ord enum)
         => CompilableFunction [EpsFunc (CodeFunc enum dom digit)] enum where
  tables xs = M.fromList [ (e, tbl e) | JustFunc (CodeArg e) <- xs ]
    where
      tbl (e :: enum) =
        let bits = bitWidth (boundedSize (undefined :: digit)) (size e)
            code (x :: dom)
              | member x e =
                  let digits = codeFixedWidthEnumSized (size e) (indexOf x e) :: [digit]
                  in map fromEnum digits
            code _ = replicate bits 0
        in Table [ code x | x <- [minBound .. (maxBound :: dom)] ] bits

  funcConstants xs = [ map fromEnum ds | JustFunc (CodeConst ds) <- xs ]

  compileFuncAppend bid xs = concat <$> zipWithM emit [0..] xs
    where
      emit _ EpsFunc = pure []
      emit _ (JustFunc (CodeConst ds)) =
        ((:[]) . AppendI bid) <$> ((M.!) <$> asks cmap <*> pure (map fromEnum ds))
      emit i (JustFunc (CodeArg e)) =
        (:[]) <$> (AppendTblI bid <$> ((M.!) <$> asks tmap <*> pure e) <*> pure i)
-- | Copy-function compilation for epsilon-or-copy labels: copying the
-- input symbol at position @i@ becomes 'AppendSymI'; constant output
-- goes through the constant pool; epsilon emits nothing.  No tables.
instance (Bounded sigma, Enum sigma)
         => CompilableFunction [EpsFunc (CopyFunc sigma [Identity sigma])] () where
  tables _ = M.empty

  funcConstants fs = [ [fromEnum $ runIdentity y | y <- ys] | JustFunc (CopyConst ys) <- fs ]

  compileFuncAppend bid fs = concat <$> zipWithM emit [0..] fs
    where
      emit _ EpsFunc = pure []
      emit i (JustFunc CopyArg) = return [AppendSymI bid i]
      emit _ (JustFunc (CopyConst ys)) =
        ((:[]) . AppendI bid) <$> ((M.!) <$> asks cmap <*> pure (map (fromEnum . runIdentity) ys))
| diku-kmc/repg | src/KMC/SSTCompiler/Classes.hs | mit | 5,952 | 0 | 19 | 1,740 | 2,090 | 1,084 | 1,006 | 102 | 0 |
{-# LANGUAGE OverloadedStrings, QuasiQuotes #-}
module Tests.Readers.RST (tests) where
import Text.Pandoc.Definition
import Test.Framework
import Tests.Helpers
import Tests.Arbitrary()
import Text.Pandoc.Builder
import Text.Pandoc
-- | Parse RST input in standalone mode, so bibliographic fields are
-- promoted into document metadata.
rst :: String -> Pandoc
rst = readRST defaultParserState{ stateStandalone = True }
infix 5 =:
-- | Build a named test case: description =: (input, expected output).
(=:) :: ToString c
     => String -> (String, c) -> Test
(=:) = test rst
-- | RST reader cases: line blocks with blank lines, field lists (with
-- Date/Authors folded into metadata by standalone mode), and bare-URL
-- recognition with trailing punctuation.
tests :: [Test]
tests = [ "line block with blank line" =:
          "| a\n|\n| b" =?> para (str "a" <> linebreak <>
                             linebreak <> str " " <> str "b")
        , "field list" =:
          [_LIT|
:Hostname: media08
:IP address: 10.0.0.19
:Size: 3ru
:Date: 2001-08-16
:Version: 1
:Authors: - Me
          - Myself
          - I
:Indentation: Since the field marker may be quite long, the second
   and subsequent lines of the field body do not have to line up
   with the first line, but they must be indented relative to the
   field name marker, and they must line up with each other.
:Parameter i: integer
:Final: item
  on two lines
|] =?> ( setAuthors ["Me","Myself","I"]
       $ setDate "2001-08-16"
       $ doc
       $ definitionList [ (str "Hostname", [para "media08"])
                        , (str "IP address", [para "10.0.0.19"])
                        , (str "Size", [para "3ru"])
                        , (str "Version", [para "1"])
                        , (str "Indentation", [para "Since the field marker may be quite long, the second and subsequent lines of the field body do not have to line up with the first line, but they must be indented relative to the field name marker, and they must line up with each other."])
                        , (str "Parameter i", [para "integer"])
                        , (str "Final", [para "item on two lines"])
                        ])
        , "URLs with following punctuation" =:
          ("http://google.com, http://yahoo.com; http://foo.bar.baz.\n" ++
           "http://foo.bar/baz_(bam) (http://foo.bar)") =?>
          para (link "http://google.com" "" "http://google.com" <> ", " <>
                link "http://yahoo.com" "" "http://yahoo.com" <> "; " <>
                link "http://foo.bar.baz" "" "http://foo.bar.baz" <> ". " <>
                link "http://foo.bar/baz_(bam)" "" "http://foo.bar/baz_(bam)"
                <> " (" <> link "http://foo.bar" "" "http://foo.bar" <> ")")
        ]
| sol/pandoc | src/Tests/Readers/RST.hs | gpl-2.0 | 2,504 | 0 | 18 | 810 | 440 | 241 | 199 | -1 | -1 |
{-# OPTIONS_GHC -Weverything #-}
{-# OPTIONS_GHC -Wno-unticked-promoted-constructors -Wno-missing-import-lists -Wno-implicit-prelude -Wno-monomorphism-restriction -Wno-name-shadowing -Wno-all-missed-specialisations -Wno-unsafe -Wno-missing-export-lists -Wno-type-defaults -Wno-partial-fields -Wno-missing-local-signatures -Wno-orphans #-}
module Holo.Name
( Name(..)
, As(..), defName, defStyGeoName
, Style(..), sStyle, sStyleGene, initStyle, defStyle
, StyleGene(..), fromStyleGene
, Visual(..), VPort
)
where
import ExternalImports
-- Local imports
import Graphics.Flex (Geo, defGeo)
import {-# SOURCE #-}
Holo.Classes
import Holo.Port (IdToken, Drawable)
import qualified Holo.Port as Port
-- * Name
-- ..as per R. G. Piotrovsky, "Text, Machine, Man" (Leningrad: Nauka, 1975)
-- Which is supposed to make sense in context of As/Denoted
-- | A named entity: a value of type @a@ together with its identity
-- token, its style, and its layout geometry.
data Name (a ∷ Type) where
  Name ∷
    { nToken ∷ IdToken
    , nStyle ∷ Style a
    , nGeo ∷ Geo
    , n ∷ a
    } → Name a
-- | Wrap a value with the default style and geometry.
defName ∷ ∀ a. As a ⇒ IdToken → a → Name a
defName tok n = Name tok defStyle defGeo n
-- default of Named.compName: the style comes from the value's 'compSty'.
defStyGeoName ∷ As n ⇒ Proxy a → IdToken → n → Name n
defStyGeoName _ tok n = Name tok (initStyle $ compSty n) defGeo n
-- * Style wrapper
--
-- | Generation counter used to detect style changes cheaply.
newtype StyleGene = StyleGene { _fromStyleGene ∷ Int } deriving (Eq, Ord)
-- | Lens onto the generation counter.
fromStyleGene ∷ Lens' StyleGene Int
fromStyleGene f (StyleGene x) = f x <&> StyleGene
-- | A 'Sty' together with its generation.
data Style a where
  Style ∷
    { _sStyle ∷ Sty a
    , _sStyleGene ∷ StyleGene
    } → Style a
-- | Lens onto the wrapped 'Sty'.
sStyle ∷ Lens' (Style a) (Sty a)
sStyle f s@Style{..} = f _sStyle <&> \x→ s{_sStyle=x}
-- | Lens onto the style generation.
sStyleGene ∷ Lens' (Style a) StyleGene
sStyleGene f s@Style{..} = f _sStyleGene <&> \x→ s{_sStyleGene=x}
-- | Wrap a bare 'Sty' into a 'Style' at generation zero.
initStyle ∷ Sty a → Style a
initStyle sty = Style sty (StyleGene 0)
-- | The default 'Style' for any 'As' instance, at generation zero.
defStyle ∷ ∀ a. As a ⇒ Style a
defStyle = initStyle $ defSty (Proxy @a)
-- * Visual wrapper
--
-- | A computed visual for @a@ paired with the drawable it renders into.
data Visual (a ∷ Type) where
  Visual ∷ As a ⇒
    { vVisual ∷ Vis a
    , vDrawable ∷ Drawable
    } → Visual a
-- | Ports specialised to 'Visual' payloads.
type VPort = Port.Port Visual
instance Port.PortVisual Visual where
  pvDrawable = vDrawable
  pvFree _pC pA = \case
    Visual{..} → freeVis pA vVisual
| deepfire/mood | src/Holo/Name.hs | agpl-3.0 | 2,445 | 0 | 10 | 608 | 661 | 374 | 287 | -1 | -1 |
{- |
Module : Spark.Core.Dataset
Description : Dataset types and basic operations.
This module describes the core data types (Dataset, DataFrame,
Observable and DynObservable), and some basic operations to relate them.
-}
module Spark.Core.Dataset(
-- * Common data structures
-- TODO Should it be hidden?
ComputeNode,
LocLocal,
LocDistributed,
LocUnknown,
UntypedNode,
-- * Distributed data structures
Dataset,
DataFrame,
-- * Local data structures
LocalData,
LocalFrame,
-- * Conversions
asDF,
asDS,
asLocalObservable,
castType,
castType',
-- * Relations
parents,
untyped,
untyped',
depends,
logicalParents,
logicalParents',
-- * Attributes
nodeLogicalParents,
nodeLogicalDependencies,
nodeParents,
nodeOp,
nodeId,
nodeName,
nodeType,
) where
import Spark.Core.Internal.DatasetStructures
import Spark.Core.Internal.DatasetFunctions
import Spark.Core.Internal.Projections()
| krapsh/kraps-haskell | src/Spark/Core/Dataset.hs | apache-2.0 | 953 | 0 | 4 | 176 | 123 | 86 | 37 | 31 | 0 |
{- xmonad.hs
- Author: �yvind 'Mr.Elendig' Heggstad <mrelendig AT har-ikkje DOT net>
- Version: 0.0.9
- Modified version
-}
-------------------------------------------------------------------------------
-- Imports --
-- stuff
import XMonad hiding ( (|||) )
import qualified XMonad.StackSet as W
import qualified Data.Map as M
import System.Exit
import System.IO (Handle, hPutStrLn)
import XMonad.Actions.CycleWS
import XMonad.Actions.UpdatePointer
import XMonad.Actions.NoBorders
-- utils
import XMonad.Util.Run (spawnPipe)
-- hooks
import XMonad.Hooks.ManageDocks
import XMonad.Hooks.DynamicLog
import XMonad.Hooks.EwmhDesktops
import XMonad.Hooks.UrgencyHook
import XMonad.Hooks.ManageHelpers
-- layouts
import XMonad.Layout.NoBorders
import XMonad.Layout.ResizableTile
import XMonad.Layout.Tabbed
import XMonad.Layout.PerWorkspace
import XMonad.Layout.LayoutCombinators
import XMonad.Layout.ShowWName
-------------------------------------------------------------------------------
-- Main --
-- | Spawn xmobar, then start xmonad with the custom configuration
-- wrapped in an urgency hook.  (The previously commented-out type
-- signature is restored.)
main :: IO ()
main = do
  h <- spawnPipe "xmobar"
  xmonad $ withUrgencyHook NoUrgencyHook $ defaultConfig
    { workspaces = workspaces'
    , modMask = modMask'
    , borderWidth = borderWidth'
    , normalBorderColor = normalBorderColor'
    , focusedBorderColor = focusedBorderColor'
    , terminal = terminal'
    , keys = keys'
    , logHook = logHook' h
    , layoutHook = layoutHook'
    , manageHook = manageHook' <+> manageHook defaultConfig
    , handleEventHook = fullscreenEventHook
    , focusFollowsMouse = myFocusFollowsMouse
    }
-------------------------------------------------------------------------------
-- Hooks --
-- | Window rules: fullscreen windows get a full float, dialogs and
-- Xmessage float, MPlayer is sunk back into tiling, and dock windows
-- are handled by 'manageDocks'.
manageHook' :: ManageHook
manageHook' = composeAll rules
  where
    rules =
      [ isFullscreen --> doFullFloat
      , isDialog --> doFloat
      , className =? "Xmessage" --> doFloat
      , className =? "MPlayer" --> ask >>= doF . W.sink
      , manageDocks
      ]
-- | Feed the pretty-printed status line to xmobar, then keep the mouse
-- pointer with the focused window.
logHook' :: Handle -> X ()
logHook' handle = do
  dynamicLogWithPP (customPP { ppOutput = hPutStrLn handle })
  updatePointer (Relative 0 0)
-- Alias for the workspace-aware layout stack.  The type signature is
-- deliberately omitted: the inferred ModifiedLayout type is unwieldy.
layoutHook' = customLayout
-------------------------------------------------------------------------------
-- Looks --
-- bar
-- | xmobar pretty-printer: current workspace in yellow brackets,
-- visible (other-screen) workspaces in blue angle brackets, urgent in
-- red bangs; empty workspaces dimmed; window titles capped at 45 chars.
customPP :: PP
customPP = defaultPP { ppCurrent = xmobarColor "#FFEE00" "" . wrap "[" "]"
                     , ppVisible = xmobarColor "#5599FF" "" . wrap "<" ">"
                     , ppTitle = shorten 45
                     , ppSep = "<fc=#AFAF87>|</fc>"
                     , ppHiddenNoWindows = xmobarColor "#404040" ""
                     , ppUrgent = xmobarColor "#ff0000" "" . wrap "!" "!"
                     }
-- Whether focus follows the mouse pointer (click-to-focus when False).
myFocusFollowsMouse :: Bool
myFocusFollowsMouse = False
-- borders
-- Window border appearance.
borderWidth' :: Dimension
borderWidth' = 1
normalBorderColor', focusedBorderColor' :: String
normalBorderColor' = "#555555"
-- focusedBorderColor' = "#00FF00"  (previous choice, kept for reference)
focusedBorderColor' = "#cccccc"
-- workspaces
-- | Ten workspaces labelled "1".."9" followed by "0".
workspaces' :: [WorkspaceId]
workspaces' = map show ([1 .. 9] :: [Int]) ++ ["0"]
-- layouts
-- Resizable master/stack tiling, with smart borders.
myTiled = smartBorders $ ResizableTall 1 (3/100) (1/2) []
-- Borderless fullscreen.
myFull = noBorders Full
-- Borderless tabbed layout with the default theme.
myTabbed = noBorders $ tabbed shrinkText defaultTheme
-- Large flash of the workspace name on switch.
mySWNConfig = defaultSWNConfig { swn_font = "xft:Monospace:pixelsize=60:bold:antialias=true:hinting=true"
                               , swn_fade = 1
                               , swn_bgcolor = "#dddddd"
                               , swn_color = "#000000"
                               }
myShowWName = showWName' mySWNConfig
-- Per-workspace layouts: 4-6 use the "work" stack, 2 is tab-first,
-- everything else gets the normal stack; struts are always avoided.
customLayout = myShowWName $ avoidStruts $
               onWorkspaces ["4", "5", "6"] workLayout $
               {-onWorkspaces ["8", "9"] (noBorders normalLayout) $-}
               onWorkspaces ["2"] fullLayout
               normalLayout
  where
    normalLayout = myTiled ||| myFull ||| myTabbed
    workLayout = myTiled ||| myFull
    fullLayout = myTabbed ||| myFull ||| noBorders myTiled
-------------------------------------------------------------------------------
-- Terminal --
-- | Terminal emulator spawned by @mod-Return@.
terminal' :: String
terminal' = "terminator"
-------------------------------------------------------------------------------
-- Browser --
-- | Preferred browser command.
browser' :: String
browser' = "google-chrome"
-------------------------------------------------------------------------------
-- Keys/Button bindings --
-- modmask
-- | Use the Super/Windows key (mod4) as the modifier.
modMask' :: KeyMask
modMask' = mod4Mask
-- keys
-- | Key bindings.
--
-- Fixes: @mod-shift-q@ was bound twice — first to @io exitSuccess@,
-- then to restarting xmonad.  'M.fromList' retains the *last* value for
-- a duplicate key, so the exit binding was dead code; it has been
-- removed (behaviour unchanged: @mod-shift-q@ restarts).  The browser
-- spawn now uses the 'browser'' binding instead of repeating its
-- literal, for consistency.
keys' :: XConfig Layout -> M.Map (KeyMask, KeySym) (X ())
keys' conf@(XConfig {XMonad.modMask = modMask}) = M.fromList $
    -- launching and killing programs
    [ ((modMask, xK_Return), spawn $ XMonad.terminal conf)
    , ((modMask .|. shiftMask, xK_Return), spawn browser')
    , ((modMask, xK_p ), spawn "gmrun")
    , ((modMask .|. shiftMask, xK_p ), spawn "exe=`dmenu_path | dmenu` && eval \"exec $exe\"")
    , ((modMask .|. shiftMask, xK_c ), kill)
    , ((modMask, xK_q ), kill)
    -- layouts
    , ((modMask, xK_space ), sendMessage NextLayout)
    , ((modMask .|. shiftMask, xK_space ), setLayout $ XMonad.layoutHook conf)
    , ((modMask , xK_b ), sendMessage ToggleStruts)
    , ((modMask , xK_f ), sendMessage $ JumpToLayout "Full")
    , ((modMask , xK_r ), sendMessage $ JumpToLayout "ResizableTall")
    -- floating layer stuff
    , ((modMask, xK_t ), withFocused $ windows . W.sink)
    , ((modMask, xK_g ), withFocused toggleBorder)
    -- refresh
    , ((modMask, xK_n ), refresh)
    -- move focus between screens
    , ((modMask .|. controlMask, xK_j ), nextScreen)
    , ((modMask .|. controlMask, xK_k ), prevScreen)
    , ((modMask, xK_z ), toggleWS)
    , ((modMask, xK_o ), shiftNextScreen)
    -- focus
    , ((modMask, xK_j ), windows W.focusDown)
    , ((modMask, xK_k ), windows W.focusUp)
    , ((modMask, xK_m ), windows W.focusMaster)
    -- swapping
    , ((modMask .|. shiftMask, xK_j ), windows W.swapDown )
    , ((modMask .|. shiftMask, xK_k ), windows W.swapUp )
    -- increase or decrease number of windows in the master area
    , ((modMask , xK_comma ), sendMessage (IncMasterN 1))
    , ((modMask , xK_period), sendMessage (IncMasterN (-1)))
    -- resizing
    , ((modMask, xK_h ), sendMessage Shrink)
    , ((modMask, xK_l ), sendMessage Expand)
    , ((modMask .|. shiftMask, xK_h ), sendMessage MirrorShrink)
    , ((modMask .|. shiftMask, xK_l ), sendMessage MirrorExpand)
    -- XF86AudioMute
    , ((0 , 0x1008ff12), spawn "amixer -q set Master toggle")
    -- XF86AudioLowerVolume
    , ((0 , 0x1008ff11), spawn "amixer -q set Master 1- unmute")
    -- XF86AudioRaiseVolume
    , ((0 , 0x1008ff13), spawn "amixer -q set Master 1+ unmute")
    -- XF86AudioNext
    , ((0 , 0x1008ff17), spawn "mpc next")
    -- XF86AudioPrev
    , ((0 , 0x1008ff16), spawn "mpc prev")
    -- XF86AudioPlay
    , ((0 , 0x1008ff14), spawn "mpc toggle")
    -- restart xmonad in place
    , ((modMask .|. shiftMask, xK_q ), restart "xmonad" True)
    ]
    ++
    -- mod-[1..9,0]         %! Switch to workspace N
    -- mod-shift-[1..9,0]   %! Move client to workspace N
    -- mod-control-[1..9,0] %! Switch to workspace N greedily
    [((m .|. modMask, k), windows $ f i)
        | (i, k) <- zip (XMonad.workspaces conf) $ [xK_1 .. xK_9] ++ [xK_0]
        , (f, m) <- [(W.view, 0), (W.shift, shiftMask), (W.greedyView, controlMask)]]
| ahujamanish/dotfiles | .xmonad/xmonad.hs | bsd-2-clause | 8,177 | 0 | 14 | 2,203 | 1,744 | 1,050 | 694 | 126 | 1 |
{-# LANGUAGE OverloadedLists #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Main ( main ) where
import Data.Char (chr)
import Data.Either (fromRight, isLeft)
import Data.Foldable (fold)
import Data.List (groupBy)
import qualified Data.List.NonEmpty as NEL
import qualified Data.Text as T
import Data.Versions
import Data.Void (Void)
import Lens.Micro
import Test.QuickCheck
import Test.Tasty
import Test.Tasty.HUnit
import Test.Tasty.QuickCheck
import Text.Megaparsec
import Text.Megaparsec.Char
import Text.Printf (printf)
---
-- | Random 'SemVer's: numeric components are arbitrary, pre-release
-- chunks come from 'chunks', and the cached pretty form is left empty.
instance Arbitrary SemVer where
  arbitrary = do
    ma  <- arbitrary
    mi  <- arbitrary
    pa  <- arbitrary
    pre <- chunks
    pure (SemVer ma mi pa pre Nothing)
-- | Sane generation of VChunks: a short list of short, simplified chunks.
chunks :: Gen [VChunk]
chunks = resize 10 (listOf1 chunk)
  where chunk = simplify <$> resize 10 (listOf1 arbitrary)
-- | Like 'chunks', but guaranteed non-empty.
chunksNE :: Gen (NEL.NonEmpty VChunk)
chunksNE = fmap NEL.fromList chunks
-- | Merge runs of adjacent units of the same constructor into single units.
simplify :: [VUnit] -> NEL.NonEmpty VUnit
simplify = NEL.fromList . map fold . groupBy sameKind
  where
    sameKind (Digits _) (Digits _) = True
    sameKind (Str _)    (Str _)    = True
    sameKind _          _          = False
-- | Half digit units (strictly positive), half short lowercase strings.
instance Arbitrary VUnit where
  arbitrary = frequency [(1, digits), (1, letters)]
    where
      digits  = Digits . (+ 1) <$> arbitrary
      letters = Str . T.pack . map unletter <$> resize 10 (listOf1 arbitrary)
-- | An ASCII letter.
newtype Letter = Letter { unletter :: Char }

-- | Uniformly chosen from @'a'..'z'@ (codepoints 97-122).
instance Arbitrary Letter where
  arbitrary = fmap (Letter . chr) (choose (97, 122))
-- | Random general 'Version's with a non-empty chunk list and no cached
-- pretty form.
instance Arbitrary Version where
  arbitrary = do
    epoch <- arbitrary
    cs    <- chunksNE
    rel   <- chunks
    pure (Version epoch cs rel Nothing)
-- | These don't need to parse as a SemVer.
goodVers :: [T.Text]
goodVers =
  [ "1", "1.2", "1.0rc0", "1.0rc1", "1.1rc1", "1.58.0-3", "44.0.2403.157-1"
  , "0.25-2", "8.u51-1", "21-2", "7.1p1-1", "20150826-1", "1:0.10.16-3"
  , "1.11.0.git.20200404-1", "1.11.0+20200830-1", "1:3.20"
  ]

-- | Strings that no parser should accept.
badVers :: [T.Text]
badVers = ["", "1.2 "]

-- | Strings that only the most lenient parser ('mess') should accept.
messes :: [T.Text]
messes =
  [ "10.2+0.93+1-1", "003.03-3", "002.000-7", "20.26.1_0-2", "1.6.0a+2014+m872b87e73dfb-1"
  , "1.3.00.16851-1", "5.2.458699.0906-1"
  ]

-- | Messes in strictly increasing order.
messComps :: [T.Text]
messComps =
  [ "10.2+0.93+1-1", "10.2+0.93+1-2", "10.2+0.93+2-1"
  , "10.2+0.94+1-1", "10.3+0.93+1-1", "11.2+0.93+1-1", "12"
  ]

-- | Strings that are not legal SemVer.
badSemVs :: [T.Text]
badSemVs =
  [ "1", "1.2", "a.b.c", "1.01.1", "1.2.3+a1b!2c3.1", "", "1.2.3 "
  ]

-- | Legal SemVer strings, including unusual pre-release and metadata forms.
goodSemVs :: [T.Text]
goodSemVs =
  [ "0.1.0", "1.2.3", "1.2.3-1", "1.2.3-alpha", "1.2.3-alpha.2"
  , "1.2.3+a1b2c3.1", "1.2.3-alpha.2+a1b2c3.1", "2.2.1-b05"
  -- Weird Pre-releases
  , "1.0.0-x-y-z.-"
  -- Weird metadata
  , "1.0.0-alpha+001", "1.0.0+21AF26D3---117B344092BD"
  ]

-- | The exact example from `http://semver.org`
semverOrd :: [T.Text]
semverOrd =
  [ "1.0.0-alpha", "1.0.0-alpha.1", "1.0.0-alpha.beta"
  , "1.0.0-beta", "1.0.0-beta.2", "1.0.0-beta.11", "1.0.0-rc.1"
  , "1.0.0"
  ]

-- | Cabal makes this distinction: 0.2 < 0.2.0 < 0.2.0.0
-- Apparently there are only 5 packages on Hackage that actually
-- make this necessary, meaning `cabal` can't be simplified to ignore it.
-- Logically, these are the same package, but for those 5 packages, they
-- aren't.
cabalOrd :: [T.Text]
cabalOrd = [ "0", "0.2", "0.2.0", "0.2.0.0" ]

-- | General versions in strictly increasing order.
versionOrd :: [T.Text]
versionOrd = [ "0.9.9.9", "1.0.0.0", "1.0.0.1", "2" ]
-- | The full test tree: property tests round-trip the pretty-printers,
-- unit tests pin parser behaviour on concrete version strings.
--
-- FIX: the "Identification" group below contained the case
-- @"1:3.20.1-1 is Version"@ twice; the duplicate has been removed.
suite :: TestTree
suite = testGroup "Tests"
  [ testGroup "Property Tests"
    [ testProperty "SemVer - Arbitrary" $ \a -> semver (prettySemVer a) == Right a
    , testProperty "Version - Arbitrary" $ \a -> version (prettyVer a) == Right a
    -- , testGroup "Version - Monoid" $
    -- map (\(name, test) -> testProperty name test) . unbatch $ monoid (Version (Just 1) [[digits 2], [digits 3]])
    ]
  , testGroup "Unit Tests"
    [ testGroup "(Ideal) Semantic Versioning"
      [ testGroup "Bad Versions (shouldn't parse)" $
        map (\s -> testCase (T.unpack s) $ assertBool "A bad version parsed" $ isLeft $ semver s) badSemVs
      , testGroup "Good Versions (should parse)" $
        map (\s -> testCase (T.unpack s) $ isomorphSV s) goodSemVs
      , testGroup "Comparisons" $
        -- Build metadata must not affect equality.
        testCase "1.2.3-alpha.2 == 1.2.3-alpha.2+a1b2c3.1"
        (assertBool "Equality test of two complicated SemVers failed"
         $ semver "1.2.3-alpha.2" == semver "1.2.3-alpha.2+a1b2c3.1") :
        zipWith (\a b -> testCase (T.unpack $ a <> " < " <> b) $ comp semver a b) semverOrd (tail semverOrd)
      , testGroup "Whitespace Handling"
        [ testCase "1.2.3-1[ ]" $ parse semver' "semver whitespace" "1.2.3-1 " @?= Right (SemVer 1 2 3 [[Digits 1]] Nothing)
        ]
      , testGroup "Zero Handling"
        [ testCase "2.2.1-b05" $ semver "2.2.1-b05" @?= Right (SemVer 2 2 1 [[Str "b", Digits 0, Digits 5]] Nothing)
        ]
      ]
    , testGroup "(Haskell) PVP"
      [ testGroup "Good PVPs" $
        map (\s -> testCase (T.unpack s) $ isomorphPVP s) cabalOrd
      , testGroup "Comparisons" $
        zipWith (\a b -> testCase (T.unpack $ a <> " < " <> b) $ comp pvp a b) cabalOrd (tail cabalOrd)
      ]
    , testGroup "(General) Versions"
      [ testGroup "Good Versions" $
        map (\s -> testCase (T.unpack s) $ isomorphV s) goodVers
      , testGroup "Bad Versions (shouldn't parse)" $
        map (\s -> testCase (T.unpack s) $ assertBool "A bad version parsed" $ isLeft $ version s) badVers
      , testGroup "Comparisons" $
        testCase "1.2-5 < 1.2.3-1" (comp version "1.2-5" "1.2.3-1") :
        testCase "1.0rc1 < 1.0" (comp version "1.0rc1" "1.0") :
        testCase "1.0 < 1:1.0" (comp version "1.0" "1:1.0") :
        testCase "1.1 < 1:1.0" (comp version "1.1" "1:1.0") :
        testCase "1.1 < 1:1.1" (comp version "1.1" "1:1.1") :
        map (\(a,b) -> testCase (T.unpack $ a <> " < " <> b) $ comp version a b)
        (zip cabalOrd (tail cabalOrd) <> zip versionOrd (tail versionOrd))
      ]
    , testGroup "(Complex) Mess"
      [ testGroup "Good Versions" $
        map (\s -> testCase (T.unpack s) $ isomorphM s) messes
      , testGroup "Bad Versions (shouldn't parse)" $
        map (\s -> testCase (T.unpack s) $ assertBool "A bad version parsed" $ isLeft $ mess s) badVers
      , testGroup "Comparisons" $
        zipWith (\a b -> testCase (T.unpack $ a <> " < " <> b) $ comp mess a b) messComps (tail messComps)
      , testGroup "SemVer-like Value Extraction"
        [ testCase "messMajor" $
          (hush (mess "1.6.0a+2014+m872b87e73dfb-1") >>= messMajor) @?= Just 1
        , testCase "messMinor" $
          (hush (mess "1.6.0a+2014+m872b87e73dfb-1") >>= messMinor) @?= Just 6
        , testCase "messPatch - Good" $
          (hush (mess "1.6.0+2014+m872b87e73dfb-1") >>= messPatch) @?= Just 0
        , testCase "messPatch - Bad" $
          (hush (mess "1.6.0a+2014+m872b87e73dfb-1") >>= messPatch) @?= Nothing
        , testCase "messPatchChunk" $
          (hush (mess "1.6.0a+2014+m872b87e73dfb-1") >>= messPatchChunk) @?= Just [Digits 0, Str "a"]
        ]
      ]
    , testGroup "Mixed Versioning"
      [ testGroup "Identification"
        [ testCase "1.2.3 is SemVer" $ check $ isSemVer <$> versioning "1.2.3"
        , testCase "1.2.3-1 is SemVer" $ check $ isSemVer <$> versioning "1.2.3-1"
        , testCase "1.2.3-1+1 is SemVer" $ check $ isSemVer <$> versioning "1.2.3-1+1"
        , testCase "1.2.3+1-1 is SemVer" $ check $ isSemVer <$> versioning "1.2.3+1-1"
        , testCase "1.2.3r1 is Version" $ check $ isVersion <$> versioning "1.2.3r1"
        , testCase "0.25-2 is Version" $ check $ isVersion <$> versioning "0.25-2"
        , testCase "1:1.2.3-1 is Version" $ check $ isVersion <$> versioning "1:1.2.3-1"
        , testCase "1:3.20.1-1 is Version" $ check $ isVersion <$> versioning "1:3.20.1-1"
        , testCase "000.007-1 is Mess" $ check $ isMess <$> versioning "000.007-1"
        , testCase "20.26.1_0-2 is Mess" $ check $ isMess <$> versioning "20.26.1_0-2"
        ]
      , testGroup "Bad Versions" $
        map (\s -> testCase (T.unpack s) $ assertBool "A bad version parsed" $ isLeft $ versioning s) badVers
      , testGroup "Isomorphisms" $
        map (\s -> testCase (T.unpack s) $ isomorph s) $ goodSemVs ++ goodVers ++ messes
      , testGroup "Comparisons"
        [ compVer "1.2.2r1-1" "1.2.3-1"
        , compVer "1.2.3-1" "1.2.4r1-1"
        , compVer "1.2.3-1" "2+0007-1"
        , compVer "1.2.3r1-1" "2+0007-1"
        , compVer "1.2-5" "1.2.3-1"
        , compVer "1.6.0a+2014+m872b87e73dfb-1" "1.6.0-1"
        , compVer "1.11.0.git.20200404-1" "1.11.0+20200830-1"
        , compVer "0.17.0+r8+gc41db5f1-1" "0.17.0+r157+g584760cf-1"
        , compVer "0.4.8-1" "0.4.9-1"
        , compVer "7.42.13-4" "7.46.0-2"
        , compVer "1.15.2-1" "1.15.3-1"
        , compVer "2.1.16102-2" "2.1.17627-1"
        , compVer "8.64.0.81-1" "8.65.0.78-1"
        , compVer "1.3.00.16851-1" "1.3.00.25560-1"
        , compVer "10.0.4-1" "10.1.0-1"
        , compVer "1:3.20-1" "1:3.20.1-1"
        , compVer "5.2.458699.0906-1" "5.3.472687.1012-1"
        ]
      , testGroup "Equality"
        [ eqVer "1:3.20.1-1"
        , eqVer "1.3.00.25560-1"
        , eqVer "150_28-3"
        , eqVer "1.0.r15.g3fc772c-5"
        , eqVer "0.88-2"
        ]
      ]
    , testGroup "Lenses and Traversals"
      [ testCase "SemVer - Increment Patch" incPatch
      , testCase "SemVer - Increment Patch from Text" incFromT
      , testCase "SemVer - Get patches" patches
      ]
    , testGroup "Megaparsec Behaviour"
      [ testCase "manyTill" $ parse nameGrab "manyTill" "linux-firmware-3.2.14-1-x86_64.pkg.tar.xz" @?= Right "linux-firmware"
      , testCase "Extracting version" $ parse versionGrab "extraction" "linux-firmware-3.2.14-1-x86_64.pkg.tar.xz" @?= Right (Ideal $ SemVer 3 2 14 [[Digits 1, Str "-x", Digits 86]] Nothing)
      ]
    ]
  ]
-- | A test asserting that @a@ parses strictly less than @b@ under
-- 'versioning'.
-- NOTE(review): 'printf' is applied directly to 'T.Text' values, which
-- requires a 'PrintfArg' instance for 'Text' to be in scope — confirm
-- where that instance comes from.
compVer :: T.Text -> T.Text -> TestTree
compVer a b = testCase (printf "%s < %s" a b) $ comp versioning a b
-- | A test asserting that @a@ parses successfully under 'versioning'.
eqVer :: T.Text -> TestTree
eqVer a = testCase (T.unpack a) (equal versioning a)
-- | Does pretty-printing return a Versioning to its original form?
isomorph :: T.Text -> Assertion
isomorph t = either bad (t @?=) (prettyV <$> versioning t)
  where bad e = assertBool (errorBundlePretty e) False
-- | Does pretty-printing return a Version to its original form?
isomorphV :: T.Text -> Assertion
isomorphV t = either bad (t @?=) (prettyVer <$> version t)
  where bad e = assertBool (errorBundlePretty e) False
-- | Does pretty-printing return a SemVer to its original form?
isomorphSV :: T.Text -> Assertion
isomorphSV t = either bad (t @?=) (prettySemVer <$> semver t)
  where bad e = assertBool (errorBundlePretty e) False
-- | Does pretty-printing return a PVP to its original form?
isomorphPVP :: T.Text -> Assertion
isomorphPVP t = either bad (t @?=) (prettyPVP <$> pvp t)
  where bad e = assertBool (errorBundlePretty e) False
-- | Does pretty-printing return a Mess to its original form?
isomorphM :: T.Text -> Assertion
isomorphM t = either bad (t @?=) (prettyMess <$> mess t)
  where bad e = assertBool (errorBundlePretty e) False
-- | Assert that @a@ parses to something strictly less than what @b@
-- parses to; fails if either fails to parse.
comp :: Ord b => (T.Text -> Either a b) -> T.Text -> T.Text -> Assertion
comp f a b = check $ do
  x <- f a
  y <- f b
  pure (x < y)
-- | Assert that @a@ parses successfully. The @r == r@ comparison is
-- trivially true for any lawful 'Eq', so the real check performed by
-- 'check' is simply that @f a@ produced a 'Right'.
equal :: Ord r => (T.Text -> Either l r) -> T.Text -> Assertion
equal f a = check $ (\r -> r == r) <$> f a
-- | Fail the assertion when the computation errored ('Left') or
-- produced 'False'.
check :: Either a Bool -> Assertion
check e = assertBool "Some Either-based assertion failed" (fromRight False e)
-- | Did the parse land in the ideal SemVer branch?
isSemVer :: Versioning -> Bool
isSemVer v = case v of
  Ideal _ -> True
  _       -> False

-- | Did the parse land in the general Version branch?
isVersion :: Versioning -> Bool
isVersion v = case v of
  General _ -> True
  _         -> False

-- | Did the parse fall through to the Mess branch?
isMess :: Versioning -> Bool
isMess v = case v of
  Complex _ -> True
  _         -> False
-- | Bumping the patch number through the 'patch' traversal.
incPatch :: Assertion
incPatch = (before & patch %~ (+ 1)) @?= after
  where
    before = Ideal $ SemVer 1 2 3 [] Nothing
    after  = Ideal $ SemVer 1 2 4 [] Nothing
-- | The 'patch' traversal also works directly on raw 'T.Text'.
incFromT :: Assertion
incFromT = bumped @?= "1.2.4"
  where bumped = ("1.2.3" :: T.Text) & patch %~ (+ 1)
-- | Collecting every patch number from a list of version strings.
patches :: Assertion
patches = found @?= [3,4,5]
  where found = (["1.2.3","2.3.4","3.4.5"] :: [T.Text]) ^.. each . patch
-- | Run the whole Tasty suite.
main :: IO ()
main = defaultMain suite
-- | Everything before the first @-<digit>@ boundary: the package name.
nameGrab :: Parsec Void T.Text T.Text
nameGrab = fmap T.pack (manyTill anySingle (try boundary))
  where boundary = char '-' *> lookAhead digitChar
-- | Skip the package name, then parse whichever version form follows,
-- trying the strictest parser first.
versionGrab :: Parsec Void T.Text Versioning
versionGrab = manyTill anySingle (try boundary) *> ver
  where
    boundary = char '-' *> lookAhead digitChar
    ver = fmap Ideal semver' <|> fmap General version' <|> fmap Complex mess'
-- | Drop the error side of an 'Either'.
hush :: Either a b -> Maybe b
hush = either (const Nothing) Just
| aurapm/haskell-versions | test/Test.hs | bsd-3-clause | 12,673 | 0 | 22 | 3,128 | 3,691 | 1,908 | 1,783 | -1 | -1 |
{-# LANGUAGE TemplateHaskell #-}
--------------------------------------------------------------------------------
-- |
-- Module : Data.Comp.Multi.Derive.HFoldable
-- Copyright : (c) 2011 Patrick Bahr
-- License : BSD3
-- Maintainer : Patrick Bahr <paba@diku.dk>
-- Stability : experimental
-- Portability : non-portable (GHC Extensions)
--
-- Automatically derive instances of @HFoldable@.
--
--------------------------------------------------------------------------------
module Data.Comp.Multi.Derive.HFoldable
(
HFoldable,
makeHFoldable
)where
import Control.Monad
import Data.Comp.Derive.Utils
import Data.Comp.Multi.HFoldable
import Data.Comp.Multi.HFunctor
import Data.Foldable
import Data.Maybe
import Data.Monoid
import Language.Haskell.TH
import Prelude hiding (foldl, foldl1, foldr)
import qualified Prelude as P (foldl, foldl1, foldr)
-- @iter n f e@ builds the n-fold application of the quoted function @f@
-- to the quoted expression @e@: @f (f (... (f e)))@.
-- NOTE(review): top-level type signatures are missing for iter, iter'
-- and iterSp (likely @Int -> Q Exp -> Q Exp -> Q Exp@ and similar);
-- worth adding once the index type produced by 'containsType'' is
-- confirmed.
iter 0 _ e = e
iter n f e = iter (n-1) f (f `appE` e)

-- @iter' m f e@ is like 'iter', but each successive application of @f@
-- is wrapped in one more layer of 'fmap', so it reaches progressively
-- deeper into a nested functor.
iter' 0 _ e = e
iter' m f e = let f' = iter (m-1) [|fmap|] f
              in iter' (m-1) f (f' `appE` e)

-- @iterSp n f g e@ is like 'iter'', except the first (outermost) step
-- applies @g@ instead of @f@ — a "special" initial application.
iterSp n f g e = run n e
  where run 0 e = e
        run m e = let f' = iter (m-1) [|fmap|] (if n == m then g else f)
                  in run (m-1) (f' `appE` e)
{-| Derive an instance of 'HFoldable' for a type constructor of any higher-order
  kind taking at least two arguments. -}
makeHFoldable :: Name -> Q [Dec]
makeHFoldable fname = do
  -- Reify the named type; newtypes are presented as single-constructor data.
  TyConI (DataD _cxt name args constrs _deriving) <- abstractNewtypeQ $ reify fname
  let args' = init args
      -- The functor variable is the second-to-last type argument.
      fArg = VarT . tyVarBndrName $ last args'
      argNames = map (VarT . tyVarBndrName) (init args')
      complType = P.foldl AppT (ConT name) argNames
      classType = AppT (ConT ''HFoldable) complType
  -- For each constructor: its match pattern plus, per argument, the
  -- nesting depth at which the functor variable occurs in its type.
  constrs' <- mapM (mkPatAndVars . isFarg fArg <=< normalConExp) constrs
  foldDecl <- funD 'hfold (map foldClause constrs')
  foldMapDecl <- funD 'hfoldMap (map foldMapClause constrs')
  foldlDecl <- funD 'hfoldl (map foldlClause constrs')
  foldrDecl <- funD 'hfoldr (map foldrClause constrs')
  return [InstanceD [] classType [foldDecl,foldMapDecl,foldlDecl,foldrDecl]]
      where -- Annotate each constructor argument with its functor-occurrence depth.
            isFarg fArg (constr, args) = (constr, map (`containsType'` fArg) args)
            -- Keep only arguments that mention the functor variable at all;
            -- multiple occurrences in one argument type are unsupported.
            filterVar [] _ = Nothing
            filterVar [d] x =Just (d, varE x)
            filterVar _ _ = error "functor variable occurring twice in argument type"
            filterVars args varNs = catMaybes $ zipWith filterVar args varNs
            mkCPat constr args varNs = ConP constr $ zipWith mkPat args varNs
            -- Arguments without the functor variable are matched with wildcards.
            mkPat [] _ = WildP
            mkPat _ x = VarP x
            mkPatAndVars (constr, args) =
                do varNs <- newNames (length args) "x"
                   return (mkCPat constr args varNs, filterVars args varNs)
            -- hfold: mappend the (unwrapped) occurrences together.
            foldClause (pat,vars) =
                do let conApp (0,x) = [|unK $x|]
                       conApp (d,x) = iterSp d [|fold|] [| foldMap unK |] x
                   body <- if null vars
                             then [|mempty|]
                             else P.foldl1 (\ x y -> [|$x `mappend` $y|])
                                  $ map conApp vars
                   return $ Clause [pat] (NormalB body) []
            -- hfoldMap: like hfold, but mapping a user function first.
            foldMapClause (pat,vars) =
                do fn <- newName "y"
                   let f = varE fn
                       f' 0 = f
                       f' n = iter (n-1) [|fmap|] [| foldMap $f |]
                       fp = if null vars then WildP else VarP fn
                   body <- case vars of
                             [] -> [|mempty|]
                             (_:_) -> P.foldl1 (\ x y -> [|$x `mappend` $y|]) $
                                      map (\ (d,z) -> iter' (max (d-1) 0) [|fold|] (f' d `appE` z)) vars
                   return $ Clause [fp, pat] (NormalB body) []
            -- hfoldl: thread the accumulator left-to-right; deep
            -- occurrences are folded via the Endo monoid.
            foldlClause (pat,vars) =
                do fn <- newName "f"
                   en <- newName "e"
                   let f = varE fn
                       e = varE en
                       fp = if null vars then WildP else VarP fn
                       ep = VarP en
                       conApp x (0,y) = [|$f $x $y|]
                       conApp x (1,y) = [|foldl $f $x $y|]
                       conApp x (d,y) = let hidEndo = iter (d-1) [|fmap|] [|Endo . flip (foldl $f)|] `appE` y
                                            endo = iter' (d-1) [|fold|] hidEndo
                                        in [| appEndo $endo $x|]
                   body <- P.foldl conApp e vars
                   return $ Clause [fp, ep, pat] (NormalB body) []
            -- hfoldr: mirror image of hfoldl, threading right-to-left.
            foldrClause (pat,vars) =
                do fn <- newName "f"
                   en <- newName "e"
                   let f = varE fn
                       e = varE en
                       fp = if null vars then WildP else VarP fn
                       ep = VarP en
                       conApp (0,x) y = [|$f $x $y|]
                       conApp (1,x) y = [|foldr $f $y $x |]
                       conApp (d,x) y = let hidEndo = iter (d-1) [|fmap|] [|Endo . flip (foldr $f)|] `appE` x
                                            endo = iter' (d-1) [|fold|] hidEndo
                                        in [| appEndo $endo $y|]
                   body <- P.foldr conApp e vars
                   return $ Clause [fp, ep, pat] (NormalB body) []
| spacekitteh/compdata | src/Data/Comp/Multi/Derive/HFoldable.hs | bsd-3-clause | 5,303 | 0 | 21 | 2,037 | 1,690 | 914 | 776 | -1 | -1 |
{-# OPTIONS_GHC -cpp #-}
-- #hide
module Distribution.Compat.Directory (
module System.Directory,
#if __GLASGOW_HASKELL__ <= 602
findExecutable, copyFile, getHomeDirectory, createDirectoryIfMissing,
removeDirectoryRecursive,
#endif
getDirectoryContentsWithoutSpecial
) where
#if __GLASGOW_HASKELL__ && __GLASGOW_HASKELL__ < 604
#if __GLASGOW_HASKELL__ < 603
#include "config.h"
#else
#include "ghcconfig.h"
#endif
#endif
#if !__GLASGOW_HASKELL__ || __GLASGOW_HASKELL__ > 602
import System.Directory
#else /* to end of file... */
import System.Environment ( getEnv )
import Distribution.Compat.FilePath
import System.IO
import Foreign
import System.Directory
import Distribution.Compat.Exception (bracket)
import Control.Monad (when, unless)
#if !(mingw32_HOST_OS || mingw32_TARGET_OS)
import System.Posix (getFileStatus,setFileMode,fileMode,accessTime,
setFileMode,modificationTime,setFileTimes)
#endif
-- | Search each directory of the @PATH@ environment variable, in order,
-- for the given binary (with the platform's executable suffix attached).
-- Returns the first hit, or 'Nothing'.
-- NOTE(review): 'getEnv' throws if @PATH@ is unset — confirm that is
-- acceptable for callers.
findExecutable :: String -> IO (Maybe FilePath)
findExecutable binary = do
  path <- getEnv "PATH"
  search (parseSearchPath path)
  where
    search :: [FilePath] -> IO (Maybe FilePath)
    search [] = return Nothing
    search (d:ds) = do
        let path = d `joinFileName` binary `joinFileExt` exeSuffix
        b <- doesFileExist path
        if b then return (Just path)
             else search ds

-- | Platform-specific executable filename suffix: @"exe"@ on Windows,
-- empty elsewhere.
exeSuffix :: String
#if mingw32_HOST_OS || mingw32_TARGET_OS
exeSuffix = "exe"
#else
exeSuffix = ""
#endif
-- | Copy the permission bits of @src@ onto @dest@: via the POSIX file
-- mode on Unix, via 'getPermissions'/'setPermissions' elsewhere.
copyPermissions :: FilePath -> FilePath -> IO ()
#if !(mingw32_HOST_OS || mingw32_TARGET_OS)
copyPermissions src dest
    = do srcStatus <- getFileStatus src
         setFileMode dest (fileMode srcStatus)
#else
copyPermissions src dest
    = getPermissions src >>= setPermissions dest
#endif
-- | Copy access and modification times from @src@ to @dest@ on POSIX
-- systems; a no-op on Windows.
copyFileTimes :: FilePath -> FilePath -> IO ()
#if !(mingw32_HOST_OS || mingw32_TARGET_OS)
copyFileTimes src dest
   = do st <- getFileStatus src
        let atime = accessTime st
            mtime = modificationTime st
        setFileTimes dest atime mtime
#else
copyFileTimes src dest
   = return ()
#endif
-- |Preserves permissions and, if possible, atime+mtime
-- Copies in 1 KiB binary chunks; the permission/time copies are
-- best-effort ('try' deliberately swallows their failures).
copyFile :: FilePath -> FilePath -> IO ()
copyFile src dest
    | dest == src = fail "copyFile: source and destination are the same file"
#if (!(defined(__GLASGOW_HASKELL__) && __GLASGOW_HASKELL__ > 600))
    -- Very old GHC: fall back to (lazy) text-mode read/write.
    | otherwise = do readFile src >>= writeFile dest
                     try (copyPermissions src dest)
                     return ()
#else
    | otherwise = bracket (openBinaryFile src ReadMode) hClose $ \hSrc ->
                  bracket (openBinaryFile dest WriteMode) hClose $ \hDest ->
                  do allocaBytes bufSize $ \buffer -> copyContents hSrc hDest buffer
                     try (copyPermissions src dest)
                     try (copyFileTimes src dest)
                     return ()
  where bufSize = 1024
        -- Pump bytes until hGetBuf reports a zero-length (EOF) read.
        copyContents hSrc hDest buffer
          = do count <- hGetBuf hSrc buffer bufSize
               when (count > 0) $ do hPutBuf hDest buffer count
                                     copyContents hSrc hDest buffer
#endif
-- | The user's home directory, read from @$HOME@.
-- NOTE(review): 'getEnv' throws when @HOME@ is unset.
getHomeDirectory :: IO FilePath
getHomeDirectory = getEnv "HOME"
-- | Create a directory, optionally creating its parents first.
-- An empty path and an already-existing directory are both no-ops.
createDirectoryIfMissing :: Bool -- ^ Create its parents too?
                         -> FilePath -- ^ The path to the directory you want to make
                         -> IO ()
createDirectoryIfMissing parents file = do
  b <- doesDirectoryExist file
  case (b,parents, file) of
    (_, _, "") -> return ()
    (True, _, _) -> return ()
    -- Create each ancestor in turn (tail drops the root itself).
    (_, True, _) -> mapM_ (createDirectoryIfMissing False) (tail (pathParents file))
    (_, False, _) -> createDirectory file
-- | Delete a directory and everything beneath it. Each entry is first
-- tried as a plain file; when that fails and the entry is a directory,
-- we recurse instead.
removeDirectoryRecursive :: FilePath -> IO ()
removeDirectoryRecursive startLoc = do
  cont <- getDirectoryContentsWithoutSpecial startLoc
  mapM_ (rm . joinFileName startLoc) cont
  removeDirectory startLoc
  where
    rm :: FilePath -> IO ()
    rm f = do temp <- try (removeFile f)
              case temp of
                Left e -> do isDir <- doesDirectoryExist f
                             -- If f is not a directory, re-throw the error
                             unless isDir $ ioError e
                             removeDirectoryRecursive f
                Right _ -> return ()
#endif
-- | Like 'getDirectoryContents', but with the special entries @"."@
-- and @".."@ removed from the result.
getDirectoryContentsWithoutSpecial :: FilePath -> IO [FilePath]
getDirectoryContentsWithoutSpecial dir = do
  entries <- getDirectoryContents dir
  return [e | e <- entries, e /= ".", e /= ".."]
| alekar/hugs | packages/Cabal/Distribution/Compat/Directory.hs | bsd-3-clause | 4,389 | 6 | 15 | 1,118 | 814 | 413 | 401 | 10 | 1 |
{-# LANGUAGE LambdaCase, OverloadedStrings #-}
module Main where
-- | Like Latency, but creating lots of channels
import System.Environment
import Control.Applicative
import Control.Monad (void, forM_, forever, replicateM_)
import Control.Concurrent.MVar
import Control.Concurrent (forkOS, threadDelay)
import Control.Applicative
import Control.Distributed.Process
import Control.Distributed.Process.Node
import Criterion.Types
import Criterion.Measurement as M
import Data.Binary (encode, decode)
import Data.ByteString.Char8 (pack)
import Network.Transport.ZMQ (createTransport, defaultZMQParameters)
import qualified Data.ByteString.Lazy as BSL
import Text.Printf
-- | Endlessly answer pings: wait for a send-port, reply with unit on it.
pingServer :: Process ()
pingServer = forever $ do
  replyTo <- expect
  sendChan replyTo ()
  -- TODO: should this be automatic?
  reconnectPort replyTo
-- | Perform @n@ ping round-trips, creating a fresh typed channel per ping.
pingClient :: Int -> ProcessId -> Process ()
pingClient n server = replicateM_ n $ do
  (sendEnd, recvEnd) <- newChan :: Process (SendPort (), ReceivePort ())
  send server sendEnd
  receiveChan recvEnd
-- | Publish our pid to @pingServer.pid@, then serve pings forever.
initialServer :: Process ()
initialServer = do
  self <- getSelfPid
  liftIO (BSL.writeFile "pingServer.pid" (encode self))
  pingServer
-- | Read the server's pid from disk and run @n@ pings against it.
initialClient :: Int -> Process ()
initialClient n = do
  server <- liftIO (decode <$> BSL.readFile "pingServer.pid")
  pingClient n server
-- | With no arguments: run the in-process benchmark. With
-- @[SERVER|CLIENT] host@: run one side of the benchmark over the
-- network (CLIENT reads the ping count from stdin).
main :: IO ()
main = do
  initializeTime
  getArgs >>= \case
    [] -> defaultBench
    [role, host] -> do
      transport <- createTransport defaultZMQParameters (pack host)
      node <- newLocalNode transport initRemoteTable
      case role of
        "SERVER" -> runProcess node initialServer
        "CLIENT" -> fmap read getLine >>= runProcess node . initialClient
        _ -> error "Role should be either SERVER or CLIENT"
    _ -> error "either call benchmark with [SERVER|CLIENT] host or without arguments"
  where
    defaultBench = do
      -- Spawn the server on its own OS thread.
      void . forkOS $ do
        transport <- createTransport defaultZMQParameters "127.0.0.1"
        node <- newLocalNode transport initRemoteTable
        runProcess node $ initialServer
      -- Give the server a second to write pingServer.pid before clients read it.
      threadDelay 1000000
      -- MVar used purely as a "benchmark finished" latch.
      e <- newEmptyMVar
      void . forkOS $ do
        putStrLn "pings time\n--- ---\n"
        forM_ [100,200,600,800,1000,2000,5000,8000,10000] $ \i -> do
          transport <- createTransport defaultZMQParameters "127.0.0.1"
          node <- newLocalNode transport initRemoteTable
          -- Time a single run of i pings with Criterion's measurement API.
          d <- snd <$> M.measure (nfIO $ runProcess node $ initialClient i) 1
          printf "%-8i %10.4f\n" i d
        putMVar e ()
      takeMVar e
| tweag/network-transport-zeromq | benchmarks/Channels.hs | bsd-3-clause | 2,542 | 0 | 22 | 582 | 705 | 353 | 352 | 66 | 5 |
-- |
-- Module : GenProg
-- Copyright : (c) 2010 Jan Snajder
-- License : BSD-3 (see the LICENSE file)
--
-- Maintainer : Jan Snajder <jan.snajder@fer.hr>
-- Stability : experimental
-- Portability : non-portable
--
-- The Genetic Programming Library.
--
-- /Genetic programming/ is an evolutionary optimization technique
-- inspired by biological evolution. It is similar to /genetic algorithms/
-- except that the individual solutions are programs (or, more generally,
-- /expressions/) representing a solution to a given problem. A genetic
-- program is represented as an /abstract syntax tree/ and associated
-- with a custom-defined /fitness/ value indicating the quality of the
-- solution. Starting from a randomly generated initial population of
-- genetic programs, the genetic operators of /selection/, /crossover/,
-- and (occasionally) /mutation/ are used to evolve programs of
-- increasingly better quality.
--
-- Standard reference is: John Koza. /Genetic programming:/
-- /On the Programming of Computers by Means of Natural Selection/.
-- MIT Press, 1992.
--
-- In GenProg, a genetic program is represented by a value of an
-- algebraic datatype. To use a datatype as a genetic program, it
-- suffices to define it as an instance of the 'GenProg' typeclass.
-- A custom datatype can be made an instance of the 'GenProg'
-- typeclass, provided it is an instance of the 'Data' typeclass (see
-- "GenProg.GenExpr.Data").
--
-- An example of how to use this library is given below.
--
-----------------------------------------------------------------------------
{-# LANGUAGE MultiParamTypeClasses, FunctionalDependencies,
NoMonomorphismRestriction #-}
module GenProg (
-- * Genetic programs
GenProg (..),
-- * Expressions
generateFullExpr,
generateGrownExpr,
depth,
nodes,
-- * Individuals
Ind,
unInd,
mkInd,
aFitness,
sFitness,
-- * Population
Pop,
unPop,
mkPop,
generatePop,
replenishPop,
mergePop,
best,
avgFitness,
avgDepth,
avgNodes,
-- * Genetic operators
-- | The following functions are not meant to be used directly.
-- They are exposed for debugging purposes.
crossoverInd,
mutateInd,
crossoverPop,
mutatePop,
-- * Evolution state
EvolState (..),
-- * Control parameters
Fitness,
Mutate,
defaultMutation,
Terminate,
tSuccess,
tFitness,
tGeneration,
EvolParams (..),
defaultEvolParams,
-- * Evolution
evolve,
evolveFrom,
evolveTrace,
evolveTraceFrom
-- * Example
-- $Example
) where
import Data.List
import Data.Ord
import Data.Maybe
import Control.Monad
import Control.Monad.Random
import GenProg.GenExpr
-- | A typeclass defining a genetic program interface. Datatypes @e@
-- that are to be used as genetic programs must be instances of the
-- 'GenExpr' typeclass and must implement this interface.
-- The functional dependency @e -> m@ ties each expression type to a
-- single random monad.
class (Eq e, GenExpr e, MonadRandom m) => GenProg m e | e -> m where
  -- | Generates a random terminal @T@.
  terminal :: m e
  -- | Generates a random nonterminal (functional) node @F(T,...,T)@ whose
  -- arguments are again terminals (this condition is not verified).
  nonterminal :: m e
-----------------------------------------------------------------------------
-- Expressions
-- | Generates a random expression of a given maximum depth.
generateExpr :: (GenProg m e) => m e -> Int -> m e
generateExpr gen d
  | d < 1 = error "GenProg.generateExpr: Invalid expression depth"
  | otherwise = nonterminal >>= expand (d - 1)
  where
    -- Expand each node with the supplied generator until the depth
    -- budget is exhausted, then cap every path with a terminal.
    expand 0 _ = terminal
    expand k e = nodeMapM (\_ -> gen >>= expand (k - 1)) e
-- | Generates a random expression fully expanded to the specified depth.
generateFullExpr :: (GenProg m e) => Int -> m e
generateFullExpr d = generateExpr nonterminal d
-- | Generates a random expression of limited depth. The maximum depth of
-- the resulting expression may be less than the specified depth
-- limit, and paths may be of different length.
generateGrownExpr :: (GenProg m e) => Int -> m e
generateGrownExpr d = do
  -- A coin flip decides whether nodes may stop early at terminals.
  pickTerminal <- getRandom
  generateExpr (if pickTerminal then terminal else nonterminal) d
-----------------------------------------------------------------------------
-- Individuals
-- | A genetically programmed individual, representing a basic unit
-- of evolution. (Basically a wrapper around a genetically programmable
-- expression.)
data Ind e = Ind {
  -- | Returns the expression wrapped by an individual.
  unInd :: e,
  -- | Adjusted fitness of an individual. Adjusted fitness equals
  -- @1/(1+s)@, where @s@ is the standardized fitness as computed by
  -- 'fitness'. To reduce computational costs, this value is computed
  -- only once and then cached.
  aFitness :: Double,
  -- The indices of inner (functional) nodes of an individual's expression.
  -- Cached so genetic operators can pick nodes without re-traversal.
  iNodes :: [Int],
  -- The indices of external (terminal) nodes of an individual's expression.
  eNodes :: [Int] }
  deriving (Show)
-- Individuals are equal iff their expressions are (fitness is ignored).
instance (Eq e) => Eq (Ind e) where
  i1 == i2 = unInd i1 == unInd i2

-- Individuals are ordered by adjusted fitness.
-- NOTE(review): this ordering is inconsistent with (==) above — two
-- distinct expressions with equal fitness compare EQ but are not (==).
instance (Eq e) => Ord (Ind e) where
  compare = comparing aFitness
-- | Wraps an expression into an individual, caching its adjusted
-- fitness and its internal/external node indices.
mkInd :: (GenProg m e) => Fitness e -> e -> Ind e
mkInd fit e =
  let (ins, outs) = nodeIndices e
  in Ind e (adjust (fit e)) ins outs
-- Adjusts fitness: maps a standardized fitness s (clamped at 0) to
-- 1 / (1 + s), landing in the interval (0, 1].
adjust :: Double -> Double
adjust s = recip (1 + max 0 s)
-- Unadjusts fitness (the inverse of adjust on its range).
unadjust :: Double -> Double
unadjust a = recip a - 1
-- | Standardized fitness of an individual as computed by 'fitness'
-- (recovered from the cached adjusted fitness).
sFitness :: Ind e -> Double
sFitness = unadjust . aFitness
-----------------------------------------------------------------------------
-- Population
-- | A population of individuals. (Basically a wrapper around a list of
-- individuals.)
data Pop e = Pop
  { unPop :: [Ind e] -- ^ Unwraps a population.
  , dist_ :: [Double] -- ^ Fitness distribution: cumulative probabilities
                      -- used for fitness-proportionate selection.
  } deriving (Show, Eq)
-- | Wraps a list of individuals into a population, precomputing the
-- cumulative fitness distribution used for selection.
mkPop :: [Ind e] -> Pop e
mkPop is = Pop is cumulative
  where cumulative = map snd (distribution [(unInd i, aFitness i) | i <- is])
-- | Generate population of given size and given depth limit using
-- /ramped half-and-half/ method (Koza, 1992): for each depth value from 0 to
-- the initial depth limit 'iDepth', 50% of individuals are generated using
-- 'generateFullExpr' and 50% are generated using
-- 'generateGrownExpr'. Afterwards, duplicates are removed, thus the
-- size of the resulting population may actually be less than the
-- specified size.
generatePop :: (GenProg m e) => EvolParams m e -> m (Pop e)
generatePop p
  | s < 2 || n==0 = error "GenProg.generatePop: Invalid population size"
  | otherwise = do
      -- Ramped half-and-half: for each depth from 2 up to the initial
      -- depth limit, half the individuals are fully expanded, half grown.
      -- BUG FIX: the ramp depth @i@ is now actually used; previously both
      -- generators were always called with the maximum depth @di@, which
      -- defeated the ramp described in the documentation above.
      iss <- forM [2..di] $ \i -> do
        is1 <- replicateM n (mkInd (fitness p) `liftM` generateFullExpr i)
        is2 <- replicateM n (mkInd (fitness p) `liftM` generateGrownExpr i)
        return $ is1 ++ is2
      -- Duplicates are removed, so the result may be smaller than popSize.
      return . mkPop . nub $ concat iss
  where n = s `div` (2 * (di - 1))
        s = popSize p
        di = iDepth p
-- | Replenishes a population up to 'popSize' by randomly
-- generating new individuals.
replenishPop :: (GenProg m e) => EvolParams m e -> Pop e -> m (Pop e)
replenishPop p existing = do
  fresh <- generatePop p
  let have = length (unPop existing)
  return . mkPop $ unPop existing ++ drop have (unPop fresh)
-- | Merges two populations by taking 'popSize' best-fitted individuals
-- from the union of the two populations.
mergePop :: (GenProg m e) => EvolParams m e -> Pop e -> Pop e -> Pop e
mergePop p pop1 pop2 = mkPop (take (popSize p) ranked)
  where ranked = sortBy (flip (comparing aFitness)) (unPop pop1 ++ unPop pop2)
-- | Population's best-fitted individual.
best :: Pop e -> Ind e
best p = maximumBy (comparing aFitness) (unPop p)
-- Arithmetic mean of a list. Divides by zero on the empty list.
avg :: (Fractional a) => [a] -> a
avg xs = sum xs / realToFrac (length xs)
-- | Population's average standardized fitness.
avgFitness :: Pop e -> Double
avgFitness p = avg [unadjust (aFitness i) | i <- unPop p]
-- | Average depth of expressions in the population.
avgDepth :: (GenProg m e) => Pop e -> Double
avgDepth p = avg [realToFrac (depth (unInd i)) | i <- unPop p]
-- | Average number of expression nodes in the population.
avgNodes :: (GenProg m e) => Pop e -> Double
avgNodes p = avg [realToFrac (nodes (unInd i)) | i <- unPop p]
-----------------------------------------------------------------------------
-- Genetic operators
-- Selects at random an index of an expression node. Functional
-- (internal) nodes are selected with probability 'pci', whereas
-- terminal (external) nodes are selecred with probability '1-pi'.
selectNode :: (GenProg m e, MonadRandom m) => Double -> Ind e -> m Int
-- (Renamed the probability parameter so it no longer shadows Prelude's pi.)
selectNode pInternal i
  | null (iNodes i) = oneof (eNodes i)
  | otherwise = choice pInternal (oneof (iNodes i)) (oneof (eNodes i))
-- | Crossover operation of two individuals, resulting in two
-- offsprings. Crossover is performed by choosing at random two nodes
-- in each expressions, and then by exchanging the subexpressions
-- rooted at these nodes between the two individuals. The probability
-- that an internal (functional) node is chosen as crossover point is
-- set by the 'ciProb' parameter in 'EvolParams', whereas the
-- probability that an external (terminal) node is chosen equals
-- @1-ciProb@. Among internal and external nodes, nodes are chosen
-- uniformly at random. If the depth of a created offspring exceeds
-- the depth limit 'cDepth' specified by evolution parameters
-- 'EvolParams', that offspring is discarded and a parent is
-- reproduced (i.e., copied as-is).
crossoverInd :: (GenProg m e) =>
                EvolParams m e -> Ind e -> Ind e -> m (Ind e, Ind e)
crossoverInd p i1 i2 = do
  c1 <- selectNode (ciProb p) i1
  c2 <- selectNode (ciProb p) i2
  -- Swap the subtrees rooted at the chosen crossover points.
  let (o1, o2) = exchange (unInd i1) c1 (unInd i2) c2
      -- Offspring over the depth limit are discarded; the corresponding
      -- parent is reproduced instead.
      accept child parent
        | depth child <= cDepth p = mkInd (fitness p) child
        | otherwise = parent
  return (accept o1 i1, accept o2 i2)
-- | Mutates an individual by applying the mutation function @mutate@
-- to a randomly selected node. The probability that an internal
-- (functional) node is chosen for muration is set by the 'miProb'
-- parameter in 'EvolParams', whereas the probability that an external
-- (terminal) node is chosen equals @1-miProb@. Among internal and
-- external nodes, nodes are chosen uniformly at random. If the depth
-- of the mutated expression exceeds the depth limit 'cDepth'
-- specified by evolution parameters 'EvolParams', the individual is
-- left unaltered.
mutateInd :: (GenProg m e) => EvolParams m e -> Ind e -> m (Ind e)
mutateInd p i = do
  target <- selectNode (miProb p) i
  mutated <- adjustM (mutate p) original target
  -- A mutation that exceeds the depth limit leaves the expression as-is.
  return . mkInd (fitness p) $ if depth mutated <= cDepth p then mutated else original
  where original = unInd i
-- Discrete distribution: values paired with cumulative probabilities
-- (the final cumulative value is 1, up to rounding).
type Distribution a = [(a, Double)]

-- Computes a cumulative distribution from a weighted list.
-- The weights need not sum to 1 (they are normalized here).
distribution :: [(a, Double)] -> Distribution a
distribution xs = zip (map fst xs) cumulative
  where
    s = sum (map snd xs)
    -- Running left-to-right sums of the normalized weights. A single
    -- O(n) scan replaces the former O(n^2) "re-sum every prefix"; the
    -- left-associated partial sums are bit-identical to the originals.
    cumulative = drop 1 (scanl (+) 0 (map ((/ s) . snd) xs))
-- Samples a value from a discrete (cumulative) distribution.
-- The distribution must be non-empty. If floating-point rounding
-- leaves the final cumulative probability slightly below the sampled
-- point, the last element is returned; the previous version used
-- 'fromJust' and crashed in that (rare but real) case.
choose :: (MonadRandom m) => Distribution a -> m a
choose xs = do
  p <- getRandomR (0, 1)
  return $ case find ((>= p) . snd) xs of
    Just (x, _) -> x
    Nothing     -> fst (last xs)
-- Choose the first action with probability @p@ and the second with
-- probability @1-p@.
choice :: (MonadRandom m) => Double -> m a -> m a -> m a
choice p a1 a2 = do
  roll <- getRandomR (0, 1)
  if roll <= p
    then a1
    else a2
-- Picks an element of the list uniformly at random.
oneof :: (MonadRandom m) => [a] -> m a
oneof xs = do
  i <- getRandomR (0, length xs - 1)
  return (xs !! i)
-- Fitness-proportionate selection of an individual from a population.
-- Pairs each individual with its cumulative selection probability
-- ('dist_') and samples from that distribution, so individuals with
-- better fitness are more likely to be chosen.
selectInd :: (MonadRandom m) => Pop e -> m (Ind e)
selectInd pop = choose (zip (unPop pop) (dist_ pop))
-- Reproduction copies a fitness-proportionately selected individual
-- into the next generation unchanged.
reproducePop :: (MonadRandom m) => Pop e -> m (Ind e)
reproducePop = selectInd
-- | Applies crossover to two randomly chosen individuals from a
-- population. The probability of an individual being chosen as a
-- parent is fitness-proportionate (individuals with better fitness
-- have better chances of being chosen for crossover).
crossoverPop :: (GenProg m e) => EvolParams m e -> Pop e -> m (Ind e,Ind e)
crossoverPop p pop = do
  mother <- selectInd pop
  father <- selectInd pop
  crossoverInd p mother father
-- | Applies the mutation operation to individuals from a population.
-- Each individual is mutated independently with probability 'mProb'
-- (from 'EvolParams'); with @mProb == 0@ the population is returned
-- untouched.
mutatePop :: (GenProg m e) => EvolParams m e -> Pop e -> m (Pop e)
mutatePop p pop
  | mProb p == 0 = return pop
  | otherwise    = do
      inds <- forM (unPop pop) perhapsMutate
      return (mkPop inds)
  where
    perhapsMutate i = choice (mProb p) (mutateInd p i) (return i)
-----------------------------------------------------------------------------
-- Evolution state
-- | The state of the evolution.
data EvolState e = EvolState
  { pop :: Pop e -- ^ Current population.
  , iter :: Int -- ^ Iteration (current generation number).
  , cachedBest :: Ind e -- ^ Best individual evolved so far.
  } deriving (Show,Eq)
-- Builds the initial evolution state (generation zero) for the given
-- starting population; its best individual seeds 'cachedBest'.
initState :: Pop e -> EvolState e
initState pop =
  EvolState { pop = pop, iter = 0, cachedBest = best pop }
-- | Advances to the next evolution state: evolves the population by
-- one generation, increments the generation counter, and keeps the
-- cached best individual up to date.
nextState :: (GenProg m e) =>
             EvolParams m e -> EvolState e -> m (EvolState e)
nextState p st = do
  evolved <- evolvePop p current
  let champion = max (cachedBest st) (best current)
  return st { pop = evolved
            , iter = iter st + 1
            , cachedBest = champion }
  where
    current = pop st
-----------------------------------------------------------------------------
-- Control parameters

-- | Standardized fitness. It takes on values from 0 (best fitness) to
-- +infinity (worst fitness).
type Fitness e = e -> Double

-- | A function to mutate a chosen expression node.
type Mutate m e = e -> m e

-- | Default mutation. Replaces a node, irrespective of its value,
-- with a randomly generated subexpression whose depth is limited to
-- 'iDepth'.
defaultMutation :: (GenProg m e) => EvolParams m e -> Mutate m e
defaultMutation p _ = generateGrownExpr (iDepth p)
-- | Termination predicate.
type Terminate e = EvolState e -> Bool

-- | Termination predicate: terminate if any individual satisfies the
-- specified predicate.
tSuccess :: (e -> Bool) -> Terminate e
tSuccess ok st = any (ok . unInd) (unPop (pop st))

-- | Termination predicate: terminate if best individual's
-- standardized fitness is greater than or equal to the specified value.
tFitness :: (GenProg m e) => Double -> Terminate e
tFitness threshold st = unadjust (aFitness (cachedBest st)) >= threshold

-- | Termination predicate: terminate after running for the specified
-- number of iterations.
tGeneration :: Int -> Terminate e
tGeneration n st = iter st >= n
-- | Parameters governing the evolution.
--
-- Default evolution parameters,
-- as used in (Koza, 1992), are defined by 'defaultEvolParams'
-- and indicated below. At least the fitness function 'fitness' should
-- be overridden.
data EvolParams m e = EvolParams {
  -- | Population size (number of individuals). Default is @500@.
  popSize :: Int,
  -- | Depth of expressions in initial population. Default is @6@.
  iDepth :: Int,
  -- | Maximum depth of expressions created during the evolution.
  -- Default is @17@.
  cDepth :: Int,
  -- | Probability of crossover. Default is @0.9@. If crossover is not
  -- chosen, an individual is simply reproduced (copied as-is) into
  -- the next generation.
  cProb :: Double,
  -- | Probability that an internal (functional) node is chosen as a
  -- crossover point. Default is @0.9@. If an internal node is not
  -- chosen, an external (terminal) node is
  -- chosen.
  ciProb :: Double,
  -- | Probability that an individual gets mutated. Default is @0@
  -- (no mutation).
  mProb :: Double,
  -- | Probability that an internal (functional) node is chosen for
  -- mutation. Default is @0.1@.
  miProb :: Double,
  -- | Standardized fitness function. Default value is @undefined@
  -- (must be overridden).
  fitness :: Fitness e,
  -- | Mutation function. Defines how to change a randomly chosen
  -- node. Default is @defaultMutation defaultEvolParams@
  -- (replacement of a chosen node with a randomly generated subexpression).
  mutate :: Mutate m e,
  -- | Elitist factor: number of best-fitted individuals that are preserved
  -- from each generation (reproduced as-is into next evolution state).
  -- Default is @0@.
  elitists :: Int,
  -- | Termination predicate. Default is @tGeneration 50@
  -- (terminate after 50 generations).
  terminate :: Terminate e }
-- | Default evolution parameters, as used in (Koza, 1992). The
-- 'fitness' function is deliberately left undefined ('error') and
-- must be overridden before use. An explicit type signature is given
-- (it was missing), matching the type GHC infers for this binding.
defaultEvolParams :: (GenProg m e) => EvolParams m e
defaultEvolParams = EvolParams
  { popSize = 500
  , iDepth = 6
  , cDepth = 17
  , cProb = 0.9
  , ciProb = 0.9
  , mProb = 0.0
  , miProb = 0.1
  , terminate = tGeneration 50
  , fitness = error "GenProg.defaultEvolParams: fitness function is undefined"
  -- Use 'defaultMutation' itself, as documented in 'EvolParams',
  -- instead of duplicating its body inline.
  , mutate = defaultMutation defaultEvolParams
  , elitists = 0 }
-----------------------------------------------------------------------------
-- Evolution

-- Repeatedly applies a monadic step until the predicate holds,
-- returning the first value that satisfies it.
untilM :: (Monad m) => (a -> Bool) -> (a -> m a) -> a -> m a
untilM p f = go
  where
    go x
      | p x       = return x
      | otherwise = f x >>= go

-- Like 'untilM', but collects the successive values produced by the
-- step, up to and including the first one that satisfies the
-- predicate; the starting value itself is excluded.
iterateUntilM :: (Monad m) => (a -> Bool) -> (a -> m a) -> a -> m [a]
iterateUntilM p f = go
  where
    go x
      | p x = return []
      | otherwise = do
          y <- f x
          rest <- go y
          return (y : rest)
-- | Evolves one population from another one by performing a single
-- evolution step.
evolvePop :: (GenProg m e) => EvolParams m e -> Pop e -> m (Pop e)
evolvePop p pop1 = do
  -- Breed new individuals until the non-elite quota 's' is reached.
  pop2 <- mkPop `liftM` untilM ((>= s) . length) step []
  -- Give every bred individual a chance to mutate ...
  pop3 <- mutatePop p pop2
  -- ... and carry the elite over unchanged.
  return $ mkPop (elite ++ unPop pop3)
  where s = popSize p - length elite
        -- The 'elitists p' top-ranked individuals survive as-is.
        elite = take (elitists p) topRanked
        -- Ranked by adjusted fitness, best first.
        topRanked = sortBy (flip $ comparing aFitness) $ unPop pop1
        -- One breeding step: with probability 'cProb' apply crossover
        -- (adds two offspring), otherwise reproduce one individual.
        -- When exactly one slot remains, crossover would overshoot
        -- the quota, so reproduction is forced.
        step is | length is == s - 1 = (:is) `liftM` reproducePop pop1
                | otherwise = choice (cProb p)
                    (do (i1,i2) <- crossoverPop p pop1; return (i1:i2:is))
                    ((:is) `liftM` reproducePop pop1)
-- | Creates an initial population and evolves it until the
-- termination predicate is satisfied, returning the last evolution
-- state.
evolve :: (GenProg m e) => EvolParams m e -> m (EvolState e)
evolve = liftM last . evolveTrace

-- | Evolves a given initial population until the termination
-- predicate is satisfied, returning the last evolution state.
-- If the size of the initial population is less than
-- 'popSize', the population will be replenished (see 'replenishPop').
evolveFrom :: (GenProg m e) => EvolParams m e -> Pop e -> m (EvolState e)
evolveFrom p pop = liftM last (evolveTraceFrom p pop)
-- | Runs evolution on a given initial population until the
-- termination predicate is satisfied and returns a list of successive
-- evolution states. If the size of the initial population is less
-- than 'popSize', the population will be replenished (see
-- 'replenishPop').
evolveTraceFrom :: (GenProg m e) => EvolParams m e -> Pop e -> m [EvolState e]
evolveTraceFrom p pop1 = do
  full <- replenishPop p pop1
  iterateUntilM (terminate p) (nextState p) (initState full)

-- | Creates an initial population and runs evolution until the
-- termination predicate is satisfied. Returns a list of successive
-- evolution states.
evolveTrace :: (GenProg m e) => EvolParams m e -> m [EvolState e]
evolveTrace p = evolveTraceFrom p =<< generatePop p
-----------------------------------------------------------------------------
-- Example
{- $Example
This is a simple, worked through example of how to use the GenProg
library. Given a target number @n@, our aim is to evolve an arithmetic
expression that evaluates to @n@. For example, given @13@ as the
target number, one possible solution is @(3 * 5) - 2@. The constants
allowed to appear in the expression are restricted to integers from 1
to 9. The allowed operations are @+@, @-@, @*@, and integer division
without remainder.
We begin by defining the datatype for the genetically programmed
expression:
@
-- The following language extensions need to be enabled:
-- DeriveDataTypeable, FlexibleInstances, MultiParamTypeClasses
import GenProg
import Data.Generics
import Control.Monad
import Control.Monad.Random
data E = Plus E E
| Minus E E
| Times E E
| Div E E
| Const Int
deriving (Typeable,Data,Eq,Show)
@
In order to evolve arithmetic expressions, we need to be able to
compute their values. To this end we define
@
eval :: E -> Maybe Int
eval (Const c) = Just c
eval (Plus e1 e2) = liftM2 (+) (eval e1) (eval e2)
eval (Minus e1 e2) = liftM2 (-) (eval e1) (eval e2)
eval (Times e1 e2) = liftM2 (*) (eval e1) (eval e2)
eval (Div e1 e2) | ok = liftM2 div x1 x2
| otherwise = Nothing
where (x1,x2) = (eval e1,eval e2)
ok = x2 /= Just 0 && liftM2 mod x1 x2 == Just 0
@
Dividing by zero and dividing with a remainder are not allowed and in
such cases we return @Nothing@.
Because we have made @E@ an instance of the 'Data' typeclass, it can
be readily used as a genetically programmable expression. Next step is
to make 'E' an instance of the 'GenProg' typeclass:
@
instance GenProg (Rand StdGen) E where
terminal = Const `liftM` getRandomR (1,9)
nonterminal = do
r <- getRandomR (0,3)
[liftM2 Plus terminal terminal,
liftM2 Minus terminal terminal,
liftM2 Times terminal terminal,
liftM2 Div terminal terminal] !! r
@
Thus, a random terminal node contains one of the constants from 1 to
9. A nonterminal node can be one of the four arithmetic operations,
each with terminal nodes as arguments. Note that computations are run
within the standard random generator monad (@Rand StdGen@).
The fitness function evaluates the accurateness of the arithmetic
expression with respect to the target number. If the value of the
expression is far off from the target number @n@, the standardized
fitness should be high. Moreover, we would like to keep the expression
as simple as possible. To this end, we include a /parsimony factor/
that is proportional to the number of nodes an expression has. We
define the overall standardized fitness as
@
myFitness :: Int -> E -> Double
myFitness n e = error + size
where error = realToFrac $ maybe maxBound (abs . (n-)) (eval e)
size = (realToFrac $ nodes e) / 100
@
The number of nodes is divided by a factor of 100 to make it less
important than the numeric accuracy of the expression.
We now have everything in place to get the evolution going. We will use
default evolution parameters and choose @12345@ as the target number:
>>> let params = defaultEvolParams { fitness = myFitness 12345 }
Let us first create a random number generator:
>>> let g = mkStdGen 0
We are doing this because we want our results to be reproducible, and
because we want to be able to compare the results of different
evolution runs. Normally, you would use @getStdGen@ to get a random
generator with random seed.
To run the evolution and get the best evolved individual, we type
>>> let i = cachedBest $ evalRand (evolve params) g
To check out its standardized fitness, we type
>>> sFitness i
39.61
Let us see what the actual expression looks like:
>>> unInd i
Times (Minus (Minus (Minus (Plus (Const 4) (Const 4)) (Plus (Const 6)
(Const 7))) (Minus (Minus (Const 5) (Const 9)) (Plus (Minus (Const 5)
(Const 9)) (Minus (Const 4) (Const 4))))) (Plus (Times (Plus (Const 5)
(Const 1)) (Const 6)) (Times (Plus (Const 9) (Const 3)) (Minus (Const 1)
(Const 8))))) (Div (Times (Plus (Plus (Const 3) (Const 5)) (Times (Const 4)
(Const 7))) (Plus (Const 4) (Const 4))) (Minus (Minus (Plus (Const 2)
(Const 8)) (Plus (Const 6) (Const 7))) (Plus (Minus (Const 5) (Const 9))
(Minus (Const 4) (Const 4)))))
The number of nodes is
>>> nodes $ unInd i
61
Let us see to what number the expression evaluates:
>>> eval $ unInd i
Just 12384
So in this run we didn't get a perfect match, but we were close. Let
us see if we can do better.
When doing genetic programming, it is always a good idea to experiment
a bit with the parameters. There are no parameters that work best for
any given problem. You can learn a lot about how parameters influence
the evolution by analysing how the evolution progresses in time. This
can be accomplished by evolving an evolution trace:
>>> let trace = evalRand (evolveTrace params) g
We can now analyse how the standardized fitness of the
best individual improves during the evolution:
>>> map (sFitness . best . pop) trace
[9591.35,2343.59,1935.59,2343.59,903.51,903.45,585.59,585.59,327.45,225.41,
225.41,135.43,57.49,39.61,39.61,39.61,39.61,39.61,57.43,57.47,57.43,57.45,
57.33,57.43,57.43,57.45,57.43,57.43,57.35,57.35,57.43,57.27,57.33,57.33,57.43,
57.29,57.33,57.41,57.29,57.43,57.33,57.35,57.35,57.33,57.39,57.39,57.39,57.33,
57.37,57.37]
We see that at some point the fitness decreases and then increases
again. This indicates that the best fitted individual was lost by
evolving from one generation to the other. We can prevent this by
employing the /elitist strategy/. Let us see what happens if we
preserve a best fitted individual in each generation:
>>> let trace = evalRand (evolveTrace params {elitists = 1}) g
>>> map (sFitness . best . pop) trace
[9591.35,2343.59,711.61,711.61,711.61,711.61,57.55,57.53,57.39,57.39,57.39,
57.39,57.37,57.37,57.37,57.37,57.37,57.37,57.37,57.37,57.35,57.35,57.35,
57.35,57.35,57.35,57.35,57.35,57.35,57.35,57.33,57.33,57.33,57.33,57.33,
57.33,57.33,57.33,57.33,25.31,25.31,25.31,25.31,25.31,25.31,25.296,25.296,
25.296,25.296,25.296]
This gives us better fitness, but still not an exact match:
>>> let i = cachedBest $ last trace
>>> eval $ unInd i
Just 12320
In the previous evolution run fitness converged relatively fast, but then
remained stuck. To stir up things a little, let us allow for some
mutation. Setting mutation probability to 5%, while retaining the
elitist strategy, we get
>>> let trace = evalRand (evolveTrace params {elitists = 1, mProb = 0.05}) g
>>> map (sFitness . best . pop) trace
[9591.35,9591.35,9591.35,9591.35,9591.35,9591.35,9159.35,8403.23,7239.11,
6087.15,6087.15,1479.13,819.21,60.13,51.19,5.19,5.19,5.19,5.19,5.19,1.23,
1.23,1.23,1.23,1.23,1.23,1.21,1.21,1.21,1.21,0.23998,0.23998,0.23998,0.23998,
0.23998,0.23998,0.23998,0.23998,0.23998,0.23998,0.23998,0.23998,0.23998,
0.23998,0.23998,0.23998,0.23998,0.23998,0.23998,0.23998]
This time we've got a perfect match:
>>> let i = cachedBest $ last trace
>>> eval $ unInd i
Just 12345
while at the same time the expression is rather compact:
>>> unInd i
Plus (Times (Const 4) (Plus (Const 9) (Const 4))) (Plus (Plus (Times
(Plus (Const 4) (Const 3)) (Times (Times (Const 3) (Const 9)) (Times
(Const 5) (Plus (Const 9) (Const 4))))) (Const 3)) (Const 5))
>>> nodes $ unInd i
23
-}
| jsnajder/genprog | src/GenProg.hs | bsd-3-clause | 26,777 | 0 | 19 | 5,318 | 4,179 | 2,248 | 1,931 | 250 | 3 |
{-# LANGUAGE Haskell98 #-}
{-# LINE 1 "Network/Wai/Handler/Warp/File.hs" #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE BangPatterns #-}
module Network.Wai.Handler.Warp.File (
RspFileInfo(..)
, conditionalRequest
, addContentHeadersForFilePart
, parseByteRanges
) where
import Control.Applicative ((<|>))
import Data.Array ((!))
import qualified Data.ByteString as B hiding (pack)
import qualified Data.ByteString.Char8 as B (pack, readInteger)
import Data.ByteString (ByteString)
import Data.Maybe (fromMaybe)
import Network.HTTP.Date
import qualified Network.HTTP.Types as H
import qualified Network.HTTP.Types.Header as H
import Network.Wai
import qualified Network.Wai.Handler.Warp.FileInfoCache as I
import Network.Wai.Handler.Warp.Header
import Network.Wai.Handler.Warp.PackInt
import Numeric (showInt)
-- $setup
-- >>> import Test.QuickCheck
----------------------------------------------------------------
-- | The plan for responding to a file request.
data RspFileInfo = WithoutBody H.Status
                   -- ^ Send headers only, with this status
                   --   (used for 304/412/416 responses below).
                 | WithBody H.Status H.ResponseHeaders Integer Integer
                   -- ^ Status, response headers, offset of the first
                   --   byte to send, and number of bytes to send.
                 deriving (Eq,Show)
----------------------------------------------------------------
-- | Decides how to respond to a (possibly conditional) file request:
-- evaluates the If-Modified-Since, If-Unmodified-Since and If-Range
-- request headers against the file's modification time, falling back
-- to an unconditional (possibly ranged) response when none applies.
-- For responses with a body, Last-Modified and the content headers
-- are added to @hs0@.
conditionalRequest :: I.FileInfo
                   -> H.ResponseHeaders -> IndexedHeader
                   -> RspFileInfo
conditionalRequest finfo hs0 reqidx = case condition of
    nobody@(WithoutBody _) -> nobody
    WithBody s _ off len -> let !hs = (H.hLastModified,date) :
                                      addContentHeaders hs0 off len size
                            in WithBody s hs off len
  where
    !mtime = I.fileInfoTime finfo
    !size = I.fileInfoSize finfo
    !date = I.fileInfoDate finfo
    -- The first conditional header that is present wins, tried in
    -- this order: If-Modified-Since, If-Unmodified-Since, If-Range.
    !mcondition = ifmodified reqidx size mtime
              <|> ifunmodified reqidx size mtime
              <|> ifrange reqidx size mtime
    !condition = fromMaybe (unconditional reqidx size) mcondition
----------------------------------------------------------------
-- Parse the If-Modified-Since request header, if present and valid.
ifModifiedSince :: IndexedHeader -> Maybe HTTPDate
ifModifiedSince reqidx =
    parseHTTPDate =<< reqidx ! fromEnum ReqIfModifiedSince

-- Parse the If-Unmodified-Since request header, if present and valid.
ifUnmodifiedSince :: IndexedHeader -> Maybe HTTPDate
ifUnmodifiedSince reqidx =
    parseHTTPDate =<< reqidx ! fromEnum ReqIfUnmodifiedSince

-- Parse the If-Range request header, if present and valid.
ifRange :: IndexedHeader -> Maybe HTTPDate
ifRange reqidx =
    parseHTTPDate =<< reqidx ! fromEnum ReqIfRange
----------------------------------------------------------------
-- Evaluate If-Modified-Since: respond normally when the file has
-- changed, otherwise 304 Not Modified. Nothing when the header is
-- absent or unparsable.
ifmodified :: IndexedHeader -> Integer -> HTTPDate -> Maybe RspFileInfo
ifmodified reqidx size mtime = fmap decide (ifModifiedSince reqidx)
  where
    decide since
      | since /= mtime = unconditional reqidx size
      | otherwise      = WithoutBody H.notModified304

-- Evaluate If-Unmodified-Since: respond normally when the file is
-- unchanged, otherwise 412 Precondition Failed. Nothing when the
-- header is absent or unparsable.
ifunmodified :: IndexedHeader -> Integer -> HTTPDate -> Maybe RspFileInfo
ifunmodified reqidx size mtime = fmap decide (ifUnmodifiedSince reqidx)
  where
    decide since
      | since == mtime = unconditional reqidx size
      | otherwise      = WithoutBody H.preconditionFailed412

-- Evaluate If-Range: honor the Range header only when the file is
-- unchanged; otherwise send the whole file with 200 OK. Nothing when
-- either If-Range or Range is absent.
ifrange :: IndexedHeader -> Integer -> HTTPDate -> Maybe RspFileInfo
ifrange reqidx size mtime = do
    since <- ifRange reqidx
    rangeValue <- reqidx ! fromEnum ReqRange
    return $ if since == mtime
               then parseRange rangeValue size
               else WithBody H.ok200 [] 0 size
-- Plain response: the whole file, or the requested byte range when a
-- Range request header is present.
unconditional :: IndexedHeader -> Integer -> RspFileInfo
unconditional reqidx size =
    maybe (WithBody H.ok200 [] 0 size)
          (`parseRange` size)
          (reqidx ! fromEnum ReqRange)
----------------------------------------------------------------
-- Turn the value of a Range header into a response plan. Only the
-- first range of a multi-range request is honored; an unparsable or
-- empty range set yields 416 Requested Range Not Satisfiable.
parseRange :: ByteString -> Integer -> RspFileInfo
parseRange rng size = case parseByteRanges rng of
    Just (r:_) -> ranged r
    _          -> WithoutBody H.requestedRangeNotSatisfiable416
  where
    ranged r =
        let (!beg, !end) = checkRange r size
            !len = end - beg + 1
            -- A range covering the whole file is an ordinary 200.
            s | beg == 0 && end == size - 1 = H.ok200
              | otherwise                   = H.partialContent206
        in WithBody s [] beg len
-- Clamp a parsed byte range against the file size, yielding inclusive
-- (first, last) byte offsets.
checkRange :: H.ByteRange -> Integer -> (Integer, Integer)
checkRange br size = case br of
    H.ByteRangeFrom beg       -> (beg, lastByte)
    H.ByteRangeFromTo beg end -> (beg, min lastByte end)
    H.ByteRangeSuffix count   -> (max 0 (size - count), lastByte)
  where
    lastByte = size - 1
-- | Parse the value of a Range header into a 'H.ByteRanges'.
-- The input must start with @bytes=@ and contain one or more
-- comma-separated ranges; returns 'Nothing' on any malformed input.
parseByteRanges :: B.ByteString -> Maybe H.ByteRanges
parseByteRanges bs1 = do
    bs2 <- stripPrefix "bytes=" bs1
    (r, bs3) <- range bs2
    ranges (r:) bs3
  where
    -- Parse one range: "-N" (suffix), "M-N" (from-to, only when
    -- N >= M), or "M-" (from M to end).
    range bs2 = do
        (i, bs3) <- B.readInteger bs2
        if i < 0 -- has prefix "-" ("-0" is not valid, but here treated as "0-")
            then Just (H.ByteRangeSuffix (negate i), bs3)
            else do
                bs4 <- stripPrefix "-" bs3
                case B.readInteger bs4 of
                    Just (j, bs5) | j >= i -> Just (H.ByteRangeFromTo i j, bs5)
                    _ -> Just (H.ByteRangeFrom i, bs4)
    -- Accumulate the remaining comma-separated ranges; 'front' is a
    -- difference list, so the result stays in input order.
    ranges front bs3
        | B.null bs3 = Just (front [])
        | otherwise = do
            bs4 <- stripPrefix "," bs3
            (r, bs5) <- range bs4
            ranges (front . (r:)) bs5
    -- Local helper (B.stripPrefix is not used, keeping the module's
    -- minimal import surface): drop a literal prefix or fail.
    stripPrefix x y
        | x `B.isPrefixOf` y = Just (B.drop (B.length x) y)
        | otherwise = Nothing
----------------------------------------------------------------
-- Header name for Content-Range.
contentRange :: H.HeaderName
contentRange = H.hContentRange

-- | @contentRangeHeader beg end total@ constructs a Content-Range 'H.Header'
-- for the range specified, e.g. @bytes 2-11/16@. An empty range
-- (@beg > end@) renders as @bytes *\/total@.
contentRangeHeader :: Integer -> Integer -> Integer -> H.Header
contentRangeHeader beg end total = (contentRange, value)
  where
    -- Assembled with ShowS composition to avoid repeated appends.
    value = B.pack ("bytes " ++ spec ('/' : showInt total ""))
    spec
      | beg > end = ('*' :)
      | otherwise = showInt beg . ('-' :) . showInt end
-- Header name for Accept-Ranges.
acceptRange :: H.HeaderName
acceptRange = H.hAcceptRanges

-- Prepend Content-Length and Accept-Ranges to the given headers, and
-- a Content-Range header as well when only part of the file is sent
-- (len < size).
addContentHeaders :: H.ResponseHeaders -> Integer -> Integer -> Integer -> H.ResponseHeaders
addContentHeaders hs off len size
    | len == size = common
    | otherwise   =
        let !ctrng = contentRangeHeader off (off + len - 1) size
        in ctrng : common
  where
    !lengthBS = packIntegral len
    !common = (H.hContentLength, lengthBS) : (acceptRange,"bytes") : hs
-- |
--
-- >>> addContentHeadersForFilePart [] (FilePart 2 10 16)
-- [("Content-Range","bytes 2-11/16"),("Content-Length","10"),("Accept-Ranges","bytes")]
-- >>> addContentHeadersForFilePart [] (FilePart 0 16 16)
-- [("Content-Length","16"),("Accept-Ranges","bytes")]
addContentHeadersForFilePart :: H.ResponseHeaders -> FilePart -> H.ResponseHeaders
addContentHeadersForFilePart hs part =
    addContentHeaders hs
                      (filePartOffset part)
                      (filePartByteCount part)
                      (filePartFileSize part)
| phischu/fragnix | tests/packages/scotty/Network.Wai.Handler.Warp.File.hs | bsd-3-clause | 6,976 | 0 | 19 | 1,795 | 1,815 | 938 | 877 | 134 | 4 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.