| code (string, 5 to 1.03M chars) | repo_name (string, 5 to 90 chars) | path (string, 4 to 158 chars) | license (15 classes) | size (int64, 5 to 1.03M) | n_ast_errors (int64, 0 to 53.9k) | ast_max_depth (int64, 2 to 4.17k) | n_whitespaces (int64, 0 to 365k) | n_ast_nodes (int64, 3 to 317k) | n_ast_terminals (int64, 1 to 171k) | n_ast_nonterminals (int64, 1 to 146k) | loc (int64, -1 to 37.3k) | cycloplexity (int64, -1 to 1.31k) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
module Sound.IoSox
(
createSoxProcess
, independentSox
-- * Input from file
, rvreadfilewav
, rlreadfilewav
, SoxReadFile(..)
-- * Formats
, Wav(..)
, Snd(..)
-- * Format conversion
, conwavau
)
where
import Sound.InfList
import Sound.IoPtr
import Sound.IoFail
import Sound.StreamVector
import Sound.Time
import qualified System.Exit as Ex
import qualified Data.ByteString as Bs
import qualified Data.Vector.Unboxed as Vu
import qualified System.Process as P
import System.IO (Handle)
{- |
Convert WAV to AU.
This may not work correctly on Windows.
This may not work correctly on Haskell implementations other than GHC.
(This may hang.)
This requires <http://sox.sourceforge.net/ sox> to be in PATH.
You can install it on Ubuntu 12.04 like this:
@
sudo apt-get install sox
@
-}
conwavau :: Bs.ByteString -> IO Bs.ByteString
conwavau wav_ = do
(i, o, p) <- createSoxProcess args
let
loop wav au = do
wavTail <- Bs.hPutNonBlocking i wav
auTail <- Bs.hGetNonBlocking o 1048576
me <- P.getProcessExitCode p
case me of
Just Ex.ExitSuccess -> return au
Just e -> ioError . userError $ show e
_ -> loop wavTail (Bs.append au auTail)
loop wav_ Bs.empty
where
args = ["-", "--type", "au", "--encoding", "floating-point", "--bits", "64", "-"]
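-- A minimal usage sketch (illustrative, not part of the original module): read
-- a WAV file from disk and hand its bytes to 'conwavau'.  'Bs.readFile' is the
-- plain Data.ByteString reader already imported above; the module's own
-- 'rvreadfilewav' uses its 'slurp' helper for the same step.
conwavauFile :: FilePath -> IO Bs.ByteString
conwavauFile path = Bs.readFile path >>= conwavau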
createSoxProcess :: [String] -> IO (Handle, Handle, P.ProcessHandle)
createSoxProcess args = do
(Just i, Just o, _, p) <- P.createProcess (P.proc sox args)
{
P.cwd = Just "/tmp"
, P.std_in = P.CreatePipe
, P.std_out = P.CreatePipe
, P.std_err = P.Inherit
}
return (i, o, p)
where
sox = "sox"
-- | Create a child process without piping any standard streams.
independentSox :: [String] -> IO Ex.ExitCode
independentSox args = P.rawSystem "sox" args
data Wav = Wav deriving (Read, Show)
data Snd = Snd deriving (Read, Show)
{-
class GeneralWrite what whither monad result where
write :: what -> whither -> monad result
-}
class SoxReadFile format collection sample where
soxReadFile :: format -> FilePath -> IO (Rated (collection sample))
instance SoxReadFile Wav Vu.Vector Double where
soxReadFile _ = rvreadfilewav
rvreadfilewav :: FilePath -> IO (Rated (Vu.Vector Double))
rvreadfilewav path =
slurp path
>>= conwavau
>>= return . rvreadau
rlreadfilewav :: FilePath -> IO (Rated (L Double))
rlreadfilewav = fmap (rlfromrv 0) . rvreadfilewav
| edom/sound | src/Sound/IoSox.hs | bsd-3-clause | 2,545 | 0 | 18 | 647 | 669 | 361 | 308 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module Dhall.Test.Lint where
import Data.Text (Text)
import Dhall.Parser (Header (..))
import Prelude hiding (FilePath)
import Test.Tasty (TestTree)
import Turtle (FilePath)
import qualified Data.Text as Text
import qualified Data.Text.IO as Text.IO
import qualified Dhall.Core as Core
import qualified Dhall.Lint as Lint
import qualified Dhall.Parser as Parser
import qualified Dhall.Pretty as Pretty
import qualified Dhall.Test.Util as Test.Util
import qualified Prettyprinter as Doc
import qualified Prettyprinter.Render.Text as Doc.Render.Text
import qualified Test.Tasty as Tasty
import qualified Test.Tasty.HUnit as Tasty.HUnit
import qualified Turtle
lintDirectory :: FilePath
lintDirectory = "./tests/lint"
getTests :: IO TestTree
getTests = do
lintTests <- Test.Util.discover (Turtle.chars <* "A.dhall") lintTest (Turtle.lstree lintDirectory)
let testTree = Tasty.testGroup "lint tests" [ lintTests ]
return testTree
format :: Header -> Core.Expr Parser.Src Core.Import -> Text
format (Header header) expr =
let doc = Doc.pretty header
<> Pretty.prettyCharacterSet Pretty.Unicode expr
<> "\n"
docStream = Pretty.layout doc
in
Doc.Render.Text.renderStrict docStream
lintTest :: Text -> TestTree
lintTest prefix =
Tasty.HUnit.testCase (Text.unpack prefix) $ do
let inputFile = Text.unpack (prefix <> "A.dhall")
let outputFile = Text.unpack (prefix <> "B.dhall")
inputText <- Text.IO.readFile inputFile
(header, parsedInput) <- Core.throws (Parser.exprAndHeaderFromText mempty inputText)
let actualExpression = Lint.lint parsedInput
let actualText = format header actualExpression
expectedText <- Text.IO.readFile outputFile
let message = "The linted expression did not match the expected output"
Tasty.HUnit.assertEqual message expectedText actualText
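-- A minimal sketch (illustrative, not part of the original file): run just
-- this test group on its own by handing the discovered tree to Tasty's
-- default runner.
runLintTestsStandalone :: IO ()
runLintTestsStandalone = getTests >>= Tasty.defaultMain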
| Gabriel439/Haskell-Dhall-Library | dhall/tests/Dhall/Test/Lint.hs | bsd-3-clause | 2,096 | 0 | 13 | 518 | 515 | 286 | 229 | 45 | 1 |
-- | Exports relevant functionality of the PiReader.
module Language.Java.Paragon.Monad.PiReader
( -- * Exported functionality
PiPath
, PiReader(..)
, liftToBaseM
, MonadPR(..)
, getPiPath
, doesPkgExist
, doesTypeExist
, getPkgContents
, getPiPathContents
, getTypeContents
) where
import Language.Java.Paragon.Monad.PiReader.MonadPR
import Language.Java.Paragon.Monad.PiReader.PiFunc
| bvdelft/paragon | src/Language/Java/Paragon/Monad/PiReader.hs | bsd-3-clause | 413 | 0 | 5 | 67 | 69 | 50 | 19 | 14 | 0 |
module Network.Bitcoin.Haskoin
( transactionOutputAddress
, transactionInputAddress
, getTransaction
, getTransactionOutput
, outpointAddress
, importAddress
, -- * Utility functions
addressToHex
, decodeHexTx
, hexTxHash
, hexToAddress
, transactionIdToTxHash
, -- * network-bitcoin reexports
getClient
, Client
, -- * haskoin Rexports
outputAddress
, inputAddress
) where
import Control.Monad ((<=<))
import qualified Data.ByteString.Base16 as B16
import Data.Maybe (fromMaybe)
import Data.Serialize (decode)
import Data.Text.Encoding as E
import Haskoin.Address (Address, addrToText, inputAddress,
outputAddress, scriptToAddressBS,
textToAddr)
import Haskoin.Constants (Network)
import Haskoin.Script (decodeInputBS)
import Haskoin.Transaction (OutPoint (..), Tx (..), TxHash, TxIn,
TxOut, hexToTxHash, scriptInput,
scriptOutput, txHashToHex)
import Network.Bitcoin (Client, RawTransaction, TransactionID,
getClient, getRawTransaction)
import qualified Network.Bitcoin as B
transactionOutputAddress :: TxOut -> Either String Address
transactionOutputAddress = scriptToAddressBS . scriptOutput
transactionInputAddress :: Network -> TxIn -> Either String Address
transactionInputAddress net = maybe (Left "could not decode address") Right . inputAddress <=< decodeInputBS net . scriptInput
addressToHex :: Network -> Address -> B.Address
addressToHex net = fromMaybe (error "Address encoding error") . addrToText net
hexToAddress :: Network -> B.Address -> Address
hexToAddress net = fromMaybe (error "Unable to parse address") . textToAddr net
-- | TODO Catch bad decodes
decodeHexTx :: RawTransaction -> Tx
decodeHexTx = fromRight . decode . fst . B16.decode . E.encodeUtf8
where
fromRight (Right x) = x
fromRight (Left e) = error e
hexTxHash :: TxHash -> TransactionID
hexTxHash = txHashToHex
transactionIdToTxHash :: TransactionID -> TxHash
transactionIdToTxHash = fromMaybe (error "Unable to parse txid") . hexToTxHash
-- | TODO Catch errors from bitcoind
getTransaction :: Client -> TxHash -> IO Tx
getTransaction c hash = decodeHexTx <$> getRawTransaction c (hexTxHash hash)
getTransactionOutput :: Client -> OutPoint -> IO TxOut
getTransactionOutput cl (OutPoint hash i) = (!! fromIntegral i) . txOut <$> getTransaction cl hash
outpointAddress :: Client -> OutPoint -> IO (Either String Address)
outpointAddress c op = transactionOutputAddress <$> getTransactionOutput c op
importAddress :: Client -> Network -> Address -> Maybe B.Account -> Maybe Bool -> IO ()
importAddress client net addr = B.importAddress client (addressToHex net addr)
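-- A minimal usage sketch (illustrative, not part of the original module):
-- given an already-connected 'Client', look up the address an outpoint pays
-- to and render it in the text form used by the bitcoind RPC interface.
outpointAddressHex :: Client -> Network -> OutPoint -> IO (Either String B.Address)
outpointAddressHex client net op = fmap (addressToHex net) <$> outpointAddress client op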
| WraithM/haskoin-bitcoind | src/Network/Bitcoin/Haskoin.hs | bsd-3-clause | 3,035 | 0 | 11 | 813 | 689 | 380 | 309 | -1 | -1 |
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.UI.GLUT.GameMode
-- Copyright : (c) Sven Panne 2002-2005
-- License : BSD-style (see the file libraries/GLUT/LICENSE)
--
-- Maintainer : sven.panne@aedion.de
-- Stability : stable
-- Portability : portable
--
-- In addition to the functionality offered by
-- 'Graphics.UI.GLUT.Window.fullScreen', GLUT offers a sub-API to change the
-- screen resolution, color depth, and refresh rate of the display for a single
-- full screen window. This mode of operation is called /game mode/, and is
-- restricted in various ways: No pop-up menus are allowed for this full screen
-- window, no other (sub-)windows can be created, and all other applications are
-- hidden.
--
-- /X Implementation Notes:/ Note that game mode is not fully supported in the
-- original GLUT for X; it is essentially the same as using
-- 'Graphics.UI.GLUT.Window.fullScreen'. The GLUT clone freeglut
-- (see <http://freeglut.sourceforge.net/>) does not have this restriction.
--
--------------------------------------------------------------------------------
module Graphics.UI.GLUT.GameMode (
GameModeCapability(..), GameModeCapabilityDescription(..),
gameModeCapabilities, enterGameMode, leaveGameMode,
BitsPerPlane, RefreshRate, GameModeInfo(..), gameModeInfo,
gameModeActive
) where
import Data.List ( intersperse )
import Foreign.C.String ( CString, withCString )
import Foreign.C.Types ( CInt )
import Graphics.Rendering.OpenGL.GL.BasicTypes ( GLenum )
import Graphics.Rendering.OpenGL.GL.CoordTrans ( Size(..) )
import Graphics.Rendering.OpenGL.GL.StateVar (
GettableStateVar, makeGettableStateVar,
SettableStateVar, makeSettableStateVar )
import Graphics.UI.GLUT.Constants (
glut_GAME_MODE_DISPLAY_CHANGED, glut_GAME_MODE_POSSIBLE,
glut_GAME_MODE_WIDTH, glut_GAME_MODE_HEIGHT,
glut_GAME_MODE_PIXEL_DEPTH, glut_GAME_MODE_REFRESH_RATE,
glut_GAME_MODE_ACTIVE )
import Graphics.UI.GLUT.Types ( makeWindow, relationToString )
import Graphics.UI.GLUT.Window ( Window )
import Graphics.UI.GLUT.Initialization ( Relation(..) )
--------------------------------------------------------------------------------
-- | Capabilities for 'gameModeCapabilities'
data GameModeCapability
= GameModeWidth -- ^ Width of the screen resolution in pixels
| GameModeHeight -- ^ Height of the screen resolution in pixels
| GameModeBitsPerPlane -- ^ Color depth of the screen in bits
| GameModeRefreshRate -- ^ Refresh rate in Hertz
| GameModeNum -- ^ Match the Nth frame buffer configuration
-- compatible with the given capabilities
-- (numbering starts at 1)
deriving ( Eq, Ord, Show )
gameModeCapabilityToString :: GameModeCapability -> String
gameModeCapabilityToString x = case x of
GameModeWidth -> "width"
GameModeHeight -> "height"
GameModeBitsPerPlane -> "bpp"
GameModeRefreshRate -> "hertz"
GameModeNum -> "num"
-- | A single capability description for 'gameModeCapabilities'.
data GameModeCapabilityDescription = Where' GameModeCapability Relation Int
deriving ( Eq, Ord, Show )
gameModeCapabilityDescriptionToString :: GameModeCapabilityDescription -> String
gameModeCapabilityDescriptionToString (Where' c r i) =
gameModeCapabilityToString c ++ relationToString r ++ show i
--------------------------------------------------------------------------------
-- | Controls the /game mode/ to be used when 'enterGameMode' is called. It is
-- described by a list of zero or more capability descriptions, which are
-- translated into a set of criteria used to select the appropriate screen
-- configuration. The criteria are matched in strict left to right order of
-- precedence. That is, the first specified criterion (leftmost) takes precedence
-- over the later criteria for non-exact criteria
-- ('Graphics.UI.GLUT.Initialization.IsGreaterThan',
-- 'Graphics.UI.GLUT.Initialization.IsLessThan', etc.). Exact criteria
-- ('Graphics.UI.GLUT.Initialization.IsEqualTo',
-- 'Graphics.UI.GLUT.Initialization.IsNotEqualTo') must match exactly so
-- precedence is not relevant.
--
-- To determine which configuration will actually be tried by 'enterGameMode'
-- (if any), use 'gameModeInfo'.
--
-- Note that even for game mode the current values of
-- 'Graphics.UI.GLUT.Initialization.initialDisplayMode' or
-- 'Graphics.UI.GLUT.Initialization.initialDisplayCapabilities' will
-- determine which buffers are available, if double buffering is used or not,
-- etc.
gameModeCapabilities :: SettableStateVar [GameModeCapabilityDescription]
gameModeCapabilities = makeSettableStateVar $ \ds ->
withCString (descriptionsToString ds) glutGameModeString
foreign import CALLCONV unsafe "glutGameModeString" glutGameModeString ::
CString -> IO ()
-- freeglut currently handles only simple game mode descriptions like "WxH:B@R",
-- so we try hard to use this format instead of the more general format allowed
-- by the "real" GLUT.
descriptionsToString :: [GameModeCapabilityDescription] -> String
descriptionsToString ds =
let ws = [ x | Where' GameModeWidth IsEqualTo x <- ds ]
hs = [ x | Where' GameModeHeight IsEqualTo x <- ds ]
bs = [ x | Where' GameModeBitsPerPlane IsEqualTo x <- ds ]
rs = [ x | Where' GameModeRefreshRate IsEqualTo x <- ds ]
allSimple = (length ws + length hs + length bs + length rs) == (length ds)
dimensionsOK = (null ws) == (null hs)
in if allSimple && dimensionsOK
then simpleCapStr ws hs bs rs
else generalCapStr ds
simpleCapStr :: [Int] -> [Int] -> [Int] -> [Int] -> String
simpleCapStr ws hs bs rs =
showCap "" ws ++ showCap "x" hs ++ showCap ":" bs ++ showCap "@" rs
where showCap _ [] = ""
showCap prefix (x:_) = prefix ++ show x
generalCapStr :: [GameModeCapabilityDescription] -> String
generalCapStr =
concat . intersperse " " . map gameModeCapabilityDescriptionToString
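-- A small illustration (not part of the original module), exercising the
-- simple "WxH:B@R" path above: this description list is rendered by
-- 'descriptionsToString' as "800x600:32@75".
exampleGameModeString :: String
exampleGameModeString = descriptionsToString
   [ Where' GameModeWidth        IsEqualTo 800
   , Where' GameModeHeight       IsEqualTo 600
   , Where' GameModeBitsPerPlane IsEqualTo 32
   , Where' GameModeRefreshRate  IsEqualTo 75 ]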
--------------------------------------------------------------------------------
-- | Enter /game mode/, trying to change resolution, refresh rate, etc., as
-- specified by the current value of 'gameModeCapabilities'. An identifier for
-- the game mode window and a flag, indicating if the display mode actually
-- changed, are returned. The game mode window is made the /current window/.
--
-- Re-entering /game mode/ is allowed; the previous game mode window is
-- destroyed and a new one is created.
enterGameMode :: IO (Window, Bool)
enterGameMode = do
w <- glutEnterGameMode
c <- getBool glut_GAME_MODE_DISPLAY_CHANGED
return (makeWindow w, c)
foreign import CALLCONV unsafe "glutEnterGameMode" glutEnterGameMode :: IO CInt
--------------------------------------------------------------------------------
-- | Leave /game mode/, restoring the old display mode and destroying the game
-- mode window.
foreign import CALLCONV unsafe "glutLeaveGameMode" leaveGameMode :: IO ()
--------------------------------------------------------------------------------
-- | The color depth of the screen, measured in bits (e.g. 8, 16, 24, 32, ...)
type BitsPerPlane = Int
-- | The refresh rate of the screen, measured in Hertz (e.g. 60, 75, 100, ...)
type RefreshRate = Int
data GameModeInfo = GameModeInfo Size BitsPerPlane RefreshRate
deriving ( Eq, Ord, Show )
--------------------------------------------------------------------------------
-- | Return 'Just' the mode which would be tried by the next call to
-- 'enterGameMode'. Returns 'Nothing' if the mode requested by the current value
-- of 'gameModeCapabilities' is not possible, in which case 'enterGameMode'
-- would simply create a full screen window using the current mode.
gameModeInfo :: GettableStateVar (Maybe GameModeInfo)
gameModeInfo = makeGettableStateVar $ do
possible <- getBool glut_GAME_MODE_POSSIBLE
if possible
then do
w <- glutGameModeGet glut_GAME_MODE_WIDTH
h <- glutGameModeGet glut_GAME_MODE_HEIGHT
let size = Size (fromIntegral w) (fromIntegral h)
b <- glutGameModeGet glut_GAME_MODE_PIXEL_DEPTH
r <- glutGameModeGet glut_GAME_MODE_REFRESH_RATE
return $ Just $ GameModeInfo size (fromIntegral b) (fromIntegral r)
else return Nothing
getBool :: GLenum -> IO Bool
getBool = fmap (/= 0) . glutGameModeGet
foreign import CALLCONV unsafe "glutGameModeGet" glutGameModeGet ::
GLenum -> IO CInt
--------------------------------------------------------------------------------
-- | Contains 'True' when the /game mode/ is active, 'False' otherwise.
gameModeActive :: GettableStateVar Bool
gameModeActive = makeGettableStateVar $ getBool glut_GAME_MODE_ACTIVE
| FranklinChen/hugs98-plus-Sep2006 | packages/GLUT/Graphics/UI/GLUT/GameMode.hs | bsd-3-clause | 8,827 | 82 | 9 | 1,494 | 1,183 | 684 | 499 | -1 | -1 |
{-# LANGUAGE InstanceSigs #-}
module Builder (
-- * Data types
ArMode (..), CcMode (..), ConfigurationInfo (..), GhcMode (..),
GhcPkgMode (..), HaddockMode (..), SphinxMode (..), TarMode (..),
Builder (..),
-- * Builder properties
builderProvenance, systemBuilderPath, builderPath, isSpecified, needBuilder,
runBuilder, runBuilderWith, runBuilderWithCmdOptions, getBuilderPath,
builderEnvironment,
-- * Ad hoc builder invocation
applyPatch
) where
import Development.Shake.Classes
import GHC.Generics
import qualified Hadrian.Builder as H
import Hadrian.Builder hiding (Builder)
import Hadrian.Builder.Ar
import Hadrian.Builder.Sphinx
import Hadrian.Builder.Tar
import Hadrian.Oracles.Path
import Hadrian.Oracles.TextFile
import Hadrian.Utilities
import Base
import Context
import Oracles.Flag
import Packages
-- | C compiler can be used in two different modes:
-- * Compile or preprocess a source file.
-- * Extract source dependencies by passing @-MM@ command line argument.
data CcMode = CompileC | FindCDependencies deriving (Eq, Generic, Show)
instance Binary CcMode
instance Hashable CcMode
instance NFData CcMode
-- | GHC can be used in four different modes:
-- * Compile a Haskell source file.
-- * Compile a C source file.
-- * Extract source dependencies by passing @-M@ command line argument.
-- * Link object files & static libraries into an executable.
data GhcMode = CompileHs
| CompileCWithGhc
| FindHsDependencies
| LinkHs
| ToolArgs
deriving (Eq, Generic, Show)
instance Binary GhcMode
instance Hashable GhcMode
instance NFData GhcMode
-- | To configure a package we need two pieces of information, which we choose
-- to record separately for convenience.
--
-- * Command line arguments to be passed to the setup script.
--
-- * Package configuration flags that enable/disable certain package features.
-- Here is an example from "Settings.Packages":
--
-- > package rts
-- > ? builder (Cabal Flags)
-- > ? any (wayUnit Profiling) rtsWays
-- > ? arg "profiling"
--
-- This instructs package configuration functions (such as 'configurePackage')
-- to enable the @profiling@ Cabal flag when processing @rts.cabal@ and
-- building RTS with profiling information.
data ConfigurationInfo = Setup | Flags deriving (Eq, Generic, Show)
instance Binary ConfigurationInfo
instance Hashable ConfigurationInfo
instance NFData ConfigurationInfo
-- TODO: Do we really need all these modes? Why do we need 'Dependencies'? We
-- can extract dependencies using the Cabal library. Note: we used to also have
-- the @Init@ mode for initialising a new package database but we've deleted it.
-- | 'GhcPkg' can initialise a package database and register packages in it.
data GhcPkgMode = Copy -- ^ Copy a package from one database to another.
| Dependencies -- ^ Compute package dependencies.
| Unregister -- ^ Unregister a package.
| Update -- ^ Update a package.
deriving (Eq, Generic, Show)
instance Binary GhcPkgMode
instance Hashable GhcPkgMode
instance NFData GhcPkgMode
-- | Haddock can be used in two different modes:
-- * Generate documentation for a single package
-- * Generate an index page for a collection of packages
data HaddockMode = BuildPackage | BuildIndex deriving (Eq, Generic, Show)
instance Binary HaddockMode
instance Hashable HaddockMode
instance NFData HaddockMode
-- | A 'Builder' is a (usually external) command invoked in a separate process
-- via 'cmd'. Here are some examples:
-- * 'Alex' is a lexical analyser generator that builds @Lexer.hs@ from @Lexer.x@.
-- * 'Ghc' 'Stage0' is the bootstrapping Haskell compiler used in 'Stage0'.
-- * 'Ghc' @StageN@ (N > 0) is the GHC built in stage (N - 1) and used in @StageN@.
--
-- The 'Cabal' builder is unusual in that it does not correspond to an external
-- program but instead relies on the Cabal library for package configuration.
data Builder = Alex
| Ar ArMode Stage
| Autoreconf FilePath
| Cabal ConfigurationInfo Stage
| Cc CcMode Stage
| Configure FilePath
| DeriveConstants
| GenApply
| GenPrimopCode
| Ghc GhcMode Stage
| GhcPkg GhcPkgMode Stage
| Haddock HaddockMode
| Happy
| Hp2Ps
| Hpc
| HsCpp
| Hsc2Hs Stage
| Ld Stage
| Make FilePath
| Makeinfo
| Nm
| Objdump
| Patch
| Python
| Ranlib
| RunTest
| Sphinx SphinxMode
| Tar TarMode
| Unlit
| Xelatex
deriving (Eq, Generic, Show)
instance Binary Builder
instance Hashable Builder
instance NFData Builder
-- | Some builders are built by this very build system, in which case
-- 'builderProvenance' returns the corresponding build 'Context' (which includes
-- 'Stage' and GHC 'Package').
builderProvenance :: Builder -> Maybe Context
builderProvenance = \case
DeriveConstants -> context Stage0 deriveConstants
GenApply -> context Stage0 genapply
GenPrimopCode -> context Stage0 genprimopcode
Ghc _ Stage0 -> Nothing
Ghc _ stage -> context (pred stage) ghc
GhcPkg _ Stage0 -> Nothing
GhcPkg _ s -> context (pred s) ghcPkg
Haddock _ -> context Stage1 haddock
Hpc -> context Stage1 hpcBin
Hp2Ps -> context Stage0 hp2ps
Hsc2Hs _ -> context Stage0 hsc2hs
Unlit -> context Stage0 unlit
_ -> Nothing
where
context s p = Just $ vanillaContext s p
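-- A small illustration (a sketch, not part of the original module): the
-- stage-1 GHC is itself built during stage 0, so its provenance is the
-- stage-0 vanilla context of the ghc package, whereas the bootstrap
-- (stage-0) GHC comes from the host system and has no provenance:
--
-- > builderProvenance (Ghc CompileHs Stage1) == Just (vanillaContext Stage0 ghc)
-- > builderProvenance (Ghc CompileHs Stage0) == Nothing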
instance H.Builder Builder where
builderPath :: Builder -> Action FilePath
builderPath builder = case builderProvenance builder of
Nothing -> systemBuilderPath builder
Just context -> programPath context
runtimeDependencies :: Builder -> Action [FilePath]
runtimeDependencies = \case
Autoreconf dir -> return [dir -/- "configure.ac"]
Configure dir -> return [dir -/- "configure"]
Ghc _ Stage0 -> includesDependencies Stage0
Ghc _ stage -> do
root <- buildRoot
touchyPath <- programPath (vanillaContext Stage0 touchy)
unlitPath <- builderPath Unlit
ghcgens <- includesDependencies stage
-- GHC from the previous stage is used to build artifacts in the
-- current stage. Need the previous stage's GHC deps.
ghcdeps <- ghcBinDeps (pred stage)
return $ [ unlitPath ]
++ ghcdeps
++ ghcgens
++ [ touchyPath | windowsHost ]
++ [ root -/- mingwStamp | windowsHost ]
-- proxy for the entire mingw toolchain that
-- we have in inplace/mingw initially, and then at
-- root -/- mingw.
Hsc2Hs stage -> (\p -> [p]) <$> templateHscPath stage
Make dir -> return [dir -/- "Makefile"]
Haddock _ -> haddockDeps Stage1 -- Haddock currently runs in Stage1
_ -> return []
-- Query the builder for some information.
-- Contrast this with 'runBuilderWith', which returns @Action ()@;
-- this returns the @stdout@ from running the builder.
-- For now this only implements asking @ghc-pkg@ about package
-- dependencies.
askBuilderWith :: Builder -> BuildInfo -> Action String
askBuilderWith builder BuildInfo {..} = case builder of
GhcPkg Dependencies _ -> do
let input = fromSingleton msgIn buildInputs
msgIn = "[askBuilder] Exactly one input file expected."
needBuilder builder
path <- H.builderPath builder
need [path]
Stdout stdout <- cmd [path] ["--no-user-package-db", "field", input, "depends"]
return stdout
_ -> error $ "Builder " ++ show builder ++ " can not be asked!"
runBuilderWith :: Builder -> BuildInfo -> Action ()
runBuilderWith builder BuildInfo {..} = do
path <- builderPath builder
withResources buildResources $ do
verbosity <- getVerbosity
let input = fromSingleton msgIn buildInputs
msgIn = "[runBuilderWith] Exactly one input file expected."
output = fromSingleton msgOut buildOutputs
msgOut = "[runBuilderWith] Exactly one output file expected."
-- Suppress stdout depending on Shake's verbosity setting.
echo = EchoStdout (verbosity >= Loud)
-- Capture stdout and write it to the output file.
captureStdout = do
Stdout stdout <- cmd [path] buildArgs
writeFileChanged output stdout
case builder of
Ar Pack _ -> do
useTempFile <- flag ArSupportsAtFile
if useTempFile then runAr path buildArgs
else runArWithoutTempFile path buildArgs
Ar Unpack _ -> cmd echo [Cwd output] [path] buildArgs
Autoreconf dir -> cmd echo [Cwd dir] ["sh", path] buildArgs
Configure dir -> do
-- Inject /bin/bash into `libtool`, instead of /bin/sh,
-- otherwise Windows breaks. TODO: Figure out why.
bash <- bashPath
let env = AddEnv "CONFIG_SHELL" bash
cmd echo env [Cwd dir] ["sh", path] buildOptions buildArgs
GenApply -> captureStdout
GenPrimopCode -> do
stdin <- readFile' input
Stdout stdout <- cmd (Stdin stdin) [path] buildArgs
writeFileChanged output stdout
GhcPkg Copy _ -> do
Stdout pkgDesc <- cmd [path]
[ "--expand-pkgroot"
, "--no-user-package-db"
, "describe"
, input -- the package name
]
cmd (Stdin pkgDesc) [path] (buildArgs ++ ["-"])
GhcPkg Unregister _ -> do
Exit _ <- cmd echo [path] (buildArgs ++ [input])
return ()
HsCpp -> captureStdout
Make dir -> cmd echo path ["-C", dir] buildArgs
Makeinfo -> do
cmd echo [path] "--no-split" [ "-o", output] [input]
Xelatex -> do
unit $ cmd [Cwd output] [path] buildArgs
unit $ cmd [Cwd output] [path] buildArgs
unit $ cmd [Cwd output] [path] buildArgs
unit $ cmd [Cwd output] ["makeindex"] (input -<.> "idx")
unit $ cmd [Cwd output] [path] buildArgs
unit $ cmd [Cwd output] [path] buildArgs
_ -> cmd echo [path] buildArgs
-- TODO: Some builders are required only on certain platforms. For example,
-- 'Objdump' is only required on OpenBSD and AIX. Add support for platform
-- specific optional builders as soon as we can reliably test this feature.
-- See https://github.com/snowleopard/hadrian/issues/211.
isOptional :: Builder -> Bool
isOptional = \case
Objdump -> True
_ -> False
-- | Determine the location of a system 'Builder'.
systemBuilderPath :: Builder -> Action FilePath
systemBuilderPath builder = case builder of
Alex -> fromKey "alex"
Ar _ Stage0 -> fromKey "system-ar"
Ar _ _ -> fromKey "ar"
Autoreconf _ -> stripExe =<< fromKey "autoreconf"
Cc _ Stage0 -> fromKey "system-cc"
Cc _ _ -> fromKey "cc"
-- We can't ask configure for the path to configure!
Configure _ -> return "configure"
Ghc _ Stage0 -> fromKey "system-ghc"
GhcPkg _ Stage0 -> fromKey "system-ghc-pkg"
Happy -> fromKey "happy"
HsCpp -> fromKey "hs-cpp"
Ld _ -> fromKey "ld"
Make _ -> fromKey "make"
Makeinfo -> fromKey "makeinfo"
Nm -> fromKey "nm"
Objdump -> fromKey "objdump"
Patch -> fromKey "patch"
Python -> fromKey "python"
Ranlib -> fromKey "ranlib"
RunTest -> fromKey "python"
Sphinx _ -> fromKey "sphinx-build"
Tar _ -> fromKey "tar"
Xelatex -> fromKey "xelatex"
_ -> error $ "No entry for " ++ show builder ++ inCfg
where
inCfg = " in " ++ quote configFile ++ " file."
fromKey key = do
let unpack = fromMaybe . error $ "Cannot find path to builder "
++ quote key ++ inCfg ++ " Did you skip configure?"
path <- unpack <$> lookupValue configFile key
if null path
then do
unless (isOptional builder) . error $ "Non optional builder "
++ quote key ++ " is not specified" ++ inCfg
return "" -- TODO: Use a safe interface.
else do
fullPath <- lookupInPath path
case (windowsHost, hasExtension fullPath) of
(False, _ ) -> return fullPath
(True , True ) -> fixAbsolutePathOnWindows fullPath
(True , False) -> fixAbsolutePathOnWindows fullPath <&> (<.> exe)
-- Without this function, on Windows we can observe a bad builder path
-- for 'autoreconf'. If the relevant system.config field is set to
-- /usr/bin/autoreconf in the file, the path that we read
-- is C:/msys64/usr/bin/autoreconf.exe. A standard msys2 set up happens
-- to have an executable named 'autoreconf' there, without the 'exe'
-- extension. Hence this function.
stripExe s = do
let sNoExt = dropExtension s
exists <- doesFileExist s
if exists then return s else return sNoExt
-- | Was the path to a given system 'Builder' specified in configuration files?
isSpecified :: Builder -> Action Bool
isSpecified = fmap (not . null) . systemBuilderPath
-- | Apply a patch by executing the 'Patch' builder in a given directory.
applyPatch :: FilePath -> FilePath -> Action ()
applyPatch dir patch = do
let file = dir -/- patch
needBuilder Patch
path <- builderPath Patch
putBuild $ "| Apply patch " ++ file
quietly $ cmd [Cwd dir, FileStdin file] [path, "-p0"]
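-- A minimal usage sketch (illustrative, not part of the original module); the
-- directory and patch file name below are hypothetical:
applyPatchExample :: Action ()
applyPatchExample = applyPatch "libraries/terminfo" "terminfo.patch"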
| sdiehl/ghc | hadrian/src/Builder.hs | bsd-3-clause | 14,711 | 0 | 20 | 4,764 | 2,773 | 1,417 | 1,356 | -1 | -1 |
-- {-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE CPP #-}
module Data.List.CommonSubstringSpec where
import Test.Hspec
import Control.Applicative
import qualified Data.ByteString.Char8 as BS
import Control.Monad(forM_)
import qualified Data.List.CommonSubstring as Suffix
import qualified Data.List.SlowSubstring as Slow
-- this test may not be accurate if there are many equal-length
-- longest common substrings
spec :: Spec
spec = describe "common substring" $ do
describe "suffixtree implementation" $ do
let samples = [("abcd", "aaaabcd")]
forM_ [("suffixtree", Suffix.longestSubstring)] $ \(label,f) ->
it ("should work for " ++ label) $
forM_ samples $ \(first, second) ->
f first second `shouldBe` Slow.longestSubstring first second
| mwotton/string-similarity | test/Data/List/CommonSubstringSpec.hs | bsd-3-clause | 784 | 0 | 17 | 138 | 185 | 109 | 76 | 16 | 1 |
{-# LANGUAGE Rank2Types #-}
{-|
Parallel stochastic sampling for 'mwc-random' package.
-}
module Control.Parallel.Stochastic
( purifyRandomST
, ParallelSeeds
, parMapST
, splitParMapST
)
where
import Control.Monad.ST
import Control.Parallel.Strategies
import System.Random.MWC
import Data.Splittable
-- | Convert ST action with PRNG state into a pure function of seed.
purifyRandomST :: (forall s.GenST s -> ST s a) -> Seed -> (a, Seed)
purifyRandomST f seed = runST $ do
g <- restore seed
r <- f g
g' <- save g
return (r, g')
{-# INLINE purifyRandomST #-}
type RandomFunction source result = (forall s.GenST s -> source -> ST s result)
-- | 'parMap' with 'rpar' over list of data and initial seeds using ST
-- action which takes single PRNG state; produce list of results and
-- used seeds.
parMapST :: RandomFunction a b -> [(a, Seed)] -> [(b, Seed)]
parMapST f = parMap rpar (\(p, seed) -> purifyRandomST (`f` p) seed)
{-# INLINE parMapST #-}
-- | Split the given source, process subsources in parallel, return
-- combined results and used seeds.
splitParMapST :: (Split source, Combine result) =>
RandomFunction source result
-> source
-> ParallelSeeds
-> (result, ParallelSeeds)
splitParMapST f wholeSource oldSeeds =
let
sources = (splitIn (length oldSeeds) wholeSource)
(results, newSeeds) = unzip $ parMapST f $ zip sources oldSeeds
in
(combine results, newSeeds)
{-# INLINE splitParMapST #-}
-- | List of seeds which preserve PRNG states between runs of parallel
-- stochastic process sampling.
type ParallelSeeds = [Seed]
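-- A minimal usage sketch (illustrative, not part of the original module):
-- draw one standard uniform Double per seed, in parallel across the seed
-- list.  'uniform' comes from System.Random.MWC, which is already imported.
sampleUniform :: ParallelSeeds -> [(Double, Seed)]
sampleUniform seeds = parMapST (\g () -> uniform g) [((), s) | s <- seeds]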
| dzhus/dsmc | src/Control/Parallel/Stochastic.hs | bsd-3-clause | 1,763 | 0 | 12 | 473 | 384 | 215 | 169 | 33 | 1 |
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# OPTIONS -Wall #-}
module IterX.Exception (
IterException
,IException(..)
,iExceptionToException
,iExceptionFromException
,TerminateEarly(..)
,isTerminateEarly
,terminateEarlyHandler
,IterFailure(..)
,isIterFailure
) where
import Control.Exception
import Data.Data
-- | The root exception type; all exceptions defined here are descendants of 'IterException'.
data IterException = forall e . Exception e => IterException e
deriving (Typeable)
instance Show IterException where
show (IterException e) = show e
instance Exception IterException
iExceptionToException :: Exception e => e -> SomeException
iExceptionToException = toException . IterException
iExceptionFromException :: Exception e => SomeException -> Maybe e
iExceptionFromException x = do
IterException a <- fromException x
cast a
class Exception e => IException e where
toIterException :: e -> IterException
toIterException = IterException
fromIterException :: IterException -> Maybe e
fromIterException = fromException . toException
-- | The consumer has indicated that it's done, and the generator should
-- terminate (further values will have no effect)
data TerminateEarly = TerminateEarly String
deriving (Typeable, Show)
instance Exception TerminateEarly where
toException = iExceptionToException
fromException = iExceptionFromException
instance IException TerminateEarly
isTerminateEarly :: Exception e => e -> Bool
isTerminateEarly e = case fromException $ toException e of
Just (TerminateEarly _) -> True
_ -> False
terminateEarlyHandler :: Monad m => TerminateEarly -> m Bool
terminateEarlyHandler _ = return False
-- | The consumer has indicated that there is some sort of problem
-- and continuation is impossible
data IterFailure = IterFailure String
deriving (Typeable, Show)
instance Exception IterFailure where
toException = iExceptionToException
fromException = iExceptionFromException
instance IException IterFailure
isIterFailure :: Exception e => e -> Bool
isIterFailure e = case fromException $ toException e of
Just (IterFailure _) -> True
_ -> False
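-- A minimal sketch (illustrative, not part of the original module) showing
-- how a new exception type is hooked into this hierarchy: route toException
-- and fromException through the 'IterException' parent, exactly as
-- 'TerminateEarly' and 'IterFailure' do above.  The name 'ExampleFailure' is
-- made up for illustration.
data ExampleFailure = ExampleFailure String
    deriving (Typeable, Show)
instance Exception ExampleFailure where
    toException = iExceptionToException
    fromException = iExceptionFromException
instance IException ExampleFailure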
| JohnLato/iterx | src/IterX/Exception.hs | bsd-3-clause | 2,219 | 0 | 10 | 405 | 479 | 251 | 228 | 53 | 2 |
-- | Assorted conditions used later on in AI logic.
module Game.LambdaHack.Client.AI.ConditionM
( condAimEnemyTargetedM
, condAimEnemyOrStashM
, condAimEnemyOrRememberedM
, condAimNonEnemyPresentM
, condAimCrucialM
, condTgtNonmovingEnemyM
, condAdjTriggerableM
, meleeThreatDistList
, condBlocksFriendsM
, condFloorWeaponM
, condNoEqpWeaponM
, condCanProjectM
, condProjectListM
, benAvailableItems
, hinders
, condDesirableFloorItemM
, benGroundItems
, desirableItem
, condSupport
, condAloneM
, condShineWouldBetrayM
, fleeList
) where
import Prelude ()
import Game.LambdaHack.Core.Prelude
import qualified Data.EnumMap.Strict as EM
import Game.LambdaHack.Client.Bfs
import Game.LambdaHack.Client.MonadClient
import Game.LambdaHack.Client.State
import Game.LambdaHack.Common.Actor
import Game.LambdaHack.Common.ActorState
import Game.LambdaHack.Common.Faction
import Game.LambdaHack.Common.Item
import qualified Game.LambdaHack.Common.ItemAspect as IA
import Game.LambdaHack.Common.Kind
import Game.LambdaHack.Common.Level
import Game.LambdaHack.Common.MonadStateRead
import Game.LambdaHack.Common.Point
import Game.LambdaHack.Common.ReqFailure
import Game.LambdaHack.Common.State
import qualified Game.LambdaHack.Common.Tile as Tile
import Game.LambdaHack.Common.Time
import Game.LambdaHack.Common.Types
import Game.LambdaHack.Common.Vector
import Game.LambdaHack.Content.FactionKind
import qualified Game.LambdaHack.Content.ItemKind as IK
import qualified Game.LambdaHack.Content.RuleKind as RK
import qualified Game.LambdaHack.Core.Dice as Dice
import qualified Game.LambdaHack.Definition.Ability as Ability
import Game.LambdaHack.Definition.Defs
-- All conditions are (partially) lazy, because they are not always
-- used in the strict monadic computations they are in.
-- | Require that a target enemy is visible by the party.
condAimEnemyTargetedM :: MonadClientRead m => ActorId -> m Bool
condAimEnemyTargetedM aid = do
btarget <- getsClient $ getTarget aid
return $ case btarget of
Just (TEnemy _) -> True
_ -> False
-- | Require that a target enemy or enemy stash is visible by the party.
condAimEnemyOrStashM :: MonadClientRead m => ActorId -> m Bool
condAimEnemyOrStashM aid = do
btarget <- getsClient $ getTarget aid
return $ case btarget of
Just (TEnemy _) -> True
Just (TPoint (TStash _) _ _) -> True -- speedup from: lid == blid b
_ -> False
-- | Require that a target enemy is remembered on the actor's level.
condAimEnemyOrRememberedM :: MonadClientRead m => ActorId -> m Bool
condAimEnemyOrRememberedM aid = do
b <- getsState $ getActorBody aid
btarget <- getsClient $ getTarget aid
return $ case btarget of
Just (TEnemy _) -> True
Just (TPoint (TEnemyPos _) lid _) -> lid == blid b
Just (TPoint (TStash _) lid _) -> lid == blid b
_ -> False
-- | Require that a target non-enemy is visible by the party.
condAimNonEnemyPresentM :: MonadClientRead m => ActorId -> m Bool
condAimNonEnemyPresentM aid = do
btarget <- getsClient $ getTarget aid
return $ case btarget of
Just (TNonEnemy _) -> True
_ -> False
-- | Require that the target is crucial to success, e.g., an item,
-- or that it's not too far away and so the chances to get it are high.
condAimCrucialM :: MonadClientRead m => ActorId -> m Bool
condAimCrucialM aid = do
b <- getsState $ getActorBody aid
mtgtMPath <- getsClient $ EM.lookup aid . stargetD
return $ case mtgtMPath of
Just TgtAndPath{tapTgt=TEnemy _} -> True
Just TgtAndPath{tapTgt=TPoint tgoal lid _, tapPath=Just AndPath{pathLen}} ->
lid == blid b
&& (pathLen < 10 -- close enough to get there first
|| tgoal `notElem` [TUnknown, TKnown])
Just TgtAndPath{tapTgt=TVector{}, tapPath=Just AndPath{pathLen}} ->
pathLen < 7 -- can't say if the target important, but the constants
-- from @take6@ and @traSlack7@ ensure target is
-- already approached or close to level edge
-- or not a random @traSlack7@ wandering
_ -> False -- includes the case of target with no path
-- | Check if the target is a nonmoving enemy.
condTgtNonmovingEnemyM :: MonadClientRead m => ActorId -> m Bool
condTgtNonmovingEnemyM aid = do
btarget <- getsClient $ getTarget aid
case btarget of
Just (TEnemy enemy) -> do
actorMaxSk <- getsState $ getActorMaxSkills enemy
return $ Ability.getSk Ability.SkMove actorMaxSk <= 0
_ -> return False
-- | Require the actor stands on or adjacent to a triggerable tile
-- (e.g., stairs).
condAdjTriggerableM :: MonadStateRead m => Ability.Skills -> ActorId -> m Bool
condAdjTriggerableM actorSk aid = do
COps{coTileSpeedup} <- getsState scops
b <- getsState $ getActorBody aid
lvl <- getLevel $ blid b
let alterSkill = Ability.getSk Ability.SkAlter actorSk
alterMinSkill p = Tile.alterMinSkill coTileSpeedup $ lvl `at` p
underFeet p = p == bpos b -- if enter and alter, be more permissive
-- Before items are applied (which AI attempts even if apply
-- skills are too low), the tile must be alterable, hence both checks.
hasTriggerable p = (underFeet p
|| alterSkill >= fromEnum (alterMinSkill p))
&& p `EM.member` lembed lvl
return $ any hasTriggerable $ bpos b : vicinityUnsafe (bpos b)
-- | Produce the chess-distance-sorted list of non-low-HP,
-- melee-capable foes on the level. We don't consider path-distance,
-- because we are interested in how soon the foe can close in to hit us,
-- which can diverge greatly from path distance for short distances,
-- e.g., when terrain gets revealed. We don't consider non-moving actors,
-- because they can't chase us and also because they can't be aggressive,
-- so to resolve the stalemate, the opposing AI has to be aggressive
-- by ignoring them and closing in to melee distance.
meleeThreatDistList :: [(ActorId, Actor)] -> ActorId -> State
-> [(Int, (ActorId, Actor))]
meleeThreatDistList foeAssocs aid s =
let actorMaxSkills = sactorMaxSkills s
b = getActorBody aid s
strongActor (aid2, b2) =
let actorMaxSk = actorMaxSkills EM.! aid2
nonmoving = Ability.getSk Ability.SkMove actorMaxSk <= 0
in not (hpTooLow b2 actorMaxSk || nonmoving)
&& actorCanMeleeToHarm actorMaxSkills aid2 b2
allThreats = filter strongActor foeAssocs
addDist (aid2, b2) = (chessDist (bpos b) (bpos b2), (aid2, b2))
in sortBy (comparing fst) $ map addDist allThreats
-- | Require the actor blocks the paths of any of his party members.
condBlocksFriendsM :: MonadClientRead m => ActorId -> m Bool
condBlocksFriendsM aid = do
b <- getsState $ getActorBody aid
targetD <- getsClient stargetD
let blocked aid2 = aid2 /= aid &&
case EM.lookup aid2 targetD of
Just TgtAndPath{tapPath=Just AndPath{pathList=q : _}} | q == bpos b ->
True
_ -> False
any blocked <$> getsState (fidActorRegularIds (bfid b) (blid b))
-- | Require the actor stands over a weapon that would be auto-equipped,
-- if only it was a desirable item (checked elsewhere).
condFloorWeaponM :: MonadStateRead m => ActorId -> m Bool
condFloorWeaponM aid =
any (IA.checkFlag Ability.Meleeable . aspectRecordFull . snd) <$>
getsState (fullAssocs aid [CGround])
-- | Check whether the actor has no weapon in equipment.
condNoEqpWeaponM :: MonadStateRead m => ActorId -> m Bool
condNoEqpWeaponM aid =
not . any (IA.checkFlag Ability.Meleeable . aspectRecordFull . snd) <$>
getsState (fullAssocs aid [CEqp])
-- | Require that the actor can project any items.
condCanProjectM :: MonadClientRead m => Int -> ActorId -> m Bool
condCanProjectM skill aid = do
side <- getsClient sside
curChal <- getsClient scurChal
fact <- getsState $ (EM.! side) . sfactionD
if skill < 1
|| ckeeper curChal && fhasUI (gkind fact)
then return False
else -- shortcut
-- Compared to conditions in @projectItem@, range and charge are ignored,
-- because they may change by the time the position for the fling
-- is reached.
not . null <$> condProjectListM skill aid
condProjectListM :: MonadClientRead m
=> Int -> ActorId
-> m [(Double, CStore, ItemId, ItemFull, ItemQuant)]
condProjectListM skill aid = do
condShineWouldBetray <- condShineWouldBetrayM aid
condAimEnemyOrRemembered <- condAimEnemyOrRememberedM aid
discoBenefit <- getsClient sdiscoBenefit
getsState $ projectList discoBenefit skill aid
condShineWouldBetray condAimEnemyOrRemembered
projectList :: DiscoveryBenefit -> Int -> ActorId -> Bool -> Bool -> State
-> [(Double, CStore, ItemId, ItemFull, ItemQuant)]
projectList discoBenefit skill aid
condShineWouldBetray condAimEnemyOrRemembered s =
let b = getActorBody aid s
actorMaxSk = getActorMaxSkills aid s
calmE = calmEnough b actorMaxSk
heavilyDistressed = -- Actor hit by a projectile or similarly distressed.
deltasSerious (bcalmDelta b)
uneasy = condAimEnemyOrRemembered
|| not calmE
|| heavilyDistressed
-- don't take recent fleeing into account when item can be lost
coeff CGround = 2 -- pickup turn saved
coeff COrgan = error $ "" `showFailure` benList
coeff CEqp = 1000 -- must hinder currently (or be very potent);
-- note: not larger, to avoid Int32 overflow
coeff CStash = 1
-- This detects if the value of keeping the item in eqp is in fact < 0.
hind = hinders condShineWouldBetray uneasy actorMaxSk
goodMissile (Benefit{benInEqp, benFling}, cstore, iid, itemFull, kit) =
let arItem = aspectRecordFull itemFull
benR = coeff cstore * benFling
in if benR < -1 -- ignore very weak projectiles
&& (not benInEqp -- can't wear, so OK to risk losing or breaking
|| not (IA.checkFlag Ability.Meleeable arItem)
-- anything else expendable
&& hind itemFull) -- hinders now, so possibly often
&& permittedProjectAI skill calmE itemFull
then Just (benR, cstore, iid, itemFull, kit)
else Nothing
stores = [CStash, CGround] ++ [CEqp | calmE]
benList = benAvailableItems discoBenefit aid stores s
in mapMaybe goodMissile benList
-- | Produce the list of items from the given stores available to the actor
-- and the items' values.
benAvailableItems :: DiscoveryBenefit -> ActorId -> [CStore] -> State
-> [(Benefit, CStore, ItemId, ItemFull, ItemQuant)]
benAvailableItems discoBenefit aid cstores s =
let b = getActorBody aid s
mstash = gstash $ sfactionD s EM.! bfid b
ben _ CGround | mstash == Just (blid b, bpos b) = []
ben bag cstore =
[ (discoBenefit EM.! iid, cstore, iid, itemToFull iid s, kit)
| (iid, kit) <- EM.assocs bag]
benCStore cs = ben (getBodyStoreBag b cs s) cs
in concatMap benCStore cstores
hinders :: Bool -> Bool -> Ability.Skills -> ItemFull -> Bool
hinders condShineWouldBetray uneasy actorMaxSk itemFull =
let arItem = aspectRecordFull itemFull
itemShine = 0 < IA.getSkill Ability.SkShine arItem
-- @condAnyFoeAdj@ is not checked, because it's transient and also item
-- management is unlikely to happen during melee, anyway
itemShineBad = condShineWouldBetray && itemShine
in -- In the presence of enemies (seen, remembered or unseen but distressing)
-- actors want to hide in the dark.
uneasy && itemShineBad -- even if it's a weapon, take it off
-- Fast actors want to hit hard, because they hit much more often
-- than receive hits.
|| gearSpeed actorMaxSk > speedWalk
&& not (IA.checkFlag Ability.Meleeable arItem)
-- in case it's the only weapon
&& 0 > IA.getSkill Ability.SkHurtMelee arItem
-- | Require that the actor stands over a desirable item.
condDesirableFloorItemM :: MonadClientRead m => ActorId -> m Bool
condDesirableFloorItemM aid = not . null <$> benGroundItems aid
-- | Produce the list of items on the ground beneath the actor
-- that are worth picking up.
benGroundItems :: MonadClientRead m
=> ActorId
-> m [(Benefit, CStore, ItemId, ItemFull, ItemQuant)]
benGroundItems aid = do
cops <- getsState scops
b <- getsState $ getActorBody aid
fact <- getsState $ (EM.! bfid b) . sfactionD
discoBenefit <- getsClient sdiscoBenefit
let canEsc = fcanEscape (gkind fact)
isDesirable (ben, _, _, itemFull, _) =
desirableItem cops canEsc (benPickup ben)
(aspectRecordFull itemFull) (itemKind itemFull)
99 -- fake, because no time is wasted walking to item
filter isDesirable
<$> getsState (benAvailableItems discoBenefit aid [CGround])
desirableItem :: COps -> Bool -> Double -> IA.AspectRecord -> IK.ItemKind -> Int
-> Bool
desirableItem COps{corule}
canEsc benPickup arItem itemKind k =
let loneProjectile =
IK.isymbol itemKind == IK.rsymbolProjectile (RK.ritemSymbols corule)
&& k == 1
&& Dice.infDice (IK.icount itemKind) > 1
-- never generated as lone; usually means weak
useful = if canEsc
then benPickup > 0
|| IA.checkFlag Ability.Precious arItem
else -- A hack to prevent monsters from picking up
-- treasure meant for heroes.
let preciousNotUseful = IA.isHumanTrinket itemKind
in benPickup > 0 && not preciousNotUseful
in useful && not loneProjectile
condSupport :: MonadClientRead m
=> [(ActorId, Actor)] -> Int -> ActorId -> m Bool
{-# INLINE condSupport #-}
condSupport friendAssocs param aid = do
mtgtMPath <- getsClient $ EM.lookup aid . stargetD
getsState $ strongSupport friendAssocs param aid mtgtMPath
strongSupport :: [(ActorId, Actor)]
-> Int -> ActorId -> Maybe TgtAndPath -> State
-> Bool
strongSupport friendAssocs param aid mtgtMPath s =
-- The smaller the area scanned for friends, the lower number required.
let actorMaxSkills = sactorMaxSkills s
actorMaxSk = actorMaxSkills EM.! aid
n = min 2 param - Ability.getSk Ability.SkAggression actorMaxSk
b = getActorBody aid s
approaching b2 = case mtgtMPath of
Just TgtAndPath{tapTgt=TEnemy{},tapPath=Just AndPath{pathGoal}} ->
chessDist (bpos b2) pathGoal <= 1 + param -- will soon melee anyway
_ -> False
closeEnough b2 = let dist = chessDist (bpos b) (bpos b2)
in dist > 0 && (dist <= max 2 param || approaching b2)
closeAndStrong (aid2, b2) = closeEnough b2
&& actorCanMeleeToHarm actorMaxSkills aid2 b2
closeAndStrongFriends = filter closeAndStrong friendAssocs
in n <= 0 || not (null (drop (n - 1) closeAndStrongFriends))
-- optimized: length closeAndStrongFriends >= n
-- The numbers reflect fleeing AI conditions for non-aggressive actors
-- so that actors don't wait for support that is not possible due to not
-- enough friends on the level, even counting sleeping ones.
condAloneM :: MonadStateRead m => [(ActorId, Actor)] -> ActorId -> m Bool
condAloneM friendAssocs aid = do
b <- getsState $ getActorBody aid
mstash <- getsState $ \s -> gstash $ sfactionD s EM.! bfid b
let onStashLevel = case mstash of
Nothing -> False
Just (lid, _) -> lid == blid b
return $! length friendAssocs <= if onStashLevel then 3 else 2
-- | Require that the actor stands in the dark and so would be betrayed
-- by his own equipped light.
condShineWouldBetrayM :: MonadStateRead m => ActorId -> m Bool
condShineWouldBetrayM aid = do
b <- getsState $ getActorBody aid
aInAmbient <- getsState $ actorInAmbient b
return $ not aInAmbient -- tile is dark, so actor could hide
-- | Produce a list of acceptable adjacent points to flee to.
fleeList :: MonadClientRead m
=> [(ActorId, Actor)] -> ActorId -> m ([(Int, Point)], [(Int, Point)])
fleeList foeAssocs aid = do
COps{coTileSpeedup} <- getsState scops
mtgtMPath <- getsClient $ EM.lookup aid . stargetD
-- Prefer fleeing along the path to target, unless the target is a foe,
-- in which case flee in the opposite direction.
let etgtPath = case mtgtMPath of
Just TgtAndPath{ tapPath=Just AndPath{pathList, pathGoal}
, tapTgt } -> case tapTgt of
TEnemy{} -> Left pathGoal
TPoint TEnemyPos{} _ _ -> Left pathGoal
-- this is too weak, because only one is recorded and sometimes
-- many are needed to decide to flee next turn as well
_ -> Right pathList
_ -> Right []
b <- getsState $ getActorBody aid
lvl <- getLevel $ blid b
localTime <- getsState $ getLocalTime (blid b)
fleeD <- getsClient sfleeD
-- But if fled recently, prefer even more fleeing further this turn.
let eOldFleeOrTgt = case EM.lookup aid fleeD of
Just (fleeStart, time) | timeRecent5 localTime time -> Left fleeStart
_ -> etgtPath
myVic = vicinityUnsafe $ bpos b
dist p | null foeAssocs = 100
| otherwise = minimum $ map (chessDist p . bpos . snd) foeAssocs
dVic = map (dist &&& id) myVic
-- Flee, if possible. Direct access required; not enough time to open.
-- Can't be occupied.
accWalkUnocc p = Tile.isWalkable coTileSpeedup (lvl `at` p)
&& not (occupiedBigLvl p lvl)
&& not (occupiedProjLvl p lvl)
accWalkVic = filter (accWalkUnocc . snd) dVic
gtVic = filter ((> dist (bpos b)) . fst) accWalkVic
eqVicRaw = filter ((== dist (bpos b)) . fst) accWalkVic
(eqVicOld, eqVic) = partition ((== boldpos b) . Just . snd) eqVicRaw
accNonWalkUnocc p = not (Tile.isWalkable coTileSpeedup (lvl `at` p))
&& Tile.isEasyOpen coTileSpeedup (lvl `at` p)
&& not (occupiedBigLvl p lvl)
&& not (occupiedProjLvl p lvl)
accNonWalkVic = filter (accNonWalkUnocc . snd) dVic
gtEqNonVic = filter ((>= dist (bpos b)) . fst) accNonWalkVic
ltAllVic = filter ((< dist (bpos b)) . fst) dVic
rewardPath mult (d, p) = case eOldFleeOrTgt of
Right tgtPathList | p `elem` tgtPathList ->
(100 * mult * d, p)
Right tgtPathList | any (adjacent p) tgtPathList ->
(10 * mult * d, p)
Left pathGoal | bpos b /= pathGoal ->
let venemy = towards (bpos b) pathGoal
vflee = towards (bpos b) p
sq = euclidDistSqVector venemy vflee
skew = case compare sq 2 of
GT -> 100 * sq
EQ -> 10 * sq
LT -> sq -- going towards enemy (but may escape adjacent foes)
in (mult * skew * d, p)
_ -> (mult * d, p) -- far from target path or even on target goal
goodVic = map (rewardPath 10000) gtVic
++ map (rewardPath 100) eqVic
badVic = map (rewardPath 1) $ gtEqNonVic ++ eqVicOld ++ ltAllVic
return (goodVic, badVic)
| LambdaHack/LambdaHack | engine-src/Game/LambdaHack/Client/AI/ConditionM.hs | bsd-3-clause | 19,507 | 0 | 22 | 4,996 | 4,819 | 2,478 | 2,341 | -1 | -1 |
module Generics.GPAH.Derive.PPrint where
import Generics.GPAH.Derive.Base
import Text.CSV
import System.IO
import qualified Data.Map as M
import Data.Function
import Data.List
pprint :: Analysis -> FilePath -> IO ()
pprint (Analysis a1 a2 a3 a4) fp = do
let p = [["DeriveTemplateHaskellDirectives", show $ sortBy (flip compare `on` snd) $ M.toList a1]
,["DerivePreprocessingDirectives", show $ sortBy (flip compare `on` snd) $ M.toList a2]
,["DrIFTNormalDirectives", show $ sortBy (flip compare `on` snd) $ M.toList a3]
,["DrIFTGlobalDirectives", show $ sortBy (flip compare `on` snd) $ M.toList a4]
]
pCSV = printCSV p
writeFile fp pCSV
putStrLn "Derive+DrIFT Results:\n###############"
putStrLn pCSV
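-- A minimal usage sketch (illustrative, not part of the original module):
-- pretty-print an empty analysis to a hypothetical output file.  This assumes
-- the four 'Analysis' fields are Data.Map maps, as the pattern match above
-- suggests.
pprintEmpty :: IO ()
pprintEmpty = pprint (Analysis M.empty M.empty M.empty M.empty) "derive-results.csv"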
| bezirg/gpah | src/Generics/GPAH/Derive/PPrint.hs | bsd-3-clause | 831 | 0 | 17 | 224 | 268 | 144 | 124 | 17 | 1 |
import Prelude hiding ((>), (-), (.))
import System.Shana
import System.Shana.Utils
import System.Shana.DSL.Shell
main = shana - ls "." > grep "src"
| nfjinjing/shana | src/Main.hs | bsd-3-clause | 151 | 0 | 7 | 23 | 59 | 37 | 22 | 5 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveDataTypeable #-}
-- | The work is mainly stolen from https://gist.github.com/2725402
--
--
module Snap.Snaplet.Auth.Backends.MongoDB where
import Control.Exception (Exception)
import Data.Typeable (Typeable)
import Control.Applicative
import Control.Arrow
import Control.Monad
import Control.Monad.CatchIO (throw)
import Control.Monad.Error
import Data.Baeson.Types
import qualified Data.Bson as BSON
import qualified Data.Configurator as C
import qualified Data.HashMap.Lazy as HM
import Control.Lens hiding ((.=), Action)
import Data.Maybe
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Database.MongoDB as M
import Snap.Snaplet
import Snap.Snaplet.Auth
import qualified Snap.Snaplet.MongoDB as SM
import Snap.Snaplet.Session
import System.IO.Pool (Pool, aResource)
import Web.ClientSession
import Database.MongoDB (AccessMode (UnconfirmedWrites),
Action, Database, Failure (..),
Pipe)
------------------------------------------------------------------------------
-- | Simple function to get auth settings from a config file. All options
-- are optional and default to what's in defAuthSettings if not supplied.
settingsFromConfig :: Initializer b (AuthManager b) AuthSettings
settingsFromConfig = do
config <- getSnapletUserConfig
minPasswordLen' <- liftIO $ C.lookup config "minPasswordLen"
let pw = maybe id (\x s -> s { asMinPasswdLen = x }) minPasswordLen'
rememberCookie' <- liftIO $ C.lookup config "rememberCookie"
let rc = maybe id (\x s -> s { asRememberCookieName = x }) rememberCookie'
rememberPeriod' <- liftIO $ C.lookup config "rememberPeriod"
let rp = maybe id (\x s -> s { asRememberPeriod = Just x }) rememberPeriod'
lockout' <- liftIO $ C.lookup config "lockout"
let lo = maybe id (\x s -> s { asLockout = Just (second fromInteger x) })
lockout'
siteKey' <- liftIO (C.lookup config "db.siteKey")
-- Very weird that we are not able to look up anything from the config even when it exists.
let sk = maybe id (\x s -> s { asSiteKey = x }) siteKey'
return $ (pw . rc . rp . lo . sk) defAuthSettings
------------------------------------------------------------------------------
-- | Initializer for the MongoDB backend to the auth snaplet.
--
initMongoAuth :: SnapletLens b SessionManager
-> Snaplet SM.MongoDB
-> Maybe String -- ^ Site Key path
-> SnapletInit b (AuthManager b)
initMongoAuth sess db sk = makeSnaplet "mongodb-auth" desc Nothing $ do
config <- getSnapletUserConfig
authTable <- liftIO $ C.lookupDefault "auth_user" config "authCollection"
authSettings <- settingsFromConfig
key <- liftIO $ getKey (fromMaybe (asSiteKey authSettings) sk)
let
lens' = db ^. snapletValue
manager = MongoBackend authTable (SM.mongoDatabase lens')
(SM.mongoPool lens')
rng <- liftIO mkRNG
return AuthManager
{ backend = manager
, session = sess
, activeUser = Nothing
, minPasswdLen = asMinPasswdLen authSettings
, rememberCookieName = asRememberCookieName authSettings
, rememberPeriod = asRememberPeriod authSettings
, siteKey = key
, lockout = asLockout authSettings
, randomNumberGenerator = rng
}
where
desc = "A MongoDB backend for user authentication"
--datadir = "/resources/auth" :: String
data MongoBackend = MongoBackend
{ mongoCollection :: M.Collection
, mongoDatabase :: Database
, mongoPool :: Pool IOError Pipe
}
accessMode :: AccessMode
accessMode = UnconfirmedWrites
-- | A fresh user has a UserId of Nothing, in which case it is inserted and the
-- generated ObjectId becomes its UserId; otherwise the existing document is saved.
--
mongoSave :: MongoBackend -> AuthUser -> IO (Either AuthFailure AuthUser)
mongoSave mong usr =
case userId usr of
Nothing -> insertUser' usr
_ -> saveUser' usr
where insertUser' u = do
res <- dbQuery mong $ M.insert (mongoCollection mong) $ usrToMong u
case res of
Left (WriteFailure 11000 _) -> return $ Left DuplicateLogin
Left v -> throwBE v
Right r -> return $ Right (insertId' r)
insertId' x = usr { userId = fmap objectIdToUserId $ BSON.cast' x}
saveUser' u = do
res <- dbQuery mong $ M.save (mongoCollection mong) $ usrToMong u
case res of
Left (WriteFailure 11000 _) -> return $ Left DuplicateLogin
Left v -> throwBE v
_ -> return $ Right u
throwBE = return . Left . AuthError . show
mongoAct :: MongoBackend -> Action IO a -> (a -> IO b) -> IO b
mongoAct mong act' conv = do
res <- dbQuery mong act'
case res of
Left e -> throw $ AuthMongoException $ show e
Right r -> conv r
mongoLookup :: MongoBackend -> M.Document -> IO (Maybe AuthUser)
mongoLookup mong doc =
mongoAct mong act' conv
where
act' = M.findOne $ M.select doc (mongoCollection mong)
conv = maybe (return Nothing) parseUsr
parseUsr = liftM Just . usrFromMongThrow
mongoLookupByLogin :: MongoBackend -> Text -> IO (Maybe AuthUser)
mongoLookupByLogin mong login = mongoLookup mong ["login" .= login]
mongoLookupById :: MongoBackend -> UserId -> IO (Maybe AuthUser)
mongoLookupById mong uid = mongoLookup mong ["_id" .= uid]
mongoLookupByToken :: MongoBackend -> Text -> IO (Maybe AuthUser)
mongoLookupByToken mong tok = mongoLookup mong ["rememberToken" .= tok]
mongoDestroy :: MongoBackend -> AuthUser -> IO ()
mongoDestroy mong usr =
maybe (return ()) actonid $ userId usr
where
coll = mongoCollection mong
actonid uid = mongoAct mong (act' uid) return
act' uid = M.deleteOne (M.select ["_id" .= uid] coll)
instance IAuthBackend MongoBackend where
save = mongoSave
lookupByUserId = mongoLookupById
lookupByLogin = mongoLookupByLogin
lookupByRememberToken = mongoLookupByToken
destroy = mongoDestroy
dbQuery :: (MonadIO m)
=> MongoBackend
-> Action IO a
-> m (Either Failure a)
dbQuery mong action = do
let
pool = mongoPool mong
database = mongoDatabase mong
mode = accessMode
ep <- liftIO $ runErrorT $ aResource pool
case ep of
Left err -> return $ Left $ ConnectionFailure err
Right pip -> liftIO $ M.access pip mode database action
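-- A small usage sketch (illustration only): running a one-off query through
-- the backend's connection pool, mirroring how 'mongoLookup' uses 'dbQuery'
-- above. The helper name is hypothetical.
--
-- > findByLogin :: MongoBackend -> Text -> IO (Either Failure (Maybe M.Document))
-- > findByLogin mong login =
-- >     dbQuery mong $ M.findOne (M.select ["login" .= login] (mongoCollection mong))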
instance ToBSON Password where
toBSON (Encrypted p) = toBSON p
toBSON _ = error "Can't store unencrypted password"
instance FromBSON Password where
fromBSON v = Encrypted <$> fromBSON v
instance ToBSON Role where
toBSON (Role r) = toBSON $ T.decodeUtf8 r
instance FromBSON Role where
fromBSON v = Role . T.encodeUtf8 <$> fromBSON v
-- | UserId is stored as ObjectId in mongoDB.
--
instance FromBSON UserId where
fromBSON (M.ObjId oid) = pure $ objectIdToUserId oid
objectIdToUserId :: BSON.ObjectId -> UserId
objectIdToUserId = UserId . T.pack . show
userIdToObjectId :: UserId -> BSON.ObjectId
userIdToObjectId = read . T.unpack . unUid
-- | Transform UserId to ObjectId
--
instance ToBSON UserId where
toBSON = M.ObjId . read . T.unpack . unUid
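-- A hedged round-trip sketch (illustration only): converting between the
-- textual 'UserId' and BSON's 'ObjectId' is the identity, provided the text
-- really is the 'show'n form of an ObjectId.
--
-- > objectIdToUserId (userIdToObjectId uid) == uid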
-- | Transform UserLogin name to UUID for unique id.
--
userLoginToUUID :: Text -> M.UUID
userLoginToUUID = M.UUID . T.encodeUtf8
usrToMong :: AuthUser -> M.Document
usrToMong usr = case userId usr of
Nothing -> docs
Just x -> ("_id" .= x ) : docs
where docs = [ "login" .= userLogin usr
, "email" .= userEmail usr
, "password" .= userPassword usr
, "activatedAt" .= userActivatedAt usr
, "suspendedAt" .= userSuspendedAt usr
, "rememberToken" .= userRememberToken usr
, "loginCount" .= userLoginCount usr
, "userFailedLoginCount" .= userFailedLoginCount usr
, "lockedOutUntil" .= userLockedOutUntil usr
, "currentLoginAt" .= userCurrentLoginAt usr
, "lastLoginAt" .= userLastLoginAt usr
, "currentLoginIp" .= userCurrentLoginIp usr
, "lastLoginIp" .= userLastLoginIp usr
, "createdAt" .= userCreatedAt usr
, "updatedAt" .= userUpdatedAt usr
, "resetToken" .= userResetToken usr
, "resetRequestedAt" .= userResetRequestedAt usr
, "roles" .= userRoles usr
]
usrFromMong :: M.Document -> Parser AuthUser
usrFromMong d = AuthUser
<$> d .:? "_id"
<*> d .: "login"
<*> d .:? "email"
<*> d .:? "password"
<*> d .:? "activatedAt"
<*> d .:? "suspendedAt"
<*> d .:? "rememberToken"
<*> d .: "loginCount"
<*> d .: "userFailedLoginCount"
<*> d .:? "lockedOutUntil"
<*> d .:? "currentLoginAt"
<*> d .:? "lastLoginAt"
<*> d .:? "currentLoginIp"
<*> d .:? "lastLoginIp"
<*> d .:? "createdAt"
<*> d .:? "updatedAt"
<*> d .:? "resetToken"
<*> d .:? "resetRequestedAt"
<*> d .: "roles" .!= []
<*> pure HM.empty
usrFromMongThrow :: M.Document -> IO AuthUser
usrFromMongThrow d =
case parseEither usrFromMong d of
Left e -> throw $ ParseDocumentException e
Right r -> return r
data AuthMongoException = ParseDocumentException String
| AuthMongoException String
deriving (Show, Typeable)
instance Exception AuthMongoException
|
HaskellCNOrg/snap-web
|
src/Snap/Snaplet/Auth/Backends/MongoDB.hs
|
bsd-3-clause
| 10,392
| 0
| 44
| 3,307
| 2,572
| 1,319
| 1,253
| 209
| 6
|
module Api (
module Api.Config,
module Api.Monad,
module Api.Routes
) where
import Api.Config
import Api.Monad
import Api.Routes
|
raptros/chatless-hs
|
src/Api.hs
|
bsd-3-clause
| 174
| 0
| 5
| 62
| 39
| 25
| 14
| 7
| 0
|
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE PartialTypeSignatures #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE PatternSynonyms #-}
{-# OPTIONS_GHC -fno-warn-missing-methods #-}
module Lib
( ) where
import Data.Comp
import Data.Comp.Derive
import Data.Comp.Render
import Data.Comp.TermRewriting (reduce)
import Data.Rewriting.Rules
import Data.Rewriting.FirstOrder (bottomUp)
import Data.Rewriting.HigherOrder
import Data.String(IsString(..))
import Data.Maybe(fromMaybe)
import qualified Data.Set as Set
import Derive1
import Control.Monad (guard,(>=>))
data STUDENT a = Student a a a a deriving Show
data MAJOR a = English | Math | Physics deriving Show
newtype LIT b a = L {unL ::b} deriving Show
data NUM a = Plus a a | Minus a a | Times a a | Negate a | Divide a a deriving Show
--[
$(derive [makeFunctor,makeTraversable,makeFoldable,
makeEqF,makeOrdF,makeShowF,smartConstructors,makeShowConstr] [''STUDENT,''LIT,''MAJOR,''NUM])
$(derive [makeEqF,makeShowF,smartConstructors,makeShowConstr] [''WILD])
$(derive [smartRep] [''STUDENT,''LIT,''MAJOR])
$(derive [makeOrdF] [''VAR,''LAM,''APP])
--]
type SIG = NUM :+: STUDENT :+: MAJOR :+: LIT Int :+: LIT String :+: LIT Float :+: ADDONS
type ADDONS = VAR :+: LAM :+: APP -- Not needed as written, but allow higher order rewrite rules.
newtype Expr f a = Expr {unExpr :: Term f} deriving Functor
-- Restricted smart constructors [
student :: (Rep r,STUDENT :<: PF r) => r Int -> r String -> r Int -> r (MAJOR b) -> r b
student = rStudent
l :: (LIT a :<: PF r, Rep r) => a -> r a
l = rL
--]
deriving instance Functor (LHS f)
deriving instance Functor (RHS f)
instance (LIT a :<: PF (r f),Functor (r f),Num a,Rep (r f)) => Num (r (f :: * -> *) a) where
fromInteger = l . fromInteger
abs (fromRep -> a) = l $ fromMaybe 0 $ do
a' <- project a
return $ abs $ unL a'
instance (LIT a :<: PF (r f),Functor (r f),Fractional a,Rep (r f)) => Fractional (r (f :: * -> *) a) where
fromRational = l . fromRational
instance (LIT String :<: PF (r f),Functor (r f),Rep (r f)) => IsString (r (f :: * -> *) String) where
fromString = l . fromString
rewrite' --[
:: ( VAR :<: f
, LAM :<: f
, APP :<: f
, VAR :<: PF (LHS' f)
, LAM :<: PF (LHS' f)
, VAR :<: PF (RHS f)
, LAM :<: PF (RHS f)
, Traversable f, EqF f,OrdF f,Render f
, g ~ (f :&: Set.Set Name)
)
=> (Term g -> Term g -> Term g) -- ^ Application operator
-> Rule (LHS' f) (RHS f)
-> Term g
-> Maybe (Term g)
rewrite' app (Rule lhs'@(LHS' conds lhs) rhs) t = do
subst <- match lhs t
case conds of
Nothing -> return ()
Just c -> do
cont <- unBOOL (unTerm c) subst
guard cont
substitute app subst rhs --]
-- Render and Show and Rep Expr [
instance Render NUM
instance Render STUDENT
instance Show b => Render (LIT b)
instance Render MAJOR
instance Render WILD
instance (MetaRep f ~ MetaId) => Render (META f)
instance (MetaRep f ~ MetaId) => ShowConstr (META f) where
showConstr (Meta (MVar (MetaId rep))) = show rep
instance Rep (Expr f) where
type PF (Expr f) = f
toRep = Expr
fromRep = unExpr
instance Rep (LHS' f)
where
type PF (LHS' f) = WILD :+: META (LHS f) :+: f
toRep = LHS'' . LHS
fromRep = unLHS . unLHS'
--]
data LHS' f a = LHS' { unC :: Maybe (Conditional f), unLHS' :: LHS f a } --Term (WILD :+: (META (LHS' f) :+: f))}
pattern LHS'' a = LHS' Nothing a
guarded a b = LHS' (Just b) a
data BOOL f a = Boolean {unBOOL :: Data.Rewriting.Rules.Subst (f :&: Set.Set Name) -> Maybe Bool}
| a :&& a
| a :|| a
type Conditional f = Term (BOOL f)
boolHelper boolFun f g = Term $ Boolean $ \subs -> do
f' <- unBOOL (unTerm f) subs
g' <- unBOOL (unTerm g) subs
return (f' `boolFun` g')
(.&&) = boolHelper (&&)
(.||) = boolHelper (||)
infixr 4 .&& , .||
notB f = Boolean $ unBOOL f >=> return . not
ordHelp :: (Traversable f,Ord a,VAR :<: f,LAM :<: f,VAR :<: PF (RHS f),LAM :<: PF (RHS f),APP :<: f,OrdF f)
=> [Ordering] -> RHS f a -> RHS f a -> Term (BOOL f)
ordHelp ords a b = Term $ Boolean $ \subs -> do
a' <- substitute app subs a
b' <- substitute app subs b
return $ elem (compareF (stripAnn a') (stripAnn b')) ords
(.<) = ordHelp [LT]
(.>) = ordHelp [GT]
(.>=) = ordHelp [GT,EQ]
(.<=) = ordHelp [LT,EQ]
(.==) = ordHelp [EQ]
(.|) = guarded
infixr 2 .|
ex :: Expr SIG a
ex = student 1 "NOT matched" 2 rEnglish
--student_rule3 :: _ => MetaId Int -> Rule (LHS' f) rhs
student_rule x y= student (meta x) __ (meta y) __ .| meta x .<= 1 .&&
meta x .> 0 .&&
meta y .> meta x
===>
student 99999 "matched!" 2 rEnglish
student_rule2 x = student (meta x) __ __ __ .| meta x .> 1
===>
student 99999 "matched!" 2 rMath
main = do
let e = unExpr ex
drawTerm $ rewriteWith (reduce $ rewrite' app $ quantify student_rule) e
drawTerm $ rewriteWith (reduce $ rewrite' app $ quantify student_rule2) e
|
tomberek/RETE
|
src/RETE/Lib3.hs
|
bsd-3-clause
| 5,582
| 0
| 15
| 1,455
| 2,227
| 1,166
| 1,061
| -1
| -1
|
module T367 where
import Data.Singletons.TH (singletonsOnly)
import Prelude.Singletons
$(singletonsOnly [d|
const' :: a -> b -> a
const' x _ = x
|])
test :: Sing True
test = sConst' @Bool @() STrue STuple0
|
goldfirere/singletons
|
singletons-base/tests/compile-and-dump/Singletons/T367.hs
|
bsd-3-clause
| 215
| 0
| 7
| 43
| 62
| 35
| 27
| -1
| -1
|
{-# LANGUAGE OverloadedStrings #-}
module Q3Demo.Loader.BSP where
import Control.Applicative
import Control.Monad
import Data.Bits
import Data.Int
import Data.Word
import Data.Binary as B
import Data.Binary.Get as B
import Data.Binary.IEEE754
import Data.Vect hiding (Vector)
import Data.Vector (Vector)
import qualified Data.ByteString as SB8
import qualified Data.ByteString.Char8 as SB
import qualified Data.ByteString.Lazy as LB
import qualified Data.Vector as V
{-
Information: http://graphics.stanford.edu/~kekoa/q3/
Data types
Quake 3 BSP files contain only four basic data types. They are:
Type Description
ubyte unsigned byte
int 4-byte integer, little-endian
float 4-byte IEEE float, little-endian
string[n] string of n ASCII bytes, not necessarily null-terminated
All data in a BSP file is organized into records composed of these four data types.
-}
-- http://www.mralligator.com/q3/
lumpEntities = 0 :: Int -- ^ Game-related object descriptions
lumpShaders = 1 :: Int -- ^ Stores texture information
lumpPlanes = 2 :: Int -- ^ Stores the splitting planes
lumpNodes = 3 :: Int -- ^ Stores the BSP nodes
lumpLeaves = 4 :: Int -- ^ Stores the leafs of the nodes
lumpLeafSurfaces = 5 :: Int -- ^ Stores the leaf's indices into the faces
lumpLeafBrushes = 6 :: Int -- ^ Stores the leaf's indices into the brushes
lumpModels = 7 :: Int -- ^ Descriptions of rigid world geometry in map
lumpBrushes = 8 :: Int -- ^ Stores the brushes info (for collision)
lumpBrushSides = 9 :: Int -- ^ Stores the brush surfaces
lumpDrawVertices = 10 :: Int -- ^ Stores the level vertices
lumpDrawIndices = 11 :: Int -- ^ Stores the level indices
lumpFogs = 12 :: Int -- ^ List of special map effects
lumpSurfaces = 13 :: Int -- ^ Stores the faces for the level
lumpLightmaps = 14 :: Int -- ^ Stores the lightmaps for the level
lumpLightGrid = 15 :: Int -- ^ Local illumination data
lumpVisibility = 16 :: Int -- ^ Stores PVS and cluster info (visibility)
data Model
= Model
{ mdMins :: Vec3
, mdMaxs :: Vec3
, mdFirstSurface :: Int
, mdNumSurfaces :: Int
, mdFirstBrush :: Int
, mdNumBrushes :: Int
}
data Shader
= Shader
{ shName :: SB.ByteString
, shSurfaceFlags :: Int
, shContentFlags :: Int
}
data Plane
= Plane
{ plNormal :: Vec3
, plDist :: Float
}
data Node
= Node
{ ndPlaneNum :: Int
, ndChildren :: (Int,Int)
, ndMins :: Vec3
, ndMaxs :: Vec3
}
data Leaf
= Leaf
{ lfCluster :: Int
, lfArea :: Int
, lfMins :: Vec3
, lfMaxs :: Vec3
, lfFirstLeafSurface :: Int
, lfNumLeafSurfaces :: Int
, lfFirstLeafBrush :: Int
, lfNumLeafBrushes :: Int
}
data BrushSide
= BrushSide
{ bsPlaneNum :: Int
, bsShaderNum :: Int
}
data Brush
= Brush
{ brFirstSide :: Int
, brNumSides :: Int
, brShaderNum :: Int
}
data Fog
= Fog
{ fgName :: SB.ByteString
, fgBrushNum :: Int
, fgVisibleSide :: Int
}
data DrawVertex
= DrawVertex
{ dvPosition :: Vec3
, dvDiffuseUV :: Vec2
, dvLightmaptUV :: Vec2
, dvNormal :: Vec3
, dvColor :: Vec4
}
data SurfaceType
= Planar
| Patch
| TriangleSoup
| Flare
data Surface
= Surface
{ srShaderNum :: Int
, srFogNum :: Int
, srSurfaceType :: SurfaceType
, srFirstVertex :: Int
, srNumVertices :: Int
, srFirstIndex :: Int
, srNumIndices :: Int
, srLightmapNum :: Int
, srLightmapPos :: Vec2
, srLightmapSize :: Vec2
, srLightmapOrigin :: Vec3
, srLightmapVec1 :: Vec3
, srLightmapVec2 :: Vec3
, srLightmapVec3 :: Vec3
, srPatchSize :: (Int,Int)
}
data Lightmap
= Lightmap
{ lmMap :: SB.ByteString
}
data LightGrid
= LightGrid
data Visibility
= Visibility
{ vsNumVecs :: Int
, vsSizeVecs :: Int
, vsVecs :: Vector Word8
}
data BSPLevel
= BSPLevel
{ blEntities :: SB.ByteString
, blShaders :: Vector Shader
, blPlanes :: Vector Plane
, blNodes :: Vector Node
, blLeaves :: Vector Leaf
, blLeafSurfaces :: Vector Int
, blLeafBrushes :: Vector Int
, blModels :: Vector Model
, blBrushes :: Vector Brush
, blBrushSides :: Vector BrushSide
, blDrawVertices :: Vector DrawVertex
, blDrawIndices :: Vector Int
, blFogs :: Vector Fog
, blSurfaces :: Vector Surface
, blLightmaps :: Vector Lightmap
, blLightgrid :: Vector LightGrid
, blVisibility :: Visibility
}
getString = fmap (SB.takeWhile (/= '\0')) . getByteString
getWord = getWord32le
getUByte = B.get :: Get Word8
getUByte2 = B.get :: Get (Word8,Word8)
getUByte3 = B.get :: Get (Word8,Word8,Word8)
getFloat = getFloat32le
getVec2 = Vec2 <$> getFloat <*> getFloat
getVec3 = (\x y z -> Vec3 x z (-y)) <$> getFloat <*> getFloat <*> getFloat
getVec2i = (\x y -> Vec2 (fromIntegral x) (fromIntegral y)) <$> getInt <*> getInt
getVec3i = (\x y z -> Vec3 (fromIntegral x) (fromIntegral z) (fromIntegral (-y))) <$> getInt <*> getInt <*> getInt
getVec4RGBA = (\r g b a -> Vec4 (f r) (f g) (f b) (f a)) <$> getUByte <*> getUByte <*> getUByte <*> getUByte
where
f v = fromIntegral v / 255
getInt = fromIntegral <$> getInt' :: Get Int
where
getInt' = fromIntegral <$> getWord32le :: Get Int32
getInt2 = (,) <$> getInt <*> getInt
getItems s act l = V.fromList <$> replicateM (l `div` s) act
getHeader = do
magic <- getString 4
case magic == "IBSP" of
True -> return ()
_ -> error "Invalid format."
version <- getWord
el <- replicateM 17 getInt2
return (magic,version,el)
getBSPLevel el = BSPLevel
<$> getLump getEntities lumpEntities
<*> getLump getShaders lumpShaders
<*> getLump getPlanes lumpPlanes
<*> getLump getNodes lumpNodes
<*> getLump getLeaves lumpLeaves
<*> getLump getLeafSurfaces lumpLeafSurfaces
<*> getLump getLeafBrushes lumpLeafBrushes
<*> getLump getModels lumpModels
<*> getLump getBrushes lumpBrushes
<*> getLump getBrushSides lumpBrushSides
<*> getLump getDrawVertices lumpDrawVertices
<*> getLump getDrawIndices lumpDrawIndices
<*> getLump getFogs lumpFogs
<*> getLump getSurfaces lumpSurfaces
<*> getLump getLightmaps lumpLightmaps
<*> getLump getLightGrid lumpLightGrid
<*> getLump getVisibility lumpVisibility
where
getLump g i = lookAhead $ do
let (o,l) = el !! i
skip o
g l
getSurfaceType = getInt >>= \i -> return $ case i of
1 -> Planar
2 -> Patch
3 -> TriangleSoup
4 -> Flare
_ -> error "Invalid surface type"
getEntities l = getString l
getShaders = getItems 72 $ Shader <$> getString 64 <*> getInt <*> getInt
getPlanes = getItems 16 $ Plane <$> getVec3 <*> getFloat
getNodes = getItems 36 $ Node <$> getInt <*> getInt2 <*> getVec3i <*> getVec3i
getLeaves = getItems 48 $ Leaf <$> getInt <*> getInt <*> getVec3i <*> getVec3i <*> getInt <*> getInt <*> getInt <*> getInt
getLeafSurfaces = getItems 4 getInt
getLeafBrushes = getItems 4 getInt
getModels = getItems 40 $ Model <$> getVec3 <*> getVec3 <*> getInt <*> getInt <*> getInt <*> getInt
getBrushes = getItems 12 $ Brush <$> getInt <*> getInt <*> getInt
getBrushSides = getItems 8 $ BrushSide <$> getInt <*> getInt
getDrawVertices = getItems 44 $ DrawVertex <$> getVec3 <*> getVec2 <*> getVec2 <*> getVec3 <*> getVec4RGBA
getDrawIndices = getItems 4 getInt
getFogs = getItems 72 $ Fog <$> getString 64 <*> getInt <*> getInt
getSurfaces = getItems 104 $ Surface <$> getInt <*> getInt <*> getSurfaceType <*> getInt <*> getInt <*> getInt <*> getInt <*> getInt
<*> getVec2i <*> getVec2i <*> getVec3 <*> getVec3 <*> getVec3 <*> getVec3 <*> getInt2
getLightmaps = getItems (128*128*3) (Lightmap <$> (getByteString $ 128*128*3))
getLightGrid = getItems 8 $ do
ambient <- getUByte3
directional <- getUByte3
dir <- getUByte2
return LightGrid
getVisibility l = do
nvecs <- getInt
szvecs <- getInt
vecs <- getByteString $ nvecs * szvecs
return $ Visibility nvecs szvecs $ V.fromList $ SB8.unpack vecs
loadBSP :: String -> IO BSPLevel
loadBSP n = readBSP <$> LB.readFile n
readBSP dat = do
let (magic,version,el) = runGet getHeader dat
runGet (getBSPLevel el) dat
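-- A minimal usage sketch (not part of the original loader); the file path is
-- hypothetical. It loads a BSP file and prints a few element counts.
printBSPStats :: FilePath -> IO ()
printBSPStats path = do
    bsp <- loadBSP path
    putStrLn $ "shaders:  " ++ show (V.length (blShaders bsp))
    putStrLn $ "surfaces: " ++ show (V.length (blSurfaces bsp))
    putStrLn $ "vertices: " ++ show (V.length (blDrawVertices bsp))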
|
csabahruska/q3demo
|
src/Q3Demo/Loader/BSP.hs
|
bsd-3-clause
| 8,897
| 0
| 22
| 2,585
| 2,246
| 1,239
| 1,007
| 221
| 5
|
{-|
Module : Export.PdfGenerator
Description : Contains functions for generating a PDF from LaTeX text.
-}
module Export.PdfGenerator
(createPDF) where
import System.Process (createProcess, shell, waitForProcess, ProcessHandle)
import GHC.IO.Handle.Types
import Export.ImageConversion (removeFile)
import Data.List.Utils (replace)
-- | Opens a new process to create a PDF from a TEX (texName) and deletes
-- the tex file and extra files created by pdflatex
createPDF :: String -> IO ()
createPDF texName = do
(_, _, _, pid) <- convertTexToPDF texName
putStrLn "Waiting for a process..."
_ <- waitForProcess pid
let auxFile = replace ".tex" ".aux" texName
logFile = replace ".tex" ".log" texName
_ <- removeFile (auxFile ++ " " ++ logFile ++ " " ++ texName)
putStrLn "Process Complete"
-- | Create a process to use the pdflatex program to create a PDF from a TEX
-- file (texName). The process is run in nonstop mode and so it will not block
-- if an error occurs. The resulting PDF will have the same filename as texName.
convertTexToPDF :: String -> IO
(Maybe Handle,
Maybe Handle,
Maybe Handle,
ProcessHandle)
convertTexToPDF texName =
createProcess $ shell $ "pdflatex -interaction=nonstopmode " ++ texName
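-- A minimal usage sketch (illustrative file name; assumes @pdflatex@ is on
-- the PATH): render "report.tex" into "report.pdf" next to it and clean up
-- the intermediate .aux and .log files, as well as the .tex itself, afterwards.
--
-- > createPDF "report.tex"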
|
hermish/courseography
|
app/Export/PdfGenerator.hs
|
gpl-3.0
| 1,368
| 0
| 13
| 346
| 240
| 128
| 112
| 22
| 1
|
module Model.Role.Internal where
import Prelude
import Data.Text as T
import Database.Persist.TH
import Text.Read (readMaybe)
import Web.PathPieces
data Role
= TeamMember
| Moderator
| Admin
deriving (Bounded, Enum, Eq, Show, Read, Ord)
derivePersistField "Role"
instance PathPiece Role where
    fromPathPiece = readMaybe . T.unpack
toPathPiece = T.pack . show
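-- A small round-trip sketch (illustration only):
--
-- >>> fromPathPiece (toPathPiece Admin) :: Maybe Role
-- Just Admin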
|
chreekat/snowdrift
|
Model/Role/Internal.hs
|
agpl-3.0
| 435
| 0
| 11
| 95
| 141
| 77
| 64
| -1
| -1
|
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Tholos.API.ContentTypes where
import Data.Aeson
import Data.Aeson.Parser
import Data.Aeson.Types (parseEither)
import Data.Attoparsec.ByteString (endOfInput, parseOnly)
import Data.ByteString (ByteString)
import qualified Data.ByteString.Lazy as BSL
import qualified Data.Text as T
import Data.Typeable
import Network.HTTP.Media ((//), (/:))
import Servant
data JSONAPI deriving Typeable
instance Accept JSONAPI where
contentType _ = ("application" :: ByteString) // ("json" :: ByteString)
instance ToJSON a => MimeRender JSONAPI a where
mimeRender _ = encode
instance FromJSON a => MimeUnrender JSONAPI a where
mimeUnrender _ = eitherDecodeLenient
eitherDecodeLenient :: FromJSON a => BSL.ByteString -> Either String a
eitherDecodeLenient input = do
v :: Value <- parseOnly (value <* endOfInput) (BSL.toStrict input)
parseEither parseJSON v
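-- A hedged sketch (illustration only) of what the lenient variant buys us:
-- parsing with the raw 'value' parser accepts any top-level JSON value,
-- including bare scalars.
--
-- >>> eitherDecodeLenient "\"hello\"" :: Either String T.Text
-- Right "hello"
-- >>> eitherDecodeLenient "42" :: Either String Int
-- Right 42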
|
charlescrain/tholos
|
src/Tholos/API/ContentTypes.hs
|
bsd-3-clause
| 1,157
| 0
| 11
| 282
| 258
| 148
| 110
| -1
| -1
|
module Language.Embedded.Hardware.Expression
( HExp
, HType
, Bit
, Bits
, bitFromInteger
, bitToInteger
, module Language.Embedded.Hardware.Expression.Frontend
) where
import Language.Embedded.Hardware.Expression.Syntax (HExp, HType)
import Language.Embedded.Hardware.Expression.Frontend
import Language.Embedded.Hardware.Expression.Represent.Bit (Bit, Bits, bitFromInteger, bitToInteger)
import Language.Embedded.Hardware.Expression.Backend.VHDL ()
|
markus-git/imperative-edsl-vhdl
|
src/Language/Embedded/Hardware/Expression.hs
|
bsd-3-clause
| 468
| 0
| 5
| 51
| 96
| 68
| 28
| 12
| 0
|
-- (c) The University of Glasgow 2006
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveFunctor #-}
module Unify (
tcMatchTy, tcMatchTys, tcMatchTyX, tcMatchTysX, tcUnifyTyWithTFs,
ruleMatchTyX,
-- * Rough matching
roughMatchTcs, instanceCantMatch,
typesCantMatch,
-- Side-effect free unification
tcUnifyTy, tcUnifyTys,
tcUnifyTysFG,
BindFlag(..),
UnifyResult, UnifyResultM(..),
-- Matching a type against a lifted type (coercion)
liftCoMatch
) where
#include "HsVersions.h"
import Var
import VarEnv
import VarSet
import Kind
import Name( Name )
import Type hiding ( getTvSubstEnv )
import Coercion hiding ( getCvSubstEnv )
import TyCon
import TyCoRep hiding ( getTvSubstEnv, getCvSubstEnv )
import Util
import Pair
import Outputable
import Control.Monad
#if __GLASGOW_HASKELL__ > 710
import qualified Control.Monad.Fail as MonadFail
#endif
import Control.Applicative hiding ( empty )
import qualified Control.Applicative
{-
Unification is much trickier than you might think.
1. The substitution we generate binds the *template type variables*
which are given to us explicitly.
2. We want to match in the presence of foralls;
e.g (forall a. t1) ~ (forall b. t2)
That is what the RnEnv2 is for; it does the alpha-renaming
that makes it as if a and b were the same variable.
Initialising the RnEnv2, so that it can generate a fresh
binder when necessary, entails knowing the free variables of
both types.
3. We must be careful not to bind a template type variable to a
locally bound variable. E.g.
(forall a. x) ~ (forall b. b)
where x is the template type variable. Then we do not want to
bind x to a/b! This is a kind of occurs check.
The necessary locals accumulate in the RnEnv2.
Note [Kind coercions in Unify]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We wish to match/unify while ignoring casts. But, we can't just ignore
them completely, or we'll end up with ill-kinded substitutions. For example,
say we're matching `a` with `ty |> co`. If we just drop the cast, we'll
return [a |-> ty], but `a` and `ty` might have different kinds. We can't
just match/unify their kinds, either, because this might gratuitously
fail. After all, `co` is the witness that the kinds are the same -- they
may look nothing alike.
So, we pass a kind coercion to the match/unify worker. This coercion witnesses
the equality between the substed kind of the left-hand type and the substed
kind of the right-hand type. To get this coercion, we first have to match/unify
the kinds before looking at the types. Happily, we need look only one level
up, as all kinds are guaranteed to have kind *.
We thought, at one point, that this was all unnecessary: why should casts
be in types in the first place? But they do. In
dependent/should_compile/KindEqualities2, we see, for example
the constraint Num (Int |> (blah ; sym blah)).
We naturally want to find a dictionary for that constraint, which
requires dealing with coercions in this manner.
-}
-- | @tcMatchTy t1 t2@ produces a substitution (over fvs(t1))
-- @s@ such that @s(t1)@ equals @t2@.
-- The returned substitution might bind coercion variables,
-- if the variable is an argument to a GADT constructor.
--
-- We don't pass in a set of "template variables" to be bound
-- by the match, because tcMatchTy (and similar functions) are
-- always used on top-level types, so we can bind any of the
-- free variables of the LHS.
tcMatchTy :: Type -> Type -> Maybe TCvSubst
tcMatchTy ty1 ty2 = tcMatchTys [ty1] [ty2]
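-- A worked sketch in informal notation (not runnable GHC API code):
--
--   tcMatchTy (Maybe a) (Maybe Int)   ==   Just [a :-> Int]
--   tcMatchTy (Maybe a) [Int]         ==   Nothing
--
-- Only variables free in the template (the first argument) are bound.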
-- | This is similar to 'tcMatchTy', but extends a substitution
tcMatchTyX :: TCvSubst -- ^ Substitution to extend
-> Type -- ^ Template
-> Type -- ^ Target
-> Maybe TCvSubst
tcMatchTyX subst ty1 ty2 = tcMatchTysX subst [ty1] [ty2]
-- | Like 'tcMatchTy' but over a list of types.
tcMatchTys :: [Type] -- ^ Template
-> [Type] -- ^ Target
-> Maybe TCvSubst -- ^ One-shot; in principle the template
-- variables could be free in the target
tcMatchTys tys1 tys2
= tcMatchTysX (mkEmptyTCvSubst in_scope) tys1 tys2
where
in_scope = mkInScopeSet (tyCoVarsOfTypes tys1 `unionVarSet` tyCoVarsOfTypes tys2)
-- | Like 'tcMatchTys', but extending a substitution
tcMatchTysX :: TCvSubst -- ^ Substitution to extend
-> [Type] -- ^ Template
-> [Type] -- ^ Target
-> Maybe TCvSubst -- ^ One-shot substitution
tcMatchTysX (TCvSubst in_scope tv_env cv_env) tys1 tys2
-- See Note [Kind coercions in Unify]
= case tc_unify_tys (const BindMe)
False -- Matching, not unifying
False -- Not an injectivity check
(mkRnEnv2 in_scope) tv_env cv_env tys1 tys2 of
Unifiable (tv_env', cv_env')
-> Just $ TCvSubst in_scope tv_env' cv_env'
_ -> Nothing
-- | This one is called from the expression matcher,
-- which already has a MatchEnv in hand
ruleMatchTyX
:: TyCoVarSet -- ^ template variables
-> RnEnv2
-> TvSubstEnv -- ^ type substitution to extend
-> Type -- ^ Template
-> Type -- ^ Target
-> Maybe TvSubstEnv
ruleMatchTyX tmpl_tvs rn_env tenv tmpl target
-- See Note [Kind coercions in Unify]
= case tc_unify_tys (matchBindFun tmpl_tvs) False False rn_env
tenv emptyCvSubstEnv [tmpl] [target] of
Unifiable (tenv', _) -> Just tenv'
_ -> Nothing
matchBindFun :: TyCoVarSet -> TyVar -> BindFlag
matchBindFun tvs tv = if tv `elemVarSet` tvs then BindMe else Skolem
{- *********************************************************************
* *
Rough matching
* *
********************************************************************* -}
-- See Note [Rough match] field in InstEnv
roughMatchTcs :: [Type] -> [Maybe Name]
roughMatchTcs tys = map rough tys
where
rough ty
| Just (ty', _) <- splitCastTy_maybe ty = rough ty'
| Just (tc,_) <- splitTyConApp_maybe ty = Just (tyConName tc)
| otherwise = Nothing
instanceCantMatch :: [Maybe Name] -> [Maybe Name] -> Bool
-- (instanceCantMatch tcs1 tcs2) returns True if tcs1 cannot
-- possibly be instantiated to actual, nor vice versa;
-- False is non-committal
instanceCantMatch (mt : ts) (ma : as) = itemCantMatch mt ma || instanceCantMatch ts as
instanceCantMatch _ _ = False -- Safe
itemCantMatch :: Maybe Name -> Maybe Name -> Bool
itemCantMatch (Just t) (Just a) = t /= a
itemCantMatch _ _ = False
{-
************************************************************************
* *
GADTs
* *
************************************************************************
Note [Pruning dead case alternatives]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider data T a where
T1 :: T Int
T2 :: T a
newtype X = MkX Int
newtype Y = MkY Char
type family F a
type instance F Bool = Int
Now consider case x of { T1 -> e1; T2 -> e2 }
The question before the house is this: if I know something about the type
of x, can I prune away the T1 alternative?
Suppose x::T Char. It's impossible to construct a (T Char) using T1,
Answer = YES we can prune the T1 branch (clearly)
Suppose x::T (F a), where 'a' is in scope. Then 'a' might be instantiated
to 'Bool', in which case x::T Int, so
ANSWER = NO (clearly)
We see here that we want precisely the apartness check implemented within
tcUnifyTysFG. So that's what we do! Two types cannot match if they are surely
apart. Note that since we are simply dropping dead code, a conservative test
suffices.
-}
-- | Given a list of pairs of types, are any two members of a pair surely
-- apart, even after arbitrary type function evaluation and substitution?
typesCantMatch :: [(Type,Type)] -> Bool
-- See Note [Pruning dead case alternatives]
typesCantMatch prs = any (uncurry cant_match) prs
where
cant_match :: Type -> Type -> Bool
cant_match t1 t2 = case tcUnifyTysFG (const BindMe) [t1] [t2] of
SurelyApart -> True
_ -> False
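-- A worked sketch in informal notation, following Note [Pruning dead case
-- alternatives] above:
--
--   typesCantMatch [(Int, Bool)]     ==  True    -- surely apart
--   typesCantMatch [(Int, F a)]      ==  False   -- F a might reduce to Int
--   typesCantMatch [(T Char, T Int)] ==  True    -- Char and Int can't unify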
{-
************************************************************************
* *
Unification
* *
************************************************************************
Note [Fine-grained unification]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Do the types (x, x) and ([y], y) unify? The answer is seemingly "no" --
no substitution to finite types makes these match. But, a substitution to
*infinite* types can unify these two types: [x |-> [[[...]]], y |-> [[[...]]] ].
Why do we care? Consider these two type family instances:
type instance F x x = Int
type instance F [y] y = Bool
If we also have
type instance Looper = [Looper]
then the instances potentially overlap. The solution is to use unification
over infinite terms. This is possible (see [1] for lots of gory details), but
a full algorithm is a little more power than we need. Instead, we make a
conservative approximation and just omit the occurs check.
[1]: http://research.microsoft.com/en-us/um/people/simonpj/papers/ext-f/axioms-extended.pdf
tcUnifyTys considers an occurs-check problem as the same as general unification
failure.
tcUnifyTysFG ("fine-grained") returns one of three results: success, occurs-check
failure ("MaybeApart"), or general failure ("SurelyApart").
See also Trac #8162.
It's worth noting that unification in the presence of infinite types is not
complete. This means that, sometimes, a closed type family does not reduce
when it should. See test case indexed-types/should_fail/Overlap15 for an
example.
Note [The substitution in MaybeApart]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The constructor MaybeApart carries data with it, typically a TvSubstEnv. Why?
Because consider unifying these:
(a, a, Int) ~ (b, [b], Bool)
If we go left-to-right, we start with [a |-> b]. Then, on the middle terms, we
apply the subst we have so far and discover that we need [b |-> [b]]. Because
this fails the occurs check, we say that the types are MaybeApart (see above
Note [Fine-grained unification]). But, we can't stop there! Because if we
continue, we discover that Int is SurelyApart from Bool, and therefore the
types are apart. This has practical consequences for the ability for closed
type family applications to reduce. See test case
indexed-types/should_compile/Overlap14.
Note [Unifying with skolems]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we discover that two types unify if and only if a skolem variable is
substituted, we can't properly unify the types. But, that skolem variable
may later be instantiated with a unifyable type. So, we return maybeApart
in these cases.
Note [Lists of different lengths are MaybeApart]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It is unusual to call tcUnifyTys or tcUnifyTysFG with lists of different
lengths. The place where we know this can happen is from compatibleBranches in
FamInstEnv, when checking data family instances. Data family instances may be
eta-reduced; see Note [Eta reduction for data family axioms] in TcInstDcls.
We wish to say that
D :: * -> * -> *
axDF1 :: D Int ~ DFInst1
axDF2 :: D Int Bool ~ DFInst2
overlap. If we conclude that lists of different lengths are SurelyApart, then
it will look like these do *not* overlap, causing disaster. See Trac #9371.
In usages of tcUnifyTys outside of family instances, we always use tcUnifyTys,
which can't tell the difference between MaybeApart and SurelyApart, so those
usages won't notice this design choice.
-}
tcUnifyTy :: Type -> Type -- All tyvars are bindable
-> Maybe TCvSubst
-- A regular one-shot (idempotent) substitution
-- Simple unification of two types; all type variables are bindable
tcUnifyTy t1 t2 = tcUnifyTys (const BindMe) [t1] [t2]
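-- A worked sketch in informal notation (not runnable GHC API code):
--
--   tcUnifyTy (a, Int) (Bool, b)   ==  Just [a :-> Bool, b :-> Int]
--   tcUnifyTy (a, a)   (Int, Bool) ==  Nothing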
-- | Unify two types, treating type family applications as possibly unifying
-- with anything and looking through injective type family applications.
tcUnifyTyWithTFs :: Bool -- ^ True <=> do two-way unification;
-- False <=> do one-way matching.
-- See end of sec 5.2 from the paper
-> Type -> Type -> Maybe TCvSubst
-- This algorithm is an implementation of the "Algorithm U" presented in
-- the paper "Injective type families for Haskell", Figures 2 and 3.
-- The code is incorporated with the standard unifier for convenience, but
-- its operation should match the specification in the paper.
tcUnifyTyWithTFs twoWay t1 t2
= case tc_unify_tys (const BindMe) twoWay True
rn_env emptyTvSubstEnv emptyCvSubstEnv
[t1] [t2] of
Unifiable (subst, _) -> Just $ niFixTCvSubst subst
MaybeApart (subst, _) -> Just $ niFixTCvSubst subst
-- we want to *succeed* in questionable cases. This is a
-- pre-unification algorithm.
SurelyApart -> Nothing
where
rn_env = mkRnEnv2 $ mkInScopeSet $ tyCoVarsOfTypes [t1, t2]
-----------------
tcUnifyTys :: (TyCoVar -> BindFlag)
-> [Type] -> [Type]
-> Maybe TCvSubst
-- ^ A regular one-shot (idempotent) substitution
-- that unifies the erased types. See comments
-- for 'tcUnifyTysFG'
-- The two types may have common type variables, and indeed do so in the
-- second call to tcUnifyTys in FunDeps.checkClsFD
tcUnifyTys bind_fn tys1 tys2
= case tcUnifyTysFG bind_fn tys1 tys2 of
Unifiable result -> Just result
_ -> Nothing
-- This type does double-duty. It is used in the UM (unifier monad) and to
-- return the final result. See Note [Fine-grained unification]
type UnifyResult = UnifyResultM TCvSubst
data UnifyResultM a = Unifiable a -- the subst that unifies the types
| MaybeApart a -- the subst has as much as we know
                                   -- it must be part of a most general unifier
-- See Note [The substitution in MaybeApart]
| SurelyApart
deriving Functor
instance Applicative UnifyResultM where
pure = Unifiable
(<*>) = ap
instance Monad UnifyResultM where
SurelyApart >>= _ = SurelyApart
MaybeApart x >>= f = case f x of
Unifiable y -> MaybeApart y
other -> other
Unifiable x >>= f = f x
instance Alternative UnifyResultM where
empty = SurelyApart
a@(Unifiable {}) <|> _ = a
_ <|> b@(Unifiable {}) = b
a@(MaybeApart {}) <|> _ = a
_ <|> b@(MaybeApart {}) = b
SurelyApart <|> SurelyApart = SurelyApart
instance MonadPlus UnifyResultM
-- | @tcUnifyTysFG bind_tv tys1 tys2@ attempts to find a substitution @s@ (whose
-- domain elements all respond 'BindMe' to @bind_tv@) such that
-- @s(tys1)@ and @s(tys2)@ are equal, as witnessed by the returned
-- Coercions.
tcUnifyTysFG :: (TyVar -> BindFlag)
-> [Type] -> [Type]
-> UnifyResult
tcUnifyTysFG bind_fn tys1 tys2
= do { (env, _) <- tc_unify_tys bind_fn True False env
emptyTvSubstEnv emptyCvSubstEnv
tys1 tys2
; return $ niFixTCvSubst env }
where
vars = tyCoVarsOfTypes tys1 `unionVarSet` tyCoVarsOfTypes tys2
env = mkRnEnv2 $ mkInScopeSet vars
-- | This function is actually the one to call the unifier -- a little
-- too general for outside clients, though.
tc_unify_tys :: (TyVar -> BindFlag)
-> Bool -- ^ True <=> unify; False <=> match
-> Bool -- ^ True <=> doing an injectivity check
-> RnEnv2
-> TvSubstEnv -- ^ substitution to extend
-> CvSubstEnv
-> [Type] -> [Type]
-> UnifyResultM (TvSubstEnv, CvSubstEnv)
tc_unify_tys bind_fn unif inj_check rn_env tv_env cv_env tys1 tys2
= initUM bind_fn unif inj_check rn_env tv_env cv_env $
do { unify_tys kis1 kis2
; unify_tys tys1 tys2
; (,) <$> getTvSubstEnv <*> getCvSubstEnv }
where
kis1 = map typeKind tys1
kis2 = map typeKind tys2
instance Outputable a => Outputable (UnifyResultM a) where
ppr SurelyApart = text "SurelyApart"
ppr (Unifiable x) = text "Unifiable" <+> ppr x
ppr (MaybeApart x) = text "MaybeApart" <+> ppr x
{-
************************************************************************
* *
Non-idempotent substitution
* *
************************************************************************
Note [Non-idempotent substitution]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
During unification we use a TvSubstEnv/CvSubstEnv pair that is
(a) non-idempotent
(b) loop-free; ie repeatedly applying it yields a fixed point
Note [Finding the substitution fixpoint]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Finding the fixpoint of a non-idempotent substitution arising from a
unification is harder than it looks, because of kinds. Consider
T k (H k (f:k)) ~ T * (g:*)
If we unify, we get the substitution
[ k -> *
, g -> H k (f:k) ]
To make it idempotent we don't want to get just
[ k -> *
, g -> H * (f:k) ]
We also want to substitute inside f's kind, to get
[ k -> *
, g -> H k (f:*) ]
If we don't do this, we may apply the substitution to something,
and get an ill-formed type, i.e. one where typeKind will fail.
This happened, for example, in Trac #9106.
This is the reason for extending tenv with [f:k -> f:*], in the
definition of tenv' in niFixTCvSubst
-}
niFixTCvSubst :: TvSubstEnv -> TCvSubst
-- Find the idempotent fixed point of the non-idempotent substitution
-- See Note [Finding the substitution fixpoint]
-- ToDo: use laziness instead of iteration?
niFixTCvSubst tenv = f tenv
where
f tenv
| not_fixpoint = f (mapVarEnv (substTy subst') tenv)
| otherwise = subst
where
not_fixpoint = foldVarSet ((||) . in_domain) False range_tvs
in_domain tv = tv `elemVarEnv` tenv
range_tvs = foldVarEnv (unionVarSet . tyCoVarsOfType) emptyVarSet tenv
subst = mkTCvSubst (mkInScopeSet range_tvs)
(tenv, emptyCvSubstEnv)
-- env' extends env by replacing any free type with
-- that same tyvar with a substituted kind
-- See note [Finding the substitution fixpoint]
tenv' = extendVarEnvList tenv [ (rtv, mkTyVarTy $
setTyVarKind rtv $
substTy subst $
tyVarKind rtv)
| rtv <- varSetElems range_tvs
, not (in_domain rtv) ]
subst' = mkTCvSubst (mkInScopeSet range_tvs)
(tenv', emptyCvSubstEnv)
niSubstTvSet :: TvSubstEnv -> TyCoVarSet -> TyCoVarSet
-- Apply the non-idempotent substitution to a set of type variables,
-- remembering that the substitution isn't necessarily idempotent
-- This is used in the occurs check, before extending the substitution
niSubstTvSet tsubst tvs
= foldVarSet (unionVarSet . get) emptyVarSet tvs
where
get tv
| Just ty <- lookupVarEnv tsubst tv
= niSubstTvSet tsubst (tyCoVarsOfType ty)
| otherwise
= unitVarSet tv
{-
************************************************************************
* *
The workhorse
* *
************************************************************************
Note [Specification of unification]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The algorithm implemented here is rather delicate, and we depend on it
to uphold certain properties. This is a summary of these required
properties. Any reference to "flattening" refers to the flattening
algorithm in FamInstEnv (See Note [Flattening] in FamInstEnv), not
the flattening algorithm in the solver.
Notation:
θ,φ substitutions
ξ type-function-free types
τ,σ other types
τ♭ type τ, flattened
≡ eqType
(U1) Soundness.
If (unify τ₁ τ₂) = Unifiable θ, then θ(τ₁) ≡ θ(τ₂). θ is a most general
unifier for τ₁ and τ₂.
(U2) Completeness.
If (unify ξ₁ ξ₂) = SurelyApart,
then there exists no substitution θ such that θ(ξ₁) ≡ θ(ξ₂).
These two properties are stated as Property 11 in the "Closed Type Families"
paper (POPL'14). Below, this paper is called [CTF].
(U3) Apartness under substitution.
If (unify ξ τ♭) = SurelyApart, then (unify ξ θ(τ)♭) = SurelyApart, for
any θ. (Property 12 from [CTF])
(U4) Apart types do not unify.
If (unify ξ τ♭) = SurelyApart, then there exists no θ such that
θ(ξ) = θ(τ). (Property 13 from [CTF])
THEOREM. Completeness w.r.t ~
If (unify τ₁♭ τ₂♭) = SurelyApart, then there exists no proof that (τ₁ ~ τ₂).
PROOF. See appendix of [CTF].
The unification algorithm is used for type family injectivity, as described
in the "Injective Type Families" paper (Haskell'15), called [ITF]. When run
in this mode, it has the following properties.
(I1) If (unify σ τ) = SurelyApart, then σ and τ are not unifiable, even
after arbitrary type family reductions. Note that σ and τ are not flattened
here.
(I2) If (unify σ τ) = MaybeApart θ, and if some
φ exists such that φ(σ) ~ φ(τ), then φ extends θ.
Furthermore, the RULES matching algorithm requires this property,
but only when using this algorithm for matching:
(M1) If (match σ τ) succeeds with θ, then all matchable tyvars in σ
are bound in θ.
Property M1 means that we must extend the substitution with, say
(a ↦ a) when appropriate during matching.
See also Note [Self-substitution when matching].
(M2) Completeness of matching.
If θ(σ) = τ, then (match σ τ) = Unifiable φ, where θ is an extension of φ.
Sadly, property M2 and I2 conflict. Consider
type family F1 a b where
F1 Int Bool = Char
F1 Double String = Char
Consider now two matching problems:
P1. match (F1 a Bool) (F1 Int Bool)
P2. match (F1 a Bool) (F1 Double String)
In case P1, we must find (a ↦ Int) to satisfy M2.
In case P2, we must /not/ find (a ↦ Double), in order to satisfy I2. (Note
that the correct mapping for I2 is (a ↦ Int). There is no way to discover
this, but we mustn't map a to anything else!)
We thus must parameterize the algorithm over whether it's being used
for an injectivity check (refrain from looking at non-injective arguments
to type families) or not (do indeed look at those arguments).
(It's all a question of whether or not to include equation (7) from Fig. 2
of [ITF].)
This extra parameter is a bit fiddly, perhaps, but seemingly less so than
having two separate, almost-identical algorithms.
Note [Self-substitution when matching]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
What should happen when we're *matching* (not unifying) a1 with a1? We
should get a substitution [a1 |-> a1]. A successful match should map all
the template variables (except ones that disappear when expanding synonyms).
But when unifying, we don't want to do this, because we'll then fall into
a loop.
This arrangement affects the code in three places:
- If we're matching a refined template variable, don't recur. Instead, just
check for equality. That is, if we know [a |-> Maybe a] and are matching
(a ~? Maybe Int), we want to just fail.
- Skip the occurs check when matching. This comes up in two places, because
matching against variables is handled separately from matching against
full-on types.
Note that this arrangement was provoked by a real failure, where the same
unique ended up in the template as in the target. (It was a rule firing when
compiling Data.List.NonEmpty.)
Note [Matching coercion variables]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this:
type family F a
data G a where
MkG :: F a ~ Bool => G a
type family Foo (x :: G a) :: F a
type instance Foo MkG = False
We would like that to be accepted. For that to work, we need to introduce
a coercion variable on the left and then use it on the right. Accordingly,
at use sites of Foo, we need to be able to use matching to figure out the
value for the coercion. (See the desugared version:
axFoo :: [a :: *, c :: F a ~ Bool]. Foo (MkG c) = False |> (sym c)
) We never want this action to happen during *unification* though, when
all bets are off.
-}
-- See Note [Specification of unification]
unify_ty :: Type -> Type -> Coercion -- Types to be unified and a co
-- between their kinds
-- See Note [Kind coercions in Unify]
-> UM ()
-- Respects newtypes, PredTypes
unify_ty ty1 ty2 kco
| Just ty1' <- coreView ty1 = unify_ty ty1' ty2 kco
| Just ty2' <- coreView ty2 = unify_ty ty1 ty2' kco
| CastTy ty1' co <- ty1 = unify_ty ty1' ty2 (co `mkTransCo` kco)
| CastTy ty2' co <- ty2 = unify_ty ty1 ty2' (kco `mkTransCo` mkSymCo co)
unify_ty (TyVarTy tv1) ty2 kco = uVar tv1 ty2 kco
unify_ty ty1 (TyVarTy tv2) kco
= do { unif <- amIUnifying
; if unif
then umSwapRn $ uVar tv2 ty1 (mkSymCo kco)
else surelyApart } -- non-tv on left; tv on right: can't match.
unify_ty ty1 ty2 _kco
| Just (tc1, tys1) <- splitTyConApp_maybe ty1
, Just (tc2, tys2) <- splitTyConApp_maybe ty2
= if tc1 == tc2 || (isStarKind ty1 && isStarKind ty2)
then if isInjectiveTyCon tc1 Nominal
then unify_tys tys1 tys2
else do { let inj | isTypeFamilyTyCon tc1
= case familyTyConInjectivityInfo tc1 of
NotInjective -> repeat False
Injective bs -> bs
| otherwise
= repeat False
(inj_tys1, noninj_tys1) = partitionByList inj tys1
(inj_tys2, noninj_tys2) = partitionByList inj tys2
; unify_tys inj_tys1 inj_tys2
; inj_tf <- checkingInjectivity
; unless inj_tf $ -- See (end of) Note [Specification of unification]
don'tBeSoSure $ unify_tys noninj_tys1 noninj_tys2 }
else -- tc1 /= tc2
if isGenerativeTyCon tc1 Nominal && isGenerativeTyCon tc2 Nominal
then surelyApart
else maybeApart
-- Applications need a bit of care!
-- They can match FunTy and TyConApp, so use splitAppTy_maybe
-- NB: we've already dealt with type variables,
-- so if one type is an App the other one jolly well better be too
unify_ty (AppTy ty1a ty1b) ty2 _kco
| Just (ty2a, ty2b) <- tcRepSplitAppTy_maybe ty2
= unify_ty_app ty1a ty1b ty2a ty2b
unify_ty ty1 (AppTy ty2a ty2b) _kco
| Just (ty1a, ty1b) <- tcRepSplitAppTy_maybe ty1
= unify_ty_app ty1a ty1b ty2a ty2b
unify_ty (LitTy x) (LitTy y) _kco | x == y = return ()
unify_ty (ForAllTy (Named tv1 _) ty1) (ForAllTy (Named tv2 _) ty2) kco
= do { unify_ty (tyVarKind tv1) (tyVarKind tv2) (mkNomReflCo liftedTypeKind)
; umRnBndr2 tv1 tv2 $ unify_ty ty1 ty2 kco }
-- See Note [Matching coercion variables]
unify_ty (CoercionTy co1) (CoercionTy co2) kco
= do { unif <- amIUnifying
; c_subst <- getCvSubstEnv
; case co1 of
CoVarCo cv
| not unif
, not (cv `elemVarEnv` c_subst)
-> do { b <- tvBindFlagL cv
; if b == BindMe
then do { checkRnEnvRCo co2
; let [_, _, co_l, co_r] = decomposeCo 4 kco
-- cv :: t1 ~ t2
-- co2 :: s1 ~ s2
-- co_l :: t1 ~ s1
-- co_r :: t2 ~ s2
; extendCvEnv cv (co_l `mkTransCo`
co2 `mkTransCo`
mkSymCo co_r) }
else return () }
_ -> return () }
unify_ty ty1 _ _
| Just (tc1, _) <- splitTyConApp_maybe ty1
, not (isGenerativeTyCon tc1 Nominal)
= maybeApart
unify_ty _ ty2 _
| Just (tc2, _) <- splitTyConApp_maybe ty2
, not (isGenerativeTyCon tc2 Nominal)
= do { unif <- amIUnifying
; if unif then maybeApart else surelyApart }
unify_ty _ _ _ = surelyApart
unify_ty_app :: Type -> Type -> Type -> Type -> UM ()
unify_ty_app ty1a ty1b ty2a ty2b
= do { -- TODO (RAE): Remove this exponential behavior.
let ki1a = typeKind ty1a
ki2a = typeKind ty2a
; unify_ty ki1a ki2a (mkNomReflCo liftedTypeKind)
; let kind_co = mkNomReflCo ki1a
; unify_ty ty1a ty2a kind_co
; unify_ty ty1b ty2b (mkNthCo 0 kind_co) }
unify_tys :: [Type] -> [Type] -> UM ()
unify_tys orig_xs orig_ys
= go orig_xs orig_ys
where
go [] [] = return ()
go (x:xs) (y:ys)
= do { unify_ty x y (mkNomReflCo $ typeKind x)
; go xs ys }
go _ _ = maybeApart -- See Note [Lists of different lengths are MaybeApart]
---------------------------------
uVar :: TyVar -- Variable to be unified
-> Type -- with this Type
-> Coercion -- :: kind tv ~N kind ty
-> UM ()
uVar tv1 ty kco
= do { -- Check to see whether tv1 is refined by the substitution
subst <- getTvSubstEnv
; case (lookupVarEnv subst tv1) of
Just ty' -> do { unif <- amIUnifying
; if unif
then unify_ty ty' ty kco -- Yes, call back into unify
else -- when *matching*, we don't want to just recur here.
-- this is because the range of the subst is the target
-- type, not the template type. So, just check for
-- normal type equality.
guard (ty' `eqType` ty) }
Nothing -> uUnrefined tv1 ty ty kco } -- No, continue
uUnrefined :: TyVar -- variable to be unified
-> Type -- with this Type
-> Type -- (version w/ expanded synonyms)
-> Coercion -- :: kind tv ~N kind ty
-> UM ()
-- We know that tv1 isn't refined
uUnrefined tv1 ty2 ty2' kco
| Just ty2'' <- coreView ty2'
= uUnrefined tv1 ty2 ty2'' kco -- Unwrap synonyms
-- This is essential, in case we have
-- type Foo a = a
-- and then unify a ~ Foo a
| TyVarTy tv2 <- ty2'
= do { tv1' <- umRnOccL tv1
; tv2' <- umRnOccR tv2
; unif <- amIUnifying
-- See Note [Self-substitution when matching]
; when (tv1' /= tv2' || not unif) $ do
{ subst <- getTvSubstEnv
-- Check to see whether tv2 is refined
; case lookupVarEnv subst tv2 of
{ Just ty' | unif -> uUnrefined tv1 ty' ty' kco
; _ -> do
{ -- So both are unrefined
-- And then bind one or the other,
-- depending on which is bindable
; b1 <- tvBindFlagL tv1
; b2 <- tvBindFlagR tv2
; let ty1 = mkTyVarTy tv1
; case (b1, b2) of
(BindMe, _) -> do { checkRnEnvR ty2 -- make sure ty2 is not a local
; extendTvEnv tv1 (ty2 `mkCastTy` mkSymCo kco) }
(_, BindMe) | unif -> do { checkRnEnvL ty1 -- ditto for ty1
; extendTvEnv tv2 (ty1 `mkCastTy` kco) }
_ | tv1' == tv2' -> return ()
-- How could this happen? If we're only matching and if
-- we're comparing forall-bound variables.
_ -> maybeApart -- See Note [Unification with skolems]
}}}}
uUnrefined tv1 ty2 ty2' kco -- ty2 is not a type variable
= do { occurs <- elemNiSubstSet tv1 (tyCoVarsOfType ty2')
; unif <- amIUnifying
; if unif && occurs -- See Note [Self-substitution when matching]
then maybeApart -- Occurs check, see Note [Fine-grained unification]
else do bindTv tv1 (ty2 `mkCastTy` mkSymCo kco) }
-- Bind tyvar to the synonym if poss
elemNiSubstSet :: TyVar -> TyCoVarSet -> UM Bool
elemNiSubstSet v set
= do { tsubst <- getTvSubstEnv
; return $ v `elemVarSet` niSubstTvSet tsubst set }
bindTv :: TyVar -> Type -> UM ()
bindTv tv ty -- ty is not a variable
= do { checkRnEnvR ty -- make sure ty mentions no local variables
; b <- tvBindFlagL tv
; case b of
Skolem -> maybeApart -- See Note [Unification with skolems]
BindMe -> extendTvEnv tv ty
}
{-
%************************************************************************
%* *
Binding decisions
* *
************************************************************************
-}
data BindFlag
= BindMe -- A regular type variable
| Skolem -- This type variable is a skolem constant
-- Don't bind it; it only matches itself
deriving Eq
{-
************************************************************************
* *
Unification monad
* *
************************************************************************
-}
data UMEnv = UMEnv { um_bind_fun :: TyVar -> BindFlag
-- the user-supplied BindFlag function
, um_unif :: Bool -- unification (True) or matching?
, um_inj_tf :: Bool -- checking for injectivity?
-- See (end of) Note [Specification of unification]
, um_rn_env :: RnEnv2 }
data UMState = UMState
{ um_tv_env :: TvSubstEnv
, um_cv_env :: CvSubstEnv }
newtype UM a = UM { unUM :: UMEnv -> UMState
-> UnifyResultM (UMState, a) }
instance Functor UM where
fmap = liftM
instance Applicative UM where
pure a = UM (\_ s -> pure (s, a))
(<*>) = ap
instance Monad UM where
fail _ = UM (\_ _ -> SurelyApart) -- failed pattern match
m >>= k = UM (\env state ->
do { (state', v) <- unUM m env state
; unUM (k v) env state' })
-- need this instance because of a use of 'guard' above
instance Alternative UM where
empty = UM (\_ _ -> Control.Applicative.empty)
m1 <|> m2 = UM (\env state ->
unUM m1 env state <|>
unUM m2 env state)
instance MonadPlus UM
#if __GLASGOW_HASKELL__ > 710
instance MonadFail.MonadFail UM where
fail _ = UM (\_tvs _subst -> SurelyApart) -- failed pattern match
#endif
initUM :: (TyVar -> BindFlag)
-> Bool -- True <=> unify; False <=> match
-> Bool -- True <=> doing an injectivity check
-> RnEnv2
-> TvSubstEnv -- subst to extend
-> CvSubstEnv
-> UM a -> UnifyResultM a
initUM badtvs unif inj_tf rn_env subst_env cv_subst_env um
= case unUM um env state of
Unifiable (_, subst) -> Unifiable subst
MaybeApart (_, subst) -> MaybeApart subst
SurelyApart -> SurelyApart
where
env = UMEnv { um_bind_fun = badtvs
, um_unif = unif
, um_inj_tf = inj_tf
, um_rn_env = rn_env }
state = UMState { um_tv_env = subst_env
, um_cv_env = cv_subst_env }
tvBindFlagL :: TyVar -> UM BindFlag
tvBindFlagL tv = UM $ \env state ->
Unifiable (state, if inRnEnvL (um_rn_env env) tv
then Skolem
else um_bind_fun env tv)
tvBindFlagR :: TyVar -> UM BindFlag
tvBindFlagR tv = UM $ \env state ->
Unifiable (state, if inRnEnvR (um_rn_env env) tv
then Skolem
else um_bind_fun env tv)
getTvSubstEnv :: UM TvSubstEnv
getTvSubstEnv = UM $ \_ state -> Unifiable (state, um_tv_env state)
getCvSubstEnv :: UM CvSubstEnv
getCvSubstEnv = UM $ \_ state -> Unifiable (state, um_cv_env state)
extendTvEnv :: TyVar -> Type -> UM ()
extendTvEnv tv ty = UM $ \_ state ->
Unifiable (state { um_tv_env = extendVarEnv (um_tv_env state) tv ty }, ())
extendCvEnv :: CoVar -> Coercion -> UM ()
extendCvEnv cv co = UM $ \_ state ->
Unifiable (state { um_cv_env = extendVarEnv (um_cv_env state) cv co }, ())
umRnBndr2 :: TyCoVar -> TyCoVar -> UM a -> UM a
umRnBndr2 v1 v2 thing = UM $ \env state ->
let rn_env' = rnBndr2 (um_rn_env env) v1 v2 in
unUM thing (env { um_rn_env = rn_env' }) state
checkRnEnv :: (RnEnv2 -> Var -> Bool) -> VarSet -> UM ()
checkRnEnv inRnEnv varset = UM $ \env state ->
if any (inRnEnv (um_rn_env env)) (varSetElems varset)
then MaybeApart (state, ())
else Unifiable (state, ())
-- | Converts any SurelyApart to a MaybeApart
don'tBeSoSure :: UM () -> UM ()
don'tBeSoSure um = UM $ \env state ->
case unUM um env state of
SurelyApart -> MaybeApart (state, ())
other -> other
checkRnEnvR :: Type -> UM ()
checkRnEnvR ty = checkRnEnv inRnEnvR (tyCoVarsOfType ty)
checkRnEnvL :: Type -> UM ()
checkRnEnvL ty = checkRnEnv inRnEnvL (tyCoVarsOfType ty)
checkRnEnvRCo :: Coercion -> UM ()
checkRnEnvRCo co = checkRnEnv inRnEnvR (tyCoVarsOfCo co)
umRnOccL :: TyVar -> UM TyVar
umRnOccL v = UM $ \env state ->
Unifiable (state, rnOccL (um_rn_env env) v)
umRnOccR :: TyVar -> UM TyVar
umRnOccR v = UM $ \env state ->
Unifiable (state, rnOccR (um_rn_env env) v)
umSwapRn :: UM a -> UM a
umSwapRn thing = UM $ \env state ->
let rn_env' = rnSwap (um_rn_env env) in
unUM thing (env { um_rn_env = rn_env' }) state
amIUnifying :: UM Bool
amIUnifying = UM $ \env state -> Unifiable (state, um_unif env)
checkingInjectivity :: UM Bool
checkingInjectivity = UM $ \env state -> Unifiable (state, um_inj_tf env)
maybeApart :: UM ()
maybeApart = UM (\_ state -> MaybeApart (state, ()))
surelyApart :: UM a
surelyApart = UM (\_ _ -> SurelyApart)
{-
%************************************************************************
%* *
Matching a (lifted) type against a coercion
%* *
%************************************************************************
This section defines essentially an inverse to liftCoSubst. It is defined
here to avoid a dependency from Coercion on this module.
-}
data MatchEnv = ME { me_tmpls :: TyVarSet
, me_env :: RnEnv2 }
-- | 'liftCoMatch' is sort of inverse to 'liftCoSubst'. In particular, if
-- @liftCoMatch vars ty co == Just s@, then @tyCoSubst s ty == co@,
-- where @==@ there means that the result of tyCoSubst has the same
-- type as the original co; but may be different under the hood.
-- That is, it matches a type against a coercion of the same
-- "shape", and returns a lifting substitution which could have been
-- used to produce the given coercion from the given type.
-- Note that this function is incomplete -- it might return Nothing
-- when there does indeed exist a possible lifting context.
--
-- This function is incomplete in that it doesn't respect the equality
-- in `eqType`. That is, it's possible that this will succeed for t1 and
-- fail for t2, even when t1 `eqType` t2. That's because it depends on
-- there being a very similar structure between the type and the coercion.
-- This incompleteness shouldn't be all that surprising, especially because
-- it depends on the structure of the coercion, which is a silly thing to do.
--
-- The lifting context produced doesn't have to be exacting in the roles
-- of the mappings. This is because any use of the lifting context will
-- also require a desired role. Thus, this algorithm prefers mapping to
-- nominal coercions where it can do so.
liftCoMatch :: TyCoVarSet -> Type -> Coercion -> Maybe LiftingContext
liftCoMatch tmpls ty co
= do { cenv1 <- ty_co_match menv emptyVarEnv ki ki_co ki_ki_co ki_ki_co
; cenv2 <- ty_co_match menv cenv1 ty co
(mkNomReflCo co_lkind) (mkNomReflCo co_rkind)
; return (LC (mkEmptyTCvSubst in_scope) cenv2) }
where
menv = ME { me_tmpls = tmpls, me_env = mkRnEnv2 in_scope }
in_scope = mkInScopeSet (tmpls `unionVarSet` tyCoVarsOfCo co)
-- Like tcMatchTy, assume all the interesting variables
-- in ty are in tmpls
ki = typeKind ty
ki_co = promoteCoercion co
ki_ki_co = mkNomReflCo liftedTypeKind
Pair co_lkind co_rkind = coercionKind ki_co
-- | 'ty_co_match' does all the actual work for 'liftCoMatch'.
ty_co_match :: MatchEnv -- ^ ambient helpful info
-> LiftCoEnv -- ^ incoming subst
-> Type -- ^ ty, type to match
-> Coercion -- ^ co, coercion to match against
-> Coercion -- ^ :: kind of L type of substed ty ~N L kind of co
-> Coercion -- ^ :: kind of R type of substed ty ~N R kind of co
-> Maybe LiftCoEnv
ty_co_match menv subst ty co lkco rkco
| Just ty' <- coreViewOneStarKind ty = ty_co_match menv subst ty' co lkco rkco
-- handle Refl case:
| tyCoVarsOfType ty `isNotInDomainOf` subst
, Just (ty', _) <- isReflCo_maybe co
, ty `eqType` ty'
= Just subst
where
isNotInDomainOf :: VarSet -> VarEnv a -> Bool
isNotInDomainOf set env
= noneSet (\v -> elemVarEnv v env) set
noneSet :: (Var -> Bool) -> VarSet -> Bool
noneSet f = foldVarSet (\v rest -> rest && (not $ f v)) True
ty_co_match menv subst ty co lkco rkco
| CastTy ty' co' <- ty
= ty_co_match menv subst ty' co (co' `mkTransCo` lkco) (co' `mkTransCo` rkco)
| CoherenceCo co1 co2 <- co
= ty_co_match menv subst ty co1 (lkco `mkTransCo` mkSymCo co2) rkco
| SymCo co' <- co
= swapLiftCoEnv <$> ty_co_match menv (swapLiftCoEnv subst) ty co' rkco lkco
-- Match a type variable against a non-refl coercion
ty_co_match menv subst (TyVarTy tv1) co lkco rkco
| Just co1' <- lookupVarEnv subst tv1' -- tv1' is already bound to co1
= if eqCoercionX (nukeRnEnvL rn_env) co1' co
then Just subst
else Nothing -- no match since tv1 matches two different coercions
| tv1' `elemVarSet` me_tmpls menv -- tv1' is a template var
= if any (inRnEnvR rn_env) (tyCoVarsOfCoList co)
then Nothing -- occurs check failed
else Just $ extendVarEnv subst tv1' $
castCoercionKind co (mkSymCo lkco) (mkSymCo rkco)
| otherwise
= Nothing
where
rn_env = me_env menv
tv1' = rnOccL rn_env tv1
-- just look through SubCo's. We don't really care about roles here.
ty_co_match menv subst ty (SubCo co) lkco rkco
= ty_co_match menv subst ty co lkco rkco
ty_co_match menv subst (AppTy ty1a ty1b) co _lkco _rkco
| Just (co2, arg2) <- splitAppCo_maybe co -- c.f. Unify.match on AppTy
= ty_co_match_app menv subst ty1a ty1b co2 arg2
ty_co_match menv subst ty1 (AppCo co2 arg2) _lkco _rkco
| Just (ty1a, ty1b) <- repSplitAppTy_maybe ty1
-- yes, the one from Type, not TcType; this is for coercion optimization
= ty_co_match_app menv subst ty1a ty1b co2 arg2
ty_co_match menv subst (TyConApp tc1 tys) (TyConAppCo _ tc2 cos) _lkco _rkco
= ty_co_match_tc menv subst tc1 tys tc2 cos
ty_co_match menv subst (ForAllTy (Anon ty1) ty2) (TyConAppCo _ tc cos) _lkco _rkco
= ty_co_match_tc menv subst funTyCon [ty1, ty2] tc cos
ty_co_match menv subst (ForAllTy (Named tv1 _) ty1)
(ForAllCo tv2 kind_co2 co2)
lkco rkco
= do { subst1 <- ty_co_match menv subst (tyVarKind tv1) kind_co2
ki_ki_co ki_ki_co
; let rn_env0 = me_env menv
rn_env1 = rnBndr2 rn_env0 tv1 tv2
menv' = menv { me_env = rn_env1 }
; ty_co_match menv' subst1 ty1 co2 lkco rkco }
where
ki_ki_co = mkNomReflCo liftedTypeKind
ty_co_match _ subst (CoercionTy {}) _ _ _
= Just subst -- don't inspect coercions
ty_co_match menv subst ty co lkco rkco
| Just co' <- pushRefl co = ty_co_match menv subst ty co' lkco rkco
| otherwise = Nothing
ty_co_match_tc :: MatchEnv -> LiftCoEnv
-> TyCon -> [Type]
-> TyCon -> [Coercion]
-> Maybe LiftCoEnv
ty_co_match_tc menv subst tc1 tys1 tc2 cos2
= do { guard (tc1 == tc2)
; ty_co_match_args menv subst tys1 cos2 lkcos rkcos }
where
Pair lkcos rkcos
= traverse (fmap mkNomReflCo . coercionKind) cos2
ty_co_match_app :: MatchEnv -> LiftCoEnv
-> Type -> Type -> Coercion -> Coercion
-> Maybe LiftCoEnv
ty_co_match_app menv subst ty1a ty1b co2a co2b
= do { -- TODO (RAE): Remove this exponential behavior.
subst1 <- ty_co_match menv subst ki1a ki2a ki_ki_co ki_ki_co
; let Pair lkco rkco = mkNomReflCo <$> coercionKind ki2a
; subst2 <- ty_co_match menv subst1 ty1a co2a lkco rkco
; ty_co_match menv subst2 ty1b co2b (mkNthCo 0 lkco) (mkNthCo 0 rkco) }
where
ki1a = typeKind ty1a
ki2a = promoteCoercion co2a
ki_ki_co = mkNomReflCo liftedTypeKind
ty_co_match_args :: MatchEnv -> LiftCoEnv -> [Type]
-> [Coercion] -> [Coercion] -> [Coercion]
-> Maybe LiftCoEnv
ty_co_match_args _ subst [] [] _ _ = Just subst
ty_co_match_args menv subst (ty:tys) (arg:args) (lkco:lkcos) (rkco:rkcos)
= do { subst' <- ty_co_match menv subst ty arg lkco rkco
; ty_co_match_args menv subst' tys args lkcos rkcos }
ty_co_match_args _ _ _ _ _ _ = Nothing
pushRefl :: Coercion -> Maybe Coercion
pushRefl (Refl Nominal (AppTy ty1 ty2))
= Just (AppCo (Refl Nominal ty1) (mkNomReflCo ty2))
pushRefl (Refl r (ForAllTy (Anon ty1) ty2))
= Just (TyConAppCo r funTyCon [mkReflCo r ty1, mkReflCo r ty2])
pushRefl (Refl r (TyConApp tc tys))
= Just (TyConAppCo r tc (zipWith mkReflCo (tyConRolesX r tc) tys))
pushRefl (Refl r (ForAllTy (Named tv _) ty))
= Just (mkHomoForAllCos_NoRefl [tv] (Refl r ty))
-- NB: NoRefl variant. Otherwise, we get a loop!
pushRefl (Refl r (CastTy ty co)) = Just (castCoercionKind (Refl r ty) co co)
pushRefl _ = Nothing
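-- Illustrative example (not from the original source): on a nominal reflexive
-- coercion over an application,
--     pushRefl (Refl Nominal (AppTy f a))
-- yields Just (AppCo (Refl Nominal f) (Refl Nominal a)), exposing one layer of
-- structure so that ty_co_match can recurse into it.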
|
nushio3/ghc
|
compiler/types/Unify.hs
|
bsd-3-clause
| 47,697
| 301
| 19
| 13,932
| 7,010
| 3,842
| 3,168
| 551
| 9
|
module Test003 where
import qualified Data.Text as T
import Kask.Print
import Prelude hiding (print)
test1 :: String
test1 = toString $ do
print ("aaa" :: String)
printLn $ show (127 :: Int)
printLn ("ąęśćółżźń" :: T.Text)
test2 :: ShowS
test2 = toShowS $ do
print ("bbb" :: String)
printLn $ show (128 :: Int)
printLn ("ąęśćółżźń" :: T.Text)
test3 :: T.Text
test3 = toText $ do
print ("ccc" :: String)
printLn $ show (129 :: Int)
printLn ("ąęśćółżźń" :: T.Text)
test4 :: IO ()
test4 = do
print ("ddd" :: String)
printLn $ show (130 :: Int)
printLn ("ąęśćółżźń" :: T.Text)
test5 :: IO ()
test5 = do
printLn test1
printLn (test2 "")
printLn test3
test4
|
kongra/kask-base
|
app/Test003.hs
|
bsd-3-clause
| 776
| 0
| 10
| 204
| 295
| 153
| 142
| 30
| 1
|
module System.Console.Haskeline.Key(Key(..),
Modifier(..),
BaseKey(..),
noModifier,
simpleKey,
simpleChar,
metaChar,
ctrlChar,
metaKey,
ctrlKey,
parseKey
) where
import Data.Bits
import Data.Char
import Data.Maybe
import Data.List (intercalate)
import Control.Monad
data Key = Key Modifier BaseKey
deriving (Eq,Ord)
instance Show Key where
show (Key modifier base)
| modifier == noModifier = show base
| otherwise = show modifier ++ "-" ++ show base
data Modifier = Modifier {hasControl, hasMeta, hasShift :: Bool}
deriving (Eq,Ord)
instance Show Modifier where
show m = intercalate "-"
$ catMaybes [maybeUse hasControl "ctrl"
, maybeUse hasMeta "meta"
, maybeUse hasShift "shift"
]
where
maybeUse f str = if f m then Just str else Nothing
noModifier :: Modifier
noModifier = Modifier False False False
-- Note: a few of these aren't really keys (e.g., KillLine),
-- but they provide useful enough binding points to include.
data BaseKey = KeyChar Char
| FunKey Int
| LeftKey | RightKey | DownKey | UpKey
| KillLine | Home | End | PageDown | PageUp
| Backspace | Delete
| SearchReverse | SearchForward
deriving (Eq, Ord)
instance Show BaseKey where
show (KeyChar '\n') = "Return"
show (KeyChar '\t') = "Tab"
show (KeyChar '\ESC') = "Esc"
show (KeyChar c)
| isPrint c = [c]
| isPrint unCtrld = "ctrl-" ++ [unCtrld]
| otherwise = show c
where
unCtrld = toEnum (fromEnum c .|. ctrlBits)
show (FunKey n) = 'f' : show n
show LeftKey = "Left"
show RightKey = "Right"
show DownKey = "Down"
show UpKey = "Up"
show KillLine = "KillLine"
show Home = "Home"
show End = "End"
show PageDown = "PageDown"
show PageUp = "PageUp"
show Backspace = "Backspace"
show Delete = "Delete"
show SearchReverse = "SearchReverse"
show SearchForward = "SearchForward"
simpleKey :: BaseKey -> Key
simpleKey = Key noModifier
metaKey :: Key -> Key
metaKey (Key m bc) = Key m {hasMeta = True} bc
ctrlKey :: Key -> Key
ctrlKey (Key m bc) = Key m {hasControl = True} bc
simpleChar, metaChar, ctrlChar :: Char -> Key
simpleChar = simpleKey . KeyChar
metaChar = metaKey . simpleChar
ctrlChar = simpleChar . setControlBits
setControlBits :: Char -> Char
setControlBits '?' = toEnum 127
setControlBits c = toEnum $ fromEnum c .&. complement ctrlBits
ctrlBits :: Int
ctrlBits = bit 5 .|. bit 6
specialKeys :: [(String,BaseKey)]
specialKeys = [("left",LeftKey)
,("right",RightKey)
,("down",DownKey)
,("up",UpKey)
,("killline",KillLine)
,("home",Home)
,("end",End)
,("pagedown",PageDown)
,("pageup",PageUp)
,("backspace",Backspace)
,("delete",Delete)
,("return",KeyChar '\n')
,("enter",KeyChar '\n')
,("tab",KeyChar '\t')
,("esc",KeyChar '\ESC')
,("escape",KeyChar '\ESC')
,("reversesearchhistory",SearchReverse)
,("forwardsearchhistory",SearchForward)
]
parseModifiers :: [String] -> BaseKey -> Key
parseModifiers strs = Key mods
where mods = foldl1 (.) (map parseModifier strs) noModifier
parseModifier :: String -> (Modifier -> Modifier)
parseModifier str m = case map toLower str of
"ctrl" -> m {hasControl = True}
"control" -> m {hasControl = True}
"meta" -> m {hasMeta = True}
"shift" -> m {hasShift = True}
_ -> m
breakAtDashes :: String -> [String]
breakAtDashes "" = []
breakAtDashes str = case break (=='-') str of
(xs,'-':rest) -> xs : breakAtDashes rest
(xs,_) -> [xs]
parseKey :: String -> Maybe Key
parseKey str = fmap canonicalizeKey $
case reverse (breakAtDashes str) of
[ks] -> liftM simpleKey (parseBaseKey ks)
ks:ms -> liftM (parseModifiers ms) (parseBaseKey ks)
[] -> Nothing
parseBaseKey :: String -> Maybe BaseKey
parseBaseKey ks = lookup (map toLower ks) specialKeys
`mplus` parseFunctionKey ks
`mplus` parseKeyChar ks
where
parseKeyChar [c] | isPrint c = Just (KeyChar c)
parseKeyChar _ = Nothing
parseFunctionKey (f:ns) | f `elem` "fF" = case reads ns of
[(n,"")] -> Just (FunKey n)
_ -> Nothing
parseFunctionKey _ = Nothing
canonicalizeKey :: Key -> Key
canonicalizeKey (Key m (KeyChar c))
| hasControl m = Key m {hasControl = False}
(KeyChar (setControlBits c))
| hasShift m = Key m {hasShift = False} (KeyChar (toUpper c))
canonicalizeKey k = k
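-- Illustrative sketch (expected results noted in comments, not verified
-- output) of how parseKey and canonicalizeKey compose:
exampleKeys :: [Maybe Key]
exampleKeys = [ parseKey "ctrl-x"    -- control bits folded into the character
              , parseKey "meta-left" -- Just (metaKey (simpleKey LeftKey))
              , parseKey "f5"        -- Just (simpleKey (FunKey 5))
              ]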
|
judah/haskeline
|
System/Console/Haskeline/Key.hs
|
bsd-3-clause
| 4,976
| 0
| 13
| 1,563
| 1,595
| 858
| 737
| 133
| 5
|
module Blockchain.BlockSynchronizer (
handleNewBlockHashes,
handleNewBlocks
) where
import Control.Monad.IO.Class
import Control.Monad.State
import qualified Data.Binary as Bin
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as BL
import Data.Function
import Data.List
import Data.Maybe
import Text.PrettyPrint.ANSI.Leijen hiding ((<$>))
import Blockchain.BlockChain
import qualified Blockchain.Colors as CL
import Blockchain.Communication
import Blockchain.Context
import Blockchain.Data.BlockDB
import Blockchain.Data.DataDefs
import Blockchain.Data.RLP
import Blockchain.Data.Wire
import Blockchain.DBM
import Blockchain.ExtDBs
import Blockchain.Frame
import Blockchain.SHA
--import Debug.Trace
data GetBlockHashesResult = NeedMore SHA | NeededHashes [SHA] deriving (Show)
--Only use for debug purposes, to trick the peer into rerunning VM code for a particular block
debug_blockDBGet::B.ByteString->DBM (Maybe B.ByteString)
debug_blockDBGet hash = do
maybeBlockBytes <- blockDBGet hash
case maybeBlockBytes of
Nothing -> return Nothing
Just blockBytes -> do
let theBlock = rlpDecode . rlpDeserialize $ blockBytes
if blockDataNumber (blockBlockData theBlock) > 99263
then return Nothing
else return maybeBlockBytes
findFirstHashAlreadyInDB::[SHA]->ContextM (Maybe SHA)
findFirstHashAlreadyInDB hashes = do
items <- lift $ filterM (fmap (not . isNothing) . blockDBGet . BL.toStrict . Bin.encode) hashes
--items <- lift $ filterM (fmap (not . isNothing) . debug_blockDBGet . BL.toStrict . Bin.encode) hashes
return $ safeHead items
where
safeHead::[a]->Maybe a
safeHead [] = Nothing
safeHead (x:_) = Just x
handleNewBlockHashes::[SHA]->EthCryptM ContextM ()
--handleNewBlockHashes _ list | trace ("########### handleNewBlockHashes: " ++ show list) $ False = undefined
handleNewBlockHashes [] = return () --error "handleNewBlockHashes called with empty list"
handleNewBlockHashes blockHashes = do
result <- lift $ findFirstHashAlreadyInDB blockHashes
case result of
Nothing -> do
--liftIO $ putStrLn "Requesting more block hashes"
cxt <- lift get
lift $ put cxt{neededBlockHashes=reverse blockHashes ++ neededBlockHashes cxt}
sendMsg $ GetBlockHashes [last blockHashes] 0x500
Just hashInDB -> do
liftIO $ putStrLn $ "Found a serverblock already in our database: " ++ show (pretty hashInDB)
cxt <- lift get
--liftIO $ putStrLn $ show (pretty blockHashes)
lift $ put cxt{neededBlockHashes=reverse (takeWhile (/= hashInDB) blockHashes) ++ neededBlockHashes cxt}
askForSomeBlocks
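-- Request the next batch (at most 128) of the block hashes still queued in
-- the context; any remaining hashes stay queued for later rounds.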
askForSomeBlocks::EthCryptM ContextM ()
askForSomeBlocks = do
cxt <- lift get
if null (neededBlockHashes cxt)
then return ()
else do
let (firstBlocks, lastBlocks) = splitAt 128 (neededBlockHashes cxt)
lift $ put cxt{neededBlockHashes=lastBlocks}
sendMsg $ GetBlocks firstBlocks
handleNewBlocks::[Block]->EthCryptM ContextM ()
handleNewBlocks [] = error "handleNewBlocks called with empty block list"
handleNewBlocks blocks = do
let orderedBlocks =
sortBy (compare `on` blockDataNumber . blockBlockData) blocks
maybeParentBlock <- lift $ lift $ getBlock (blockDataParentHash $ blockBlockData $ head $ orderedBlocks) --head OK, [] weeded out
cxt <- lift get
case (neededBlockHashes cxt, maybeParentBlock) of
([], Nothing) -> do
liftIO $ putStrLn $ CL.red $ "Resynching!!!!!!!!"
handleNewBlockHashes [blockHash $ head orderedBlocks] -- head OK, [] weeded out
(_, Nothing) ->
liftIO $ putStrLn $ CL.red "Warning: a new block has arrived before another block sync is in progress. This block will be thrown away for now, and re-requested later."
(_, Just _) -> do
liftIO $ putStrLn "Submitting new blocks"
lift $ addBlocks False $ sortBy (compare `on` blockDataNumber . blockBlockData) blocks
liftIO $ putStrLn $ show (length blocks) ++ " blocks have been submitted"
askForSomeBlocks
|
jamshidh/ethereum-vm
|
src/Blockchain/BlockSynchronizer.hs
|
bsd-3-clause
| 4,188
| 0
| 20
| 895
| 1,023
| 526
| 497
| -1
| -1
|
module NPDA.Vorganger
-- $Id$
( vorganger
    , vorganger_konfigurationen -- do we even need this?
)
where
import NPDA.Type
import NPDA.Konfiguration
import Control.Monad (guard)
import Autolib.Schichten
vorganger :: NPDAC x y z
=> NPDA x y z -> Set (Konfiguration x y z)
-> [ Konfiguration x y z ]
vorganger a ks = concat $ map setToList $
schichten' (vorganger_konfigurationen a ) ks
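-- One backwards step: for every transition-table entry that could have led to
-- the given configuration, rebuild the predecessor configuration by
-- re-attaching the consumed input symbol (if any) and the popped stack symbol.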
vorganger_konfigurationen
:: NPDAC x y z
=> NPDA x y z -> Konfiguration x y z
-> Set (Konfiguration x y z)
vorganger_konfigurationen a k = mkSet $ do
((mx, z, y), zys) <- fmToList (tafel a)
(z', y') <- setToList zys
let xs = case mx of Just x -> [x]; Nothing -> []
guard $ z' == zustand k
let (y1, y2) = splitAt (length y') (keller k)
guard $ y1 == y'
return $ Konfiguration { eingabe = xs ++ eingabe k
, keller = [y] ++ y2
, zustand = z
, link = Just k
}
|
Erdwolf/autotool-bonn
|
src/NPDA/Vorganger.hs
|
gpl-2.0
| 948
| 12
| 14
| 274
| 384
| 198
| 186
| 27
| 2
|
{-
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[RnBinds]{Renaming and dependency analysis of bindings}
This module does renaming and dependency analysis on value bindings in
the abstract syntax. It does {\em not} do cycle-checks on class or
type-synonym declarations; those cannot be done at this stage because
they may be affected by renaming (which isn't fully worked out yet).
-}
{-# LANGUAGE CPP #-}
module RnBinds (
-- Renaming top-level bindings
rnTopBindsLHS, rnTopBindsRHS, rnValBindsRHS,
-- Renaming local bindings
rnLocalBindsAndThen, rnLocalValBindsLHS, rnLocalValBindsRHS,
-- Other bindings
rnMethodBinds, renameSigs, mkSigTvFn,
rnMatchGroup, rnGRHSs, rnGRHS,
makeMiniFixityEnv, MiniFixityEnv,
HsSigCtxt(..)
) where
import {-# SOURCE #-} RnExpr( rnLExpr, rnStmts )
import HsSyn
import TcRnMonad
import TcEvidence ( emptyTcEvBinds )
import RnTypes
import RnPat
import RnNames
import RnEnv
import DynFlags
import Module
import Name
import NameEnv
import NameSet
import RdrName ( RdrName, rdrNameOcc )
import SrcLoc
import ListSetOps ( findDupsEq )
import BasicTypes ( RecFlag(..) )
import Digraph ( SCC(..) )
import Bag
import Outputable
import FastString
import Data.List ( partition, sort )
import Maybes ( orElse )
import Control.Monad
#if __GLASGOW_HASKELL__ < 709
import Data.Traversable ( traverse )
#endif
{-
-- ToDo: Put the annotations into the monad, so that they arrive in the proper
-- place and can be used when complaining.
The code tree received by the function @rnBinds@ contains definitions
in where-clauses which are all apparently mutually recursive, but which may
not really depend upon each other. For example, in the top level program
\begin{verbatim}
f x = y where a = x
y = x
\end{verbatim}
the definitions of @a@ and @y@ do not depend on each other at all.
Unfortunately, the typechecker cannot always check such definitions.
\footnote{Mycroft, A. 1984. Polymorphic type schemes and recursive
definitions. In Proceedings of the International Symposium on Programming,
Toulouse, pp. 217-39. LNCS 167. Springer Verlag.}
However, the typechecker usually can check definitions in which only the
strongly connected components have been collected into recursive bindings.
This is precisely what the function @rnBinds@ does.
ToDo: deal with case where a single monobinds binds the same variable
twice.
The vertex tag is a unique @Int@; the tags only need to be unique
within one @MonoBinds@, so that unique-Int plumbing is done explicitly
(heavy monad machinery not needed).
************************************************************************
* *
* naming conventions *
* *
************************************************************************
\subsection[name-conventions]{Name conventions}
The basic algorithm involves walking over the tree and returning a tuple
containing the new tree plus its free variables. Some functions, such
as those walking polymorphic bindings (HsBinds) and qualifier lists in
list comprehensions (@Quals@), return the variables bound in local
environments. These are then used to calculate the free variables of the
expression evaluated in these environments.
Conventions for variable names are as follows:
\begin{itemize}
\item
new code is given a prime to distinguish it from the old.
\item
a set of variables defined in @Exp@ is written @dvExp@
\item
a set of variables free in @Exp@ is written @fvExp@
\end{itemize}
************************************************************************
* *
* analysing polymorphic bindings (HsBindGroup, HsBind)
* *
************************************************************************
\subsubsection[dep-HsBinds]{Polymorphic bindings}
Non-recursive expressions are reconstructed without any changes at top
level, although their component expressions may have to be altered.
However, non-recursive expressions are currently not expected as
\Haskell{} programs, and this code should not be executed.
Monomorphic bindings contain information that is returned in a tuple
(a @FlatMonoBinds@) containing:
\begin{enumerate}
\item
a unique @Int@ that serves as the ``vertex tag'' for this binding.
\item
the name of a function or the names in a pattern. These are a set
referred to as @dvLhs@, the defined variables of the left hand side.
\item
the free variables of the body. These are referred to as @fvBody@.
\item
the definition's actual code. This is referred to as just @code@.
\end{enumerate}
The function @nonRecDvFv@ returns two sets of variables. The first is
the set of variables defined in the set of monomorphic bindings, while the
second is the set of free variables in those bindings.
The set of variables defined in a non-recursive binding is just the
union of all of them, as @union@ removes duplicates. However, the
free variables in each successive set of cumulative bindings is the
union of those in the previous set plus those of the newest binding after
the defined variables of the previous set have been removed.
@rnMethodBinds@ deals only with the declarations in class and
instance declarations. It expects only to see @FunMonoBind@s, and
it expects the global environment to contain bindings for the binders
(which are all class operations).
************************************************************************
* *
\subsubsection{ Top-level bindings}
* *
************************************************************************
-}
-- for top-level bindings, we need to make top-level names,
-- so we have a different entry point than for local bindings
rnTopBindsLHS :: MiniFixityEnv
-> HsValBinds RdrName
-> RnM (HsValBindsLR Name RdrName)
rnTopBindsLHS fix_env binds
= rnValBindsLHS (topRecNameMaker fix_env) binds
rnTopBindsRHS :: NameSet -> HsValBindsLR Name RdrName
-> RnM (HsValBinds Name, DefUses)
rnTopBindsRHS bound_names binds
= do { is_boot <- tcIsHsBootOrSig
; if is_boot
then rnTopBindsBoot binds
else rnValBindsRHS (TopSigCtxt bound_names False) binds }
rnTopBindsBoot :: HsValBindsLR Name RdrName -> RnM (HsValBinds Name, DefUses)
-- An hs-boot file has no bindings.
-- Return a single HsBindGroup with empty binds and renamed signatures
rnTopBindsBoot (ValBindsIn mbinds sigs)
= do { checkErr (isEmptyLHsBinds mbinds) (bindsInHsBootFile mbinds)
; (sigs', fvs) <- renameSigs HsBootCtxt sigs
; return (ValBindsOut [] sigs', usesOnly fvs) }
rnTopBindsBoot b = pprPanic "rnTopBindsBoot" (ppr b)
{-
*********************************************************
* *
HsLocalBinds
* *
*********************************************************
-}
rnLocalBindsAndThen :: HsLocalBinds RdrName
-> (HsLocalBinds Name -> RnM (result, FreeVars))
-> RnM (result, FreeVars)
-- This version (a) assumes that the binding vars are *not* already in scope
-- (b) removes the binders from the free vars of the thing inside
-- The parser doesn't produce ThenBinds
rnLocalBindsAndThen EmptyLocalBinds thing_inside
= thing_inside EmptyLocalBinds
rnLocalBindsAndThen (HsValBinds val_binds) thing_inside
= rnLocalValBindsAndThen val_binds $ \ val_binds' ->
thing_inside (HsValBinds val_binds')
rnLocalBindsAndThen (HsIPBinds binds) thing_inside = do
(binds',fv_binds) <- rnIPBinds binds
(thing, fvs_thing) <- thing_inside (HsIPBinds binds')
return (thing, fvs_thing `plusFV` fv_binds)
rnIPBinds :: HsIPBinds RdrName -> RnM (HsIPBinds Name, FreeVars)
rnIPBinds (IPBinds ip_binds _no_dict_binds) = do
(ip_binds', fvs_s) <- mapAndUnzipM (wrapLocFstM rnIPBind) ip_binds
return (IPBinds ip_binds' emptyTcEvBinds, plusFVs fvs_s)
rnIPBind :: IPBind RdrName -> RnM (IPBind Name, FreeVars)
rnIPBind (IPBind ~(Left n) expr) = do
(expr',fvExpr) <- rnLExpr expr
return (IPBind (Left n) expr', fvExpr)
{-
************************************************************************
* *
ValBinds
* *
************************************************************************
-}
-- Renaming local binding groups
-- Does duplicate/shadow check
rnLocalValBindsLHS :: MiniFixityEnv
-> HsValBinds RdrName
-> RnM ([Name], HsValBindsLR Name RdrName)
rnLocalValBindsLHS fix_env binds
= do { binds' <- rnValBindsLHS (localRecNameMaker fix_env) binds
-- Check for duplicates and shadowing
-- Must do this *after* renaming the patterns
-- See Note [Collect binders only after renaming] in HsUtils
-- We need to check for dups here because we
         -- don't bind all of the variables from the ValBinds at once
-- with bindLocatedLocals any more.
--
-- Note that we don't want to do this at the top level, since
-- sorting out duplicates and shadowing there happens elsewhere.
-- The behavior is even different. For example,
-- import A(f)
-- f = ...
-- should not produce a shadowing warning (but it will produce
-- an ambiguity warning if you use f), but
-- import A(f)
-- g = let f = ... in f
-- should.
; let bound_names = collectHsValBinders binds'
-- There should be only Ids, but if there are any bogus
-- pattern synonyms, we'll collect them anyway, so that
-- we don't generate subsequent out-of-scope messages
; envs <- getRdrEnvs
; checkDupAndShadowedNames envs bound_names
; return (bound_names, binds') }
-- renames the left-hand sides
-- generic version used both at the top level and for local binds
-- does some error checking, but not what gets done elsewhere at the top level
rnValBindsLHS :: NameMaker
-> HsValBinds RdrName
-> RnM (HsValBindsLR Name RdrName)
rnValBindsLHS topP (ValBindsIn mbinds sigs)
= do { mbinds' <- mapBagM (wrapLocM (rnBindLHS topP doc)) mbinds
; return $ ValBindsIn mbinds' sigs }
where
bndrs = collectHsBindsBinders mbinds
doc = text "In the binding group for:" <+> pprWithCommas ppr bndrs
rnValBindsLHS _ b = pprPanic "rnValBindsLHSFromDoc" (ppr b)
-- General version used both from the top-level and for local things
-- Assumes the LHS vars are in scope
--
-- Does not bind the local fixity declarations
rnValBindsRHS :: HsSigCtxt
-> HsValBindsLR Name RdrName
-> RnM (HsValBinds Name, DefUses)
rnValBindsRHS ctxt (ValBindsIn mbinds sigs)
= do { (sigs', sig_fvs) <- renameSigs ctxt sigs
; binds_w_dus <- mapBagM (rnLBind (mkSigTvFn sigs')) mbinds
; case depAnalBinds binds_w_dus of
(anal_binds, anal_dus) -> return (valbind', valbind'_dus)
where
valbind' = ValBindsOut anal_binds sigs'
valbind'_dus = anal_dus `plusDU` usesOnly sig_fvs
-- Put the sig uses *after* the bindings
-- so that the binders are removed from
-- the uses in the sigs
}
rnValBindsRHS _ b = pprPanic "rnValBindsRHS" (ppr b)
-- Wrapper for local binds
--
-- The *client* of this function is responsible for checking for unused binders;
-- it doesn't (and can't: we don't have the thing inside the binds) happen here
--
-- The client is also responsible for bringing the fixities into scope
rnLocalValBindsRHS :: NameSet -- names bound by the LHSes
-> HsValBindsLR Name RdrName
-> RnM (HsValBinds Name, DefUses)
rnLocalValBindsRHS bound_names binds
= rnValBindsRHS (LocalBindCtxt bound_names) binds
-- for local binds
-- wrapper that does both the left- and right-hand sides
--
-- here there are no local fixity decls passed in;
-- the local fixity decls come from the ValBinds sigs
rnLocalValBindsAndThen :: HsValBinds RdrName
-> (HsValBinds Name -> RnM (result, FreeVars))
-> RnM (result, FreeVars)
rnLocalValBindsAndThen binds@(ValBindsIn _ sigs) thing_inside
= do { -- (A) Create the local fixity environment
new_fixities <- makeMiniFixityEnv [L loc sig
| L loc (FixSig sig) <- sigs]
-- (B) Rename the LHSes
; (bound_names, new_lhs) <- rnLocalValBindsLHS new_fixities binds
-- ...and bring them (and their fixities) into scope
; bindLocalNamesFV bound_names $
addLocalFixities new_fixities bound_names $ do
{ -- (C) Do the RHS and thing inside
(binds', dus) <- rnLocalValBindsRHS (mkNameSet bound_names) new_lhs
; (result, result_fvs) <- thing_inside binds'
-- Report unused bindings based on the (accurate)
-- findUses. E.g.
-- let x = x in 3
-- should report 'x' unused
; let real_uses = findUses dus result_fvs
-- Insert fake uses for variables introduced implicitly by
-- wildcards (#4404)
implicit_uses = hsValBindsImplicits binds'
; warnUnusedLocalBinds bound_names
(real_uses `unionNameSet` implicit_uses)
; let
-- The variables "used" in the val binds are:
-- (1) the uses of the binds (allUses)
-- (2) the FVs of the thing-inside
all_uses = allUses dus `plusFV` result_fvs
-- Note [Unused binding hack]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~
-- Note that *in contrast* to the above reporting of
-- unused bindings, (1) above uses duUses to return *all*
-- the uses, even if the binding is unused. Otherwise consider:
-- x = 3
-- y = let p = x in 'x' -- NB: p not used
-- If we don't "see" the dependency of 'y' on 'x', we may put the
-- bindings in the wrong order, and the type checker will complain
-- that x isn't in scope
--
-- But note that this means we won't report 'x' as unused,
-- whereas we would if we had { x = 3; p = x; y = 'x' }
; return (result, all_uses) }}
-- The bound names are pruned out of all_uses
-- by the bindLocalNamesFV call above
rnLocalValBindsAndThen bs _ = pprPanic "rnLocalValBindsAndThen" (ppr bs)
-- Process the fixity declarations, making a FastString -> (Located Fixity) map
-- (We keep the location around for reporting duplicate fixity declarations.)
--
-- Checks for duplicates, but not that only locally defined things are fixed.
-- Note: for local fixity declarations, duplicates would also be checked in
-- check_sigs below. But we also use this function at the top level.
makeMiniFixityEnv :: [LFixitySig RdrName] -> RnM MiniFixityEnv
makeMiniFixityEnv decls = foldlM add_one_sig emptyFsEnv decls
where
add_one_sig env (L loc (FixitySig names fixity)) =
foldlM add_one env [ (loc,name_loc,name,fixity)
| L name_loc name <- names ]
add_one env (loc, name_loc, name,fixity) = do
{ -- this fixity decl is a duplicate iff
-- the ReaderName's OccName's FastString is already in the env
-- (we only need to check the local fix_env because
-- definitions of non-local will be caught elsewhere)
let { fs = occNameFS (rdrNameOcc name)
; fix_item = L loc fixity };
case lookupFsEnv env fs of
Nothing -> return $ extendFsEnv env fs fix_item
Just (L loc' _) -> do
{ setSrcSpan loc $
addErrAt name_loc (dupFixityDecl loc' name)
; return env}
}
dupFixityDecl :: SrcSpan -> RdrName -> SDoc
dupFixityDecl loc rdr_name
= vcat [ptext (sLit "Multiple fixity declarations for") <+> quotes (ppr rdr_name),
ptext (sLit "also at ") <+> ppr loc]
---------------------
-- renaming a single bind
rnBindLHS :: NameMaker
-> SDoc
-> HsBind RdrName
-- returns the renamed left-hand side,
-- and the FreeVars *of the LHS*
-- (i.e., any free variables of the pattern)
-> RnM (HsBindLR Name RdrName)
rnBindLHS name_maker _ bind@(PatBind { pat_lhs = pat })
= do
-- we don't actually use the FV processing of rnPatsAndThen here
(pat',pat'_fvs) <- rnBindPat name_maker pat
return (bind { pat_lhs = pat', bind_fvs = pat'_fvs })
-- We temporarily store the pat's FVs in bind_fvs;
-- gets updated to the FVs of the whole bind
-- when doing the RHS below
rnBindLHS name_maker _ bind@(FunBind { fun_id = rdr_name })
= do { name <- applyNameMaker name_maker rdr_name
; return (bind { fun_id = name
, bind_fvs = placeHolderNamesTc }) }
rnBindLHS name_maker _ (PatSynBind psb@PSB{ psb_id = rdrname })
| isTopRecNameMaker name_maker
= do { addLocM checkConName rdrname
; name <- lookupLocatedTopBndrRn rdrname -- Should be bound at top level already
; return (PatSynBind psb{ psb_id = name }) }
| otherwise -- Pattern synonym, not at top level
= do { addErr localPatternSynonymErr -- Complain, but make up a fake
-- name so that we can carry on
; name <- applyNameMaker name_maker rdrname
; return (PatSynBind psb{ psb_id = name }) }
where
localPatternSynonymErr :: SDoc
localPatternSynonymErr
= hang (ptext (sLit "Illegal pattern synonym declaration for") <+> quotes (ppr rdrname))
2 (ptext (sLit "Pattern synonym declarations are only valid at top level"))
rnBindLHS _ _ b = pprPanic "rnBindHS" (ppr b)
rnLBind :: (Name -> [Name]) -- Signature tyvar function
-> LHsBindLR Name RdrName
-> RnM (LHsBind Name, [Name], Uses)
rnLBind sig_fn (L loc bind)
= setSrcSpan loc $
do { (bind', bndrs, dus) <- rnBind sig_fn bind
; return (L loc bind', bndrs, dus) }
-- assumes the left-hands-side vars are in scope
rnBind :: (Name -> [Name]) -- Signature tyvar function
-> HsBindLR Name RdrName
-> RnM (HsBind Name, [Name], Uses)
rnBind _ bind@(PatBind { pat_lhs = pat
, pat_rhs = grhss
-- pat fvs were stored in bind_fvs
-- after processing the LHS
, bind_fvs = pat_fvs })
= do { mod <- getModule
; (grhss', rhs_fvs) <- rnGRHSs PatBindRhs rnLExpr grhss
-- No scoped type variables for pattern bindings
; let all_fvs = pat_fvs `plusFV` rhs_fvs
fvs' = filterNameSet (nameIsLocalOrFrom mod) all_fvs
-- Keep locally-defined Names
-- As well as dependency analysis, we need these for the
-- MonoLocalBinds test in TcBinds.decideGeneralisationPlan
bndrs = collectPatBinders pat
bind' = bind { pat_rhs = grhss',
pat_rhs_ty = placeHolderType, bind_fvs = fvs' }
is_wild_pat = case pat of
L _ (WildPat {}) -> True
L _ (BangPat (L _ (WildPat {}))) -> True -- #9127
_ -> False
-- Warn if the pattern binds no variables, except for the
-- entirely-explicit idiom _ = rhs
-- which (a) is not that different from _v = rhs
-- (b) is sometimes used to give a type sig for,
-- or an occurrence of, a variable on the RHS
; whenWOptM Opt_WarnUnusedBinds $
when (null bndrs && not is_wild_pat) $
addWarn $ unusedPatBindWarn bind'
; fvs' `seq` -- See Note [Free-variable space leak]
return (bind', bndrs, all_fvs) }
rnBind sig_fn bind@(FunBind { fun_id = name
, fun_infix = is_infix
, fun_matches = matches })
-- invariant: no free vars here when it's a FunBind
= do { let plain_name = unLoc name
; (matches', rhs_fvs) <- bindSigTyVarsFV (sig_fn plain_name) $
-- bindSigTyVars tests for Opt_ScopedTyVars
rnMatchGroup (FunRhs plain_name is_infix)
rnLExpr matches
; when is_infix $ checkPrecMatch plain_name matches'
; mod <- getModule
; let fvs' = filterNameSet (nameIsLocalOrFrom mod) rhs_fvs
-- Keep locally-defined Names
-- As well as dependency analysis, we need these for the
-- MonoLocalBinds test in TcBinds.decideGeneralisationPlan
; fvs' `seq` -- See Note [Free-variable space leak]
return (bind { fun_matches = matches'
, bind_fvs = fvs' },
[plain_name], rhs_fvs)
}
rnBind sig_fn (PatSynBind bind)
= do { (bind', name, fvs) <- rnPatSynBind sig_fn bind
; return (PatSynBind bind', name, fvs) }
rnBind _ b = pprPanic "rnBind" (ppr b)
{-
Note [Free-variable space leak]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We have
fvs' = trim fvs
and we seq fvs' before turning it as part of a record.
The reason is that trim is sometimes something like
\xs -> intersectNameSet (mkNameSet bound_names) xs
and we don't want to retain the list bound_names. This showed up in
trac ticket #1136.
-}
rnPatSynBind :: (Name -> [Name]) -- Signature tyvar function
-> PatSynBind Name RdrName
-> RnM (PatSynBind Name Name, [Name], Uses)
rnPatSynBind _sig_fn bind@(PSB { psb_id = L _ name
, psb_args = details
, psb_def = pat
, psb_dir = dir })
-- invariant: no free vars here when it's a FunBind
= do { pattern_synonym_ok <- xoptM Opt_PatternSynonyms
; unless pattern_synonym_ok (addErr patternSynonymErr)
; ((pat', details'), fvs1) <- rnPat PatSyn pat $ \pat' -> do
-- We check the 'RdrName's instead of the 'Name's
-- so that the binding locations are reported
-- from the left-hand side
{ (details', fvs) <- case details of
PrefixPatSyn vars ->
do { checkDupRdrNames vars
; names <- mapM lookupVar vars
; return (PrefixPatSyn names, mkFVs (map unLoc names)) }
InfixPatSyn var1 var2 ->
do { checkDupRdrNames [var1, var2]
; name1 <- lookupVar var1
; name2 <- lookupVar var2
-- ; checkPrecMatch -- TODO
; return (InfixPatSyn name1 name2, mkFVs (map unLoc [name1, name2])) }
; return ((pat', details'), fvs) }
; (dir', fvs2) <- case dir of
Unidirectional -> return (Unidirectional, emptyFVs)
ImplicitBidirectional -> return (ImplicitBidirectional, emptyFVs)
ExplicitBidirectional mg ->
do { (mg', fvs) <- rnMatchGroup PatSyn rnLExpr mg
; return (ExplicitBidirectional mg', fvs) }
; mod <- getModule
; let fvs = fvs1 `plusFV` fvs2
fvs' = filterNameSet (nameIsLocalOrFrom mod) fvs
-- Keep locally-defined Names
-- As well as dependency analysis, we need these for the
-- MonoLocalBinds test in TcBinds.decideGeneralisationPlan
; let bind' = bind{ psb_args = details'
, psb_def = pat'
, psb_dir = dir'
, psb_fvs = fvs' }
; fvs' `seq` -- See Note [Free-variable space leak]
return (bind', [name], fvs1)
-- See Note [Pattern synonym wrappers don't yield dependencies]
}
where
lookupVar = wrapLocM lookupOccRn
patternSynonymErr :: SDoc
patternSynonymErr
= hang (ptext (sLit "Illegal pattern synonym declaration"))
2 (ptext (sLit "Use -XPatternSynonyms to enable this extension"))
{-
Note [Pattern synonym wrappers don't yield dependencies]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When renaming a pattern synonym that has an explicit wrapper,
references in the wrapper definition should not be used when
calculating dependencies. For example, consider the following pattern
synonym definition:
pattern P x <- C1 x where
P x = f (C1 x)
f (P x) = C2 x
In this case, 'P' needs to be typechecked in two passes:
1. Typecheck the pattern definition of 'P', which fully determines the
type of 'P'. This step doesn't require knowing anything about 'f',
since the wrapper definition is not looked at.
2. Typecheck the wrapper definition, which needs the typechecked
definition of 'f' to be in scope.
This behaviour is implemented in 'tcValBinds', but it crucially
depends on 'P' not being put in a recursive group with 'f' (which
would make it look like a recursive pattern synonym a la 'pattern P =
P' which is unsound and rejected).
-}
---------------------
depAnalBinds :: Bag (LHsBind Name, [Name], Uses)
-> ([(RecFlag, LHsBinds Name)], DefUses)
-- Dependency analysis; this is important so that
-- unused-binding reporting is accurate
depAnalBinds binds_w_dus
= (map get_binds sccs, map get_du sccs)
where
sccs = depAnal (\(_, defs, _) -> defs)
(\(_, _, uses) -> nameSetElems uses)
(bagToList binds_w_dus)
get_binds (AcyclicSCC (bind, _, _)) = (NonRecursive, unitBag bind)
get_binds (CyclicSCC binds_w_dus) = (Recursive, listToBag [b | (b,_,_) <- binds_w_dus])
get_du (AcyclicSCC (_, bndrs, uses)) = (Just (mkNameSet bndrs), uses)
get_du (CyclicSCC binds_w_dus) = (Just defs, uses)
where
defs = mkNameSet [b | (_,bs,_) <- binds_w_dus, b <- bs]
uses = unionNameSets [u | (_,_,u) <- binds_w_dus]
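-- Illustrative sketch (not from the original source): for a group
--      f = ... g ...
--      g = ... f ...
--      h = ... f ...
-- the SCC analysis yields a Recursive group for {f,g} and a NonRecursive one
-- for h, and the accompanying DefUses record that h uses f, which is what
-- keeps unused-binding reporting accurate.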
---------------------
-- Bind the top-level forall'd type variables in the sigs.
-- E.g f :: a -> a
-- f = rhs
-- The 'a' scopes over the rhs
--
-- NB: there'll usually be just one (for a function binding)
-- but if there are many, one may shadow the rest; too bad!
-- e.g x :: [a] -> [a]
-- y :: [(a,a)] -> a
-- (x,y) = e
-- In e, 'a' will be in scope, and it'll be the one from 'y'!
mkSigTvFn :: [LSig Name] -> (Name -> [Name])
-- Return a lookup function that maps an Id Name to the names
-- of the type variables that should scope over its body.
mkSigTvFn sigs
= \n -> lookupNameEnv env n `orElse` []
where
extractScopedTyVars :: LHsType Name -> [Name]
extractScopedTyVars (L _ (HsForAllTy Explicit _ ltvs _ _)) = hsLKiTyVarNames ltvs
extractScopedTyVars _ = []
env :: NameEnv [Name]
env = mkNameEnv [ (name, nwcs ++ extractScopedTyVars ty) -- Kind variables and type variables
| L _ (TypeSig names ty nwcs) <- sigs
, L _ name <- names]
-- Note the pattern-match on "Explicit"; we only bind
-- type variables from signatures with an explicit top-level for-all
{-
@rnMethodBinds@ is used for the method bindings of a class and an instance
declaration. Like @rnBinds@ but without dependency analysis.
NOTA BENE: we record each {\em binder} of a method-bind group as a free variable.
That's crucial when dealing with an instance decl:
\begin{verbatim}
instance Foo (T a) where
op x = ...
\end{verbatim}
This might be the {\em sole} occurrence of @op@ for an imported class @Foo@,
and unless @op@ occurs we won't treat the type signature of @op@ in the class
decl for @Foo@ as a source of instance-decl gates. But we should! Indeed,
in many ways the @op@ in an instance decl is just like an occurrence, not
a binder.
-}
rnMethodBinds :: Name -- Class name
-> (Name -> [Name]) -- Signature tyvar function
-> LHsBinds RdrName
-> RnM (LHsBinds Name, FreeVars)
rnMethodBinds cls sig_fn binds
= do { checkDupRdrNames meth_names
-- Check that the same method is not given twice in the
-- same instance decl instance C T where
-- f x = ...
-- g y = ...
-- f x = ...
-- We must use checkDupRdrNames because the Name of the
-- method is the Name of the class selector, whose SrcSpan
-- points to the class declaration; and we use rnMethodBinds
-- for instance decls too
; foldlM do_one (emptyBag, emptyFVs) (bagToList binds) }
where
meth_names = collectMethodBinders binds
do_one (binds,fvs) bind
= do { (bind', fvs_bind) <- rnMethodBind cls sig_fn bind
; return (binds `unionBags` bind', fvs_bind `plusFV` fvs) }
rnMethodBind :: Name
-> (Name -> [Name])
-> LHsBindLR RdrName RdrName
-> RnM (Bag (LHsBindLR Name Name), FreeVars)
rnMethodBind cls sig_fn
(L loc bind@(FunBind { fun_id = name, fun_infix = is_infix
, fun_matches = MG { mg_alts = matches
, mg_origin = origin } }))
= setSrcSpan loc $ do
sel_name <- wrapLocM (lookupInstDeclBndr cls (ptext (sLit "method"))) name
let plain_name = unLoc sel_name
-- We use the selector name as the binder
(new_matches, fvs) <- bindSigTyVarsFV (sig_fn plain_name) $
mapFvRn (rnMatch (FunRhs plain_name is_infix) rnLExpr)
matches
let new_group = mkMatchGroupName origin new_matches
when is_infix $ checkPrecMatch plain_name new_group
return (unitBag (L loc (bind { fun_id = sel_name
, fun_matches = new_group
, bind_fvs = fvs })),
fvs `addOneFV` plain_name)
-- The 'fvs' field isn't used for method binds
-- Can't handle method pattern-bindings which bind multiple methods.
rnMethodBind _ _ (L loc bind@(PatBind {})) = do
addErrAt loc (methodBindErr bind)
return (emptyBag, emptyFVs)
-- Associated pattern synonyms are not implemented yet
rnMethodBind _ _ (L loc bind@(PatSynBind {})) = do
addErrAt loc $ methodPatSynErr bind
return (emptyBag, emptyFVs)
rnMethodBind _ _ b = pprPanic "rnMethodBind" (ppr b)
{-
************************************************************************
* *
\subsubsection[dep-Sigs]{Signatures (and user-pragmas for values)}
* *
************************************************************************
@renameSigs@ checks for:
\begin{enumerate}
\item more than one sig for one thing;
\item signatures given for things not bound here;
\end{enumerate}
At the moment we don't gather free-var info from the types in
signatures. We'd only need this if we wanted to report unused tyvars.
-}
renameSigs :: HsSigCtxt
-> [LSig RdrName]
-> RnM ([LSig Name], FreeVars)
-- Renames the signatures and performs error checks
renameSigs ctxt sigs
= do { mapM_ dupSigDeclErr (findDupSigs sigs)
; checkDupMinimalSigs sigs
; (sigs', sig_fvs) <- mapFvRn (wrapLocFstM (renameSig ctxt)) sigs
; let (good_sigs, bad_sigs) = partition (okHsSig ctxt) sigs'
; mapM_ misplacedSigErr bad_sigs -- Misplaced
; return (good_sigs, sig_fvs) }
----------------------
-- We use lookupSigOccRn in the signatures, which is a little bit unsatisfactory
-- because this won't work for:
-- instance Foo T where
-- {-# INLINE op #-}
-- Baz.op = ...
-- We'll just rename the INLINE prag to refer to whatever other 'op'
-- is in scope. (I'm assuming that Baz.op isn't in scope unqualified.)
-- Doesn't seem worth much trouble to sort this.
renameSig :: HsSigCtxt -> Sig RdrName -> RnM (Sig Name, FreeVars)
-- FixitySig is renamed elsewhere.
renameSig _ (IdSig x)
= return (IdSig x, emptyFVs) -- Actually this never occurs
renameSig ctxt sig@(TypeSig vs ty _)
= do { new_vs <- mapM (lookupSigOccRn ctxt sig) vs
-- (named and anonymous) wildcards are bound here.
; (wcs, ty') <- extractWildcards ty
; bindLocatedLocalsFV wcs $ \wcs_new -> do {
(new_ty, fvs) <- rnHsSigType (ppr_sig_bndrs vs) ty'
; return (TypeSig new_vs new_ty wcs_new, fvs) } }
renameSig ctxt sig@(GenericSig vs ty)
= do { defaultSigs_on <- xoptM Opt_DefaultSignatures
; unless defaultSigs_on (addErr (defaultSigErr sig))
; new_v <- mapM (lookupSigOccRn ctxt sig) vs
; (new_ty, fvs) <- rnHsSigType (ppr_sig_bndrs vs) ty
; return (GenericSig new_v new_ty, fvs) }
renameSig _ (SpecInstSig src ty)
= do { (new_ty, fvs) <- rnLHsType SpecInstSigCtx ty
; return (SpecInstSig src new_ty,fvs) }
-- {-# SPECIALISE #-} pragmas can refer to imported Ids
-- so, in the top-level case (when mb_names is Nothing)
-- we use lookupOccRn. If there's both an imported and a local 'f'
-- then the SPECIALISE pragma is ambiguous, unlike all other signatures
renameSig ctxt sig@(SpecSig v tys inl)
= do { new_v <- case ctxt of
TopSigCtxt {} -> lookupLocatedOccRn v
_ -> lookupSigOccRn ctxt sig v
-- ; (new_ty, fvs) <- rnHsSigType (quotes (ppr v)) ty
; (new_ty, fvs) <- foldM do_one ([],emptyFVs) tys
; return (SpecSig new_v new_ty inl, fvs) }
where
do_one (tys,fvs) ty
= do { (new_ty, fvs_ty) <- rnHsSigType (quotes (ppr v)) ty
; return ( new_ty:tys, fvs_ty `plusFV` fvs) }
renameSig ctxt sig@(InlineSig v s)
= do { new_v <- lookupSigOccRn ctxt sig v
; return (InlineSig new_v s, emptyFVs) }
renameSig ctxt sig@(FixSig (FixitySig vs f))
= do { new_vs <- mapM (lookupSigOccRn ctxt sig) vs
; return (FixSig (FixitySig new_vs f), emptyFVs) }
renameSig ctxt sig@(MinimalSig s bf)
= do new_bf <- traverse (lookupSigOccRn ctxt sig) bf
return (MinimalSig s new_bf, emptyFVs)
renameSig ctxt sig@(PatSynSig v (flag, qtvs) prov req ty)
= do { v' <- lookupSigOccRn ctxt sig v
; let doc = TypeSigCtx $ quotes (ppr v)
; loc <- getSrcSpanM
; let (tv_kvs, mentioned) = extractHsTysRdrTyVars (ty:unLoc prov ++ unLoc req)
; tv_bndrs <- case flag of
Implicit ->
return $ mkHsQTvs . userHsTyVarBndrs loc $ mentioned
Explicit ->
do { let heading = ptext (sLit "In the pattern synonym type signature")
<+> quotes (ppr sig)
; warnUnusedForAlls (heading $$ docOfHsDocContext doc) qtvs mentioned
; return qtvs }
Qualified -> panic "renameSig: Qualified"
; bindHsTyVars doc Nothing tv_kvs tv_bndrs $ \ tyvars -> do
{ (prov', fvs1) <- rnContext doc prov
; (req', fvs2) <- rnContext doc req
; (ty', fvs3) <- rnLHsType doc ty
; let fvs = plusFVs [fvs1, fvs2, fvs3]
; return (PatSynSig v' (flag, tyvars) prov' req' ty', fvs) }}
ppr_sig_bndrs :: [Located RdrName] -> SDoc
ppr_sig_bndrs bs = quotes (pprWithCommas ppr bs)
okHsSig :: HsSigCtxt -> LSig a -> Bool
okHsSig ctxt (L _ sig)
= case (sig, ctxt) of
(GenericSig {}, ClsDeclCtxt {}) -> True
(GenericSig {}, _) -> False
(TypeSig {}, _) -> True
(PatSynSig {}, TopSigCtxt{}) -> True
(PatSynSig {}, _) -> False
(FixSig {}, InstDeclCtxt {}) -> False
(FixSig {}, _) -> True
(IdSig {}, TopSigCtxt {}) -> True
(IdSig {}, InstDeclCtxt {}) -> True
(IdSig {}, _) -> False
(InlineSig {}, HsBootCtxt) -> False
(InlineSig {}, _) -> True
(SpecSig {}, TopSigCtxt {}) -> True
(SpecSig {}, LocalBindCtxt {}) -> True
(SpecSig {}, InstDeclCtxt {}) -> True
(SpecSig {}, _) -> False
(SpecInstSig {}, InstDeclCtxt {}) -> True
(SpecInstSig {}, _) -> False
(MinimalSig {}, ClsDeclCtxt {}) -> True
(MinimalSig {}, _) -> False
-------------------
findDupSigs :: [LSig RdrName] -> [[(Located RdrName, Sig RdrName)]]
-- Check for duplicates on RdrName version,
-- because renamed version has unboundName for
-- not-in-scope binders, which gives bogus dup-sig errors
-- NB: in a class decl, a 'generic' sig is not considered
-- equal to an ordinary sig, so we allow, say
-- class C a where
-- op :: a -> a
-- default op :: Eq a => a -> a
findDupSigs sigs
= findDupsEq matching_sig (concatMap (expand_sig . unLoc) sigs)
where
expand_sig sig@(FixSig (FixitySig ns _)) = zip ns (repeat sig)
expand_sig sig@(InlineSig n _) = [(n,sig)]
expand_sig sig@(TypeSig ns _ _) = [(n,sig) | n <- ns]
expand_sig sig@(GenericSig ns _) = [(n,sig) | n <- ns]
expand_sig _ = []
matching_sig (L _ n1,sig1) (L _ n2,sig2) = n1 == n2 && mtch sig1 sig2
mtch (FixSig {}) (FixSig {}) = True
mtch (InlineSig {}) (InlineSig {}) = True
mtch (TypeSig {}) (TypeSig {}) = True
mtch (GenericSig {}) (GenericSig {}) = True
mtch _ _ = False
-- Warn about multiple MINIMAL signatures
checkDupMinimalSigs :: [LSig RdrName] -> RnM ()
checkDupMinimalSigs sigs
= case filter isMinimalLSig sigs of
minSigs@(_:_:_) -> dupMinimalSigErr minSigs
_ -> return ()
{-
************************************************************************
* *
\subsection{Match}
* *
************************************************************************
-}
rnMatchGroup :: Outputable (body RdrName) => HsMatchContext Name
-> (Located (body RdrName) -> RnM (Located (body Name), FreeVars))
-> MatchGroup RdrName (Located (body RdrName))
-> RnM (MatchGroup Name (Located (body Name)), FreeVars)
rnMatchGroup ctxt rnBody (MG { mg_alts = ms, mg_origin = origin })
= do { empty_case_ok <- xoptM Opt_EmptyCase
; when (null ms && not empty_case_ok) (addErr (emptyCaseErr ctxt))
; (new_ms, ms_fvs) <- mapFvRn (rnMatch ctxt rnBody) ms
; return (mkMatchGroupName origin new_ms, ms_fvs) }
rnMatch :: Outputable (body RdrName) => HsMatchContext Name
-> (Located (body RdrName) -> RnM (Located (body Name), FreeVars))
-> LMatch RdrName (Located (body RdrName))
-> RnM (LMatch Name (Located (body Name)), FreeVars)
rnMatch ctxt rnBody = wrapLocFstM (rnMatch' ctxt rnBody)
rnMatch' :: Outputable (body RdrName) => HsMatchContext Name
-> (Located (body RdrName) -> RnM (Located (body Name), FreeVars))
-> Match RdrName (Located (body RdrName))
-> RnM (Match Name (Located (body Name)), FreeVars)
rnMatch' ctxt rnBody match@(Match { m_fun_id_infix = mf, m_pats = pats
, m_type = maybe_rhs_sig, m_grhss = grhss })
= do { -- Result type signatures are no longer supported
case maybe_rhs_sig of
Nothing -> return ()
Just (L loc ty) -> addErrAt loc (resSigErr ctxt match ty)
-- Now the main event
         -- note that there are no local fixity decls for matches
; rnPats ctxt pats $ \ pats' -> do
{ (grhss', grhss_fvs) <- rnGRHSs ctxt rnBody grhss
; let mf' = case (ctxt,mf) of
(FunRhs funid isinfix,Just (L lf _,_))
-> Just (L lf funid,isinfix)
_ -> Nothing
; return (Match { m_fun_id_infix = mf', m_pats = pats'
, m_type = Nothing, m_grhss = grhss'}, grhss_fvs ) }}
emptyCaseErr :: HsMatchContext Name -> SDoc
emptyCaseErr ctxt = hang (ptext (sLit "Empty list of alternatives in") <+> pp_ctxt)
2 (ptext (sLit "Use EmptyCase to allow this"))
where
pp_ctxt = case ctxt of
CaseAlt -> ptext (sLit "case expression")
LambdaExpr -> ptext (sLit "\\case expression")
_ -> ptext (sLit "(unexpected)") <+> pprMatchContextNoun ctxt
resSigErr :: Outputable body
=> HsMatchContext Name -> Match RdrName body -> HsType RdrName -> SDoc
resSigErr ctxt match ty
= vcat [ ptext (sLit "Illegal result type signature") <+> quotes (ppr ty)
, nest 2 $ ptext (sLit
"Result signatures are no longer supported in pattern matches")
, pprMatchInCtxt ctxt match ]
{-
************************************************************************
* *
\subsubsection{Guarded right-hand sides (GRHSs)}
* *
************************************************************************
-}
rnGRHSs :: HsMatchContext Name
-> (Located (body RdrName) -> RnM (Located (body Name), FreeVars))
-> GRHSs RdrName (Located (body RdrName))
-> RnM (GRHSs Name (Located (body Name)), FreeVars)
rnGRHSs ctxt rnBody (GRHSs grhss binds)
= rnLocalBindsAndThen binds $ \ binds' -> do
(grhss', fvGRHSs) <- mapFvRn (rnGRHS ctxt rnBody) grhss
return (GRHSs grhss' binds', fvGRHSs)
rnGRHS :: HsMatchContext Name
-> (Located (body RdrName) -> RnM (Located (body Name), FreeVars))
-> LGRHS RdrName (Located (body RdrName))
-> RnM (LGRHS Name (Located (body Name)), FreeVars)
rnGRHS ctxt rnBody = wrapLocFstM (rnGRHS' ctxt rnBody)
rnGRHS' :: HsMatchContext Name
-> (Located (body RdrName) -> RnM (Located (body Name), FreeVars))
-> GRHS RdrName (Located (body RdrName))
-> RnM (GRHS Name (Located (body Name)), FreeVars)
rnGRHS' ctxt rnBody (GRHS guards rhs)
= do { pattern_guards_allowed <- xoptM Opt_PatternGuards
; ((guards', rhs'), fvs) <- rnStmts (PatGuard ctxt) rnLExpr guards $ \ _ ->
rnBody rhs
; unless (pattern_guards_allowed || is_standard_guard guards')
(addWarn (nonStdGuardErr guards'))
; return (GRHS guards' rhs', fvs) }
where
-- Standard Haskell 1.4 guards are just a single boolean
-- expression, rather than a list of qualifiers as in the
-- Glasgow extension
is_standard_guard [] = True
is_standard_guard [L _ (BodyStmt _ _ _ _)] = True
is_standard_guard _ = False
{-
************************************************************************
* *
\subsection{Error messages}
* *
************************************************************************
-}
dupSigDeclErr :: [(Located RdrName, Sig RdrName)] -> RnM ()
dupSigDeclErr pairs@((L loc name, sig) : _)
= addErrAt loc $
vcat [ ptext (sLit "Duplicate") <+> what_it_is
<> ptext (sLit "s for") <+> quotes (ppr name)
, ptext (sLit "at") <+> vcat (map ppr $ sort $ map (getLoc . fst) pairs) ]
where
what_it_is = hsSigDoc sig
dupSigDeclErr [] = panic "dupSigDeclErr"
misplacedSigErr :: LSig Name -> RnM ()
misplacedSigErr (L loc sig)
= addErrAt loc $
sep [ptext (sLit "Misplaced") <+> hsSigDoc sig <> colon, ppr sig]
defaultSigErr :: Sig RdrName -> SDoc
defaultSigErr sig = vcat [ hang (ptext (sLit "Unexpected default signature:"))
2 (ppr sig)
, ptext (sLit "Use DefaultSignatures to enable default signatures") ]
methodBindErr :: HsBindLR RdrName RdrName -> SDoc
methodBindErr mbind
= hang (ptext (sLit "Pattern bindings (except simple variables) not allowed in instance declarations"))
2 (ppr mbind)
methodPatSynErr :: HsBindLR RdrName RdrName -> SDoc
methodPatSynErr mbind
= hang (ptext (sLit "Pattern synonyms not allowed in class/instance declarations"))
2 (ppr mbind)
bindsInHsBootFile :: LHsBindsLR Name RdrName -> SDoc
bindsInHsBootFile mbinds
= hang (ptext (sLit "Bindings in hs-boot files are not allowed"))
2 (ppr mbinds)
nonStdGuardErr :: Outputable body => [LStmtLR Name Name body] -> SDoc
nonStdGuardErr guards
= hang (ptext (sLit "accepting non-standard pattern guards (use PatternGuards to suppress this message)"))
4 (interpp'SP guards)
unusedPatBindWarn :: HsBind Name -> SDoc
unusedPatBindWarn bind
= hang (ptext (sLit "This pattern-binding binds no variables:"))
2 (ppr bind)
dupMinimalSigErr :: [LSig RdrName] -> RnM ()
dupMinimalSigErr sigs@(L loc _ : _)
= addErrAt loc $
vcat [ ptext (sLit "Multiple minimal complete definitions")
, ptext (sLit "at") <+> vcat (map ppr $ sort $ map getLoc sigs)
, ptext (sLit "Combine alternative minimal complete definitions with `|'") ]
dupMinimalSigErr [] = panic "dupMinimalSigErr"
|
forked-upstream-packages-for-ghcjs/ghc
|
compiler/rename/RnBinds.hs
|
bsd-3-clause
| 46,352
| 0
| 23
| 13,860
| 9,062
| 4,790
| 4,272
| 544
| 20
|
module Main where
import System.Plugins
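-- Build A.hs together with its dependencies, then dynamically load the
-- resulting object file and look up the value named "u" exported by A.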
main = do
makeAll "A.hs" []
y <- load "A.o" ["."] [] "u"
case y of
LoadSuccess _ _ -> putStrLn $ "YES"
LoadFailure e -> mapM_ putStrLn e
|
abuiles/turbinado-blog
|
tmp/dependencies/hs-plugins-1.3.1/testsuite/hier/hier4/Main.hs
|
bsd-3-clause
| 211
| 0
| 10
| 68
| 81
| 39
| 42
| 8
| 2
|
module HW05.StackVM (StackVal(..), StackExp(..), Stack, Program, stackVM) where
-- Values that may appear in the stack. Such a value will also be
-- returned by the stackVM program execution function.
data StackVal = IVal Integer | BVal Bool | Void deriving Show
-- The various expressions our VM understands.
data StackExp = PushI Integer
| PushB Bool
| Add
| Mul
| And
| Or
deriving Show
type Stack = [StackVal]
type Program = [StackExp]
-- Execute the given program. Returns either an error message or the
-- value on top of the stack after execution.
stackVM :: Program -> Either String StackVal
stackVM = execute []
errType :: String -> Either String a
errType op = Left $ "Encountered '" ++ op ++ "' opcode with ill-typed stack."
errUnderflow :: String -> Either String a
errUnderflow op = Left $ "Stack underflow with '" ++ op ++ "' opcode."
-- Execute a program against a given stack.
execute :: Stack -> Program -> Either String StackVal
execute [] [] = Right Void
execute (s:_) [] = Right s
execute s (PushI x : xs) = execute (IVal x : s) xs
execute s (PushB x : xs) = execute (BVal x : s) xs
execute (IVal s1 : IVal s2 : ss) (Add : xs) = execute (s':ss) xs
where s' = IVal (s1 + s2)
execute (_:_:_) (Add:_) = errType "Add"
execute _ (Add:_) = errUnderflow "Add"
execute (IVal s1:IVal s2:ss) (Mul : xs) = execute (s':ss) xs
where s' = IVal (s1 * s2)
execute (_:_:_) (Mul:_) = errType "Mul"
execute _ (Mul:_) = errUnderflow "Mul"
execute (BVal s1:BVal s2:ss) (And : xs) = execute (s':ss) xs
where s' = BVal (s1 && s2)
execute (_:_:_) (And:_) = errType "And"
execute _ (And:_) = errUnderflow "And"
execute (BVal s1 : BVal s2 : ss) (Or : xs) = execute (s':ss) xs
where s' = BVal (s1 || s2)
execute (_:_:_) (Or:_) = errType "Or"
execute _ (Or:_) = errUnderflow "Or"
test :: Either String StackVal
test = stackVM [PushI 3, PushI 5, Add]
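-- A small additional sketch (expected result noted in the comment, not
-- verified output), exercising the boolean opcodes as well:
testBool :: Either String StackVal
testBool = stackVM [PushB True, PushB False, Or]  -- Right (BVal True)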
|
boojinks/cis194-solutions
|
src/HW05/StackVM.hs
|
bsd-3-clause
| 2,218
| 0
| 9
| 748
| 802
| 421
| 381
| 39
| 1
|
module Haskell.Warnings (spam, main)
where
spam eggs = map lines eggs
main :: IO ()
main = (putStrLn "hello world")
|
flycheck/flycheck
|
test/resources/language/haskell/Warnings.hs
|
gpl-3.0
| 118
| 0
| 6
| 22
| 48
| 26
| 22
| 4
| 1
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ViewPatterns #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
module KAT_Camellia (tests) where
import Imports ()
import BlockCipher
import qualified Data.ByteString as B
import Crypto.Cipher.Camellia
vectors_camellia128 =
[ KAT_ECB (B.replicate 16 0) (B.replicate 16 0) (B.pack [0x3d,0x02,0x80,0x25,0xb1,0x56,0x32,0x7c,0x17,0xf7,0x62,0xc1,0xf2,0xcb,0xca,0x71])
, KAT_ECB (B.pack [0x01,0x23,0x45,0x67,0x89,0xab,0xcd,0xef,0xfe,0xdc,0xba,0x98,0x76,0x54,0x32,0x10])
(B.pack [0x01,0x23,0x45,0x67,0x89,0xab,0xcd,0xef,0xfe,0xdc,0xba,0x98,0x76,0x54,0x32,0x10])
(B.pack [0x67,0x67,0x31,0x38,0x54,0x96,0x69,0x73,0x08,0x57,0x06,0x56,0x48,0xea,0xbe,0x43])
]
vectors_camellia192 =
[ KAT_ECB (B.pack [0x01,0x23,0x45,0x67,0x89,0xab,0xcd,0xef,0xfe,0xdc,0xba,0x98,0x76,0x54,0x32,0x10,0x00,0x11,0x22,0x33,0x44,0x55,0x66,0x77]) (B.pack [0x01,0x23,0x45,0x67,0x89,0xab,0xcd,0xef,0xfe,0xdc,0xba,0x98,0x76,0x54,0x32,0x10]) (B.pack [0xb4,0x99,0x34,0x01,0xb3,0xe9,0x96,0xf8,0x4e,0xe5,0xce,0xe7,0xd7,0x9b,0x09,0xb9])
]
vectors_camellia256 =
[ KAT_ECB (B.pack [0x01,0x23,0x45,0x67,0x89,0xab,0xcd,0xef,0xfe,0xdc,0xba,0x98,0x76,0x54,0x32,0x10 ,0x00,0x11,0x22,0x33,0x44,0x55,0x66,0x77,0x88,0x99,0xaa,0xbb,0xcc,0xdd,0xee,0xff])
(B.pack [0x01,0x23,0x45,0x67,0x89,0xab,0xcd,0xef,0xfe,0xdc,0xba,0x98,0x76,0x54,0x32,0x10])
(B.pack [0x9a,0xcc,0x23,0x7d,0xff,0x16,0xd7,0x6c,0x20,0xef,0x7c,0x91,0x9e,0x3a,0x75,0x09])
]
kats128 = defaultKATs { kat_ECB = vectors_camellia128 }
kats192 = defaultKATs { kat_ECB = vectors_camellia192 }
kats256 = defaultKATs { kat_ECB = vectors_camellia256 }
tests = testBlockCipher kats128 (undefined :: Camellia128)
|
nomeata/cryptonite
|
tests/KAT_Camellia.hs
|
bsd-3-clause
| 1,791
| 0
| 9
| 198
| 798
| 506
| 292
| 24
| 1
|
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="it-IT">
<title>Report Generation</title>
<maps>
<homeID>reports</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
thc202/zap-extensions
|
addOns/reports/src/main/javahelp/org/zaproxy/addon/reports/resources/help_it_IT/helpset_it_IT.hs
|
apache-2.0
| 966
| 98
| 29
| 156
| 395
| 209
| 186
| -1
| -1
|
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="fil-PH">
<title>Ang Browser na tanawin | Extension ng ZAP</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Ang mga nilalaman</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Ang index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Hanapin</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Mga paborito</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
thc202/zap-extensions
|
addOns/browserView/src/main/javahelp/org/zaproxy/zap/extension/browserView/resources/help_fil_PH/helpset_fil_PH.hs
|
apache-2.0
| 1,004
| 95
| 29
| 166
| 416
| 218
| 198
| -1
| -1
|
{-# LANGUAGE DeriveDataTypeable #-}
-----------------------------------------------------------------------------
-- |
-- Module : Language.C.Analysis.Syntax
-- Copyright : (c) 2008 Benedikt Huber
-- License : BSD-style
-- Maintainer : benedikt.huber@gmail.com
-- Stability : alpha
-- Portability : ghc
--
-- This module contains definitions for representing C translation units.
-- In contrast to 'Language.C.Syntax.AST', the representation tries to express the semantics
-- of a translation unit.
---------------------------------------------------------------------------------------------------
module Language.C.Analysis.SemRep(
-- * Sums of tags and identifiers
TagDef(..),typeOfTagDef,
Declaration(..),declIdent,declName,declType,declAttrs,
IdentDecl(..),objKindDescr, splitIdentDecls,
-- * Global definitions
GlobalDecls(..),emptyGlobalDecls,filterGlobalDecls,mergeGlobalDecls,
-- * Events for visitors
DeclEvent(..),
-- * Declarations and definitions
Decl(..),
ObjDef(..),isTentative,
FunDef(..),
ParamDecl(..),MemberDecl(..),
TypeDef(..),identOfTypeDef,
VarDecl(..),
-- * Declaration attributes
DeclAttrs(..),isExtDecl,
Storage(..),declStorage,ThreadLocal,Register,
Linkage(..),hasLinkage,declLinkage,
-- * Types
Type(..),
FunType(..),
ArraySize(..),
TypeDefRef(..),
TypeName(..),BuiltinType(..),
IntType(..),FloatType(..),
HasSUERef(..),HasCompTyKind(..),
CompTypeRef(..),CompType(..),typeOfCompDef,CompTyKind(..),
EnumTypeRef(..),EnumType(..),typeOfEnumDef,
Enumerator(..),
TypeQuals(..),noTypeQuals,mergeTypeQuals,
-- * Variable names
VarName(..),identOfVarName,isNoName,AsmName,
-- * Attributes (STUB, not yet analyzed)
Attr(..),Attributes,noAttributes,mergeAttributes,
-- * Statements and Expressions (STUB, aliases to Syntax)
Stmt,Expr,Initializer,AsmBlock,
)
where
import Language.C.Data
import Language.C.Syntax
import Language.C.Syntax.Constants
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Generics
import Text.PrettyPrint.HughesPJ
-- | accessor class : struct\/union\/enum names
class HasSUERef a where
sueRef :: a -> SUERef
-- | accessor class : composite type tags (struct or union)
class HasCompTyKind a where
compTag :: a -> CompTyKind
-- | Composite type definitions (tags)
data TagDef = CompDef CompType --definition
| EnumDef EnumType -- enum definition
deriving (Typeable, Data {-! ,CNode !-})
instance HasSUERef TagDef where
sueRef (CompDef ct) = sueRef ct
sueRef (EnumDef et) = sueRef et
-- | return the type corresponding to a tag definition
typeOfTagDef :: TagDef -> TypeName
typeOfTagDef (CompDef comptype) = typeOfCompDef comptype
typeOfTagDef (EnumDef enumtype) = typeOfEnumDef enumtype
-- | All datatypes aggregating a declaration are instances of @Declaration@
class Declaration n where
-- | get the name, type and declaration attributes of a declaration or definition
getVarDecl :: n -> VarDecl
-- | get the declaration corresponding to a definition
declOfDef :: (Declaration n, CNode n) => n -> Decl
declOfDef def = let vd = getVarDecl def in Decl vd (nodeInfo def)
-- | get the variable identifier of a declaration (only safe if the
-- declaration is known to have a name)
declIdent :: (Declaration n) => n -> Ident
declIdent = identOfVarName . declName
-- | get the variable name of a @Declaration@
declName :: (Declaration n) => n -> VarName
declName = (\(VarDecl n _ _) -> n) . getVarDecl
-- | get the type of a @Declaration@
declType :: (Declaration n) => n -> Type
declType = (\(VarDecl _ _ ty) -> ty) . getVarDecl
-- | get the declaration attributes of a @Declaration@
declAttrs :: (Declaration n) => n -> DeclAttrs
declAttrs = (\(VarDecl _ specs _) -> specs) . getVarDecl
instance (Declaration a, Declaration b) => Declaration (Either a b) where
getVarDecl = either getVarDecl getVarDecl
-- | identifiers, typedefs and enumeration constants (namespace sum)
data IdentDecl = Declaration Decl -- ^ object or function declaration
| ObjectDef ObjDef -- ^ object definition
| FunctionDef FunDef -- ^ function definition
| EnumeratorDef Enumerator -- ^ definition of an enumerator
deriving (Typeable, Data {-! ,CNode !-})
instance Declaration IdentDecl where
getVarDecl (Declaration decl) = getVarDecl decl
getVarDecl (ObjectDef def) = getVarDecl def
getVarDecl (FunctionDef def) = getVarDecl def
getVarDecl (EnumeratorDef def) = getVarDecl def
-- | textual description of the kind of an object
objKindDescr :: IdentDecl -> String
objKindDescr (Declaration _ ) = "declaration"
objKindDescr (ObjectDef _) = "object definition"
objKindDescr (FunctionDef _) = "function definition"
objKindDescr (EnumeratorDef _) = "enumerator definition"
-- | @splitIdentDecls includeAllDecls@ splits a map of object, function and enumerator declarations and definitions into one map
-- holding declarations, and three maps for object definitions, enumerator definitions and function definitions.
-- If @includeAllDecls@ is @True@ all declarations are present in the first map, otherwise only those where no corresponding definition
-- is available.
splitIdentDecls :: Bool -> Map Ident IdentDecl -> (Map Ident Decl,
( Map Ident Enumerator,
Map Ident ObjDef,
Map Ident FunDef ) )
splitIdentDecls include_all = Map.foldWithKey (if include_all then deal else deal') (Map.empty,(Map.empty,Map.empty,Map.empty))
where
deal ident entry (decls,defs) = (Map.insert ident (declOfDef entry) decls, addDef ident entry defs)
deal' ident (Declaration d) (decls,defs) = (Map.insert ident d decls,defs)
deal' ident def (decls,defs) = (decls, addDef ident def defs)
addDef ident entry (es,os,fs) =
case entry of
Declaration _ -> (es,os,fs)
EnumeratorDef e -> (Map.insert ident e es,os,fs)
ObjectDef o -> (es,Map.insert ident o os,fs)
FunctionDef f -> (es, os,Map.insert ident f fs)
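-- A minimal usage sketch (the helper below is illustrative and not part of the
-- original module): split the object map of a 'GlobalDecls', keeping a
-- declaration only where no corresponding definition exists.
splitGlobalObjs :: GlobalDecls -> (Map Ident Decl, (Map Ident Enumerator, Map Ident ObjDef, Map Ident FunDef))
splitGlobalObjs = splitIdentDecls False . gObjs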
-- | global declaration\/definition table returned by the analysis
data GlobalDecls = GlobalDecls {
gObjs :: Map Ident IdentDecl,
gTags :: Map SUERef TagDef,
gTypeDefs :: Map Ident TypeDef
}
-- | empty global declaration table
emptyGlobalDecls :: GlobalDecls
emptyGlobalDecls = GlobalDecls Map.empty Map.empty Map.empty
-- | filter global declarations
filterGlobalDecls :: (DeclEvent -> Bool) -> GlobalDecls -> GlobalDecls
filterGlobalDecls decl_filter gmap = GlobalDecls
{
gObjs = Map.filter (decl_filter . DeclEvent) (gObjs gmap),
gTags = Map.filter (decl_filter . TagEvent) (gTags gmap),
gTypeDefs = Map.filter (decl_filter . TypeDefEvent) (gTypeDefs gmap)
}
-- | merge global declarations
mergeGlobalDecls :: GlobalDecls -> GlobalDecls -> GlobalDecls
mergeGlobalDecls gmap1 gmap2 = GlobalDecls
{
gObjs = Map.union (gObjs gmap1) (gObjs gmap2),
gTags = Map.union (gTags gmap1) (gTags gmap2),
gTypeDefs = Map.union (gTypeDefs gmap1) (gTypeDefs gmap2)
}
-- * Events
-- | Declaration events
--
-- Those events are reported to callbacks, which are executed during the traversal.
data DeclEvent =
TagEvent TagDef
-- ^ file-scope struct\/union\/enum event
| DeclEvent IdentDecl
-- ^ file-scope declaration or definition
| ParamEvent ParamDecl
-- ^ parameter declaration
| LocalEvent IdentDecl
-- ^ local variable declaration or definition
| TypeDefEvent TypeDef
-- ^ a type definition
| AsmEvent AsmBlock
-- ^ assembler block
deriving ({-! CNode !-})
-- * Declarations and definitions
-- | Declarations, which aren't definitions
data Decl = Decl VarDecl NodeInfo
deriving (Typeable, Data {-! ,CNode !-})
instance Declaration Decl where
getVarDecl (Decl vd _) = vd
-- | Object Definitions
--
-- An object definition is a declaration together with an initializer.
--
-- If the initializer is missing, it is a tentative definition, i.e. a
-- definition which might be overridden later on.
data ObjDef = ObjDef VarDecl (Maybe Initializer) NodeInfo
deriving (Typeable, Data {-! ,CNode !-})
instance Declaration ObjDef where
getVarDecl (ObjDef vd _ _) = vd
-- | Returns @True@ if the given object definition is tentative.
isTentative :: ObjDef -> Bool
isTentative (ObjDef decl init_opt _) | isExtDecl decl = maybe True (const False) init_opt
| otherwise = False
-- | Function definitions
--
-- A function definition is a declaration together with a statement (the function body).
data FunDef = FunDef VarDecl Stmt NodeInfo
deriving (Typeable, Data {-! ,CNode !-})
instance Declaration FunDef where
getVarDecl (FunDef vd _ _) = vd
-- | Parameter declaration
data ParamDecl = ParamDecl VarDecl NodeInfo
| AbstractParamDecl VarDecl NodeInfo
deriving (Typeable, Data {-! ,CNode !-} )
instance Declaration ParamDecl where
getVarDecl (ParamDecl vd _) = vd
getVarDecl (AbstractParamDecl vd _) = vd
-- | Struct\/Union member declaration
data MemberDecl = MemberDecl VarDecl (Maybe Expr) NodeInfo
-- ^ @MemberDecl vardecl bitfieldsize node@
| AnonBitField Type Expr NodeInfo
-- ^ @AnonBitField typ size@
deriving (Typeable, Data {-! ,CNode !-} )
instance Declaration MemberDecl where
getVarDecl (MemberDecl vd _ _) = vd
getVarDecl (AnonBitField ty _ _) = VarDecl NoName (DeclAttrs False NoStorage []) ty
-- | @typedef@ definitions.
--
-- The identifier is a new name for the given type.
data TypeDef = TypeDef Ident Type Attributes NodeInfo
deriving (Typeable, Data {-! ,CNode !-} )
-- | return the identifier of a @typedef@
identOfTypeDef :: TypeDef -> Ident
identOfTypeDef (TypeDef ide _ _ _) = ide
-- | Generic variable declarations
data VarDecl = VarDecl VarName DeclAttrs Type
deriving (Typeable, Data)
instance Declaration VarDecl where
getVarDecl = id
-- | @isExtDecl d@ returns @True@ if the declaration has /linkage/
isExtDecl :: (Declaration n) => n -> Bool
isExtDecl = hasLinkage . declStorage
-- | Declaration attributes of the form @DeclAttrs isInlineFunction storage attrs@
--
-- They specify the storage and linkage of a declared object.
data DeclAttrs = DeclAttrs Bool Storage Attributes
-- ^ @DeclAttrs inline storage attrs@
deriving (Typeable, Data)
-- | get the 'Storage' of a declaration
declStorage :: (Declaration d) => d -> Storage
declStorage d = case declAttrs d of (DeclAttrs _ st _) -> st
-- In C, identifiers can have external, internal or no linkage
-- (same object everywhere, same object within the translation unit, or unique, respectively).
-- * top-level identifiers
-- static : internal linkage (objects and function defs)
-- extern : linkage of prior declaration (if specified), external linkage otherwise
-- no-spec: external linkage
-- * storage duration
-- * static storage duration: objects with external or internal linkage, or local ones with the static keyword
-- * automatic storage duration: otherwise (register)
-- See http://publications.gbdirect.co.uk/c_book/chapter8/declarations_and_definitions.html, Table 8.1, 8.2
-- | Storage duration and linkage of a variable
data Storage = NoStorage -- ^ no storage
| Auto Register -- ^ automatic storage (optional: register)
| Static Linkage ThreadLocal -- ^ static storage, linkage spec and thread local specifier (gnu c)
| FunLinkage Linkage -- ^ function, either internal or external linkage
deriving (Typeable, Data, Show, Eq, Ord)
type ThreadLocal = Bool
type Register = Bool
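-- Illustrative mapping (not part of the original module) of common C
-- declarations onto 'Storage', following the rules sketched above:
--
--   file scope:   @static int x;@   ~ @Static InternalLinkage False@
--   file scope:   @extern int y;@   ~ @Static ExternalLinkage False@ (no prior declaration)
--   file scope:   @int z;@          ~ @Static ExternalLinkage False@
--   block scope:  @int w;@          ~ @Auto False@
--   block scope:  @register int r;@ ~ @Auto True@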
-- | Linkage: Either no linkage, internal to the translation unit or external
data Linkage = NoLinkage | InternalLinkage | ExternalLinkage
deriving (Typeable, Data, Show, Eq, Ord)
-- | return @True@ if the object has linkage
hasLinkage :: Storage -> Bool
hasLinkage (Auto _) = False
hasLinkage (Static NoLinkage _) = False
hasLinkage _ = True
-- | Get the linkage of a definition
declLinkage :: (Declaration d) => d -> Linkage
declLinkage decl =
case declStorage decl of
NoStorage -> undefined
Auto _ -> NoLinkage
Static linkage _ -> linkage
FunLinkage linkage -> linkage
-- * types
-- | types of C objects
data Type =
DirectType TypeName TypeQuals Attributes
-- ^ a non-derived type
| PtrType Type TypeQuals Attributes
-- ^ pointer type
| ArrayType Type ArraySize TypeQuals Attributes
-- ^ array type
| FunctionType FunType Attributes
-- ^ function type
| TypeDefType TypeDefRef TypeQuals Attributes
-- ^ a defined type
deriving (Typeable, Data)
-- | Function types are of the form @FunType return-type params isVariadic@.
--
-- If the parameter types aren't yet known, the function has type @FunTypeIncomplete return-type@.
data FunType = FunType Type [ParamDecl] Bool
| FunTypeIncomplete Type
deriving (Typeable, Data)
-- | An array type may either have unknown size or a specified array size, the latter either variable or constant.
-- Furthermore, when used as a function parameter, the size may be qualified as /static/.
-- In a function prototype, the size may be `Unspecified variable size' (@[*]@).
data ArraySize = UnknownArraySize Bool
-- ^ @UnknownArraySize is-starred@
| ArraySize Bool Expr
               -- ^ @ArraySize is-static size-expr@
deriving (Typeable, Data)
-- | normalized type representation
data TypeName =
TyVoid
| TyIntegral IntType
| TyFloating FloatType
| TyComplex FloatType
| TyComp CompTypeRef
| TyEnum EnumTypeRef
| TyBuiltin BuiltinType
deriving (Typeable, Data)
-- | Builtin type (va_list, anything)
data BuiltinType = TyVaList
| TyAny
deriving (Typeable, Data)
-- | typedef references.
-- If the actual type is known, it is attached for convenience
data TypeDefRef = TypeDefRef Ident (Maybe Type) NodeInfo
deriving (Typeable, Data {-! ,CNode !-})
-- | integral types (C99 6.7.2.2)
data IntType =
TyBool
| TyChar
| TySChar
| TyUChar
| TyShort
| TyUShort
| TyInt
| TyUInt
| TyLong
| TyULong
| TyLLong
| TyULLong
deriving (Typeable, Data, Eq, Ord)
instance Show IntType where
show TyBool = "_Bool"
show TyChar = "char"
show TySChar = "signed char"
show TyUChar = "unsigned char"
show TyShort = "short"
show TyUShort = "unsigned short"
show TyInt = "int"
show TyUInt = "unsigned int"
show TyLong = "long"
show TyULong = "unsigned long"
show TyLLong = "long long"
show TyULLong = "unsigned long long"
-- | floating point type (C99 6.7.2.2)
data FloatType =
TyFloat
| TyDouble
| TyLDouble
deriving (Typeable, Data, Eq, Ord)
instance Show FloatType where
show TyFloat = "float"
show TyDouble = "double"
show TyLDouble = "long double"
-- | composite type declarations
data CompTypeRef = CompTypeRef SUERef CompTyKind NodeInfo
deriving (Typeable, Data {-! ,CNode !-})
instance HasSUERef CompTypeRef where sueRef (CompTypeRef ref _ _) = ref
instance HasCompTyKind CompTypeRef where compTag (CompTypeRef _ tag _) = tag
data EnumTypeRef = EnumTypeRef SUERef NodeInfo
deriving (Typeable, Data {-! ,CNode !-})
instance HasSUERef EnumTypeRef where sueRef (EnumTypeRef ref _) = ref
-- | Composite type (struct or union).
data CompType = CompType SUERef CompTyKind [MemberDecl] Attributes NodeInfo
deriving (Typeable, Data {-! ,CNode !-} )
instance HasSUERef CompType where sueRef (CompType ref _ _ _ _) = ref
instance HasCompTyKind CompType where compTag (CompType _ tag _ _ _) = tag
-- | return the type of a composite type definition
typeOfCompDef :: CompType -> TypeName
typeOfCompDef (CompType ref tag _ _ _) = TyComp (CompTypeRef ref tag undefNode)
-- | a tag to determine whether we refer to a @struct@ or @union@, see 'CompType'.
data CompTyKind = StructTag
| UnionTag
deriving (Eq,Ord,Typeable,Data)
instance Show CompTyKind where
show StructTag = "struct"
show UnionTag = "union"
-- | Representation of C enumeration types
data EnumType = EnumType SUERef [Enumerator] Attributes NodeInfo
-- ^ @EnumType name enumeration-constants attrs node@
deriving (Typeable, Data {-! ,CNode !-} )
instance HasSUERef EnumType where sueRef (EnumType ref _ _ _) = ref
-- | return the type of an enum definition
typeOfEnumDef :: EnumType -> TypeName
typeOfEnumDef (EnumType ref _ _ _) = TyEnum (EnumTypeRef ref undefNode)
-- | An Enumerator consists of an identifier, a constant expression and a link to its type
data Enumerator = Enumerator Ident Expr EnumType NodeInfo
deriving (Typeable, Data {-! ,CNode !-})
instance Declaration Enumerator where
getVarDecl (Enumerator ide _ enumty _) =
VarDecl
(VarName ide Nothing)
(DeclAttrs False NoStorage [])
(DirectType (typeOfEnumDef enumty) noTypeQuals noAttributes)
-- | Type qualifiers: constant, volatile and restrict
data TypeQuals = TypeQuals { constant :: Bool, volatile :: Bool, restrict :: Bool }
deriving (Typeable, Data)
-- | no type qualifiers
noTypeQuals :: TypeQuals
noTypeQuals = TypeQuals False False False
-- | merge (/&&/) two type qualifier sets
mergeTypeQuals :: TypeQuals -> TypeQuals -> TypeQuals
mergeTypeQuals (TypeQuals c1 v1 r1) (TypeQuals c2 v2 r2) = TypeQuals (c1 && c2) (v1 && v2) (r1 && r2)
-- * initializers
-- | 'Initializer' is currently an alias for 'CInit'.
--
-- We're planning a normalized representation, but this depends on the implementation of
-- constant expression evaluation
type Initializer = CInit
-- | Normalized C Initializers
-- * If the expression has scalar type, the initializer is an expression
-- * If the expression has struct type, the initializer is a map from designators to initializers
-- * If the expression has array type, the initializer is a list of values
-- Not implemented yet, as it depends on constant expression evaluation
-- * names and attributes
-- | @VarName name assembler-name@ is the name of a declared object
data VarName = VarName Ident (Maybe AsmName)
| NoName
deriving (Typeable, Data)
identOfVarName :: VarName -> Ident
identOfVarName NoName = error "identOfVarName: NoName"
identOfVarName (VarName ident _) = ident
isNoName :: VarName -> Bool
isNoName NoName = True
isNoName _ = False
-- | Top level assembler block (alias for @CStrLit@)
type AsmBlock = CStrLit
-- | Assembler name (alias for @CStrLit@)
type AsmName = CStrLit
-- | @__attribute__@ annotations
--
-- Those are of the form @Attr attribute-name attribute-parameters@,
-- and serve as generic properties of some syntax tree elements.
--
-- Some examples:
--
-- * labels can be attributed with /unused/ to indicate that they're not used
--
-- * struct definitions can be attributed with /packed/ to tell the compiler to use the most compact representation
--
-- * declarations can be attributed with /deprecated/
--
-- * function declarations can be attributed with /noreturn/ to tell the compiler that the function will never return,
--
-- * or with /const/ to indicate that it is a pure function
--
-- /TODO/: ultimately, we want to parse attributes and represent them in a typed way
data Attr = Attr Ident [Expr] NodeInfo
deriving (Typeable, Data {-! ,CNode !-})
type Attributes = [Attr]
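-- An illustrative value (not part of the original module; it assumes
-- 'internalIdent', re-exported by "Language.C.Data", is in scope): a bare
-- @__attribute__((packed))@ with no parameters could be represented as
packedAttrExample :: Attr
packedAttrExample = Attr (internalIdent "packed") [] undefNode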
-- |Empty attribute list
noAttributes :: Attributes
noAttributes = []
-- |Merge attribute lists
-- /TODO/: currently does not remove duplicates
mergeAttributes :: Attributes -> Attributes -> Attributes
mergeAttributes = (++)
-- * statements and expressions (Type aliases)
-- | 'Stmt' is an alias for 'CStat' (Syntax)
type Stmt = CStat
-- | 'Expr' is currently an alias for 'CExpr' (Syntax)
type Expr = CExpr
-- GENERATED START
instance CNode TagDef where
nodeInfo (CompDef d) = nodeInfo d
nodeInfo (EnumDef d) = nodeInfo d
instance Pos TagDef where
posOf x = posOf (nodeInfo x)
instance CNode IdentDecl where
nodeInfo (Declaration d) = nodeInfo d
nodeInfo (ObjectDef d) = nodeInfo d
nodeInfo (FunctionDef d) = nodeInfo d
nodeInfo (EnumeratorDef d) = nodeInfo d
instance Pos IdentDecl where
posOf x = posOf (nodeInfo x)
instance CNode DeclEvent where
nodeInfo (TagEvent d) = nodeInfo d
nodeInfo (DeclEvent d) = nodeInfo d
nodeInfo (ParamEvent d) = nodeInfo d
nodeInfo (LocalEvent d) = nodeInfo d
nodeInfo (TypeDefEvent d) = nodeInfo d
nodeInfo (AsmEvent d) = nodeInfo d
instance Pos DeclEvent where
posOf x = posOf (nodeInfo x)
instance CNode Decl where
nodeInfo (Decl _ n) = n
instance Pos Decl where
posOf x = posOf (nodeInfo x)
instance CNode ObjDef where
nodeInfo (ObjDef _ _ n) = n
instance Pos ObjDef where
posOf x = posOf (nodeInfo x)
instance CNode FunDef where
nodeInfo (FunDef _ _ n) = n
instance Pos FunDef where
posOf x = posOf (nodeInfo x)
instance CNode ParamDecl where
nodeInfo (ParamDecl _ n) = n
nodeInfo (AbstractParamDecl _ n) = n
instance Pos ParamDecl where
posOf x = posOf (nodeInfo x)
instance CNode MemberDecl where
nodeInfo (MemberDecl _ _ n) = n
nodeInfo (AnonBitField _ _ n) = n
instance Pos MemberDecl where
posOf x = posOf (nodeInfo x)
instance CNode TypeDef where
nodeInfo (TypeDef _ _ _ n) = n
instance Pos TypeDef where
posOf x = posOf (nodeInfo x)
instance CNode TypeDefRef where
nodeInfo (TypeDefRef _ _ n) = n
instance Pos TypeDefRef where
posOf x = posOf (nodeInfo x)
instance CNode CompTypeRef where
nodeInfo (CompTypeRef _ _ n) = n
instance Pos CompTypeRef where
posOf x = posOf (nodeInfo x)
instance CNode EnumTypeRef where
nodeInfo (EnumTypeRef _ n) = n
instance Pos EnumTypeRef where
posOf x = posOf (nodeInfo x)
instance CNode CompType where
nodeInfo (CompType _ _ _ _ n) = n
instance Pos CompType where
posOf x = posOf (nodeInfo x)
instance CNode EnumType where
nodeInfo (EnumType _ _ _ n) = n
instance Pos EnumType where
posOf x = posOf (nodeInfo x)
instance CNode Enumerator where
nodeInfo (Enumerator _ _ _ n) = n
instance Pos Enumerator where
posOf x = posOf (nodeInfo x)
instance CNode Attr where
nodeInfo (Attr _ _ n) = n
instance Pos Attr where
posOf x = posOf (nodeInfo x)
-- GENERATED STOP
|
wdanilo/haskell-language-c
|
src/Language/C/Analysis/SemRep.hs
|
bsd-3-clause
| 23,110
| 0
| 12
| 5,284
| 4,897
| 2,707
| 2,190
| 375
| 6
|
{-# OPTIONS_GHC -fwarn-unsafe #-}
module SH_Overlap11_B (
C(..)
) where
class C a where
f :: a -> String
|
urbanslug/ghc
|
testsuite/tests/safeHaskell/overlapping/SH_Overlap11_B.hs
|
bsd-3-clause
| 115
| 0
| 7
| 29
| 32
| 19
| 13
| 5
| 0
|
{-# OPTIONS_GHC -fwarn-unsafe #-}
{-# LANGUAGE FlexibleInstances #-}
-- | Same as `SH_Overlap6`, but now we are inferring safety. Should be inferred
-- unsafe due to overlapping instances at call site `f`.
--
-- Testing that we are given correct reason.
module SH_Overlap11 where
import SH_Overlap11_A
instance
C [a] where
f _ = "[a]"
test :: String
test = f ([1,2,3,4] :: [Int])
|
urbanslug/ghc
|
testsuite/tests/safeHaskell/overlapping/SH_Overlap11.hs
|
bsd-3-clause
| 391
| 0
| 7
| 74
| 66
| 42
| 24
| 9
| 1
|
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
module Data.Sequence.RightPerfect where
import Numeric.Peano
import Data.Kind
import Control.Applicative
import Data.Bits
import Data.Functor.Identity
import Data.Traversable
import Data.Coerce.Utils
{-# ANN module "HLint: ignore Avoid lambda" #-}
data Nil a = Nil
type family Array (xs :: [ℕ]) = (arr :: Type -> Type) | arr -> xs where
Array '[] = Nil
Array (Z : xs) = Odd xs
Array (S x : xs) = Even x xs
data Odd xs a = Odd a (Array xs a) (Array xs a)
data Even x xs a = Even (Array (x:xs) a) (Array (x:xs) a)
class ArrayTraversable xs where
arrayTraverse :: Applicative f => (Array xs b -> c) -> (a -> f b) -> Array xs a -> f c
instance ArrayTraversable '[] where
arrayTraverse k _ _ = pure (k Nil)
instance ArrayTraversable xs =>
ArrayTraversable ('Z : xs) where
arrayTraverse k f (Odd x ys zs) =
liftA3
(\x' ys' zs' ->
k (Odd x' ys' zs'))
(f x)
(arrayTraverse id f ys)
(arrayTraverse id f zs)
instance ArrayTraversable (x : xs) =>
ArrayTraversable ('S x : xs) where
arrayTraverse k f (Even xs ys) =
liftA2
(\xs' ys' ->
k (Even xs' ys'))
(arrayTraverse id f xs)
(arrayTraverse id f ys)
class ArrayCreate xs where
arrayCreate :: Applicative f => (Array xs a -> b) -> f a -> f b
instance ArrayCreate '[] where
arrayCreate k _ = pure (k Nil)
instance ArrayCreate xs => ArrayCreate ('Z : xs) where
arrayCreate k x = let ys = arrayCreate id x in liftA3 (\x' ys' zs' -> k (Odd x' ys' zs')) x ys ys
instance ArrayCreate (x:xs) => ArrayCreate ('S x : xs) where
  arrayCreate k x = let ys = arrayCreate id x in liftA2 (\ ys' zs' -> k (Even ys' zs')) ys ys
class ArrayZip xs where
arrayZip :: (a -> b -> c) -> Array xs a -> Array xs b -> Array xs c
instance ArrayZip '[] where
arrayZip _ _ _ = Nil
instance ArrayZip xs => ArrayZip ('Z:xs) where
arrayZip f (Odd x1 ys1 zs1) (Odd x2 ys2 zs2)
= Odd (f x1 x2) (arrayZip f ys1 ys2) (arrayZip f zs1 zs2)
instance ArrayZip (x:xs) => ArrayZip ('S x : xs) where
arrayZip f (Even xs1 ys1) (Even xs2 ys2) = Even (arrayZip f xs1 xs2) (arrayZip f ys1 ys2)
class ArrayIndex (xs :: [ℕ]) where
arrayIndex :: Applicative f => Int -> (Array xs a -> b) -> (a -> f a) -> Array xs a -> f b
instance ArrayIndex '[] where
arrayIndex _ k _ _ = pure (k Nil)
instance ArrayIndex xs => ArrayIndex ('Z:xs) where
arrayIndex 0 k f (Odd x ys zs) = fmap (\x' -> k (Odd x' ys zs)) (f x)
arrayIndex i k f (Odd x ys zs) = case i - 1 of
!j -> if testBit j 0
then arrayIndex (shiftR j 1) (\zs' -> k (Odd x ys zs')) f zs
else arrayIndex (shiftR j 1) (\ys' -> k (Odd x ys' zs)) f ys
instance ArrayIndex (x:xs) => ArrayIndex ('S x : xs) where
arrayIndex i k f (Even xs ys)
| testBit i 0 = arrayIndex (shiftR i 1) (\ys' -> k (Even xs ys')) f ys
| otherwise = arrayIndex (shiftR i 1) (\xs' -> k (Even xs' ys)) f xs
arrayFoldMap :: forall xs a b. (ArrayTraversable xs, Monoid b) => (a -> b) -> Array xs a -> b
arrayFoldMap f = getConst #. arrayTraverse id (Const #. f)
{-# INLINE arrayFoldMap #-}
arrayFmap :: forall xs a b. (ArrayTraversable xs) => (a -> b) -> Array xs a -> Array xs b
arrayFmap f = runIdentity #. arrayTraverse id (Identity #. f)
{-# INLINE arrayFmap #-}
type KnownArray xs = (ArrayTraversable xs, ArrayIndex xs, ArrayZip xs)
newtype WrappedArray xs a
= WrappedArray { runWrappedArray :: Array xs a }
instance ArrayTraversable xs => Functor (WrappedArray xs) where
fmap = fmapDefault
instance ArrayTraversable xs => Foldable (WrappedArray xs) where
foldMap = foldMapDefault
instance ArrayTraversable xs => Traversable (WrappedArray xs) where
traverse f (WrappedArray xs) = arrayTraverse WrappedArray f xs
instance (ArrayCreate xs, ArrayZip xs, ArrayTraversable xs) =>
Applicative (WrappedArray xs) where
pure = WrappedArray #. runIdentity #. arrayCreate id .# Identity
liftA2 f (WrappedArray xs) (WrappedArray ys) = WrappedArray (arrayZip f xs ys)
|
oisdk/Square
|
src/Data/Sequence/RightPerfect.hs
|
mit
| 4,209
| 0
| 15
| 1,097
| 1,868
| 951
| 917
| -1
| -1
|
-- Problem 24
--
-- A permutation is an ordered arrangement of objects. For example, 3124 is
-- one possible permutation of the digits 1, 2, 3 and 4. If all of the
-- permutations are listed numerically or alphabetically, we call it
-- lexicographic order. The lexicographic permutations of 0, 1 and 2 are:
--
-- 012 021 102 120 201 210
--
-- What is the millionth lexicographic permutation of the digits 0, 1, 2,
-- 3, 4, 5, 6, 7, 8 and 9?
import Data.List
euler24 = (sort . permutations) "0123456789" !! 999999
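-- A smaller worked check of the same approach (illustrative; the binding below
-- is not part of the original file): for the digits "012" the lexicographic
-- permutations are 012, 021, 102, 120, 201, 210, so index 3 picks "120".
euler24Check :: String
euler24Check = (sort . permutations) "012" !! 3 -- == "120"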
|
RossMeikleham/Project-Euler-Haskell
|
24.hs
|
mit
| 525
| 1
| 8
| 108
| 39
| 24
| 15
| 2
| 1
|
{-# LANGUAGE CPP, DataKinds, FlexibleInstances, MultiParamTypeClasses, TypeFamilies, RecordWildCards, OverloadedStrings, TemplateHaskell, TypeOperators #-}
module Main where
import Control.Applicative
import Control.Monad
import Control.Monad.Trans.Either
import Control.Monad.Reader
import qualified Crypto.Hash.SHA1 as H
import Crypto.PubKey.OpenSsh
import Crypto.PubKey.HashDescr
import Crypto.PubKey.RSA.PKCS15
import Crypto.Random
import Data.Aeson
import qualified Data.ByteString as B
import qualified Data.ByteString.Base16 as B16
import qualified Data.ByteString.Base64 as B64
import qualified Data.ByteString.Char8 as BC
import qualified Data.Cache.LRU as LRU
import Data.Either.Combinators
import Data.IORef
import qualified Data.Map as M
import Data.Maybe
import Data.Monoid
import Data.Proxy
import Data.Serialize.Get (getWord32be, getWord8, runGet)
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import Data.Version
import Database.PostgreSQL.Simple
import Servant.API
import Servant.Server
import Web.Users.Types
import Web.Users.Postgresql ()
import Network.Wai.Handler.Warp
import System.Environment
import Command (Command (..), Option (..), userOption, passOption)
import Commands
import Model
import Paths_usercfg
#ifdef DEBUG
import Debug.Trace
trc :: Show a => String -> a -> a
trc str x = trace (str ++ ": " ++ show x) x
#endif
type Api = "info" :> Get Response
:<|> "dh" :> ReqBody DhRequest :> Post Response
:<|> "command" :> ReqBody DhCmdRequest :> Post Response
#ifdef DEBUG
:<|> "print_state" :> Get ()
#endif
api :: Proxy Api
api = Proxy
data DhData = DhData
{ dhLRU :: LRU.LRU T.Text B.ByteString
, dhCPRG :: SystemRNG
}
type DhState = IORef DhData
mbToE :: e -> Maybe a -> Either e a
mbToE _ (Just x) = Right x
mbToE e Nothing = Left e
mbToET :: Monad m => e -> Maybe a -> EitherT e m a
mbToET _ (Just x) = right x
mbToET e Nothing = left e
authPubKey :: DhCmdRequest -> Maybe B.ByteString -> T.Text -> T.Text -> Either Error (M.Map T.Text T.Text -> Bool)
authPubKey req shared sshHash sigBlob = do
shared' <- unpackShared shared
blob <- mapLeft (const $ ParseError "b64") $ B64.decode $ TE.encodeUtf8 sigBlob
(_, sig) <- mapLeft ParseError $ flip runGet blob $ do
al <- fromIntegral <$> getWord32be
algo <- B.pack <$> replicateM al getWord8
sl <- fromIntegral <$> getWord32be
sig <- B.pack <$> replicateM sl getWord8
return (algo, sig)
Right $ \sshKeys ->
let pubkey = M.lookup sshHash sshKeys
pubkey' = join $ unpackPubKey . decodePublic . TE.encodeUtf8 <$> pubkey
in fromMaybe False $ verify hashDescrSHA1
<$> pubkey'
<*> pure (shared' <> hashCmdRequest req)
<*> pure sig
where
unpackShared (Just x) = Right x
unpackShared _ = Left NoSharedKeyError
unpackPubKey (Right (OpenSshPublicKeyRsa x _)) = Just x
unpackPubKey _ = Nothing
runServer :: UserStorageBackend bck => Int -> bck -> [(T.Text, Command bck (IO Response))] -> IO ()
runServer port bck cmds = do
ep <- createEntropyPool
stref <- newIORef $ DhData (LRU.newLRU $ Just 10000) (cprgCreate ep)
runServer' stref
where
runServer' stref = run port $ serve api $ info
:<|> dh
:<|> command
#ifdef DEBUG
:<|> printState
#endif
where
info = return $ response $ object'
[ "commands" .= cmds
, "version" .= showVersion version
]
dh DhRequest {..} = liftIO $ do
st <- readIORef stref
let (shared, cprg) = cprgGenerate 256 (dhCPRG st)
modifyIORef stref $ \st' -> st'
          -- SHA-1 hashes are 40 hex digits; truncate anything longer before using it as a cache key
{ dhLRU = LRU.insert (T.take 40 dhReqHash) shared $ dhLRU st'
, dhCPRG = cprg
}
return $ response $ toJSON $ BC.unpack $ B64.encode shared
command req@(DhCmdRequest {..}) = liftIO $ do
st <- readIORef stref
let userMay = lookup (optName userOption) dhClOptions
passMay = lookup (optName passOption) dhClOptions
cmdHash = TE.decodeUtf8 $ B16.encode $ H.hash $ hashCmdRequest req
(lru', shared) = LRU.delete cmdHash $ dhLRU st
mbToR r = maybe (return $ responseFail r)
verifyUserPassword :: T.Text -> User UserData -> Bool
verifyUserPassword pass = verifyPassword (PasswordPlain pass) . u_password
auth cmd
| Just pass <- passMay
, Just user <- userMay = do
r <- withAuthUser bck user (verifyUserPassword pass) cmd
mbToR AuthError return r
| Just (keyHash, sig) <- dhClSig
, Just user <- userMay = runEitherT $ do
f <- hoistEither $ authPubKey req shared keyHash sig
r <- liftIO
$ withAuthUser bck user (f . usrSshKeys . u_more) cmd
hoistEither $ maybe (Left AuthError) id r
| otherwise = return $ responseFail AuthNeededError
exec cmd
| Left f <- cmdFn cmd =
mbToR MissingOptionsError ($ bck) $ runReaderT f dhClOptions
| Right f <- cmdFn cmd =
auth $ \uid -> mbToR MissingOptionsError (\rsv -> rsv uid bck)
$ runReaderT f dhClOptions
| otherwise = return $ responseFail NoSuchCommandError
execCmd
| Just cmd <- lookup dhClCommand cmds
= exec cmd
| otherwise
= return $ responseFail NoSuchCommandError
writeIORef stref $ st { dhLRU = lru' }
execCmd
#ifdef DEBUG
printState = liftIO $ do
readIORef stref >>= print . dhLRU
return ()
#endif
main :: IO ()
main = do
dburl <- fromMaybe "" <$> lookupEnv "DATABASE_URL"
port <- read . fromMaybe "8000" <$> lookupEnv "PORT"
bck <- connectPostgreSQL $ BC.pack dburl
initUserBackend bck
housekeepBackend bck
runServer port bck commands
|
pkamenarsky/usercfg
|
server/src/Main.hs
|
mit
| 6,756
| 0
| 24
| 2,354
| 1,900
| 960
| 940
| 135
| 3
|
module Y2016.D01Spec (spec) where
import Y2016
import Test.Hspec
spec :: Spec
spec = parallel $ do
describe "Day 1" $ do
describe "blockDistance" $ do
it "counts a simple case" $
blockDistance "R2, L3" `shouldBe` 5
it "counts looping routes" $
blockDistance "R2, R2, R2" `shouldBe` 2
it "counts longer routes" $
blockDistance "R5, L5, R5, R3" `shouldBe` 12
describe "visitedTwice" $ do
it "finds easter bunny HQ" $
visitedTwice "R8, R4, R4, R8" `shouldBe` Just 4
|
tylerjl/adventofcode
|
test/Y2016/D01Spec.hs
|
mit
| 598
| 0
| 16
| 212
| 142
| 69
| 73
| 16
| 1
|
import Control.Monad
import Data.Array.IO
import Data.Array
import Data.Array.Base (unsafeThaw, thaw)
import Data.List
-- convert element string into a tuple of int and string values
readElement e = (n, s)
where
arr = words e
n = read (head arr) :: Int
s = last arr
-- 99 is the max int input, so code below is OK
count :: [(Int, String)] -> [(Int, Int)]
count input = assocs . accumArray (+) 0 (0, 99) . map (\(i, s) -> (i, 1)) $ input
-- countLessThan = mapAccumL (\acc (x, c) -> (acc + c, (x, acc + c))) 0
-- split :: [a] -> ([a], [a])
split l = (take half l, drop half l)
where
len = length l
half = div len 2
adj = mod len 2
-- what's going on here
-- countingSort is curried, first argument is list (l)
-- then the resulting function expects counts, which is an array of (elem, count) tuples
-- so what does it do to tuples
-- it applies concatMap, which maps the function (uncurry $ flip replicate) over each tuple
-- and concatenates results into 1 list
-- the (uncurry $ flip replicate) does the following
-- (uncurry $ flip replicate) is just another way to say (uncurry (flip replicate))
-- uncurry converts curried function to a function on pairs
-- in other words it converts f x y into f (x, y)
-- that suggests that (flip replicate) is a curried function
-- and it is because flip f x y = f y x
-- so flip takes a function as input and returns "flipped" function
-- x and y are curried
-- thus uncurry turns it into a function on the pair: uncurry (flip f) (x, y) = f y x
-- and once uncurried it then accepts pairs of (a, b) and that's exactly what is in counts array
-- so what the hell is flip replicate in the first place?
-- it should take (0, 6) and turn it into list of 0s repeated 6 times
-- replicate N x repeats x N times, so replicate (0, 6) would not work the way we want it
-- first of all it would not get the tuple, but uncurrying takes care of that
-- so flip doesn't change the arguments themselves, it "alters" the order in which the function takes them
countingSort l = concatMap (uncurry $ flip replicate)
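-- A worked example of the combinator discussed above (the binding below is
-- illustrative and not part of the original file): each (value, count) pair
-- expands to `count` copies of `value`, and concatMap glues the pieces together.
countingSortExample :: [Int]
countingSortExample = concatMap (uncurry $ flip replicate) [(0, 2), (3, 0), (7, 3)]
-- == [0, 0, 7, 7, 7]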
-- placeElement :: (Int, String) -> m (a Int Int) -> m (a Int String) -> Array Int Int -> (Int, String)
-- placeElement :: Int -> String -> IO (IOArray Int Int) -> IO (IOArray Int (Int, String)) -> [Int] -> Int
placeElement num s outArr lessThanCounts diffsArr = do
let lessThanCount = lessThanCounts!num
curDiff <- readArray diffsArr num
let placeIndex = lessThanCount - curDiff
writeArray outArr placeIndex s
writeArray diffsArr num (curDiff - 1)
return Nothing
-- smartCountingSort :: [(Int, String)] -> [Int] -> m (Int, (t0, String))
smartCountingSort list = do
let listLen = length list
let countsLen = 100
let countsArr = accumArray (+) 0 (0, pred countsLen) . map (\(i, s) -> (i, 1)) $ list
print "done countsArr"
let lessThanCountsList = snd (mapAccumL (\acc (x, c) -> (acc + c, (x, acc + c))) 0 (assocs countsArr))
print "done lessThanCountsList"
let lessThanCountsArr = listArray (0, pred countsLen) (map snd lessThanCountsList)
print "done lessThanCountsArr"
outArr <- newArray (0, pred listLen) "-" :: IO (IOArray Int String)
print "done outArr"
-- this looks like to be the costly part
-- find a way to reduce this step, e.g. get the counts as part of getting less than counts
diffsArr <- unsafeThaw countsArr :: IO (IOArray Int Int)
print "done diffsArr"
let (fstHalf, sndHalf) = split list
print "done split"
forM_ fstHalf $ \(num, s) -> do
placeElement num "-" outArr lessThanCountsArr diffsArr
return Nothing
print "done 1st half"
forM_ sndHalf $ \(num, s) -> do
-- print (num, s)
placeElement num s outArr lessThanCountsArr diffsArr
return Nothing
print "done 2nd half"
x <- getElems outArr
print "done get elems"
return (unwords x)
-- return Nothing
-- -- smartCountingSort :: [(Int, String)] -> [Int] -> m (Int, (t0, String))
-- smartCountingSort list counts lessThanCounts = do
-- let len = length list
-- outArr <- newArray (0, pred len) "-" :: IO (IOArray Int String)
-- -- print "done outArr"
-- -- shiftArr <- newArray (0, pred len) 0 :: IO (IOArray Int Int)
-- -- let zippedCounts = zip counts (0:counts)
-- -- let diffsList = map (uncurry (-)) zippedCounts
-- diffsArr <- newListArray (0, pred (length counts)) (map snd counts) :: IO (IOArray Int Int)
-- -- print "done diffsArr"
-- let lessThanCountsArr = listArray (0, pred (length lessThanCounts)) lessThanCounts
-- -- print lessThanCountsArr
-- -- print "done lessThanCountsArr"
-- let (fstHalf, sndHalf) = split list
-- -- print "done split"
--
-- forM_ fstHalf $ \(num, s) -> do
-- placeElement num "-" outArr lessThanCountsArr diffsArr
-- return Nothing
--
-- -- print "done 1st half"
--
-- forM_ sndHalf $ \(num, s) -> do
-- -- print (num, s)
-- placeElement num s outArr lessThanCountsArr diffsArr
-- return Nothing
--
-- -- print "done 2nd half"
--
-- x <- getElems outArr
-- -- print "done get elems"
-- return (unwords x)
--
-- -- return Nothing
main :: IO ()
main = do
n <- readLn :: IO Int
list <- replicateM n getLine
let input = map readElement list
-- let lessThanCountsArr = mapAccum (\acc (x, c) -> (acc + c, (x, acc + c))) 0 countsArr
smartSorted <- smartCountingSort input
-- putStrLn smartSorted
print "---"
-- let counts = count input
-- -- print counts
-- -- print "done counts"
-- let lessThanCounts = map snd (snd (countLessThan counts))
-- -- print lessCounts
-- -- print "done less counts"
-- smartSorted <- smartCountingSort input counts lessThanCounts
-- -- print "done"
-- putStrLn smartSorted
|
mgrebenets/hackerrank
|
alg/arr-n-srt/the-full-counting-sort.hs
|
mit
| 5,796
| 5
| 17
| 1,393
| 923
| 481
| 442
| 56
| 1
|
{-# LANGUAGE PatternSynonyms #-}
-- For HasCallStack compatibility
{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module JSDOM.Generated.SVGPathSegCurvetoCubicRel
(setX, getX, setY, getY, setX1, getX1, setY1, getY1, setX2, getX2,
setY2, getY2, SVGPathSegCurvetoCubicRel(..),
gTypeSVGPathSegCurvetoCubicRel)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPathSegCurvetoCubicRel.x Mozilla SVGPathSegCurvetoCubicRel.x documentation>
setX :: (MonadDOM m) => SVGPathSegCurvetoCubicRel -> Float -> m ()
setX self val = liftDOM (self ^. jss "x" (toJSVal val))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPathSegCurvetoCubicRel.x Mozilla SVGPathSegCurvetoCubicRel.x documentation>
getX :: (MonadDOM m) => SVGPathSegCurvetoCubicRel -> m Float
getX self
= liftDOM (realToFrac <$> ((self ^. js "x") >>= valToNumber))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPathSegCurvetoCubicRel.y Mozilla SVGPathSegCurvetoCubicRel.y documentation>
setY :: (MonadDOM m) => SVGPathSegCurvetoCubicRel -> Float -> m ()
setY self val = liftDOM (self ^. jss "y" (toJSVal val))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPathSegCurvetoCubicRel.y Mozilla SVGPathSegCurvetoCubicRel.y documentation>
getY :: (MonadDOM m) => SVGPathSegCurvetoCubicRel -> m Float
getY self
= liftDOM (realToFrac <$> ((self ^. js "y") >>= valToNumber))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPathSegCurvetoCubicRel.x1 Mozilla SVGPathSegCurvetoCubicRel.x1 documentation>
setX1 :: (MonadDOM m) => SVGPathSegCurvetoCubicRel -> Float -> m ()
setX1 self val = liftDOM (self ^. jss "x1" (toJSVal val))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPathSegCurvetoCubicRel.x1 Mozilla SVGPathSegCurvetoCubicRel.x1 documentation>
getX1 :: (MonadDOM m) => SVGPathSegCurvetoCubicRel -> m Float
getX1 self
= liftDOM (realToFrac <$> ((self ^. js "x1") >>= valToNumber))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPathSegCurvetoCubicRel.y1 Mozilla SVGPathSegCurvetoCubicRel.y1 documentation>
setY1 :: (MonadDOM m) => SVGPathSegCurvetoCubicRel -> Float -> m ()
setY1 self val = liftDOM (self ^. jss "y1" (toJSVal val))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPathSegCurvetoCubicRel.y1 Mozilla SVGPathSegCurvetoCubicRel.y1 documentation>
getY1 :: (MonadDOM m) => SVGPathSegCurvetoCubicRel -> m Float
getY1 self
= liftDOM (realToFrac <$> ((self ^. js "y1") >>= valToNumber))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPathSegCurvetoCubicRel.x2 Mozilla SVGPathSegCurvetoCubicRel.x2 documentation>
setX2 :: (MonadDOM m) => SVGPathSegCurvetoCubicRel -> Float -> m ()
setX2 self val = liftDOM (self ^. jss "x2" (toJSVal val))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPathSegCurvetoCubicRel.x2 Mozilla SVGPathSegCurvetoCubicRel.x2 documentation>
getX2 :: (MonadDOM m) => SVGPathSegCurvetoCubicRel -> m Float
getX2 self
= liftDOM (realToFrac <$> ((self ^. js "x2") >>= valToNumber))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPathSegCurvetoCubicRel.y2 Mozilla SVGPathSegCurvetoCubicRel.y2 documentation>
setY2 :: (MonadDOM m) => SVGPathSegCurvetoCubicRel -> Float -> m ()
setY2 self val = liftDOM (self ^. jss "y2" (toJSVal val))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPathSegCurvetoCubicRel.y2 Mozilla SVGPathSegCurvetoCubicRel.y2 documentation>
getY2 :: (MonadDOM m) => SVGPathSegCurvetoCubicRel -> m Float
getY2 self
= liftDOM (realToFrac <$> ((self ^. js "y2") >>= valToNumber))
|
ghcjs/jsaddle-dom
|
src/JSDOM/Generated/SVGPathSegCurvetoCubicRel.hs
|
mit
| 4,363
| 0
| 12
| 509
| 1,024
| 580
| 444
| 50
| 1
|
{-# LANGUAGE ScopedTypeVariables #-}
-----------------------------------------------------------------------------
-- |
-- Module : Mezzo.Render.MIDI
-- Description : MIDI exporting
-- Copyright : (c) Dima Szamozvancev
-- License : MIT
--
-- Maintainer : ds709@cam.ac.uk
-- Stability : experimental
-- Portability : portable
--
-- Functions for exporting Mezzo compositions into MIDI files.
-- Skeleton code by Stephen Lavelle.
--
-----------------------------------------------------------------------------
module Mezzo.Render.MIDI
( MidiNote (..), Score, (><)
, renderScore, renderScores, withMusic, defScore, playLive, playLive' )
where
import Mezzo.Model
import Mezzo.Compose (_th, _si, _ei, _qu, _ha, _wh)
import Mezzo.Render.Score
import Mezzo.Compose.Builder
import Codec.Midi hiding (key, Key)
import qualified Codec.Midi as CM (key, Key)
import Euterpea.IO.MIDI.Play (playM')
import Euterpea.IO.MIDI.MidiIO (unsafeOutputID)
import Prelude hiding (min)
-------------------------------------------------------------------------------
-- Types
-------------------------------------------------------------------------------
-- | A MIDI representation of a musical note.
data MidiNote = MidiNote
{ noteNum :: Int -- ^ MIDI number of a note (middle C is 60).
, vel :: Velocity -- ^ Performance velocity of the note.
, startT :: Ticks -- ^ Relative start time of the note.
, noteDur :: Ticks -- ^ Duration of the note.
} deriving Show
-- | A MIDI event: a MIDI message at a specific timestamp.
type MidiEvent = (Ticks, Message)
-- | A sequence of MIDI events.
type MidiTrack = Track Ticks
-- | A musical score.
type Score = MidiTrack
-------------------------------------------------------------------------------
-- Operations
-------------------------------------------------------------------------------
-- | Construct a 'MidiNote' with the given MIDI number and duration, using the default velocity.
midiNote :: Int -> Ticks -> MidiNote
midiNote root dur = MidiNote {noteNum = root, vel = 100, startT = 0, noteDur = dur}
midiRest :: Ticks -> MidiNote
midiRest dur = MidiNote {noteNum = 0, vel = 0, startT = 0, noteDur = dur}
-- | Start playing the specified 'MidiNote'.
keyDown :: MidiNote -> MidiEvent
keyDown n = (startT n, NoteOn {channel = 0, CM.key = noteNum n, velocity = vel n})
-- | Stop playing the specified 'MidiNote'.
keyUp :: MidiNote -> MidiEvent
keyUp n = (startT n + noteDur n, NoteOn {channel = 0, CM.key = noteNum n, velocity = 0})
-- | Play a note with the given MIDI number and duration.
playNote :: Int -> Ticks -> MidiTrack
playNote root dur = map ($ midiNote root (dur * 60)) [keyDown, keyUp]
-- | Play a rest of the specified duration.
playRest :: Ticks -> MidiTrack
playRest dur = map ($ midiRest (dur * 60)) [keyDown, keyUp]
-- | Play the given notes as a triplet with the specified base duration.
playTriplet :: [Int] -> Ticks -> MidiTrack
playTriplet ts dur = concatMap playShortNote ts
where playShortNote root = map ($ midiNote root (dur * 40)) [keyDown, keyUp]
-- | Merge two parallel MIDI tracks.
(><) :: MidiTrack -> MidiTrack -> MidiTrack
m1 >< m2 = removeTrackEnds $ m1 `merge` m2
-------------------------------------------------------------------------------
-- Rendering
-------------------------------------------------------------------------------
-- | Title of a composition
type Title = String
-- | Convert a 'Music' piece into a 'MidiTrack'.
musicToMidi :: forall t k m r. Music (Sig :: Signature t k r) m -> Score
musicToMidi (m1 :|: m2) = musicToMidi m1 ++ musicToMidi m2
musicToMidi (m1 :-: m2) = musicToMidi m1 >< musicToMidi m2
musicToMidi (Note root dur) = playNote (prim root) (prim dur)
musicToMidi (Rest dur) = playRest (prim dur)
musicToMidi (Chord c d) = foldr1 (><) notes
where notes = map (`playNote` prim d) $ prim c
musicToMidi (Progression p) = foldr1 (++) chords
where chords = (toChords <$> init (prim p)) ++ [cadence (last (prim p))]
toChords :: [Int] -> Score
toChords = concat . replicate (prim (TimeSig @t)) . foldr1 (><) . map (`playNote` prim _qu)
cadence :: [Int] -> Score
cadence = foldr1 (><) . map (`playNote` prim _wh)
musicToMidi (Homophony m a) = musicToMidi m >< musicToMidi a
musicToMidi (Triplet d r1 r2 r3) = playTriplet [prim r1, prim r2, prim r3] (prim d)
-- | Sets the music content of the score.
withMusic :: ATerm (Music (Sig :: Signature t k r) m) (Attributes t k r) Score
withMusic atts m = [ (0, getTimeSig atts)
, (0, getKeySig atts)
, (0, TempoChange (60000000 `div` tempo atts))
] ++ musicToMidi m
-- | Shorthand for quickly creating a score with the default attributes.
defScore :: Music (Sig :: Signature 4 (Key C Natural MajorMode) Classical) m -> Score
defScore = score withMusic
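-- A typical call site (illustrative; 'myMusic' stands for some composition and
-- is not defined in this module):
--
-- > renderScore "out.mid" "My piece" (defScore myMusic)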
-- | A basic skeleton of a MIDI file.
midiSkeleton :: Title -> Score -> Midi
midiSkeleton trName mel = Midi
{ fileType = SingleTrack
, timeDiv = TicksPerBeat 480
, tracks =
[ [ (0, ChannelPrefix 0)
, (0, TrackName trName)
, (0, InstrumentName "GM Device 1")
]
++ mel
++ [ (0, TrackEnd) ]
]
}
-- | Create a MIDI file with the specified name and track.
exportMidi :: FilePath -> Title -> Score -> IO ()
exportMidi f trName notes = do
exportFile f $ midiSkeleton trName notes
putStrLn $ "Composition rendered to " ++ f ++ "."
-- | Create a MIDI file with the specified path, title and score.
renderScore :: FilePath -> Title -> Score -> IO ()
renderScore f compTitle sc = exportMidi f compTitle sc
-- | Create a MIDI file with the specified path, title and list of scores.
renderScores :: FilePath -> Title -> [Score] -> IO ()
renderScores f compTitle ts = renderScore f compTitle (concat ts)
-- | Live playback of a Mezzo score.
playLive' :: Score -> IO ()
playLive' s = playM' (Just $ unsafeOutputID 0) $ midiSkeleton "Live playback" s
-- | Live playback of a Mezzo piece with default score attributes.
playLive :: Music (Sig :: Signature 4 (Key C Natural MajorMode) Classical) m
-> IO ()
playLive m = playLive' (defScore m)
|
DimaSamoz/mezzo
|
src/Mezzo/Render/MIDI.hs
|
mit
| 6,140
| 4
| 15
| 1,246
| 1,615
| 903
| 712
| -1
| -1
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeSynonymInstances #-}
#if __GLASGOW_HASKELL__ >= 800
{-# OPTIONS_GHC -fdefer-type-errors #-}
#endif
module UnificationTest where
import Testing
import Control.Hspl.Internal.Ast
import Control.Hspl.Internal.Unification
import Control.Hspl.Internal.VarMap (Entry (..))
import qualified Control.Hspl.Internal.VarMap as M
#if __GLASGOW_HASKELL__ >= 800
import Test.ShouldNotTypecheck
#endif
import Control.Exception.Base (evaluate)
import Control.Monad.State hiding (when)
import Control.Monad.Trans.Maybe
import Control.Monad.Writer (MonadWriter (..), runWriter)
import Data.Data
import Data.Monoid ((<>))
import Data.Typeable
import GHC.Generics
data RecursiveType = Base | Rec RecursiveType
deriving (Show, Eq, Typeable, Data, Generic)
instance Termable RecursiveType
data TwoChars = TwoChars Char Char
deriving (Show, Eq, Typeable, Data, Generic)
instance Termable TwoChars
type MockUnification = MaybeT (StateT Unifier VarGenerator)
instance MonadVarGenerator MockUnification where
fresh = lift $ lift fresh
instance MonadUnification MockUnification where
stateUnifier = lift . state
newtype IntFrac = IntFrac { toDouble :: Double }
deriving (Num, Fractional, Real, Ord, Enum, Typeable, Data, Show, Eq)
instance Termable IntFrac where
toTerm = Constant
-- This is weird and, well, bad, but it makes parameterizing the tests for numerical operators a lot
-- easier. Obviously we'll never want to depend on these operations actually behaving nicely.
instance Integral IntFrac where
quotRem (IntFrac d1) (IntFrac d2) = quotRem (floor d1) (floor d2)
toInteger (IntFrac d) = floor d
runMockUnification :: MockUnification a -> Maybe (a, Unifier)
runMockUnification m =
let st = runMaybeT m
(ma, u) = runVarGenerator $ runStateT st M.empty
in case ma of
Just a -> Just (a, u)
Nothing -> Nothing
renameWithContext :: Renamer -> Int -> Term a -> Term a
renameWithContext renamer fresh t =
let r = put renamer >> renameTerm t
vg = put fresh >> runRenamedT r
in evalState vg fresh
renamePredWithContext :: Renamer -> Int -> Predicate -> Predicate
renamePredWithContext renamer fresh p =
let r = put renamer >> renamePredicate p
vg = put fresh >> runRenamedT r
in evalState vg fresh
renameGoalWithContext :: Renamer -> Int -> Goal -> Goal
renameGoalWithContext renamer fresh g =
let r = put renamer >> renameGoal g
vg = put fresh >> runRenamedT r
in evalState vg fresh
doRenameClause :: HornClause -> HornClause
doRenameClause c = runVarGenerator $ renameClause c
test = describeModule "Control.Hspl.Internal.Unification" $ do
describe "a unifier" $ do
it "should have a singleton substitution operator" $
True // Var "x" `shouldBe` M.singleton (Var "x") (toTerm True)
when "empty" $ do
it "should act as an identity of composition" $ do
let u = toTerm True // Var "x"
u `compose` M.empty `shouldBe` u
M.empty `compose` u `shouldBe` u
it "should act as an identity of unification" $ do
let t = toTerm (Var "x" :: Var Bool)
unify M.empty t `shouldBe` t
it "should not allow terms to replace variables of a different type" $ do
#if __GLASGOW_HASKELL__ >= 800
-- This should work
evaluate $ toTerm True // (Var "x" :: Var Bool)
-- But this should not
shouldNotTypecheck $ toTerm True // (Var "x" :: Var Char)
#else
pendingWith "ShouldNotTypecheck tests require GHC >= 8.0"
#endif
it "is a subunifier of another if the other contains all of the substitutions of the first" $ do
'a' // Var "x" `shouldSatisfy` (`isSubunifierOf` ('a' // Var "x" <> True // Var "y"))
'a' // Var "x" <> True // Var "y" `shouldSatisfy`
(`isSubunifierOf` ('a' // Var "x" <> True // Var "y"))
'a' // Var "x" <> True // Var "y" `shouldSatisfy`
(`isSubunifierOf` ('a' // Var "x" <> 'b' // Var "x'" <> True // Var "y" <> () // Var "z"))
it "is not a subunifier of another which does not contain a substitution in the first" $
'a' // Var "x" <> 'b' // Var "y" `shouldSatisfy` not . (`isSubunifierOf` ('a' // Var "x"))
it "is not a subunifier of another which does not contain a submap of the first" $
'a' // Var "x" <> True // Var "y" `shouldSatisfy` not . (`isSubunifierOf` ('a' // Var "y"))
it "should return the unification status of a variable" $ do
findVar M.empty (Var "x" :: Var Bool) `shouldBe` Ununified
findVar (toTerm 'a' // Var "x") (Var "x" :: Var Bool) `shouldBe` Ununified
findVar (toTerm True // Var "y") (Var "x" :: Var Bool) `shouldBe` Ununified
findVar (toTerm True // Var "x") (Var "x" :: Var Bool) `shouldBe` Unified True
let t = adt Just (Var "y" :: Var Bool)
findVar (t // Var "x") (Var "x" :: Var (Maybe Bool)) `shouldBe` Partial t
findVar ((Var "y" :: Var Char) // Var "x" <> 'a' // Var "y") (Var "x" :: Var Char) `shouldBe`
Unified 'a'
findVar ((Var "y" :: Var (Maybe Char)) // Var "x" <> adt Just (Var "z" :: Var Char) // Var "y")
(Var "x" :: Var (Maybe Char)) `shouldBe`
Partial (adt Just (Var "z" :: Var Char))
describe "freeIn" $ do
it "should accurately detect free variables" $ do
freeIn (Var "x" :: Var Char) (toTerm (Var "x" :: Var Char)) `shouldBe` True
freeIn (Var "x" :: Var Char) (toTerm (Var "y" :: Var Char)) `shouldBe` False
it "should determine that there are no variables in a constant" $
freeIn (Var "x" :: Var Char) (toTerm 'a') `shouldBe` False
it "should recurse over the arguments of an ADT constructor" $
freeIn (Var "x" :: Var Char) (adt Just (Var "x" :: Var Char)) `shouldBe` True
it "should recurse over elements of a tuple" $
freeIn (Var "x" :: Var Char) (toTerm (True, 'a', Var "x" :: Var Char)) `shouldBe` True
it "should recurse over elements of a list" $ do
freeIn (Var "x" :: Var Char) (toTerm "") `shouldBe` False
freeIn (Var "x" :: Var Char) (List $ Cons (toTerm $ Var "x") (toTerm "foo")) `shouldBe` True
it "should identify variables in an appended list" $ do
freeIn (Var "xs" :: Var String) (List $ Append (Var "xs") (toTerm "foo")) `shouldBe` True
freeIn (Var "ys" :: Var String)
(List $ Append (Var "xs" :: Var String) (toTerm $ Var "ys")) `shouldBe` True
withParams [Sum, Difference, Product, Quotient, IntQuotient, Modulus] $ \op ->
it "should recurse over operands of a binary operator" $ do
freeIn (Var "x" :: Var IntFrac) (toTerm (Var "x") `op` toTerm (IntFrac 0)) `shouldBe` True
freeIn (Var "x" :: Var IntFrac) (toTerm (IntFrac 0) `op` toTerm (Var "x")) `shouldBe` True
describe "term unification" $ do
let getUs :: TermEntry a => Term a -> Term a -> [Unifier]
getUs t1 t2 = runUnificationT $ mgu t1 t2
context "of anonymous variables" $ do
it "should always succeed" $ do
getUs (toTerm Anon) (toTerm 'a') `shouldBe` [M.empty]
getUs (toTerm 'a') (toTerm Anon) `shouldBe` [M.empty]
getUs (toTerm (Anon :: Var Char)) (toTerm Anon) `shouldBe` [M.empty]
it "should bind multiple anonymous variables to different values" $
getUs (toTerm ('a', Anon)) (toTerm (Anon, 'b')) `shouldBe` [M.empty]
when "both terms are variables" $
it "should keep user-defined variables over fresh variables where possible" $ do
getUs (toTerm (Var "x" :: Var Char)) (toTerm (Fresh 0 :: Var Char)) `shouldBe`
[toTerm (Var "x" :: Var Char) // Fresh 0]
getUs (toTerm (Fresh 0 :: Var Char)) (toTerm (Var "x" :: Var Char)) `shouldBe`
[toTerm (Var "x" :: Var Char) // Fresh 0]
when "one term is a variable" $ do
it "should unify with any term" $ do
getUs (toTerm $ Var "x") (toTerm True) `shouldBe` [toTerm True // Var "x"]
getUs (toTerm True) (toTerm $ Var "x") `shouldBe` [toTerm True // Var "x"]
getUs (toTerm (Var "x" :: Var Char)) (toTerm (Var "y" :: Var Char)) `shouldBe`
[toTerm (Var "y" :: Var Char) // Var "x"]
getUs (toTerm (Var "x" :: Var Char)) (toTerm (Var "x" :: Var Char)) `shouldBe` [M.empty]
-- ^ This should NOT fail the occurs check!
it "should fail when the term being substituted contains the variable (occurs check)" $ do
getUs (toTerm (Var "xs" :: Var [Bool]))
(List $ Cons (toTerm True) (toTerm $ Var "xs")) `shouldBe` []
getUs (toTerm (Var "x" :: Var RecursiveType))
(adt Rec (Var "x" :: Var RecursiveType)) `shouldBe` []
it "should match the tail of a list" $ do
getUs (toTerm "foo") (List $ Cons (toTerm 'f') (toTerm $ Var "xs")) `shouldBe` [toTerm "oo" // Var "xs"]
getUs (List $ Cons (toTerm 'f') (toTerm $ Var "xs")) (toTerm "foo") `shouldBe` [toTerm "oo" // Var "xs"]
getUs (toTerm "foo") (List $ Cons (toTerm 'f') (toTerm Anon)) `shouldBe` [M.empty]
getUs (List $ Cons (toTerm 'f') (toTerm Anon)) (toTerm "foo") `shouldBe` [M.empty]
when "both elements are constants" $ do
it "should unify equal constants" $ do
getUs (toTerm True) (toTerm True) `shouldBe` [M.empty]
getUs (toTerm 'a') (toTerm 'a') `shouldBe` [M.empty]
it "should fail to unify unequal constants" $ do
getUs (toTerm True) (toTerm False) `shouldBe` []
getUs (toTerm 'a') (toTerm 'b') `shouldBe` []
when "both terms are tuples" $ do
it "should unify the elements in sequence" $
getUs (toTerm ('a', Var "x" :: Var Bool)) (toTerm (Var "y" :: Var Char, True)) `shouldBe`
[toTerm 'a' // Var "y" <> toTerm True // Var "x"]
it "should fail to unify if any element fails" $ do
getUs (toTerm ('a', Var "x" :: Var Char)) (toTerm ('b', 'c')) `shouldBe` []
getUs (toTerm (Var "x" :: Var Char, 'a')) (toTerm ('b', 'c')) `shouldBe` []
it "should apply each intermediate unifier to the remaining terms before unifying them" $
getUs (toTerm ('a', Var "x" :: Var Char)) (toTerm (Var "x" :: Var Char, Var "y" :: Var Char)) `shouldBe`
[toTerm 'a' // Var "x" <> toTerm 'a' // Var "y"]
it "should fail to unify tuples of different lengths" $
#if __GLASGOW_HASKELL__ >= 800
shouldNotTypecheck $ getUs (toTerm ('a', 'b')) (toTerm ('a', 'b', Var "x" :: Var Char))
#else
pendingWith "ShouldNotTypecheck tests require GHC >= 8.0"
#endif
when "both terms are lists" $ do
it "should unify the elements in sequence" $
getUs (toTerm [toTerm 'a', toTerm (Var "x" :: Var Char)])
(toTerm [toTerm (Var "y" :: Var Char), toTerm 'b']) `shouldBe`
[toTerm 'a' // Var "y" <> toTerm 'b' // Var "x"]
it "should unify a variable with the tail of a list" $
getUs (toTerm "abc") (List $ Cons (toTerm 'a') (toTerm $ Var "xs")) `shouldBe`
[toTerm "bc" // Var "xs"]
it "should fail to unify if any element fails" $ do
getUs (toTerm [toTerm 'a', toTerm (Var "x" :: Var Char)]) (toTerm ['b', 'c']) `shouldBe` []
getUs (toTerm [toTerm (Var "x" :: Var Char), toTerm 'a']) (toTerm ['b', 'c']) `shouldBe` []
it "should apply each intermediate unifier to the remaining terms before unifying them" $
getUs (toTerm [toTerm 'a', toTerm (Var "x" :: Var Char)])
(toTerm [toTerm (Var "x" :: Var Char), toTerm (Var "y" :: Var Char)]) `shouldBe`
[toTerm 'a' // Var "x" <> toTerm 'a' // Var "y"]
it "should fail to unify lists of different lengths" $
getUs (toTerm ['a', 'b']) (toTerm [toTerm 'a', toTerm 'b', toTerm (Var "x" :: Var Char)]) `shouldBe`
[]
it "should match a variable-appended list with a constant-appended list" $ do
let us = getUs (List $ Append (Var "xs") (toTerm $ Var "ys"))
(List $ Append (Var "zs") (toTerm "foo"))
length us `shouldBe` 5
-- xs, zs same length
findVar (us !! 0) (Var "xs" :: Var String) `shouldBe` Partial (toTerm $ Var "zs")
findVar (us !! 0) (Var "ys") `shouldBe` Unified "foo"
findVar (us !! 0) (Var "zs" :: Var String) `shouldBe` Ununified
-- xs longer than zs
findVar (us !! 1) (Var "xs") `shouldBe` Partial (List $ Append (Var "zs") (toTerm "foo"))
findVar (us !! 1) (Var "ys") `shouldBe` Unified ""
findVar (us !! 1) (Var "zs" :: Var String) `shouldBe` Ununified
findVar (us !! 2) (Var "xs") `shouldBe` Partial (List $ Append (Var "zs") (toTerm "fo"))
findVar (us !! 2) (Var "ys") `shouldBe` Unified "o"
findVar (us !! 2) (Var "zs" :: Var String) `shouldBe` Ununified
findVar (us !! 3) (Var "xs") `shouldBe` Partial (List $ Append (Var "zs") (toTerm "f"))
findVar (us !! 3) (Var "ys") `shouldBe` Unified "oo"
findVar (us !! 3) (Var "zs" :: Var String) `shouldBe` Ununified
-- zs longer than xs
findVar (us !! 4) (Var "zs" :: Var String) `shouldBe`
Partial (List $ Append (Var "xs") (toTerm $ Fresh 0))
findVar (us !! 4) (Var "ys" :: Var String) `shouldBe`
Partial (List $ Append (Fresh 0) (toTerm "foo"))
findVar (us !! 4) (Var "xs" :: Var String) `shouldBe` Ununified
it "should match two variable-appended lists" $ do
let us = getUs (List $ Append (Var "xs" :: Var String) (toTerm $ Var "ys"))
(List $ Append (Var "as") (toTerm $ Var "bs"))
length us `shouldBe` 3
-- xs, as same length
findVar (us !! 0) (Var "xs" :: Var String) `shouldBe` Partial (toTerm $ Var "as")
findVar (us !! 0) (Var "ys" :: Var String) `shouldBe` Partial (toTerm $ Var "bs")
findVar (us !! 0) (Var "as" :: Var String) `shouldBe` Ununified
findVar (us !! 0) (Var "bs" :: Var String) `shouldBe` Ununified
-- xs longer than as
findVar (us !! 1) (Var "xs" :: Var String) `shouldBe`
Partial (List $ Append (Var "as") (toTerm $ Fresh 0))
findVar (us !! 1) (Var "ys" :: Var String) `shouldBe` Ununified
findVar (us !! 1) (Var "as" :: Var String) `shouldBe` Ununified
findVar (us !! 1) (Var "bs" :: Var String) `shouldBe`
Partial (List $ Append (Fresh 0) (toTerm $ Var "ys"))
-- as longer than xs
findVar (us !! 2) (Var "as" :: Var String) `shouldBe`
Partial (List $ Append (Var "xs") (toTerm $ Fresh 0))
findVar (us !! 2) (Var "bs" :: Var String) `shouldBe` Ununified
findVar (us !! 2) (Var "xs" :: Var String) `shouldBe` Ununified
findVar (us !! 2) (Var "ys" :: Var String) `shouldBe`
Partial (List $ Append (Fresh 0) (toTerm $ Var "bs"))
withParams [getUs, flip getUs] $ \go -> do
it "should match an appended list with a concrete list" $ do
let us = go (toTerm "foo") (List $ Append (Var "xs") (toTerm "o"))
length us `shouldBe` 1
findVar (head us) (Var "xs") `shouldBe` Unified "fo"
it "should match an appended list with a partial list" $ do
let us = go (List $ Cons (toTerm 'f') (toTerm $ Var "tail"))
(List $ Append (Var "xs" :: Var String) (toTerm $ Var "ys"))
length us `shouldBe` 2
findVar (head us) (Var "xs") `shouldBe` Partial (List $ Cons (toTerm 'f') (toTerm $ Fresh 0))
findVar (head us) (Var "ys" :: Var String) `shouldBe` Ununified
findVar (head us) (Var "tail") `shouldBe`
Partial (List $ Append (Fresh 0 :: Var String) (toTerm $ Var "ys"))
findVar (last us) (Var "xs") `shouldBe` Unified ""
findVar (last us) (Var "ys") `shouldBe` Partial (List $ Cons (toTerm 'f') (toTerm $ Var "tail"))
findVar (last us) (Var "tail" :: Var String) `shouldBe` Ununified
it "should match an appended list with an empty list" $
go (List $ Append (Var "xs" :: Var String) (toTerm $ Var "ys")) (List Nil) `shouldBe`
[(List Nil :: Term String) // Var "xs" <> (List Nil :: Term String) // Var "ys"]
it "should match an appended list nondeterministically" $ do
let us = go (List $ Append (Var "xs" :: Var String) (toTerm $ Var "ys")) (toTerm "a")
length us `shouldBe` 2
findVar (head us) (Var "xs") `shouldBe` Unified "a"
findVar (head us) (Var "ys") `shouldBe` Unified ""
findVar (last us) (Var "xs") `shouldBe` Unified ""
findVar (last us) (Var "ys") `shouldBe` Unified "a"
when "both terms are ADTs" $ do
it "should unify terms with matching constructors by unifying the arguments" $ do
getUs (adt Just 'a') (adt Just (Var "x")) `shouldBe` [toTerm 'a' // Var "x"]
getUs (adt Just (Var "x")) (adt Just 'a') `shouldBe` [toTerm 'a' // Var "x"]
it "should apply the unifier of respective arguments to subsequent arguments before unifying them" $
getUs (adt TwoChars ('a', Var "x" :: Var Char))
(adt TwoChars (Var "x" :: Var Char, Var "y" :: Var Char)) `shouldBe`
[toTerm 'a' // Var "x" <> toTerm 'a' // Var "y"]
it "should fail to unify terms with different constructors" $ do
getUs (adt Left 'a') (adt Right 'a') `shouldBe` []
getUs (adt Left 'a') (adt Right True) `shouldBe` []
getUs (adt Left (Var "x" :: Var Char)) (adt Right (Var "y" :: Var Char)) `shouldBe` []
when "both terms are arithmetic expressions" $ do
it "should unify terms of the same type of expression by unifying the operands" $ do
getUs (Sum (toTerm $ Var "x") (toTerm (1 :: Int)))
(Sum (toTerm (2 :: Int)) (toTerm $ Var "y")) `shouldBe`
[toTerm (1 :: Int) // Var "y" <> toTerm (2 :: Int) // Var "x"]
getUs (Difference (toTerm $ Var "x") (toTerm (1 :: Int)))
(Difference (toTerm (2 :: Int)) (toTerm $ Var "y")) `shouldBe`
[toTerm (1 :: Int) // Var "y" <> toTerm (2 :: Int) // Var "x"]
getUs (Product (toTerm $ Var "x") (toTerm (1 :: Int)))
(Product (toTerm (2 :: Int)) (toTerm $ Var "y")) `shouldBe`
[toTerm (1 :: Int) // Var "y" <> toTerm (2 :: Int) // Var "x"]
getUs (Quotient (toTerm $ Var "x") (toTerm (1.0 :: Double)))
(Quotient (toTerm (2.0 :: Double)) (toTerm $ Var "y")) `shouldBe`
[toTerm (1.0 :: Double) // Var "y" <> toTerm (2.0 :: Double) // Var "x"]
getUs (IntQuotient (toTerm $ Var "x") (toTerm (1 :: Int)))
(IntQuotient (toTerm (2 :: Int)) (toTerm $ Var "y")) `shouldBe`
[toTerm (1 :: Int) // Var "y" <> toTerm (2 :: Int) // Var "x"]
getUs (Modulus (toTerm $ Var "x") (toTerm (1 :: Int)))
(Modulus (toTerm (2 :: Int)) (toTerm $ Var "y")) `shouldBe`
[toTerm (1 :: Int) // Var "y" <> toTerm (2 :: Int) // Var "x"]
it "should fail to unify different types of expressions" $ do
getUs (Sum (toTerm $ Var "x") (toTerm (1 :: Int)))
(Difference (toTerm (2 :: Int)) (toTerm $ Var "y")) `shouldBe` []
getUs (Quotient (toTerm $ Var "x") (toTerm (1.0 :: Double)))
(Product (toTerm $ Var "x") (toTerm (1.0 :: Double))) `shouldBe` []
it "should prohibit unification of terms of different types" $ do
#if __GLASGOW_HASKELL__ >= 800
-- This should work
evaluate $ getUs (toTerm True) (toTerm True)
-- but not this
shouldNotTypecheck $ getUs (toTerm True) (toTerm 'a')
#else
pendingWith "ShouldNotTypecheck tests require GHC >= 8.0"
#endif
describe "term renaming" $ do
let r = M.fromList [ Entry (Var "x" :: Var Bool) (Fresh 0)
, Entry (Var "x" :: Var Char) (Fresh 1)
, Entry (Var "y" :: Var Char) (Fresh 2)
, Entry (Var "z" :: Var Char) (Fresh 3)
]
let rename = renameWithContext r 4
context "of a variable" $ do
it "should leave anonymous variables unchanged" $
rename (toTerm (Anon :: Var Char, Anon :: Var Char)) `shouldBe` toTerm (Anon, Anon)
it "should replace the variable if it appears in the renamer" $ do
rename (toTerm (Var "x" :: Var Bool)) `shouldBe` toTerm (Fresh 0 :: Var Bool)
rename (toTerm (Var "x" :: Var Char)) `shouldBe` toTerm (Fresh 1 :: Var Char)
rename (toTerm (Var "y" :: Var Char)) `shouldBe` toTerm (Fresh 2 :: Var Char)
rename (toTerm (Var "z" :: Var Char)) `shouldBe` toTerm (Fresh 3 :: Var Char)
it "should create a fresh variable if it is not in the renamer" $ do
rename (toTerm (Var "q" :: Var Char)) `shouldBe` toTerm (Fresh 4 :: Var Char)
rename (toTerm (Var "y" :: Var Bool)) `shouldBe` toTerm (Fresh 4 :: Var Bool)
context "of a constant" $
it "should return the original constant" $ do
rename (toTerm 'a') `shouldBe` toTerm 'a'
rename (toTerm True) `shouldBe` toTerm True
context "of a tuple" $ do
it "should recursively rename variables in each element" $ do
rename (toTerm (Var "x" :: Var Bool, Var "x" :: Var Char)) `shouldBe`
toTerm (Fresh 0 :: Var Bool, Fresh 1 :: Var Char)
rename (toTerm (Var "x" :: Var Bool, Var "q" :: Var Char)) `shouldBe`
toTerm (Fresh 0 :: Var Bool, Fresh 4 :: Var Char)
rename (toTerm (Var "x" :: Var Char, (Var "y" :: Var Char, Var "z" :: Var Char))) `shouldBe`
toTerm (Fresh 1 :: Var Char, (Fresh 2 :: Var Char, Fresh 3 :: Var Char))
it "should rename the same variable with the same replacement" $ do
rename (toTerm (Var "x" :: Var Bool, Var "x" :: Var Bool)) `shouldBe`
toTerm (Fresh 0 :: Var Bool, Fresh 0 :: Var Bool)
rename (toTerm (Var "q" :: Var Char, Var "q" :: Var Char)) `shouldBe`
toTerm (Fresh 4 :: Var Char, Fresh 4 :: Var Char)
context "of a list" $ do
it "should recursively rename variables in each element" $ do
rename (toTerm [Var "x" :: Var Char, Var "y" :: Var Char]) `shouldBe`
toTerm [Fresh 1 :: Var Char, Fresh 2 :: Var Char]
rename (toTerm [Var "x" :: Var Char, Var "q" :: Var Char]) `shouldBe`
toTerm [Fresh 1 :: Var Char, Fresh 4 :: Var Char]
it "should rename a variable in the tail of the list" $ do
let r = M.singleton (Var "xs" :: Var String) (Fresh 0)
renameWithContext r 1 (List $ Cons (toTerm 'a') (toTerm $ Var "xs")) `shouldBe`
List (Cons (toTerm 'a') (toTerm $ Fresh 0))
it "should rename the front and back of an appended list" $ do
let r = M.fromList [ M.Entry (Var "xs" :: Var String) (Fresh 0)
, M.Entry (Var "ys" :: Var String) (Fresh 1)
]
renameWithContext r 2 (List $ Append (Var "xs" :: Var String) (toTerm $ Var "ys")) `shouldBe`
List (Append (Fresh 0 :: Var String) (toTerm $ Fresh 1))
it "should rename the same variable with the same replacement" $ do
rename (toTerm [Var "x" :: Var Bool, Var "x" :: Var Bool]) `shouldBe`
toTerm [Fresh 0 :: Var Bool, Fresh 0 :: Var Bool]
rename (toTerm [Var "q" :: Var Char, Var "q" :: Var Char]) `shouldBe`
toTerm [Fresh 4 :: Var Char, Fresh 4 :: Var Char]
let r = M.singleton (Var "xs" :: Var String) (Fresh 0)
renameWithContext r 1 (List $ Append (Var "xs" :: Var String) (toTerm $ Var "xs")) `shouldBe`
List (Append (Fresh 0 :: Var String) (toTerm $ Fresh 0))
context "of an ADT constructor" $ do
it "should recursively rename variables in the argument" $ do
rename (adt Just (Var "x" :: Var Char)) `shouldBe`
adt Just (Fresh 1 :: Var Char)
rename (adt Just (Var "x" :: Var Char, Var "q" :: Var Int)) `shouldBe`
adt Just (Fresh 1 :: Var Char, Fresh 4 :: Var Int)
it "should rename the same variable with the same replacement" $ do
rename (adt TwoChars (Var "x" :: Var Char, Var "x" :: Var Char)) `shouldBe`
adt TwoChars (Fresh 1 :: Var Char, Fresh 1 :: Var Char)
rename (adt TwoChars (Var "q" :: Var Char, Var "q" :: Var Char)) `shouldBe`
adt TwoChars (Fresh 4 :: Var Char, Fresh 4 :: Var Char)
context "of an arithmetic expression" $ do
it "should recursively rename variables in each operand" $ do
rename (Sum (toTerm (Var "x" :: Var Int)) (toTerm $ Var "y")) `shouldBe`
Sum (toTerm (Fresh 4 :: Var Int)) (toTerm $ Fresh 5)
rename (Difference (toTerm (Var "x" :: Var Int)) (toTerm $ Var "y")) `shouldBe`
Difference (toTerm (Fresh 4 :: Var Int)) (toTerm $ Fresh 5)
rename (Product (toTerm (Var "x" :: Var Int)) (toTerm $ Var "y")) `shouldBe`
Product (toTerm (Fresh 4 :: Var Int)) (toTerm $ Fresh 5)
rename (Quotient (toTerm (Var "x" :: Var Double)) (toTerm $ Var "y")) `shouldBe`
Quotient (toTerm (Fresh 4 :: Var Double)) (toTerm $ Fresh 5)
rename (IntQuotient (toTerm (Var "x" :: Var Int)) (toTerm $ Var "y")) `shouldBe`
IntQuotient (toTerm (Fresh 4 :: Var Int)) (toTerm $ Fresh 5)
rename (Modulus (toTerm (Var "x" :: Var Int)) (toTerm $ Var "y")) `shouldBe`
Modulus (toTerm (Fresh 4 :: Var Int)) (toTerm $ Fresh 5)
it "should rename the same variable with the same replacement" $ do
rename (Sum (toTerm (Var "x" :: Var Int)) (toTerm $ Var "x")) `shouldBe`
Sum (toTerm (Fresh 4 :: Var Int)) (toTerm $ Fresh 4)
rename (Difference (toTerm (Var "x" :: Var Int)) (toTerm $ Var "x")) `shouldBe`
Difference (toTerm (Fresh 4 :: Var Int)) (toTerm $ Fresh 4)
rename (Product (toTerm (Var "x" :: Var Int)) (toTerm $ Var "x")) `shouldBe`
Product (toTerm (Fresh 4 :: Var Int)) (toTerm $ Fresh 4)
rename (Quotient (toTerm (Var "x" :: Var Double)) (toTerm $ Var "x")) `shouldBe`
Quotient (toTerm (Fresh 4 :: Var Double)) (toTerm $ Fresh 4)
rename (IntQuotient (toTerm (Var "x" :: Var Int)) (toTerm $ Var "x")) `shouldBe`
IntQuotient (toTerm (Fresh 4 :: Var Int)) (toTerm $ Fresh 4)
rename (Modulus (toTerm (Var "x" :: Var Int)) (toTerm $ Var "x")) `shouldBe`
Modulus (toTerm (Fresh 4 :: Var Int)) (toTerm $ Fresh 4)
describe "predicate renaming" $ do
let r = M.singleton (Var "x" :: Var Bool) (Fresh 0)
let rename = renamePredWithContext r 1
it "should rename variables in the argument if the renamer applies" $
rename (predicate "foo" (Var "x" :: Var Bool)) `shouldBe` predicate "foo" (Fresh 0 :: Var Bool)
it "should create fresh variables when the argument contains a variable not in the renamer" $
rename (predicate "foo" (Var "q" :: Var Bool)) `shouldBe` predicate "foo" (Fresh 1 ::Var Bool)
describe "goal renaming" $ do
let rename = renameGoalWithContext M.empty 0
context "of predicate goals" $ do
it "should rename variables in the predicate" $
rename (PredGoal (predicate "foo" (Var "x" :: Var Bool)) []) `shouldBe`
PredGoal (predicate "foo" (Fresh 0 :: Var Bool)) []
it "should ignore the clauses" $ do
let g = PredGoal (predicate "foo" ())
[HornClause (predicate "bar" (Var "x" :: Var Char)) Top]
rename g `shouldBe` g
withParams [IsUnified, IsVariable] $ \constr ->
context "of unary term goals" $
it "should rename variables in the term" $
rename (constr (toTerm (Var "x" :: Var Char))) `shouldBe` constr (toTerm $ Fresh 0)
context "of binary term goals" $ do
let constrs :: [Term Char -> Term Char -> Goal]
constrs = [CanUnify, Identical, Equal, LessThan]
withParams constrs $ \constr -> do
it "should rename variables in each term" $
rename (constr (toTerm (Var "x" :: Var Char)) (toTerm (Var "y" :: Var Char))) `shouldBe`
constr (toTerm (Fresh 0 :: Var Char)) (toTerm (Fresh 1 :: Var Char))
it "should rename variables in both terms the same" $
rename (constr (toTerm (Var "x" :: Var Char)) (toTerm (Var "x" :: Var Char))) `shouldBe`
constr (toTerm (Fresh 0 :: Var Char)) (toTerm (Fresh 0 :: Var Char))
context "of unary outer goals" $
withParams [CutFrame, Track, Once] $ \constr ->
it "should rename variables in the inner goal" $
rename (constr $ PredGoal (predicate "foo" (Var "x" :: Var Bool)) []) `shouldBe`
constr (PredGoal (predicate "foo" (Fresh 0 :: Var Bool)) [])
context "of binary outer goals" $
withParams [And, Or] $ \constr -> do
it "should rename variables in each goal" $
rename (constr (PredGoal (predicate "foo" (Var "x" :: Var Char)) [])
(PredGoal (predicate "bar" (Var "y" :: Var Bool)) [])) `shouldBe`
constr (PredGoal (predicate "foo" (Fresh 0 :: Var Char)) [])
(PredGoal (predicate "bar" (Fresh 1 :: Var Bool)) [])
it "should rename variables in both terms the same" $
rename (constr (PredGoal (predicate "foo" (Var "x" :: Var Char)) [])
(PredGoal (predicate "bar" (Var "x" :: Var Char)) [])) `shouldBe`
constr (PredGoal (predicate "foo" (Fresh 0 :: Var Char)) [])
(PredGoal (predicate "bar" (Fresh 0 :: Var Char)) [])
context "of ternary outer goals" $
withParams [If] $ \constr -> do
it "should rename variables in each goal" $
rename (constr (CanUnify (toTerm $ Var "x") (toTerm 'a'))
(CanUnify (toTerm $ Var "y") (toTerm 'b'))
(CanUnify (toTerm $ Var "z") (toTerm 'c'))) `shouldBe`
constr (CanUnify (toTerm $ Fresh 0) (toTerm 'a'))
(CanUnify (toTerm $ Fresh 1) (toTerm 'b'))
(CanUnify (toTerm $ Fresh 2) (toTerm 'c'))
it "should rename variables in each goal the same" $
rename (constr (CanUnify (toTerm $ Var "x") (toTerm 'a'))
(CanUnify (toTerm $ Var "x") (toTerm 'b'))
(CanUnify (toTerm $ Var "x") (toTerm 'c'))) `shouldBe`
constr (CanUnify (toTerm $ Fresh 0) (toTerm 'a'))
(CanUnify (toTerm $ Fresh 0) (toTerm 'b'))
(CanUnify (toTerm $ Fresh 0) (toTerm 'c'))
context "of unitary goals" $
withParams [Top, Bottom, Cut] $ \constr ->
it "should be a noop" $
rename constr `shouldBe` constr
withParams [Nothing, Just 42] $ \n ->
context "of Alternatives goals" $ do
let go x g xs = rename $ Alternatives n (toTerm x) g (toTerm xs)
it "should rename variables in each subcomponent" $
go (Var "x" :: Var Char) (Equal (toTerm 'a') (toTerm $ Var "y")) (Var "xs") `shouldBe`
go (Fresh 0 :: Var Char) (Equal (toTerm 'a') (toTerm $ Fresh 1)) (Fresh 2)
it "should rename the same variables the same way" $
go (Var "x" :: Var Char)
(PredGoal (predicate "foo" (Var "x" :: Var Char, Var "xs" :: Var [Char])) [])
(Var "xs") `shouldBe`
go (Fresh 0 :: Var Char)
(PredGoal (predicate "foo" (Fresh 0 :: Var Char, Fresh 1 :: Var [Char])) [])
(Fresh 1)
describe "clause renaming" $ do
let rename = doRenameClause
it "should rename variables in the positive literal" $
rename (HornClause (predicate "foo" (Var "x" :: Var Bool)) Top) `shouldBe`
HornClause (predicate "foo" (Fresh 0 :: Var Bool)) Top
it "should rename variables in the negative literal" $
rename (HornClause (predicate "foo" ())
(PredGoal (predicate "bar" (Var "x" :: Var Bool)) [])) `shouldBe`
HornClause (predicate "foo" ())
(PredGoal (predicate "bar" (Fresh 0 :: Var Bool)) [])
it "should apply renamings generated in the positive literal to the negative literal" $
rename (HornClause (predicate "foo" (Var "q" :: Var Char, Var "p" :: Var Char))
(PredGoal (predicate "bar" (Var "p" :: Var Char)) [])) `shouldBe`
HornClause (predicate "foo" (Fresh 0 :: Var Char, Fresh 1 :: Var Char))
(PredGoal (predicate "bar" (Fresh 1 :: Var Char)) [])
describe "term unifier application" $ do
context "to a variable" $ do
it "should replace the variable if there is a corresponding substitution" $
unify (toTerm 'a' // Var "x") (toTerm (Var "x" :: Var Char)) `shouldBe` toTerm 'a'
it "should return the original variable if there is no substitution" $ do
let x = toTerm (Var "x" :: Var Char)
unify M.empty x `shouldBe` x
unify (toTerm 'a' // Var "y") x `shouldBe` x -- No substitution for the right name
unify (toTerm True // Var "x") x `shouldBe` x -- No substitution for the right type
context "to a constant" $
it "should return the original constant" $ do
let u = toTerm 'a' // Var "x" <> toTerm True // Var "y"
unify u (toTerm 'z') `shouldBe` toTerm 'z'
unify u (toTerm False) `shouldBe` toTerm False
context "to a tuple" $
it "should recursively apply the unifier to each element" $ do
unify (toTerm 'a' // Var "x" <> toTerm True // Var "y")
(toTerm ("foo", Var "y" :: Var Bool, Var "x" :: Var Bool, Var "x" :: Var Char)) `shouldBe`
toTerm ("foo", True, Var "x" :: Var Bool, 'a')
unify (toTerm 'a' // Var "x" <> toTerm True // Var "y")
(toTerm (Var "x" :: Var Char, ('z', Var "y" :: Var Bool))) `shouldBe`
toTerm ('a', ('z', True))
context "to a list" $ do
it "should recursively apply the unifier to each element" $
unify (toTerm 'a' // Var "x")
(toTerm [toTerm $ Var "x", toTerm 'b', toTerm $ Var "y"]) `shouldBe`
toTerm [toTerm 'a', toTerm 'b', toTerm $ Var "y"]
it "should apply the unifier to the tail of a list" $
unify (toTerm "xyz" // Var "xs")
(List $ Cons (toTerm (Var "x" :: Var Char)) (toTerm $ Var "xs")) `shouldBe`
toTerm [toTerm $ Var "x", toTerm 'x', toTerm 'y', toTerm 'z']
it "should apply the unifier to both parts of an appended list" $
unify (toTerm "xyz" // Var "xs" <> toTerm "abc" // Var "ys")
(List $ Append (Var "xs" :: Var String) (toTerm $ Var "ys")) `shouldBe`
toTerm "xyzabc"
context "to an ADT constructor" $
it "should recursively apply the unifier to the argument" $ do
unify (toTerm 'a' // Var "x")
(adt Just (Var "x" :: Var Char)) `shouldBe`
adt Just 'a'
unify (toTerm 'a' // Var "x" <> toTerm 'b' // Var "y")
(adt TwoChars (Var "x" :: Var Char, Var "y" :: Var Char)) `shouldBe`
adt TwoChars ('a', 'b')
context "to an arithmetic expression" $
it "should recursively apply the unifier to each element" $ do
unify (toTerm (1 :: Int) // Var "x" <> (2 :: Int) // Var "y")
(Sum (toTerm (Var "x" :: Var Int)) (toTerm $ Var "y")) `shouldBe`
Sum (toTerm (1 :: Int)) (toTerm (2 :: Int))
unify (toTerm (1 :: Int) // Var "x" <> (2 :: Int) // Var "y")
(Difference (toTerm (Var "x" :: Var Int)) (toTerm $ Var "y")) `shouldBe`
Difference (toTerm (1 :: Int)) (toTerm (2 :: Int))
unify (toTerm (1 :: Int) // Var "x" <> (2 :: Int) // Var "y")
(Product (toTerm (Var "x" :: Var Int)) (toTerm $ Var "y")) `shouldBe`
Product (toTerm (1 :: Int)) (toTerm (2 :: Int))
unify (toTerm (1.0 :: Double) // Var "x" <> (2.0 :: Double) // Var "y")
(Quotient (toTerm (Var "x" :: Var Double)) (toTerm $ Var "y")) `shouldBe`
Quotient (toTerm (1.0 :: Double)) (toTerm (2.0 :: Double))
unify (toTerm (1 :: Int) // Var "x" <> (2 :: Int) // Var "y")
(IntQuotient (toTerm (Var "x" :: Var Int)) (toTerm $ Var "y")) `shouldBe`
IntQuotient (toTerm (1 :: Int)) (toTerm (2 :: Int))
unify (toTerm (1 :: Int) // Var "x" <> (2 :: Int) // Var "y")
(Modulus (toTerm (Var "x" :: Var Int)) (toTerm $ Var "y")) `shouldBe`
Modulus (toTerm (1 :: Int)) (toTerm (2 :: Int))
it "should apply the unifier recursively" $ do
unify (adt Just (Var "y" :: Var Char) // Var "x" <> 'a' // Var "y")
(toTerm (True, Var "x" :: Var (Maybe Char)))
`shouldBe` toTerm (True, Just 'a')
unify ((Var "ys" :: Var String) // Var "xs" <> "foo" // Var "ys")
(List $ Cons (toTerm 'a') (toTerm $ Var "xs"))
`shouldBe` toTerm "afoo"
describe "predicate unifier application" $ do
it "should unify the argument when the unifier applies" $
unify (toTerm 'a' // Var "x") (predicate "foo" (Var "x" :: Var Char)) `shouldBe`
predicate "foo" 'a'
it "should return the original predicate when the unifier is irrelevant" $ do
let p = predicate "foo" (Var "x" :: Var Char)
unify M.empty p `shouldBe` p
unify (toTerm 'a' // Var "y") p `shouldBe` p
unify (toTerm True // Var "x") p `shouldBe` p
describe "goal unifier application" $ do
context "to a predicate goal" $ do
it "should unify the predicate" $
unify (toTerm 'a' // Var "x")
(PredGoal (predicate "foo" (Var "x" :: Var Char)) []) `shouldBe`
PredGoal (predicate "foo" 'a') []
it "should ignore the clauses" $ do
let g = PredGoal (predicate "foo" ()) [HornClause (predicate "bar" (Var "x" :: Var Char)) Top]
unify (toTerm 'a' // Var "x") g `shouldBe` g
withParams [IsUnified, IsVariable] $ \constr ->
context "to a unary term goal" $ do
it "should unify the term" $
unify (toTerm 'a' // Var "x") (constr $ toTerm (Var "x" :: Var Char)) `shouldBe`
constr (toTerm 'a')
it "should leave the term unchanged when the unifier does not apply" $
unify (toTerm 'a' // Var "x") (constr $ toTerm (Var "y" :: Var Char)) `shouldBe`
constr (toTerm $ Var "y")
context "to a binary term goal" $ do
let constrs :: [Term Char -> Term Char -> Goal]
constrs = [CanUnify, Identical, Equal, LessThan]
withParams constrs $ \constr -> do
it "should unify both terms" $
unify (toTerm 'a' // Var "x" <> toTerm 'b' // Var "y")
(constr (toTerm (Var "x" :: Var Char)) (toTerm (Var "y" :: Var Char))) `shouldBe`
constr (toTerm 'a') (toTerm 'b')
it "should leave either term unchanged when the unifier does not apply" $ do
let u = toTerm 'a' // Var "x"
unify u (constr (toTerm (Var "y" :: Var Char)) (toTerm (Var "x" :: Var Char))) `shouldBe`
constr (toTerm (Var "y" :: Var Char)) (toTerm 'a')
unify u (constr (toTerm (Var "x" :: Var Char)) (toTerm (Var "y" :: Var Char))) `shouldBe`
constr (toTerm 'a') (toTerm (Var "y" :: Var Char))
context "to a unary outer goal" $
withParams [CutFrame, Track, Once] $ \constr ->
it "should unify the inner goal" $
unify (toTerm 'a' // Var "x")
(constr $ PredGoal (predicate "foo" (Var "x" :: Var Char)) []) `shouldBe`
constr (PredGoal (predicate "foo" 'a') [])
context "to a binary outer goal" $
withParams [And, Or] $ \constr ->
it "should unify both inner goals" $
unify (toTerm 'a' // Var "x")
(constr (PredGoal (predicate "foo" (Var "x" :: Var Char)) [])
(PredGoal (predicate "bar" (Var "x" :: Var Char)) [])) `shouldBe`
constr (PredGoal (predicate "foo" 'a') []) (PredGoal (predicate "bar" 'a') [])
context "to a ternary outer goal" $
withParams [If] $ \constr ->
it "should unify all inner goals" $
unify (toTerm 'a' // Var "x")
(constr (PredGoal (predicate "foo" (Var "x" :: Var Char)) [])
(PredGoal (predicate "bar" (Var "x" :: Var Char)) [])
(CanUnify (toTerm $ Var "x") (toTerm 'a'))) `shouldBe`
constr (PredGoal (predicate "foo" 'a') [])
(PredGoal (predicate "bar" 'a') [])
(CanUnify (toTerm 'a') (toTerm 'a'))
context "to a unitary goal" $
withParams [Top, Bottom, Cut] $ \constr ->
it "should be a noop" $
unify (toTerm 'a' // Var "x") constr `shouldBe` constr
withParams [Nothing, Just 42] $ \n ->
context "to an Alternatives goal" $
it "should unify each subcomponent" $
unify (toTerm 'a' // Var "x" <> toTerm "foo" // Var "xs")
(Alternatives n (toTerm (Var "x" :: Var Char))
(Equal (toTerm 'a') (toTerm $ Var "x"))
(toTerm $ Var "xs")) `shouldBe`
Alternatives n (toTerm 'a') (Equal (toTerm 'a') (toTerm 'a')) (toTerm "foo")
describe "clause unifier application" $ do
it "should unify the positive literal when the unifier applies" $
unify (toTerm 'a' // Var "x")
(HornClause (predicate "foo" (Var "x" :: Var Char)) Top) `shouldBe`
HornClause (predicate "foo" 'a') Top
it "should unify the negative literal when the unifier applies" $
unify (toTerm 'a' // Var "x" <> toTerm True // Var "y")
(HornClause (predicate "foo" ())
(PredGoal (predicate "bar" (Var "x" :: Var Char)) [])) `shouldBe`
HornClause (predicate "foo" ()) (PredGoal (predicate "bar" 'a') [])
it "should leave the positive literal unchanged when the unifier does not apply" $ do
let c = HornClause (predicate "foo" (Var "x" :: Var Char)) Top
unify M.empty c `shouldBe` c
unify (toTerm 'a' // Var "y") c `shouldBe` c
unify (toTerm True // Var "x") c `shouldBe` c
it "should leave the negative literal unchanged when the unifier does not apply" $ do
let c = HornClause (predicate "foo" ()) (PredGoal (predicate "bar" (Var "x" :: Var Bool)) [])
unify (toTerm True // Var "y") c `shouldBe` c
describe "resolution" $ do
let runTest p c = runMockUnification (resolve p c)
it "should rename variables in the clause" $
runTest (predicate "foo" ())
(HornClause (predicate "foo" ())
(PredGoal (predicate "bar" (Var "x" :: Var Bool)) [])) `shouldBe`
Just (PredGoal (predicate "bar" (Fresh 0 :: Var Bool)) [], M.empty)
it "should return any unifications made" $
runTest (predicate "foo" ('a', Var "x" :: Var Bool))
(HornClause (predicate "foo" (Var "y" :: Var Char, True)) Top) `shouldBe`
Just (Top, toTerm 'a' // Fresh 0 <> toTerm True // Var "x")
it "should apply the unifier to variables in the clause" $
runTest (predicate "foo" 'a')
(HornClause (predicate "foo" (Var "x" :: Var Char))
(PredGoal (predicate "bar" (Var "x" :: Var Char)) [])) `shouldBe`
Just (PredGoal (predicate "bar" 'a') [], toTerm 'a' // Fresh 0)
it "should not apply the unifier to renamed variables" $
runTest (predicate "foo" (Var "x" :: Var Char))
(HornClause (predicate "foo" 'a')
(PredGoal (predicate "bar" (Var "x" :: Var Char)) [])) `shouldBe`
Just (PredGoal (predicate "bar" (Fresh 0 :: Var Char)) [], toTerm 'a' // Var "x")
it "should fail when the goal does not unify with the clause" $
runTest (predicate "foo" 'a') (HornClause (predicate "foo" 'b') Top) `shouldBe` Nothing
|
jbearer/hspl
|
test/UnificationTest.hs
|
mit
| 43,686
| 0
| 30
| 12,335
| 17,438
| 8,607
| 8,831
| -1
| -1
|
bmiTell :: Double -> String
bmiTell bmi
| bmi <= 18.5 = "You're underweight, you emo, you!"
| bmi <= 25.0 = "You're supposedly normal. Pffft, I bet you're ugly!"
| bmi <= 30.0 = "You're fat! Lose some weight, fatty!"
| otherwise = "You're a whale, congratulations!"
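-- A couple of illustrative calls (added as a sketch; the exact strings come
-- straight from the guards above):
--
-- >>> bmiTell 17.0
-- "You're underweight, you emo, you!"
-- >>> bmiTell 32.5
-- "You're a whale, congratulations!"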
|
v0lkan/learning-haskell
|
session-003/010-bmi-switch.hs
|
mit
| 282
| 1
| 8
| 64
| 70
| 32
| 38
| 6
| 1
|
module Data.Encrypted.Internal where
import Data.Aeson
import Data.Monoid
import Data.Text (Text)
import qualified Data.Text.Encoding as TE
import Data.ByteString (ByteString)
import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString.Base64 as B64
import Control.Error
-- | Strict JSON encode
encodeS :: ToJSON a => a -> ByteString
encodeS = mconcat . BL.toChunks . encode
-- | Strict JSON decode
decodeS :: FromJSON a => ByteString -> Maybe a
decodeS = decode . BL.fromChunks . return
b64text :: ByteString -> Text
b64text = TE.decodeUtf8 . B64.encode
unb64text :: Text -> Maybe ByteString
unb64text = rightMay . B64.decode . TE.encodeUtf8
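-- Illustrative round trip (not part of the original module; assumes the
-- OverloadedStrings extension so that "hi" is a ByteString literal):
--
-- >>> b64text "hi"
-- "aGk="
-- >>> unb64text "aGk="
-- Just "hi"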
|
tel/enc
|
src/Data/Encrypted/Internal.hs
|
mit
| 687
| 0
| 7
| 121
| 187
| 109
| 78
| 17
| 1
|
{-# LANGUAGE CPP #-}
module GHCJS.DOM.HTMLTitleElement (
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
module GHCJS.DOM.JSFFI.Generated.HTMLTitleElement
#else
module Graphics.UI.Gtk.WebKit.DOM.HTMLTitleElement
#endif
) where
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
import GHCJS.DOM.JSFFI.Generated.HTMLTitleElement
#else
import Graphics.UI.Gtk.WebKit.DOM.HTMLTitleElement
#endif
|
plow-technologies/ghcjs-dom
|
src/GHCJS/DOM/HTMLTitleElement.hs
|
mit
| 465
| 0
| 5
| 39
| 33
| 26
| 7
| 4
| 0
|
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveGeneric #-}
module Book where
import GHC.Generics
import Data.Aeson
import Data.Text (Text)
import Data.Monoid
import qualified Data.Text as T
-- | Books are ordered by author, descending (note the flipped comparison).
instance Ord Book where
  (Book a _ _ _) `compare` (Book a2 _ _ _) = a2 `compare` a
data Book
= Book
{ author :: Text
, title :: Text
, location :: Text
, format :: Text
} deriving (Show, Eq, Read, Generic, ToJSON, FromJSON)
data Bookshelf
= Bookshelf
{ books :: [Book] }
deriving (Show, Eq, Read, Generic, ToJSON, FromJSON)
|
tippenein/bookshelf
|
src/Book.hs
|
mit
| 584
| 0
| 9
| 117
| 202
| 118
| 84
| 22
| 0
|
import Test.QuickCheck
import Data.List
-- testing whether idempotence property is true
prop_Idempotent :: Ord a => [a] -> Bool
prop_Idempotent xs = sort (sort xs) == sort xs
main :: IO ()
main = quickCheck (prop_Idempotent :: [Int] -> Bool) -- we need to specify a concrete element type:
                                                     -- if we left it at [a] -> Bool, GHC would not
                                                     -- know which test data to generate
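-- (Illustrative alternative, not in the original file: the same polymorphic
-- property can be instantiated at any other concrete type, for example
-- quickCheck (prop_Idempotent :: [String] -> Bool).)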
|
calvinchengx/learnhaskell
|
quickcheck/prop_indempotent.hs
|
mit
| 462
| 0
| 8
| 171
| 94
| 49
| 45
| 6
| 1
|
module SlidingPuzzle (solveSlidingPuzzle) where
import Data.Vector (Vector, (!), (//))
import qualified Data.Vector as V
import Data.Maybe (fromJust, mapMaybe)
import Data.List (elemIndex)
import qualified Data.Set as S
import Control.Monad.Reader
import Control.Monad.State.Lazy
-- | Generalizing this solution for different dimensions of the board.
-- It can be 2x2, 3x3, 4x4 and so on.
type BoardDimension = Int
-- | Simple representation of the board using vector
type Board = Vector Int
-- | Depth limit configuration for IDA* search
type DeapthLimit = Int
-- | Possible moves for the empty tile
data MoveDirection = North | East | South | West
-- | State of the puzzle during the search for solution. Keeps some information
-- regarding how we got to the current board (we can reconstruct the whole path
-- using this information)
data PuzzleState = PuzzleState { board :: Board -- current board representation
, dimension :: Int -- board dimension (dimension x dimension)
, emptyTile :: Int -- empty tile location
, distance :: Int -- manhattan distance of the entire board
, moves :: Int -- number of moves it took us to get to this board
, previous :: Maybe PuzzleState -- previous state we came from
} deriving (Show, Eq, Ord)
-- | Convert matrix indicies to vector index
m2v :: Int -> Int -> Int -> Int
m2v n row column = n * row + column
-- | Convert vector index to matrix indicies
v2m :: Int -> Int -> (Int, Int)
v2m n index = index `quotRem` n
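-- Worked example (added for illustration): on a 3x3 board, m2v 3 1 2 == 5 and
-- v2m 3 5 == (1, 2), i.e. the two conversions are inverses for 0 <= index < n*n.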
-- | Given the list of tiles generate our board representation
boardFromList :: [Int] -> Board
boardFromList = V.fromList
-- | Given CORRECT input (no validation) we get the board dimension and
-- the list of tiles from it.
parseInput :: String -> (BoardDimension, [Int])
parseInput input =
let [n]:tiles = map (map read . words) (lines input)
in (n, concat tiles)
-- | The sliding puzzle can be solved only if this condition holds:
-- *** zeroRow + numberOfInversions must be even ***
-- where zeroRow is the row number of the empty tile (row index starts from 1) and
-- numberOfInversions is the number of pairs Ai, Aj such that i < j but Ai > Aj
isSolvable :: BoardDimension -> [Int] -> Bool
isSolvable n tiles =
let zeroRow = 1 + fromJust (0 `elemIndex` tiles) `div` n
numberOfInversions = length [x | (x, xi) <- zip tiles [1..] :: [(Int,Int)], (y, yi) <- zip tiles [1..], x /= 0, y /= 0, yi > xi, x > y]
in (zeroRow + numberOfInversions) `mod` 2 == 0
-- | Check if all the tiles are at the correct place
isSolutionFound :: PuzzleState -> Bool
isSolutionFound puzzleState = distance puzzleState == 0
-- | Update puzzle state after switching empty tile with tile found at position (row, column)
-- In order to update manhattan distance of the entire board we no longer need to compute manhattan
-- distance for every tile, as we only move 2 tiles, one of them is an empty tile which always has
-- manhattan distance equal to 0. So we take old distance, subtract manhattan distance of the tile
-- we are moving, move the tile, add new manhattan distance of that tile to the board distance.
updatePuzzleState :: PuzzleState -> Int -> Int -> PuzzleState
updatePuzzleState puzzleState row column =
puzzleState { board = board'
, emptyTile = k
, distance = newDistance
, moves = moves puzzleState + 1
, previous = Just puzzleState }
where
n = dimension puzzleState
k = m2v n row column
b = board puzzleState
board' = b // [(emptyTile puzzleState, b ! k), (k, 0)]
newDistance = distance puzzleState - manhattan (b ! k) n k + manhattan (b ! k) n (emptyTile puzzleState)
-- | Update puzzle state if the empty tile is not moving off the board
makeMove :: PuzzleState -> MoveDirection -> Maybe PuzzleState
makeMove puzzleState direction =
case direction of
North -> if row <= 0 then Nothing else Just $ updatePuzzleState puzzleState (row - 1) column
East -> if column >= n - 1 then Nothing else Just $ updatePuzzleState puzzleState row (column + 1)
South -> if row >= n - 1 then Nothing else Just $ updatePuzzleState puzzleState (row + 1) column
West -> if column <= 0 then Nothing else Just $ updatePuzzleState puzzleState row (column - 1)
where
n = dimension puzzleState
(row, column) = v2m n (emptyTile puzzleState)
-- | Find all possible states which can be achieved by making a move in any direction on the current board
generatePossibleStates :: PuzzleState -> [PuzzleState]
generatePossibleStates puzzleState = mapMaybe (makeMove puzzleState) [North, East, South, West]
-- | Manhattan distance of a tile at vector index on a board with dimensions n x n
manhattan :: Int -> Int -> Int -> Int
manhattan tile n index = if tile == 0 then 0 else rowDistance + columnDistance
where
(row, column) = v2m n index
(tileRow, tileColumn) = (tile - 1) `quotRem` n
rowDistance = abs (row - tileRow)
columnDistance = abs (column - tileColumn)
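-- Worked example (added for illustration): manhattan 1 3 4 == 2, because tile 1
-- sits at vector index 4 of a 3x3 board, i.e. at (row, column) = (1, 1), while
-- its goal position is (0, 0), giving |1-0| + |1-0| = 2.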
-- | Manhattan distance of the entire board
boardDistance :: BoardDimension -> Board -> Int
boardDistance n currentBoard = sum $ map (\index -> manhattan (currentBoard ! index) n index) [0..n*n-1]
-- | Construct the path from initial board to the solution
solutionPath :: PuzzleState -> [Board]
solutionPath puzzleState = reverse $ boards puzzleState
where
boards currentState = case previous currentState of
Nothing -> [board currentState]
Just previousState -> board currentState : boards previousState
-- | Depth-first search for the solution with a given depth limit which we do not exceed
--
-- ReaderT is responsible for storing constant data like dimensions of the board and deapth limit.
-- StateT (S.Set Board) is responsible for storing visited states so that we do not visit them again.
-- StateT XXX is responsible for keeping search state, which helps during the search and also when
-- the solution is found it helps to easily trace how we reached that solution.
search :: ReaderT DeapthLimit (StateT (S.Set Board) (StateT PuzzleState Maybe)) ()
search = do
deapthLimit <- ask
visited <- lift get
puzzleState <- (lift . lift) get
unless (isSolutionFound puzzleState) $ do
let validStates s = S.notMember (board s) visited && distance s + moves s <= deapthLimit
let possibleStates = filter validStates (generatePossibleStates puzzleState)
case possibleStates of
[] -> mzero
_ -> msum $ map (\possibleState -> do lift $ put (S.insert (board possibleState) visited); lift . lift $ put possibleState; search) possibleStates
-- | IDA* search.
--
-- Given an initial depth limit we try to find a solution; if we are not successful
-- then we increase the limit by some constant value and try again. We repeat this
-- until the solution is finally found.
findSolution :: PuzzleState -> DeapthLimit -> Maybe [Board]
findSolution puzzleState deapthLimit =
let solution = execStateT (runStateT (runReaderT search deapthLimit) (S.singleton (board puzzleState))) puzzleState
in case solution of
Just finalState -> Just $ solutionPath finalState
Nothing -> findSolution puzzleState (deapthLimit + 5)
-- | Solve the sliding puzzle.
-- Make sure that the given puzzle is solvable and if it is then start the IDA* search.
-- If the search is unsuccessful then increase the depth limit and repeat the search.
-- Do it until the solution is finally found.
solveSlidingPuzzle :: String -> Maybe [[Int]]
solveSlidingPuzzle input =
let (n, tiles) = parseInput input
solvable = isSolvable n tiles
initialBoard = boardFromList tiles
emptyTileLocation = fromJust $ V.elemIndex 0 initialBoard
puzzleState = PuzzleState initialBoard n emptyTileLocation (boardDistance n initialBoard) 0 Nothing
in if solvable
then fmap (map V.toList) $ findSolution puzzleState (distance puzzleState)
else Nothing
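-- A minimal example of how this might be driven (illustrative only; the input
-- format is the one expected by parseInput above, and the expected result was
-- worked out by hand rather than taken from the original repository):
--
-- >>> solveSlidingPuzzle "2\n1 2\n0 3"
-- Just [[1,2,0,3],[1,2,3,0]]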
|
ksaveljev/sliding-puzzle
|
SlidingPuzzle.hs
|
mit
| 8,260
| 0
| 24
| 1,985
| 1,792
| 972
| 820
| -1
| -1
|
{-# LANGUAGE DeriveDataTypeable, OverlappingInstances #-}
{-# LANGUAGE NoMonomorphismRestriction, DeriveDataTypeable #-}
module Control.SQL
( squery, query, logged, reed
, myconnect, collectRows, disconnect
, Statement, getFieldValue, getFieldValueMB, getFieldsTypes
, Query (..), Action (..), Modifier (..)
, Id (..), Bind (..)
, Expression (..), ToEx (..), Control.SQL.equals, ands
, sqlExceptions
)
where
import Autolib.Reader as R
import Autolib.ToDoc as T
import qualified Local
import Data.List
import Data.Typeable
import Text.ParserCombinators.Parsec.Expr
import Mysqlconnect
import Database.HSQL.MySQL hiding ( query, collectRows )
import qualified Database.HSQL.MySQL
import Control.Monad ( when )
-------------------------------------------------------------------------------
-- | structured query
squery :: Connection -> Query -> IO Statement
squery con scom = query con (show scom)
-- | excessive logging
query :: Connection -> String -> IO Statement
query con com = do
logged com
Database.HSQL.MySQL.query con com
`catchSql` \ e -> do
logged $ "query: " ++ (show e)
error $ "SQLqueries.error in query:" ++ (show e) ++ com
collectRows fun stat = do
i <- Database.HSQL.MySQL.collectRows fun stat
`catchSql` \ e -> do
logged $ "collectRows: " ++ (show e)
error $ "SQLqueries.error in collectRows: " ++ (show e)
logged ( show i )
return i
logfile = "/tmp/HSQL.log"
logged cs = when ( Local.debug ) $ do
appendFile logfile "logged:"
appendFile logfile cs
appendFile logfile strich
strich = "\n--------------------------------\n"
-- reed :: ( Read a, Show a, Typeable a ) => String -> a
reed cs = case ( readsPrec 0 cs ) of
[(x, "")] -> x
sonst ->
error $ unlines [ "kein parse."
, "für eingabe:", cs
, "für typ:" , show (typeOf (fst $ head sonst ) )
, "readsPrec:" , show sonst
]
--------------------------------------------------------------------------------
data Id = Id [ String ] deriving Typeable
instance T.ToDoc Id where
toDoc (Id ids) = hcat $ intersperse (T.char '.') $ map text ids
instance R.Reader Id where
atomic_reader = do
ids <- R.my_identifier `R.sepBy1` R.my_dot
return $ Id ids
-------------------------------------------------------------------------------
data Query = Query Action [ Modifier ] deriving Typeable
instance T.ToDoc Query where
toDoc (Query a ms) = T.vcat [ T.toDoc a
, ( T.nest 4 $ T.vcat $ map T.toDoc ms ) T.<+> T.char ';'
]
instance R.Reader Query where
atomic_reader = do
a <- reader
ms <- R.many reader
R.my_semi
return $ Query a ms
-------------------------------------------------------------------------------
data Bind = Bind Expression (Maybe Id) deriving Typeable
instance T.ToDoc Bind where
toDoc (Bind e mi) = case mi of
Nothing -> T.toDoc e
Just i -> T.hsep [ T.toDoc e, T.text "as", T.toDoc i ]
instance R.Reader Bind where
atomic_reader = do
e <- R.reader
mi <- R.option Nothing $ do
                 -- FIXME: even if one writes ... <|> R.my_reserved "AS" here,
                 -- "vorlesung.VNr AS VNr" still does not parse,
                 -- but "vorlesung.VNr as VNr" works
R.my_reserved "as"
fmap Just R.reader
return $ Bind e mi
----------------------------------------------------------------------------
data Action = Select [ Bind ]
| Insert Id [(Id, Expression)]
| Update Id [(Id, Expression)]
| Delete Id
deriving Typeable
instance T.ToDoc Action where
toDoc (Select bs) = T.text "SELECT"
<+> T.sepBy T.comma ( map T.toDoc bs )
toDoc (Insert tab pairs) = T.text "INSERT" <+> T.vcat
[ T.text "INTO" <+> T.toDoc tab <+> T.dutch_tuple ( map ( T.toDoc . fst ) pairs )
, T.text "VALUES" <+> T.dutch_tuple ( map ( T.toDoc . snd ) pairs )
]
toDoc (Update tab pairs) = T.text "UPDATE" <+> T.toDoc tab
<+> T.text "SET" <+> T.sepBy T.comma ( do
(e, v) <- pairs
return $ hsep [ T.toDoc e, T.equals, T.toDoc v ]
)
toDoc (Delete tab) = T.text "DELETE" <+> T.text "FROM" <+> T.toDoc tab
instance R.Reader Action where
atomic_reader = do
R.my_reserved "SELECT" ; bs <- reader `R.sepBy` R.my_comma ; return $ Select bs
-- TODO: complete this
----------------------------------------------------------------------------
-- | note: arguments to Left_Join should be Table_References, not Ids
data Table_Reference = Table_Id Id
| Left_Join Id
Id
Expression -- ^ join condition
deriving Typeable
instance T.ToDoc Table_Reference where
toDoc (Table_Id id) = T.toDoc id
toDoc (Left_Join l r c) =
T.hsep [ T.toDoc l , T.text "LEFT", T.text "JOIN", T.toDoc r
, T.text "ON", T.toDoc c
]
instance R.Reader Table_Reference where
atomic_reader = do
l <- R.reader
option ( Table_Id l ) $ do
R.my_reserved "LEFT"
R.my_reserved "JOIN"
r <- R.reader
R.my_reserved "ON"
c <- R.reader
return $ Left_Join l r c
----------------------------------------------------------------------------
data Modifier = From [ Table_Reference ]
| Where Expression
| Using [ Bind ]
deriving Typeable
instance T.ToDoc Modifier where
toDoc (From ids) = T.text "FROM" <+> T.sepBy T.comma ( map T.toDoc ids )
toDoc (Where e) = T.text "WHERE" <+> T.toDoc e
toDoc (Using b) = T.text "USING" <+> T.sepBy T.comma ( map T.toDoc b )
instance R.Reader Modifier where
atomic_reader = do { R.my_reserved "FROM" ; ids <- many1 reader ; return $ From ids }
-- TODO: complete this
-------------------------------------------------------------------------------
data Expression = ENull
| EId Id
| EInteger Integer
-- | ETime ClockTime -- TODO
| EString String
| EFun Id [ Expression ]
| EBinop String Expression Expression -- ^ completely parenthesized
deriving Typeable
quote :: String -> String
quote cs = do
c <- cs
if c `elem` [ '\'', '"', '\\', '`' ]
then [ '\\' , c ]
else [ c ]
instance T.ToDoc Expression where
toDoc (ENull) = text "NULL"
toDoc (EId id) = T.toDoc id
toDoc (EInteger i) = T.toDoc i
-- NOTE: this should NEVER cut off a string (max_string_length)
toDoc (EString s) = T.doubleQuotes $ T.text $ quote s
-- note: open par must come immediately after function symbol (no <+>)
toDoc (EFun fun args) = T.toDoc fun <> T.dutch_tuple ( map T.toDoc args )
toDoc (EBinop "BETWEEN" x (EBinop "AND" y z))
= T.parens $ T.fsep [ T.toDoc x, T.text "BETWEEN"
, T.toDoc y, T.text "AND", T.toDoc z
]
toDoc (EBinop op x y) = T.parens $ T.fsep [ T.toDoc x, T.text op, T.toDoc y ]
instance R.Reader Expression where
atomic_reader = buildExpressionParser operators atomic
atomic :: Parser Expression
atomic = R.my_parens reader
R.<|> do { R.my_reserved "NULL" ; return $ ENull }
R.<|> id_or_fun
R.<|> do { i <- R.my_integer ; return $ EInteger i }
R.<|> do { s <- R.my_stringLiteral ; return $ EString s }
id_or_fun = do
id <- reader
args <- option Nothing $ R.my_parens $ fmap Just $ reader `R.sepBy` R.my_comma
return $ case args of
Nothing -> EId id
Just xs -> EFun id xs
operators =
let lop cs = op cs (EBinop cs) AssocLeft
op name f =
Infix ( do { R.my_symbol name; return f } R.<?> "operator" )
in [ map lop [ "*", "/" ]
, map lop [ "+", "-" ]
, map lop [ "<", "=", ">" ]
, map lop [ "IS" ]
, map lop [ "AND", "OR" ]
, map lop [ "BETWEEN" ]
]
------------------------------------------------------------------------------
class ToEx a where
toEx :: a -> Expression
------------------------------------------------------------------------------
equals :: Expression -> Expression -> Expression
equals = EBinop "="
ands :: [ Expression ] -> Expression
ands [] = Control.SQL.equals (EInteger 0) (EInteger 0)
ands xs = foldr1 (EBinop "AND") xs
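-- Illustrative use of the expression combinators (not in the original module):
-- the condition
--
--   ands [ EId (Id ["vorlesung","VNr"]) `Control.SQL.equals` EInteger 42 ]
--
-- pretty-prints (via ToDoc) roughly as (vorlesung.VNr = 42), while an empty
-- list degenerates to the always-true (0 = 0).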
|
Erdwolf/autotool-bonn
|
src/Control/SQL.hs
|
gpl-2.0
| 8,343
| 20
| 18
| 2,192
| 2,635
| 1,358
| 1,277
| 181
| 2
|
import UCI
import OpenBookModule
main :: IO ()
main = uci movesArray
|
adityashah30/haskellchess
|
chessEngine/Main.hs
|
gpl-2.0
| 69
| 0
| 7
| 12
| 31
| 14
| 17
| 4
| 1
|
predecessor = predecessor
-- comment 0
-- |comment 1
successor :: a
successor = successor
|
evolutics/haskell-formatter
|
testsuite/resources/source/comments/depends_on_displacement/single_annotation/line_pair/none_before/Output.hs
|
gpl-3.0
| 91
| 1
| 6
| 16
| 27
| 12
| 15
| 3
| 1
|
module Runtime
(RuntimeAst(..),RuntimeType(..),RuntimeFunc(..),
Compile,SourcePos,compileError,
AstType(..),AstFuncSig(..),
astTypeName,astTypeSize,astTypeImportSize,
astTypeErrorName,astTypeSourcePos,astTypeIsImport,
annotateRuntime)
where
import Compile(Compile,SourcePos,compileError)
import Check
(Ast(..),AstType(..),AstFunc(..),AstFuncSig(..),AstStmt,
astTypeName,astTypeSize,astTypeImportSize,
astTypeErrorName,astTypeSourcePos,astTypeIsImport,
astTypeImportSize)
class RuntimeType rtt where
annotateType :: AstType -> Compile rtt
class RuntimeFunc rtf where
annotateFunc :: AstFuncSig -> Compile rtf
data RuntimeAst rtt rtf =
RuntimeAst [(AstType,Maybe rtt)] [(AstFuncSig,Either AstStmt rtf)]
annotateRuntime :: (RuntimeType rtt, RuntimeFunc rtf) =>
Ast -> Compile (RuntimeAst rtt rtf)
annotateRuntime (Ast astTypes astFuncs) = do
types <- mapM annotateAstType astTypes
funcs <- mapM annotateAstFunc astFuncs
return (RuntimeAst types funcs)
where
annotateAstType astType
| astTypeIsImport astType = do
rtt <- annotateType astType
return (astType,Just rtt)
| otherwise = return (astType,Nothing)
annotateAstFunc (AstImportFunc astFuncSig) = do
rtf <- annotateFunc astFuncSig
return (astFuncSig,Right rtf)
annotateAstFunc (AstFunc astFuncSig astStmt) =
return (astFuncSig,Left astStmt)
|
qpliu/esolang
|
blo/hs/Runtime.hs
|
gpl-3.0
| 1,451
| 0
| 12
| 280
| 441
| 242
| 199
| 35
| 2
|
#!/usr/bin/env runhaskell
--
-- Copyright 2014 Wesley Tanaka <http://wtanaka.com/>
--
-- This file is part of https://github.com/wtanaka/haskell
--
-- https://github.com/wtanaka/haskell is free software: you can
-- redistribute it and/or modify it under the terms of the GNU General
-- Public License as published by the Free Software Foundation,
-- either version 3 of the License, or (at your option) any later
-- version.
--
-- https://github.com/wtanaka/haskell is distributed in the hope that
-- it will be useful, but WITHOUT ANY WARRANTY; without even the
-- implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-- PURPOSE. See the GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with https://github.com/wtanaka/haskell . If not, see
-- <http://www.gnu.org/licenses/>.
largenum = 10000000
-- long = take largenum [1..]
as = repeat 'a'
long = take largenum as
-- asbs = 'a' : 'b' : asbs
-- long = take largenum asbs
-- O(1) memory
main = print (long !! (largenum-1))
-- O(1) memory
-- main = print (length long)
|
wtanaka/haskell
|
Mem1.hs
|
gpl-3.0
| 1,114
| 3
| 8
| 186
| 81
| 50
| 31
| 4
| 1
|
{-|
A 'Posting' represents a 'MixedAmount' being added to or subtracted from a
single 'Account'. Each 'Transaction' contains two or more postings which
should add up to 0. Postings also reference their parent transaction, so
we can get a date or description for a posting (from the transaction).
Strictly speaking, \"entry\" is probably a better name for these.
-}
module Hledger.Data.Posting
where
import Data.List
import Data.Ord
import Data.Time.Calendar
import Test.HUnit
import Text.Printf
import Hledger.Utils
import Hledger.Data.Types
import Hledger.Data.Amount
import Hledger.Data.AccountName
import Hledger.Data.Dates (nulldate, spanContainsDate)
instance Show Posting where show = showPosting
nullposting :: Posting
nullposting = Posting False "" nullmixedamt "" RegularPosting [] Nothing
showPosting :: Posting -> String
showPosting (Posting{paccount=a,pamount=amt,pcomment=com,ptype=t}) =
concatTopPadded [showaccountname a ++ " ", showamount amt, comment]
where
ledger3ishlayout = False
acctnamewidth = if ledger3ishlayout then 25 else 22
showaccountname = printf ("%-"++(show acctnamewidth)++"s") . bracket . elideAccountName width
(bracket,width) = case t of
BalancedVirtualPosting -> (\s -> "["++s++"]", acctnamewidth-2)
VirtualPosting -> (\s -> "("++s++")", acctnamewidth-2)
_ -> (id,acctnamewidth)
showamount = padleft 12 . showMixedAmountOrZero
comment = if null com then "" else " ; " ++ com
-- XXX refactor
showPostingForRegister :: Posting -> String
showPostingForRegister (Posting{paccount=a,pamount=amt,ptype=t}) =
concatTopPadded [showaccountname a ++ " ", showamount amt]
where
ledger3ishlayout = False
acctnamewidth = if ledger3ishlayout then 25 else 22
showaccountname = printf ("%-"++(show acctnamewidth)++"s") . bracket . elideAccountName width
(bracket,width) = case t of
BalancedVirtualPosting -> (\s -> "["++s++"]", acctnamewidth-2)
VirtualPosting -> (\s -> "("++s++")", acctnamewidth-2)
_ -> (id,acctnamewidth)
showamount = padleft 12 . showMixedAmountOrZeroWithoutPrice
isReal :: Posting -> Bool
isReal p = ptype p == RegularPosting
isVirtual :: Posting -> Bool
isVirtual p = ptype p == VirtualPosting
isBalancedVirtual :: Posting -> Bool
isBalancedVirtual p = ptype p == BalancedVirtualPosting
hasAmount :: Posting -> Bool
hasAmount = (/= missingamt) . pamount
postingTypeFromAccountName a
| head a == '[' && last a == ']' = BalancedVirtualPosting
| head a == '(' && last a == ')' = VirtualPosting
| otherwise = RegularPosting
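-- For illustration (not in the original file), the bracketing conventions give:
-- postingTypeFromAccountName "[assets:cash]" == BalancedVirtualPosting
-- postingTypeFromAccountName "(expenses)"    == VirtualPosting
-- postingTypeFromAccountName "income"        == RegularPosting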
accountNamesFromPostings :: [Posting] -> [AccountName]
accountNamesFromPostings = nub . map paccount
sumPostings :: [Posting] -> MixedAmount
sumPostings = sumMixedAmountsPreservingHighestPrecision . map pamount
postingDate :: Posting -> Day
postingDate p = maybe nulldate tdate $ ptransaction p
-- |Is this posting cleared? If this posting was individually marked
-- as cleared, returns True. Otherwise, return the parent
-- transaction's cleared status or, if there is no parent
-- transaction, return False.
postingCleared :: Posting -> Bool
postingCleared p = if pstatus p
then True
else maybe False tstatus $ ptransaction p
-- | Does this posting fall within the given date span ?
isPostingInDateSpan :: DateSpan -> Posting -> Bool
isPostingInDateSpan s = spanContainsDate s . postingDate
isEmptyPosting :: Posting -> Bool
isEmptyPosting = isZeroMixedAmount . pamount
-- | Get the minimal date span which contains all the postings, or
-- DateSpan Nothing Nothing if there are none.
postingsDateSpan :: [Posting] -> DateSpan
postingsDateSpan [] = DateSpan Nothing Nothing
postingsDateSpan ps = DateSpan (Just $ postingDate $ head ps') (Just $ addDays 1 $ postingDate $ last ps')
where ps' = sortBy (comparing postingDate) ps
tests_Hledger_Data_Posting = TestList [
]
|
trygvis/hledger
|
hledger-lib/Hledger/Data/Posting.hs
|
gpl-3.0
| 4,052
| 0
| 14
| 834
| 1,007
| 542
| 465
| 68
| 5
|
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Handler.UserById where
import Import
$(deriveJSON defaultOptions ''User)
getUserByIdR :: String -> Handler Value
getUserByIdR uId = runDB (selectFirst [UserIdent ==. (pack uId)] []) >>= returnJson
getFinishUserR :: String -> String -> String -> Handler Value
getFinishUserR email name city = do
runDB $ updateWhere [UserIdent ==. (pack email)] [ UserName =. Just (pack name), UserCity =. Just (pack city) ]
return $ object [ "status" .= ("ok" :: Text) ]
getUserByKeyR :: UserId -> Handler Value
getUserByKeyR uId = runDB (get uId) >>= returnJson
|
weshack/thelist
|
TheList/Handler/UserById.hs
|
gpl-3.0
| 604
| 0
| 13
| 102
| 219
| 110
| 109
| -1
| -1
|
module NLP.Grabber.Download where
import Control.Monad.Trans
import Control.Monad.Trans.Maybe
import Data.Tree.NTree.TypeDefs
import Network.HTTP
import Network.URI
import Text.XML.HXT.Core
import Text.XML.HXT.HTTP
import Data.Text (Text)
import qualified Data.Text as T
import Control.Applicative
openUrl :: Text -> MaybeT IO Text
openUrl url = let surl = T.unpack url in
case parseURI surl of
Nothing -> fail ""
Just u -> T.pack <$> liftIO ( getResponseBody =<< simpleHTTP (mkRequest GET u))
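-- Usage sketch (illustrative URL, not part of the original source):
--
-- > mbody <- runMaybeT (openUrl "http://example.com/")
-- > -- 'mbody' is Nothing if the URL does not parse, otherwise the fetched page text.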
download :: Bool -> Text -> IO (IOSArrow XmlTree (NTree XNode))
download asHTML url = let surl=T.unpack url in
return $ readDocument config surl
where
config= if asHTML
then withParseHTML yes : baseConfig
else baseConfig
baseConfig = [
withWarnings no
, withHTTP []
, withRedirect True
, withEncodingErrors no]
|
azapps/nlp.hs
|
src/NLP/Grabber/Download.hs
|
gpl-3.0
| 972
| 0
| 16
| 280
| 282
| 151
| 131
| 27
| 2
|
-- | Round 1A 2010 Problem A. Rotate
-- https://code.google.com/codejam/contest/544101/dashboard#s=p0
module Rotate where
-- constant imports
import Text.ParserCombinators.Parsec
import Text.Parsec
import System.IO (openFile, hClose, hGetContents, hPutStrLn, IOMode(ReadMode), stderr)
import Debug.Trace (trace)
-- variable imports
import qualified Data.Set as S
import Data.List (group, sort, sortBy)
-- variable Data
data TestCase = TestCase
Int -- ^ number of rows/cols
    Int      -- ^ K: how many consecutive pieces are needed in a row, col or diag
    [String] -- ^ rows (matrix of 'R', 'B' or '.')
deriving (Show, Eq, Ord)
-- variable implementation
solveCase c@(TestCase numRows numKs rows) = solve c
solve c@(TestCase numRows numKs rows) = final $
sortBy desc $ map head $ filter (pLength numKs) $ concat $ map group $ allRows numRows $ rotate rows
allRows k rs = concat $ zipWith ($) [transpose, diagonals k, id] (repeat rs)
final xs
| pR && pB = "Both"
| pR = "Red"
| pB = "Blue"
| otherwise = "Neither"
where
pR = any (=='R') xs
pB = any (=='B') xs
gravityRight rows = map (shiftRight '.') rows
rotate = gravityRight
pLength k xs = length xs >= k
asc x y
| x < y = LT
| x > y = GT
| otherwise = EQ
desc x = rev . asc x
where
rev GT = LT
rev LT = GT
rev x = x
transpose :: [[a]] -> [[a]]
transpose (xs:[]) = [[x] | x <- xs]
transpose (xs:xss) = zipWith (:) xs (transpose xss)
shiftRight' :: Eq a => a -> [a] -> (Int, [a])
shiftRight' x = foldr fn (0, [])
where
fn ch (n, xs)
| ch == x = (n + 1, xs)
| otherwise = (n, ch:xs)
shiftRight :: Eq a => a -> [a] -> [a]
shiftRight x s = pad x n xs
where
(n, xs) = shiftRight' x s
pad :: a -> Int -> [a] -> [a]
pad _ 0 s = s
pad ch n s = pad ch (n-1) $ ch:s
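-- Worked example of the "gravity" step used by 'rotate' (illustrative, not
-- part of the original source): pieces in a row slide to the right and the
-- removed blanks are padded back on the left.
--
-- > shiftRight' '.' "R.B." == (2, "RB")
-- > pad '.' 2 "RB"         == "..RB"
-- > shiftRight '.' "R.B."  == "..RB"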
starters :: Int -> [(Int, Int)]
starters n = concat $ [[(x, 0) | x <- [0..n-1]]
, [(n-1, y) | y <- [1..n-1]]
, [(0, y) | y <- [1..n-1]]]
indices :: ((Int, Int) -> (Int, Int)) -> Int -> (Int, Int) -> [(Int, Int)]
indices fn n start = takeWhile check $ iterate fn start
where
check (r, c) = c >= 0 && c < n && r >= 0 && r < n
indices1 = indices fn
where
fn (r, c) = (r - 1, c + 1)
indices2 = indices fn
where
fn (r, c) = (r + 1, c + 1)
diagIndices :: Int -> [[(Int, Int)]]
diagIndices n = filter test $ concat [d1, d2]
where
d1 = map (indices1 n) (starters n)
d2 = map (indices2 n) (starters n)
test (x:[]) = False
test _ = True
getNm :: [[a]] -> (Int, Int) -> a
getNm xss (r, c) = (xss !! r) !! c
diagonals :: Int -> [[a]] -> [[a]]
diagonals n xss = map (map (getNm xss)) $ diagIndices n
test50 :: [[Int]]
test50 = replicate 50 [1..50]
test4 :: [[Int]]
test4 = groupN 4 [1..16]
groupN _ [] = []
groupN n xs = as : groupN n bs
where
(as, bs) = splitAt n xs
-- Parser (variable part)
parseRow n = do
row <- count n $ oneOf "BR."
eol <|> eof
return row
parseSingleCase = do
numRows <- parseInt
char ' '
numKs <- parseInt
eol
rows <- count numRows $ parseRow numRows
return $ TestCase numRows numKs rows
eol :: GenParser Char st ()
eol = char '\n' >> return ()
parseIntegral :: Integral a => (String -> a) -> GenParser Char st a
parseIntegral rd = rd <$> (plus <|> minus <|> number)
where
plus = char '+' *> number
minus = (:) <$> char '-' <*> number
number = many1 digit
parseInteger :: GenParser Char st Integer
parseInteger = parseIntegral (read :: String -> Integer)
parseIntegers :: GenParser Char st [Integer]
parseIntegers = parseInteger `sepBy` (char ' ')
parseInt :: GenParser Char st Int
parseInt = parseIntegral (read :: String -> Int)
parseInts :: GenParser Char st [Int]
parseInts = parseInt `sepBy` (char ' ')
--
-- constant part
--
-- Parsing (constant part)
-- | First number is number of test cases
data TestInput = TestInput
Int -- ^ number of 'TestCase's
[TestCase]
deriving (Show, Ord, Eq)
parseTestCases = do
numCases <- parseInt
eol
cases <- count numCases parseSingleCase
return $ TestInput numCases cases
parseCases :: String -> Either ParseError TestInput
parseCases contents = parse parseTestCases "(stdin)" contents
-- main
runOnContent :: String -> IO ()
runOnContent content = do
let parsed = parseCases content
case parsed of
Right (TestInput _ cases) -> mapM_ putStrLn (output (solveCases cases))
Left err -> hPutStrLn stderr $ show err
where
solveCases xs = map solveCase xs
consCase n s = "Case #" ++ (show n) ++ ": " ++ s
output xs = zipWith consCase [1..] xs
-- | command line implementation
run = do
cs <- getContents
runOnContent cs
main = run
|
dirkz/google-code-jam-haskell
|
practice/src/Rotate.hs
|
mpl-2.0
| 4,736
| 0
| 14
| 1,242
| 2,050
| 1,079
| 971
| 126
| 3
|
-- Lookup each entry in lexicon-dict mapping and replace
-- declarations in lexicon with corresponding ones from dict
import Text.Regex
import Data.Maybe
mapFile="lex-dict-mapping.txt"
dicFile="DictMlt.gf"
lexFile="LexiconMlt.gf"
-- Strip whitespace from a string
strip = lstrip . rstrip
lstrip = dropWhile (`elem` " \t")
rstrip = reverse . lstrip . reverse
-- Read file into lines, splitting each at given separator
readAndSplitAt :: FilePath -> String -> IO [(String,String)]
readAndSplitAt file sep = do
content <- readFile file
return [ (strip (x!!0), strip (x!!1))
| x <- map (splitRegex (mkRegex sep)) (lines content)
, length x > 1]
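-- Illustrative behaviour (hypothetical file, not part of the original source):
-- if "example.txt" contains the lines "foo = bar" and "no separator here",
-- then readAndSplitAt "example.txt" "=" yields [("foo","bar")]; lines that do
-- not split into at least two fields are dropped.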
main = do
    -- Load map and construct replacements
    ms <- readAndSplitAt mapFile "="
    ds <- readAndSplitAt dicFile "="
    let rs = [ (k, fromJust (lookup v1 ds))
             | (k,v1) <- ms
             , isJust (lookup v1 ds) ]
    -- Perform replacements (replaceLine is let-bound so it can see rs)
    content <- readFile lexFile
    let replaceLine :: String -> String
        replaceLine l =
            let k = strip $ takeWhile (/='=') l
            in case lookup k rs of
                Just v -> " " ++ k ++ " = " ++ v -- ++ " -- auto-replaced"
                _ -> l
        ls' = map replaceLine (lines content)
    -- Output contents
    -- writeFile lexFile (unlines ls')
    putStr $ unlines ls'
|
johnjcamilleri/Maltese-GF-Resource-Grammar
|
scripts/update_lexicon.hs
|
lgpl-3.0
| 1,303
| 7
| 17
| 328
| 421
| 204
| 217
| 29
| 2
|
--
-- Minio Haskell SDK, (C) 2017 Minio, Inc.
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
--
module Network.Minio.Errors where
import Control.Exception
import qualified Network.HTTP.Conduit as NC
import Lib.Prelude
---------------------------------
-- Errors
---------------------------------
-- | Various validation errors
data MErrV = MErrVSinglePUTSizeExceeded Int64
| MErrVPutSizeExceeded Int64
| MErrVETagHeaderNotFound
| MErrVInvalidObjectInfoResponse
| MErrVInvalidSrcObjSpec Text
| MErrVInvalidSrcObjByteRange (Int64, Int64)
| MErrVCopyObjSingleNoRangeAccepted
| MErrVRegionNotSupported Text
| MErrVXmlParse Text
| MErrVInvalidBucketName Text
| MErrVInvalidObjectName Text
| MErrVInvalidUrlExpiry Int
deriving (Show, Eq)
instance Exception MErrV
-- | Errors returned by S3 compatible service
data ServiceErr = BucketAlreadyExists
| BucketAlreadyOwnedByYou
| NoSuchBucket
| InvalidBucketName
| NoSuchKey
| ServiceErr Text Text
deriving (Show, Eq)
instance Exception ServiceErr
toServiceErr :: Text -> Text -> ServiceErr
toServiceErr "NoSuchKey" _ = NoSuchKey
toServiceErr "NoSuchBucket" _ = NoSuchBucket
toServiceErr "InvalidBucketName" _ = InvalidBucketName
toServiceErr "BucketAlreadyOwnedByYou" _ = BucketAlreadyOwnedByYou
toServiceErr "BucketAlreadyExists" _ = BucketAlreadyExists
toServiceErr code message = ServiceErr code message
-- | Errors thrown by the library
data MinioErr = MErrHTTP NC.HttpException
| MErrIO IOException
| MErrService ServiceErr
| MErrValidation MErrV
deriving (Show)
instance Eq MinioErr where
MErrHTTP _ == MErrHTTP _ = True
MErrHTTP _ == _ = False
MErrIO _ == MErrIO _ = True
MErrIO _ == _ = False
MErrService a == MErrService b = a == b
MErrService _ == _ = False
MErrValidation a == MErrValidation b = a == b
MErrValidation _ == _ = False
instance Exception MinioErr
|
donatello/minio-hs
|
src/Network/Minio/Errors.hs
|
apache-2.0
| 2,790
| 0
| 7
| 789
| 431
| 233
| 198
| 48
| 1
|
module Aeson.Merge where
import Data.Aeson.Types
import Data.HashMap.Strict as HMS
import Data.Text (unpack)
-- Recursively merges together two JSON Objects. Right-biased; latter elements override earlier ones.
-- Copied from travis-meta-yaml/Data.Aeson.Merge, because it has ridiculous dependencies for what it does.
mergeJson :: Value -> Value -> Value
mergeJson (Object a) (Object b) = Object (HMS.unionWith mergeJson a b)
mergeJson _ b = b
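-- Usage sketch (illustrative, not part of the original source); 'object' and
-- '.=' are the standard helpers re-exported by Data.Aeson.Types:
--
-- > mergeJson (object ["a" .= (1 :: Int), "b" .= object ["c" .= (2 :: Int)]])
-- >           (object ["b" .= object ["d" .= (3 :: Int)]])
--
-- yields an object equal to {"a":1,"b":{"c":2,"d":3}}: nested objects are
-- merged recursively, and on a leaf collision the right argument wins.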
-- Matches Value.String - convenient for function composition.
string :: Value -> String
string (String s) = unpack s
string x = error $ show x ++ " is not a String"
|
rdnetto/powerline-hs
|
src/Aeson/Merge.hs
|
apache-2.0
| 615
| 0
| 8
| 98
| 138
| 75
| 63
| 10
| 1
|
{-# LANGUAGE CPP, MagicHash #-}
module Data.Binary.Serialise.CBOR.FlatTerm (
FlatTerm(..),
toFlatTerm,
fromFlatTerm
) where
import Data.Binary.Serialise.CBOR.Encoding (Encoding(..))
import qualified Data.Binary.Serialise.CBOR.Encoding as Enc
import Data.Binary.Serialise.CBOR.Decoding as Dec
import GHC.Word (Word(W#), Word8(W8#), Word64(W64#))
import GHC.Int (Int64(I64#))
import GHC.Exts (Int(I#), Int#, Word#, Float#, Double#)
import GHC.Float (Float(F#), Double(D#), float2Double)
import Data.Word
import Data.Int
import Data.Text (Text)
import Data.ByteString (ByteString)
#include "MachDeps.h"
#if WORD_SIZE_IN_BITS == 64
#define ARCH_64bit
#elif WORD_SIZE_IN_BITS == 32
#else
#error expected WORD_SIZE_IN_BITS to be 32 or 64
#endif
type FlatTerm = [TermToken]
data TermToken =
TkInt {-# UNPACK #-} !Int
| TkInteger !Integer
| TkBytes {-# UNPACK #-} !ByteString
| TkBytesBegin
| TkString {-# UNPACK #-} !Text
| TkStringBegin
| TkListLen {-# UNPACK #-} !Word
| TkListBegin
| TkMapLen {-# UNPACK #-} !Word
| TkMapBegin
| TkBreak
| TkTag {-# UNPACK #-} !Word64
| TkBool !Bool
| TkNull
| TkUndef
| TkSimple {-# UNPACK #-} !Word8
| TkFloat16 {-# UNPACK #-} !Float
| TkFloat32 {-# UNPACK #-} !Float
| TkFloat64 {-# UNPACK #-} !Double
deriving (Eq, Ord, Show)
toFlatTerm :: Encoding -> FlatTerm
toFlatTerm (Encoding tb) = convFlatTerm (tb Enc.TkEnd)
convFlatTerm :: Enc.Tokens -> FlatTerm
convFlatTerm (Enc.TkWord w ts)
| w <= maxInt = TkInt (fromIntegral w) : convFlatTerm ts
| otherwise = TkInteger (fromIntegral w) : convFlatTerm ts
convFlatTerm (Enc.TkWord64 w ts)
| w <= maxInt = TkInt (fromIntegral w) : convFlatTerm ts
| otherwise = TkInteger (fromIntegral w) : convFlatTerm ts
convFlatTerm (Enc.TkInt n ts) = TkInt n : convFlatTerm ts
convFlatTerm (Enc.TkInt64 n ts)
| n <= maxInt = TkInt (fromIntegral n) : convFlatTerm ts
| otherwise = TkInteger (fromIntegral n) : convFlatTerm ts
convFlatTerm (Enc.TkInteger n ts)
| n <= maxInt && n >= minInt = TkInt (fromIntegral n) : convFlatTerm ts
| otherwise = TkInteger n : convFlatTerm ts
convFlatTerm (Enc.TkBytes bs ts) = TkBytes bs : convFlatTerm ts
convFlatTerm (Enc.TkBytesBegin ts) = TkBytesBegin : convFlatTerm ts
convFlatTerm (Enc.TkString st ts) = TkString st : convFlatTerm ts
convFlatTerm (Enc.TkStringBegin ts) = TkStringBegin : convFlatTerm ts
convFlatTerm (Enc.TkListLen n ts) = TkListLen n : convFlatTerm ts
convFlatTerm (Enc.TkListBegin ts) = TkListBegin : convFlatTerm ts
convFlatTerm (Enc.TkMapLen n ts) = TkMapLen n : convFlatTerm ts
convFlatTerm (Enc.TkMapBegin ts) = TkMapBegin : convFlatTerm ts
convFlatTerm (Enc.TkTag n ts) = TkTag (fromIntegral n) : convFlatTerm ts
convFlatTerm (Enc.TkTag64 n ts) = TkTag n : convFlatTerm ts
convFlatTerm (Enc.TkBool b ts) = TkBool b : convFlatTerm ts
convFlatTerm (Enc.TkNull ts) = TkNull : convFlatTerm ts
convFlatTerm (Enc.TkUndef ts) = TkUndef : convFlatTerm ts
convFlatTerm (Enc.TkSimple n ts) = TkSimple n : convFlatTerm ts
convFlatTerm (Enc.TkFloat16 f ts) = TkFloat16 f : convFlatTerm ts
convFlatTerm (Enc.TkFloat32 f ts) = TkFloat32 f : convFlatTerm ts
convFlatTerm (Enc.TkFloat64 f ts) = TkFloat64 f : convFlatTerm ts
convFlatTerm (Enc.TkBreak ts) = TkBreak : convFlatTerm ts
convFlatTerm Enc.TkEnd = []
maxInt, minInt, maxInt64, minInt64,
maxWord, minWord, maxWord64, minWord64 :: Num n => n
maxInt = fromIntegral (maxBound :: Int)
minInt = fromIntegral (minBound :: Int)
maxInt64 = fromIntegral (maxBound :: Int64)
minInt64 = fromIntegral (minBound :: Int64)
maxWord = fromIntegral (maxBound :: Word)
minWord = fromIntegral (minBound :: Word)
maxWord64 = fromIntegral (maxBound :: Word64)
minWord64 = fromIntegral (minBound :: Word64)
unI# :: Int -> Int#
unI# (I# i#) = i#
#ifdef ARCH_64bit
unI64# :: Int64 -> Int#
#else
unI64# :: Int64 -> Int64#
#endif
unI64# (I64# i#) = i#
unW# :: Word -> Word#
unW# (W# w#) = w#
unW8# :: Word8 -> Word#
unW8# (W8# w#) = w#
#ifdef ARCH_64bit
unW64# :: Word64 -> Word#
#else
unW64# :: Word64 -> Word64#
#endif
unW64# (W64# w#) = w#
unF# :: Float -> Float#
unF# (F# f#) = f#
unD# :: Double -> Double#
unD# (D# f#) = f#
fromFlatTerm :: Decoder a -> FlatTerm -> Either String a
fromFlatTerm decoder =
go (getDecodeAction decoder)
where
go (ConsumeWord k) (TkInt n : ts)
| n >= 0 = go (k (unW# (fromIntegral n))) ts
go (ConsumeNegWord k) (TkInt n : ts)
| n < 0 = go (k (unW# (fromIntegral (-n)))) ts
go (ConsumeInt k) (TkInt n : ts) = go (k (unI# n)) ts
go (ConsumeInteger k) (TkInt n : ts) = go (k (fromIntegral n)) ts
go (ConsumeInteger k) (TkInteger n : ts) = go (k n) ts
go (ConsumeListLen k) (TkListLen n : ts)
| n <= maxInt = go (k (unI# (fromIntegral n))) ts
go (ConsumeMapLen k) (TkMapLen n : ts)
| n <= maxInt = go (k (unI# (fromIntegral n))) ts
go (ConsumeTag k) (TkTag n : ts)
| n <= maxWord = go (k (unW# (fromIntegral n))) ts
-- 64bit variants for 32bit machines
#ifndef ARCH_64bit
    -- Assumed 32-bit continuation kinds: Int64#/Word64#, matching the
    -- unI64#/unW64# helpers defined above.
    go (ConsumeWord64 k) (TkInt n : ts) | n >= 0 = go (k (unW64# (fromIntegral n))) ts
    go (ConsumeWord64 k) (TkInteger n : ts) | n >= 0 = go (k (unW64# (fromIntegral n))) ts
    go (ConsumeNegWord64 k) (TkInt n : ts) | n < 0 = go (k (unW64# (fromIntegral (-n)))) ts
    go (ConsumeInt64 k) (TkInt n : ts) = go (k (unI64# (fromIntegral n))) ts
    go (ConsumeListLen64 k) (TkListLen n : ts) = go (k (unI64# (fromIntegral n))) ts
    go (ConsumeMapLen64 k) (TkMapLen n : ts) = go (k (unI64# (fromIntegral n))) ts
    go (ConsumeTag64 k) (TkTag n : ts) = go (k (unW64# n)) ts
#endif
go (ConsumeFloat k) (TkFloat16 f : ts) = go (k (unF# f)) ts
go (ConsumeFloat k) (TkFloat32 f : ts) = go (k (unF# f)) ts
go (ConsumeDouble k) (TkFloat16 f : ts) = go (k (unD# (float2Double f))) ts
go (ConsumeDouble k) (TkFloat32 f : ts) = go (k (unD# (float2Double f))) ts
go (ConsumeDouble k) (TkFloat64 f : ts) = go (k (unD# f)) ts
go (ConsumeBytes k) (TkBytes bs : ts) = go (k bs) ts
go (ConsumeString k) (TkString st : ts) = go (k st) ts
go (ConsumeBool k) (TkBool b : ts) = go (k b) ts
go (ConsumeSimple k) (TkSimple n : ts) = go (k (unW8# n)) ts
go (ConsumeBytesIndef da) (TkBytesBegin : ts) = go da ts
go (ConsumeStringIndef da) (TkStringBegin : ts) = go da ts
go (ConsumeListLenIndef da) (TkListBegin : ts) = go da ts
go (ConsumeMapLenIndef da) (TkMapBegin : ts) = go da ts
go (ConsumeNull da) (TkNull : ts) = go da ts
go (ConsumeListLenOrIndef k) (TkListLen n : ts)
| n <= maxInt = go (k (unI# (fromIntegral n))) ts
go (ConsumeListLenOrIndef k) (TkListBegin : ts) = go (k (-1#)) ts
go (ConsumeMapLenOrIndef k) (TkMapLen n : ts)
| n <= maxInt = go (k (unI# (fromIntegral n))) ts
go (ConsumeMapLenOrIndef k) (TkMapBegin : ts) = go (k (-1#)) ts
go (ConsumeBreakOr k) (TkBreak : ts) = go (k True) ts
go (ConsumeBreakOr k) ts@(_ : _ ) = go (k False) ts
-- go (PeekTokenType k) =
go (Fail msg) _ = Left msg
go (Done x) [] = Right x
go (Done _) ts = Left ("trailing tokens: " ++ show (take 5 ts))
go (ConsumeWord _) ts = unexpected "decodeWord" ts
go (ConsumeNegWord _) ts = unexpected "decodeNegWord" ts
go (ConsumeInt _) ts = unexpected "decodeInt" ts
go (ConsumeInteger _) ts = unexpected "decodeInteger" ts
go (ConsumeListLen _) ts = unexpected "decodeListLen" ts
go (ConsumeMapLen _) ts = unexpected "decodeMapLen" ts
go (ConsumeTag _) ts = unexpected "decodeTag" ts
#ifndef ARCH_64bit
    go (ConsumeWord64 _) ts = unexpected "decodeWord64" ts
    go (ConsumeNegWord64 _) ts = unexpected "decodeNegWord64" ts
    go (ConsumeInt64 _) ts = unexpected "decodeInt64" ts
    go (ConsumeListLen64 _) ts = unexpected "decodeListLen64" ts
    go (ConsumeMapLen64 _) ts = unexpected "decodeMapLen64" ts
    go (ConsumeTag64 _) ts = unexpected "decodeTag64" ts
#endif
go (ConsumeFloat _) ts = unexpected "decodeFloat" ts
go (ConsumeDouble _) ts = unexpected "decodeDouble" ts
go (ConsumeBytes _) ts = unexpected "decodeBytes" ts
go (ConsumeString _) ts = unexpected "decodeString" ts
go (ConsumeBool _) ts = unexpected "decodeBool" ts
go (ConsumeSimple _) ts = unexpected "decodeSimple" ts
go (ConsumeBytesIndef _) ts = unexpected "decodeBytesIndef" ts
go (ConsumeStringIndef _) ts = unexpected "decodeStringIndef" ts
go (ConsumeListLenIndef _) ts = unexpected "decodeListLenIndef" ts
go (ConsumeMapLenIndef _) ts = unexpected "decodeMapLenIndef" ts
go (ConsumeNull _) ts = unexpected "decodeNull" ts
go (ConsumeListLenOrIndef _) ts = unexpected "decodeListLenOrIndef" ts
go (ConsumeMapLenOrIndef _) ts = unexpected "decodeMapLenOrIndef" ts
go (ConsumeBreakOr _) ts = unexpected "decodeBreakOr" ts
unexpected name [] = Left $ name ++ ": unexpected end of input"
unexpected name (tok:_) = Left $ name ++ ": unexpected token " ++ show tok
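-- Round-trip sketch (illustrative; assumes 'Enc.encodeInt' and 'Dec.decodeInt'
-- as exported by this package's Encoding and Decoding modules):
--
-- > fromFlatTerm Dec.decodeInt (toFlatTerm (Enc.encodeInt 42)) == Right 42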
data Loc = TopLevelSingle
| TopLevelSequence
| InString Int Loc
| InBytes Int Loc
| InListN Int Int Loc
| InList Int Loc
| InMapNKey Int Int Loc
| InMapNVal Int Int Loc
| InMapKey Int Loc
| InMapVal Int Loc
| InTagged Word64 Loc
deriving Show
validFlatTerm :: FlatTerm -> Bool
validFlatTerm ts =
either (const False) (const True) $ do
ts' <- validateTerm TopLevelSingle ts
case ts' of
[] -> return ()
_ -> Left "trailing data"
type Valid a = Either String a
validateTerm :: Loc -> [TermToken] -> Valid [TermToken]
validateTerm _loc (TkInt _ : ts) = return ts
validateTerm _loc (TkInteger _ : ts) = return ts
validateTerm _loc (TkBytes _ : ts) = return ts
validateTerm loc (TkBytesBegin : ts) = validateBytes loc 0 ts
validateTerm _loc (TkString _ : ts) = return ts
validateTerm loc (TkStringBegin : ts) = validateString loc 0 ts
validateTerm loc (TkListLen len : ts)
| len <= maxInt = validateListN loc 0 (fromIntegral len) ts
| otherwise = Left "list len too long (> max int)"
validateTerm loc (TkListBegin : ts) = validateList loc 0 ts
validateTerm loc (TkMapLen len : ts)
| len <= maxInt = validateMapN loc 0 (fromIntegral len) ts
| otherwise = Left "map len too long (> max int)"
validateTerm loc (TkMapBegin : ts) = validateMap loc 0 ts
validateTerm loc (TkTag w : ts) = validateTerm (InTagged w loc) ts
validateTerm _loc (TkBool _ : ts) = return ts
validateTerm _loc (TkNull : ts) = return ts
validateTerm _loc (TkUndef : ts) = return ts
validateTerm loc (TkBreak : _) = unexpectedToken TkBreak loc
validateTerm _loc (TkSimple _ : ts) = return ts
validateTerm _loc (TkFloat16 _ : ts) = return ts
validateTerm _loc (TkFloat32 _ : ts) = return ts
validateTerm _loc (TkFloat64 _ : ts) = return ts
validateTerm loc [] = unexpectedEof loc
unexpectedToken :: TermToken -> Loc -> Valid a
unexpectedToken tok loc = Left $ "unexpected token " ++ show tok
++ ", in context " ++ show loc
unexpectedEof :: Loc -> Valid a
unexpectedEof loc = Left $ "unexpected end of input in context " ++ show loc
validateBytes :: Loc -> Int -> [TermToken] -> Either String [TermToken]
validateBytes _ _ (TkBreak : ts) = return ts
validateBytes ploc i (TkBytes _ : ts) = validateBytes ploc (i+1) ts
validateBytes ploc i (tok : _) = unexpectedToken tok (InBytes i ploc)
validateBytes ploc i [] = unexpectedEof (InBytes i ploc)
validateString :: Loc -> Int -> [TermToken] -> Either String [TermToken]
validateString _ _ (TkBreak : ts) = return ts
validateString ploc i (TkString _ : ts) = validateString ploc (i+1) ts
validateString ploc i (tok : _) = unexpectedToken tok (InString i ploc)
validateString ploc i [] = unexpectedEof (InString i ploc)
validateListN :: Loc -> Int -> Int -> [TermToken] -> Either String [TermToken]
validateListN _ i len ts | i == len = return ts
validateListN ploc i len ts = do
ts' <- validateTerm (InListN i len ploc) ts
validateListN ploc (i+1) len ts'
validateList :: Loc -> Int -> [TermToken] -> Either String [TermToken]
validateList _ _ (TkBreak : ts) = return ts
validateList ploc i ts = do
ts' <- validateTerm (InList i ploc) ts
validateList ploc (i+1) ts'
validateMapN :: Loc -> Int -> Int -> [TermToken] -> Either String [TermToken]
validateMapN _ i len ts | i == len = return ts
validateMapN ploc i len ts = do
ts' <- validateTerm (InMapNKey i len ploc) ts
ts'' <- validateTerm (InMapNVal i len ploc) ts'
validateMapN ploc (i+1) len ts''
validateMap :: Loc -> Int -> [TermToken] -> Either String [TermToken]
validateMap _ _ (TkBreak : ts) = return ts
validateMap ploc i ts = do
ts' <- validateTerm (InMapKey i ploc) ts
ts'' <- validateTerm (InMapVal i ploc) ts'
validateMap ploc (i+1) ts''
|
thoughtpolice/binary-serialise-cbor
|
Data/Binary/Serialise/CBOR/FlatTerm.hs
|
bsd-3-clause
| 13,549
| 3
| 16
| 3,811
| 5,035
| 2,515
| 2,520
| -1
| -1
|
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.Raw.EXT.CullVertex
-- Copyright : (c) Sven Panne 2015
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
-- The <https://www.opengl.org/registry/specs/EXT/cull_vertex.txt EXT_cull_vertex> extension.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.Raw.EXT.CullVertex (
-- * Enums
gl_CULL_VERTEX_EXT,
gl_CULL_VERTEX_EYE_POSITION_EXT,
gl_CULL_VERTEX_OBJECT_POSITION_EXT,
-- * Functions
glCullParameterdvEXT,
glCullParameterfvEXT
) where
import Graphics.Rendering.OpenGL.Raw.Tokens
import Graphics.Rendering.OpenGL.Raw.Functions
|
phaazon/OpenGLRaw
|
src/Graphics/Rendering/OpenGL/Raw/EXT/CullVertex.hs
|
bsd-3-clause
| 824
| 0
| 4
| 97
| 58
| 46
| 12
| 8
| 0
|
module JavaScript.AceAjax.Raw.CommandManager where
import qualified GHCJS.Types as GHCJS
import qualified GHCJS.Marshal as GHCJS
import qualified Data.Typeable
import GHCJS.FFI.TypeScript
import GHCJS.DOM.Types (HTMLElement)
import JavaScript.AceAjax.Raw.Types
foreign import javascript "$1.byName" byName :: CommandManager -> IO (GHCJS.JSRef obj0)
foreign import javascript "$1.commands" commands :: CommandManager -> IO (GHCJS.JSRef obj0)
foreign import javascript "$1.platform" platform :: CommandManager -> IO (GHCJS.JSString)
foreign import javascript "$1.addCommands($2)" addCommands :: (CommandManager) -> (GHCJS.JSArray (EditorCommand)) -> IO (())
foreign import javascript "$1.addCommand($2)" addCommand :: (CommandManager) -> (EditorCommand) -> IO (())
foreign import javascript "$1.exec($2,$3,$4)" exec :: (CommandManager) -> (GHCJS.JSString) -> (Editor) -> (GHCJS.JSRef obj0) -> IO (())
|
fpco/ghcjs-from-typescript
|
ghcjs-ace/JavaScript/AceAjax/Raw/CommandManager.hs
|
bsd-3-clause
| 900
| 15
| 8
| 98
| 260
| 148
| 112
| 13
| 0
|
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FlexibleInstances #-}
module Database.Relational.Schema.SQLServerSyscat.Columns where
import Data.Int (Int32)
import Database.Record.TH (derivingShow)
import Database.Relational.Query.TH (defineTableTypesAndRecordDefault)
$(defineTableTypesAndRecordDefault
"sys" "columns"
[
-- column schema type length NULL
-- --------------------- ------- ------------------- -------- ------
-- object_id sys int 4 No
("object_id", [t|Int32|]),
-- name sys sysname(nvarchar) 128 Yes
("name", [t|Maybe String|]),
-- column_id sys int 4 No
("column_id", [t|Int32|]),
-- system_type_id sys tinyint 1 No
--("system_type_id", [t|Int32|]),
-- user_type_id sys int 4 No
("user_type_id", [t|Int32|]),
-- max_length sys smallint 2 No
--("max_length", [t|Int32|]),
-- precision sys tinyint 1 No
--("precision", [t|Int32|]),
-- scale sys tinyint 1 No
--("scale", [t|Int32|]),
-- collation_name sys sysname(nvarchar) 128 Yes
--("collation_name", [t|Maybe String|]),
-- is_nullable sys bit 1 Yes
("is_nullable", [t|Maybe Bool|])--,
-- is_ansi_padded sys bit 1 No
--("is_ansi_padded", [t|Bool|]),
-- is_rowguidcol sys bit 1 No
--("is_rowguidcol", [t|Bool|]),
-- is_identity sys bit 1 No
--("is_identity", [t|Bool|]),
-- is_computed sys bit 1 No
--("is_computed", [t|Bool|]),
-- is_filestream sys bit 1 No
--("is_filestream", [t|Bool|]),
-- is_replicated sys bit 1 Yes
--("is_replicated", [t|Maybe Bool|]),
-- is_non_sql_subscribed sys bit 1 Yes
--("is_non_sql_subscribed", [t|Maybe Bool|]),
-- is_merge_published sys bit 1 Yes
--("is_merge_published", [t|Maybe Bool|]),
-- is_dts_repllicated sys bit 1 Yes
--("is_dts_replicated", [t|Maybe Bool|]),
-- is_xml_document sys bit 1 No
--("is_xml_document", [t|Bool|]),
-- xml_collection_id sys int 4 No
--("xml_collection_id", [t|Int32|]),
-- default_object_id sys int 4 No
--("default_object_id", [t|Int32|]),
-- rule_object_id sys int 4 No
--("rule_object_id", [t|Int32|]),
-- is_sparse sys bit 1 Yes
--("is_sparse", [t|Maybe Bool|]),
-- is_column_set sys bit 1 Yes
--("is_column_set", [t|Maybe Bool|])
]
[derivingShow])
|
yuga/haskell-relational-record-driver-sqlserver
|
src/Database/Relational/Schema/SQLServerSyscat/Columns.hs
|
bsd-3-clause
| 3,121
| 0
| 9
| 1,282
| 176
| 137
| 39
| 15
| 0
|
module Database.Hitcask.Compact where
import Database.Hitcask.Types
import Database.Hitcask.Restore
import Database.Hitcask.Parsing
import Database.Hitcask.Put
import Database.Hitcask.Hint
import Database.Hitcask.Logs
import Control.Concurrent.STM
import qualified Data.HashMap.Strict as M
import System.IO
import System.Directory
import qualified Data.ByteString.Char8 as B
import Data.Serialize.Get
compact :: Hitcask -> IO ()
compact db = do
immutable <- allNonActive db
merged <- mapM compactLogFile immutable
replaceNonActive db merged
removeAlreadyMerged immutable
allNonActive :: Hitcask -> IO [LogFile]
allNonActive db = fmap nonActive . readTVarIO $ logs db
nonActive :: HitcaskLogs -> [LogFile]
nonActive l = filter (not . isMerged . path) . M.elems $ M.delete (path $ current l) (files l)
compactLogFile :: LogFile -> IO (MergingLog, KeyDir)
compactLogFile l = do
currentContent <- readState l
writeMergedContent l currentContent
readState :: LogFile -> IO (M.HashMap Key (Timestamp, Value))
readState f = do
let h = handle f
hSeek h AbsoluteSeek 0
wholeFile <- readImmutableLog f
let r = parseMany wholeFile readLogEntry'
return $! M.fromList (reverse r)
readLogEntry' :: Get (Key, (Timestamp, Value))
readLogEntry' = do
_ <- remaining
crc <- getWord32be --crc
ts <- getInt32
keySize <- getWord32be
vSize <- getWord32be
key <- getByteString $ fromIntegral keySize
value <- getByteString $ fromIntegral vSize
checkValue value crc
return (key, (ts, value))
writeMergedContent :: LogFile -> M.HashMap Key (Int, Value) -> IO (MergingLog, KeyDir)
writeMergedContent l ks = do
newLog <- createMergedLog l
r <- mapM (appendToLog' newLog) (M.toList ks)
closeHint (hintFile newLog)
return (newLog, M.fromList r)
appendToLog' :: MergingLog -> (Key, (Timestamp, Value)) -> IO (Key, ValueLocation)
appendToLog' (MergingLog l _ h) (key, (time, value)) = do
loc <- writeValueWithTimestamp l time key value
writeHint h key loc
return (key, loc)
createMergedLog :: LogFile -> IO MergingLog
createMergedLog (LogFile _ p) = do
l <- openLogFile (p ++ ".merged")
h <- createHintFile p
return $! MergingLog l p h
readImmutableLog :: LogFile -> IO B.ByteString
readImmutableLog (LogFile h _) = do
s <- hFileSize h
hSeek h AbsoluteSeek 0
B.hGetNonBlocking h (fromIntegral s)
replaceNonActive :: Hitcask -> [(MergingLog, KeyDir)] -> IO ()
replaceNonActive db s = atomically $ mapM_ (swapInLog db) s
swapInLog :: Hitcask -> (MergingLog, KeyDir) -> STM ()
swapInLog db (l, mergedKeys) = do
modifyTVar (logs db) (addMergedLog l)
modifyTVar (keys db) $ \m ->
addMergedKeyDir m mergedKeys
addMergedLog :: MergingLog -> HitcaskLogs -> HitcaskLogs
addMergedLog newLog = removeMergedLog newLog . addNewLog newLog
removeMergedLog :: MergingLog -> HitcaskLogs -> HitcaskLogs
removeMergedLog l ls = ls { files = M.delete (originalFilePath l) (files ls) }
addNewLog :: MergingLog -> HitcaskLogs -> HitcaskLogs
addNewLog l ls = ls { files = M.insert (path $ mergedLog l)
(mergedLog l)
(files ls) }
addMergedKeyDir :: KeyDir -> KeyDir -> KeyDir
addMergedKeyDir = M.unionWith latestWrite
latestWrite :: ValueLocation -> ValueLocation -> ValueLocation
latestWrite v1 v2
| timestamp v1 > timestamp v2 = v1
| otherwise = v2
removeAlreadyMerged :: [LogFile] -> IO ()
removeAlreadyMerged = mapM_ kill
where kill x = do
hClose $ handle x
removeFile $ path x
|
tcrayford/hitcask
|
Database/Hitcask/Compact.hs
|
bsd-3-clause
| 3,522
| 0
| 11
| 680
| 1,260
| 630
| 630
| 92
| 1
|
{-# LANGUAGE TemplateHaskell #-}
module Cloud.AWS.EC2.Types.NetworkInterface
( NetworkInterface(..)
, NetworkInterfaceAssociation(..)
, NetworkInterfaceAttachment(..)
, NetworkInterfaceParam(..)
, NetworkInterfacePrivateIpAddress(..)
, NetworkInterfaceStatus(..)
, SecondaryPrivateIpAddressParam(..)
) where
import Cloud.AWS.EC2.Types.Common (Group, ResourceTag)
import Cloud.AWS.Lib.FromText (deriveFromText)
import Data.Text (Text)
import Data.Time (UTCTime)
import Data.IP (IPv4)
data NetworkInterface = NetworkInterface
{ networkInterfaceId :: Text
, networkInterfaceSubnetId :: Text
, networkInterfaceVpcId :: Text
, networkInterfaceAvailabilityZone :: Text
, networkInterfaceDescription :: Maybe Text
, networkInterfaceOwnerId :: Text
, networkInterfaceRequesterId :: Maybe Text
, networkInterfaceRequesterManaged :: Text
, networkInterfaceStatus :: NetworkInterfaceStatus
, networkInterfaceMacAddress :: Text
, networkInterfacePrivateIpAddress :: IPv4
, networkInterfacePrivateDnsName :: Maybe Text
, networkInterfaceSourceDestCheck :: Bool
, networkInterfaceGroupSet :: [Group]
, networkInterfaceAttachment
:: Maybe NetworkInterfaceAttachment
, networkInterfaceAssociation
:: Maybe NetworkInterfaceAssociation
, networkInterfaceTagSet :: [ResourceTag]
, networkInterfacePrivateIpAddressesSet
:: [NetworkInterfacePrivateIpAddress]
}
deriving (Show, Read, Eq)
data NetworkInterfaceAssociation = NetworkInterfaceAssociation
{ networkInterfaceAssociationAttachmentId :: Maybe Text
, networkInterfaceAssociationInstanceId :: Maybe Text
, networkInterfaceAssociationPublicIp :: IPv4
, networkInterfaceAssociationPublicDnsName :: Maybe Text
, networkInterfaceAssociationIpOwnerId :: Text
, networkInterfaceAssociationId :: Maybe Text
}
deriving (Show, Read, Eq)
data NetworkInterfaceAttachment = NetworkInterfaceAttachment
{ networkInterfaceAttachmentId :: Text
, networkInterfaceAttachmentInstanceId :: Maybe Text
, networkInterfaceAttachmentInstanceOwnerId :: Text
, networkInterfaceAttachmentDeviceIndex :: Int
, networkInterfaceAttachmentStatus :: Text
, networkInterfaceAttachmentAttachTime :: UTCTime
, networkInterfaceAttachmentDeleteOnTermination :: Bool
}
deriving (Show, Read, Eq)
data NetworkInterfaceParam
= NetworkInterfaceParamCreate
{ networkInterfaceParamCreateDeviceIndex :: Int
, networkInterfaceParamCreateSubnetId :: Text
, networkInterfaceParamCreateDescription :: Text
, networkInterfaceParamCreatePrivateIpAddress
:: Maybe IPv4
, networkInterfaceParamCreatePrivateIpAddresses
:: SecondaryPrivateIpAddressParam
, networkInterfaceParamCreateSecurityGroupIds :: [Text]
, networkInterfaceParamCreateDeleteOnTermination :: Bool
}
| NetworkInterfaceParamAttach
{ networkInterfaceParamAttachInterfaceId :: Text
, networkInterfaceParamAttachDeviceIndex :: Int
, networkInterfaceParamAttachDeleteOnTermination :: Bool
}
deriving (Show, Read, Eq)
data NetworkInterfacePrivateIpAddress
= NetworkInterfacePrivateIpAddress
{ networkInterfacePrivateIpAddressPrivateIpAddress :: IPv4
, networkInterfacePrivateIpAddressDnsName :: Maybe Text
, networkInterfacePrivateIpAddressPrimary :: Bool
, networkInterfacePrivateIpAddressAssociation
:: Maybe NetworkInterfaceAssociation
}
deriving (Show, Read, Eq)
data NetworkInterfaceStatus
= NetworkInterfaceStatusAvailable
| NetworkInterfaceStatusInUse
| NetworkInterfaceStatusPending
deriving (Show, Read, Eq)
data SecondaryPrivateIpAddressParam
= SecondaryPrivateIpAddressParamNothing
| SecondaryPrivateIpAddressParamCount Int
| SecondaryPrivateIpAddressParamSpecified
{ secondaryPrivateIpAddressParamSpecifiedAddresses :: [IPv4]
, secondaryPrivateIpAddressParamSpecifiedPrimary
:: Maybe Int
}
deriving (Show, Read, Eq)
deriveFromText "NetworkInterfaceStatus"
["available", "in-use", "pending"]
|
worksap-ate/aws-sdk
|
Cloud/AWS/EC2/Types/NetworkInterface.hs
|
bsd-3-clause
| 4,176
| 0
| 9
| 749
| 660
| 404
| 256
| 93
| 0
|
module Aws.Core.Credentials (
Credentials (..),
credentialsDefaultFile,
credentialsDefaultKey,
loadCredentialsFromFile,
loadCredentialsFromEnv,
loadCredentialsFromEnvOrFile,
loadCredentialsDefault,
) where
import Control.Applicative ((<$>), (<*>))
import Control.Monad (mplus)
import Control.Monad.IO.Class (MonadIO, liftIO)
import qualified Data.ByteString as B
import Data.List (find)
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.Text.IO as T
import System.Directory (getHomeDirectory)
import System.Environment (getEnvironment)
import System.FilePath ((</>))
-- | AWS access credentials.
data Credentials = Credentials {
accessKeyID :: B.ByteString,
secretAccessKey :: B.ByteString
} deriving (Show)
-- | The file where access credentials are loaded, when using 'loadCredentialsDefault'.
--
-- Value: /<user directory>/@/.aws-keys@
credentialsDefaultFile :: MonadIO io => io FilePath
credentialsDefaultFile = liftIO $ (</> ".aws-keys") <$> getHomeDirectory
-- | The key to be used in the access credential file that is loaded, when using 'loadCredentialsDefault'.
--
-- Value: @default@
credentialsDefaultKey :: T.Text
credentialsDefaultKey = "default"
-- | Load credentials from a (text) file given a key name.
--
-- The file consists of a sequence of lines, each in the following format:
--
-- @keyName awsKeyID awsKeySecret@
loadCredentialsFromFile :: MonadIO io => FilePath -> T.Text -> io (Maybe Credentials)
loadCredentialsFromFile file key = liftIO $ do
contents <- map T.words . T.lines <$> T.readFile file
return $ do
[_key, keyID, secret] <- find (hasKey key) contents
return Credentials { accessKeyID = T.encodeUtf8 keyID, secretAccessKey = T.encodeUtf8 secret }
where
hasKey _ [] = False
hasKey k (k2 : _) = k == k2
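-- Illustrative sketch (hypothetical path and key material, not part of the
-- original source): given a file @/home/alice/.aws-keys@ containing the line
--
-- @default AKIDEXAMPLE wJalrXUtnFEMIexample@
--
-- @loadCredentialsFromFile "/home/alice/.aws-keys" "default"@ returns those
-- two fields as a 'Just' 'Credentials' value, and 'Nothing' when no line
-- starts with the requested key name.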
-- | Load credentials from the environment variables @AWS_ACCESS_KEY_ID@ and @AWS_ACCESS_KEY_SECRET@
-- (or @AWS_SECRET_ACCESS_KEY@), if possible.
loadCredentialsFromEnv :: MonadIO io => io (Maybe Credentials)
loadCredentialsFromEnv = liftIO $ do
env <- getEnvironment
let lk = flip lookup env
keyID = lk "AWS_ACCESS_KEY_ID"
secret = lk "AWS_ACCESS_KEY_SECRET" `mplus` lk "AWS_SECRET_ACCESS_KEY"
return (Credentials <$> (T.encodeUtf8 . T.pack <$> keyID) <*> (T.encodeUtf8 . T.pack <$> secret))
-- | Load credentials from environment variables if possible, or alternatively from a file with a given key name.
--
-- See 'loadCredentialsFromEnv' and 'loadCredentialsFromFile' for details.
loadCredentialsFromEnvOrFile :: MonadIO io => FilePath -> T.Text -> io (Maybe Credentials)
loadCredentialsFromEnvOrFile file key =
do
envcr <- loadCredentialsFromEnv
case envcr of
Just cr -> return (Just cr)
Nothing -> loadCredentialsFromFile file key
-- | Load credentials from environment variables if possible, or alternatively from the default file with the default
-- key name.
--
-- Default file: /<user directory>/@/.aws-keys@
-- Default key name: @default@
--
-- See 'loadCredentialsFromEnv' and 'loadCredentialsFromFile' for details.
loadCredentialsDefault :: MonadIO io => io (Maybe Credentials)
loadCredentialsDefault = do
file <- credentialsDefaultFile
loadCredentialsFromEnvOrFile file credentialsDefaultKey
|
RayRacine/aws
|
Aws/Core/Credentials.hs
|
bsd-3-clause
| 3,449
| 0
| 15
| 677
| 672
| 374
| 298
| 53
| 2
|
-- The Timber compiler <timber-lang.org>
--
-- Copyright 2008-2009 Johan Nordlander <nordland@csee.ltu.se>
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions
-- are met:
--
-- 1. Redistributions of source code must retain the above copyright
-- notice, this list of conditions and the following disclaimer.
--
-- 2. Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
--
-- 3. Neither the names of the copyright holder and any identified
-- contributors, nor the names of their affiliations, may be used to
-- endorse or promote products derived from this software without
-- specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE CONTRIBUTORS ``AS IS'' AND ANY EXPRESS
-- OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-- WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-- DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE FOR
-- ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-- DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
-- OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
-- HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
-- STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
-- ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-- POSSIBILITY OF SUCH DAMAGE.
module Token where
import Char
data Token
= VarId (String,String)
| ConId (String,String)
| VarSym (String,String)
| ConSym (String,String)
| IntTok String
| FloatTok String
| Character Char
| StringTok String
{-
Symbols
-}
| LeftParen
| RightParen
| SemiColon
| LeftCurly
| RightCurly
| VRightCurly -- a virtual close brace
| LeftSquare
| RightSquare
| Comma
| BackQuote
{-
Reserved operators
-}
| Assign
| Dot
| DotDot
| DoubleColon
| Equals
| Backslash
| Bar
| LeftArrow
| RightArrow
| Tilde
| Wildcard
| Backslash2
{-
Reserved Ids
-}
| KW_Action
| KW_After
| KW_Before
| KW_Case
| KW_Class
| KW_Data
| KW_Default
| KW_Do
| KW_Else
| KW_Elsif
| KW_Extern
| KW_Forall
| KW_If
| KW_Import
| KW_Instance
| KW_In
| KW_Let
| KW_Module
| KW_New
| KW_Of
| KW_Private
| KW_Request
| KW_Result
| KW_Struct
| KW_Then
| KW_Type
| KW_Typeclass
| KW_Use
| KW_Where
| EOF
deriving (Eq, Show)
reserved_ops :: [(String, Token)]
reserved_ops
= [
( ".", Dot ),
( "..", DotDot ),
( "::", DoubleColon ),
( ":=", Assign ),
( "=", Equals ),
( "\\", Backslash ),
( "|", Bar ),
( "<-", LeftArrow ),
( "->", RightArrow ),
( "_", Wildcard ),
( "\\\\", Backslash2 )
]
reserved_ids :: [(String, Token)]
reserved_ids
= [
( "action", KW_Action ),
( "after", KW_After ),
( "before", KW_Before ),
( "case", KW_Case ),
( "class", KW_Class ),
( "data", KW_Data ),
( "default", KW_Default),
( "do", KW_Do ),
( "else", KW_Else ),
( "elsif", KW_Elsif ),
( "extern", KW_Extern ),
( "forall", KW_Forall ),
( "if", KW_If ),
( "import", KW_Import ),
( "instance", KW_Instance ),
( "in", KW_In ),
( "let", KW_Let ),
( "module", KW_Module ),
( "new", KW_New ),
( "of", KW_Of ),
( "private", KW_Private ),
( "request", KW_Request ),
( "result", KW_Result ),
( "struct", KW_Struct ),
( "then", KW_Then ),
( "type", KW_Type ),
( "typeclass", KW_Typeclass ),
( "use", KW_Use ),
( "where", KW_Where )
]
tab_length = 8 :: Int
isIdent c = isAlpha c || isDigit c || c == '\'' || c == '_'
isSymbol c = elem c ":!#$%&*+./<=>?@\\^|-~"
data LexInt =
Decimal (String,String)
| Octal (String,String)
| Hexadecimal (String,String)
|
mattias-lundell/timber-llvm
|
src/Token.hs
|
bsd-3-clause
| 4,684
| 0
| 10
| 1,588
| 768
| 507
| 261
| 117
| 1
|
module Main where
import Test.Tasty
import Test.Tasty.TestSet
import qualified Test.Unit.Module
import qualified Test.Unit.Persistence
import qualified Test.Unit.UserStory
import qualified Test.Property.Persistence
import qualified Test.Regression.UserStory
main = do
Test.Tasty.defaultMain $ buildTestTree "" $ do
Test.Unit.Module.tests
Test.Unit.Persistence.tests
Test.Unit.UserStory.tests
Test.Regression.UserStory.tests
Test.Property.Persistence.tests
{-
main = do
Test.Property.Persistence.createTestData 1
-}
|
andorp/bead
|
test/TestMain.hs
|
bsd-3-clause
| 543
| 0
| 10
| 71
| 108
| 67
| 41
| 15
| 1
|
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE FlexibleContexts #-}
module Binary where
import Text.Parsec
import Control.Applicative
import Numeric (readHex)
import System.IO
import Control.Monad
import Data.Array.IO
import Control.Exception.Base
import Data.Word
import Data.Bits
import qualified Data.ByteString.Internal as BS (c2w, w2c)
readBinary :: IOUArray Int Word8 -> FilePath -> Word16 -> IO ()
readBinary arr filename origin = do
--arr <- newArray (0, 0xffff) 0 :: IO (IOUArray Int Word8)
handle <- openBinaryFile filename ReadMode
contents <- hGetContents handle
-- n <- hGetArray handle arr 0xffff
-- putStrLn $ "Read " ++ show n ++ " bytes"
-- hClose handle
forM_ (zip [0..] contents) $ \(i, c) ->
writeArray arr (i+fromIntegral origin) (BS.c2w c)
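-- Usage sketch (hypothetical file name, not part of the original source):
--
-- > do arr <- newArray (0, 0xffff) 0 :: IO (IOUArray Int Word8)
-- >    readBinary arr "program.bin" 0x0600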
|
dpiponi/Bine
|
src/Binary.hs
|
bsd-3-clause
| 809
| 0
| 12
| 148
| 195
| 109
| 86
| 19
| 1
|
{-# OPTIONS_GHC -Wall -Werror #-}
module Main(main) where
import Control.DeepSeq
import Lobster.Common
main :: IO ()
main = do
(options,fns) <- processOptions
domain <- parseAndInterpretPolicyFiles_ options fns
seq (rnf domain) $ return ()
|
GaloisInc/sk-dev-platform
|
libs/lobster-validate/Main.hs
|
bsd-3-clause
| 248
| 0
| 10
| 41
| 81
| 42
| 39
| 9
| 1
|
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE CPP, MagicHash, UnboxedTuples #-}
#if __GLASGOW_HASKELL__ >= 701
{-# LANGUAGE Trustworthy #-}
#endif
-----------------------------------------------------------------------------
-- |
-- Module : Control.Monad.STM
-- Copyright : (c) The University of Glasgow 2004
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : libraries@haskell.org
-- Stability : experimental
-- Portability : non-portable (requires STM)
--
-- Software Transactional Memory: a modular composable concurrency
-- abstraction. See
--
-- * /Composable memory transactions/, by Tim Harris, Simon Marlow, Simon
-- Peyton Jones, and Maurice Herlihy, in /ACM Conference on Principles
-- and Practice of Parallel Programming/ 2005.
-- <http://research.microsoft.com/Users/simonpj/papers/stm/index.htm>
--
-- This module only defines the 'STM' monad; you probably want to
-- import "Control.Concurrent.STM" (which exports "Control.Monad.STM").
-----------------------------------------------------------------------------
module Control.Monad.STM (
STM,
atomically,
#ifdef __GLASGOW_HASKELL__
always,
alwaysSucceeds,
retry,
orElse,
check,
#endif
throwSTM,
catchSTM
) where
#ifdef __GLASGOW_HASKELL__
#if ! (MIN_VERSION_base(4,3,0))
import GHC.Conc hiding (catchSTM)
import Control.Monad ( MonadPlus(..) )
import Control.Exception
#else
import GHC.Conc
#endif
import GHC.Exts
import Control.Monad.Fix
#else
import Control.Sequential.STM
#endif
#ifdef __GLASGOW_HASKELL__
#if ! (MIN_VERSION_base(4,3,0))
import Control.Applicative
import Control.Monad (ap)
#endif
#endif
#ifdef __GLASGOW_HASKELL__
#if ! (MIN_VERSION_base(4,3,0))
instance MonadPlus STM where
mzero = retry
mplus = orElse
instance Applicative STM where
pure = return
(<*>) = ap
instance Alternative STM where
empty = retry
(<|>) = orElse
#endif
check :: Bool -> STM ()
check b = if b then return () else retry
#endif
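-- Usage sketch for 'check' (illustrative, not part of the original module):
-- block the transaction until a TVar holds a positive value, retrying
-- automatically whenever the variable changes.
--
-- > waitPositive :: TVar Int -> STM Int
-- > waitPositive tv = do
-- >   n <- readTVar tv
-- >   check (n > 0)
-- >   return n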
#if ! (MIN_VERSION_base(4,3,0))
-- |Exception handling within STM actions.
catchSTM :: Exception e => STM a -> (e -> STM a) -> STM a
catchSTM (STM m) handler = STM $ catchSTM# m handler'
where
handler' e = case fromException e of
Just e' -> case handler e' of STM m' -> m'
Nothing -> raiseIO# e
-- | A variant of 'throw' that can only be used within the 'STM' monad.
--
-- Throwing an exception in @STM@ aborts the transaction and propagates the
-- exception.
--
-- Although 'throwSTM' has a type that is an instance of the type of 'throw', the
-- two functions are subtly different:
--
-- > throw e `seq` x ===> throw e
-- > throwSTM e `seq` x ===> x
--
-- The first example will cause the exception @e@ to be raised,
-- whereas the second one won\'t. In fact, 'throwSTM' will only cause
-- an exception to be raised when it is used within the 'STM' monad.
-- The 'throwSTM' variant should be used in preference to 'throw' to
-- raise an exception within the 'STM' monad because it guarantees
-- ordering with respect to other 'STM' operations, whereas 'throw'
-- does not.
throwSTM :: Exception e => e -> STM a
throwSTM e = STM $ raiseIO# (toException e)
#endif
data STMret a = STMret (State# RealWorld) a
liftSTM :: STM a -> State# RealWorld -> STMret a
liftSTM (STM m) = \s -> case m s of (# s', r #) -> STMret s' r
instance MonadFix STM where
mfix k = STM $ \s ->
let ans = liftSTM (k r) s
STMret _ r = ans
in case ans of STMret s' x -> (# s', x #)
|
gridaphobe/packages-stm
|
Control/Monad/STM.hs
|
bsd-3-clause
| 3,596
| 0
| 14
| 743
| 560
| 324
| 236
| 23
| 2
|
{-# LANGUAGE UndecidableInstances
, TypeSynonymInstances
, MultiParamTypeClasses
, DeriveDataTypeable
, FlexibleInstances
, OverloadedStrings
, TemplateHaskell
, FlexibleContexts #-}
module MFlow.Wai(
module MFlow.Cookies
,module MFlow
,waiMessageFlow)
where
import Data.Typeable
import Network.Wai
import Control.Concurrent.MVar(modifyMVar_, readMVar)
import Control.Monad(when)
import qualified Data.ByteString.Lazy.Char8 as B(empty,pack, unpack, length, ByteString,tail)
import Data.ByteString.Lazy(fromChunks)
import Data.ByteString.UTF8 hiding (span)
import qualified Data.ByteString.Char8 as SB -- hiding (pack, unpack)
import Control.Concurrent(ThreadId(..))
import System.IO.Unsafe
import Control.Concurrent.MVar
import Control.Concurrent
import Control.Monad.Trans
import Control.Exception
import qualified Data.Map as M
import Data.Maybe
import Data.TCache
import Data.TCache.DefaultPersistence
import Control.Workflow hiding (Indexable(..))
import MFlow
import MFlow.Cookies
import Data.Monoid
import MFlow.Wai.Response
import Network.Wai
import Network.Wai.Parse
import qualified Data.Conduit.Binary as CB
import Control.Monad.Trans.Resource
import Network.HTTP.Types
import Data.Conduit
import Data.Conduit.Lazy
import qualified Data.Conduit.List as CList
import Data.CaseInsensitive
import System.Time
import System.Directory
import System.IO
import qualified Data.Text as T
--import Debug.Trace
--(!>) = flip trace
toApp :: (Request -> IO Response) -> Application
-- #if MIN_VERSION_wai(3, 0, 0)
toApp f req sendResponse = f req >>= sendResponse
-- #else
-- toApp = id
-- #endif
flow= "flow"
instance Processable Request where
pwfPath env= if Prelude.null sc then [noScript] else Prelude.map T.unpack sc
where
sc= let p= pathInfo env
p'= reverse p
in case p' of
[] -> []
p' -> if T.null $ head p' then reverse(tail p') else p
puser env = fromMaybe anonymous $ fmap toString $ lookup ( mk $ fromString cookieuser) $ requestHeaders env
pind env= fromMaybe (error ": No FlowID") $ fmap toString $ lookup (mk flow) $ requestHeaders env
getParams= mkParams1 . requestHeaders
where
mkParams1 = Prelude.map mkParam1
mkParam1 ( x,y)= (toString $ original x, toString y)
waiMessageFlow :: Application
waiMessageFlow = toApp $ \req1 -> do
let httpreq1= requestHeaders req1
let cookies = getCookies httpreq1
(flowval , retcookies) <- case lookup flow cookies of
Just fl -> return (fl, [])
Nothing -> do
fl <- liftIO $ newFlow
return (fl, [UnEncryptedCookie (flow, fl, "/",Nothing):: Cookie])
{- for state persistence in cookies
putStateCookie req1 cookies
let retcookies= case getStateCookie req1 of
Nothing -> retcookies1
Just ck -> ck:retcookies1
-}
(params,files) <- case parseMethod $ requestMethod req1 of
Right GET -> do
return (Prelude.map (\(x,y) -> (x,fromMaybe "" y)) $ queryString req1,[])
Right POST -> do
case getRequestBodyType req1 of
Nothing -> error $ "getRequestBodyType: "
Just rbt ->
runResourceT $ withInternalState $ \state -> liftIO $ do
let backend file info= do
(key, (fp, h)) <- flip runInternalState state $ allocate (do
tempDir <- getTemporaryDirectory
openBinaryTempFile tempDir "upload.tmp") (\(_, h) -> hClose h)
CB.sinkHandle h
lift $ release key
return fp
---- #if MIN_VERSION_wai(3, 0, 0)
let backend' file info getBS = do
let src = do
bs <- liftIO getBS
when (not $ SB.null bs) $ do
Data.Conduit.yield bs
src
src $$ backend file info
sinkRequestBody backend' rbt (requestBody req1)
---- #else
---- requestBody req1 $$ sinkRequestBody backend rbt
---- #endif
---- let fileparams= Prelude.map (\(param,FileInfo filename contentype content)
---- -> (param, SB.pack content )) files
---- let fileparams= Prelude.map (\(param,fileinfo)
---- -> (param, fileinfo )) files
---- return $ fileparams++ params
let filesp= Prelude.map (\(param,FileInfo filename contentype tempfile)
-> (mk param, fromString $ show(filename,contentype,tempfile) )) files
-- let filesp= Prelude.map (\(a,b) -> ( mk a, fromString $ show b)) files
let req = case retcookies of
[] -> req1{requestHeaders= filesp ++ mkParams (params ++ cookies) ++ requestHeaders req1}
_ -> req1{requestHeaders= filesp ++ mkParams ((flow, flowval): params ++ cookies) ++ requestHeaders req1}
(resp',th) <- liftIO $ msgScheduler req -- !> (show $ requestHeaders req)
let resp= case (resp',retcookies) of
(_,[]) -> resp'
(error@(Error _),_) -> error
(HttpData hs co str,_) -> HttpData hs (co++ retcookies) str
return $ toResponse resp
------persistent state in cookies (not tested)
tvresources :: MVar (Maybe (M.Map String String))
tvresources= unsafePerformIO $ newMVar Nothing
statCookieName= "stat"
putStateCookie req cookies=
case lookup statCookieName cookies of
Nothing -> return ()
Just (statCookieName, str , "/", _) -> modifyMVar_ tvresources $
\mmap -> case mmap of
Just map -> return $ Just $ M.insert (keyResource req) str map
Nothing -> return $ Just $ M.fromList [((keyResource req), str) ]
getStateCookie req= do
mr<- readMVar tvresources
case mr of
Nothing -> return Nothing
Just map -> case M.lookup (keyResource req) map of
Nothing -> return Nothing
Just str -> do
swapMVar tvresources Nothing
return $ Just (statCookieName, str , "/")
{-
persistInCookies= setPersist PersistStat{readStat=readResource, writeStat=writeResource, deleteStat=deleteResource}
where
writeResource stat= modifyMVar_ tvresources $ \mmap ->
case mmap of
Just map-> return $ Just $ M.insert (keyResource stat) (serialize stat) map
Nothing -> return $ Just $ M.fromList [((keyResource stat), (serialize stat)) ]
readResource stat= do
mstr <- withMVar tvresources $ \mmap ->
case mmap of
Just map -> return $ M.lookup (keyResource stat) map
Nothing -> return Nothing
case mstr of
Nothing -> return Nothing
Just str -> return $ deserialize str
deleteResource stat= modifyMVar_ tvresources $ \mmap->
case mmap of
Just map -> return $ Just $ M.delete (keyResource stat) map
Nothing -> return $ Nothing
-}
|
agocorona/MFlow
|
src/MFlow/Wai.hs
|
bsd-3-clause
| 7,934
| 0
| 38
| 2,870
| 1,646
| 875
| 771
| 125
| 7
|
{-# LANGUAGE MultiParamTypeClasses, FlexibleInstances, TypeFamilies, Rank2Types, ScopedTypeVariables #-}
-- |
-- Module : Data.Vector.Storable
-- Copyright : (c) Roman Leshchinskiy 2009-2010
-- License : BSD-style
--
-- Maintainer : Roman Leshchinskiy <rl@cse.unsw.edu.au>
-- Stability : experimental
-- Portability : non-portable
--
-- 'Storable'-based vectors.
--
module Data.Vector.Storable (
-- * Storable vectors
Vector, MVector(..), Storable,
-- * Accessors
-- ** Length information
length, null,
-- ** Indexing
(!), (!?), head, last,
unsafeIndex, unsafeHead, unsafeLast,
-- ** Monadic indexing
indexM, headM, lastM,
unsafeIndexM, unsafeHeadM, unsafeLastM,
-- ** Extracting subvectors (slicing)
slice, init, tail, take, drop, splitAt,
unsafeSlice, unsafeInit, unsafeTail, unsafeTake, unsafeDrop,
-- * Construction
-- ** Initialisation
empty, singleton, replicate, generate, iterateN,
-- ** Monadic initialisation
replicateM, generateM, create,
-- ** Unfolding
unfoldr, unfoldrN,
constructN, constructrN,
-- ** Enumeration
enumFromN, enumFromStepN, enumFromTo, enumFromThenTo,
-- ** Concatenation
cons, snoc, (++), concat,
-- ** Restricting memory usage
force,
-- * Modifying vectors
-- ** Bulk updates
(//), update_,
unsafeUpd, unsafeUpdate_,
-- ** Accumulations
accum, accumulate_,
unsafeAccum, unsafeAccumulate_,
-- ** Permutations
reverse, backpermute, unsafeBackpermute,
-- ** Safe destructive updates
modify,
-- * Elementwise operations
-- ** Mapping
map, imap, concatMap,
-- ** Monadic mapping
mapM, mapM_, forM, forM_,
-- ** Zipping
zipWith, zipWith3, zipWith4, zipWith5, zipWith6,
izipWith, izipWith3, izipWith4, izipWith5, izipWith6,
-- ** Monadic zipping
zipWithM, zipWithM_,
-- * Working with predicates
-- ** Filtering
filter, ifilter, filterM,
takeWhile, dropWhile,
-- ** Partitioning
partition, unstablePartition, span, break,
-- ** Searching
elem, notElem, find, findIndex, findIndices, elemIndex, elemIndices,
-- * Folding
foldl, foldl1, foldl', foldl1', foldr, foldr1, foldr', foldr1',
ifoldl, ifoldl', ifoldr, ifoldr',
-- ** Specialised folds
all, any, and, or,
sum, product,
maximum, maximumBy, minimum, minimumBy,
minIndex, minIndexBy, maxIndex, maxIndexBy,
-- ** Monadic folds
foldM, foldM', fold1M, fold1M',
foldM_, foldM'_, fold1M_, fold1M'_,
-- * Prefix sums (scans)
prescanl, prescanl',
postscanl, postscanl',
scanl, scanl', scanl1, scanl1',
prescanr, prescanr',
postscanr, postscanr',
scanr, scanr', scanr1, scanr1',
-- * Conversions
-- ** Lists
toList, fromList, fromListN,
-- ** Other vector types
G.convert, unsafeCast,
-- ** Mutable vectors
freeze, thaw, copy, unsafeFreeze, unsafeThaw, unsafeCopy,
-- * Raw pointers
unsafeFromForeignPtr, unsafeFromForeignPtr0,
unsafeToForeignPtr, unsafeToForeignPtr0,
unsafeWith
) where
import qualified Data.Vector.Generic as G
import Data.Vector.Storable.Mutable ( MVector(..) )
import Data.Vector.Storable.Internal
import qualified Data.Vector.Fusion.Bundle as Bundle
import Foreign.Storable
import Foreign.ForeignPtr
import Foreign.Ptr
import Foreign.Marshal.Array ( advancePtr, copyArray )
import Control.DeepSeq ( NFData )
import Control.Monad.ST ( ST )
import Control.Monad.Primitive
import Prelude hiding ( length, null,
replicate, (++), concat,
head, last,
init, tail, take, drop, splitAt, reverse,
map, concatMap,
zipWith, zipWith3, zip, zip3, unzip, unzip3,
filter, takeWhile, dropWhile, span, break,
elem, notElem,
foldl, foldl1, foldr, foldr1,
all, any, and, or, sum, product, minimum, maximum,
scanl, scanl1, scanr, scanr1,
enumFromTo, enumFromThenTo,
mapM, mapM_ )
import qualified Prelude
import Data.Typeable ( Typeable )
import Data.Data ( Data(..) )
import Text.Read ( Read(..), readListPrecDefault )
import Data.Monoid ( Monoid(..) )
#include "vector.h"
-- | 'Storable'-based vectors
data Vector a = Vector {-# UNPACK #-} !Int
{-# UNPACK #-} !(ForeignPtr a)
deriving ( Typeable )
instance NFData (Vector a)
instance (Show a, Storable a) => Show (Vector a) where
showsPrec = G.showsPrec
instance (Read a, Storable a) => Read (Vector a) where
readPrec = G.readPrec
readListPrec = readListPrecDefault
instance (Data a, Storable a) => Data (Vector a) where
gfoldl = G.gfoldl
toConstr _ = error "toConstr"
gunfold _ _ = error "gunfold"
dataTypeOf _ = G.mkType "Data.Vector.Storable.Vector"
dataCast1 = G.dataCast
type instance G.Mutable Vector = MVector
instance Storable a => G.Vector Vector a where
{-# INLINE basicUnsafeFreeze #-}
basicUnsafeFreeze (MVector n fp) = return $ Vector n fp
{-# INLINE basicUnsafeThaw #-}
basicUnsafeThaw (Vector n fp) = return $ MVector n fp
{-# INLINE basicLength #-}
basicLength (Vector n _) = n
{-# INLINE basicUnsafeSlice #-}
basicUnsafeSlice i n (Vector _ fp) = Vector n (updPtr (`advancePtr` i) fp)
{-# INLINE basicUnsafeIndexM #-}
basicUnsafeIndexM (Vector _ fp) i = return
. unsafeInlineIO
$ withForeignPtr fp $ \p ->
peekElemOff p i
{-# INLINE basicUnsafeCopy #-}
basicUnsafeCopy (MVector n fp) (Vector _ fq)
= unsafePrimToPrim
$ withForeignPtr fp $ \p ->
withForeignPtr fq $ \q ->
copyArray p q n
{-# INLINE elemseq #-}
elemseq _ = seq
-- See http://trac.haskell.org/vector/ticket/12
instance (Storable a, Eq a) => Eq (Vector a) where
{-# INLINE (==) #-}
xs == ys = Bundle.eq (G.stream xs) (G.stream ys)
{-# INLINE (/=) #-}
xs /= ys = not (Bundle.eq (G.stream xs) (G.stream ys))
-- See http://trac.haskell.org/vector/ticket/12
instance (Storable a, Ord a) => Ord (Vector a) where
{-# INLINE compare #-}
compare xs ys = Bundle.cmp (G.stream xs) (G.stream ys)
{-# INLINE (<) #-}
xs < ys = Bundle.cmp (G.stream xs) (G.stream ys) == LT
{-# INLINE (<=) #-}
xs <= ys = Bundle.cmp (G.stream xs) (G.stream ys) /= GT
{-# INLINE (>) #-}
xs > ys = Bundle.cmp (G.stream xs) (G.stream ys) == GT
{-# INLINE (>=) #-}
xs >= ys = Bundle.cmp (G.stream xs) (G.stream ys) /= LT
instance Storable a => Monoid (Vector a) where
{-# INLINE mempty #-}
mempty = empty
{-# INLINE mappend #-}
mappend = (++)
{-# INLINE mconcat #-}
mconcat = concat
-- Length
-- ------
-- | /O(1)/ Yield the length of the vector.
length :: Storable a => Vector a -> Int
{-# INLINE length #-}
length = G.length
-- | /O(1)/ Test whether a vector is empty.
null :: Storable a => Vector a -> Bool
{-# INLINE null #-}
null = G.null
-- Indexing
-- --------
-- | O(1) Indexing
(!) :: Storable a => Vector a -> Int -> a
{-# INLINE (!) #-}
(!) = (G.!)
-- | O(1) Safe indexing
(!?) :: Storable a => Vector a -> Int -> Maybe a
{-# INLINE (!?) #-}
(!?) = (G.!?)
-- | /O(1)/ First element
head :: Storable a => Vector a -> a
{-# INLINE head #-}
head = G.head
-- | /O(1)/ Last element
last :: Storable a => Vector a -> a
{-# INLINE last #-}
last = G.last
-- | /O(1)/ Unsafe indexing without bounds checking
unsafeIndex :: Storable a => Vector a -> Int -> a
{-# INLINE unsafeIndex #-}
unsafeIndex = G.unsafeIndex
-- | /O(1)/ First element without checking if the vector is empty
unsafeHead :: Storable a => Vector a -> a
{-# INLINE unsafeHead #-}
unsafeHead = G.unsafeHead
-- | /O(1)/ Last element without checking if the vector is empty
unsafeLast :: Storable a => Vector a -> a
{-# INLINE unsafeLast #-}
unsafeLast = G.unsafeLast
-- Monadic indexing
-- ----------------
-- | /O(1)/ Indexing in a monad.
--
-- The monad allows operations to be strict in the vector when necessary.
-- Suppose vector copying is implemented like this:
--
-- > copy mv v = ... write mv i (v ! i) ...
--
-- For lazy vectors, @v ! i@ would not be evaluated which means that @mv@
-- would unnecessarily retain a reference to @v@ in each element written.
--
-- With 'indexM', copying can be implemented like this instead:
--
-- > copy mv v = ... do
-- > x <- indexM v i
-- > write mv i x
--
-- Here, no references to @v@ are retained because indexing (but /not/ the
-- elements) is evaluated eagerly.
--
indexM :: (Storable a, Monad m) => Vector a -> Int -> m a
{-# INLINE indexM #-}
indexM = G.indexM
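-- As a hedged, concrete version of the copy sketch above (the helper name
-- @copyLoop@ and the qualified imports are inventions of this example, not
-- part of the library):
--
-- > import Foreign.Storable (Storable)
-- > import qualified Data.Vector.Storable as V
-- > import qualified Data.Vector.Storable.Mutable as M
-- >
-- > copyLoop :: Storable a => M.IOVector a -> V.Vector a -> IO ()
-- > copyLoop mv v = mapM_ step [0 .. V.length v - 1]
-- >   where
-- >     step i = do
-- >       x <- V.indexM v i   -- the indexing is evaluated here, eagerly
-- >       M.write mv i x      -- so no thunk referencing v is stored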
-- | /O(1)/ First element of a vector in a monad. See 'indexM' for an
-- explanation of why this is useful.
headM :: (Storable a, Monad m) => Vector a -> m a
{-# INLINE headM #-}
headM = G.headM
-- | /O(1)/ Last element of a vector in a monad. See 'indexM' for an
-- explanation of why this is useful.
lastM :: (Storable a, Monad m) => Vector a -> m a
{-# INLINE lastM #-}
lastM = G.lastM
-- | /O(1)/ Indexing in a monad without bounds checks. See 'indexM' for an
-- explanation of why this is useful.
unsafeIndexM :: (Storable a, Monad m) => Vector a -> Int -> m a
{-# INLINE unsafeIndexM #-}
unsafeIndexM = G.unsafeIndexM
-- | /O(1)/ First element in a monad without checking for empty vectors.
-- See 'indexM' for an explanation of why this is useful.
unsafeHeadM :: (Storable a, Monad m) => Vector a -> m a
{-# INLINE unsafeHeadM #-}
unsafeHeadM = G.unsafeHeadM
-- | /O(1)/ Last element in a monad without checking for empty vectors.
-- See 'indexM' for an explanation of why this is useful.
unsafeLastM :: (Storable a, Monad m) => Vector a -> m a
{-# INLINE unsafeLastM #-}
unsafeLastM = G.unsafeLastM
-- Extracting subvectors (slicing)
-- -------------------------------
-- | /O(1)/ Yield a slice of the vector without copying it. The vector must
-- contain at least @i+n@ elements.
slice :: Storable a
=> Int -- ^ @i@ starting index
-> Int -- ^ @n@ length
-> Vector a
-> Vector a
{-# INLINE slice #-}
slice = G.slice
-- | /O(1)/ Yield all but the last element without copying. The vector may not
-- be empty.
init :: Storable a => Vector a -> Vector a
{-# INLINE init #-}
init = G.init
-- | /O(1)/ Yield all but the first element without copying. The vector may not
-- be empty.
tail :: Storable a => Vector a -> Vector a
{-# INLINE tail #-}
tail = G.tail
-- | /O(1)/ Yield the first @n@ elements without copying. The vector may
-- contain fewer than @n@ elements, in which case it is returned unchanged.
take :: Storable a => Int -> Vector a -> Vector a
{-# INLINE take #-}
take = G.take
-- | /O(1)/ Yield all but the first @n@ elements without copying. The vector may
-- contain fewer than @n@ elements, in which case an empty vector is returned.
drop :: Storable a => Int -> Vector a -> Vector a
{-# INLINE drop #-}
drop = G.drop
-- | /O(1)/ Yield the first @n@ elements paired with the remainder without copying.
--
-- Note that @'splitAt' n v@ is equivalent to @('take' n v, 'drop' n v)@
-- but slightly more efficient.
{-# INLINE splitAt #-}
splitAt :: Storable a => Int -> Vector a -> (Vector a, Vector a)
splitAt = G.splitAt
-- | /O(1)/ Yield a slice of the vector without copying. The vector must
-- contain at least @i+n@ elements but this is not checked.
unsafeSlice :: Storable a => Int -- ^ @i@ starting index
-> Int -- ^ @n@ length
-> Vector a
-> Vector a
{-# INLINE unsafeSlice #-}
unsafeSlice = G.unsafeSlice
-- | /O(1)/ Yield all but the last element without copying. The vector may not
-- be empty but this is not checked.
unsafeInit :: Storable a => Vector a -> Vector a
{-# INLINE unsafeInit #-}
unsafeInit = G.unsafeInit
-- | /O(1)/ Yield all but the first element without copying. The vector may not
-- be empty but this is not checked.
unsafeTail :: Storable a => Vector a -> Vector a
{-# INLINE unsafeTail #-}
unsafeTail = G.unsafeTail
-- | /O(1)/ Yield the first @n@ elements without copying. The vector must
-- contain at least @n@ elements but this is not checked.
unsafeTake :: Storable a => Int -> Vector a -> Vector a
{-# INLINE unsafeTake #-}
unsafeTake = G.unsafeTake
-- | /O(1)/ Yield all but the first @n@ elements without copying. The vector
-- must contain at least @n@ elements but this is not checked.
unsafeDrop :: Storable a => Int -> Vector a -> Vector a
{-# INLINE unsafeDrop #-}
unsafeDrop = G.unsafeDrop
-- Initialisation
-- --------------
-- | /O(1)/ Empty vector
empty :: Storable a => Vector a
{-# INLINE empty #-}
empty = G.empty
-- | /O(1)/ Vector with exactly one element
singleton :: Storable a => a -> Vector a
{-# INLINE singleton #-}
singleton = G.singleton
-- | /O(n)/ Vector of the given length with the same value in each position
replicate :: Storable a => Int -> a -> Vector a
{-# INLINE replicate #-}
replicate = G.replicate
-- | /O(n)/ Construct a vector of the given length by applying the function to
-- each index
generate :: Storable a => Int -> (Int -> a) -> Vector a
{-# INLINE generate #-}
generate = G.generate
-- | /O(n)/ Apply function n times to value. Zeroth element is original value.
iterateN :: Storable a => Int -> (a -> a) -> a -> Vector a
{-# INLINE iterateN #-}
iterateN = G.iterateN
-- Unfolding
-- ---------
-- | /O(n)/ Construct a vector by repeatedly applying the generator function
-- to a seed. The generator function yields 'Just' the next element and the
-- new seed or 'Nothing' if there are no more elements.
--
-- > unfoldr (\n -> if n == 0 then Nothing else Just (n,n-1)) 10
-- > = <10,9,8,7,6,5,4,3,2,1>
unfoldr :: Storable a => (b -> Maybe (a, b)) -> b -> Vector a
{-# INLINE unfoldr #-}
unfoldr = G.unfoldr
-- | /O(n)/ Construct a vector with at most @n@ elements by repeatedly applying
-- the generator function to a seed. The generator function yields 'Just' the
-- next element and the new seed or 'Nothing' if there are no more elements.
--
-- > unfoldrN 3 (\n -> Just (n,n-1)) 10 = <10,9,8>
unfoldrN :: Storable a => Int -> (b -> Maybe (a, b)) -> b -> Vector a
{-# INLINE unfoldrN #-}
unfoldrN = G.unfoldrN
-- | /O(n)/ Construct a vector with @n@ elements by repeatedly applying the
-- generator function to the already constructed part of the vector.
--
-- > constructN 3 f = let a = f <> ; b = f <a> ; c = f <a,b> in f <a,b,c>
--
constructN :: Storable a => Int -> (Vector a -> a) -> Vector a
{-# INLINE constructN #-}
constructN = G.constructN
-- | /O(n)/ Construct a vector with @n@ elements from right to left by
-- repeatedly applying the generator function to the already constructed part
-- of the vector.
--
-- > constructrN 3 f = let a = f <> ; b = f<a> ; c = f <b,a> in f <c,b,a>
--
constructrN :: Storable a => Int -> (Vector a -> a) -> Vector a
{-# INLINE constructrN #-}
constructrN = G.constructrN
-- Enumeration
-- -----------
-- | /O(n)/ Yield a vector of the given length containing the values @x@, @x+1@
-- etc. This operation is usually more efficient than 'enumFromTo'.
--
-- > enumFromN 5 3 = <5,6,7>
enumFromN :: (Storable a, Num a) => a -> Int -> Vector a
{-# INLINE enumFromN #-}
enumFromN = G.enumFromN
-- | /O(n)/ Yield a vector of the given length containing the values @x@, @x+y@,
-- @x+y+y@ etc. This operation is usually more efficient than 'enumFromThenTo'.
--
-- > enumFromStepN 1 0.1 5 = <1,1.1,1.2,1.3,1.4>
enumFromStepN :: (Storable a, Num a) => a -> a -> Int -> Vector a
{-# INLINE enumFromStepN #-}
enumFromStepN = G.enumFromStepN
-- | /O(n)/ Enumerate values from @x@ to @y@.
--
-- /WARNING:/ This operation can be very inefficient. If at all possible, use
-- 'enumFromN' instead.
enumFromTo :: (Storable a, Enum a) => a -> a -> Vector a
{-# INLINE enumFromTo #-}
enumFromTo = G.enumFromTo
-- | /O(n)/ Enumerate values from @x@ to @y@ with a specific step @z@.
--
-- /WARNING:/ This operation can be very inefficient. If at all possible, use
-- 'enumFromStepN' instead.
enumFromThenTo :: (Storable a, Enum a) => a -> a -> a -> Vector a
{-# INLINE enumFromThenTo #-}
enumFromThenTo = G.enumFromThenTo
-- Concatenation
-- -------------
-- | /O(n)/ Prepend an element
cons :: Storable a => a -> Vector a -> Vector a
{-# INLINE cons #-}
cons = G.cons
-- | /O(n)/ Append an element
snoc :: Storable a => Vector a -> a -> Vector a
{-# INLINE snoc #-}
snoc = G.snoc
infixr 5 ++
-- | /O(m+n)/ Concatenate two vectors
(++) :: Storable a => Vector a -> Vector a -> Vector a
{-# INLINE (++) #-}
(++) = (G.++)
-- | /O(n)/ Concatenate all vectors in the list
concat :: Storable a => [Vector a] -> Vector a
{-# INLINE concat #-}
concat = G.concat
-- Monadic initialisation
-- ----------------------
-- | /O(n)/ Execute the monadic action the given number of times and store the
-- results in a vector.
replicateM :: (Monad m, Storable a) => Int -> m a -> m (Vector a)
{-# INLINE replicateM #-}
replicateM = G.replicateM
-- | /O(n)/ Construct a vector of the given length by applying the monadic
-- action to each index
generateM :: (Monad m, Storable a) => Int -> (Int -> m a) -> m (Vector a)
{-# INLINE generateM #-}
generateM = G.generateM
-- | Execute the monadic action and freeze the resulting vector.
--
-- @
-- create (do { v \<- new 2; write v 0 \'a\'; write v 1 \'b\'; return v }) = \<'a','b'\>
-- @
create :: Storable a => (forall s. ST s (MVector s a)) -> Vector a
{-# INLINE create #-}
-- NOTE: eta-expanded due to http://hackage.haskell.org/trac/ghc/ticket/4120
create p = G.create p
-- Restricting memory usage
-- ------------------------
-- | /O(n)/ Yield the argument but force it not to retain any extra memory,
-- possibly by copying it.
--
-- This is especially useful when dealing with slices. For example:
--
-- > force (slice 0 2 <huge vector>)
--
-- Here, the slice retains a reference to the huge vector. Forcing it creates
-- a copy of just the elements that belong to the slice and allows the huge
-- vector to be garbage collected.
force :: Storable a => Vector a -> Vector a
{-# INLINE force #-}
force = G.force
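-- For illustration (the sizes below are invented), keeping a 2-element slice
-- of a large vector without holding on to the rest of it:
--
-- > huge, small :: Vector Double
-- > huge  = fromList [1 .. 1000000]
-- > small = force (slice 0 2 huge)
-- >   -- small is a fresh 2-element copy; huge can then be collected,
-- >   -- assuming nothing else references it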
-- Bulk updates
-- ------------
-- | /O(m+n)/ For each pair @(i,a)@ from the list, replace the vector
-- element at position @i@ by @a@.
--
-- > <5,9,2,7> // [(2,1),(0,3),(2,8)] = <3,9,8,7>
--
(//) :: Storable a => Vector a -- ^ initial vector (of length @m@)
-> [(Int, a)] -- ^ list of index/value pairs (of length @n@)
-> Vector a
{-# INLINE (//) #-}
(//) = (G.//)
-- | /O(m+min(n1,n2))/ For each index @i@ from the index vector and the
-- corresponding value @a@ from the value vector, replace the element of the
-- initial vector at position @i@ by @a@.
--
-- > update_ <5,9,2,7> <2,0,2> <1,3,8> = <3,9,8,7>
--
update_ :: Storable a
=> Vector a -- ^ initial vector (of length @m@)
-> Vector Int -- ^ index vector (of length @n1@)
-> Vector a -- ^ value vector (of length @n2@)
-> Vector a
{-# INLINE update_ #-}
update_ = G.update_
-- | Same as ('//') but without bounds checking.
unsafeUpd :: Storable a => Vector a -> [(Int, a)] -> Vector a
{-# INLINE unsafeUpd #-}
unsafeUpd = G.unsafeUpd
-- | Same as 'update_' but without bounds checking.
unsafeUpdate_ :: Storable a => Vector a -> Vector Int -> Vector a -> Vector a
{-# INLINE unsafeUpdate_ #-}
unsafeUpdate_ = G.unsafeUpdate_
-- Accumulations
-- -------------
-- | /O(m+n)/ For each pair @(i,b)@ from the list, replace the vector element
-- @a@ at position @i@ by @f a b@.
--
-- > accum (+) <5,9,2> [(2,4),(1,6),(0,3),(1,7)] = <5+3, 9+6+7, 2+4>
accum :: Storable a
=> (a -> b -> a) -- ^ accumulating function @f@
-> Vector a -- ^ initial vector (of length @m@)
-> [(Int,b)] -- ^ list of index/value pairs (of length @n@)
-> Vector a
{-# INLINE accum #-}
accum = G.accum
-- | /O(m+min(n1,n2))/ For each index @i@ from the index vector and the
-- corresponding value @b@ from the value vector,
-- replace the element of the initial vector at
-- position @i@ by @f a b@.
--
-- > accumulate_ (+) <5,9,2> <2,1,0,1> <4,6,3,7> = <5+3, 9+6+7, 2+4>
--
accumulate_ :: (Storable a, Storable b)
=> (a -> b -> a) -- ^ accumulating function @f@
-> Vector a -- ^ initial vector (of length @m@)
-> Vector Int -- ^ index vector (of length @n1@)
-> Vector b -- ^ value vector (of length @n2@)
-> Vector a
{-# INLINE accumulate_ #-}
accumulate_ = G.accumulate_
-- | Same as 'accum' but without bounds checking.
unsafeAccum :: Storable a => (a -> b -> a) -> Vector a -> [(Int,b)] -> Vector a
{-# INLINE unsafeAccum #-}
unsafeAccum = G.unsafeAccum
-- | Same as 'accumulate_' but without bounds checking.
unsafeAccumulate_ :: (Storable a, Storable b) =>
(a -> b -> a) -> Vector a -> Vector Int -> Vector b -> Vector a
{-# INLINE unsafeAccumulate_ #-}
unsafeAccumulate_ = G.unsafeAccumulate_
-- Permutations
-- ------------
-- | /O(n)/ Reverse a vector
reverse :: Storable a => Vector a -> Vector a
{-# INLINE reverse #-}
reverse = G.reverse
-- | /O(n)/ Yield the vector obtained by replacing each element @i@ of the
-- index vector by @xs'!'i@. This is equivalent to @'map' (xs'!') is@ but is
-- often much more efficient.
--
-- > backpermute <a,b,c,d> <0,3,2,3,1,0> = <a,d,c,d,b,a>
backpermute :: Storable a => Vector a -> Vector Int -> Vector a
{-# INLINE backpermute #-}
backpermute = G.backpermute
-- | Same as 'backpermute' but without bounds checking.
unsafeBackpermute :: Storable a => Vector a -> Vector Int -> Vector a
{-# INLINE unsafeBackpermute #-}
unsafeBackpermute = G.unsafeBackpermute
-- Safe destructive updates
-- ------------------------
-- | Apply a destructive operation to a vector. The operation will be
-- performed in place if it is safe to do so and will modify a copy of the
-- vector otherwise.
--
-- @
-- modify (\\v -> write v 0 \'x\') ('replicate' 3 \'a\') = \<\'x\',\'a\',\'a\'\>
-- @
modify :: Storable a => (forall s. MVector s a -> ST s ()) -> Vector a -> Vector a
{-# INLINE modify #-}
modify p = G.modify p
-- Mapping
-- -------
-- | /O(n)/ Map a function over a vector
map :: (Storable a, Storable b) => (a -> b) -> Vector a -> Vector b
{-# INLINE map #-}
map = G.map
-- | /O(n)/ Apply a function to every element of a vector and its index
imap :: (Storable a, Storable b) => (Int -> a -> b) -> Vector a -> Vector b
{-# INLINE imap #-}
imap = G.imap
-- | Map a function over a vector and concatenate the results.
concatMap :: (Storable a, Storable b) => (a -> Vector b) -> Vector a -> Vector b
{-# INLINE concatMap #-}
concatMap = G.concatMap
-- Monadic mapping
-- ---------------
-- | /O(n)/ Apply the monadic action to all elements of the vector, yielding a
-- vector of results
mapM :: (Monad m, Storable a, Storable b) => (a -> m b) -> Vector a -> m (Vector b)
{-# INLINE mapM #-}
mapM = G.mapM
-- | /O(n)/ Apply the monadic action to all elements of a vector and ignore the
-- results
mapM_ :: (Monad m, Storable a) => (a -> m b) -> Vector a -> m ()
{-# INLINE mapM_ #-}
mapM_ = G.mapM_
-- | /O(n)/ Apply the monadic action to all elements of the vector, yielding a
-- vector of results. Equivalent to @flip 'mapM'@.
forM :: (Monad m, Storable a, Storable b) => Vector a -> (a -> m b) -> m (Vector b)
{-# INLINE forM #-}
forM = G.forM
-- | /O(n)/ Apply the monadic action to all elements of a vector and ignore the
-- results. Equivalent to @flip 'mapM_'@.
forM_ :: (Monad m, Storable a) => Vector a -> (a -> m b) -> m ()
{-# INLINE forM_ #-}
forM_ = G.forM_
-- Zipping
-- -------
-- | /O(min(m,n))/ Zip two vectors with the given function.
zipWith :: (Storable a, Storable b, Storable c)
=> (a -> b -> c) -> Vector a -> Vector b -> Vector c
{-# INLINE zipWith #-}
zipWith = G.zipWith
-- | Zip three vectors with the given function.
zipWith3 :: (Storable a, Storable b, Storable c, Storable d)
=> (a -> b -> c -> d) -> Vector a -> Vector b -> Vector c -> Vector d
{-# INLINE zipWith3 #-}
zipWith3 = G.zipWith3
zipWith4 :: (Storable a, Storable b, Storable c, Storable d, Storable e)
=> (a -> b -> c -> d -> e)
-> Vector a -> Vector b -> Vector c -> Vector d -> Vector e
{-# INLINE zipWith4 #-}
zipWith4 = G.zipWith4
zipWith5 :: (Storable a, Storable b, Storable c, Storable d, Storable e,
Storable f)
=> (a -> b -> c -> d -> e -> f)
-> Vector a -> Vector b -> Vector c -> Vector d -> Vector e
-> Vector f
{-# INLINE zipWith5 #-}
zipWith5 = G.zipWith5
zipWith6 :: (Storable a, Storable b, Storable c, Storable d, Storable e,
Storable f, Storable g)
=> (a -> b -> c -> d -> e -> f -> g)
-> Vector a -> Vector b -> Vector c -> Vector d -> Vector e
-> Vector f -> Vector g
{-# INLINE zipWith6 #-}
zipWith6 = G.zipWith6
-- | /O(min(m,n))/ Zip two vectors with a function that also takes the
-- elements' indices.
izipWith :: (Storable a, Storable b, Storable c)
=> (Int -> a -> b -> c) -> Vector a -> Vector b -> Vector c
{-# INLINE izipWith #-}
izipWith = G.izipWith
-- | Zip three vectors and their indices with the given function.
izipWith3 :: (Storable a, Storable b, Storable c, Storable d)
=> (Int -> a -> b -> c -> d)
-> Vector a -> Vector b -> Vector c -> Vector d
{-# INLINE izipWith3 #-}
izipWith3 = G.izipWith3
izipWith4 :: (Storable a, Storable b, Storable c, Storable d, Storable e)
=> (Int -> a -> b -> c -> d -> e)
-> Vector a -> Vector b -> Vector c -> Vector d -> Vector e
{-# INLINE izipWith4 #-}
izipWith4 = G.izipWith4
izipWith5 :: (Storable a, Storable b, Storable c, Storable d, Storable e,
Storable f)
=> (Int -> a -> b -> c -> d -> e -> f)
-> Vector a -> Vector b -> Vector c -> Vector d -> Vector e
-> Vector f
{-# INLINE izipWith5 #-}
izipWith5 = G.izipWith5
izipWith6 :: (Storable a, Storable b, Storable c, Storable d, Storable e,
Storable f, Storable g)
=> (Int -> a -> b -> c -> d -> e -> f -> g)
-> Vector a -> Vector b -> Vector c -> Vector d -> Vector e
-> Vector f -> Vector g
{-# INLINE izipWith6 #-}
izipWith6 = G.izipWith6
-- Monadic zipping
-- ---------------
-- | /O(min(m,n))/ Zip the two vectors with the monadic action and yield a
-- vector of results
zipWithM :: (Monad m, Storable a, Storable b, Storable c)
=> (a -> b -> m c) -> Vector a -> Vector b -> m (Vector c)
{-# INLINE zipWithM #-}
zipWithM = G.zipWithM
-- | /O(min(m,n))/ Zip the two vectors with the monadic action and ignore the
-- results
zipWithM_ :: (Monad m, Storable a, Storable b)
=> (a -> b -> m c) -> Vector a -> Vector b -> m ()
{-# INLINE zipWithM_ #-}
zipWithM_ = G.zipWithM_
-- Filtering
-- ---------
-- | /O(n)/ Drop elements that do not satisfy the predicate
filter :: Storable a => (a -> Bool) -> Vector a -> Vector a
{-# INLINE filter #-}
filter = G.filter
-- | /O(n)/ Drop elements that do not satisfy the predicate, which is applied to
-- values and their indices.
ifilter :: Storable a => (Int -> a -> Bool) -> Vector a -> Vector a
{-# INLINE ifilter #-}
ifilter = G.ifilter
-- | /O(n)/ Drop elements that do not satisfy the monadic predicate
filterM :: (Monad m, Storable a) => (a -> m Bool) -> Vector a -> m (Vector a)
{-# INLINE filterM #-}
filterM = G.filterM
-- | /O(n)/ Yield the longest prefix of elements satisfying the predicate
-- without copying.
takeWhile :: Storable a => (a -> Bool) -> Vector a -> Vector a
{-# INLINE takeWhile #-}
takeWhile = G.takeWhile
-- | /O(n)/ Drop the longest prefix of elements that satisfy the predicate
-- without copying.
dropWhile :: Storable a => (a -> Bool) -> Vector a -> Vector a
{-# INLINE dropWhile #-}
dropWhile = G.dropWhile
-- Partitioning
-- -------------
-- | /O(n)/ Split the vector in two parts, the first one containing those
-- elements that satisfy the predicate and the second one those that don't. The
-- relative order of the elements is preserved at the cost of a sometimes
-- reduced performance compared to 'unstablePartition'.
partition :: Storable a => (a -> Bool) -> Vector a -> (Vector a, Vector a)
{-# INLINE partition #-}
partition = G.partition
-- | /O(n)/ Split the vector in two parts, the first one containing those
-- elements that satisfy the predicate and the second one those that don't.
-- The order of the elements is not preserved but the operation is often
-- faster than 'partition'.
unstablePartition :: Storable a => (a -> Bool) -> Vector a -> (Vector a, Vector a)
{-# INLINE unstablePartition #-}
unstablePartition = G.unstablePartition
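-- A small example (the values are invented for illustration):
--
-- > partition even (fromList [1,2,3,4,5 :: Int])
-- >   == (fromList [2,4], fromList [1,3,5])   -- True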
-- | /O(n)/ Split the vector into the longest prefix of elements that satisfy
-- the predicate and the rest without copying.
span :: Storable a => (a -> Bool) -> Vector a -> (Vector a, Vector a)
{-# INLINE span #-}
span = G.span
-- | /O(n)/ Split the vector into the longest prefix of elements that do not
-- satisfy the predicate and the rest without copying.
break :: Storable a => (a -> Bool) -> Vector a -> (Vector a, Vector a)
{-# INLINE break #-}
break = G.break
-- Searching
-- ---------
infix 4 `elem`
-- | /O(n)/ Check if the vector contains an element
elem :: (Storable a, Eq a) => a -> Vector a -> Bool
{-# INLINE elem #-}
elem = G.elem
infix 4 `notElem`
-- | /O(n)/ Check if the vector does not contain an element (inverse of 'elem')
notElem :: (Storable a, Eq a) => a -> Vector a -> Bool
{-# INLINE notElem #-}
notElem = G.notElem
-- | /O(n)/ Yield 'Just' the first element matching the predicate or 'Nothing'
-- if no such element exists.
find :: Storable a => (a -> Bool) -> Vector a -> Maybe a
{-# INLINE find #-}
find = G.find
-- | /O(n)/ Yield 'Just' the index of the first element matching the predicate
-- or 'Nothing' if no such element exists.
findIndex :: Storable a => (a -> Bool) -> Vector a -> Maybe Int
{-# INLINE findIndex #-}
findIndex = G.findIndex
-- | /O(n)/ Yield the indices of elements satisfying the predicate in ascending
-- order.
findIndices :: Storable a => (a -> Bool) -> Vector a -> Vector Int
{-# INLINE findIndices #-}
findIndices = G.findIndices
-- | /O(n)/ Yield 'Just' the index of the first occurrence of the given element or
-- 'Nothing' if the vector does not contain the element. This is a specialised
-- version of 'findIndex'.
elemIndex :: (Storable a, Eq a) => a -> Vector a -> Maybe Int
{-# INLINE elemIndex #-}
elemIndex = G.elemIndex
-- | /O(n)/ Yield the indices of all occurrences of the given element in
-- ascending order. This is a specialised version of 'findIndices'.
elemIndices :: (Storable a, Eq a) => a -> Vector a -> Vector Int
{-# INLINE elemIndices #-}
elemIndices = G.elemIndices
-- Folding
-- -------
-- | /O(n)/ Left fold
foldl :: Storable b => (a -> b -> a) -> a -> Vector b -> a
{-# INLINE foldl #-}
foldl = G.foldl
-- | /O(n)/ Left fold on non-empty vectors
foldl1 :: Storable a => (a -> a -> a) -> Vector a -> a
{-# INLINE foldl1 #-}
foldl1 = G.foldl1
-- | /O(n)/ Left fold with strict accumulator
foldl' :: Storable b => (a -> b -> a) -> a -> Vector b -> a
{-# INLINE foldl' #-}
foldl' = G.foldl'
-- | /O(n)/ Left fold on non-empty vectors with strict accumulator
foldl1' :: Storable a => (a -> a -> a) -> Vector a -> a
{-# INLINE foldl1' #-}
foldl1' = G.foldl1'
-- | /O(n)/ Right fold
foldr :: Storable a => (a -> b -> b) -> b -> Vector a -> b
{-# INLINE foldr #-}
foldr = G.foldr
-- | /O(n)/ Right fold on non-empty vectors
foldr1 :: Storable a => (a -> a -> a) -> Vector a -> a
{-# INLINE foldr1 #-}
foldr1 = G.foldr1
-- | /O(n)/ Right fold with a strict accumulator
foldr' :: Storable a => (a -> b -> b) -> b -> Vector a -> b
{-# INLINE foldr' #-}
foldr' = G.foldr'
-- | /O(n)/ Right fold on non-empty vectors with strict accumulator
foldr1' :: Storable a => (a -> a -> a) -> Vector a -> a
{-# INLINE foldr1' #-}
foldr1' = G.foldr1'
-- | /O(n)/ Left fold (function applied to each element and its index)
ifoldl :: Storable b => (a -> Int -> b -> a) -> a -> Vector b -> a
{-# INLINE ifoldl #-}
ifoldl = G.ifoldl
-- | /O(n)/ Left fold with strict accumulator (function applied to each element
-- and its index)
ifoldl' :: Storable b => (a -> Int -> b -> a) -> a -> Vector b -> a
{-# INLINE ifoldl' #-}
ifoldl' = G.ifoldl'
-- | /O(n)/ Right fold (function applied to each element and its index)
ifoldr :: Storable a => (Int -> a -> b -> b) -> b -> Vector a -> b
{-# INLINE ifoldr #-}
ifoldr = G.ifoldr
-- | /O(n)/ Right fold with strict accumulator (function applied to each
-- element and its index)
ifoldr' :: Storable a => (Int -> a -> b -> b) -> b -> Vector a -> b
{-# INLINE ifoldr' #-}
ifoldr' = G.ifoldr'
-- Specialised folds
-- -----------------
-- | /O(n)/ Check if all elements satisfy the predicate.
all :: Storable a => (a -> Bool) -> Vector a -> Bool
{-# INLINE all #-}
all = G.all
-- | /O(n)/ Check if any element satisfies the predicate.
any :: Storable a => (a -> Bool) -> Vector a -> Bool
{-# INLINE any #-}
any = G.any
-- | /O(n)/ Check if all elements are 'True'
and :: Vector Bool -> Bool
{-# INLINE and #-}
and = G.and
-- | /O(n)/ Check if any element is 'True'
or :: Vector Bool -> Bool
{-# INLINE or #-}
or = G.or
-- | /O(n)/ Compute the sum of the elements
sum :: (Storable a, Num a) => Vector a -> a
{-# INLINE sum #-}
sum = G.sum
-- | /O(n)/ Compute the product of the elements
product :: (Storable a, Num a) => Vector a -> a
{-# INLINE product #-}
product = G.product
-- | /O(n)/ Yield the maximum element of the vector. The vector may not be
-- empty.
maximum :: (Storable a, Ord a) => Vector a -> a
{-# INLINE maximum #-}
maximum = G.maximum
-- | /O(n)/ Yield the maximum element of the vector according to the given
-- comparison function. The vector may not be empty.
maximumBy :: Storable a => (a -> a -> Ordering) -> Vector a -> a
{-# INLINE maximumBy #-}
maximumBy = G.maximumBy
-- | /O(n)/ Yield the minimum element of the vector. The vector may not be
-- empty.
minimum :: (Storable a, Ord a) => Vector a -> a
{-# INLINE minimum #-}
minimum = G.minimum
-- | /O(n)/ Yield the minimum element of the vector according to the given
-- comparison function. The vector may not be empty.
minimumBy :: Storable a => (a -> a -> Ordering) -> Vector a -> a
{-# INLINE minimumBy #-}
minimumBy = G.minimumBy
-- | /O(n)/ Yield the index of the maximum element of the vector. The vector
-- may not be empty.
maxIndex :: (Storable a, Ord a) => Vector a -> Int
{-# INLINE maxIndex #-}
maxIndex = G.maxIndex
-- | /O(n)/ Yield the index of the maximum element of the vector according to
-- the given comparison function. The vector may not be empty.
maxIndexBy :: Storable a => (a -> a -> Ordering) -> Vector a -> Int
{-# INLINE maxIndexBy #-}
maxIndexBy = G.maxIndexBy
-- | /O(n)/ Yield the index of the minimum element of the vector. The vector
-- may not be empty.
minIndex :: (Storable a, Ord a) => Vector a -> Int
{-# INLINE minIndex #-}
minIndex = G.minIndex
-- | /O(n)/ Yield the index of the minimum element of the vector according to
-- the given comparison function. The vector may not be empty.
minIndexBy :: Storable a => (a -> a -> Ordering) -> Vector a -> Int
{-# INLINE minIndexBy #-}
minIndexBy = G.minIndexBy
-- Monadic folds
-- -------------
-- | /O(n)/ Monadic fold
foldM :: (Monad m, Storable b) => (a -> b -> m a) -> a -> Vector b -> m a
{-# INLINE foldM #-}
foldM = G.foldM
-- | /O(n)/ Monadic fold over non-empty vectors
fold1M :: (Monad m, Storable a) => (a -> a -> m a) -> Vector a -> m a
{-# INLINE fold1M #-}
fold1M = G.fold1M
-- | /O(n)/ Monadic fold with strict accumulator
foldM' :: (Monad m, Storable b) => (a -> b -> m a) -> a -> Vector b -> m a
{-# INLINE foldM' #-}
foldM' = G.foldM'
-- | /O(n)/ Monadic fold over non-empty vectors with strict accumulator
fold1M' :: (Monad m, Storable a) => (a -> a -> m a) -> Vector a -> m a
{-# INLINE fold1M' #-}
fold1M' = G.fold1M'
-- | /O(n)/ Monadic fold that discards the result
foldM_ :: (Monad m, Storable b) => (a -> b -> m a) -> a -> Vector b -> m ()
{-# INLINE foldM_ #-}
foldM_ = G.foldM_
-- | /O(n)/ Monadic fold over non-empty vectors that discards the result
fold1M_ :: (Monad m, Storable a) => (a -> a -> m a) -> Vector a -> m ()
{-# INLINE fold1M_ #-}
fold1M_ = G.fold1M_
-- | /O(n)/ Monadic fold with strict accumulator that discards the result
foldM'_ :: (Monad m, Storable b) => (a -> b -> m a) -> a -> Vector b -> m ()
{-# INLINE foldM'_ #-}
foldM'_ = G.foldM'_
-- | /O(n)/ Monadic fold over non-empty vectors with strict accumulator
-- that discards the result
fold1M'_ :: (Monad m, Storable a) => (a -> a -> m a) -> Vector a -> m ()
{-# INLINE fold1M'_ #-}
fold1M'_ = G.fold1M'_
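-- For illustration, a strict monadic fold in IO that prints each element
-- while summing (the values are invented):
--
-- > foldM' (\acc x -> do { print x; return (acc + x) }) 0 (fromList [1,2,3 :: Int])
-- >   -- prints 1, 2 and 3, then returns 6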
-- Prefix sums (scans)
-- -------------------
-- | /O(n)/ Prescan
--
-- @
-- prescanl f z = 'init' . 'scanl' f z
-- @
--
-- Example: @prescanl (+) 0 \<1,2,3,4\> = \<0,1,3,6\>@
--
prescanl :: (Storable a, Storable b) => (a -> b -> a) -> a -> Vector b -> Vector a
{-# INLINE prescanl #-}
prescanl = G.prescanl
-- | /O(n)/ Prescan with strict accumulator
prescanl' :: (Storable a, Storable b) => (a -> b -> a) -> a -> Vector b -> Vector a
{-# INLINE prescanl' #-}
prescanl' = G.prescanl'
-- | /O(n)/ Scan
--
-- @
-- postscanl f z = 'tail' . 'scanl' f z
-- @
--
-- Example: @postscanl (+) 0 \<1,2,3,4\> = \<1,3,6,10\>@
--
postscanl :: (Storable a, Storable b) => (a -> b -> a) -> a -> Vector b -> Vector a
{-# INLINE postscanl #-}
postscanl = G.postscanl
-- | /O(n)/ Scan with strict accumulator
postscanl' :: (Storable a, Storable b) => (a -> b -> a) -> a -> Vector b -> Vector a
{-# INLINE postscanl' #-}
postscanl' = G.postscanl'
-- | /O(n)/ Haskell-style scan
--
-- > scanl f z <x1,...,xn> = <y1,...,y(n+1)>
-- > where y1 = z
-- > yi = f y(i-1) x(i-1)
--
-- Example: @scanl (+) 0 \<1,2,3,4\> = \<0,1,3,6,10\>@
--
scanl :: (Storable a, Storable b) => (a -> b -> a) -> a -> Vector b -> Vector a
{-# INLINE scanl #-}
scanl = G.scanl
-- | /O(n)/ Haskell-style scan with strict accumulator
scanl' :: (Storable a, Storable b) => (a -> b -> a) -> a -> Vector b -> Vector a
{-# INLINE scanl' #-}
scanl' = G.scanl'
-- | /O(n)/ Scan over a non-empty vector
--
-- > scanl f <x1,...,xn> = <y1,...,yn>
-- > where y1 = x1
-- > yi = f y(i-1) xi
--
scanl1 :: Storable a => (a -> a -> a) -> Vector a -> Vector a
{-# INLINE scanl1 #-}
scanl1 = G.scanl1
-- | /O(n)/ Scan over a non-empty vector with a strict accumulator
scanl1' :: Storable a => (a -> a -> a) -> Vector a -> Vector a
{-# INLINE scanl1' #-}
scanl1' = G.scanl1'
-- | /O(n)/ Right-to-left prescan
--
-- @
-- prescanr f z = 'reverse' . 'prescanl' (flip f) z . 'reverse'
-- @
--
prescanr :: (Storable a, Storable b) => (a -> b -> b) -> b -> Vector a -> Vector b
{-# INLINE prescanr #-}
prescanr = G.prescanr
-- | /O(n)/ Right-to-left prescan with strict accumulator
prescanr' :: (Storable a, Storable b) => (a -> b -> b) -> b -> Vector a -> Vector b
{-# INLINE prescanr' #-}
prescanr' = G.prescanr'
-- | /O(n)/ Right-to-left scan
postscanr :: (Storable a, Storable b) => (a -> b -> b) -> b -> Vector a -> Vector b
{-# INLINE postscanr #-}
postscanr = G.postscanr
-- | /O(n)/ Right-to-left scan with strict accumulator
postscanr' :: (Storable a, Storable b) => (a -> b -> b) -> b -> Vector a -> Vector b
{-# INLINE postscanr' #-}
postscanr' = G.postscanr'
-- | /O(n)/ Right-to-left Haskell-style scan
scanr :: (Storable a, Storable b) => (a -> b -> b) -> b -> Vector a -> Vector b
{-# INLINE scanr #-}
scanr = G.scanr
-- | /O(n)/ Right-to-left Haskell-style scan with strict accumulator
scanr' :: (Storable a, Storable b) => (a -> b -> b) -> b -> Vector a -> Vector b
{-# INLINE scanr' #-}
scanr' = G.scanr'
-- | /O(n)/ Right-to-left scan over a non-empty vector
scanr1 :: Storable a => (a -> a -> a) -> Vector a -> Vector a
{-# INLINE scanr1 #-}
scanr1 = G.scanr1
-- | /O(n)/ Right-to-left scan over a non-empty vector with a strict
-- accumulator
scanr1' :: Storable a => (a -> a -> a) -> Vector a -> Vector a
{-# INLINE scanr1' #-}
scanr1' = G.scanr1'
-- Conversions - Lists
-- ------------------------
-- | /O(n)/ Convert a vector to a list
toList :: Storable a => Vector a -> [a]
{-# INLINE toList #-}
toList = G.toList
-- | /O(n)/ Convert a list to a vector
fromList :: Storable a => [a] -> Vector a
{-# INLINE fromList #-}
fromList = G.fromList
-- | /O(n)/ Convert the first @n@ elements of a list to a vector
--
-- @
-- fromListN n xs = 'fromList' ('take' n xs)
-- @
fromListN :: Storable a => Int -> [a] -> Vector a
{-# INLINE fromListN #-}
fromListN = G.fromListN
-- Conversions - Unsafe casts
-- --------------------------
-- | /O(1)/ Unsafely cast a vector from one element type to another.
-- The operation just changes the type of the underlying pointer and does not
-- modify the elements.
--
-- The resulting vector contains as many elements as can fit into the
-- underlying memory block.
--
unsafeCast :: forall a b. (Storable a, Storable b) => Vector a -> Vector b
{-# INLINE unsafeCast #-}
unsafeCast (Vector n fp)
= Vector ((n * sizeOf (undefined :: a)) `div` sizeOf (undefined :: b))
(castForeignPtr fp)
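-- For example (a hypothetical use), reinterpreting a byte vector as 32-bit
-- words; 8 bytes yield exactly 2 words, whatever their numeric values turn
-- out to be on a given platform:
--
-- > import Data.Word (Word8, Word32)
-- >
-- > bytes :: Vector Word8
-- > bytes = fromList [0 .. 7]
-- >
-- > words32 :: Vector Word32
-- > words32 = unsafeCast bytes   -- length words32 == 2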
-- Conversions - Mutable vectors
-- -----------------------------
-- | /O(1)/ Unsafely convert a mutable vector to an immutable one without
-- copying. The mutable vector may not be used after this operation.
unsafeFreeze
:: (Storable a, PrimMonad m) => MVector (PrimState m) a -> m (Vector a)
{-# INLINE unsafeFreeze #-}
unsafeFreeze = G.unsafeFreeze
-- | /O(1)/ Unsafely convert an immutable vector to a mutable one without
-- copying. The immutable vector may not be used after this operation.
unsafeThaw
:: (Storable a, PrimMonad m) => Vector a -> m (MVector (PrimState m) a)
{-# INLINE unsafeThaw #-}
unsafeThaw = G.unsafeThaw
-- | /O(n)/ Yield a mutable copy of the immutable vector.
thaw :: (Storable a, PrimMonad m) => Vector a -> m (MVector (PrimState m) a)
{-# INLINE thaw #-}
thaw = G.thaw
-- | /O(n)/ Yield an immutable copy of the mutable vector.
freeze :: (Storable a, PrimMonad m) => MVector (PrimState m) a -> m (Vector a)
{-# INLINE freeze #-}
freeze = G.freeze
-- | /O(n)/ Copy an immutable vector into a mutable one. The two vectors must
-- have the same length. This is not checked.
unsafeCopy
:: (Storable a, PrimMonad m) => MVector (PrimState m) a -> Vector a -> m ()
{-# INLINE unsafeCopy #-}
unsafeCopy = G.unsafeCopy
-- | /O(n)/ Copy an immutable vector into a mutable one. The two vectors must
-- have the same length.
copy :: (Storable a, PrimMonad m) => MVector (PrimState m) a -> Vector a -> m ()
{-# INLINE copy #-}
copy = G.copy
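-- For illustration, updating one element by copying through a mutable vector
-- (the helper name @setFirst@ is invented, and the input is assumed to be
-- non-empty):
--
-- > import qualified Data.Vector.Storable.Mutable as M
-- >
-- > setFirst :: Storable a => a -> Vector a -> IO (Vector a)
-- > setFirst x v = do
-- >   mv <- thaw v        -- O(n) copy into a mutable vector
-- >   M.write mv 0 x      -- mutate the copy in place
-- >   freeze mv           -- O(n) copy back to an immutable vector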
-- Conversions - Raw pointers
-- --------------------------
-- | /O(1)/ Create a vector from a 'ForeignPtr' with an offset and a length.
--
-- The data may not be modified through the 'ForeignPtr' afterwards.
--
-- If your offset is 0 it is more efficient to use 'unsafeFromForeignPtr0'.
unsafeFromForeignPtr :: Storable a
=> ForeignPtr a -- ^ pointer
-> Int -- ^ offset
-> Int -- ^ length
-> Vector a
{-# INLINE unsafeFromForeignPtr #-}
unsafeFromForeignPtr fp i n = unsafeFromForeignPtr0 fp' n
where
fp' = updPtr (`advancePtr` i) fp
{-# RULES
"unsafeFromForeignPtr fp 0 n -> unsafeFromForeignPtr0 fp n " forall fp n.
unsafeFromForeignPtr fp 0 n = unsafeFromForeignPtr0 fp n
#-}
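-- A possible use (the sizes and the helper name @mkVec@ are invented):
-- wrapping freshly allocated memory in a vector while skipping a 2-element
-- header at the front:
--
-- > import Foreign.ForeignPtr (mallocForeignPtrArray, withForeignPtr)
-- > import Foreign.Storable (pokeElemOff)
-- >
-- > mkVec :: IO (Vector Double)
-- > mkVec = do
-- >   fp <- mallocForeignPtrArray 10
-- >   withForeignPtr fp $ \p ->
-- >     mapM_ (\i -> pokeElemOff p i (fromIntegral i)) [0 .. 9]
-- >   return (unsafeFromForeignPtr fp 2 8)   -- elements at offsets 2..9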
-- | /O(1)/ Create a vector from a 'ForeignPtr' and a length.
--
-- It is assumed the pointer points directly to the data (no offset).
-- Use `unsafeFromForeignPtr` if you need to specify an offset.
--
-- The data may not be modified through the 'ForeignPtr' afterwards.
unsafeFromForeignPtr0 :: Storable a
=> ForeignPtr a -- ^ pointer
-> Int -- ^ length
-> Vector a
{-# INLINE unsafeFromForeignPtr0 #-}
unsafeFromForeignPtr0 fp n = Vector n fp
-- | /O(1)/ Yield the underlying 'ForeignPtr' together with the offset to the
-- data and its length. The data may not be modified through the 'ForeignPtr'.
unsafeToForeignPtr :: Storable a => Vector a -> (ForeignPtr a, Int, Int)
{-# INLINE unsafeToForeignPtr #-}
unsafeToForeignPtr (Vector n fp) = (fp, 0, n)
-- | /O(1)/ Yield the underlying 'ForeignPtr' together with its length.
--
-- You can assume the pointer points directly to the data (no offset).
--
-- The data may not be modified through the 'ForeignPtr'.
unsafeToForeignPtr0 :: Storable a => Vector a -> (ForeignPtr a, Int)
{-# INLINE unsafeToForeignPtr0 #-}
unsafeToForeignPtr0 (Vector n fp) = (fp, n)
-- | Pass a pointer to the vector's data to the IO action. The data may not be
-- modified through the 'Ptr'.
unsafeWith :: Storable a => Vector a -> (Ptr a -> IO b) -> IO b
{-# INLINE unsafeWith #-}
unsafeWith (Vector n fp) = withForeignPtr fp
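-- For illustration (the helper name @readFirst@ is invented and the vector is
-- assumed to be non-empty); a real use would typically pass the pointer to a
-- foreign C function instead:
--
-- > import Foreign.Storable (peekElemOff)
-- >
-- > readFirst :: Storable a => Vector a -> IO a
-- > readFirst v = unsafeWith v $ \p -> peekElemOff p 0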
|
rleshchinskiy/vector
|
Data/Vector/Storable.hs
|
bsd-3-clause
| 44,864
| 0
| 14
| 9,857
| 9,906
| 5,487
| 4,419
| -1
| -1
|
-----------------------------------------------------------------------------
-- |
-- Module : Text.Parsec.Token
-- Copyright : (c) Daan Leijen 1999-2001, (c) Paolo Martini 2007
-- License : BSD-style (see the LICENSE file)
--
-- Maintainer : derek.a.elkins@gmail.com
-- Stability : provisional
-- Portability : non-portable (uses local universal quantification: PolymorphicComponents)
--
-- A helper module to parse lexical elements (tokens). See 'makeTokenParser'
-- for a description of how to use it.
--
-----------------------------------------------------------------------------
{-# LANGUAGE PolymorphicComponents #-}
{-# OPTIONS_GHC -fno-warn-unused-do-bind -fno-warn-name-shadowing #-}
module Text.Parsec.Token
( LanguageDef
, GenLanguageDef (..)
, TokenParser
, GenTokenParser (..)
, makeTokenParser
) where
import Data.Char ( isAlpha, toLower, toUpper, isSpace, digitToInt )
import Data.List ( nub, sort )
import Control.Monad.Identity
import Text.Parsec.Prim
import Text.Parsec.Char
import Text.Parsec.Combinator
-----------------------------------------------------------
-- Language Definition
-----------------------------------------------------------
type LanguageDef st = GenLanguageDef String st Identity
-- | The @GenLanguageDef@ type is a record that contains all parameterizable
-- features of the 'Text.Parsec.Token' module. The module 'Text.Parsec.Language'
-- contains some default definitions.
data GenLanguageDef s u m
= LanguageDef {
-- | Describes the start of a block comment. Use the empty string if the
-- language doesn't support block comments. For example \"\/*\".
commentStart :: String,
-- | Describes the end of a block comment. Use the empty string if the
-- language doesn't support block comments. For example \"*\/\".
commentEnd :: String,
-- | Describes the start of a line comment. Use \"fail \"No Comments\"\"
-- if the language doesn't support line comments.
commentLine :: ParsecT s u m String,
-- | Set to 'True' if the language supports nested block comments.
nestedComments :: Bool,
-- | This parser should accept any start characters of identifiers. For
-- example @letter \<|> char \"_\"@.
identStart :: ParsecT s u m Char,
-- | This parser should accept any legal tail characters of identifiers.
-- For example @alphaNum \<|> char \"_\"@.
identLetter :: ParsecT s u m Char,
-- | This parser should accept any start characters of operators. For
-- example @oneOf \":!#$%&*+.\/\<=>?\@\\\\^|-~\"@
opStart :: ParsecT s u m Char,
-- | This parser should accept any legal tail characters of operators.
-- Note that this parser should even be defined if the language doesn't
    -- support user-defined operators; otherwise the 'reservedOp'
-- parser won't work correctly.
opLetter :: ParsecT s u m Char,
-- | The list of reserved identifiers.
reservedNames :: [String],
-- | The list of reserved operators.
reservedOpNames:: [String],
-- | Set to 'True' if the language is case sensitive.
caseSensitive :: Bool
}
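-- For illustration, one possible definition; every choice below is invented
-- for this example rather than being a default of this module:
--
-- > import Text.Parsec
-- > import Text.Parsec.Token
-- >
-- > simpleDef :: LanguageDef st
-- > simpleDef = LanguageDef
-- >   { commentStart    = "{-"
-- >   , commentEnd      = "-}"
-- >   , commentLine     = string "--"
-- >   , nestedComments  = True
-- >   , identStart      = letter <|> char '_'
-- >   , identLetter     = alphaNum <|> char '_'
-- >   , opStart         = oneOf ":!#$%&*+./<=>?@\\^|-~"
-- >   , opLetter        = oneOf ":!#$%&*+./<=>?@\\^|-~"
-- >   , reservedNames   = ["let", "in"]
-- >   , reservedOpNames = ["=", "->"]
-- >   , caseSensitive   = True
-- >   }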
-----------------------------------------------------------
-- A first class module: TokenParser
-----------------------------------------------------------
type TokenParser st = GenTokenParser String st Identity
-- | The type of the record that holds lexical parsers that work on
-- @s@ streams with state @u@ over a monad @m@.
data GenTokenParser s u m
= TokenParser {
-- | This lexeme parser parses a legal identifier. Returns the identifier
-- string. This parser will fail on identifiers that are reserved
-- words. Legal identifier (start) characters and reserved words are
-- defined in the 'LanguageDef' that is passed to
-- 'makeTokenParser'. An @identifier@ is treated as
-- a single token using 'try'.
identifier :: ParsecT s u m String,
-- | The lexeme parser @reserved name@ parses @symbol
-- name@, but it also checks that the @name@ is not a prefix of a
-- valid identifier. A @reserved@ word is treated as a single token
-- using 'try'.
reserved :: String -> ParsecT s u m (),
-- | This lexeme parser parses a legal operator. Returns the name of the
-- operator. This parser will fail on any operators that are reserved
-- operators. Legal operator (start) characters and reserved operators
-- are defined in the 'LanguageDef' that is passed to
-- 'makeTokenParser'. An @operator@ is treated as a
-- single token using 'try'.
operator :: ParsecT s u m String,
        -- | The lexeme parser @reservedOp name@ parses @symbol
-- name@, but it also checks that the @name@ is not a prefix of a
-- valid operator. A @reservedOp@ is treated as a single token using
-- 'try'.
reservedOp :: String -> ParsecT s u m (),
-- | This lexeme parser parses a single literal character. Returns the
        -- literal character value. This parser deals correctly with escape
-- sequences. The literal character is parsed according to the grammar
-- rules defined in the Haskell report (which matches most programming
-- languages quite closely).
charLiteral :: ParsecT s u m Char,
-- | This lexeme parser parses a literal string. Returns the literal
        -- string value. This parser deals correctly with escape sequences and
-- gaps. The literal string is parsed according to the grammar rules
-- defined in the Haskell report (which matches most programming
-- languages quite closely).
stringLiteral :: ParsecT s u m String,
-- | This lexeme parser parses a natural number (a positive whole
-- number). Returns the value of the number. The number can be
-- specified in 'decimal', 'hexadecimal' or
-- 'octal'. The number is parsed according to the grammar
-- rules in the Haskell report.
natural :: ParsecT s u m Integer,
-- | This lexeme parser parses an integer (a whole number). This parser
        -- is like 'natural' except that it can be prefixed with a
        -- sign (i.e. \'-\' or \'+\'). Returns the value of the number. The
-- number can be specified in 'decimal', 'hexadecimal'
-- or 'octal'. The number is parsed according
-- to the grammar rules in the Haskell report.
integer :: ParsecT s u m Integer,
-- | This lexeme parser parses a floating point value. Returns the value
-- of the number. The number is parsed according to the grammar rules
-- defined in the Haskell report.
float :: ParsecT s u m Double,
-- | This lexeme parser parses either 'natural' or a 'float'.
        -- Returns the value of the number. This parser deals with
-- any overlap in the grammar rules for naturals and floats. The number
-- is parsed according to the grammar rules defined in the Haskell report.
naturalOrFloat :: ParsecT s u m (Either Integer Double),
-- | Parses a positive whole number in the decimal system. Returns the
-- value of the number.
decimal :: ParsecT s u m Integer,
-- | Parses a positive whole number in the hexadecimal system. The number
-- should be prefixed with \"0x\" or \"0X\". Returns the value of the
-- number.
hexadecimal :: ParsecT s u m Integer,
-- | Parses a positive whole number in the octal system. The number
-- should be prefixed with \"0o\" or \"0O\". Returns the value of the
-- number.
octal :: ParsecT s u m Integer,
-- | Lexeme parser @symbol s@ parses 'string' @s@ and skips
-- trailing white space.
symbol :: String -> ParsecT s u m String,
        -- | @lexeme p@ first applies parser @p@ and then the 'whiteSpace'
-- parser, returning the value of @p@. Every lexical
        -- token (lexeme) is defined using @lexeme@; this way every parse
-- starts at a point without white space. Parsers that use @lexeme@ are
-- called /lexeme/ parsers in this document.
--
-- The only point where the 'whiteSpace' parser should be
-- called explicitly is the start of the main parser in order to skip
-- any leading white space.
--
-- > mainParser = do{ whiteSpace
-- > ; ds <- many (lexeme digit)
-- > ; eof
-- > ; return (sum ds)
-- > }
lexeme :: forall a. ParsecT s u m a -> ParsecT s u m a,
-- | Parses any white space. White space consists of /zero/ or more
-- occurrences of a 'space', a line comment or a block (multi
-- line) comment. Block comments may be nested. How comments are
-- started and ended is defined in the 'LanguageDef'
-- that is passed to 'makeTokenParser'.
whiteSpace :: ParsecT s u m (),
-- | Lexeme parser @parens p@ parses @p@ enclosed in parenthesis,
-- returning the value of @p@.
parens :: forall a. ParsecT s u m a -> ParsecT s u m a,
-- | Lexeme parser @braces p@ parses @p@ enclosed in braces (\'{\' and
-- \'}\'), returning the value of @p@.
braces :: forall a. ParsecT s u m a -> ParsecT s u m a,
-- | Lexeme parser @angles p@ parses @p@ enclosed in angle brackets (\'\<\'
-- and \'>\'), returning the value of @p@.
angles :: forall a. ParsecT s u m a -> ParsecT s u m a,
-- | Lexeme parser @brackets p@ parses @p@ enclosed in brackets (\'[\'
-- and \']\'), returning the value of @p@.
brackets :: forall a. ParsecT s u m a -> ParsecT s u m a,
-- | DEPRECATED: Use 'brackets'.
squares :: forall a. ParsecT s u m a -> ParsecT s u m a,
        -- | Lexeme parser @semi@ parses the character \';\' and skips any
-- trailing white space. Returns the string \";\".
semi :: ParsecT s u m String,
-- | Lexeme parser @comma@ parses the character \',\' and skips any
-- trailing white space. Returns the string \",\".
comma :: ParsecT s u m String,
-- | Lexeme parser @colon@ parses the character \':\' and skips any
-- trailing white space. Returns the string \":\".
colon :: ParsecT s u m String,
-- | Lexeme parser @dot@ parses the character \'.\' and skips any
-- trailing white space. Returns the string \".\".
dot :: ParsecT s u m String,
-- | Lexeme parser @semiSep p@ parses /zero/ or more occurrences of @p@
-- separated by 'semi'. Returns a list of values returned by
-- @p@.
semiSep :: forall a . ParsecT s u m a -> ParsecT s u m [a],
-- | Lexeme parser @semiSep1 p@ parses /one/ or more occurrences of @p@
-- separated by 'semi'. Returns a list of values returned by @p@.
semiSep1 :: forall a . ParsecT s u m a -> ParsecT s u m [a],
-- | Lexeme parser @commaSep p@ parses /zero/ or more occurrences of
-- @p@ separated by 'comma'. Returns a list of values returned
-- by @p@.
commaSep :: forall a . ParsecT s u m a -> ParsecT s u m [a],
-- | Lexeme parser @commaSep1 p@ parses /one/ or more occurrences of
-- @p@ separated by 'comma'. Returns a list of values returned
-- by @p@.
commaSep1 :: forall a . ParsecT s u m a -> ParsecT s u m [a]
}
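-- For illustration, running one of the field parsers once a lexer has been
-- built (the use of 'haskellDef' and the input string are only an example,
-- mirroring the usage shown for 'makeTokenParser' below):
--
-- > import Text.Parsec (parse)
-- > import qualified Text.Parsec.Token as P
-- > import Text.Parsec.Language (haskellDef)
-- >
-- > lexer :: P.TokenParser ()
-- > lexer = P.makeTokenParser haskellDef
-- >
-- > -- parse (P.integer lexer) "" "42  "   -- Right 42 (trailing space skipped)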
-----------------------------------------------------------
-- Given a LanguageDef, create a token parser.
-----------------------------------------------------------
-- | The expression @makeTokenParser language@ creates a 'GenTokenParser'
-- record that contains lexical parsers that are
-- defined using the definitions in the @language@ record.
--
-- The use of this function is quite stylized: one imports the
-- appropriate language definition and selects the lexical parsers that
-- are needed from the resulting 'GenTokenParser'.
--
-- > module Main where
-- >
-- > import Text.Parsec
-- > import qualified Text.Parsec.Token as P
-- > import Text.Parsec.Language (haskellDef)
-- >
-- > -- The parser
-- > ...
-- >
-- > expr = parens expr
-- > <|> identifier
-- > <|> ...
-- >
-- >
-- > -- The lexer
-- > lexer = P.makeTokenParser haskellDef
-- >
-- > parens = P.parens lexer
-- > braces = P.braces lexer
-- > identifier = P.identifier lexer
-- > reserved = P.reserved lexer
-- > ...
makeTokenParser :: (Stream s m Char)
=> GenLanguageDef s u m -> GenTokenParser s u m
makeTokenParser languageDef
= TokenParser{ identifier = identifier
, reserved = reserved
, operator = operator
, reservedOp = reservedOp
, charLiteral = charLiteral
, stringLiteral = stringLiteral
, natural = natural
, integer = integer
, float = float
, naturalOrFloat = naturalOrFloat
, decimal = decimal
, hexadecimal = hexadecimal
, octal = octal
, symbol = symbol
, lexeme = lexeme
, whiteSpace = whiteSpace
, parens = parens
, braces = braces
, angles = angles
, brackets = brackets
, squares = brackets
, semi = semi
, comma = comma
, colon = colon
, dot = dot
, semiSep = semiSep
, semiSep1 = semiSep1
, commaSep = commaSep
, commaSep1 = commaSep1
}
where
-----------------------------------------------------------
-- Bracketing
-----------------------------------------------------------
parens p = between (symbol "(") (symbol ")") p
braces p = between (symbol "{") (symbol "}") p
angles p = between (symbol "<") (symbol ">") p
brackets p = between (symbol "[") (symbol "]") p
semi = symbol ";"
comma = symbol ","
dot = symbol "."
colon = symbol ":"
commaSep p = sepBy p comma
semiSep p = sepBy p semi
commaSep1 p = sepBy1 p comma
semiSep1 p = sepBy1 p semi
-----------------------------------------------------------
-- Chars & Strings
-----------------------------------------------------------
charLiteral = lexeme (between (char '\'')
(char '\'' <?> "end of character")
characterChar )
<?> "character"
characterChar = charLetter <|> charEscape
<?> "literal character"
charEscape = do{ _ <- char '\\'; escapeCode }
charLetter = satisfy (\c -> (c /= '\'') && (c /= '\\') && (c > '\026'))
stringLiteral = lexeme (
do{ str <- between (char '"')
(char '"' <?> "end of string")
(many stringChar)
; return (foldr (maybe id (:)) "" str)
}
<?> "literal string")
stringChar = do{ c <- stringLetter; return (Just c) }
<|> stringEscape
<?> "string character"
stringLetter = satisfy (\c -> (c /= '"') && (c /= '\\') && (c > '\026'))
stringEscape = do{ _ <- char '\\'
; do{ _ <- escapeGap ; return Nothing }
<|> do{ _ <- escapeEmpty; return Nothing }
<|> do{ esc <- escapeCode; return (Just esc) }
}
escapeEmpty = char '&'
escapeGap = do{ _ <- many1 space
; char '\\' <?> "end of string gap"
}
-- escape codes
escapeCode = charEsc <|> charNum <|> charAscii <|> charControl
<?> "escape code"
charControl = do{ char '^'
; code <- upper
; return (toEnum (fromEnum code - fromEnum 'A'))
}
charNum = do{ code <- decimal
<|> do{ char 'o'; number 8 octDigit }
<|> do{ char 'x'; number 16 hexDigit }
; return (toEnum (fromInteger code))
}
charEsc = choice (map parseEsc escMap)
where
parseEsc (c,code) = do{ char c; return code }
charAscii = choice (map parseAscii asciiMap)
where
parseAscii (asc,code) = try (do{ string asc; return code })
-- escape code tables
escMap = zip ("abfnrtv\\\"\'") ("\a\b\f\n\r\t\v\\\"\'")
asciiMap = zip (ascii3codes ++ ascii2codes) (ascii3 ++ ascii2)
ascii2codes = ["BS","HT","LF","VT","FF","CR","SO","SI","EM",
"FS","GS","RS","US","SP"]
ascii3codes = ["NUL","SOH","STX","ETX","EOT","ENQ","ACK","BEL",
"DLE","DC1","DC2","DC3","DC4","NAK","SYN","ETB",
"CAN","SUB","ESC","DEL"]
ascii2 = ['\BS','\HT','\LF','\VT','\FF','\CR','\SO','\SI',
'\EM','\FS','\GS','\RS','\US','\SP']
ascii3 = ['\NUL','\SOH','\STX','\ETX','\EOT','\ENQ','\ACK',
'\BEL','\DLE','\DC1','\DC2','\DC3','\DC4','\NAK',
'\SYN','\ETB','\CAN','\SUB','\ESC','\DEL']
-----------------------------------------------------------
-- Numbers
-----------------------------------------------------------
naturalOrFloat = lexeme (natFloat) <?> "number"
float = lexeme floating <?> "float"
integer = lexeme int <?> "integer"
natural = lexeme nat <?> "natural"
-- floats
floating = do{ n <- decimal
; fractExponent n
}
natFloat = do{ char '0'
; zeroNumFloat
}
<|> decimalFloat
zeroNumFloat = do{ n <- hexadecimal <|> octal
; return (Left n)
}
<|> decimalFloat
<|> fractFloat 0
<|> return (Left 0)
decimalFloat = do{ n <- decimal
; option (Left n)
(fractFloat n)
}
fractFloat n = do{ f <- fractExponent n
; return (Right f)
}
fractExponent n = do{ fract <- fraction
; expo <- option 1.0 exponent'
; return ((fromInteger n + fract)*expo)
}
<|>
do{ expo <- exponent'
; return ((fromInteger n)*expo)
}
fraction = do{ char '.'
; digits <- many1 digit <?> "fraction"
; return (foldr op 0.0 digits)
}
<?> "fraction"
where
op d f = (f + fromIntegral (digitToInt d))/10.0
exponent' = do{ oneOf "eE"
; f <- sign
; e <- decimal <?> "exponent"
; return (power (f e))
}
<?> "exponent"
where
power e | e < 0 = 1.0/power(-e)
| otherwise = fromInteger (10^e)
-- integers and naturals
int = do{ f <- lexeme sign
; n <- nat
; return (f n)
}
sign = (char '-' >> return negate)
<|> (char '+' >> return id)
<|> return id
nat = zeroNumber <|> decimal
zeroNumber = do{ char '0'
; hexadecimal <|> octal <|> decimal <|> return 0
}
<?> ""
decimal = number 10 digit
hexadecimal = do{ oneOf "xX"; number 16 hexDigit }
octal = do{ oneOf "oO"; number 8 octDigit }
number base baseDigit
= do{ digits <- many1 baseDigit
; let n = foldl (\x d -> base*x + toInteger (digitToInt d)) 0 digits
; seq n (return n)
}
-----------------------------------------------------------
-- Operators & reserved ops
-----------------------------------------------------------
reservedOp name =
lexeme $ try $
do{ string name
; notFollowedBy (opLetter languageDef) <?> ("end of " ++ show name)
}
operator =
lexeme $ try $
do{ name <- oper
; if (isReservedOp name)
then unexpected ("reserved operator " ++ show name)
else return name
}
oper =
do{ c <- (opStart languageDef)
; cs <- many (opLetter languageDef)
; return (c:cs)
}
<?> "operator"
isReservedOp name =
isReserved (sort (reservedOpNames languageDef)) name
-----------------------------------------------------------
-- Identifiers & Reserved words
-----------------------------------------------------------
reserved name =
lexeme $ try $
do{ caseString name
; notFollowedBy (identLetter languageDef) <?> ("end of " ++ show name)
}
caseString name
| caseSensitive languageDef = string name
| otherwise = do{ walk name; return name }
where
walk [] = return ()
walk (c:cs) = do{ caseChar c <?> msg; walk cs }
caseChar c | isAlpha c = char (toLower c) <|> char (toUpper c)
| otherwise = char c
msg = show name
identifier =
lexeme $ try $
do{ name <- ident
; if (isReservedName name)
then unexpected ("reserved word " ++ show name)
else return name
}
ident
= do{ c <- identStart languageDef
; cs <- many (identLetter languageDef)
; return (c:cs)
}
<?> "identifier"
isReservedName name
= isReserved theReservedNames caseName
where
caseName | caseSensitive languageDef = name
| otherwise = map toLower name
isReserved names name
= scan names
where
scan [] = False
scan (r:rs) = case (compare r name) of
LT -> scan rs
EQ -> True
GT -> False
theReservedNames
| caseSensitive languageDef = sortedNames
| otherwise = map (map toLower) sortedNames
where
sortedNames = sort (reservedNames languageDef)
-----------------------------------------------------------
-- White space & symbols
-----------------------------------------------------------
symbol name
= lexeme (string name)
lexeme p
= do{ x <- p; whiteSpace; return x }
--whiteSpace
whiteSpace
| noMulti = skipMany (simpleSpace <|> oneLineComment <?> "")
| otherwise = skipMany (simpleSpace <|> oneLineComment <|> multiLineComment <?> "")
where
noMulti = null (commentStart languageDef)
simpleSpace =
skipMany1 (satisfy isSpace)
oneLineComment =
do{ try (commentLine languageDef)
; skipMany (satisfy (/= '\n'))
; return ()
}
multiLineComment =
do { try (string (commentStart languageDef))
; inComment
}
inComment
| nestedComments languageDef = inCommentMulti
| otherwise = inCommentSingle
inCommentMulti
= do{ try (string (commentEnd languageDef)) ; return () }
<|> do{ multiLineComment ; inCommentMulti }
<|> do{ skipMany1 (noneOf startEnd) ; inCommentMulti }
<|> do{ oneOf startEnd ; inCommentMulti }
<?> "end of comment"
where
startEnd = nub (commentEnd languageDef ++ commentStart languageDef)
inCommentSingle
= do{ try (string (commentEnd languageDef)); return () }
<|> do{ skipMany1 (noneOf startEnd) ; inCommentSingle }
<|> do{ oneOf startEnd ; inCommentSingle }
<?> "end of comment"
where
startEnd = nub (commentEnd languageDef ++ commentStart languageDef)
|
maurer/15-411-Haskell-Base-Code
|
src/Text/Parsec/Token.hs
|
bsd-3-clause
| 25,627
| 0
| 18
| 9,331
| 4,204
| 2,295
| 1,909
| -1
| -1
|
{-# LANGUAGE TypeSynonymInstances #-}
-----------------------------------------------------------------------------
-- |
-- Module : Berp.Base.Identity
-- Copyright : (c) 2010 Bernie Pope
-- License : BSD-style
-- Maintainer : florbitous@gmail.com
-- Stability : experimental
-- Portability : ghc
--
-- A unique identity. All Python objects have a unique identity. In CPython
-- the identity is the machine address of the object. That relies on the
-- object being kept at a fixed virtual address in memory. We can't do the
-- same thing in Haskell because the garbage collector can move data around.
--
-- TODO: consider lazy identities, which are only constructed on demand.
--
-----------------------------------------------------------------------------
module Berp.Base.Identity (Identity, newIdentity) where
import Berp.Base.Unique
import Berp.Base.Hash (Hash (..))
import Berp.Base.LiftedIO (MonadIO, liftIO)
type Identity = Unique
newIdentity :: MonadIO m => m Unique
newIdentity = liftIO newUnique
instance Hash Identity where
hash = hashUnique
{-
Some comments on this important module.
All Python objects have an identity which is unique to the object.
The identity of an object can be obtained by the primitive function id().
id is a "built in type or function".
It returns an integer (class 'int'). According to the __doc__ for id
in Python 3.0 it says:
id(...)
id(object) -> integer
Return the identity of an object. This is guaranteed to be unique among
simultaneously existing objects. (Hint: it's the object's memory address.)
There's probably Python code out there which depends on the result being
an actual integer. But it would be nicer if it returned an abstract type.
There's also a builtin called hash():
hash(...)
hash(object) -> integer
Return a hash value for the object. Two objects with the same value have
the same hash value. The reverse is not necessarily true, but likely.
In some cases the hash function uses the identity of the object to obtain the hash
value.
The hash is quite useful, particularly because it is used to allow an object to be
a key in a dictionary.
CPython's garbage collector does not move objects allocated on the heap.
This means it can use the address of the object as its
identity. Obviously this is problematic if we want to use GHC's collector which
does move objects.
Thus we must generate a unique identity for all objects when they are constructed.
A couple of important considerations:
a) The scheme must scale. We should not have any limit on the number of
identities that we can generate.
b) As computation time goes on we'd like to keep
a handle on the size of individual identities. A constant size would be
ideal, but we might allow for growth in the size of the identity value
if it has reasonable asymptotic behaviour.
c) It should work well with threads. Global counters must be atomically
updated.
d) It is better if the scheme is portable (does not rely on deep GHC magic).
e) Should be fast.
A couple of options for implementation:
1) A global mutable Integer counter protected by an MVar.
- Satisfies a).
     - Size of counter grows logarithmically, but very slowly, so may be
practical for the vast majority of applications. So probably
satisfies b).
- Will work with threads, but at what cost? Each time an object
is constructed the running thread must take the lock on the MVar,
increment the counter, and release the lock. Incrementing an
Integer is not trivial, so there may be lock contention.
Probably satisfies c), but the significance of the time costs
are unknown.
- MVars are not too magical, so probably satisfies d).
2) A Stable Name.
- Satisfies a). The number of stable names is only limited to the
number of objects in memory (I think).
- Satisfies b). The size of a stable name is constant. (good).
- Satisfies c). I don't think there is any issue with thread
contention.
- I think stable names are part of the FFI, so should be portable.
Better check this. However, I don't think they work with parallel
Haskell at present. Is this important? Hard to say.
Regarding the speed of each method: it is hard to say without measuring
   them on real programs. My intuition is that Stable Names have some advantage
in multi-threaded programs because they don't go via MVars (or maybe they
do, if the stable name table is locked in the runtime - better check this).
I'm also a bit concerned that Stable Names were not designed to support a very
large number of objects, and so may perform badly on Python programs which
allocate many objects.
-}
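{-
A minimal sketch of option 1 above (a global Integer counter protected by an
MVar), shown only as an illustration.  It is not part of berp; the names
"counterRef" and "newCounterIdentity" are invented for the example.

    import Control.Concurrent.MVar
    import System.IO.Unsafe (unsafePerformIO)

    -- One process-wide counter, created once (standard NOINLINE idiom).
    {-# NOINLINE counterRef #-}
    counterRef :: MVar Integer
    counterRef = unsafePerformIO (newMVar 0)

    -- Atomically hand out the current value and store its successor.
    newCounterIdentity :: IO Integer
    newCounterIdentity =
        modifyMVar counterRef (\n -> let n' = n + 1 in n' `seq` return (n', n))
-}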
|
bjpop/berp
|
libs/src/Berp/Base/Identity.hs
|
bsd-3-clause
| 4,905
| 0
| 6
| 1,136
| 111
| 74
| 37
| 10
| 1
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ExtendedDefaultRules #-}
module Lucid.Foundation.Structure.BlockGrid where
import qualified Data.Text as T
import Data.Monoid
small_block_grid_ :: Int -> T.Text
small_block_grid_ n = " small-block-grid-" <> (T.pack $ show n) <> " "
medium_block_grid_ :: Int -> T.Text
medium_block_grid_ n = " medium-block-grid-" <> (T.pack $ show n) <> " "
large_block_grid_ :: Int -> T.Text
large_block_grid_ n = " large-block-grid-" <> (T.pack $ show n) <> " "
|
athanclark/lucid-foundation
|
src/Lucid/Foundation/Structure/BlockGrid.hs
|
bsd-3-clause
| 501
| 0
| 9
| 78
| 140
| 77
| 63
| 11
| 1
|
module TestParsing (parsingTests) where
import Data (LispVal(..))
import Parsing(parseExpr)
import Test.Hspec
import Test.QuickCheck hiding (Result)
import qualified Text.Parsec as P
import TestArbitraryData()
parsingTests :: Spec
parsingTests =
describe "Parsing tests" $ do
parsingNumbers
parsingStrings
parsingLists
parsingDottedLists
parsingItself
-- Utility functions and type alias
type Result = Either P.ParseError LispVal
parser :: String -> String -> Result
parser = P.parse parseExpr
parses :: Result -> LispVal -> Bool
parses res value = res == Right value
-- STRINGS
parseString :: String -> Result
parseString = parser "string"
parsingStrings :: Spec
parsingStrings = describe "Parsing Strings" $ do
emptyString
normalString
escapedQuotesString
escapedCharacters
emptyString :: Spec
emptyString = it "can parse empty string" $
parseString "\"\"" `parses` String ""
normalString :: Spec
normalString = it "can parse \"Hello World!\"" $
parseString "\"Hello World!\"" `parses` String "Hello World!"
escapedQuotesString :: Spec
escapedQuotesString = it "can parse a string with \\\"escaped quotes \\\" " $
parseString "\"\\\"\"" `parses` String "\""
escapedCharacters :: Spec
escapedCharacters = it "can parse a string with \\\" \\n \\t \\r \\\\" $
parseString "\" \\n\\r\\\\ \"" `parses` String " \n\r\\ "
-- NUMBERS
parseNumber :: String -> Result
parseNumber = parser "number"
parsingNumbers :: Spec
parsingNumbers = describe "Parsing integers:" $ do
it "parses a random integer" $ property prop_integerAreParsed
it "parses +10" $
parseNumber "+10" `parses` Number 10
prop_integerAreParsed :: Integer -> Bool
prop_integerAreParsed num = parseNumber (show num) `parses` Number num
-- LISTS
parseList :: String -> Result
parseList = parser "list"
parsingLists :: Spec
parsingLists = describe "Parsing lists" $ do
it "parses an empty list" $
parseList "()" `parses` List []
it "parses (2 23 #t) list" $
parseList "(2 23 #t)" `parses` List [Number 2, Number 23, Bool True]
it "parses nested list: (2 (4 59) #t)" $
parseList "(2 (4 59) #t)" `parses` List [Number 2, List [Number 4, Number 59], Bool True]
-- DOTTED LISTS
parseDotList :: String -> Result
parseDotList = parser "dotted list"
parsingDottedLists :: Spec
parsingDottedLists = describe "Parsing dotted lists" $
it "parses dotted list (#t . #t)" $
parseDotList "(#t . #t)" `parses` DottedList [Bool True] (Bool True)
-- SHOW
parsingItself :: Spec
parsingItself = describe "parsing string repr of LispVals" $ do
it "can parse show repr of String" prop_parseShowString
it "can parse show repr of Number" prop_parseShowNumber
it "can parse show repr of Bool" prop_parseShowBool
it "can parse arbitrary LispVal" $ property prop_parseArbitraryLispVal
prop_parseShowString :: Property
prop_parseShowString = property $ prop_parseArbitraryLispVal . String
prop_parseShowNumber :: Property
prop_parseShowNumber = property $ prop_parseArbitraryLispVal . Number
prop_parseShowBool :: Property
prop_parseShowBool = property $ prop_parseArbitraryLispVal . Bool
prop_parseArbitraryLispVal :: LispVal -> Bool
prop_parseArbitraryLispVal lispVal =
parser "arbitrary" (show lispVal) `parses` lispVal
|
davideGiovannini/scheme-repl
|
test/TestParsing.hs
|
bsd-3-clause
| 3,300
| 0
| 13
| 590
| 788
| 404
| 384
| 80
| 1
|
{- |
Module : Text.Paraphrase.Debug
Description : Debugging utils to test parsers
Copyright : (c) Ivan Lazar Miljenovic
License : 3-Clause BSD-style
Maintainer : Ivan.Miljenovic@gmail.com
Extra utility functions to obtain internal state of the parser.
-}
module Text.Paraphrase.Debug where
import Text.Paraphrase.Errors (ParseError (LogRequested), ParsingErrors,
createFinalLog)
import Text.Paraphrase.Types
-- -----------------------------------------------------------------------------
isParserCommitted :: Parser e s Bool
isParserCommitted = P $ \ pSt _fl sc -> sc pSt (isCommitted pSt)
getCurrentLog :: Parser e s (ParsingErrors e s)
getCurrentLog = P $ \ pSt _fl sc ->
sc pSt (createFinalLog (mergedLog pSt) LogRequested (input pSt))
|
ivan-m/paraphrase
|
src/Text/Paraphrase/Debug.hs
|
bsd-3-clause
| 810
| 0
| 11
| 163
| 142
| 79
| 63
| 9
| 1
|
--------------------------------------------------------------------------------
-- |
-- Module : Database.EventStore.Internal.Utils
-- Copyright : (C) 2016 Yorick Laupa
-- License : (see the file LICENSE)
--
-- Maintainer : Yorick Laupa <yo.eight@gmail.com>
-- Stability : provisional
-- Portability : non-portable
--
--------------------------------------------------------------------------------
module Database.EventStore.Internal.Utils (prettyWord8) where
--------------------------------------------------------------------------------
import Prelude (String)
import Numeric
--------------------------------------------------------------------------------
import Database.EventStore.Internal.Prelude
--------------------------------------------------------------------------------
prettyWord8 :: Word8 -> String
prettyWord8 w = "0x" <> padding (showHex w "")
--------------------------------------------------------------------------------
padding :: String -> String
padding [x] = ['0',x]
padding xs = xs
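-- Worked examples (illustration only):
-- prettyWord8 10 == "0x0a" and prettyWord8 255 == "0xff".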
|
YoEight/eventstore
|
Database/EventStore/Internal/Utils.hs
|
bsd-3-clause
| 1,018
| 0
| 8
| 85
| 112
| 71
| 41
| 9
| 1
|
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances, OverlappingInstances, OverloadedStrings, RecordWildCards #-}
module Demo3Shared (module Demo3SharedNOVCHAN,
module Demo3Vchan) where
import Demo3SharedNOVCHAN
import Demo3Vchan
import qualified Control.Monad.Trans.State as T
import Control.Monad.Trans
|
armoredsoftware/protocol
|
tpm/mainline/shared/Demo3Shared.hs
|
bsd-3-clause
| 317
| 2
| 4
| 37
| 39
| 28
| 11
| 7
| 0
|
module DogTypes where
data PugType = PugData
data HuskyType a = HuskyData
data DogueDeBordeaux doge = DogueDeBordeaux doge
-- Can't do this
-- a isn't in scope as specified by the left
-- side of the = sign
-- data Foo = Bar a
myPug = PugData :: PugType
myPug' = PugData -- we don't have to be explicit
myHusky :: HuskyType a
myHusky = HuskyData
-- a remains polymorphic
myOtherHusky :: Num a => HuskyType a
myOtherHusky = HuskyData
-- a gets scoped to Num a
myOtherOtherHusky :: HuskyType [[[Int]]]
myOtherOtherHusky = HuskyData
myDoge :: DogueDeBordeaux Int
myDoge = DogueDeBordeaux 10
-- String doesn't agree with 10
-- myDoge' :: DogueDeBordeaux String
-- myDoge' = DogueDeBordeaux 10
data Doggies a =
Husky a
| Mastiff a
deriving (Eq, Show)
-- Chapter 11 Exercises: Dog Types
-- 1. Is Doggies a type or data constructor?
-- type
-- 2. What is the kind of Doggies?
-- Doggies :: * -> *
-- 3. What is the kind of Doggies String?
-- Doggies String :: *
-- 4. What is the type of Husky 10?
-- Husky 10 :: Num a => Doggies a
-- 5. What is the type of Husky (10 :: Integer)?
-- Husky (10 :: Integer) :: Doggies Integer
-- 6. What is the type of Mastiff "Scooby Doo"?
-- Mastiff "Scooby Doo" :: Doggies [Char]
-- 7. Is DogueDeBordeaux a type or data constructor?
-- both
-- 8. What is the type of DogueDeBordeaux?
-- My answer: a -> DogueDeBordeaux a
-- Real answer:
-- DogueDeBordeaux :: doge -> DogueDeBordeaux doge
-- 9. What is the type of DogueDeBordeaux "doggie"?
-- My answer: DogueDeBordeaux [Char] :: DogueDeBordeaux [Char]
-- Real answer: (My answer mixes term and type-level)
-- DogueDeBordeaux "doggie" :: DogueDeBordeaux [Char]
|
brodyberg/Notes
|
ProjectRosalind.hsproj/LearnHaskell/lib/HaskellBook/DogTypesChapter11.hs
|
mit
| 1,671
| 0
| 8
| 328
| 172
| 112
| 60
| 18
| 1
|
{-# LANGUAGE NoImplicitPrelude, DeriveFunctor, FlexibleInstances, GeneralizedNewtypeDeriving, MultiParamTypeClasses, BangPatterns, RecordWildCards, TypeFamilies, TemplateHaskell #-}
module Lamdu.Infer.Internal.Monad
( Results(..), subst, constraints
, Context(..), ctxResults, initialContext
, InferCtx(..), inferCtx
, Infer
, throwError
, tell, tellSubst
, tellRowConstraint
, listen, listenNoTell
, getSubst
, listenSubst
, getSkolems, addSkolems
, narrowTVScope, getSkolemsInScope
, VarKind
, freshInferredVar, freshInferredVarName
) where
import Control.Lens (Lens')
import qualified Control.Lens as Lens
import Control.Lens.Operators
import Control.Lens.Tuple
import Control.Monad.Trans.State (StateT(..))
import qualified Control.Monad.Trans.State as State
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Semigroup (Semigroup(..))
import qualified Data.Set as Set
import Data.String (IsString(..))
import Lamdu.Calc.Type (Type)
import qualified Lamdu.Calc.Type as T
import Lamdu.Calc.Type.Constraints (Constraints(..))
import qualified Lamdu.Calc.Type.Constraints as Constraints
import qualified Lamdu.Calc.Type.Vars as TV
import Lamdu.Infer.Error (Error)
import qualified Lamdu.Infer.Error as Err
import qualified Lamdu.Infer.Internal.Constraints as Constraints
import Lamdu.Infer.Internal.Scope (SkolemScope)
import qualified Lamdu.Infer.Internal.Scope as Scope
import Lamdu.Infer.Internal.Subst (Subst)
import qualified Lamdu.Infer.Internal.Subst as Subst
import Prelude.Compat
data SkolemsInScope = SkolemsInScope
{ _sisTVs :: Map T.TypeVar SkolemScope
, _sisRVs :: Map T.RowVar SkolemScope
} deriving (Eq, Ord)
instance Semigroup SkolemsInScope where
SkolemsInScope tvs0 rvs0 <> SkolemsInScope tvs1 rvs1 =
SkolemsInScope (tvs0 <> tvs1) (rvs0 <> rvs1)
instance Monoid SkolemsInScope where
mempty = SkolemsInScope mempty mempty
mappend = (<>)
Lens.makeLenses ''SkolemsInScope
class Subst.HasVar t => VarKind t where
skolemsInScopeMap :: Lens' SkolemsInScope (Map (T.Var t) SkolemScope)
instance VarKind Type where
skolemsInScopeMap = sisTVs
{-# INLINE skolemsInScopeMap #-}
instance VarKind T.Row where
skolemsInScopeMap = sisRVs
{-# INLINE skolemsInScopeMap #-}
data InferState = InferState
{ _inferSupply :: {-# UNPACK #-}!Int
, _inferSkolems :: {-# UNPACK #-}!TV.TypeVars
, _inferSkolemConstraints :: !Constraints
, _inferSkolemsInScope :: {-# UNPACK #-}!SkolemsInScope
} deriving (Eq, Ord)
Lens.makeLenses ''InferState
data Results = Results
{ _subst :: {-# UNPACK #-} !Subst
, _constraints :: !Constraints
} deriving (Eq, Ord)
Lens.makeLenses ''Results
emptyResults :: Results
emptyResults = Results mempty mempty
{-# INLINE emptyResults #-}
verifySkolemConstraints :: InferState -> Constraints -> Either Error ()
verifySkolemConstraints state newConstraints
| Constraints.null unexpectedConstraints = Right ()
| otherwise = Left $ Err.UnexpectedSkolemConstraint unexpectedConstraints
where
unexpectedConstraints =
Constraints.intersect (_inferSkolems state) newConstraints
`Constraints.difference` _inferSkolemConstraints state
{-# INLINE verifySkolemConstraints #-}
appendResults :: Context -> Results -> Either Error Results
appendResults (Context (Results s0 c0) state) (Results s1 c1) =
do
Constraints.Substituted newC c0' <- Constraints.applySubst s1 c0
-- TODO: c1 is usually empty, but c0' will contain ALL of c0,
-- even though we're only interested in the NEW constraints
-- that come from applySubst. Change applySubst to return a
-- set of NEW constraints separately from the SUBST
-- constraints and only verify skolem constraints against the
-- new ones.
verifySkolemConstraints state (newC <> c1)
pure $ Results (s0 <> s1) (c0' <> c1)
{-# INLINE appendResults #-}
data Context = Context
{ _ctxResults :: {-# UNPACK #-} !Results
, _ctxState :: {-# UNPACK #-} !InferState
} deriving (Eq, Ord)
Lens.makeLenses ''Context
initialContext :: Context
initialContext =
Context
{ _ctxResults = emptyResults
, _ctxState =
InferState
{ _inferSupply = 0
, _inferSkolems = mempty
, _inferSkolemConstraints = mempty
, _inferSkolemsInScope = mempty
}
}
-- We use StateT, but it is composed of an actual stateful fresh
-- supply and a component used as a writer avoiding the
-- associativity/performance issues of WriterT
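-- (Illustration only, not part of this module's API.)  The "writer" half is
-- the usual pattern of accumulating a Monoid inside StateT, roughly:
--
--     tellLike :: (Monoid w, Monad m) => w -> StateT w m ()
--     tellLike w = State.modify (`mappend` w)
--
-- here specialised to 'Results' via 'appendResults' and 'tell'.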
newtype InferCtx m a = Infer { run :: StateT Context m a }
deriving (Functor, Applicative, Monad)
inferCtx ::
Lens.Iso
(InferCtx m a)
(InferCtx n b)
(StateT Context m a)
(StateT Context n b)
inferCtx = Lens.iso run Infer
type Infer = InferCtx (Either Error)
throwError :: Error -> Infer a
throwError = Infer . StateT . const . Left
{-# INLINE throwError #-}
getSkolems :: Monad m => InferCtx m TV.TypeVars
getSkolems = Infer $ Lens.use (ctxState . inferSkolems)
{-# INLINE getSkolems #-}
addSkolems :: Monad m => TV.TypeVars -> Constraints -> InferCtx m ()
addSkolems skolems skolemConstraints =
Infer $ Lens.zoom ctxState $
do
inferSkolems <>= skolems
inferSkolemConstraints <>= skolemConstraints
{-# INLINE addSkolems #-}
tell :: Results -> Infer ()
tell w =
Infer $ StateT $ \c ->
do
!newRes <- appendResults c w
Right ((), c { _ctxResults = newRes} )
{-# INLINE tell #-}
tellSubst :: Subst.HasVar t => T.Var t -> t -> Infer ()
tellSubst v t = tell $ emptyResults { _subst = Subst.new v t }
{-# INLINE tellSubst #-}
tellConstraints :: Constraints -> Infer ()
tellConstraints x = tell $ emptyResults { _constraints = x }
{-# INLINE tellConstraints #-}
singleForbiddenField :: T.Var T.Row -> T.Tag -> Constraints
singleForbiddenField v tag =
Constraints $ Map.singleton v $
Constraints.CompositeVar
{ _forbiddenFields = Set.singleton tag
}
tellRowConstraint :: T.RowVar -> T.Tag -> Infer ()
tellRowConstraint v = tellConstraints . singleForbiddenField v
{-# INLINE tellRowConstraint #-}
listen :: Infer a -> Infer (a, Results)
listen (Infer (StateT act)) =
Infer $ StateT $ \c0 ->
do
(y, c1) <- act c0 { _ctxResults = emptyResults }
!w <- appendResults c0 (_ctxResults c1)
Right ((y, _ctxResults c1), c1 { _ctxResults = w} )
{-# INLINE listen #-}
-- Duplicate of listen because building one on top of the other has a
-- large (~15%) performance penalty.
listenNoTell :: Monad m => InferCtx m a -> InferCtx m (a, Results)
listenNoTell (Infer (StateT act)) =
Infer $ StateT $ \c0 ->
do
(y, c1) <- act c0 { _ctxResults = emptyResults }
pure ((y, _ctxResults c1), c1 { _ctxResults = _ctxResults c0} )
{-# INLINE listenNoTell #-}
nextInt :: Monad m => StateT Int m Int
nextInt =
do
old <- State.get
id += 1
pure old
{-# INLINE nextInt #-}
freshInferredVarName ::
(VarKind t, Monad m) => SkolemScope -> String -> InferCtx m (T.Var t)
freshInferredVarName skolemScope prefix =
do
oldSupply <- Lens.zoom inferSupply nextInt
let varName = fromString $ prefix ++ show oldSupply
inferSkolemsInScope . skolemsInScopeMap . Lens.at varName ?= skolemScope
pure varName
& Lens.zoom ctxState
& Infer
{-# INLINE freshInferredVarName #-}
getSkolemsInScope :: (Monad m, VarKind t) => T.Var t -> InferCtx m SkolemScope
getSkolemsInScope varName =
Lens.use
(ctxState . inferSkolemsInScope . skolemsInScopeMap . Lens.ix varName)
& Infer
{-# INLINE getSkolemsInScope #-}
narrowTVScope :: (Monad m, VarKind t) => SkolemScope -> T.Var t -> InferCtx m ()
narrowTVScope skolems varName =
ctxState . inferSkolemsInScope . skolemsInScopeMap . Lens.at varName .
Lens._Just . Scope.skolemScopeVars %= TV.intersection (skolems ^. Scope.skolemScopeVars)
& Infer
{-# INLINE narrowTVScope #-}
freshInferredVar :: Monad m => VarKind t => SkolemScope -> String -> InferCtx m t
freshInferredVar skolemScope = fmap TV.lift . freshInferredVarName skolemScope
{-# INLINE freshInferredVar #-}
listenSubst :: Infer a -> Infer (a, Subst)
listenSubst x = listen x <&> _2 %~ _subst
{-# INLINE listenSubst #-}
getSubst :: Monad m => InferCtx m Subst
getSubst = Infer $ State.gets (_subst . _ctxResults)
{-# INLINE getSubst #-}
|
Peaker/Algorithm-W-Step-By-Step
|
src/Lamdu/Infer/Internal/Monad.hs
|
gpl-3.0
| 8,590
| 0
| 13
| 1,861
| 2,233
| 1,210
| 1,023
| -1
| -1
|
-- | This module defines a simple command line interface for the SubScript
-- interpreter. If your solution is correct, this module should just
-- work.
module Main
(main)
where
import SubsAst
import SubsInterpreter
import SubsParser(parseString)
import Control.Monad(forM_)
import Data.List(intercalate)
import qualified Data.Map as Map
import System.Environment(getArgs)
-- | nice display of JavaScript values
nice :: Value -> String
nice (IntVal v) = show v
nice TrueVal = "true"
nice FalseVal = "false"
nice (StringVal s) = show s
nice UndefinedVal = "undefined"
nice (ArrayVal vs) = "["++ intercalate ", " (map nice vs) ++"]"
main :: IO ()
main = do args <- getArgs
case args of
[file] -> do
s <- readFile file
case parseString s of
Left e -> error $ show e
Right prog ->
case runProg prog of
Left e -> error $ show e
Right res ->
forM_ (Map.toList res) (\(n,v) -> putStrLn $ n ++ " = " ++ nice v)
_ ->
error "Give me a (single) argument!"
|
Rathcke/uni
|
ap/exam/src/subs/Subs.hs
|
gpl-3.0
| 1,145
| 0
| 23
| 366
| 333
| 171
| 162
| 30
| 4
|
{- |
Module : Data.RME.Base
Copyright : Galois, Inc. 2016
License : BSD3
Maintainer : huffman@galois.com
Stability : experimental
Portability : portable
Reed-Muller Expansion normal form for Boolean Formulas.
-}
module Data.RME.Base
( RME
, true, false, lit
, constant, isBool
, compl, xor, conj, disj, iff, mux
, eval
, sat, allsat
, degree
, depth, size
, explode
) where
-- | Boolean formulas in Algebraic Normal Form, using a representation
-- based on the Reed-Muller expansion.
-- Invariants: The last argument to a `Node` constructor should never
-- be `R0`. Also the `Int` arguments should strictly increase as you
-- go deeper in the tree.
data RME = Node !Int !RME !RME | R0 | R1
deriving (Eq, Show)
-- | Evaluate formula with given variable assignment.
eval :: RME -> (Int -> Bool) -> Bool
eval anf v =
case anf of
R0 -> False
R1 -> True
Node n a b -> (eval a v) /= (v n && eval b v)
-- | Normalizing constructor.
node :: Int -> RME -> RME -> RME
node _ a R0 = a
node n a b = Node n a b
-- | Constant true formula.
true :: RME
true = R1
-- | Constant false formula.
false :: RME
false = R0
-- | Boolean constant formulas.
constant :: Bool -> RME
constant False = false
constant True = true
-- | Test whether an RME formula is a constant boolean.
isBool :: RME -> Maybe Bool
isBool R0 = Just False
isBool R1 = Just True
isBool _ = Nothing
-- | Boolean literals.
lit :: Int -> RME
lit n = Node n R0 R1
-- | Logical complement.
compl :: RME -> RME
compl R0 = R1
compl R1 = R0
compl (Node n a b) = Node n (compl a) b
-- | Logical exclusive-or.
xor :: RME -> RME -> RME
xor R0 y = y
xor R1 y = compl y
xor x R0 = x
xor x R1 = compl x
xor x@(Node i a b) y@(Node j c d)
| i < j = Node i (xor a y) b
| j < i = Node j (xor x c) d
| otherwise = node i (xor a c) (xor b d)
-- | Logical conjunction.
conj :: RME -> RME -> RME
conj R0 _ = R0
conj R1 y = y
conj _ R0 = R0
conj x R1 = x
conj x@(Node i a b) y@(Node j c d)
| i < j = node i (conj a y) (conj b y)
| j < i = node j (conj x c) (conj x d)
| otherwise = node i ac (xor ac (conj (xor a b) (xor c d)))
where ac = conj a c
-- | Logical disjunction.
disj :: RME -> RME -> RME
disj R0 y = y
disj R1 _ = R1
disj x R0 = x
disj _ R1 = R1
disj x@(Node i a b) y@(Node j c d)
| i < j = node i (disj a y) (conj b (compl y))
| j < i = node j (disj x c) (conj (compl x) d)
| otherwise = node i ac (xor ac (disj (xor a b) (xor c d)))
where ac = disj a c
-- | Logical equivalence.
iff :: RME -> RME -> RME
iff x y = xor (compl x) y
{-
iff R0 y = compl y
iff R1 y = y
iff x R0 = compl x
iff x R1 = x
iff x@(Node i a b) y@(Node j c d)
| i < j = Node i (iff a y) b
| j < i = Node j (iff x c) d
| otherwise = node i (iff a c) (xor b d)
-}
-- | Logical if-then-else.
mux :: RME -> RME -> RME -> RME
--mux w x y = xor (conj w x) (conj (compl w) y)
mux R0 _ y = y
mux R1 x _ = x
mux b x y = xor (conj b (xor x y)) y
{-
mux R0 x y = y
mux R1 x y = x
mux w R0 y = conj (compl w) y
mux w R1 y = disj w y
mux w x R0 = conj w x
mux w x R1 = disj (compl w) x
mux w@(Node i a b) x@(Node j c d) y@(Node k e f)
| i < j && i < k = node i (mux a x y) (conj b (xor x y))
| j < i && j < k = node i (mux w c y) (conj w d)
| k < i && k < j = node i (mux w x e) (conj (compl w) f)
| i == j && i < k = node i (mux a c y) _
-}
-- | Satisfiability checker.
sat :: RME -> Maybe [(Int, Bool)]
sat R0 = Nothing
sat R1 = Just []
sat (Node n a b) =
case sat a of
Just xs -> Just ((n, False) : xs)
Nothing -> fmap ((n, True) :) (sat b)
-- | List of all satisfying assignments.
allsat :: RME -> [[(Int, Bool)]]
allsat R0 = []
allsat R1 = [[]]
allsat (Node n a b) =
map ((n, False) :) (allsat a) ++ map ((n, True) :) (allsat (xor a b))
-- | Maximum polynomial degree.
degree :: RME -> Int
degree R0 = 0
degree R1 = 0
degree (Node _ a b) = max (degree a) (1 + degree b)
-- | Tree depth.
depth :: RME -> Int
depth R0 = 0
depth R1 = 0
depth (Node _ a b) = 1 + max (depth a) (depth b)
-- | Tree size.
size :: RME -> Int
size R0 = 1
size R1 = 1
size (Node _ a b) = 1 + size a + size b
-- | Convert to an explicit polynomial representation.
explode :: RME -> [[Int]]
explode R0 = []
explode R1 = [[]]
explode (Node i a b) = explode a ++ map (i:) (explode b)
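-- | Check the structural invariants documented on the 'RME' type above: the
-- second child of a 'Node' is never 'R0', and variable indices strictly
-- increase towards the leaves.  This checker is only an illustrative sketch
-- (it is not part of the module's exported API).
wellFormed :: RME -> Bool
wellFormed = go Nothing
  where
    go _ R0 = True
    go _ R1 = True
    go _ (Node _ _ R0) = False
    go bound (Node n a b) =
      maybe True (< n) bound && go (Just n) a && go (Just n) b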
|
GaloisInc/saw-script
|
rme/src/Data/RME/Base.hs
|
bsd-3-clause
| 4,281
| 0
| 12
| 1,166
| 1,662
| 861
| 801
| 107
| 3
|
-- (c) The University of Glasgow 2006-2012
{-# LANGUAGE CPP #-}
module Kind (
-- * Main data type
Kind, typeKind,
-- ** Predicates on Kinds
isLiftedTypeKind, isUnliftedTypeKind,
isConstraintKind,
returnsTyCon, returnsConstraintKind,
isConstraintKindCon,
okArrowArgKind, okArrowResultKind,
classifiesTypeWithValues,
isStarKind, isStarKindSynonymTyCon,
isLevityPolymorphic, isLevityPolymorphic_maybe
) where
#include "HsVersions.h"
import {-# SOURCE #-} Type ( typeKind, coreViewOneStarKind )
import TyCoRep
import TyCon
import Var
import PrelNames
import Data.Maybe
import Util ( (<&&>) )
{-
************************************************************************
* *
Functions over Kinds
* *
************************************************************************
Note [Kind Constraint and kind *]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The kind Constraint is the kind of classes and other type constraints.
The special thing about types of kind Constraint is that
* They are displayed with double arrow:
f :: Ord a => a -> a
* They are implicitly instantiated at call sites; so the type inference
engine inserts an extra argument of type (Ord a) at every call site
to f.
However, once type inference is over, there is *no* distinction between
Constraint and *. Indeed we can have coercions between the two. Consider
class C a where
op :: a -> a
For this single-method class we may generate a newtype, which in turn
generates an axiom witnessing
Ord a ~ (a -> a)
so on the left we have Constraint, and on the right we have *.
See Trac #7451.
Bottom line: although '*' and 'Constraint' are distinct TyCons, with
distinct uniques, they are treated as equal at all times except
during type inference.
-}
isConstraintKind :: Kind -> Bool
isConstraintKindCon :: TyCon -> Bool
isConstraintKindCon tc = tyConUnique tc == constraintKindTyConKey
isConstraintKind (TyConApp tc _) = isConstraintKindCon tc
isConstraintKind _ = False
-- | Does the given type "end" in the given tycon? For example @k -> [a] -> *@
-- ends in @*@ and @Maybe a -> [a]@ ends in @[]@.
returnsTyCon :: Unique -> Type -> Bool
returnsTyCon tc_u (ForAllTy _ ty) = returnsTyCon tc_u ty
returnsTyCon tc_u (TyConApp tc' _) = tc' `hasKey` tc_u
returnsTyCon _ _ = False
returnsConstraintKind :: Kind -> Bool
returnsConstraintKind = returnsTyCon constraintKindTyConKey
-- | Tests whether the given type looks like "TYPE v", where v is a variable.
isLevityPolymorphic :: Kind -> Bool
isLevityPolymorphic = isJust . isLevityPolymorphic_maybe
-- | Retrieves a levity variable in the given kind, if the kind is of the
-- form "TYPE v".
isLevityPolymorphic_maybe :: Kind -> Maybe TyVar
isLevityPolymorphic_maybe k
| Just k' <- coreViewOneStarKind k = isLevityPolymorphic_maybe k'
isLevityPolymorphic_maybe (TyConApp tc [TyVarTy v])
| tc `hasKey` tYPETyConKey
= Just v
isLevityPolymorphic_maybe _ = Nothing
--------------------------------------------
-- Kinding for arrow (->)
-- Says when a kind is acceptable on lhs or rhs of an arrow
-- arg -> res
okArrowArgKind, okArrowResultKind :: Kind -> Bool
okArrowArgKind = classifiesTypeWithValues <&&> (not . isLevityPolymorphic)
okArrowResultKind = classifiesTypeWithValues
-----------------------------------------
-- Subkinding
-- The tc variants are used during type-checking, where ConstraintKind
-- is distinct from all other kinds
-- After type-checking (in core), Constraint and liftedTypeKind are
-- indistinguishable
-- | Does this classify a type allowed to have values? Responds True to things
-- like *, #, TYPE Lifted, TYPE v, Constraint.
classifiesTypeWithValues :: Kind -> Bool
-- ^ True of any sub-kind of OpenTypeKind
classifiesTypeWithValues t | Just t' <- coreViewOneStarKind t = classifiesTypeWithValues t'
classifiesTypeWithValues (TyConApp tc [_]) = tc `hasKey` tYPETyConKey
classifiesTypeWithValues _ = False
-- | Is this kind equivalent to *?
isStarKind :: Kind -> Bool
isStarKind k | Just k' <- coreViewOneStarKind k = isStarKind k'
isStarKind (TyConApp tc [TyConApp l []]) = tc `hasKey` tYPETyConKey
&& l `hasKey` liftedDataConKey
isStarKind _ = False
-- See Note [Kind Constraint and kind *]
-- | Is the tycon @Constraint@?
isStarKindSynonymTyCon :: TyCon -> Bool
isStarKindSynonymTyCon tc = tc `hasKey` constraintKindTyConKey
|
nushio3/ghc
|
compiler/types/Kind.hs
|
bsd-3-clause
| 4,685
| 0
| 10
| 1,054
| 576
| 320
| 256
| 52
| 1
|
module Distribution.Utils.NubList
( NubList -- opaque
    , toNubList        -- smart constructor
, fromNubList
, overNubList
, NubListR
, toNubListR
, fromNubListR
, overNubListR
) where
import Prelude ()
import Distribution.Compat.Prelude
import Distribution.Simple.Utils
import qualified Text.Read as R
-- | NubList : A de-duplicated list that maintains the original order.
newtype NubList a =
NubList { fromNubList :: [a] }
deriving Eq
-- NubList assumes that nub retains the list order while removing duplicate
-- elements (keeping the first occurrence). Documentation for "Data.List.nub"
-- does not specifically state that ordering is maintained so we will add a test
-- for that to the test suite.
-- | Smart constructor for the NubList type.
toNubList :: Ord a => [a] -> NubList a
toNubList list = NubList $ ordNub list
-- | Lift a function over lists to a function over NubLists.
overNubList :: Ord a => ([a] -> [a]) -> NubList a -> NubList a
overNubList f (NubList list) = toNubList . f $ list
-- | Monoid operations on NubLists.
-- For a valid Monoid instance we need to satisfy the required monoid laws;
-- identity, associativity and closure.
--
-- Identity : by inspection:
-- mempty `mappend` NubList xs == NubList xs `mappend` mempty
--
-- Associativity : by inspection:
-- (NubList xs `mappend` NubList ys) `mappend` NubList zs
-- == NubList xs `mappend` (NubList ys `mappend` NubList zs)
--
-- Closure : appending two lists of type a and removing duplicates obviously
-- does not change the type.
instance Ord a => Monoid (NubList a) where
mempty = NubList []
mappend = (<>)
instance Ord a => Semigroup (NubList a) where
(NubList xs) <> (NubList ys) = NubList $ xs `listUnion` ys
instance Show a => Show (NubList a) where
show (NubList list) = show list
instance (Ord a, Read a) => Read (NubList a) where
readPrec = readNubList toNubList
-- | Helper used by NubList/NubListR's Read instances.
readNubList :: (Read a) => ([a] -> l a) -> R.ReadPrec (l a)
readNubList toList = R.parens . R.prec 10 $ fmap toList R.readPrec
-- | Binary instance for 'NubList a' is the same as for '[a]'. For 'put', we
-- just pull off constructor and put the list. For 'get', we get the list and
-- make a 'NubList' out of it using 'toNubList'.
instance (Ord a, Binary a) => Binary (NubList a) where
put (NubList l) = put l
get = fmap toNubList get
-- | NubListR : A right-biased version of 'NubList'. That is @toNubListR
-- ["-XNoFoo", "-XFoo", "-XNoFoo"]@ will result in @["-XFoo", "-XNoFoo"]@,
-- unlike the normal 'NubList', which is left-biased. Built on top of
-- 'ordNubRight' and 'listUnionRight'.
newtype NubListR a =
NubListR { fromNubListR :: [a] }
deriving Eq
-- | Smart constructor for the NubListR type.
toNubListR :: Ord a => [a] -> NubListR a
toNubListR list = NubListR $ ordNubRight list
-- | Lift a function over lists to a function over NubListRs.
overNubListR :: Ord a => ([a] -> [a]) -> NubListR a -> NubListR a
overNubListR f (NubListR list) = toNubListR . f $ list
instance Ord a => Monoid (NubListR a) where
mempty = NubListR []
mappend = (<>)
instance Ord a => Semigroup (NubListR a) where
(NubListR xs) <> (NubListR ys) = NubListR $ xs `listUnionRight` ys
instance Show a => Show (NubListR a) where
show (NubListR list) = show list
instance (Ord a, Read a) => Read (NubListR a) where
readPrec = readNubList toNubListR
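-- Illustrative examples (not part of the module's API): 'NubList' keeps the
-- first occurrence of each duplicate, while 'NubListR' keeps the last one.
_exampleNubList :: [Int]
_exampleNubList = fromNubList (toNubList [1, 2, 1, 3])      -- == [1,2,3]

_exampleNubListR :: [Int]
_exampleNubListR = fromNubListR (toNubListR [1, 2, 1, 3])   -- == [2,1,3]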
|
sopvop/cabal
|
Cabal/Distribution/Utils/NubList.hs
|
bsd-3-clause
| 3,463
| 0
| 9
| 710
| 793
| 432
| 361
| 50
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE TypeOperators #-}
module DirectoryServerRest where
import Control.Monad.Trans.Except
import Control.Monad.IO.Class
import Data.Aeson
import Data.Proxy
import GHC.Generics
import Network.Wai
import Network.Wai.Handler.Warp
import Network.HTTP.Client (newManager, defaultManagerSettings)
import Servant.API
import Servant.Client
import Servant
import System.IO
import System.Directory
import qualified Data.Map as M
import Control.Concurrent.STM
import CommonServer
import CommonServerAPI
type Uuid = Int
type Address = String
type Port = String
type Filename = String
type Timestamp = IO String
--Server data type allows me to pass address and port details easily
data DirectoryServer = DirectoryServer
{ server :: CServer
, filemappings :: TVar (M.Map Filename Filemapping)
, fileservers :: TVar (M.Map Uuid Fileserver)
, fileservercount :: TVar Int
}
data Filemapping = Filemapping
{ fmfilename :: Filename
, fmuuid :: Uuid
, fmtimestamp :: Timestamp
}
data Fileserver = Fileserver
{ fsuuid :: Uuid,
fserver :: CServer
}
files :: ClientM String
download :: Maybe String -> ClientM File
upload :: File -> ClientM CommonServer.Response
directoryApi :: Proxy DirectoryApi
directoryApi = Proxy
directoryServerApi :: Server DirectoryApi
directoryServerApi =
getFiles :<|>
getFilesFrom :<|>
openFile :<|>
closeFile
directoryClientApi :: Proxy FileApi
directoryClientApi = Proxy
files :<|> download :<|> upload = client directoryClientApi
directoryApp :: Application
directoryApp = serve directoryApi directoryServerApi
mkApp :: IO()
mkApp = do
directoryServer <- newDirectoryServer "localhost" "8082"
run 8082 directoryApp
getFiles :: APIHandler [FilePath]
getFiles = do
  -- Stub: the directory listing is not yet wired to the server state
  -- (no DirectoryServer value is in scope in this handler).
  return []
getFilesFrom :: Uuid -> APIHandler [FilePath]
getFilesFrom x = do
  -- NOTE: directoryServer is assumed to come from the surrounding
  -- application state, and APIHandler is assumed to support liftIO.
  fs <- lookupFileServer directoryServer x
  case fs of
    Nothing -> return []
    Just _ -> do
      manager <- liftIO $ newManager defaultManagerSettings
      -- Query the file server (assumed reachable on localhost:8081); the
      -- reply is assumed to be a newline-separated listing of file names.
      result <- liftIO $ runClientM files (ClientEnv manager (BaseUrl Http "localhost" 8081 ""))
      case result of
        Left _ -> return []
        Right fileList -> return (lines fileList)
newDirectoryServer :: String -> String -> IO DirectoryServer
newDirectoryServer address port = atomically $ do DirectoryServer <$> return address <*> return port <*> newTVar M.empty <*> newTVar M.empty <*> newTVar 0
--lookupFileserver :: DirectoryServer -> Uuid -> STM (Maybe Fileserver)
--lookupFileserver DirectoryServer{..} uuid = M.lookup uuid <$> readTVar fileservers
|
Garygunn94/DFS
|
.stack-work/intero/intero84265nt.hs
|
bsd-3-clause
| 2,749
| 0
| 17
| 576
| 643
| 346
| 297
| -1
| -1
|
{-# LANGUAGE NoImplicitPrelude, OverloadedStrings #-}
{-# OPTIONS -Wall #-}
import Language.Paraiso.Generator (generateIO)
import qualified Language.Paraiso.Generator.Claris as C
import qualified Language.Paraiso.Generator.Native as Native
import Language.Paraiso.Name
import Language.Paraiso.Prelude
main :: IO ()
main = do
_ <- generateIO Native.defaultSetup{Native.language = Native.CPlusPlus} $
sampleProgram 8 5
return ()
sampleProgram :: Int -> Int -> C.Program
sampleProgram x1 x2 =
C.Program {
C.progName = mkName "simple",
C.topLevel =
[ C.Exclusive C.SourceFile $ C.StmtPrpr $ C.PrprInclude C.Chevron "iostream" ,
C.FuncDef $ (C.function tInt (mkName "main"))
{ C.funcBody= mainBody },
C.FuncDef $ (C.function tInt (mkName "calc"))
{ C.funcArgs = [varX, varY] ,
C.funcBody = calcBody
}
]
}
where
varX = C.Var tInt (mkName "x")
varY = C.Var tInt (mkName "y")
varZ = C.Var tInt (mkName "z")
mainBody =
[C.StmtExpr $ cout << message << endl,
C.StmtReturn $ C.toDyn (0::Int) ]
calcBody =
[C.StmtExpr $ C.VarDefSub varZ (C.toDyn(2::Int)),
C.StmtExpr $ C.Op2Infix "+=" (C.VarExpr varZ)
$ C.Op2Infix "*" (C.VarExpr varX) (C.VarExpr varY),
C.StmtReturn $ (C.VarExpr varZ)
]
cout = C.VarExpr $ C.Var C.UnknownType $ mkName "std::cout"
endl = C.VarExpr $ C.Var C.UnknownType $ mkName "std::endl"
message = C.FuncCallUsr (mkName "calc") [C.toDyn x1, C.toDyn x2]
infixl 1 <<
(<<) = C.Op2Infix "<<"
tInt :: C.TypeRep
tInt = C.typeOf (undefined :: Int)
|
nushio3/Paraiso
|
attic/Claris/Simple.hs
|
bsd-3-clause
| 1,693
| 0
| 13
| 447
| 590
| 316
| 274
| 41
| 1
|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE RankNTypes #-}
module SortNetObs where
import Obsidian
import Obsidian.Run.CUDA.Exec as CUDA hiding (exec)
import qualified Data.Vector.Storable as V
import Prelude hiding (zip, reverse )
import qualified Prelude as P
import Control.Monad.State(lift)
import Data.Word
riffle' :: (t *<=* Block, Data a, ASize s) => Pull s a -> Push t s (a,a)
riffle' = push . uncurry zip . halve
compareSwap :: (Scalar a, Ord a) => (Exp a,Exp a) -> (Exp a,Exp a)
compareSwap (a,b) = ifThenElse (b <* a) (b,a) (a,b)
shexRev' :: (Array (Push t), Compute t, Data a)
=> ((a,a) -> (a,a)) -> SPull a -> SPush t a
shexRev' cmp arr =
let (arr1,arr2) = halve arr
arr2' = reverse arr2
arr' = (push arr1) `append` (push arr2')
in
exec $ do
arr'' <- compute arr'
rep (logBaseI 2 (len arr)) (compute . core cmp) arr''
where
core c = unpairP . fmap c . riffle'
sort :: forall a . (Scalar a, Ord a) => SPull (Exp a) -> SPush Block (Exp a)
sort = divideAndConquer $ shexRev' compareSwap
sortObs :: (Scalar a, Ord a) => DPull (Exp a) -> DPush Grid (Exp a)
sortObs arr = asGridMap sort (splitUp 1024 arr)
divideAndConquer:: forall a . Data a => (forall t . (Array (Push t), Compute t) => SPull a -> SPush t a) -> SPull a -> SPush Block a
divideAndConquer f arr = execBlock $ doIt (logLen - 1) arr
where logLen = logBaseI 2 (len arr)
doIt 0 a =
do
return $ (f :: SPull a -> SPush Block a) a
doIt n a | currLen > 1024 = blockBody
| currLen > 32 = warpBody
| otherwise = threadBody
where
currLen = 2^(logLen - n)
arrs = splitUp currLen a
blockBody =
do
arr' <- compute
$ asBlockMap (f :: SPull a -> SPush Block a)
$ arrs
doIt (n - 1) arr'
warpBody =
do
arr' <- compute
$ asBlockMap (f :: SPull a -> SPush Warp a)
$ arrs
doIt (n - 1) arr'
threadBody =
do
arr' <- compute
$ asBlockMap (f :: SPull a -> SPush Thread a)
$ arrs
doIt (n - 1) arr'
-- | runSortObs: run the given binary on the GPU
runSortObs ctx kern inputs size sorted =
withCUDA' ctx $
do
useVector inputs $ \i ->
allocaVector size $ \ o ->
do
fill o 0
o <== (1,kern) <> i
r <- peekCUDAVector o
return r
|
aesadde/AccObsBenchmarks
|
Sort/haskell/src/SortNetObs.hs
|
bsd-3-clause
| 2,739
| 0
| 17
| 970
| 1,035
| 532
| 503
| 71
| 2
|
{-# LANGUAGE DeriveDataTypeable #-}
module System.Console.CmdArgs.Test.GetOpt where
import Data.Data
import System.Console.CmdArgs.GetOpt
import qualified System.Console.CmdArgs.Explicit as Explicit
import System.Console.CmdArgs.Test.Util
data Flag = Verbose | Version | Name String | Output String | Arg String deriving (Show,Data,Typeable)
options :: [OptDescr Flag]
options =
[Option ['v'] ["verbose"] (NoArg Verbose) "verbosely list files",
Option ['V','?'] ["version","release"] (NoArg Version) "show version info",
Option ['o'] ["output"] (OptArg out "FILE") "use FILE for dump",
Option ['n'] ["name"] (ReqArg Name "USER") "only dump USER's files"]
out :: Maybe String -> Flag
out Nothing = Output "stdout"
out (Just o) = Output o
tester :: [String] -> (String,String)
tester cmdline = case getOpt Permute options cmdline of
(o,n,[] ) -> let x = "options=" ++ show o ++ " args=" ++ show n in (x,x)
(_,_,errs) -> ("failed", unlines errs ++ usageInfo header options)
where header = "Usage: foobar [OPTION...] files..."
mode = (convert "GetOpt compatibility demo" options){Explicit.modeNames=["getopt"]}
demo = [newDemo print mode]
test = do
tester ["foo","-v"] === "options=[Verbose] args=[\"foo\"]"
tester ["foo","--","-v"] === "options=[] args=[\"foo\",\"-v\"]"
tester ["-?o","--name","bar","--na=baz"] === "options=[Version,Output \"stdout\",Name \"bar\",Name \"baz\"] args=[]"
tester ["--ver","foo"] === "failed"
a === b | fst a == b = success
| otherwise = failure "Mismatch in GetOpt" [("Wanted",b),("Got",fst a)]
|
copland/cmdargs
|
System/Console/CmdArgs/Test/GetOpt.hs
|
bsd-3-clause
| 1,689
| 0
| 15
| 363
| 547
| 300
| 247
| 30
| 2
|
{- |
Module : $Id$
Description : converting Kif to CASL
Copyright : (c) C.Maeder, Uni Bremen 2006
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Christian.Maeder@dfki.de
Stability : experimental
Portability : portable
convert .kif to .casl
-}
module Main where
import CASL.Kif
import CASL.Kif2CASL
import CASL.ToDoc ()
import Common.Utils
import Common.DocUtils
import Text.ParserCombinators.Parsec
import System.Environment
main :: IO ()
main = getArgs >>= mapM_ process
process :: String -> IO ()
process s = do
e <- parseFromFile kifProg s
case e of
Left err -> print err
Right l -> do
let f = fst (stripSuffix [".kif"] s) ++ ".casl"
writeFile f $ showDoc (kif2CASL l) "\n"
|
keithodulaigh/Hets
|
CASL/fromKif.hs
|
gpl-2.0
| 742
| 0
| 19
| 166
| 176
| 89
| 87
| 18
| 2
|
module Deriving.Typeable(deriveTypeable) where
import Deriving.Type
import Deriving.Util
import FrontEnd.HsSyn
import FrontEnd.Syn.Q
import Name.Names
deriveTypeable :: Int -> Name -> Derive -> Module -> Data -> Q HsDecl
deriveTypeable tnum tname der mod d@D { .. } = do
mkInstN tnum der mod d tname []
|
hvr/jhc
|
src/Deriving/Typeable.hs
|
mit
| 309
| 0
| 10
| 51
| 108
| 58
| 50
| -1
| -1
|
module Infix1 where
data Inf = Nil | Int :* [Int]
f :: Inf -> Int
f Nil = 0
f ((x :* xs))
= case xs of
xs@[] -> x + (head xs)
xs@(b_1 : b_2) -> x + (head xs)
f ((x :* xs)) = x + (head xs)
|
kmate/HaRe
|
old/testing/introCase/InfixIn1AST.hs
|
bsd-3-clause
| 223
| 0
| 10
| 86
| 135
| 74
| 61
| 9
| 2
|
{-# LANGUAGE ScopedTypeVariables, DatatypeContexts #-}
module Main where
{-
- This is a test framework for Arrays, using QuickCheck
-
-}
import qualified Data.Array as Array
import Data.List
import Control.Monad ( liftM2, liftM3, liftM4 )
import System.Random
import Data.Ix
import Data.List( (\\) )
infixl 9 !, //
infixr 0 ==>
infix 1 `classify`
prop_array =
forAll genBounds $ \ (b :: (Int,Int)) ->
forAll (genIVPs b 10) $ \ (vs :: [(Int,Int)]) ->
Array.array b vs
`same_arr`
array b vs
prop_listArray =
forAll genBounds $ \ (b :: (Int,Int)) ->
forAll (vector (length [fst b..snd b]))
$ \ (vs :: [Bool]) ->
Array.listArray b vs == Array.array b (zipWith (\ a b -> (a,b))
(Array.range b) vs)
prop_indices =
forAll genBounds $ \ (b :: (Int,Int)) ->
forAll (genIVPs b 10) $ \ (vs :: [(Int,Int)]) ->
let arr = Array.array b vs
in Array.indices arr == ((Array.range . Array.bounds) arr)
prop_elems =
forAll genBounds $ \ (b :: (Int,Int)) ->
forAll (genIVPs b 10) $ \ (vs :: [(Int,Int)]) ->
let arr = Array.array b vs
in Array.elems arr == [arr Array.! i | i <- Array.indices arr]
prop_assocs =
forAll genBounds $ \ (b :: (Int,Int)) ->
forAll (genIVPs b 10) $ \ (vs :: [(Int,Int)]) ->
let arr = Array.array b vs
in Array.assocs arr == [(i, arr Array.! i) | i <- Array.indices arr]
prop_slashslash =
forAll genBounds $ \ (b :: (Int,Int)) ->
forAll (genIVPs b 10) $ \ (vs :: [(Int,Int)]) ->
let arr = Array.array b vs
us = []
in arr Array.// us == Array.array (Array.bounds arr)
([(i,arr Array.! i)
| i <- Array.indices arr \\ [i | (i,_) <- us]]
++ us)
prop_accum =
forAll genBounds $ \ (b :: (Int,Int)) ->
forAll (genIVPs b 10) $ \ (vs :: [(Int,Int)]) ->
forAll (genIVPs b 10) $ \ (us :: [(Int,Int)]) ->
forAll (choose (0,length us))
$ \ n ->
let us' = take n us in
forAll arbitrary $ \ (fn :: Int -> Int -> Int) ->
let arr = Array.array b vs
in Array.accum fn arr us'
== foldl (\a (i,v) -> a Array.// [(i,fn (a Array.! i) v)]) arr us'
prop_accumArray =
forAll arbitrary $ \ (f :: Int -> Int -> Int) ->
forAll arbitrary $ \ (z :: Int) ->
forAll genBounds $ \ (b :: (Int,Int)) ->
forAll (genIVPs b 10) $ \ (vs :: [(Int,Int)]) ->
Array.accumArray f z b vs == Array.accum f
(Array.array b [(i,z) | i <- Array.range b]) vs
same_arr :: (Eq b) => Array.Array Int b -> Array Int b -> Bool
same_arr a1 a2 = a == c && b == d
&& all (\ n -> (a1 Array.! n) == (a2 ! n)) [a..b]
where (a,b) = Array.bounds a1 :: (Int,Int)
(c,d) = bounds a2 :: (Int,Int)
genBounds :: Gen (Int,Int)
genBounds = do m <- choose (0,20)
n <- choose (minBound,maxBound-m)
return (n,n+m-1)
genIVP :: Arbitrary a => (Int,Int) -> Gen (Int,a)
genIVP b = do { i <- choose b
; v <- arbitrary
; return (i,v)
}
genIVPs :: Arbitrary a => (Int,Int) -> Int -> Gen [(Int,a)]
genIVPs b@(low,high) s
= do { let is = [low..high]
; vs <- vector (length is)
; shuffle s (zip is vs)
}
prop_id = forAll genBounds $ \ (b :: (Int,Int)) ->
forAll (genIVPs b 10) $ \ (ivps :: [(Int,Int)]) ->
label (show (ivps :: [(Int,Int)])) True
-- rift takes a list, splits it (using an Int argument),
-- and then rifts together the split lists into one.
-- Think: rifting a pack of cards.
rift :: Int -> [a] -> [a]
rift n xs = comb (drop n xs) (take n xs)
where
comb (a:as) (b:bs) = a : b : comb as bs
comb (a:as) [] = a : as
comb [] (b:bs) = b : bs
comb [] [] = []
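-- Worked example (illustration only): rift 2 [1,2,3,4,5] == [3,1,4,2,5].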
-- shuffle makes n random rifts. Typically after
-- log n rifts, the list is in a pretty random order.
-- (where n is the number of elements in the list)
shuffle :: Int -> [a] -> Gen [a]
shuffle 0 m = return m
shuffle n m = do { r <- choose (1,length m)
; shuffle (n-1) (rift r m)
}
prop_shuffle =
forAll (shuffle 10 [1..10::Int]) $ \ lst ->
label (show lst) True
------------------------------------------------------------------------------
main = do test prop_array
test prop_listArray
test prop_indices
test prop_elems
test prop_assocs
test prop_slashslash
test prop_accum
test prop_accumArray
instance Show (a -> b) where { show _ = "<FN>" }
------------------------------------------------------------------------------
data (Ix a) => Array a b = MkArray (a,a) (a -> b) deriving ()
array :: (Ix a) => (a,a) -> [(a,b)] -> Array a b
array b ivs =
if and [inRange b i | (i,_) <- ivs]
then MkArray b
(\j -> case [v | (i,v) <- ivs, i == j] of
[v] -> v
[] -> error "Array.!: \
\undefined array element"
_ -> error "Array.!: \
\multiply defined array element")
else error "Array.array: out-of-range array association"
listArray :: (Ix a) => (a,a) -> [b] -> Array a b
listArray b vs = array b (zipWith (\ a b -> (a,b)) (range b) vs)
(!) :: (Ix a) => Array a b -> a -> b
(!) (MkArray _ f) = f
bounds :: (Ix a) => Array a b -> (a,a)
bounds (MkArray b _) = b
indices :: (Ix a) => Array a b -> [a]
indices = range . bounds
elems :: (Ix a) => Array a b -> [b]
elems a = [a!i | i <- indices a]
assocs :: (Ix a) => Array a b -> [(a,b)]
assocs a = [(i, a!i) | i <- indices a]
(//) :: (Ix a) => Array a b -> [(a,b)] -> Array a b
a // us = array (bounds a)
([(i,a!i) | i <- indices a \\ [i | (i,_) <- us]]
++ us)
accum :: (Ix a) => (b -> c -> b) -> Array a b -> [(a,c)]
-> Array a b
accum f = foldl (\a (i,v) -> a // [(i,f (a!i) v)])
accumArray :: (Ix a) => (b -> c -> b) -> b -> (a,a) -> [(a,c)]
-> Array a b
accumArray f z b = accum f (array b [(i,z) | i <- range b])
ixmap :: (Ix a, Ix b) => (a,a) -> (a -> b) -> Array b c
-> Array a c
ixmap b f a = array b [(i, a ! f i) | i <- range b]
instance (Ix a) => Functor (Array a) where
fmap fn (MkArray b f) = MkArray b (fn . f)
instance (Ix a, Eq b) => Eq (Array a b) where
a == a' = assocs a == assocs a'
instance (Ix a, Ord b) => Ord (Array a b) where
a <= a' = assocs a <= assocs a'
instance (Ix a, Show a, Show b) => Show (Array a b) where
showsPrec p a = showParen (p > 9) (
showString "array " .
shows (bounds a) . showChar ' ' .
shows (assocs a) )
instance (Ix a, Read a, Read b) => Read (Array a b) where
readsPrec p = readParen (p > 9)
(\r -> [(array b as, u) | ("array",s) <- lex r,
(b,t) <- reads s,
(as,u) <- reads t ])
--------------------------------------------------------------------
-- QuickCheck v.0.2
-- DRAFT implementation; last update 000104.
-- Koen Claessen, John Hughes.
-- This file represents work in progress, and might change at a later date.
--------------------------------------------------------------------
-- Generator
newtype Gen a
= Gen (Int -> StdGen -> a)
sized :: (Int -> Gen a) -> Gen a
sized fgen = Gen (\n r -> let Gen m = fgen n in m n r)
resize :: Int -> Gen a -> Gen a
resize n (Gen m) = Gen (\_ r -> m n r)
rand :: Gen StdGen
rand = Gen (\n r -> r)
promote :: (a -> Gen b) -> Gen (a -> b)
promote f = Gen (\n r -> \a -> let Gen m = f a in m n r)
variant :: Int -> Gen a -> Gen a
variant v (Gen m) = Gen (\n r -> m n (rands r !! (v+1)))
where
rands r0 = r1 : rands r2 where (r1, r2) = split r0
generate :: Int -> StdGen -> Gen a -> a
generate n rnd (Gen m) = m size rnd'
where
(size, rnd') = randomR (0, n) rnd
instance Functor Gen where
fmap f m = m >>= return . f
instance Monad Gen where
return a = Gen (\n r -> a)
Gen m >>= k =
Gen (\n r0 -> let (r1,r2) = split r0
Gen m' = k (m n r1)
in m' n r2)
-- derived
--choose :: Random a => (a, a) -> Gen a
choose bounds = ((fst . randomR bounds) `fmap` rand)
elements :: [a] -> Gen a
elements xs = (xs !!) `fmap` choose (0, length xs - 1)
vector :: Arbitrary a => Int -> Gen [a]
vector n = sequence [ arbitrary | i <- [1..n] ]
oneof :: [Gen a] -> Gen a
oneof gens = elements gens >>= id
frequency :: [(Int, Gen a)] -> Gen a
frequency xs = choose (1, tot) >>= (`pick` xs)
where
tot = sum (map fst xs)
pick n ((k,x):xs)
| n <= k = x
| otherwise = pick (n-k) xs
-- general monadic
two :: Monad m => m a -> m (a, a)
two m = liftM2 (,) m m
three :: Monad m => m a -> m (a, a, a)
three m = liftM3 (,,) m m m
four :: Monad m => m a -> m (a, a, a, a)
four m = liftM4 (,,,) m m m m
--------------------------------------------------------------------
-- Arbitrary
class Arbitrary a where
arbitrary :: Gen a
coarbitrary :: a -> Gen b -> Gen b
instance Arbitrary () where
arbitrary = return ()
coarbitrary _ = variant 0
instance Arbitrary Bool where
arbitrary = elements [True, False]
coarbitrary b = if b then variant 0 else variant 1
instance Arbitrary Int where
arbitrary = sized $ \n -> choose (-n,n)
coarbitrary n = variant (if n >= 0 then 2*n else 2*(-n) + 1)
instance Arbitrary Integer where
arbitrary = sized $ \n -> choose (-fromIntegral n,fromIntegral n)
coarbitrary n = variant (fromInteger (if n >= 0 then 2*n else 2*(-n) + 1))
instance Arbitrary Float where
arbitrary = liftM3 fraction arbitrary arbitrary arbitrary
coarbitrary x = coarbitrary (decodeFloat x)
instance Arbitrary Double where
arbitrary = liftM3 fraction arbitrary arbitrary arbitrary
coarbitrary x = coarbitrary (decodeFloat x)
fraction a b c = fromInteger a + (fromInteger b / (abs (fromInteger c) + 1))
instance (Arbitrary a, Arbitrary b) => Arbitrary (a, b) where
arbitrary = liftM2 (,) arbitrary arbitrary
coarbitrary (a, b) = coarbitrary a . coarbitrary b
instance (Arbitrary a, Arbitrary b, Arbitrary c) => Arbitrary (a, b, c) where
arbitrary = liftM3 (,,) arbitrary arbitrary arbitrary
coarbitrary (a, b, c) = coarbitrary a . coarbitrary b . coarbitrary c
instance (Arbitrary a, Arbitrary b, Arbitrary c, Arbitrary d)
=> Arbitrary (a, b, c, d)
where
arbitrary = liftM4 (,,,) arbitrary arbitrary arbitrary arbitrary
coarbitrary (a, b, c, d) =
coarbitrary a . coarbitrary b . coarbitrary c . coarbitrary d
instance Arbitrary a => Arbitrary [a] where
arbitrary = sized (\n -> choose (0,n) >>= vector)
coarbitrary [] = variant 0
coarbitrary (a:as) = coarbitrary a . variant 1 . coarbitrary as
instance (Arbitrary a, Arbitrary b) => Arbitrary (a -> b) where
arbitrary = promote (`coarbitrary` arbitrary)
coarbitrary f gen = arbitrary >>= ((`coarbitrary` gen) . f)
--------------------------------------------------------------------
-- Testable
data Result
= Result { ok :: Maybe Bool, stamp :: [String], arguments :: [String] }
nothing :: Result
nothing = Result{ ok = Nothing, stamp = [], arguments = [] }
newtype Property
= Prop (Gen Result)
result :: Result -> Property
result res = Prop (return res)
evaluate :: Testable a => a -> Gen Result
evaluate a = gen where Prop gen = property a
class Testable a where
property :: a -> Property
instance Testable () where
property _ = result nothing
instance Testable Bool where
property b = result (nothing{ ok = Just b })
instance Testable Result where
property res = result res
instance Testable Property where
property prop = prop
instance (Arbitrary a, Show a, Testable b) => Testable (a -> b) where
property f = forAll arbitrary f
forAll :: (Show a, Testable b) => Gen a -> (a -> b) -> Property
forAll gen body = Prop $
do a <- gen
res <- evaluate (body a)
return (argument a res)
where
argument a res = res{ arguments = show a : arguments res }
(==>) :: Testable a => Bool -> a -> Property
True ==> a = property a
False ==> a = property ()
label :: Testable a => String -> a -> Property
label s a = Prop (add `fmap` evaluate a)
where
add res = res{ stamp = s : stamp res }
classify :: Testable a => Bool -> String -> a -> Property
classify True name = label name
classify False _ = property
trivial :: Testable a => Bool -> a -> Property
trivial = (`classify` "trivial")
collect :: (Show a, Testable b) => a -> b -> Property
collect v = label (show v)
--------------------------------------------------------------------
-- Testing
data Config = Config
{ configMaxTest :: Int
, configMaxFail :: Int
, configSize :: Int -> Int
, configEvery :: Int -> [String] -> String
}
quick :: Config
quick = Config
{ configMaxTest = 100
, configMaxFail = 1000
, configSize = (+ 3) . (`div` 2)
, configEvery = \n args -> let s = show n in s ++ ","
}
verbose :: Config
verbose = quick
{ configEvery = \n args -> show n ++ ":\n" ++ unlines args
}
test, quickCheck, verboseCheck :: Testable a => a -> IO ()
test = check quick
quickCheck = check quick
verboseCheck = check verbose
check :: Testable a => Config -> a -> IO ()
check config a =
do rnd <- newStdGen
tests config (evaluate a) rnd 0 0 []
tests :: Config -> Gen Result -> StdGen -> Int -> Int -> [[String]] -> IO ()
tests config gen rnd0 ntest nfail stamps
| ntest == configMaxTest config = do done "OK, passed" ntest stamps
| nfail == configMaxFail config = do done "Arguments exhausted after" ntest stamps
| otherwise =
do putStr (configEvery config ntest (arguments result))
case ok result of
Nothing ->
tests config gen rnd1 ntest (nfail+1) stamps
Just True ->
tests config gen rnd1 (ntest+1) nfail (stamp result:stamps)
Just False ->
putStr ( "Falsifiable, after "
++ show ntest
++ " tests:\n"
++ unlines (arguments result)
)
where
result = generate (configSize config ntest) rnd2 gen
(rnd1,rnd2) = split rnd0
done :: String -> Int -> [[String]] -> IO ()
done mesg ntest stamps =
do putStr ( mesg ++ " " ++ show ntest ++ " tests" ++ table )
where
table = display
. map entry
. reverse
. sort
. map pairLength
. group
. sort
. filter (not . null)
$ stamps
display [] = ".\n"
display [x] = " (" ++ x ++ ").\n"
display xs = ".\n" ++ unlines (map (++ ".") xs)
pairLength xss@(xs:_) = (length xss, xs)
entry (n, xs) = percentage n ntest
++ " "
++ concat (intersperse ", " xs)
percentage n m = show ((100 * n) `div` m) ++ "%"
--------------------------------------------------------------------
-- the end.
{-
instance Observable StdGen where { observer = observeBase }
instance Observable a => Observable (Gen a) where
observer (Gen a) = send "Gen" (return (Gen) << a)
-}
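{- A minimal driver sketch (not part of the original test file), running the
   checker defined above on the sample property. -}
exampleRun :: IO ()
exampleRun = quickCheck prop_succIncreases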
|
frantisekfarka/ghc-dsi
|
testsuite/tests/array/should_run/arr016.hs
|
bsd-3-clause
| 15,808
| 25
| 28
| 5,016
| 6,792
| 3,566
| 3,226
| -1
| -1
|
{-# LANGUAGE TypeFamilies, PartialTypeSignatures #-}
-- In Trac #12033 this was called HsakellBug.hs
module T12033 where
tripleStoreToRuleSet :: v -> v
tripleStoreToRuleSet getAtom
= makeTuple getAtom
where
makeRule v = makeExpression v
makeTuple v = makeExpression v
makeExpression :: _
makeExpression v = makeTuple getAtom
|
ezyang/ghc
|
testsuite/tests/partial-sigs/should_compile/T12033.hs
|
bsd-3-clause
| 341
| 0
| 7
| 61
| 66
| 34
| 32
| 9
| 1
|
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
module Layout (relayout, cycleLayout, insertViewInOutput) where
import Control.Lens
import StackSet
import WLC hiding (size)
import Tree
import LayoutType
relayout :: StackSet i WLCViewPtr WLCOutputPtr -> IO ()
relayout s =
do forOf_ (current . _Just) s layoutScreen
forOf_ (visible . each) s layoutScreen
forOf_ (current . _Just . workspace . tree . focusT . treeElements . _Just .
focusL .
_Left)
s
(\v -> wlcViewFocus v >> wlcViewBringToFront v)
forOf_ (current . _Just . screen) s wlcOutputFocus
layoutScreen :: Screen i WLCViewPtr WLCOutputPtr -> IO ()
layoutScreen (Screen w sid res) = do
wlcOutputSetMask sid (w ^. mask)
layoutWorkspace res w
-- | Resize all views on the workspace according to the current layout
layoutWorkspace :: WLCSize -> Workspace i WLCViewPtr -> IO ()
layoutWorkspace size ws =
layoutTree size
(integrateTree (ws ^. tree))
layoutTree :: WLCSize -> Tree Layout WLCViewPtr -> IO ()
layoutTree screenSize mainTree =
go (WLCGeometry (WLCOrigin 0 0)
screenSize)
mainTree
where go _ (Tree _ Nothing) = return ()
go (WLCGeometry (WLCOrigin x y) size) (Tree l (Just z)) =
let arrangement =
getLayout l size z &
each .
_2 %~
(\(WLCGeometry (WLCOrigin x' y') size') ->
WLCGeometry
(WLCOrigin (x + x')
(y + y'))
size')
in mapM_ recurse arrangement
recurse ((Left v),geometry) =
wlcViewSetGeometry v geometry >>
wlcViewSetState v WlcBitMaximized True
recurse ((Right t),geometry) =
go geometry t
-- | Insert the view into the workspace that is focused on the given output.
insertViewInOutput :: Layout
-> WLCViewPtr
-> WLCOutputPtr
-> StackSet i WLCViewPtr WLCOutputPtr
-> StackSet i WLCViewPtr WLCOutputPtr
insertViewInOutput l v output s =
modifyWithOutput (insertUp l v)
output
s
cycleLayout :: Layout -> Layout
cycleLayout (Layout _ "Tabbed") = horizontalLayout
cycleLayout (Layout _ "Horizontal") = verticalLayout
cycleLayout (Layout _ "Vertical") = tallLayout
cycleLayout (Layout _ "Tall") = wideLayout
cycleLayout (Layout _ "Wide") = tabbedLayout
cycleLayout _ = horizontalLayout
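{- Illustrative note (an assumption, not part of the original module): given
   LayoutType values whose names match the patterns above, cycleLayout steps
   Tabbed -> Horizontal -> Vertical -> Tall -> Wide -> Tabbed, and any
   unrecognised layout falls back to horizontalLayout. -}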
|
cocreature/reactand
|
src/Layout.hs
|
isc
| 2,721
| 0
| 18
| 912
| 731
| 368
| 363
| -1
| -1
|
module Data.BinomialHeap'(
BinomialHeap(..),
rank, root, tree, link, insTree, removeMinTree,
empty,
isEmpty,
insert,
merge,
findMin,
deleteMin,
) where
import Data.Heap
data Tree a = N a [Tree a] deriving (Show,Eq)
newtype BinomialHeap a = BH [(Int, Tree a)]
link :: Ord a => (Int, Tree a) -> (Int, Tree a) -> (Int, Tree a)
link (r, t1@(N x1 c1)) (_, t2@(N x2 c2))
| x1 <= x2 = (r+1, N x1 (t2:c1))
| otherwise = (r+1, N x2 (t1:c2))
rank :: (Int, Tree a) -> Int
rank = fst
root :: (Int, Tree a) -> a
root (_, (N x _)) = x
tree :: BinomialHeap a -> [(Int, Tree a)]
tree (BH ts) = ts
insTree :: Ord a => (Int, Tree a) -> [(Int, Tree a)] -> [(Int, Tree a)]
insTree t [] = [t]
insTree t ts@(t':ts')
| rank t < rank t' = t:ts
| otherwise = insTree (link t t') ts'
mrg :: Ord a => [(Int, Tree a)] -> [(Int, Tree a)] -> [(Int, Tree a)]
mrg xs [] = xs
mrg [] ys = ys
mrg xs@(x:xs') ys@(y:ys')
| rank x < rank y = x : mrg xs' ys
| rank y < rank x = y : mrg xs ys'
| otherwise = insTree (link x y) (mrg xs' ys')
removeMinTree :: Ord a => [(Int, Tree a)] -> ((Int, Tree a), [(Int, Tree a)])
removeMinTree [t] = (t, [])
removeMinTree (t:ts) = let (t', ts') = removeMinTree ts in
if root t <= root t'
then (t, ts)
else (t', t:ts')
reverseTree :: Int -> [Tree a] -> [(Int, Tree a)]
reverseTree r ts = reverseTree' r ts []
reverseTree' :: Int -> [Tree a] -> [(Int, Tree a)] -> [(Int, Tree a)]
reverseTree' _ [] ts = ts
reverseTree' r (t:ts) ts' = reverseTree' (r - 1) ts $ (r, t):ts'
instance Heap BinomialHeap where
empty = BH []
isEmpty (BH []) = True
isEmpty _ = False
insert x (BH ts) = BH $ insTree (0, (N x [])) ts
merge (BH xs) (BH ys) = BH $ mrg xs ys
findMin (BH ts) = let (t, _) = removeMinTree ts in root t
deleteMin (BH ts) = let ((r, N _ ts1), ts2) = removeMinTree ts in
BH $ mrg (reverseTree (r-1) ts1) ts2
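{- A minimal usage sketch (not part of the original module), assuming the
   Data.Heap class methods have the shapes used in the instance above
   (empty :: h a, insert :: Ord a => a -> h a -> h a, and so on). -}
exampleHeap :: BinomialHeap Int
exampleHeap = foldr insert empty [5, 1, 9, 3]
-- findMin exampleHeap             == 1
-- findMin (deleteMin exampleHeap) == 3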
|
syunta/PFDS
|
Data/BinomialHeap'.hs
|
mit
| 1,992
| 0
| 13
| 573
| 1,207
| 638
| 569
| 54
| 2
|