-- NOTE: the two lines below are dataset-extraction metadata (a column
-- legend), not Haskell source; preserved here as a comment so the file
-- remains parseable:
--   code | repo_name | path | license | size | n_ast_errors | ast_max_depth
--   | n_whitespaces | n_ast_nodes | n_ast_terminals | n_ast_nonterminals
--   | loc | cycloplexity
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE TupleSections #-}
module Stack.Setup
( setupEnv
, ensureCompiler
, ensureDockerStackExe
, SetupOpts (..)
, defaultStackSetupYaml
) where
import Control.Applicative
import Control.Exception.Enclosed (catchIO, tryAny)
import Control.Monad (liftM, when, join, void, unless)
import Control.Monad.Catch
import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Monad.Logger
import Control.Monad.Reader (MonadReader, ReaderT (..), asks)
import Control.Monad.State (get, put, modify)
import Control.Monad.Trans.Control
import Crypto.Hash (SHA1(SHA1))
import Data.Aeson.Extended
import qualified Data.ByteString as S
import qualified Data.ByteString.Char8 as S8
import qualified Data.ByteString.Lazy as LBS
import Data.Char (isSpace)
import Data.Conduit (Conduit, ($$), (=$), await, yield, awaitForever)
import qualified Data.Conduit.Binary as CB
import Data.Conduit.Lift (evalStateC)
import qualified Data.Conduit.List as CL
import Data.Either
import Data.Foldable hiding (concatMap, or, maximum)
import Data.IORef
import Data.IORef.RunOnce (runOnce)
import Data.List hiding (concat, elem, maximumBy)
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe
import Data.Monoid
import Data.Ord (comparing)
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.Text.Encoding.Error as T
import Data.Time.Clock (NominalDiffTime, diffUTCTime, getCurrentTime)
import Data.Typeable (Typeable)
import qualified Data.Yaml as Yaml
import Distribution.System (OS, Arch (..), Platform (..))
import qualified Distribution.System as Cabal
import Distribution.Text (simpleParse)
import Language.Haskell.TH as TH
import Network.HTTP.Client.Conduit
import Network.HTTP.Download.Verified
import Path
import Path.Extra (toFilePathNoTrailingSep)
import Path.IO
import qualified Paths_stack as Meta
import Prelude hiding (concat, elem) -- Fix AMP warning
import Safe (readMay)
import Stack.Build (build)
import Stack.Config (resolvePackageEntry, loadConfig)
import Stack.Constants (distRelativeDir, stackProgName)
import Stack.Exec (defaultEnvSettings)
import Stack.Fetch
import Stack.GhcPkg (createDatabase, getCabalPkgVer, getGlobalDB, mkGhcPackagePath)
import Stack.Setup.Installed
import Stack.Types
import Stack.Types.Internal (HasTerminal, HasReExec, HasLogLevel)
import Stack.Types.StackT
import qualified System.Directory as D
import System.Environment (getExecutablePath)
import System.Exit (ExitCode (ExitSuccess))
import System.FilePath (searchPathSeparator)
import qualified System.FilePath as FP
import System.Process (rawSystem)
import System.Process.Read
import System.Process.Run (runIn)
import Text.Printf (printf)
-- | Default location of the stack-setup.yaml file.
--
-- This YAML file (hosted in the fpco/stackage-content repository) maps
-- OS keys and compiler versions to download URLs for GHC, GHCJS, msys2,
-- and stack bindists; see 'getSetupInfo' for how it is consumed.
defaultStackSetupYaml :: String
defaultStackSetupYaml =
    "https://raw.githubusercontent.com/fpco/stackage-content/master/stack/stack-setup-2.yaml"
-- | Options controlling how a compiler (GHC or GHCJS) is located and,
-- if necessary, installed. Built by 'setupEnv' from the build config,
-- or directly by the @stack setup@ command.
data SetupOpts = SetupOpts
    { soptsInstallIfMissing :: !Bool
      -- ^ Download and install the compiler if no acceptable one is found
    , soptsUseSystem :: !Bool
      -- ^ Consider a compiler already on the PATH before installing one
    , soptsWantedCompiler :: !CompilerVersion
      -- ^ The compiler version we are trying to provide
    , soptsCompilerCheck :: !VersionCheck
      -- ^ How strictly to match 'soptsWantedCompiler' (exact, newer-minor, ...)
    , soptsStackYaml :: !(Maybe (Path Abs File))
    -- ^ If we got the desired GHC version from that file
    , soptsForceReinstall :: !Bool
    , soptsSanityCheck :: !Bool
    -- ^ Run a sanity check on the selected GHC
    , soptsSkipGhcCheck :: !Bool
    -- ^ Don't check for a compatible GHC version/architecture
    , soptsSkipMsys :: !Bool
    -- ^ Do not use a custom msys installation on Windows
    , soptsUpgradeCabal :: !Bool
    -- ^ Upgrade the global Cabal library in the database to the newest
    -- version. Only works reliably with a stack-managed installation.
    , soptsResolveMissingGHC :: !(Maybe Text)
    -- ^ Message shown to user for how to resolve the missing GHC
    , soptsStackSetupYaml :: !String
    -- ^ Location of the main stack-setup.yaml file
    , soptsGHCBindistURL :: !(Maybe String)
    -- ^ Alternate GHC binary distribution (requires custom GHCVariant)
    }
    deriving Show
-- | Failures that can occur while installing or validating a compiler
-- and its supporting tools. Rendered for the user by the manual 'Show'
-- instance below.
data SetupException = UnsupportedSetupCombo OS Arch
                    | MissingDependencies [String]
                    | UnknownCompilerVersion Text CompilerVersion [CompilerVersion]
                    | UnknownOSKey Text
                    | GHCSanityCheckCompileFailed ReadProcessException (Path Abs File)
                    | WantedMustBeGHC
                    | RequireCustomGHCVariant
                    | ProblemWhileDecompressing (Path Abs File)
                    | SetupInfoMissingSevenz
                    | GHCJSRequiresStandardVariant
                    | GHCJSNotBooted
                    | DockerStackExeNotFound Version Text
    deriving Typeable
instance Exception SetupException
-- | Human-readable error messages; this is what the user sees when one of
-- these exceptions escapes, so each case is a full sentence or paragraph.
instance Show SetupException where
    show (UnsupportedSetupCombo os arch) = concat
        [ "I don't know how to install GHC for "
        , show (os, arch)
        , ", please install manually"
        ]
    show (MissingDependencies tools) =
        "The following executables are missing and must be installed: " ++
        intercalate ", " tools
    show (UnknownCompilerVersion oskey wanted known) = concat
        [ "No information found for "
        , compilerVersionString wanted
        , ".\nSupported versions for OS key '" ++ T.unpack oskey ++ "': "
        , intercalate ", " (map show known)
        ]
    show (UnknownOSKey oskey) =
        "Unable to find installation URLs for OS key: " ++
        T.unpack oskey
    show (GHCSanityCheckCompileFailed e ghc) = concat
        [ "The GHC located at "
        , toFilePath ghc
        , " failed to compile a sanity check. Please see:\n\n"
        , "    https://github.com/commercialhaskell/stack/blob/release/doc/install_and_upgrade.md\n\n"
        , "for more information. Exception was:\n"
        , show e
        ]
    show WantedMustBeGHC =
        "The wanted compiler must be GHC"
    show RequireCustomGHCVariant =
        "A custom --ghc-variant must be specified to use --ghc-bindist"
    show (ProblemWhileDecompressing archive) =
        "Problem while decompressing " ++ toFilePath archive
    show SetupInfoMissingSevenz =
        "SetupInfo missing Sevenz EXE/DLL"
    show GHCJSRequiresStandardVariant =
        "stack does not yet support using --ghc-variant with GHCJS"
    show GHCJSNotBooted =
        "GHCJS does not yet have its boot packages installed. Use \"stack setup\" to attempt to run ghcjs-boot."
    show (DockerStackExeNotFound stackVersion osKey) = concat
        [ stackProgName
        , "-"
        , versionString stackVersion
        , " executable not found for "
        , T.unpack osKey
        , "\nUse the '"
        , T.unpack dockerStackExeArgName
        , "' option to specify a location"]
-- | Modify the environment variables (like PATH) appropriately, possibly doing installation too.
--
-- Ensures a compiler is available (via 'ensureCompiler'), then builds the
-- final 'EnvConfig' whose 'configEnvOverride' lazily produces per-settings
-- environment overrides (PATH, GHC_PACKAGE_PATH, HASKELL_* sandbox vars),
-- memoized in an 'IORef' so each 'EnvSettings' combination is computed once.
setupEnv :: (MonadIO m, MonadMask m, MonadLogger m, MonadReader env m, HasBuildConfig env, HasHttpManager env, HasTerminal env, HasReExec env, HasLogLevel env, HasGHCVariant env, MonadBaseControl IO m)
         => Maybe Text -- ^ Message to give user when necessary GHC is not available
         -> m EnvConfig
setupEnv mResolveMissingGHC = do
    bconfig <- asks getBuildConfig
    let platform = getPlatform bconfig
        wc = whichCompiler (bcWantedCompiler bconfig)
        -- Derive setup options from the build configuration; sanity check
        -- and Cabal upgrade are off by default here (they are opt-in via
        -- the explicit `stack setup` command path).
        sopts = SetupOpts
            { soptsInstallIfMissing = configInstallGHC $ bcConfig bconfig
            , soptsUseSystem = configSystemGHC $ bcConfig bconfig
            , soptsWantedCompiler = bcWantedCompiler bconfig
            , soptsCompilerCheck = configCompilerCheck $ bcConfig bconfig
            , soptsStackYaml = Just $ bcStackYaml bconfig
            , soptsForceReinstall = False
            , soptsSanityCheck = False
            , soptsSkipGhcCheck = configSkipGHCCheck $ bcConfig bconfig
            , soptsSkipMsys = configSkipMsys $ bcConfig bconfig
            , soptsUpgradeCabal = False
            , soptsResolveMissingGHC = mResolveMissingGHC
            , soptsStackSetupYaml = defaultStackSetupYaml
            , soptsGHCBindistURL = Nothing
            }

    mghcBin <- ensureCompiler sopts

    -- Modify the initial environment to include the GHC path, if a local GHC
    -- is being used
    menv0 <- getMinimalEnvOverride
    let env = removeHaskellEnvVars
            $ augmentPathMap (maybe [] edBins mghcBin)
            $ unEnvOverride menv0

    menv <- mkEnvOverride platform env
    compilerVer <- getCompilerVersion menv wc
    cabalVer <- getCabalPkgVer menv wc
    packages <- mapM
        (resolvePackageEntry menv (bcRoot bconfig))
        (bcPackageEntries bconfig)
    -- Preliminary EnvConfig used only to run the Reader-based path
    -- computations below; the final one is built at the end.
    let envConfig0 = EnvConfig
            { envConfigBuildConfig = bconfig
            , envConfigCabalVersion = cabalVer
            , envConfigCompilerVersion = compilerVer
            , envConfigPackages = Map.fromList $ concat packages
            }

    -- extra installation bin directories
    mkDirs <- runReaderT extraBinDirs envConfig0
    let mpath = Map.lookup "PATH" env
        mkDirs' = map toFilePath . mkDirs
        depsPath = augmentPath (mkDirs' False) mpath
        localsPath = augmentPath (mkDirs' True) mpath

    deps <- runReaderT packageDatabaseDeps envConfig0
    createDatabase menv wc deps
    localdb <- runReaderT packageDatabaseLocal envConfig0
    createDatabase menv wc localdb
    globaldb <- getGlobalDB menv wc
    let mkGPP locals = mkGhcPackagePath locals localdb deps globaldb

    distDir <- runReaderT distRelativeDir envConfig0

    executablePath <- liftIO getExecutablePath

    utf8EnvVars <- getUtf8LocaleVars menv

    -- Cache of EnvOverride values keyed by EnvSettings, so repeated
    -- requests for the same settings reuse the same override.
    envRef <- liftIO $ newIORef Map.empty
    let getEnvOverride' es = do
            m <- readIORef envRef
            case Map.lookup es m of
                Just eo -> return eo
                Nothing -> do
                    eo <- mkEnvOverride platform
                        $ Map.insert "PATH" (if esIncludeLocals es then localsPath else depsPath)
                        $ (if esIncludeGhcPackagePath es
                            then Map.insert
                                   (case wc of { Ghc -> "GHC_PACKAGE_PATH"; Ghcjs -> "GHCJS_PACKAGE_PATH" })
                                   (mkGPP (esIncludeLocals es))
                            else id)
                        $ (if esStackExe es
                            then Map.insert "STACK_EXE" (T.pack executablePath)
                            else id)
                        $ (if esLocaleUtf8 es
                            then Map.union utf8EnvVars
                            else id)
                        -- For reasoning and duplication, see: https://github.com/fpco/stack/issues/70
                        $ Map.insert "HASKELL_PACKAGE_SANDBOX" (T.pack $ toFilePathNoTrailingSep deps)
                        $ Map.insert "HASKELL_PACKAGE_SANDBOXES"
                            (T.pack $ if esIncludeLocals es
                                then intercalate [searchPathSeparator]
                                        [ toFilePathNoTrailingSep localdb
                                        , toFilePathNoTrailingSep deps
                                        , ""
                                        ]
                                else intercalate [searchPathSeparator]
                                        [ toFilePathNoTrailingSep deps
                                        , ""
                                        ])
                        $ Map.insert "HASKELL_DIST_DIR" (T.pack $ toFilePathNoTrailingSep distDir)
                        $ env
                    -- Strict unit bind forces the insert before returning.
                    !() <- atomicModifyIORef envRef $ \m' ->
                        (Map.insert es eo m', ())
                    return eo
    -- Note: record update binds tighter than application, so the
    -- include/lib dirs (when a stack-managed GHC is in play) are added to
    -- the config that already carries the env-override function.
    return EnvConfig
        { envConfigBuildConfig = bconfig
            { bcConfig = maybe id addIncludeLib mghcBin
                          (bcConfig bconfig)
                { configEnvOverride = getEnvOverride' }
            }
        , envConfigCabalVersion = cabalVer
        , envConfigCompilerVersion = compilerVer
        , envConfigPackages = envConfigPackages envConfig0
        }
-- | Extend a 'Config' with the extra include and library directories
-- that accompany an installed compiler (bin dirs are handled separately
-- by PATH augmentation).
addIncludeLib :: ExtraDirs -> Config -> Config
addIncludeLib (ExtraDirs _bins includes libs) config =
    config
        { configExtraIncludeDirs =
            configExtraIncludeDirs config `Set.union` toTextSet includes
        , configExtraLibDirs =
            configExtraLibDirs config `Set.union` toTextSet libs
        }
  where
    -- Convert a list of FilePaths into a set of Text directory names.
    toTextSet = Set.fromList . map T.pack
-- | Ensure compiler (ghc or ghcjs) is installed and provide the PATHs to add if necessary.
--
-- Returns 'Nothing' when an acceptable system compiler is being used
-- (nothing to add to PATH); otherwise returns the extra directories of the
-- stack-managed installation. Also handles msys2 on Windows, optional
-- Cabal upgrade, GHCJS booting, and the optional sanity check.
ensureCompiler :: (MonadIO m, MonadMask m, MonadLogger m, MonadReader env m, HasConfig env, HasHttpManager env, HasTerminal env, HasReExec env, HasLogLevel env, HasGHCVariant env, MonadBaseControl IO m)
               => SetupOpts
               -> m (Maybe ExtraDirs)
ensureCompiler sopts = do
    let wc = whichCompiler (soptsWantedCompiler sopts)
    when (getGhcVersion (soptsWantedCompiler sopts) < $(mkVersion "7.8")) $ do
        $logWarn "stack will almost certainly fail with GHC below version 7.8"
        $logWarn "Valiantly attempting to run anyway, but I know this is doomed"
        $logWarn "For more information, see: https://github.com/commercialhaskell/stack/issues/648"
        $logWarn ""

    -- Check the available GHCs
    menv0 <- getMinimalEnvOverride

    msystem <-
        if soptsUseSystem sopts
            then getSystemCompiler menv0 wc
            else return Nothing

    Platform expectedArch _ <- asks getPlatform

    -- A local (stack-managed) compiler is needed unless the system one
    -- matches the wanted version/check AND the expected architecture.
    let needLocal = case msystem of
            Nothing -> True
            Just _ | soptsSkipGhcCheck sopts -> False
            Just (system, arch) ->
                not (isWanted system) ||
                arch /= expectedArch
        isWanted = isWantedCompiler (soptsCompilerCheck sopts) (soptsWantedCompiler sopts)

    -- If we need to install a GHC, try to do so
    mtools <- if needLocal
        then do
            -- Fetch setup info at most once, even if both the compiler and
            -- msys2 installs need it.
            getSetupInfo' <- runOnce (getSetupInfo (soptsStackSetupYaml sopts) =<< asks getHttpManager)

            localPrograms <- asks $ configLocalPrograms . getConfig
            installed <- listInstalled localPrograms

            -- Install GHC
            ghcVariant <- asks getGHCVariant
            config <- asks getConfig
            ghcPkgName <- parsePackageNameFromString ("ghc" ++ ghcVariantSuffix ghcVariant)
            let installedCompiler =
                    case wc of
                        Ghc -> getInstalledTool installed ghcPkgName (isWanted . GhcVersion)
                        Ghcjs -> getInstalledGhcjs installed isWanted
            compilerTool <- case installedCompiler of
                Just tool -> return tool
                Nothing
                    | soptsInstallIfMissing sopts -> do
                        si <- getSetupInfo'
                        downloadAndInstallCompiler
                            si
                            (soptsWantedCompiler sopts)
                            (soptsCompilerCheck sopts)
                            (soptsGHCBindistURL sopts)
                    | otherwise -> do
                        throwM $ CompilerVersionMismatch
                            msystem
                            (soptsWantedCompiler sopts, expectedArch)
                            ghcVariant
                            (soptsCompilerCheck sopts)
                            (soptsStackYaml sopts)
                            (fromMaybe
                                ("Try running \"stack setup\" to install the correct GHC into "
                                <> T.pack (toFilePath (configLocalPrograms config)))
                                $ soptsResolveMissingGHC sopts)

            -- Install msys2 on windows, if necessary
            platform <- asks getPlatform
            mmsys2Tool <- case platform of
                Platform _ Cabal.Windows | not (soptsSkipMsys sopts) ->
                    case getInstalledTool installed $(mkPackageName "msys2") (const True) of
                        Just tool -> return (Just tool)
                        Nothing
                            | soptsInstallIfMissing sopts -> do
                                si <- getSetupInfo'
                                osKey <- getOSKey platform
                                VersionedDownloadInfo version info <-
                                    case Map.lookup osKey $ siMsys2 si of
                                        Just x -> return x
                                        Nothing -> error $ "MSYS2 not found for " ++ T.unpack osKey
                                let tool = Tool (PackageIdentifier $(mkPackageName "msys2") version)
                                Just <$> downloadAndInstallTool (configLocalPrograms config) si info tool (installMsys2Windows osKey)
                            | otherwise -> do
                                -- Best-effort: msys2 missing is a warning, not fatal.
                                $logWarn "Continuing despite missing tool: msys2"
                                return Nothing
                _ -> return Nothing
            return $ Just (compilerTool, mmsys2Tool)
        else return Nothing

    mpaths <- case mtools of
        Nothing -> return Nothing
        Just (compilerTool, mmsys2Tool) -> do
            -- Combine the extra dirs of the compiler and (maybe) msys2.
            let idents = catMaybes [Just compilerTool, mmsys2Tool]
            paths <- mapM extraDirs idents
            return $ Just $ mconcat paths

    menv <-
        case mpaths of
            Nothing -> return menv0
            Just ed -> do
                config <- asks getConfig
                let m = augmentPathMap (edBins ed) (unEnvOverride menv0)
                mkEnvOverride (configPlatform config) (removeHaskellEnvVars m)

    when (soptsUpgradeCabal sopts) $ do
        unless needLocal $ do
            $logWarn "Trying to upgrade Cabal library on a GHC not installed by stack."
            $logWarn "This may fail, caveat emptor!"
        upgradeCabal menv wc

    case mtools of
        Just (ToolGhcjs cv, _) -> ensureGhcjsBooted menv cv (soptsInstallIfMissing sopts)
        _ -> return ()

    when (soptsSanityCheck sopts) $ sanityCheck menv wc

    return mpaths
-- | Ensure a Docker container-compatible 'stack' executable is downloaded,
-- returning its path. Downloads (for the current 'Meta.version') happen
-- only when the executable is not already present under the
-- platform-specific programs directory.
ensureDockerStackExe
    :: (MonadIO m, MonadMask m, MonadLogger m, MonadReader env m, HasConfig env, HasHttpManager env, MonadBaseControl IO m)
    => Platform -> m (Path Abs File)
ensureDockerStackExe containerPlatform = do
    config <- asks getConfig
    containerPlatformDir <- runReaderT platformOnlyRelDir containerPlatform
    let programsPath = configLocalProgramsBase config </> containerPlatformDir
        stackVersion = fromCabalVersion Meta.version
        tool = Tool (PackageIdentifier $(mkPackageName "stack") stackVersion)
    stackExePath <- (</> $(mkRelFile "stack")) <$> installDir programsPath tool
    stackExeExists <- fileExists stackExePath
    unless stackExeExists $
        do
            $logInfo $ mconcat ["Downloading Docker-compatible ", T.pack stackProgName, " executable"]
            si <- getSetupInfo defaultStackSetupYaml =<< asks getHttpManager
            osKey <- getOSKey containerPlatform
            -- Look up the download info by OS key, then by exact stack
            -- version; either miss is a DockerStackExeNotFound.
            info <-
                case Map.lookup osKey (siStack si) of
                    Just versions ->
                        case Map.lookup stackVersion versions of
                            Just x -> return x
                            Nothing -> throwM (DockerStackExeNotFound stackVersion osKey)
                    Nothing -> throwM (DockerStackExeNotFound stackVersion osKey)
            _ <-
                downloadAndInstallTool
                    programsPath
                    si
                    info
                    tool
                    installDockerStackExe
            return ()
    return stackExePath
-- | Install the newest version of Cabal globally.
--
-- Resolves the newest Cabal from the package index, and if it is newer
-- than the installed one, unpacks it to a temp dir and runs its Setup
-- (configure/build/install) against the compiler's own install tree.
-- NOTE(review): relies on the compiler's default package db being the
-- target of @Setup install@ — only reliable for stack-managed GHCs.
upgradeCabal :: (MonadIO m, MonadLogger m, MonadReader env m, HasHttpManager env, HasConfig env, MonadBaseControl IO m, MonadMask m)
             => EnvOverride
             -> WhichCompiler
             -> m ()
upgradeCabal menv wc = do
    let name = $(mkPackageName "Cabal")
    rmap <- resolvePackages menv Set.empty (Set.singleton name)
    -- Exactly one identifier, matching the Cabal package name, is
    -- expected; anything else indicates a broken index.
    newest <-
        case Map.keys rmap of
            [] -> error "No Cabal library found in index, cannot upgrade"
            [PackageIdentifier name' version]
                | name == name' -> return version
            x -> error $ "Unexpected results for resolvePackages: " ++ show x
    installed <- getCabalPkgVer menv wc
    if installed >= newest
        then $logInfo $ T.concat
            [ "Currently installed Cabal is "
            , T.pack $ versionString installed
            , ", newest is "
            , T.pack $ versionString newest
            , ". I'm not upgrading Cabal."
            ]
        else withCanonicalizedSystemTempDirectory "stack-cabal-upgrade" $ \tmpdir -> do
            $logInfo $ T.concat
                [ "Installing Cabal-"
                , T.pack $ versionString newest
                , " to replace "
                , T.pack $ versionString installed
                ]
            let ident = PackageIdentifier name newest
            m <- unpackPackageIdents menv tmpdir Nothing (Set.singleton ident)
            compilerPath <- join $ findExecutable menv (compilerExeName wc)
            newestDir <- parseRelDir $ versionString newest
            -- Install under <compiler root>/new-cabal/<version>.
            let installRoot = toFilePath $ parent (parent compilerPath)
                                      </> $(mkRelDir "new-cabal")
                                      </> newestDir
            dir <-
                case Map.lookup ident m of
                    Nothing -> error $ "upgradeCabal: Invariant violated, dir missing"
                    Just dir -> return dir
            -- Compile Setup.hs with the compiler itself, producing Setup/Setup.exe.
            runIn dir (compilerExeName wc) menv ["Setup.hs"] Nothing
            platform <- asks getPlatform
            let setupExe = toFilePath $ dir </>
                    (case platform of
                        Platform _ Cabal.Windows -> $(mkRelFile "Setup.exe")
                        _ -> $(mkRelFile "Setup"))
                -- e.g. dirArgument "lib" == "--libdir=<installRoot>/lib"
                dirArgument name' = concat
                    [ "--"
                    , name'
                    , "dir="
                    , installRoot FP.</> name'
                    ]
            runIn dir setupExe menv
                ( "configure"
                : map dirArgument (words "lib bin data doc")
                )
                Nothing
            runIn dir setupExe menv ["build"] Nothing
            runIn dir setupExe menv ["install"] Nothing
            $logInfo "New Cabal library installed"
-- | Get the version of the system compiler, if available
getSystemCompiler :: (MonadIO m, MonadLogger m, MonadBaseControl IO m, MonadCatch m) => EnvOverride -> WhichCompiler -> m (Maybe (CompilerVersion, Arch))
getSystemCompiler menv wc = do
let exeName = case wc of
Ghc -> "ghc"
Ghcjs -> "ghcjs"
exists <- doesExecutableExist menv exeName
if exists
then do
eres <- tryProcessStdout Nothing menv exeName ["--info"]
let minfo = do
Right bs <- Just eres
pairs <- readMay $ S8.unpack bs :: Maybe [(String, String)]
version <- lookup "Project version" pairs >>= parseVersionFromString
arch <- lookup "Target platform" pairs >>= simpleParse . takeWhile (/= '-')
return (version, arch)
case (wc, minfo) of
(Ghc, Just (version, arch)) -> return (Just (GhcVersion version, arch))
(Ghcjs, Just (_, arch)) -> do
eversion <- tryAny $ getCompilerVersion menv Ghcjs
case eversion of
Left _ -> return Nothing
Right version -> return (Just (version, arch))
(_, Nothing) -> return Nothing
else return Nothing
-- | Download the most recent SetupInfo.
--
-- Loads the primary stack-setup.yaml (URL or local file) plus any extra
-- locations from the config, and merges them monoidally (earlier sources
-- take precedence per the SetupInfo Monoid instance -- TODO confirm).
getSetupInfo
    :: (MonadIO m, MonadThrow m, MonadLogger m, MonadReader env m, HasConfig env)
    => String -> Manager -> m SetupInfo
getSetupInfo stackSetupYaml manager = do
    config <- asks getConfig
    setupInfos <-
        mapM
            loadSetupInfo
            (SetupInfoFileOrURL stackSetupYaml :
             configSetupInfoLocations config)
    return (mconcat setupInfos)
  where
    loadSetupInfo (SetupInfoInline si) = return si
    loadSetupInfo (SetupInfoFileOrURL urlOrFile) = do
        -- If the string parses as a URL, fetch it over HTTP; otherwise
        -- treat it as a local file path.
        bs <-
            case parseUrl urlOrFile of
                Just req -> do
                    bss <-
                        liftIO $
                        flip runReaderT manager $
                        withResponse req $
                        \res ->
                             responseBody res $$ CL.consume
                    return $ S8.concat bss
                Nothing -> liftIO $ S.readFile urlOrFile
        (si,warnings) <- either throwM return (Yaml.decodeEither' bs)
        -- Don't spam warnings for the default upstream file.
        when (urlOrFile /= defaultStackSetupYaml) $
            logJSONWarnings urlOrFile warnings
        return si
-- | Select the newest installed version of the given package that the
-- version predicate accepts, or 'Nothing' when no installed version matches.
getInstalledTool :: [Tool] -- ^ already installed
                 -> PackageName -- ^ package to find
                 -> (Version -> Bool) -- ^ which versions are acceptable
                 -> Maybe Tool
getInstalledTool installed name goodVersion =
    case candidates of
        [] -> Nothing
        _ -> Just $ Tool $ maximumBy (comparing packageIdentifierVersion) candidates
  where
    -- Installed package identifiers matching both the name and predicate.
    candidates = mapMaybe matching installed
    matching (Tool pi')
        | packageIdentifierName pi' == name
        , goodVersion (packageIdentifierVersion pi') = Just pi'
    matching _ = Nothing
-- | Select the newest installed GHCJS whose compiler version satisfies the
-- predicate, or 'Nothing' when none is installed.
getInstalledGhcjs :: [Tool]
                  -> (CompilerVersion -> Bool)
                  -> Maybe Tool
getInstalledGhcjs installed goodVersion =
    -- The constructor pattern in the comprehension silently skips any
    -- non-GHCJS tools.
    case [cv | ToolGhcjs cv <- installed, goodVersion cv] of
        [] -> Nothing
        cvs -> Just (ToolGhcjs (maximum cvs))
-- | Download a tool's archive, then run the supplied installer into the
-- tool's install directory, maintaining the "installed" marker around the
-- operation so a partial install is never mistaken for a complete one.
downloadAndInstallTool :: (MonadIO m, MonadMask m, MonadLogger m, MonadReader env m, HasConfig env, HasHttpManager env, MonadBaseControl IO m)
                       => Path Abs Dir
                       -> SetupInfo
                       -> DownloadInfo
                       -> Tool
                       -> (SetupInfo -> Path Abs File -> ArchiveType -> Path Abs Dir -> m ())
                       -> m Tool
downloadAndInstallTool programsDir si downloadInfo tool installer = do
    (archivePath, archiveType) <- downloadFromInfo programsDir downloadInfo tool
    destDir <- installDir programsDir tool
    -- Clear any stale marker before (re)installing.
    unmarkInstalled programsDir tool
    installer si archivePath archiveType destDir
    -- Only mark installed after the installer completed without throwing.
    markInstalled programsDir tool
    return tool
-- | Download and install the wanted compiler: a GHC bindist (first clause)
-- or GHCJS built from source (second clause).
downloadAndInstallCompiler :: (MonadIO m, MonadMask m, MonadLogger m, MonadReader env m, HasConfig env, HasGHCVariant env, HasHttpManager env, HasTerminal env, HasReExec env, HasLogLevel env, MonadBaseControl IO m)
                           => SetupInfo
                           -> CompilerVersion
                           -> VersionCheck
                           -> (Maybe String)
                           -> m Tool
downloadAndInstallCompiler si wanted@(GhcVersion{}) versionCheck mbindistURL = do
    ghcVariant <- asks getGHCVariant
    (selectedVersion, downloadInfo) <- case mbindistURL of
        Just bindistURL -> do
            -- A user-supplied bindist URL is only allowed with a custom
            -- GHC variant (so the install dir is distinguishable).
            case ghcVariant of
                GHCCustom _ -> return ()
                _ -> throwM RequireCustomGHCVariant
            case wanted of
                GhcVersion version ->
                    return (version, DownloadInfo (T.pack bindistURL) Nothing Nothing)
                _ ->
                    throwM WantedMustBeGHC
        _ -> do
            -- Normal path: look up the download table by OS+variant key.
            ghcKey <- getGhcKey
            case Map.lookup ghcKey $ siGHCs si of
                Nothing -> throwM $ UnknownOSKey ghcKey
                Just pairs -> getWantedCompilerInfo ghcKey versionCheck wanted GhcVersion pairs
    config <- asks getConfig
    let installer =
            case configPlatform config of
                Platform _ Cabal.Windows -> installGHCWindows selectedVersion
                _ -> installGHCPosix selectedVersion
    $logInfo $
        "Preparing to install GHC" <>
        (case ghcVariant of
            GHCStandard -> ""
            v -> " (" <> T.pack (ghcVariantName v) <> ")") <>
        " to an isolated location."
    $logInfo "This will not interfere with any system-level installation."
    ghcPkgName <- parsePackageNameFromString ("ghc" ++ ghcVariantSuffix ghcVariant)
    let tool = Tool $ PackageIdentifier ghcPkgName selectedVersion
    downloadAndInstallTool (configLocalPrograms config) si downloadInfo tool installer
downloadAndInstallCompiler si wanted@(GhcjsVersion version _) versionCheck _mbindistUrl = do
    config <- asks getConfig
    ghcVariant <- asks getGHCVariant
    -- GHCJS installs are only supported with the standard GHC variant.
    case ghcVariant of
        GHCStandard -> return ()
        _ -> throwM GHCJSRequiresStandardVariant
    -- GHCJS is always built from source, keyed under "source".
    (selectedVersion, downloadInfo) <- case Map.lookup "source" $ siGHCJSs si of
        Nothing -> throwM $ UnknownOSKey "source"
        Just pairs -> getWantedCompilerInfo "source" versionCheck wanted id pairs
    $logInfo "Preparing to install GHCJS to an isolated location."
    $logInfo "This will not interfere with any system-level installation."
    downloadAndInstallTool (configLocalPrograms config) si downloadInfo (ToolGhcjs selectedVersion) (installGHCJS version)
-- | Pick the greatest key whose compiler version satisfies the wanted
-- version check, or throw 'UnknownCompilerVersion' listing all candidates.
getWantedCompilerInfo :: (Ord k, MonadThrow m)
                      => Text
                      -> VersionCheck
                      -> CompilerVersion
                      -> (k -> CompilerVersion)
                      -> Map k a
                      -> m (k, a)
getWantedCompilerInfo key versionCheck wanted toCV pairs =
    case acceptable of
        [] -> throwM $ UnknownCompilerVersion key wanted (map toCV (Map.keys pairs))
        -- Map keys are distinct, so the maximum by key is unambiguous.
        _ -> return $ maximumBy (comparing fst) acceptable
  where
    -- Entries whose key maps to a compiler version we are willing to use.
    acceptable =
        filter (isWantedCompiler versionCheck wanted . toCV . fst) (Map.toList pairs)
-- | Compute the lookup key for GHC downloads: the platform's OS key with
-- the GHC variant suffix appended (empty for the standard variant).
getGhcKey :: (MonadReader env m, MonadThrow m, HasPlatform env, HasGHCVariant env, MonadLogger m, MonadIO m, MonadCatch m, MonadBaseControl IO m)
          => m Text
getGhcKey = do
    variant <- asks getGHCVariant
    osKey <- getOSKey =<< asks getPlatform
    return $ osKey <> T.pack (ghcVariantSuffix variant)
-- | Map a platform to the OS key used in the stack-setup.yaml download
-- tables, throwing 'UnsupportedSetupCombo' for unknown combinations.
getOSKey :: (MonadReader env m, MonadThrow m, HasPlatform env, MonadLogger m, MonadIO m, MonadCatch m, MonadBaseControl IO m)
         => Platform -> m Text
getOSKey platform = do
    let Platform arch os = platform
    case lookup (arch, os) knownKeys of
        Just key -> return key
        Nothing -> throwM $ UnsupportedSetupCombo os arch
  where
    -- Association table of supported (arch, os) pairs. Note that both
    -- OS X entries deliberately share the single "macosx" key.
    knownKeys =
        [ ((I386,   Cabal.Linux),   "linux32")
        , ((X86_64, Cabal.Linux),   "linux64")
        , ((I386,   Cabal.OSX),     "macosx")
        , ((X86_64, Cabal.OSX),     "macosx")
        , ((I386,   Cabal.FreeBSD), "freebsd32")
        , ((X86_64, Cabal.FreeBSD), "freebsd64")
        , ((I386,   Cabal.OpenBSD), "openbsd32")
        , ((X86_64, Cabal.OpenBSD), "openbsd64")
        , ((I386,   Cabal.Windows), "windows32")
        , ((X86_64, Cabal.Windows), "windows64")
        ]
-- | Download a tool's archive into the programs directory, inferring the
-- archive type from the URL's (possibly compound) file extension. The
-- download is verified per the 'DownloadInfo' (see "Network.HTTP.Download.Verified").
downloadFromInfo
    :: (MonadIO m, MonadMask m, MonadLogger m, MonadReader env m, HasConfig env, HasHttpManager env, MonadBaseControl IO m)
    => Path Abs Dir -> DownloadInfo -> Tool -> m (Path Abs File, ArchiveType)
downloadFromInfo programsDir downloadInfo tool = do
    at <-
        case extension of
            ".tar.xz" -> return TarXz
            ".tar.bz2" -> return TarBz2
            ".tar.gz" -> return TarGz
            ".7z.exe" -> return SevenZ
            _ -> error $ "Unknown extension for url: " ++ T.unpack url
    relfile <- parseRelFile $ toolString tool ++ extension
    let path = programsDir </> relfile
    chattyDownload (T.pack (toolString tool)) downloadInfo path
    return (path, at)
  where
    url = downloadInfoUrl downloadInfo
    -- Collect the full compound extension (e.g. ".tar.xz") by repeatedly
    -- splitting off known single extensions from the right; stops at the
    -- first unrecognized component.
    extension =
        loop $ T.unpack url
      where
        loop fp
            | ext `elem` [".tar", ".bz2", ".xz", ".exe", ".7z", ".gz"] = loop fp' ++ ext
            | otherwise = ""
          where
            (fp', ext) = FP.splitExtension fp
-- | Supported archive formats for downloaded tool distributions, matched
-- from the download URL's extension in 'downloadFromInfo'.
data ArchiveType
    = TarBz2 -- ^ @.tar.bz2@
    | TarXz  -- ^ @.tar.xz@
    | TarGz  -- ^ @.tar.gz@
    | SevenZ -- ^ @.7z.exe@ (self-extracting 7-Zip, Windows only)
-- | Install a GHC bindist on a POSIX system: unpack the tarball in a temp
-- dir, then run @./configure --prefix=destDir@ and @make install@.
-- Requires the matching decompressor, GNU make (gmake or make), and tar
-- on the PATH.
installGHCPosix :: (MonadIO m, MonadMask m, MonadLogger m, MonadReader env m, HasConfig env, HasHttpManager env, MonadBaseControl IO m)
                => Version
                -> SetupInfo
                -> Path Abs File
                -> ArchiveType
                -> Path Abs Dir
                -> m ()
installGHCPosix version _ archiveFile archiveType destDir = do
    platform <- asks getPlatform
    -- Strip stack/GHC-related env vars so the bindist's configure/make
    -- are not confused by the parent environment.
    menv0 <- getMinimalEnvOverride
    menv <- mkEnvOverride platform (removeHaskellEnvVars (unEnvOverride menv0))
    $logDebug $ "menv = " <> T.pack (show (unEnvOverride menv))
    zipTool' <-
        case archiveType of
            TarXz -> return "xz"
            TarBz2 -> return "bzip2"
            TarGz -> return "gzip"
            SevenZ -> error "Don't know how to deal with .7z files on non-Windows"
    -- Fail early with MissingDependencies if any required tool is absent.
    (zipTool, makeTool, tarTool) <- checkDependencies $ (,,)
        <$> checkDependency zipTool'
        <*> (checkDependency "gmake" <|> checkDependency "make")
        <*> checkDependency "tar"
    $logDebug $ "ziptool: " <> T.pack zipTool
    $logDebug $ "make: " <> T.pack makeTool
    $logDebug $ "tar: " <> T.pack tarTool
    withCanonicalizedSystemTempDirectory "stack-setup" $ \root -> do
        -- The tarball unpacks to a "ghc-<version>" directory.
        dir <-
            liftM (root Path.</>) $
            parseRelDir $
            "ghc-" ++ versionString version
        $logSticky $ T.concat ["Unpacking GHC into ", (T.pack . toFilePath $ root), " ..."]
        $logDebug $ "Unpacking " <> T.pack (toFilePath archiveFile)
        readInNull root tarTool menv ["xf", toFilePath archiveFile] Nothing
        $logSticky "Configuring GHC ..."
        readInNull dir (toFilePath $ dir Path.</> $(mkRelFile "configure"))
            menv ["--prefix=" ++ toFilePath destDir] Nothing
        $logSticky "Installing GHC ..."
        readInNull dir makeTool menv ["install"] Nothing
        $logStickyDone $ "Installed GHC."
        $logDebug $ "GHC installed to " <> T.pack (toFilePath destDir)
-- | Install GHCJS from a source distribution: unpack the tarball into the
-- destination's @src@ subdirectory, then build it with an inner stack
-- invocation using the stack.yaml shipped inside the tarball.
installGHCJS :: (MonadIO m, MonadMask m, MonadLogger m, MonadReader env m, HasConfig env, HasHttpManager env, HasTerminal env, HasReExec env, HasLogLevel env, MonadBaseControl IO m)
             => Version
             -> SetupInfo
             -> Path Abs File
             -> ArchiveType
             -> Path Abs Dir
             -> m ()
installGHCJS version si archiveFile archiveType destDir = do
    platform <- asks getPlatform
    menv0 <- getMinimalEnvOverride
    -- This ensures that locking is disabled for the invocations of
    -- stack below.
    let removeLockVar = Map.delete "STACK_LOCK"
    menv <- mkEnvOverride platform (removeLockVar (removeHaskellEnvVars (unEnvOverride menv0)))
    $logDebug $ "menv = " <> T.pack (show (unEnvOverride menv))
    -- NOTE: this is a bit of a hack - instead of using a temp
    -- directory, leave the unpacked source tarball in the destination
    -- directory. This way, the absolute paths in the wrapper scripts
    -- will point to executables that exist in
    -- src/.stack-work/install/... - see
    -- https://github.com/commercialhaskell/stack/issues/1016
    --
    -- This is also used by 'ensureGhcjsBooted', because it can use the
    -- environment of the stack.yaml which came with ghcjs, in order to
    -- install cabal-install. This lets us also fix the version of
    -- cabal-install used.
    let unpackDir = destDir Path.</> $(mkRelDir "src")
    tarComponent <- parseRelDir ("ghcjs-" ++ versionString version)
    -- Build a deferred unpack action: 7z-based on Windows, external
    -- tar/decompressor elsewhere (dependency check happens now, so
    -- missing tools fail before any work is done).
    runUnpack <- case platform of
        Platform _ Cabal.Windows -> return $ do
            withUnpackedTarball7z "GHCJS" si archiveFile archiveType tarComponent unpackDir
        _ -> do
            zipTool' <-
                case archiveType of
                    TarXz -> return "xz"
                    TarBz2 -> return "bzip2"
                    TarGz -> return "gzip"
                    SevenZ -> error "Don't know how to deal with .7z files on non-Windows"
            (zipTool, tarTool) <- checkDependencies $ (,)
                <$> checkDependency zipTool'
                <*> checkDependency "tar"
            $logDebug $ "ziptool: " <> T.pack zipTool
            $logDebug $ "tar: " <> T.pack tarTool
            return $ do
                removeTreeIfExists unpackDir
                readInNull destDir tarTool menv ["xf", toFilePath archiveFile] Nothing
                renameDir (destDir Path.</> tarComponent) unpackDir
    $logSticky $ T.concat ["Unpacking GHCJS into ", (T.pack . toFilePath $ unpackDir), " ..."]
    $logDebug $ "Unpacking " <> T.pack (toFilePath archiveFile)
    runUnpack
    $logSticky "Setting up GHCJS build environment"
    let stackYaml = unpackDir </> $(mkRelFile "stack.yaml")
        destBinDir = destDir Path.</> $(mkRelDir "bin")
    createTree destBinDir
    envConfig <- loadGhcjsEnvConfig stackYaml destBinDir
    -- On windows we need to copy options files out of the install dir. Argh!
    -- This is done before the build, so that if it fails, things fail
    -- earlier.
    mwindowsInstallDir <- case platform of
        Platform _ Cabal.Windows ->
            liftM Just $ runInnerStackT envConfig installationRootLocal
        _ -> return Nothing
    $logSticky "Installing GHCJS (this will take a long time) ..."
    runInnerStackT envConfig $
        build (\_ -> return ()) Nothing defaultBuildOpts { boptsInstallExes = True }
    -- Copy over *.options files needed on windows.
    forM_ mwindowsInstallDir $ \dir -> do
        (_, files) <- listDirectory (dir </> $(mkRelDir "bin"))
        forM_ (filter ((".options" `isSuffixOf`). toFilePath) files) $ \optionsFile -> do
            let dest = destDir </> $(mkRelDir "bin") </> filename optionsFile
            removeFileIfExists dest
            copyFile optionsFile dest
    $logStickyDone "Installed GHCJS."
-- | Install the downloaded stack binary distribution: extract the gzipped
-- tarball directly into the destination, stripping the archive's single
-- top-level directory component.
installDockerStackExe
    :: (MonadIO m, MonadMask m, MonadLogger m, MonadReader env m, HasConfig env, HasHttpManager env, MonadBaseControl IO m)
    => SetupInfo
    -> Path Abs File
    -> ArchiveType
    -> Path Abs Dir
    -> m ()
installDockerStackExe _ archiveFile _ destDir = do
    -- Require gzip and tar; fails with MissingDependencies otherwise.
    (_,tarTool) <-
        checkDependencies $
        (,) <$> checkDependency "gzip" <*> checkDependency "tar"
    menv <- getMinimalEnvOverride
    createTree destDir
    readInNull
        destDir
        tarTool
        menv
        ["xf", toFilePath archiveFile, "--strip-components", "1"]
        Nothing
-- | Check whether GHCJS has its boot packages installed, and run
-- ghcjs-boot (via 'bootGhcjs') if not and booting is allowed.
--
-- Detection works by invoking @ghcjs@ with no input and inspecting the
-- error output: "no input files" means booted, "ghcjs_boot.completed"
-- missing means not booted.
ensureGhcjsBooted :: (MonadIO m, MonadBaseControl IO m, MonadLogger m, MonadCatch m, HasConfig env, HasHttpManager env, HasTerminal env, HasReExec env, HasLogLevel env, MonadReader env m)
                  => EnvOverride -> CompilerVersion -> Bool -> m ()
ensureGhcjsBooted menv cv shouldBoot = do
    eres <- try $ sinkProcessStdout Nothing menv "ghcjs" [] (return ())
    case eres of
        Right () -> return ()
        Left (ReadProcessException _ _ _ err) | "no input files" `S.isInfixOf` LBS.toStrict err ->
            return ()
        Left (ReadProcessException _ _ _ err) | "ghcjs_boot.completed" `S.isInfixOf` LBS.toStrict err ->
            if not shouldBoot then throwM GHCJSNotBooted else do
                config <- asks getConfig
                destDir <- installDir (configLocalPrograms config) (ToolGhcjs cv)
                let stackYaml = destDir </> $(mkRelFile "src/stack.yaml")
                -- TODO: Remove 'actualStackYaml' and just use
                -- 'stackYaml' for a version after 0.1.6. It's for
                -- compatibility with the directories setup used for
                -- most of the life of the development branch between
                -- 0.1.5 and 0.1.6. See
                -- https://github.com/commercialhaskell/stack/issues/749#issuecomment-147382783
                -- This only affects the case where GHCJS has been
                -- installed with an older version and not yet booted.
                stackYamlExists <- fileExists stackYaml
                actualStackYaml <- if stackYamlExists then return stackYaml
                    else case cv of
                        GhcjsVersion version _ ->
                            liftM ((destDir Path.</> $(mkRelDir "src")) Path.</>) $
                            parseRelFile $ "ghcjs-" ++ versionString version ++ "/stack.yaml"
                        _ -> fail "ensureGhcjsBooted invoked on non GhcjsVersion"
                actualStackYamlExists <- fileExists actualStackYaml
                when (not actualStackYamlExists) $
                    fail "Couldn't find GHCJS stack.yaml in old or new location."
                bootGhcjs actualStackYaml destDir
        -- Any other process failure is re-thrown unchanged.
        Left err -> throwM err
-- | Run @ghcjs-boot --clean@ for a freshly installed GHCJS.
--
-- Loads an 'EnvConfig' from the GHCJS source tree's @stack.yaml@, ensures a
-- usable @cabal-install@ (>= 1.22.4) is on the PATH (building a local copy
-- from source otherwise), and then boots GHCJS with GHC_PACKAGE_PATH
-- excluded from the child environment ('esIncludeGhcPackagePath' is
-- disabled).
bootGhcjs :: (MonadIO m, MonadBaseControl IO m, MonadLogger m, MonadCatch m, HasHttpManager env, HasTerminal env, HasReExec env, HasLogLevel env, MonadReader env m)
          => Path Abs File -> Path Abs Dir -> m ()
bootGhcjs stackYaml destDir = do
    envConfig <- loadGhcjsEnvConfig stackYaml (destDir </> $(mkRelDir "bin"))
    menv <- liftIO $ configEnvOverride (getConfig envConfig) defaultEnvSettings
    -- Install cabal-install if missing, or if the installed one is old.
    mcabal <- getCabalInstallVersion menv
    shouldInstallCabal <- case mcabal of
        Nothing -> do
            $logInfo "No cabal-install binary found for use with GHCJS. Installing a local copy of cabal-install from source."
            return True
        Just v
            | v < $(mkVersion "1.22.4") -> do
                $logInfo $
                    "cabal-install found on PATH is too old to be used for booting GHCJS (version " <>
                    versionText v <>
                    "). Installing a local copy of cabal-install from source."
                return True
            | otherwise -> return False
    when shouldInstallCabal $ do
        $logSticky "Building cabal-install for use by ghcjs-boot ... "
        runInnerStackT envConfig $
            build (\_ -> return ())
                  Nothing
                  defaultBuildOpts { boptsTargets = ["cabal-install"] }
    $logSticky "Booting GHCJS (this will take a long time) ..."
    -- ghcjs-boot must not see GHC_PACKAGE_PATH, so build a fresh override
    -- with that setting turned off.
    let envSettings = defaultEnvSettings { esIncludeGhcPackagePath = False }
    menv' <- liftIO $ configEnvOverride (getConfig envConfig) envSettings
    runAndLog Nothing "ghcjs-boot" menv' ["--clean"]
    $logStickyDone "GHCJS booted."
-- TODO: something similar is done in Stack.Build.Execute. Create some utilities
-- for this?
-- | Run a process, streaming both its stdout and stderr line-by-line into
-- the logger at 'LevelInfo'.
runAndLog :: (MonadIO m, MonadBaseControl IO m, MonadLogger m)
          => Maybe (Path Abs Dir) -> String -> EnvOverride -> [String] -> m ()
runAndLog mdir name menv args = liftBaseWith $ \restore -> do
    -- Each output line is forwarded to the monadic logger; 'restore' brings
    -- us back into the logging monad from within the conduit sink.
    let logLines = CB.lines =$ CL.mapM_ (void . restore . monadLoggerLog $(TH.location >>= liftLoc) "" LevelInfo . toLogStr)
    void $ restore $ sinkProcessStderrStdout mdir menv name args logLines logLines
-- | Load an 'EnvConfig' for the GHCJS source tree described by the given
-- @stack.yaml@, forcing GHC installation and pointing the local bin path at
-- the supplied directory.
loadGhcjsEnvConfig :: (MonadIO m, HasHttpManager r, MonadReader r m, HasTerminal r, HasReExec r, HasLogLevel r)
                   => Path Abs File -> Path b t -> m EnvConfig
loadGhcjsEnvConfig stackYaml binPath = runInnerStackLoggingT $ do
    lc <- loadConfig
        (mempty
            { configMonoidInstallGHC = Just True
            , configMonoidLocalBinPath = Just (toFilePath binPath)
            })
        (Just stackYaml)
    bconfig <- lcLoadBuildConfig lc Nothing
    runInnerStackT bconfig $ setupEnv Nothing
-- | Query the @cabal@ executable on the PATH for its version via
-- @cabal --numeric-version@.  Returns 'Nothing' when the process cannot be
-- run successfully; otherwise parses the (whitespace-trimmed) output.
getCabalInstallVersion :: (MonadIO m, MonadBaseControl IO m, MonadLogger m, MonadCatch m)
                       => EnvOverride -> m (Maybe Version)
getCabalInstallVersion menv = do
    ebs <- tryProcessStdout Nothing menv "cabal" ["--numeric-version"]
    case ebs of
        Left _ -> return Nothing
        Right bs -> Just <$> parseVersion (T.encodeUtf8 (T.dropWhileEnd isSpace (T.decodeUtf8 bs)))
-- | Check if given processes appear to be present, throwing an exception if
-- missing.
--
-- Runs the accumulated checks against the minimal environment override; all
-- missing tools are reported together via 'MissingDependencies'.
checkDependencies :: (MonadIO m, MonadThrow m, MonadReader env m, HasConfig env)
                  => CheckDependency a -> m a
checkDependencies (CheckDependency f) = do
    menv <- getMinimalEnvOverride
    liftIO (f menv) >>= either (throwM . MissingDependencies) return
-- | A dependency check for a single executable: succeeds with the tool's
-- name when it is found on the PATH, otherwise reports it as missing.
checkDependency :: String -> CheckDependency String
checkDependency tool = CheckDependency $ \menv -> do
    found <- doesExecutableExist menv tool
    if found
        then return (Right tool)
        else return (Left [tool])
-- | An applicative check for external executables.  Runs against an
-- 'EnvOverride' and either produces a value or accumulates the names of all
-- missing tools on the 'Left'.
newtype CheckDependency a = CheckDependency (EnvOverride -> IO (Either [String] a))
    deriving Functor
-- Combining two checks runs both (so that every missing tool is reported)
-- and concatenates the missing-tool lists when both fail.
instance Applicative CheckDependency where
    pure x = CheckDependency $ \_ -> return (Right x)
    CheckDependency f <*> CheckDependency x = CheckDependency $ \menv -> do
        ef <- f menv
        ex <- x menv
        return $ case (ef, ex) of
            (Left e1, Left e2)   -> Left (e1 ++ e2)
            (Left e, _)          -> Left e
            (_, Left e)          -> Left e
            (Right g, Right y)   -> Right (g y)
-- The second check is only attempted when the first one fails.
instance Alternative CheckDependency where
    empty = CheckDependency $ \_ -> return (Left [])
    CheckDependency x <|> CheckDependency y = CheckDependency $ \menv ->
        x menv >>= either (const (y menv)) (return . Right)
-- | Install GHC on Windows by unpacking the distribution tarball with 7z
-- into the destination directory.
installGHCWindows :: (MonadIO m, MonadMask m, MonadLogger m, MonadReader env m, HasConfig env, HasHttpManager env, MonadBaseControl IO m)
                  => Version
                  -> SetupInfo
                  -> Path Abs File
                  -> ArchiveType
                  -> Path Abs Dir
                  -> m ()
installGHCWindows version si archiveFile archiveType destDir = do
    -- The archive is expected to contain a single "ghc-<version>" directory.
    tarComponent <- parseRelDir $ "ghc-" ++ versionString version
    withUnpackedTarball7z "GHC" si archiveFile archiveType tarComponent destDir
    $logInfo $ "GHC installed to " <> T.pack (toFilePath destDir)
-- | Install MSYS2 on Windows: delete any existing installation, unpack the
-- archive with 7z, and run the shell once so it can initialize itself.
installMsys2Windows :: (MonadIO m, MonadMask m, MonadLogger m, MonadReader env m, HasConfig env, HasHttpManager env, MonadBaseControl IO m)
                    => Text -- ^ OS Key
                    -> SetupInfo
                    -> Path Abs File
                    -> ArchiveType
                    -> Path Abs Dir
                    -> m ()
installMsys2Windows osKey si archiveFile archiveType destDir = do
    exists <- liftIO $ D.doesDirectoryExist $ toFilePath destDir
    when exists $ liftIO (D.removeDirectoryRecursive $ toFilePath destDir) `catchIO` \e -> do
        $logError $ T.pack $
            "Could not delete existing msys directory: " ++
            toFilePath destDir
        throwM e
    -- The archive's top-level directory name is derived from the OS key,
    -- e.g. "windows64" -> "msys64" (defaulting to "msys32").
    msys <- parseRelDir $ "msys" ++ T.unpack (fromMaybe "32" $ T.stripPrefix "windows" osKey)
    withUnpackedTarball7z "MSYS2" si archiveFile archiveType msys destDir
    -- Put the freshly unpacked usr/bin on the PATH for the initial run.
    platform <- asks getPlatform
    menv0 <- getMinimalEnvOverride
    let oldEnv = unEnvOverride menv0
        newEnv = augmentPathMap
            [toFilePath $ destDir </> $(mkRelDir "usr") </> $(mkRelDir "bin")]
            oldEnv
    menv <- mkEnvOverride platform newEnv
    -- I couldn't find this officially documented anywhere, but you need to run
    -- the shell once in order to initialize some pacman stuff. Once that run
    -- happens, you can just run commands as usual.
    runIn destDir "sh" menv ["--login", "-c", "true"] Nothing
    -- No longer installing git, it's unreliable
    -- (https://github.com/commercialhaskell/stack/issues/1046) and the
    -- MSYS2-installed version has bad CRLF defaults.
    --
    -- Install git. We could install other useful things in the future too.
    -- runIn destDir "pacman" menv ["-Sy", "--noconfirm", "git"] Nothing
-- | Unpack a compressed tarball using 7zip. Expects a single directory in
-- the unpacked results, which is renamed to the destination directory.
--
-- Two 7z passes are used: the first decompresses the archive (producing a
-- plain @.tar@ next to it), the second untars into a temporary directory
-- beside the destination, so the final rename stays on one filesystem.
withUnpackedTarball7z :: (MonadIO m, MonadMask m, MonadLogger m, MonadReader env m, HasConfig env, HasHttpManager env, MonadBaseControl IO m)
                      => String -- ^ Name of tool, used in error messages
                      -> SetupInfo
                      -> Path Abs File -- ^ Path to archive file
                      -> ArchiveType
                      -> Path Rel Dir -- ^ Name of directory expected to be in archive.
                      -> Path Abs Dir -- ^ Destination directory.
                      -> m ()
withUnpackedTarball7z name si archiveFile archiveType srcDir destDir = do
    -- Only tarball-based archive types are supported here.
    suffix <-
        case archiveType of
            TarXz -> return ".xz"
            TarBz2 -> return ".bz2"
            TarGz -> return ".gz"
            _ -> error $ name ++ " must be a tarball file"
    -- Name of the intermediate .tar file produced by the first 7z pass,
    -- obtained by stripping the compression suffix.
    tarFile <-
        case T.stripSuffix suffix $ T.pack $ toFilePath archiveFile of
            Nothing -> error $ "Invalid " ++ name ++ " filename: " ++ show archiveFile
            Just x -> parseAbsFile $ T.unpack x
    run7z <- setup7z si
    let tmpName = (toFilePathNoTrailingSep $ dirname destDir) ++ "-tmp"
    createTree (parent destDir)
    withCanonicalizedTempDirectory (toFilePath $ parent destDir) tmpName $ \tmpDir -> do
        let absSrcDir = tmpDir </> srcDir
        removeTreeIfExists destDir
        run7z (parent archiveFile) archiveFile
        run7z tmpDir tarFile
        -- Best-effort cleanup of the intermediate tar; failure is only a
        -- warning, not an error.
        removeFile tarFile `catchIO` \e ->
            $logWarn (T.concat
                [ "Exception when removing "
                , T.pack $ toFilePath tarFile
                , ": "
                , T.pack $ show e
                ])
        renameDir absSrcDir destDir
-- | Download 7z as necessary, and get a function for unpacking things.
--
-- Returned function takes an unpack directory and archive.  Throws
-- 'SetupInfoMissingSevenz' when the 'SetupInfo' lacks either download, and
-- the returned action throws 'ProblemWhileDecompressing' on a non-zero 7z
-- exit code.
setup7z :: (MonadReader env m, HasHttpManager env, HasConfig env, MonadThrow m, MonadIO m, MonadIO n, MonadLogger m, MonadBaseControl IO m)
        => SetupInfo
        -> m (Path Abs Dir -> Path Abs File -> n ())
setup7z si = do
    dir <- asks $ configLocalPrograms . getConfig
    let exe = dir </> $(mkRelFile "7z.exe")
        dll = dir </> $(mkRelFile "7z.dll")
    case (siSevenzDll si, siSevenzExe si) of
        (Just sevenzDll, Just sevenzExe) -> do
            -- Fetch the executable and its DLL into the local programs dir.
            chattyDownload "7z.dll" sevenzDll dll
            chattyDownload "7z.exe" sevenzExe exe
            return $ \outdir archive -> liftIO $ do
                ec <- rawSystem (toFilePath exe)
                    [ "x"
                    , "-o" ++ toFilePath outdir
                    , "-y"
                    , toFilePath archive
                    ]
                when (ec /= ExitSuccess)
                    $ throwM (ProblemWhileDecompressing archive)
        _ -> throwM SetupInfoMissingSevenz
-- | Download a file, emitting progress information via sticky log messages.
--
-- Verifies the download against the sha1 from the 'DownloadInfo' when one
-- is present (otherwise only warns), and checks the content length when
-- known.  Logs "Already downloaded." when 'verifiedDownload' reports that
-- no download was performed.
chattyDownload :: (MonadReader env m, HasHttpManager env, MonadIO m, MonadLogger m, MonadThrow m, MonadBaseControl IO m)
               => Text          -- ^ label
               -> DownloadInfo  -- ^ URL, content-length, and sha1
               -> Path Abs File -- ^ destination
               -> m ()
chattyDownload label downloadInfo path = do
    let url = downloadInfoUrl downloadInfo
    req <- parseUrl $ T.unpack url
    $logSticky $ T.concat
      [ "Preparing to download "
      , label
      , " ..."
      ]
    $logDebug $ T.concat
      [ "Downloading from "
      , url
      , " to "
      , T.pack $ toFilePath path
      , " ..."
      ]
    hashChecks <- case downloadInfoSha1 downloadInfo of
        Just sha1ByteString -> do
            let sha1 = CheckHexDigestByteString sha1ByteString
            $logDebug $ T.concat
                [ "Will check against sha1 hash: "
                , T.decodeUtf8With T.lenientDecode sha1ByteString
                ]
            return [HashCheck SHA1 sha1]
        Nothing -> do
            $logWarn $ T.concat
                [ "No sha1 found in metadata,"
                , " download hash won't be checked."
                ]
            return []
    let dReq = DownloadRequest
            { drRequest = req
            , drHashChecks = hashChecks
            , drLengthCheck = mtotalSize
            , drRetryPolicy = drRetryPolicyDefault
            }
    -- 'runInBase' lets the progress conduit (which runs in IO) log through
    -- this monad's logger.
    runInBase <- liftBaseWith $ \run -> return (void . run)
    x <- verifiedDownload dReq path (chattyDownloadProgress runInBase)
    if x
        then $logStickyDone ("Downloaded " <> label <> ".")
        else $logStickyDone "Already downloaded."
  where
    mtotalSize = downloadInfoContentLength downloadInfo
    -- Progress sink: sums the sizes of incoming chunks, batching sticky-log
    -- updates to roughly one per second via 'chunksOverTime'.
    chattyDownloadProgress runInBase _ = do
        _ <- liftIO $ runInBase $ $logSticky $
          label <> ": download has begun"
        CL.map (Sum . S.length)
          =$ chunksOverTime 1
          =$ go
      where
        -- Running total of downloaded bytes is kept in the conduit's state.
        go = evalStateC 0 $ awaitForever $ \(Sum size) -> do
            modify (+ size)
            totalSoFar <- get
            liftIO $ runInBase $ $logSticky $ T.pack $
                case mtotalSize of
                    Nothing -> chattyProgressNoTotal totalSoFar
                    Just 0 -> chattyProgressNoTotal totalSoFar
                    Just totalSize -> chattyProgressWithTotal totalSoFar totalSize
        -- Example: ghc: 42.13 KiB downloaded...
        chattyProgressNoTotal totalSoFar =
            printf ("%s: " <> bytesfmt "%7.2f" totalSoFar <> " downloaded...")
                   (T.unpack label)
        -- Example: ghc: 50.00 MiB / 100.00 MiB (50.00%) downloaded...
        chattyProgressWithTotal totalSoFar total =
            printf ("%s: " <>
                    bytesfmt "%7.2f" totalSoFar <> " / " <>
                    bytesfmt "%.2f" total <>
                    " (%6.2f%%) downloaded...")
                   (T.unpack label)
                   percentage
          where percentage :: Double
                percentage = (fromIntegral totalSoFar / fromIntegral total * 100)
-- | Given a printf format string for the decimal part and a number of
-- bytes, formats the bytes using an appropriate binary unit and returns the
-- formatted string.
--
-- >>> bytesfmt "%.2f" 512368
-- "500.36 KiB"
bytesfmt :: Integral a => String -> a -> String
bytesfmt formatter bs = printf (formatter <> " %s") scaled suffix
  where
    scaled :: Double
    scaled = fromIntegral (signum bs) * magnitude
    suffix = bytesSuffixes !! unitIndex
    -- Repeatedly divide by 1024 until below one unit (or out of suffixes),
    -- counting how many divisions were applied.
    (magnitude, unitIndex) = reduce (fromIntegral (abs bs)) 0
    reduce n divs
        | n < 1024 || divs == length bytesSuffixes - 1 = (n, divs)
        | otherwise = reduce (n / 1024) (divs + 1)

bytesSuffixes :: [String]
bytesSuffixes = ["B","KiB","MiB","GiB","TiB","PiB","EiB","ZiB","YiB"]
-- Await eagerly (collect with monoidal append),
-- but space out yields by at least the given amount of time.
-- The final yield may come sooner, and may be a superfluous mempty.
-- Note that Integer and Float literals can be turned into NominalDiffTime
-- (these literals are interpreted as "seconds")
chunksOverTime :: (Monoid a, MonadIO m) => NominalDiffTime -> Conduit a m a
chunksOverTime diff = do
    currentTime <- liftIO getCurrentTime
    evalStateC (currentTime, mempty) go
  where
    -- State is a tuple of:
    -- * the last time a yield happened (or the beginning of the sink)
    -- * the accumulated awaits since the last yield
    go = await >>= \case
        Nothing -> do
            -- Upstream finished: flush whatever has accumulated (which may
            -- be mempty).
            (_, acc) <- get
            yield acc
        Just a -> do
            (lastTime, acc) <- get
            let acc' = acc <> a
            currentTime <- liftIO getCurrentTime
            -- Yield only when at least 'diff' has elapsed since the last
            -- yield; otherwise keep accumulating.
            if diff < diffUTCTime currentTime lastTime
                then put (currentTime, mempty) >> yield acc'
                else put (lastTime, acc')
            go
-- | Perform a basic sanity check of GHC
--
-- Compiles a trivial "Hello World" program (importing Distribution.Simple
-- to confirm the Cabal library is present) in a temporary directory with
-- the user package database disabled; any compile failure is surfaced as
-- 'GHCSanityCheckCompileFailed'.
sanityCheck :: (MonadIO m, MonadMask m, MonadLogger m, MonadBaseControl IO m)
            => EnvOverride
            -> WhichCompiler
            -> m ()
sanityCheck menv wc = withCanonicalizedSystemTempDirectory "stack-sanity-check" $ \dir -> do
    let fp = toFilePath $ dir </> $(mkRelFile "Main.hs")
    liftIO $ writeFile fp $ unlines
        [ "import Distribution.Simple" -- ensure Cabal library is present
        , "main = putStrLn \"Hello World\""
        ]
    let exeName = compilerExeName wc
    ghc <- join $ findExecutable menv exeName
    $logDebug $ "Performing a sanity check on: " <> T.pack (toFilePath ghc)
    eres <- tryProcessStdout (Just dir) menv exeName
        [ fp
        , "-no-user-package-db"
        ]
    case eres of
        Left e -> throwM $ GHCSanityCheckCompileFailed e ghc
        Right _ -> return () -- TODO check that the output of running the command is correct
-- Remove potentially confusing environment variables: strip every
-- Haskell-toolchain-related variable from the given environment map.
removeHaskellEnvVars :: Map Text Text -> Map Text Text
removeHaskellEnvVars env = foldr Map.delete env
    [ "GHCJS_PACKAGE_PATH"
    , "GHC_PACKAGE_PATH"
    , "HASKELL_PACKAGE_SANDBOX"
    , "HASKELL_PACKAGE_SANDBOXES"
    , "HASKELL_DIST_DIR"
    ]
-- | Get map of environment variables to set to change the locale's encoding to UTF-8
--
-- On Windows nothing is returned (locale is controlled by the code page).
-- Elsewhere, existing locale variables that do not already specify UTF-8
-- are retargeted at a UTF-8 locale reported by @locale -a@, and LANG is
-- added when no "global" locale variable is set at all.
getUtf8LocaleVars
    :: forall m env.
       (MonadReader env m, HasPlatform env, MonadLogger m, MonadCatch m, MonadBaseControl IO m, MonadIO m)
    => EnvOverride -> m (Map Text Text)
getUtf8LocaleVars menv = do
    Platform _ os <- asks getPlatform
    if os == Cabal.Windows
        then
             -- On Windows, locale is controlled by the code page, so we don't set any environment
             -- variables.
             return
                 Map.empty
        else do
            let checkedVars = map checkVar (Map.toList $ eoTextMap menv)
                -- List of environment variables that will need to be updated to set UTF-8 (because
                -- they currently do not specify UTF-8).
                needChangeVars = concatMap fst checkedVars
                -- Set of locale-related environment variables that have already have a value.
                existingVarNames = Set.unions (map snd checkedVars)
                -- True if a locale is already specified by one of the "global" locale variables.
                hasAnyExisting =
                    or $
                    map
                        (`Set.member` existingVarNames)
                        ["LANG", "LANGUAGE", "LC_ALL"]
            if null needChangeVars && hasAnyExisting
                then
                     -- If no variables need changes and at least one "global" variable is set, no
                     -- changes to environment need to be made.
                     return
                         Map.empty
                else do
                    -- Get a list of known locales by running @locale -a@.
                    elocales <- tryProcessStdout Nothing menv "locale" ["-a"]
                    let
                        -- Filter the list to only include locales with UTF-8 encoding.
                        utf8Locales =
                            case elocales of
                                Left _ -> []
                                Right locales ->
                                    filter
                                        isUtf8Locale
                                        (T.lines $
                                         T.decodeUtf8With
                                             T.lenientDecode
                                             locales)
                        mfallback = getFallbackLocale utf8Locales
                    when
                        (isNothing mfallback)
                        ($logWarn
                             "Warning: unable to set locale to UTF-8 encoding; GHC may fail with 'invalid character'")
                    let
                        -- Get the new values of variables to adjust.
                        changes =
                            Map.unions $
                            map
                                (adjustedVarValue utf8Locales mfallback)
                                needChangeVars
                        -- Get the values of variables to add.
                        adds
                          | hasAnyExisting =
                              -- If we already have a "global" variable, then nothing needs
                              -- to be added.
                              Map.empty
                          | otherwise =
                              -- If we don't already have a "global" variable, then set LANG to the
                              -- fallback.
                              case mfallback of
                                  Nothing -> Map.empty
                                  Just fallback ->
                                      Map.singleton "LANG" fallback
                    return (Map.union changes adds)
  where
    -- Determines whether an environment variable is locale-related and, if so, whether it needs to
    -- be adjusted.
    checkVar
        :: (Text, Text) -> ([Text], Set Text)
    checkVar (k,v) =
        if k `elem` ["LANG", "LANGUAGE"] || "LC_" `T.isPrefixOf` k
            then if isUtf8Locale v
                     then ([], Set.singleton k)
                     else ([k], Set.singleton k)
            else ([], Set.empty)
    -- Adjusted value of an existing locale variable. Looks for valid UTF-8 encodings with
    -- same language /and/ territory, then with same language, and finally the first UTF-8 locale
    -- returned by @locale -a@.
    adjustedVarValue
        :: [Text] -> Maybe Text -> Text -> Map Text Text
    adjustedVarValue utf8Locales mfallback k =
        case Map.lookup k (eoTextMap menv) of
            Nothing -> Map.empty
            Just v ->
                case concatMap
                         (matchingLocales utf8Locales)
                         [ T.takeWhile (/= '.') v <> "."
                         , T.takeWhile (/= '_') v <> "_"] of
                    (v':_) -> Map.singleton k v'
                    [] ->
                        case mfallback of
                            Just fallback -> Map.singleton k fallback
                            Nothing -> Map.empty
    -- Determine the fallback locale, by looking for any UTF-8 locale prefixed with the list in
    -- @fallbackPrefixes@, and if not found, picking the first UTF-8 encoding returned by @locale
    -- -a@.
    getFallbackLocale
        :: [Text] -> Maybe Text
    getFallbackLocale utf8Locales = do
        case concatMap (matchingLocales utf8Locales) fallbackPrefixes of
            (v:_) -> Just v
            [] ->
                case utf8Locales of
                    [] -> Nothing
                    (v:_) -> Just v
    -- Filter the list of locales for any with the given prefixes (case-insensitive).
    matchingLocales
        :: [Text] -> Text -> [Text]
    matchingLocales utf8Locales prefix =
        filter
            (\v ->
                  (T.toLower prefix) `T.isPrefixOf` T.toLower v)
            utf8Locales
    -- Does the locale have one of the encodings in @utf8Suffixes@ (case-insensitive)?
    isUtf8Locale locale =
        or $
        map
            (\v ->
                  T.toLower v `T.isSuffixOf` T.toLower locale)
            utf8Suffixes
    -- Prefixes of fallback locales (case-insensitive)
    fallbackPrefixes = ["C.", "en_US.", "en_"]
    -- Suffixes of UTF-8 locales (case-insensitive)
    utf8Suffixes = [".UTF-8", ".utf8"]
| lukexi/stack | src/Stack/Setup.hs | bsd-3-clause | 64,425 | 0 | 31 | 21,458 | 14,412 | 7,125 | 7,287 | 1,210 | 13 |
import Test.DocTest

-- | Run the doctest examples embedded in the lens module's source.
main :: IO ()
main = doctest ["Data/Aeson/Lens.hs"]
| ekmett/aeson-lens | doctest.hs | bsd-3-clause | 73 | 0 | 6 | 11 | 27 | 14 | 13 | 3 | 1 |
{-# LANGUAGE OverloadedStrings, DoAndIfThenElse #-}
module Git.Repository (
checkoutHead
, readHead
, resolveTree
) where
import qualified Data.ByteString.Char8 as C
import qualified Data.ByteString as B
import Text.Printf (printf)
import Git.Common (GitRepository(..), ObjectId, WithRepository)
import Numeric (readOct)
import Git.Store.Object
import Git.Store.ObjectStore
import Git.Store.Index (IndexEntry, GitFileMode(..), writeIndex, indexEntryFor)
import System.FilePath
import System.Directory
import System.Posix.Files
import Control.Monad.Reader
-- | Updates files in the working tree to match the given <tree-ish>
checkoutHead :: WithRepository ()
checkoutHead = do
    repo <- ask
    tip <- readHead
    maybeTree <- resolveTree tip
    entries <- case maybeTree of
        Nothing   -> return []
        Just tree -> walkTree [] (getName repo) tree
    writeIndex entries
-- TODO Improve error handling: Should return an error instead of
-- of implicitly skipping erroneous elements.
-- TODO support _all_ the different git modes (from https://www.kernel.org/pub/software/scm/git/docs/git-fast-import.html):
-- 100644 or 644: A normal (not-executable) file. The majority of files in most projects use this mode. If in doubt, this is what you want.
-- 100755 or 755: A normal, but executable, file.
-- 120000: A symlink, the content of the file will be the link target.
-- 160000: A gitlink, SHA-1 of the object refers to a commit in another repository. Git links can only be specified by SHA or through a commit mark. They are used to implement submodules.
-- 040000: A subdirectory. Subdirectories can only be specified by SHA or through a tree mark set with --import-marks.
-- | Materialize a tree object on disk under @parent@, accumulating an
-- 'IndexEntry' for every blob written.  Entries that cannot be resolved
-- are silently skipped (see TODO above).
walkTree :: [IndexEntry] -> FilePath -> Tree -> WithRepository [IndexEntry]
walkTree acc parent tree = do
    let entries = getEntries tree
    foldM handleEntry acc entries
  where handleEntry acc' (TreeEntry "40000" path sha') = do
            -- Mode "40000" marks a subdirectory: create it and recurse.
            let dir = parent </> toFilePath path
            liftIO $ createDirectory dir
            maybeTree <- resolveTree $ toHex sha'
            maybe (return acc') (walkTree acc' dir) maybeTree
        handleEntry acc' (TreeEntry mode path sha') = do
            -- Any other mode is treated as a blob: write its content and
            -- apply the mode bits.
            repo <- ask
            let fullPath = parent </> toFilePath path
            content <- liftIO $ readObject repo $ toHex sha'
            maybe (return acc') (\e -> do
                liftIO $ B.writeFile fullPath (getBlobContent e)
                -- NOTE(review): 'head' here is partial -- it assumes the
                -- mode field always parses as octal; confirm upstream
                -- guarantees this before relying on it.
                let fMode = fst . head . readOct $ C.unpack mode
                liftIO $ setFileMode fullPath fMode
                indexEntry <- asIndexEntry fullPath sha'
                return $ indexEntry : acc') content
        toFilePath = C.unpack
        asIndexEntry path sha' = do
            stat <- liftIO $ getFileStatus path
            indexEntryFor path Regular sha' stat
-- | Resolve a tree given a <tree-ish>
-- Similar to `parse_tree_indirect` defined in tree.c
--
-- A tree object is read directly; a commit object is parsed and followed
-- to its tree; any other object type resolves to 'Nothing'.
resolveTree :: ObjectId -> WithRepository (Maybe Tree)
resolveTree sha' = do
    repo <- ask
    blob <- liftIO $ readObject repo sha'
    maybe (return Nothing) walk blob
  where walk (Object _ BTree sha1) = do
            repo <- ask
            liftIO $ readTree repo sha1
        walk c@(Object _ BCommit _) = do
            let maybeCommit = parseCommit $ getBlobContent c
            maybe (return Nothing) extractTree maybeCommit
        walk _ = return Nothing
        extractTree :: Commit -> WithRepository (Maybe Tree)
        extractTree commit = do
            let sha' = C.unpack $ getTree commit
            repo <- ask
            liftIO $ readTree repo sha'
-- | Render a binary SHA as its lowercase hex string (two digits per byte).
toHex :: C.ByteString -> String
toHex bytes = concatMap (printf "%02x") (C.unpack bytes)
-- | Read the object id the HEAD symbolic ref currently points at.
readHead :: WithRepository ObjectId
readHead = readSymRef "HEAD"
| fcharlie/hgit | src/Git/Repository.hs | bsd-3-clause | 4,336 | 0 | 19 | 1,514 | 867 | 433 | 434 | 69 | 3 |
module Language.Cee where
-- -- $Id$
import Language.Type
import Autolib.Util.Wort
import Autolib.Util.Zufall
import Control.Monad (guard)
import Autolib.Set
import System.Random
-- | Build the language { u c v | u v in l and |u| = |v| }: words of @l@ of
-- even length, split in half with the marker character @c@ inserted in the
-- middle.
cee :: Char -> Language -> Language
cee c l = Language
       { abbreviation = "{ u " ++ [c] ++ " v | u v in "
                       ++ abbreviation l ++ " und |u| = |v| }"
       , alphabet = union ( alphabet l ) ( unitSet c )
       , contains = \ w ->
             -- A member has odd length, the marker exactly in the middle,
             -- and the concatenated halves in the base language.
             let n = length w
                 ( u, m : v ) = splitAt ( n `div` 2 ) w
             in  odd n && c == m && contains l ( u ++ v )
       , sample = \ count n -> do
             -- Sample base-language words of length n and n+1; only the
             -- even-length ones can be split evenly.
             wss <- sequence $ do
                 nn <- [ n, n + 1 ]
                 return $ do
                     ws <- sample l count nn
                     return $ do
                         w <- ws
                         let n = length w
                         guard $ even n
                         let (u, v) = splitAt (n `div` 2) w
                         return $ u ++ [c] ++ v
             return $ concat wss
       , anti_sample = \ count n -> do
             -- wrong words, but with the separator placed correctly
             this <- sample ( cee c $ komplement l ) count n
             -- correct words (from l), but with misplaced separators
             ws <- sample l count n
             that <- sequence $ do
                 w <- ws
                 return $ do
                     k <- randomRIO (0, 5)
                     eins $ shuffle w $ replicate k c
             -- filter once more to be safe, since a shuffled separator
             -- could land exactly in the middle
             return $ filter ( not . contains ( cee c l ) ) $ this ++ that
       }
| Erdwolf/autotool-bonn | src/Language/Cee.hs | gpl-2.0 | 1,412 | 37 | 18 | 482 | 494 | 268 | 226 | 37 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.IAM.ListMFADevices
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Lists the MFA devices. If the request includes the user name, then this
-- action lists all the MFA devices associated with the specified user name. If
-- you do not specify a user name, IAM determines the user name implicitly based
-- on the AWS access key ID signing the request.
--
-- You can paginate the results using the 'MaxItems' and 'Marker' parameters.
--
-- <http://docs.aws.amazon.com/IAM/latest/APIReference/API_ListMFADevices.html>
module Network.AWS.IAM.ListMFADevices
(
-- * Request
ListMFADevices
-- ** Request constructor
, listMFADevices
-- ** Request lenses
, lmfadMarker
, lmfadMaxItems
, lmfadUserName
-- * Response
, ListMFADevicesResponse
-- ** Response constructor
, listMFADevicesResponse
-- ** Response lenses
, lmfadrIsTruncated
, lmfadrMFADevices
, lmfadrMarker
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.IAM.Types
import qualified GHC.Exts
-- | Request parameters for the ListMFADevices action.
data ListMFADevices = ListMFADevices
    { _lmfadMarker   :: Maybe Text -- ^ Pagination marker from a previous response.
    , _lmfadMaxItems :: Maybe Nat  -- ^ Maximum number of MFA devices to return.
    , _lmfadUserName :: Maybe Text -- ^ User whose MFA devices are listed.
    } deriving (Eq, Ord, Read, Show)
-- | 'ListMFADevices' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'lmfadMarker' @::@ 'Maybe' 'Text'
--
-- * 'lmfadMaxItems' @::@ 'Maybe' 'Natural'
--
-- * 'lmfadUserName' @::@ 'Maybe' 'Text'
--
-- All fields default to 'Nothing'.
listMFADevices :: ListMFADevices
listMFADevices = ListMFADevices
    { _lmfadUserName = Nothing
    , _lmfadMarker   = Nothing
    , _lmfadMaxItems = Nothing
    }
-- | Use this only when paginating results, and only in a subsequent request
-- after you've received a response where the results are truncated. Set it to
-- the value of the 'Marker' element in the response you just received.
lmfadMarker :: Lens' ListMFADevices (Maybe Text)
lmfadMarker = lens _lmfadMarker (\s a -> s { _lmfadMarker = a })

-- | Use this only when paginating results to indicate the maximum number of MFA
-- devices you want in the response. If there are additional MFA devices beyond
-- the maximum you specify, the 'IsTruncated' response element is 'true'. This
-- parameter is optional. If you do not include it, it defaults to 100.
lmfadMaxItems :: Lens' ListMFADevices (Maybe Natural)
lmfadMaxItems = lens _lmfadMaxItems (\s a -> s { _lmfadMaxItems = a }) . mapping _Nat

-- | The name of the user whose MFA devices you want to list.
lmfadUserName :: Lens' ListMFADevices (Maybe Text)
lmfadUserName = lens _lmfadUserName (\s a -> s { _lmfadUserName = a })
-- | Response payload for the ListMFADevices action.
data ListMFADevicesResponse = ListMFADevicesResponse
    { _lmfadrIsTruncated :: Maybe Bool            -- ^ Whether more devices remain.
    , _lmfadrMFADevices  :: List "member" MFADevice -- ^ The devices in this page.
    , _lmfadrMarker      :: Maybe Text            -- ^ Marker for the next page.
    } deriving (Eq, Read, Show)

-- | 'ListMFADevicesResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'lmfadrIsTruncated' @::@ 'Maybe' 'Bool'
--
-- * 'lmfadrMFADevices' @::@ ['MFADevice']
--
-- * 'lmfadrMarker' @::@ 'Maybe' 'Text'
--
listMFADevicesResponse :: ListMFADevicesResponse
listMFADevicesResponse = ListMFADevicesResponse
    { _lmfadrMFADevices  = mempty
    , _lmfadrIsTruncated = Nothing
    , _lmfadrMarker      = Nothing
    }
-- | A flag that indicates whether there are more MFA devices to list. If your
-- results were truncated, you can make a subsequent pagination request using
-- the 'Marker' request parameter to retrieve more MFA devices in the list.
lmfadrIsTruncated :: Lens' ListMFADevicesResponse (Maybe Bool)
lmfadrIsTruncated =
    lens _lmfadrIsTruncated (\s a -> s { _lmfadrIsTruncated = a })

-- | A list of MFA devices.
lmfadrMFADevices :: Lens' ListMFADevicesResponse [MFADevice]
lmfadrMFADevices = lens _lmfadrMFADevices (\s a -> s { _lmfadrMFADevices = a }) . _List

-- | If 'IsTruncated' is 'true', this element is present and contains the value to
-- use for the 'Marker' parameter in a subsequent pagination request.
lmfadrMarker :: Lens' ListMFADevicesResponse (Maybe Text)
lmfadrMarker = lens _lmfadrMarker (\s a -> s { _lmfadrMarker = a })
instance ToPath ListMFADevices where
    toPath = const "/"

-- Serialize the request parameters as query-string pairs.
instance ToQuery ListMFADevices where
    toQuery ListMFADevices{..} = mconcat
        [ "Marker"   =? _lmfadMarker
        , "MaxItems" =? _lmfadMaxItems
        , "UserName" =? _lmfadUserName
        ]

instance ToHeaders ListMFADevices

instance AWSRequest ListMFADevices where
    type Sv ListMFADevices = IAM
    type Rs ListMFADevices = ListMFADevicesResponse

    request  = post "ListMFADevices"
    response = xmlResponse

-- Decode the response from the "ListMFADevicesResult" XML element.
instance FromXML ListMFADevicesResponse where
    parseXML = withElement "ListMFADevicesResult" $ \x -> ListMFADevicesResponse
        <$> x .@? "IsTruncated"
        <*> x .@? "MFADevices" .!@ mempty
        <*> x .@? "Marker"

-- Pagination: keep re-issuing the request with the returned 'Marker' until
-- 'IsTruncated' says the listing is complete.
instance AWSPager ListMFADevices where
    page rq rs
        | stop (rs ^. lmfadrIsTruncated) = Nothing
        | otherwise = Just $ rq
            & lmfadMarker .~ rs ^. lmfadrMarker
| kim/amazonka | amazonka-iam/gen/Network/AWS/IAM/ListMFADevices.hs | mpl-2.0 | 6,003 | 0 | 14 | 1,271 | 795 | 470 | 325 | 82 | 1 |
-- (c) The University of Glasgow 2006
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveFunctor #-}
module Unify (
tcMatchTy, tcMatchTys, tcMatchTyX, tcMatchTysX, tcUnifyTyWithTFs,
ruleMatchTyX,
-- * Rough matching
roughMatchTcs, instanceCantMatch,
typesCantMatch,
-- Side-effect free unification
tcUnifyTy, tcUnifyTys,
tcUnifyTysFG,
BindFlag(..),
UnifyResult, UnifyResultM(..),
-- Matching a type against a lifted type (coercion)
liftCoMatch
) where
#include "HsVersions.h"
import Var
import VarEnv
import VarSet
import Kind
import Name( Name )
import Type hiding ( getTvSubstEnv )
import Coercion hiding ( getCvSubstEnv )
import TyCon
import TyCoRep hiding ( getTvSubstEnv, getCvSubstEnv )
import Util
import Pair
import Outputable
import UniqFM
import Control.Monad
#if __GLASGOW_HASKELL__ > 710
import qualified Control.Monad.Fail as MonadFail
#endif
import Control.Applicative hiding ( empty )
import qualified Control.Applicative
{-
Unification is much trickier than you might think.
1. The substitution we generate binds the *template type variables*
which are given to us explicitly.
2. We want to match in the presence of foralls;
e.g (forall a. t1) ~ (forall b. t2)
That is what the RnEnv2 is for; it does the alpha-renaming
that makes it as if a and b were the same variable.
Initialising the RnEnv2, so that it can generate a fresh
binder when necessary, entails knowing the free variables of
both types.
3. We must be careful not to bind a template type variable to a
locally bound variable. E.g.
(forall a. x) ~ (forall b. b)
where x is the template type variable. Then we do not want to
bind x to a/b! This is a kind of occurs check.
The necessary locals accumulate in the RnEnv2.
-}
-- | @tcMatchTy tmpl target@ produces a substitution @s@ over the free
-- variables of @tmpl@ such that @s(tmpl)@ equals @target@.
-- The returned substitution might bind coercion variables, when a
-- variable is an argument to a GADT constructor.
--
-- We don't pass in a set of "template variables" to be bound by the
-- match: tcMatchTy (and its friends) are always used on top-level
-- types, so any free variable of the LHS is fair game for binding.
tcMatchTy :: Type -> Type -> Maybe TCvSubst
tcMatchTy tmpl target = tcMatchTys [tmpl] [target]
-- | Like 'tcMatchTy', but extends the given substitution rather than
-- starting from an empty one.
tcMatchTyX :: TCvSubst   -- ^ Substitution to extend
           -> Type       -- ^ Template
           -> Type       -- ^ Target
           -> Maybe TCvSubst
tcMatchTyX subst tmpl target = tcMatchTysX subst [tmpl] [target]
-- | Like 'tcMatchTy' but over lists of types.
tcMatchTys :: [Type]          -- ^ Templates
           -> [Type]          -- ^ Targets
           -> Maybe TCvSubst  -- ^ One-shot; in principle the template
                              -- variables could be free in the targets
tcMatchTys tmpls targets
  = tcMatchTysX (mkEmptyTCvSubst in_scope) tmpls targets
  where
    -- The in-scope set must cover both sides of the problem.
    in_scope = mkInScopeSet (tyCoVarsOfTypes tmpls `unionVarSet`
                             tyCoVarsOfTypes targets)
-- | Like 'tcMatchTys', but extending a substitution
tcMatchTysX :: TCvSubst        -- ^ Substitution to extend
            -> [Type]          -- ^ Templates
            -> [Type]          -- ^ Targets
            -> Maybe TCvSubst  -- ^ One-shot substitution
tcMatchTysX (TCvSubst in_scope tv_env cv_env) tmpls targets
  -- See Note [Kind coercions in Unify]
  = case tc_unify_tys (const BindMe)
                      False   -- matching, not unifying
                      False   -- not an injectivity check
                      (mkRnEnv2 in_scope) tv_env cv_env tmpls targets of
      Unifiable (tv_env', cv_env') -> Just (TCvSubst in_scope tv_env' cv_env')
      _                            -> Nothing
-- | This one is called from the expression matcher,
-- which already has a MatchEnv in hand
ruleMatchTyX
  :: TyCoVarSet    -- ^ template variables
  -> RnEnv2
  -> TvSubstEnv    -- ^ type substitution to extend
  -> Type          -- ^ Template
  -> Type          -- ^ Target
  -> Maybe TvSubstEnv
ruleMatchTyX tmpl_tvs rn_env tenv tmpl target
  -- See Note [Kind coercions in Unify]
  | Unifiable (tenv', _) <- match_result = Just tenv'
  | otherwise                            = Nothing
  where
    match_result = tc_unify_tys (matchBindFun tmpl_tvs) False False rn_env
                                tenv emptyCvSubstEnv [tmpl] [target]
-- | A bind function that binds exactly the given template variables.
matchBindFun :: TyCoVarSet -> TyVar -> BindFlag
matchBindFun tvs tv
  | tv `elemVarSet` tvs = BindMe
  | otherwise           = Skolem
{- *********************************************************************
* *
Rough matching
* *
********************************************************************* -}
-- See Note [Rough match] field in InstEnv
roughMatchTcs :: [Type] -> [Maybe Name]
roughMatchTcs = map rough_tc
  where
    rough_tc ty
      | Just (inner, _) <- splitCastTy_maybe ty   = rough_tc inner  -- look through casts
      | Just (tc, _)    <- splitTyConApp_maybe ty = Just (tyConName tc)
      | otherwise                                 = Nothing
instanceCantMatch :: [Maybe Name] -> [Maybe Name] -> Bool
-- (instanceCantMatch tcs1 tcs2) returns True if tcs1 cannot
-- possibly be instantiated to actual, nor vice versa;
-- False is non-committal.
-- NB: zipWith truncates at the shorter list, so leftover elements
-- contribute False, exactly like the recursive formulation.
instanceCantMatch tcs1 tcs2 = or (zipWith itemCantMatch tcs1 tcs2)
-- | Two rough-match items certainly cannot match only when both are
-- known type constructors and they differ.
itemCantMatch :: Maybe Name -> Maybe Name -> Bool
itemCantMatch mt ma = case (mt, ma) of
  (Just t, Just a) -> t /= a
  _                -> False
{-
************************************************************************
* *
GADTs
* *
************************************************************************
Note [Pruning dead case alternatives]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider data T a where
T1 :: T Int
T2 :: T a
newtype X = MkX Int
newtype Y = MkY Char
type family F a
type instance F Bool = Int
Now consider case x of { T1 -> e1; T2 -> e2 }
The question before the house is this: if I know something about the type
of x, can I prune away the T1 alternative?
Suppose x::T Char. It's impossible to construct a (T Char) using T1,
Answer = YES we can prune the T1 branch (clearly)
Suppose x::T (F a), where 'a' is in scope. Then 'a' might be instantiated
to 'Bool', in which case x::T Int, so
ANSWER = NO (clearly)
We see here that we want precisely the apartness check implemented within
tcUnifyTysFG. So that's what we do! Two types cannot match if they are surely
apart. Note that since we are simply dropping dead code, a conservative test
suffices.
-}
-- | Given a list of pairs of types, are any two members of a pair surely
-- apart, even after arbitrary type function evaluation and substitution?
typesCantMatch :: [(Type,Type)] -> Bool
-- See Note [Pruning dead case alternatives]
typesCantMatch = any surely_apart
  where
    surely_apart :: (Type, Type) -> Bool
    surely_apart (t1, t2)
      = case tcUnifyTysFG (const BindMe) [t1] [t2] of
          SurelyApart -> True
          _           -> False
{-
************************************************************************
* *
Unification
* *
************************************************************************
Note [Fine-grained unification]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Do the types (x, x) and ([y], y) unify? The answer is seemingly "no" --
no substitution to finite types makes these match. But, a substitution to
*infinite* types can unify these two types: [x |-> [[[...]]], y |-> [[[...]]] ].
Why do we care? Consider these two type family instances:
type instance F x x = Int
type instance F [y] y = Bool
If we also have
type instance Looper = [Looper]
then the instances potentially overlap. The solution is to use unification
over infinite terms. This is possible (see [1] for lots of gory details), but
a full algorithm provides a little more power than we need. Instead, we make a
conservative approximation and just omit the occurs check.
[1]: http://research.microsoft.com/en-us/um/people/simonpj/papers/ext-f/axioms-extended.pdf
tcUnifyTys considers an occurs-check problem as the same as general unification
failure.
tcUnifyTysFG ("fine-grained") returns one of three results: success, occurs-check
failure ("MaybeApart"), or general failure ("SurelyApart").
See also Trac #8162.
It's worth noting that unification in the presence of infinite types is not
complete. This means that, sometimes, a closed type family does not reduce
when it should. See test case indexed-types/should_fail/Overlap15 for an
example.
Note [The substitution in MaybeApart]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The constructor MaybeApart carries data with it, typically a TvSubstEnv. Why?
Because consider unifying these:
(a, a, Int) ~ (b, [b], Bool)
If we go left-to-right, we start with [a |-> b]. Then, on the middle terms, we
apply the subst we have so far and discover that we need [b |-> [b]]. Because
this fails the occurs check, we say that the types are MaybeApart (see above
Note [Fine-grained unification]). But, we can't stop there! Because if we
continue, we discover that Int is SurelyApart from Bool, and therefore the
types are apart. This has practical consequences for the ability for closed
type family applications to reduce. See test case
indexed-types/should_compile/Overlap14.
Note [Unifying with skolems]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we discover that two types unify if and only if a skolem variable is
substituted, we can't properly unify the types. But, that skolem variable
may later be instantiated with a unifiable type. So, we return maybeApart
in these cases.
Note [Lists of different lengths are MaybeApart]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It is unusual to call tcUnifyTys or tcUnifyTysFG with lists of different
lengths. The place where we know this can happen is from compatibleBranches in
FamInstEnv, when checking data family instances. Data family instances may be
eta-reduced; see Note [Eta reduction for data family axioms] in TcInstDcls.
We wish to say that
D :: * -> * -> *
axDF1 :: D Int ~ DFInst1
axDF2 :: D Int Bool ~ DFInst2
overlap. If we conclude that lists of different lengths are SurelyApart, then
it will look like these do *not* overlap, causing disaster. See Trac #9371.
In usages of tcUnifyTys outside of family instances, we always use tcUnifyTys,
which can't tell the difference between MaybeApart and SurelyApart, so those
usages won't notice this design choice.
-}
-- | Simple one-shot unification of two types; every type variable
-- is bindable. Returns a regular (idempotent) substitution.
tcUnifyTy :: Type -> Type -> Maybe TCvSubst
tcUnifyTy ty1 ty2 = tcUnifyTys (const BindMe) [ty1] [ty2]
-- | Unify two types, treating type family applications as possibly
-- unifying with anything and looking through injective type family
-- applications. This implements "Algorithm U" from the paper
-- "Injective type families for Haskell" (Figures 2 and 3); it is
-- folded into the standard unifier for convenience, but its operation
-- should match the paper's specification.
tcUnifyTyWithTFs :: Bool  -- ^ True <=> do two-way unification;
                          -- False <=> do one-way matching.
                          -- See end of sec 5.2 from the paper
                 -> Type -> Type -> Maybe TCvSubst
tcUnifyTyWithTFs twoWay t1 t2
  = case tc_unify_tys (const BindMe) twoWay True
                      rn_env emptyTvSubstEnv emptyCvSubstEnv
                      [t1] [t2] of
      SurelyApart           -> Nothing
      -- We want to *succeed* in the questionable (MaybeApart) cases:
      -- this is a pre-unification algorithm.
      Unifiable  (subst, _) -> Just (niFixTCvSubst subst)
      MaybeApart (subst, _) -> Just (niFixTCvSubst subst)
  where
    rn_env = mkRnEnv2 (mkInScopeSet (tyCoVarsOfTypes [t1, t2]))
-----------------
tcUnifyTys :: (TyCoVar -> BindFlag)
           -> [Type] -> [Type]
           -> Maybe TCvSubst
           -- ^ A regular one-shot (idempotent) substitution
           -- that unifies the erased types. See comments
           -- for 'tcUnifyTysFG'

-- The two types may have common type variables, and indeed do so in the
-- second call to tcUnifyTys in FunDeps.checkClsFD
tcUnifyTys bind_fn tys1 tys2
  | Unifiable result <- tcUnifyTysFG bind_fn tys1 tys2 = Just result
  | otherwise                                          = Nothing
-- This type does double-duty. It is used in the UM (unifier monad) and to
-- return the final result. See Note [Fine-grained unification]
type UnifyResult = UnifyResultM TCvSubst

-- | The three possible outcomes of a unification attempt.
data UnifyResultM a = Unifiable a    -- the subst that unifies the types
                    | MaybeApart a   -- the subst has as much as we know;
                                     -- it must be part of a most general unifier
                                     -- See Note [The substitution in MaybeApart]
                    | SurelyApart
                    deriving Functor
instance Applicative UnifyResultM where
  pure  = Unifiable
  (<*>) = ap

instance Monad UnifyResultM where
  -- 'SurelyApart' is sticky, and 'MaybeApart' taints any later
  -- 'Unifiable': once we are only maybe-apart we can never again
  -- claim a definite unifier.
  SurelyApart  >>= _ = SurelyApart
  MaybeApart x >>= f = case f x of
                         Unifiable y -> MaybeApart y
                         other       -> other
  Unifiable x  >>= f = f x

instance Alternative UnifyResultM where
  -- Clause order encodes the preference 'Unifiable' > 'MaybeApart'
  -- > 'SurelyApart' when combining two results.
  empty = SurelyApart
  a@(Unifiable  {}) <|> _                 = a
  _                 <|> b@(Unifiable  {}) = b
  a@(MaybeApart {}) <|> _                 = a
  _                 <|> b@(MaybeApart {}) = b
  SurelyApart       <|> SurelyApart       = SurelyApart

instance MonadPlus UnifyResultM
-- | @tcUnifyTysFG bind_tv tys1 tys2@ attempts to find a substitution @s@
-- (whose domain elements all respond 'BindMe' to @bind_tv@) such that
-- @s(tys1)@ and @s(tys2)@ are equal, as witnessed by the returned
-- Coercions. "FG" = fine-grained: the result distinguishes
-- 'MaybeApart' from 'SurelyApart'; see Note [Fine-grained unification].
tcUnifyTysFG :: (TyVar -> BindFlag)
             -> [Type] -> [Type]
             -> UnifyResult
tcUnifyTysFG bind_fn tys1 tys2
  -- NB: the result binder used to be called 'env', shadowing the
  -- RnEnv2 'env' from the where-clause; renamed for clarity.
  = do { (tv_env, _) <- tc_unify_tys bind_fn True False rn_env
                                     emptyTvSubstEnv emptyCvSubstEnv
                                     tys1 tys2
       ; return $ niFixTCvSubst tv_env }
  where
    vars   = tyCoVarsOfTypes tys1 `unionVarSet` tyCoVarsOfTypes tys2
    rn_env = mkRnEnv2 (mkInScopeSet vars)
-- | This function is actually the one to call the unifier -- a little
-- too general for outside clients, though.
tc_unify_tys :: (TyVar -> BindFlag)
             -> Bool        -- ^ True <=> unify; False <=> match
             -> Bool        -- ^ True <=> doing an injectivity check
             -> RnEnv2
             -> TvSubstEnv  -- ^ substitution to extend
             -> CvSubstEnv
             -> [Type] -> [Type]
             -> UnifyResultM (TvSubstEnv, CvSubstEnv)
tc_unify_tys bind_fn unif inj_check rn_env tv_env cv_env tys1 tys2
  = initUM bind_fn unif inj_check rn_env tv_env cv_env $
    do { -- Unify the kinds before the types themselves;
         -- see Note [Kind coercions in Unify]
         unify_tys kis1 kis2
       ; unify_tys tys1 tys2
       ; (,) <$> getTvSubstEnv <*> getCvSubstEnv }
  where
    kis1 = map typeKind tys1
    kis2 = map typeKind tys2
-- | Pretty-print a unification result, tagging it with its constructor.
instance Outputable a => Outputable (UnifyResultM a) where
  ppr (Unifiable x)  = text "Unifiable"  <+> ppr x
  ppr (MaybeApart x) = text "MaybeApart" <+> ppr x
  ppr SurelyApart    = text "SurelyApart"
{-
************************************************************************
* *
Non-idempotent substitution
* *
************************************************************************
Note [Non-idempotent substitution]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
During unification we use a TvSubstEnv/CvSubstEnv pair that is
(a) non-idempotent
(b) loop-free; ie repeatedly applying it yields a fixed point
Note [Finding the substitution fixpoint]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Finding the fixpoint of a non-idempotent substitution arising from a
unification is harder than it looks, because of kinds. Consider
T k (H k (f:k)) ~ T * (g:*)
If we unify, we get the substitution
[ k -> *
, g -> H k (f:k) ]
To make it idempotent we don't want to get just
[ k -> *
, g -> H * (f:k) ]
We also want to substitute inside f's kind, to get
[ k -> *
, g -> H k (f:*) ]
If we don't do this, we may apply the substitution to something,
and get an ill-formed type, i.e. one where typeKind will fail.
This happened, for example, in Trac #9106.
This is the reason for extending tenv with [f:k -> f:*], in the
definition of tenv' in niFixTCvSubst
-}
niFixTCvSubst :: TvSubstEnv -> TCvSubst
-- Find the idempotent fixed point of the non-idempotent substitution
-- See Note [Finding the substitution fixpoint]
-- ToDo: use laziness instead of iteration?
niFixTCvSubst tenv = f tenv
  where
    f tenv
      | not_fixpoint = f (mapVarEnv (substTy subst') tenv)
      | otherwise    = subst
      where
        -- Not a fixpoint while some variable in the range is still
        -- in the domain of the substitution.
        not_fixpoint = varSetAny in_domain range_tvs
        in_domain tv = tv `elemVarEnv` tenv

        range_tvs    = nonDetFoldUFM (unionVarSet . tyCoVarsOfType) emptyVarSet tenv
                       -- It's OK to use nonDetFoldUFM here because we
                       -- forget the order immediately by creating a set
        subst        = mkTvSubst (mkInScopeSet range_tvs) tenv

        -- tenv' extends tenv by mapping each free range tyvar to
        -- itself, but with a substituted kind
        -- See Note [Finding the substitution fixpoint]
        tenv'        = extendVarEnvList tenv [ (rtv, mkTyVarTy $
                                                     setTyVarKind rtv $
                                                     substTy subst $
                                                     tyVarKind rtv)
                                             | rtv <- nonDetEltsUFM range_tvs
                                               -- It's OK to use nonDetEltsUFM here
                                               -- because we forget the order
                                               -- immediately by putting it in a VarEnv
                                             , not (in_domain rtv) ]
        subst'       = mkTvSubst (mkInScopeSet range_tvs) tenv'
niSubstTvSet :: TvSubstEnv -> TyCoVarSet -> TyCoVarSet
-- Apply the non-idempotent substitution to a set of type variables,
-- remembering that the substitution isn't necessarily idempotent.
-- This is used in the occurs check, before extending the substitution.
niSubstTvSet tsubst tvs
  = nonDetFoldUFM (unionVarSet . expand) emptyVarSet tvs
    -- The nonDetFoldUFM is benign: any ordering is forgotten
    -- immediately by building a set.
  where
    expand tv = case lookupVarEnv tsubst tv of
                  Just ty -> niSubstTvSet tsubst (tyCoVarsOfType ty)
                  Nothing -> unitVarSet tv
{-
************************************************************************
* *
unify_ty: the main workhorse
* *
************************************************************************
Note [Specification of unification]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The pure unifier, unify_ty, defined in this module, tries to work out
a substitution to make two types say True to eqType. NB: eqType is
itself not purely syntactic; it accounts for CastTys;
see Note [Non-trivial definitional equality] in TyCoRep
Unlike the "impure unifiers" in the typechecker (the eager unifier in
TcUnify, and the constraint solver itself in TcCanonical), the pure
unifier does /not/ work up to ~.
The algorithm implemented here is rather delicate, and we depend on it
to uphold certain properties. This is a summary of these required
properties. Any reference to "flattening" refers to the flattening
algorithm in FamInstEnv (See Note [Flattening] in FamInstEnv), not
the flattening algorithm in the solver.
Notation:
θ,φ substitutions
ξ type-function-free types
τ,σ other types
τ♭ type τ, flattened
≡ eqType
(U1) Soundness.
If (unify τ₁ τ₂) = Unifiable θ, then θ(τ₁) ≡ θ(τ₂).
θ is a most general unifier for τ₁ and τ₂.
(U2) Completeness.
If (unify ξ₁ ξ₂) = SurelyApart,
then there exists no substitution θ such that θ(ξ₁) ≡ θ(ξ₂).
These two properties are stated as Property 11 in the "Closed Type Families"
paper (POPL'14). Below, this paper is called [CTF].
(U3) Apartness under substitution.
If (unify ξ τ♭) = SurelyApart, then (unify ξ θ(τ)♭) = SurelyApart,
for any θ. (Property 12 from [CTF])
(U4) Apart types do not unify.
If (unify ξ τ♭) = SurelyApart, then there exists no θ
such that θ(ξ) = θ(τ). (Property 13 from [CTF])
THEOREM. Completeness w.r.t ~
If (unify τ₁♭ τ₂♭) = SurelyApart,
then there exists no proof that (τ₁ ~ τ₂).
PROOF. See appendix of [CTF].
The unification algorithm is used for type family injectivity, as described
in the "Injective Type Families" paper (Haskell'15), called [ITF]. When run
in this mode, it has the following properties.
(I1) If (unify σ τ) = SurelyApart, then σ and τ are not unifiable, even
after arbitrary type family reductions. Note that σ and τ are
not flattened here.
(I2) If (unify σ τ) = MaybeApart θ, and if some
φ exists such that φ(σ) ~ φ(τ), then φ extends θ.
Furthermore, the RULES matching algorithm requires this property,
but only when using this algorithm for matching:
(M1) If (match σ τ) succeeds with θ, then all matchable tyvars
in σ are bound in θ.
Property M1 means that we must extend the substitution with,
say (a ↦ a) when appropriate during matching.
See also Note [Self-substitution when matching].
(M2) Completeness of matching.
If θ(σ) = τ, then (match σ τ) = Unifiable φ,
where θ is an extension of φ.
Sadly, property M2 and I2 conflict. Consider
type family F1 a b where
F1 Int Bool = Char
F1 Double String = Char
Consider now two matching problems:
P1. match (F1 a Bool) (F1 Int Bool)
P2. match (F1 a Bool) (F1 Double String)
In case P1, we must find (a ↦ Int) to satisfy M2.
In case P2, we must /not/ find (a ↦ Double), in order to satisfy I2. (Note
that the correct mapping for I2 is (a ↦ Int). There is no way to discover
this, but we mustn't map a to anything else!)
We thus must parameterize the algorithm over whether it's being used
for an injectivity check (refrain from looking at non-injective arguments
to type families) or not (do indeed look at those arguments). This is
implemented by the um_inj_tf field of UMEnv.
(It's all a question of whether or not to include equation (7) from Fig. 2
of [ITF].)
This extra parameter is a bit fiddly, perhaps, but seemingly less so than
having two separate, almost-identical algorithms.
Note [Self-substitution when matching]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
What should happen when we're *matching* (not unifying) a1 with a1? We
should get a substitution [a1 |-> a1]. A successful match should map all
the template variables (except ones that disappear when expanding synonyms).
But when unifying, we don't want to do this, because we'll then fall into
a loop.
This arrangement affects the code in three places:
- If we're matching a refined template variable, don't recur. Instead, just
check for equality. That is, if we know [a |-> Maybe a] and are matching
(a ~? Maybe Int), we want to just fail.
- Skip the occurs check when matching. This comes up in two places, because
matching against variables is handled separately from matching against
full-on types.
Note that this arrangement was provoked by a real failure, where the same
unique ended up in the template as in the target. (It was a rule firing when
compiling Data.List.NonEmpty.)
Note [Matching coercion variables]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this:
type family F a
data G a where
MkG :: F a ~ Bool => G a
type family Foo (x :: G a) :: F a
type instance Foo MkG = False
We would like that to be accepted. For that to work, we need to introduce
a coercion variable on the left an then use it on the right. Accordingly,
at use sites of Foo, we need to be able to use matching to figure out the
value for the coercion. (See the desugared version:
axFoo :: [a :: *, c :: F a ~ Bool]. Foo (MkG c) = False |> (sym c)
) We never want this action to happen during *unification* though, when
all bets are off.
Note [Kind coercions in Unify]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We wish to match/unify while ignoring casts. But, we can't just ignore
them completely, or we'll end up with ill-kinded substitutions. For example,
say we're matching `a` with `ty |> co`. If we just drop the cast, we'll
return [a |-> ty], but `a` and `ty` might have different kinds. We can't
just match/unify their kinds, either, because this might gratuitously
fail. After all, `co` is the witness that the kinds are the same -- they
may look nothing alike.
So, we pass a kind coercion to the match/unify worker. This coercion witnesses
the equality between the substed kind of the left-hand type and the substed
kind of the right-hand type. Note that we do not unify kinds at the leaves
(as we did previously). We thus have
INVARIANT: In the call
unify_ty ty1 ty2 kco
it must be that subst(kco) :: subst(kind(ty1)) ~N subst(kind(ty2)), where
`subst` is the ambient substitution in the UM monad.
To get this coercion, we first have to match/unify
the kinds before looking at the types. Happily, we need look only one level
up, as all kinds are guaranteed to have kind *.
When we're working with type applications (either TyConApp or AppTy) we
need to worry about establishing INVARIANT, as the kinds of the function
& arguments aren't (necessarily) included in the kind of the result.
When unifying two TyConApps, this is easy, because the two TyCons are
the same. Their kinds are thus the same. As long as we unify left-to-right,
we'll be sure to unify types' kinds before the types themselves. (For example,
think about Proxy :: forall k. k -> *. Unifying the first args matches up
the kinds of the second args.)
For AppTy, we must unify the kinds of the functions, but once these are
unified, we can continue unifying arguments without worrying further about
kinds.
We thought, at one point, that this was all unnecessary: why should
casts be in types in the first place? But they do. In
dependent/should_compile/KindEqualities2, we see, for example the
constraint Num (Int |> (blah ; sym blah)). We naturally want to find
a dictionary for that constraint, which requires dealing with
coercions in this manner.
-}
-------------- unify_ty: the main workhorse -----------
unify_ty :: Type -> Type  -- Types to be unified and a co
         -> Coercion      -- A coercion between their kinds
                          -- See Note [Kind coercions in Unify]
         -> UM ()
-- See Note [Specification of unification]
-- Respects newtypes, PredTypes
-- NB: clause order is load-bearing; e.g. synonyms and casts are
-- stripped before anything else is tried.

unify_ty ty1 ty2 kco
    -- Look through synonyms, and strip casts, adjusting kco so that
    -- the invariant from Note [Kind coercions in Unify] is maintained.
  | Just ty1' <- coreView ty1 = unify_ty ty1' ty2 kco
  | Just ty2' <- coreView ty2 = unify_ty ty1 ty2' kco
  | CastTy ty1' co <- ty1     = unify_ty ty1' ty2 (co `mkTransCo` kco)
  | CastTy ty2' co <- ty2     = unify_ty ty1 ty2' (kco `mkTransCo` mkSymCo co)

unify_ty (TyVarTy tv1) ty2 kco = uVar tv1 ty2 kco
unify_ty ty1 (TyVarTy tv2) kco
  = do { unif <- amIUnifying
       ; if unif
         then umSwapRn $ uVar tv2 ty1 (mkSymCo kco)
         else surelyApart }  -- non-tv on left; tv on right: can't match.

unify_ty ty1 ty2 _kco
  | Just (tc1, tys1) <- splitTyConApp_maybe ty1
  , Just (tc2, tys2) <- splitTyConApp_maybe ty2
  = if tc1 == tc2 || (isStarKind ty1 && isStarKind ty2)
    then if isInjectiveTyCon tc1 Nominal
         then unify_tys tys1 tys2
         else do { -- tc1 is not injective (e.g. a type family):
                   -- unify the injective arguments for sure; the
                   -- non-injective ones only tentatively, and only
                   -- when not doing an injectivity check.
                   let inj | isTypeFamilyTyCon tc1
                           = case familyTyConInjectivityInfo tc1 of
                               NotInjective -> repeat False
                               Injective bs -> bs
                           | otherwise
                           = repeat False

                       (inj_tys1, noninj_tys1) = partitionByList inj tys1
                       (inj_tys2, noninj_tys2) = partitionByList inj tys2

                 ; unify_tys inj_tys1 inj_tys2
                 ; inj_tf <- checkingInjectivity
                 ; unless inj_tf $ -- See (end of) Note [Specification of unification]
                   don'tBeSoSure $ unify_tys noninj_tys1 noninj_tys2 }
    else -- tc1 /= tc2
         if isGenerativeTyCon tc1 Nominal && isGenerativeTyCon tc2 Nominal
         then surelyApart
         else maybeApart

        -- Applications need a bit of care!
        -- They can match FunTy and TyConApp, so use splitAppTy_maybe
        -- NB: we've already dealt with type variables,
        -- so if one type is an App the other one jolly well better be too
unify_ty (AppTy ty1a ty1b) ty2 _kco
  | Just (ty2a, ty2b) <- tcRepSplitAppTy_maybe ty2
  = unify_ty_app ty1a [ty1b] ty2a [ty2b]

unify_ty ty1 (AppTy ty2a ty2b) _kco
  | Just (ty1a, ty1b) <- tcRepSplitAppTy_maybe ty1
  = unify_ty_app ty1a [ty1b] ty2a [ty2b]

unify_ty (LitTy x) (LitTy y) _kco | x == y = return ()

-- Foralls: unify the binders' kinds, then the bodies with the two
-- binders alpha-equated via the RnEnv2.
unify_ty (ForAllTy (TvBndr tv1 _) ty1) (ForAllTy (TvBndr tv2 _) ty2) kco
  = do { unify_ty (tyVarKind tv1) (tyVarKind tv2) (mkNomReflCo liftedTypeKind)
       ; umRnBndr2 tv1 tv2 $ unify_ty ty1 ty2 kco }

-- See Note [Matching coercion variables]
unify_ty (CoercionTy co1) (CoercionTy co2) kco
  = do { unif <- amIUnifying
       ; c_subst <- getCvSubstEnv
       ; case co1 of
           CoVarCo cv
             | not unif     -- matching only, never when unifying
             , not (cv `elemVarEnv` c_subst)
             -> do { b <- tvBindFlagL cv
                   ; if b == BindMe
                     then do { checkRnEnvRCo co2
                             ; let [_, _, co_l, co_r] = decomposeCo 4 kco
                                -- cv  :: t1 ~ t2
                                -- co2 :: s1 ~ s2
                                -- co_l :: t1 ~ s1
                                -- co_r :: t2 ~ s2
                             ; extendCvEnv cv (co_l `mkTransCo`
                                               co2 `mkTransCo`
                                               mkSymCo co_r) }
                     else return () }
           _ -> return () }

-- A non-generative tycon application (e.g. a type family) might still
-- reduce to match anything: MaybeApart rather than SurelyApart.
unify_ty ty1 _ _
  | Just (tc1, _) <- splitTyConApp_maybe ty1
  , not (isGenerativeTyCon tc1 Nominal)
  = maybeApart

unify_ty _ ty2 _
  | Just (tc2, _) <- splitTyConApp_maybe ty2
  , not (isGenerativeTyCon tc2 Nominal)
  = do { unif <- amIUnifying
       ; if unif then maybeApart else surelyApart }

unify_ty _ _ _ = surelyApart
unify_ty_app :: Type -> [Type] -> Type -> [Type] -> UM ()
-- Unify the application (ty1 ty1args) with (ty2 ty2args), peeling
-- further arguments off ty1/ty2 while both remain applications.
unify_ty_app ty1 ty1args ty2 ty2args
  | Just (ty1', ty1a) <- repSplitAppTy_maybe ty1
  , Just (ty2', ty2a) <- repSplitAppTy_maybe ty2
  = unify_ty_app ty1' (ty1a : ty1args) ty2' (ty2a : ty2args)

  | otherwise
  = do { let ki1 = typeKind ty1
             ki2 = typeKind ty2
             -- See Note [Kind coercions in Unify]
         -- Unify the functions' kinds first, then the functions,
         -- then the (kind-aligned) arguments.
       ; unify_ty ki1 ki2 (mkNomReflCo liftedTypeKind)
       ; unify_ty ty1 ty2 (mkNomReflCo ki1)
       ; unify_tys ty1args ty2args }
-- | Unify two lists of types pairwise, left to right.
unify_tys :: [Type] -> [Type] -> UM ()
unify_tys orig_xs orig_ys = go orig_xs orig_ys
  where
    go (x:xs) (y:ys)
      -- See Note [Kind coercions in Unify]
      = do { unify_ty x y (mkNomReflCo (typeKind x))
           ; go xs ys }
    go [] [] = return ()
    go _  _  = maybeApart
      -- See Note [Lists of different lengths are MaybeApart]
---------------------------------
-- | Unify (or match) the variable tv1 against the type ty.
uVar :: TyVar       -- Variable to be unified
     -> Type        -- with this Type
     -> Coercion    -- :: kind tv ~N kind ty
     -> UM ()

uVar tv1 ty kco
 = do { -- Check to see whether tv1 is refined by the substitution
        subst <- getTvSubstEnv
      ; case (lookupVarEnv subst tv1) of
          Just ty' -> do { unif <- amIUnifying
                         ; if unif
                           then unify_ty ty' ty kco   -- Yes, call back into unify
                           else -- when *matching*, we don't want to just recur here.
                                -- this is because the range of the subst is the target
                                -- type, not the template type. So, just check for
                                -- normal type equality.
                                guard ((ty' `mkCastTy` kco) `eqType` ty) }
          Nothing  -> uUnrefined tv1 ty ty kco }   -- No, continue
uUnrefined :: TyVar      -- variable to be unified
           -> Type       -- with this Type
           -> Type       -- (version w/ expanded synonyms)
           -> Coercion   -- :: kind tv ~N kind ty
           -> UM ()

-- We know that tv1 isn't refined
uUnrefined tv1 ty2 ty2' kco
  | Just ty2'' <- coreView ty2'
  = uUnrefined tv1 ty2 ty2'' kco   -- Unwrap synonyms
       -- This is essential, in case we have
       --    type Foo a = a
       -- and then unify a ~ Foo a

  | TyVarTy tv2 <- ty2'
  = do { tv1' <- umRnOccL tv1
       ; tv2' <- umRnOccR tv2
       ; unif <- amIUnifying
           -- See Note [Self-substitution when matching]
       ; when (tv1' /= tv2' || not unif) $ do
       { subst <- getTvSubstEnv
          -- Check to see whether tv2 is refined
       ; case lookupVarEnv subst tv2 of
         {  Just ty' | unif -> uUnrefined tv1 ty' ty' kco
         ;  _ -> do
       {   -- So both are unrefined

           -- And then bind one or the other,
           -- depending on which is bindable
       ; b1 <- tvBindFlagL tv1
       ; b2 <- tvBindFlagR tv2
       ; let ty1 = mkTyVarTy tv1
       ; case (b1, b2) of
           (BindMe, _) -> do { checkRnEnvR ty2 -- make sure ty2 is not a local
                             ; extendTvEnv tv1 (ty2 `mkCastTy` mkSymCo kco) }
           (_, BindMe) | unif -> do { checkRnEnvL ty1 -- ditto for ty1
                                    ; extendTvEnv tv2 (ty1 `mkCastTy` kco) }

           _ | tv1' == tv2' -> return ()
             -- How could this happen? If we're only matching and if
             -- we're comparing forall-bound variables.

           _ -> maybeApart -- See Note [Unifying with skolems]
  }}}}

uUnrefined tv1 ty2 ty2' kco -- ty2 is not a type variable
  = do { occurs <- elemNiSubstSet tv1 (tyCoVarsOfType ty2')
       ; unif <- amIUnifying
       ; if unif && occurs  -- See Note [Self-substitution when matching]
         then maybeApart    -- Occurs check, see Note [Fine-grained unification]
         else do bindTv tv1 (ty2 `mkCastTy` mkSymCo kco) }
            -- Bind tyvar to the synonym if poss
-- | Does @v@ occur in the set, once the accumulated (non-idempotent)
-- substitution has been pushed through the set?
elemNiSubstSet :: TyVar -> TyCoVarSet -> UM Bool
elemNiSubstSet v set = do
  tsubst <- getTvSubstEnv
  return (v `elemVarSet` niSubstTvSet tsubst set)
-- | Bind a type variable to a type (which is known not to be a
-- variable itself), respecting the variable's BindFlag.
bindTv :: TyVar -> Type -> UM ()
bindTv tv ty = do   -- ty is not a variable
  checkRnEnvR ty    -- make sure ty mentions no local variables
  flag <- tvBindFlagL tv
  case flag of
    BindMe -> extendTvEnv tv ty
    Skolem -> maybeApart   -- See Note [Unifying with skolems]
{-
%************************************************************************
%* *
Binding decisions
* *
************************************************************************
-}
-- | Tells the unifier whether it may bind a given type variable.
data BindFlag
  = BindMe      -- A regular type variable: free to bind

  | Skolem      -- This type variable is a skolem constant
                -- Don't bind it; it only matches itself
  deriving Eq
{-
************************************************************************
* *
Unification monad
* *
************************************************************************
-}
-- | Read-only environment of the unification monad.
data UMEnv = UMEnv { um_bind_fun :: TyVar -> BindFlag
                        -- the user-supplied BindFlag function

                   , um_unif     :: Bool  -- unification (True) or matching?

                   , um_inj_tf   :: Bool  -- checking for injectivity?
                        -- See (end of) Note [Specification of unification]

                   , um_rn_env   :: RnEnv2
                        -- alpha-renaming of forall-bound variables
                   }

-- | Threaded state: the substitution built so far.
data UMState = UMState
  { um_tv_env :: TvSubstEnv
  , um_cv_env :: CvSubstEnv }

-- | The unification monad: a reader of 'UMEnv' and state-transformer
-- of 'UMState', with failure provided by 'UnifyResultM'.
newtype UM a = UM { unUM :: UMEnv -> UMState
                         -> UnifyResultM (UMState, a) }
instance Functor UM where
  fmap = liftM

instance Applicative UM where
  pure a = UM (\_ s -> pure (s, a))
  (<*>)  = ap

instance Monad UM where
  -- A failed pattern match in do-notation means the types
  -- surely don't unify.
  fail _ = UM (\_ _ -> SurelyApart) -- failed pattern match
  -- Thread the UMState through; the environment is read-only.
  m >>= k = UM (\env state ->
                 do { (state', v) <- unUM m env state
                    ; unUM (k v) env state' })

-- need this instance because of a use of 'guard' above
instance Alternative UM where
  -- Combining alternatives defers to UnifyResultM's Alternative,
  -- so the first Unifiable/MaybeApart outcome wins.
  empty     = UM (\_ _ -> Control.Applicative.empty)
  m1 <|> m2 = UM (\env state ->
                   unUM m1 env state <|>
                   unUM m2 env state)

instance MonadPlus UM
#if __GLASGOW_HASKELL__ > 710
-- On newer GHCs, 'fail' lives in MonadFail; same meaning as the
-- Monad instance's fail: a failed pattern match is SurelyApart.
instance MonadFail.MonadFail UM where
  fail _ = UM (\_tvs _subst -> SurelyApart) -- failed pattern match
#endif
-- | Run a UM computation from the given environment and starting
-- substitution, discarding the final state and keeping the result.
initUM :: (TyVar -> BindFlag)
       -> Bool          -- True <=> unify; False <=> match
       -> Bool          -- True <=> doing an injectivity check
       -> RnEnv2
       -> TvSubstEnv    -- subst to extend
       -> CvSubstEnv
       -> UM a -> UnifyResultM a
initUM bind_fn unif inj_tf rn_env tv_subst cv_subst um
  = fmap snd (unUM um env st)   -- drop the final UMState
  where
    env = UMEnv { um_bind_fun = bind_fn
                , um_unif     = unif
                , um_inj_tf   = inj_tf
                , um_rn_env   = rn_env }
    st  = UMState { um_tv_env = tv_subst
                  , um_cv_env = cv_subst }
-- | Binding disposition of a variable occurring on the left of the
-- problem: locally (forall-) bound variables are never bindable,
-- regardless of the user-supplied bind function.
tvBindFlagL :: TyVar -> UM BindFlag
tvBindFlagL tv = UM $ \env st ->
  let flag | inRnEnvL (um_rn_env env) tv = Skolem
           | otherwise                   = um_bind_fun env tv
  in Unifiable (st, flag)

-- | As 'tvBindFlagL', but for the right-hand side of the problem.
tvBindFlagR :: TyVar -> UM BindFlag
tvBindFlagR tv = UM $ \env st ->
  let flag | inRnEnvR (um_rn_env env) tv = Skolem
           | otherwise                   = um_bind_fun env tv
  in Unifiable (st, flag)
-- | Read the type-variable substitution accumulated so far.
getTvSubstEnv :: UM TvSubstEnv
getTvSubstEnv = UM $ \_ st -> Unifiable (st, um_tv_env st)

-- | Read the coercion-variable substitution accumulated so far.
getCvSubstEnv :: UM CvSubstEnv
getCvSubstEnv = UM $ \_ st -> Unifiable (st, um_cv_env st)

-- | Record the binding @tv |-> ty@ in the type substitution.
extendTvEnv :: TyVar -> Type -> UM ()
extendTvEnv tv ty = UM $ \_ st ->
  Unifiable (st { um_tv_env = extendVarEnv (um_tv_env st) tv ty }, ())

-- | Record the binding @cv |-> co@ in the coercion substitution.
extendCvEnv :: CoVar -> Coercion -> UM ()
extendCvEnv cv co = UM $ \_ st ->
  Unifiable (st { um_cv_env = extendVarEnv (um_cv_env st) cv co }, ())
-- | Run an action with the two binders alpha-equated in the RnEnv2.
umRnBndr2 :: TyCoVar -> TyCoVar -> UM a -> UM a
umRnBndr2 v1 v2 thing = UM $ \env st ->
  unUM thing (env { um_rn_env = rnBndr2 (um_rn_env env) v1 v2 }) st
checkRnEnv :: (RnEnv2 -> VarSet) -> VarSet -> UM ()
checkRnEnv get_set varset = UM $ \env state ->
let env_vars = get_set (um_rn_env env) in
if isEmptyVarSet env_vars || varset `disjointVarSet` env_vars
-- NB: That isEmptyVarSet is a critical optimization; it
-- means we don't have to calculate the free vars of
-- the type, often saving quite a bit of allocation.
then Unifiable (state, ())
else MaybeApart (state, ())
-- | Converts any SurelyApart to a MaybeApart
don'tBeSoSure :: UM () -> UM ()
don'tBeSoSure um = UM $ \env state ->
case unUM um env state of
SurelyApart -> MaybeApart (state, ())
other -> other
checkRnEnvR :: Type -> UM ()
checkRnEnvR ty = checkRnEnv rnEnvR (tyCoVarsOfType ty)
checkRnEnvL :: Type -> UM ()
checkRnEnvL ty = checkRnEnv rnEnvL (tyCoVarsOfType ty)
checkRnEnvRCo :: Coercion -> UM ()
checkRnEnvRCo co = checkRnEnv rnEnvR (tyCoVarsOfCo co)
umRnOccL :: TyVar -> UM TyVar
umRnOccL v = UM $ \env state ->
Unifiable (state, rnOccL (um_rn_env env) v)
umRnOccR :: TyVar -> UM TyVar
umRnOccR v = UM $ \env state ->
Unifiable (state, rnOccR (um_rn_env env) v)
umSwapRn :: UM a -> UM a
umSwapRn thing = UM $ \env state ->
let rn_env' = rnSwap (um_rn_env env) in
unUM thing (env { um_rn_env = rn_env' }) state
amIUnifying :: UM Bool
amIUnifying = UM $ \env state -> Unifiable (state, um_unif env)
checkingInjectivity :: UM Bool
checkingInjectivity = UM $ \env state -> Unifiable (state, um_inj_tf env)
maybeApart :: UM ()
maybeApart = UM (\_ state -> MaybeApart (state, ()))
surelyApart :: UM a
surelyApart = UM (\_ _ -> SurelyApart)
{-
%************************************************************************
%* *
Matching a (lifted) type against a coercion
%* *
%************************************************************************
This section defines essentially an inverse to liftCoSubst. It is defined
here to avoid a dependency from Coercion on this module.
-}
-- | Ambient information for matching a type against a coercion:
-- the template variables we may bind, and the renaming of bound variables.
data MatchEnv = ME { me_tmpls :: TyVarSet
                   , me_env   :: RnEnv2 }
-- | 'liftCoMatch' is sort of inverse to 'liftCoSubst'. In particular, if
-- @liftCoMatch vars ty co == Just s@, then @tyCoSubst s ty == co@,
-- where @==@ there means that the result of tyCoSubst has the same
-- type as the original co; but may be different under the hood.
-- That is, it matches a type against a coercion of the same
-- "shape", and returns a lifting substitution which could have been
-- used to produce the given coercion from the given type.
-- Note that this function is incomplete -- it might return Nothing
-- when there does indeed exist a possible lifting context.
--
-- This function is incomplete in that it doesn't respect the equality
-- in `eqType`. That is, it's possible that this will succeed for t1 and
-- fail for t2, even when t1 `eqType` t2. That's because it depends on
-- there being a very similar structure between the type and the coercion.
-- This incompleteness shouldn't be all that surprising, especially because
-- it depends on the structure of the coercion, which is a silly thing to do.
--
-- The lifting context produced doesn't have to be exacting in the roles
-- of the mappings. This is because any use of the lifting context will
-- also require a desired role. Thus, this algorithm prefers mapping to
-- nominal coercions where it can do so.
liftCoMatch :: TyCoVarSet -> Type -> Coercion -> Maybe LiftingContext
liftCoMatch tmpls ty co
  = do { -- First match the kinds, then the type itself.
         cenv1 <- ty_co_match menv emptyVarEnv ki ki_co ki_ki_co ki_ki_co
       ; cenv2 <- ty_co_match menv cenv1 ty co
                              (mkNomReflCo co_lkind) (mkNomReflCo co_rkind)
       ; return (LC (mkEmptyTCvSubst in_scope) cenv2) }
  where
    menv = ME { me_tmpls = tmpls, me_env = mkRnEnv2 in_scope }
    in_scope = mkInScopeSet (tmpls `unionVarSet` tyCoVarsOfCo co)
    -- Like tcMatchTy, assume all the interesting variables
    -- in ty are in tmpls
    ki = typeKind ty
    ki_co = promoteCoercion co
    ki_ki_co = mkNomReflCo liftedTypeKind
    Pair co_lkind co_rkind = coercionKind ki_co
-- | 'ty_co_match' does all the actual work for 'liftCoMatch'.
ty_co_match :: MatchEnv   -- ^ ambient helpful info
            -> LiftCoEnv  -- ^ incoming subst
            -> Type       -- ^ ty, type to match
            -> Coercion   -- ^ co, coercion to match against
            -> Coercion   -- ^ :: kind of L type of substed ty ~N L kind of co
            -> Coercion   -- ^ :: kind of R type of substed ty ~N R kind of co
            -> Maybe LiftCoEnv
-- Look through synonyms, then try the reflexive short-cut.
ty_co_match menv subst ty co lkco rkco
  | Just ty' <- coreViewOneStarKind ty = ty_co_match menv subst ty' co lkco rkco
  -- handle Refl case:
  | tyCoVarsOfType ty `isNotInDomainOf` subst
  , Just (ty', _) <- isReflCo_maybe co
  , ty `eqType` ty'
  = Just subst
  where
    isNotInDomainOf :: VarSet -> VarEnv a -> Bool
    isNotInDomainOf set env
      = noneSet (\v -> elemVarEnv v env) set
    noneSet :: (Var -> Bool) -> VarSet -> Bool
    noneSet f = varSetAll (not . f)
-- Push casts, coherence and symmetry out of the way before matching.
ty_co_match menv subst ty co lkco rkco
  | CastTy ty' co' <- ty
  = ty_co_match menv subst ty' co (co' `mkTransCo` lkco) (co' `mkTransCo` rkco)
  | CoherenceCo co1 co2 <- co
  = ty_co_match menv subst ty co1 (lkco `mkTransCo` mkSymCo co2) rkco
  | SymCo co' <- co
  = swapLiftCoEnv <$> ty_co_match menv (swapLiftCoEnv subst) ty co' rkco lkco
-- Match a type variable against a non-refl coercion
ty_co_match menv subst (TyVarTy tv1) co lkco rkco
  | Just co1' <- lookupVarEnv subst tv1' -- tv1' is already bound to co1
  = if eqCoercionX (nukeRnEnvL rn_env) co1' co
    then Just subst
    else Nothing -- no match since tv1 matches two different coercions
  | tv1' `elemVarSet` me_tmpls menv -- tv1' is a template var
  = if any (inRnEnvR rn_env) (tyCoVarsOfCoList co)
    then Nothing -- occurs check failed
    else Just $ extendVarEnv subst tv1' $
                castCoercionKind co (mkSymCo lkco) (mkSymCo rkco)
  | otherwise
  = Nothing
  where
    rn_env = me_env menv
    tv1' = rnOccL rn_env tv1
-- just look through SubCo's. We don't really care about roles here.
ty_co_match menv subst ty (SubCo co) lkco rkco
  = ty_co_match menv subst ty co lkco rkco
ty_co_match menv subst (AppTy ty1a ty1b) co _lkco _rkco
  | Just (co2, arg2) <- splitAppCo_maybe co -- c.f. Unify.match on AppTy
  = ty_co_match_app menv subst ty1a [ty1b] co2 [arg2]
ty_co_match menv subst ty1 (AppCo co2 arg2) _lkco _rkco
  | Just (ty1a, ty1b) <- repSplitAppTy_maybe ty1
  -- yes, the one from Type, not TcType; this is for coercion optimization
  = ty_co_match_app menv subst ty1a [ty1b] co2 [arg2]
ty_co_match menv subst (TyConApp tc1 tys) (TyConAppCo _ tc2 cos) _lkco _rkco
  = ty_co_match_tc menv subst tc1 tys tc2 cos
-- A FunTy can match a coercion headed by the function TyCon.
ty_co_match menv subst (FunTy ty1 ty2) (TyConAppCo _ tc cos) _lkco _rkco
  = ty_co_match_tc menv subst funTyCon [ty1, ty2] tc cos
ty_co_match menv subst (ForAllTy (TvBndr tv1 _) ty1)
                       (ForAllCo tv2 kind_co2 co2)
                       lkco rkco
  = do { -- Match the binder kinds, then the bodies under the extended
         -- renaming.
         subst1 <- ty_co_match menv subst (tyVarKind tv1) kind_co2
                               ki_ki_co ki_ki_co
       ; let rn_env0 = me_env menv
             rn_env1 = rnBndr2 rn_env0 tv1 tv2
             menv' = menv { me_env = rn_env1 }
       ; ty_co_match menv' subst1 ty1 co2 lkco rkco }
  where
    ki_ki_co = mkNomReflCo liftedTypeKind
ty_co_match _ subst (CoercionTy {}) _ _ _
  = Just subst -- don't inspect coercions
-- Last resort: expand a Refl coercion one level and retry.
ty_co_match menv subst ty co lkco rkco
  | Just co' <- pushRefl co = ty_co_match menv subst ty co' lkco rkco
  | otherwise = Nothing
-- | Match a saturated 'TyCon' application against a 'TyConAppCo'.
ty_co_match_tc :: MatchEnv -> LiftCoEnv
               -> TyCon -> [Type]
               -> TyCon -> [Coercion]
               -> Maybe LiftCoEnv
ty_co_match_tc menv subst tc1 tys1 tc2 cos2
  = do { guard (tc1 == tc2)
       ; ty_co_match_args menv subst tys1 cos2 lkcos rkcos }
  where
    Pair lkcos rkcos
      = traverse (fmap mkNomReflCo . coercionKind) cos2
-- | Match nested 'AppTy's against nested 'AppCo's, peeling arguments
-- until the heads are atomic, then matching kinds, heads and arguments.
ty_co_match_app :: MatchEnv -> LiftCoEnv
                -> Type -> [Type] -> Coercion -> [Coercion]
                -> Maybe LiftCoEnv
ty_co_match_app menv subst ty1 ty1args co2 co2args
  | Just (ty1', ty1a) <- repSplitAppTy_maybe ty1
  , Just (co2', co2a) <- splitAppCo_maybe co2
  = ty_co_match_app menv subst ty1' (ty1a : ty1args) co2' (co2a : co2args)
  | otherwise
  = do { subst1 <- ty_co_match menv subst ki1 ki2 ki_ki_co ki_ki_co
       ; let Pair lkco rkco = mkNomReflCo <$> coercionKind ki2
       ; subst2 <- ty_co_match menv subst1 ty1 co2 lkco rkco
       ; let Pair lkcos rkcos = traverse (fmap mkNomReflCo . coercionKind) co2args
       ; ty_co_match_args menv subst2 ty1args co2args lkcos rkcos }
  where
    ki1 = typeKind ty1
    ki2 = promoteCoercion co2
    ki_ki_co = mkNomReflCo liftedTypeKind
-- | Match a list of types against a list of coercions pointwise; fails
-- if the lists have different lengths.
ty_co_match_args :: MatchEnv -> LiftCoEnv -> [Type]
                 -> [Coercion] -> [Coercion] -> [Coercion]
                 -> Maybe LiftCoEnv
ty_co_match_args _ subst [] [] _ _ = Just subst
ty_co_match_args menv subst (ty:tys) (arg:args) (lkco:lkcos) (rkco:rkcos)
  = do { subst' <- ty_co_match menv subst ty arg lkco rkco
       ; ty_co_match_args menv subst' tys args lkcos rkcos }
ty_co_match_args _ _ _ _ _ _ = Nothing
-- | Expand a reflexive coercion one structural level, so matching can
-- proceed into its components.
pushRefl :: Coercion -> Maybe Coercion
pushRefl (Refl Nominal (AppTy ty1 ty2))
  = Just (AppCo (Refl Nominal ty1) (mkNomReflCo ty2))
pushRefl (Refl r (FunTy ty1 ty2))
  = Just (TyConAppCo r funTyCon [mkReflCo r ty1, mkReflCo r ty2])
pushRefl (Refl r (TyConApp tc tys))
  = Just (TyConAppCo r tc (zipWith mkReflCo (tyConRolesX r tc) tys))
pushRefl (Refl r (ForAllTy (TvBndr tv _) ty))
  = Just (mkHomoForAllCos_NoRefl [tv] (Refl r ty))
    -- NB: NoRefl variant. Otherwise, we get a loop!
pushRefl (Refl r (CastTy ty co)) = Just (castCoercionKind (Refl r ty) co co)
pushRefl _ = Nothing
| vTurbine/ghc | compiler/types/Unify.hs | bsd-3-clause | 50,166 | 31 | 23 | 14,472 | 7,890 | 4,125 | 3,765 | 558 | 9 |
module WithLocalDeclIn1 where
--The application of a function is replaced by the right-hand side of the definition,
--with actual parameters replacing formals.
--In this example, unfold the first 'sq' in 'sumSquares'
--This example aims to test unfolding a function application with multiple matches.
-- Sum of the squares of x and y; the first square is deliberately a
-- multi-equation case with a local 'pow' so the unfold-refactoring test
-- has something non-trivial to inline.
sumSquares x y =(case x of
                   0 -> 0
                   x -> x ^ pow where pow = 2) + sq y
sq 0=0
sq x=x^pow
where pow=2 | kmate/HaRe | old/testing/foldDef/WithLocalDeclIn1.hs | bsd-3-clause | 454 | 0 | 11 | 124 | 82 | 45 | 37 | 7 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
-- | Entry point; intentionally left 'undefined' in this project template.
main :: IO ()
main = undefined
| erochest/cabal-new | templates/Main.sandbox.hs | apache-2.0 | 88 | 0 | 6 | 18 | 20 | 12 | 8 | 4 | 1 |
module Termination where
import Prelude hiding (gcd, mod, map, repeat, take)
import Language.Haskell.Liquid.Prelude
-- Haskell type signatures; the {-@ ... @-} annotations at each
-- definition site refine these for LiquidHaskell.
fac :: Int -> Int
tfac :: Int -> Int -> Int
map :: (a -> b) -> List a -> List b
merge :: (Ord a) => List a -> List a -> List a
-------------------------------------------------------------------------
-- | Simple Termination
-------------------------------------------------------------------------
{-@ fac :: Nat -> Nat @-}
-- Terminates: the Nat argument strictly decreases on each recursive call.
fac 0 = 1
fac 1 = 1
fac n = n * fac (n-1)
-------------------------------------------------------------------------
-- | Semantic Termination
-------------------------------------------------------------------------
{-@ gcd :: a:Nat -> b:{v:Nat | v < a} -> Int @-}
-- Terminates semantically: the refinement on 'mod' guarantees the second
-- argument strictly decreases.
gcd :: Int -> Int -> Int
gcd a 0 = a
gcd a b = gcd b (a `mod` b)
{-@ mod :: a:Nat -> b:{v:Nat| ((v < a) && (v > 0))} -> {v:Nat | v < b} @-}
-- Subtraction-based remainder; total only on the refined domain above.
mod :: Int -> Int -> Int
mod a b | a - b > b = mod (a - b) b
        | a - b < b = a - b
        | a - b == b = 0
-------------------------------------------------------------------------
-- Explicit Metrics #1
-------------------------------------------------------------------------
{-@ tfac :: Nat -> n:Nat -> Nat / [n] @-}
-- Accumulator factorial; the explicit metric [n] tells LiquidHaskell
-- which argument decreases (the accumulator grows).
tfac acc 0 = acc
tfac acc n = tfac (n * acc) (n-1)
-------------------------------------------------------------------------
-- Explicit Metrics #2
-------------------------------------------------------------------------
-- | All integers from @lo@ (inclusive) up to @hi@ (exclusive); the
-- termination metric is the shrinking gap @hi-lo@.
{-@ range :: lo:Nat -> hi:Nat -> [Nat] / [hi-lo] @-}
range :: Int -> Int -> [Int]
range lo hi
  | lo >= hi  = []
  | otherwise = lo : range (lo + 1) hi
-------------------------------------------------------------------------
-- | Structural Recursion
-------------------------------------------------------------------------
-- | A simple cons-list used to demonstrate structural recursion.
data List a = N | C a (List a)
{-@ measure sz :: List a -> Int
    sz (C x xs) = 1 + (sz xs)
    sz (N) = 0
  @-}
{-@ map :: (a -> b) -> xs:List a -> (List b) / [sz xs] @-}
-- Structural recursion; termination metric is the measure 'sz'.
map _ N = N
map f (C x xs) = f x `C` map f xs
-------------------------------------------------------------------------
-- | Default Metrics
-------------------------------------------------------------------------
{-@ data List [sz] a = N | C {x :: a, xs :: List a } @-}
-- With [sz] declared as the default metric on List, no per-function
-- termination annotation is needed here.
map' _ N = N
map' f (C x xs) = f x `C` map' f xs
-------------------------------------------------------------------------
-- | Termination Expressions Metrics
-------------------------------------------------------------------------
{-@ merge :: xs:_ -> ys:_ -> _ / [sz xs + sz ys] @-}
-- Neither list shrinks on every call, but their combined size does,
-- hence the termination-expression metric.
merge (C x xs) (C y ys)
  | x < y = x `C` merge xs (y `C` ys)
  | otherwise = y `C` merge (x `C` xs) ys
merge _ ys = ys
-------------------------------------------------------------------------
-- | Infinite Streams
-------------------------------------------------------------------------
{- data List [sz] a <p :: List a -> Prop>
= N | C { x :: a
, xs :: List <p> a <<p>>
}
-}
{-@ measure emp :: (List a) -> Prop
emp (N) = true
emp (C x xs) = false
@-}
{- type Stream a = {xs: List <{\v -> not (emp v)}> a | not (emp xs)} @-}
{- Lazy repeat @-}
{- repeat :: a -> Stream a @-}
-- repeat :: a -> List a
-- repeat x = x `C` repeat x
{- take :: Nat -> Stream a -> List a @-}
-- take :: Int -> List a -> List a
-- take 0 _ = N
-- take n (C x xs) = x `C` take (n-1) xs
-- take _ N = liquidError "never happens"
-----------------------------------------------------
{-@ invariant {v : List a | 0 <= sz v} @-}
| mightymoose/liquidhaskell | docs/slides/NEU14/02_Termination.hs | bsd-3-clause | 3,574 | 0 | 9 | 830 | 632 | 350 | 282 | 32 | 1 |
module Typed.TH (benchmarks) where
import Control.Applicative
import Criterion
import Data.Aeson hiding (Result)
import Data.ByteString.Builder as B
import Data.ByteString.Lazy as L
import Twitter.TH
import Typed.Common
-- | Encode a 'Result' with 'encode' directly.
encodeDirect :: Result -> L.ByteString
encodeDirect = encode
-- | Encode by first building a 'Value' with 'toJSON', then encoding it;
-- the comparison baseline for the benchmark.
encodeViaValue :: Result -> L.ByteString
encodeViaValue = encode . toJSON
-- | Benchmark group comparing the two encoding paths on two JSON fixtures.
benchmarks :: Benchmark
benchmarks =
  env ((,) <$> load "json-data/twitter100.json" <*> load "json-data/jp100.json") $ \ ~(twitter100, jp100) ->
  bgroup "th" [
      bgroup "direct" [
        bench "twitter100" $ nf encodeDirect twitter100
      , bench "jp100" $ nf encodeDirect jp100
      ]
    , bgroup "viaValue" [
        bench "twitter100" $ nf encodeViaValue twitter100
      , bench "jp100" $ nf encodeViaValue jp100
      ]
    ]
| 23Skidoo/aeson | benchmarks/Typed/TH.hs | bsd-3-clause | 801 | 0 | 12 | 161 | 222 | 121 | 101 | 22 | 1 |
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleInstances #-}
-- | A representation where all bindings are annotated with aliasing
-- information.
module Futhark.Representation.Aliases
( -- * The Lore definition
Aliases
, Names' (..)
, VarAliases
, ConsumedInExp
, BodyAliasing
, module Futhark.Representation.AST.Attributes.Aliases
-- * Module re-exports
, module Futhark.Representation.AST.Attributes
, module Futhark.Representation.AST.Traversals
, module Futhark.Representation.AST.Pretty
, module Futhark.Representation.AST.Syntax
-- * Adding aliases
, addAliasesToPattern
, mkAliasedLetStm
, mkAliasedBody
, mkPatternAliases
, mkBodyAliases
-- * Removing aliases
, removeProgAliases
, removeFunDefAliases
, removeExpAliases
, removeBodyAliases
, removeStmAliases
, removeLambdaAliases
, removeExtLambdaAliases
, removePatternAliases
, removeScopeAliases
-- * Tracking aliases
, AliasesAndConsumed
, trackAliases
, consumedInStms
)
where
import Control.Applicative
import Control.Monad.Identity
import Control.Monad.Reader
import Data.Maybe
import Data.Monoid
import qualified Data.Map.Strict as M
import qualified Data.Set as S
import Prelude
import Futhark.Representation.AST.Syntax
import Futhark.Representation.AST.Attributes
import Futhark.Representation.AST.Attributes.Aliases
import Futhark.Representation.AST.Traversals
import Futhark.Representation.AST.Pretty
import Futhark.Transform.Rename
import Futhark.Binder
import Futhark.Transform.Substitute
import Futhark.Analysis.Rephrase
import Futhark.Representation.AST.Attributes.Ranges()
import qualified Futhark.Util.Pretty as PP
-- | The lore for the basic representation.
-- | The lore for the basic representation.
data Aliases lore
-- | A wrapper around 'Names' to get around the fact that we need an
-- 'Ord' instance, which 'Names' does not have.
newtype Names' = Names' { unNames :: Names }
  deriving (Show)
instance Monoid Names' where
  mempty = Names' mempty
  x `mappend` y = Names' $ unNames x <> unNames y
-- All 'Names'' compare equal: the aliases are annotations, not identity.
instance Eq Names' where
  _ == _ = True
instance Ord Names' where
  _ `compare` _ = EQ
instance Rename Names' where
  rename (Names' names) = Names' <$> rename names
instance Substitute Names' where
  substituteNames substs (Names' names) = Names' $ substituteNames substs names
-- Alias annotations contribute no free variables.
instance FreeIn Names' where
  freeIn = const mempty
instance PP.Pretty Names' where
  ppr = PP.commasep . map PP.ppr . S.toList . unNames
-- | The aliases of the let-bound variable.
type VarAliases = Names'
-- | Everything consumed in the expression.
type ConsumedInExp = Names'
-- | The aliases of what is returned by the 'Body', and what is
-- consumed inside of it.
type BodyAliasing = ([VarAliases], ConsumedInExp)
-- Every annotation in the aliased lore pairs the underlying lore's
-- annotation with aliasing/consumption information.
instance (Annotations lore, CanBeAliased (Op lore)) =>
         Annotations (Aliases lore) where
  type LetAttr (Aliases lore) = (VarAliases, LetAttr lore)
  type ExpAttr (Aliases lore) = (ConsumedInExp, ExpAttr lore)
  type BodyAttr (Aliases lore) = (BodyAliasing, BodyAttr lore)
  type FParamAttr (Aliases lore) = FParamAttr lore
  type LParamAttr (Aliases lore) = LParamAttr lore
  type RetType (Aliases lore) = RetType lore
  type BranchType (Aliases lore) = BranchType lore
  type Op (Aliases lore) = OpWithAliases (Op lore)
instance AliasesOf (VarAliases, attr) where
  aliasesOf = unNames . fst
instance FreeAttr Names' where
-- | Run a computation in a scope stripped of aliasing annotations.
withoutAliases :: (HasScope (Aliases lore) m, Monad m) =>
                  ReaderT (Scope lore) m a -> m a
withoutAliases m = do
  scope <- asksScope removeScopeAliases
  runReaderT m scope
instance (Attributes lore, CanBeAliased (Op lore)) => Attributes (Aliases lore) where
  expTypesFromPattern =
    withoutAliases . expTypesFromPattern . removePatternAliases
instance (Attributes lore, CanBeAliased (Op lore)) => Aliased (Aliases lore) where
  bodyAliases = map unNames . fst . fst . bodyAttr
  consumedInBody = unNames . snd . fst . bodyAttr
-- Pretty-print the aliases of a pattern element as a comment, combined
-- with whatever the underlying lore prints.
instance PrettyAnnot (PatElemT attr) =>
         PrettyAnnot (PatElemT (VarAliases, attr)) where
  ppAnnot (PatElem name bindage (Names' als, attr)) =
    let alias_comment = PP.oneLine <$> aliasComment name als
    in case (alias_comment, ppAnnot (PatElem name bindage attr)) of
         (_, Nothing) ->
           alias_comment
         (Just alias_comment', Just inner_comment) ->
           Just $ alias_comment' PP.</> inner_comment
         (Nothing, Just inner_comment) ->
           Just inner_comment
instance (Attributes lore, CanBeAliased (Op lore)) => PrettyLore (Aliases lore) where
  ppExpLore (consumed, inner) e =
    maybeComment $ catMaybes [expAttr,
                              mergeAttr,
                              ppExpLore inner $ removeExpAliases e]
    where mergeAttr =
            case e of
              DoLoop _ merge _ body ->
                let mergeParamAliases fparam als
                      | primType (paramType fparam) =
                          Nothing
                      | otherwise =
                          resultAliasComment (paramName fparam) als
                in maybeComment $ catMaybes $
                   zipWith mergeParamAliases (map fst merge) $
                   bodyAliases body
              _ -> Nothing
          expAttr = case S.toList $ unNames consumed of
            [] -> Nothing
            als -> Just $ PP.oneLine $
                   PP.text "-- Consumes " <> PP.commasep (map PP.ppr als)
-- | Join documents vertically; 'Nothing' when there are none.
maybeComment :: [PP.Doc] -> Maybe PP.Doc
maybeComment [] = Nothing
maybeComment cs = Just $ PP.folddoc (PP.</>) cs
-- | Comment describing the aliases of a bound name, if any.
aliasComment :: (PP.Pretty a, PP.Pretty b) =>
                a -> S.Set b -> Maybe PP.Doc
aliasComment name als =
  case S.toList als of
    [] -> Nothing
    als' -> Just $ PP.oneLine $
            PP.text "-- " <> PP.ppr name <> PP.text " aliases " <>
            PP.commasep (map PP.ppr als')
-- | Like 'aliasComment', but phrased for a loop merge parameter's result.
resultAliasComment :: (PP.Pretty a, PP.Pretty b) =>
                      a -> S.Set b -> Maybe PP.Doc
resultAliasComment name als =
  case S.toList als of
    [] -> Nothing
    als' -> Just $ PP.oneLine $
            PP.text "-- Result of " <> PP.ppr name <> PP.text " aliases " <>
            PP.commasep (map PP.ppr als')
-- | A rephraser projecting away the aliasing component of every annotation.
removeAliases :: CanBeAliased (Op lore) => Rephraser Identity (Aliases lore) lore
removeAliases = Rephraser { rephraseExpLore = return . snd
                          , rephraseLetBoundLore = return . snd
                          , rephraseBodyLore = return . snd
                          , rephraseFParamLore = return
                          , rephraseLParamLore = return
                          , rephraseRetType = return
                          , rephraseBranchType = return
                          , rephraseOp = return . removeOpAliases
                          }
-- | Strip aliasing annotations from every entry of a scope.
removeScopeAliases :: Scope (Aliases lore) -> Scope lore
removeScopeAliases = M.map unAlias
  where unAlias (LetInfo (_, attr)) = LetInfo attr
        unAlias (FParamInfo attr) = FParamInfo attr
        unAlias (LParamInfo attr) = LParamInfo attr
        unAlias (IndexInfo it) = IndexInfo it
-- The following all apply 'removeAliases' to one syntactic category.
removeProgAliases :: CanBeAliased (Op lore) =>
                     Prog (Aliases lore) -> Prog lore
removeProgAliases = runIdentity . rephraseProg removeAliases
removeFunDefAliases :: CanBeAliased (Op lore) =>
                       FunDef (Aliases lore) -> FunDef lore
removeFunDefAliases = runIdentity . rephraseFunDef removeAliases
removeExpAliases :: CanBeAliased (Op lore) =>
                    Exp (Aliases lore) -> Exp lore
removeExpAliases = runIdentity . rephraseExp removeAliases
removeBodyAliases :: CanBeAliased (Op lore) =>
                     Body (Aliases lore) -> Body lore
removeBodyAliases = runIdentity . rephraseBody removeAliases
removeStmAliases :: CanBeAliased (Op lore) =>
                    Stm (Aliases lore) -> Stm lore
removeStmAliases = runIdentity . rephraseStm removeAliases
removeLambdaAliases :: CanBeAliased (Op lore) =>
                       Lambda (Aliases lore) -> Lambda lore
removeLambdaAliases = runIdentity . rephraseLambda removeAliases
removeExtLambdaAliases :: CanBeAliased (Op lore) =>
                          ExtLambda (Aliases lore) -> ExtLambda lore
removeExtLambdaAliases = runIdentity . rephraseExtLambda removeAliases
removePatternAliases :: PatternT (Names', a)
                     -> PatternT a
removePatternAliases = runIdentity . rephrasePattern (return . snd)
-- | Annotate a pattern with the aliases induced by binding it to the
-- given expression.
addAliasesToPattern :: (Attributes lore, CanBeAliased (Op lore), Typed attr) =>
                       PatternT attr -> Exp (Aliases lore)
                    -> PatternT (VarAliases, attr)
addAliasesToPattern pat e =
  uncurry Pattern $ mkPatternAliases pat e
-- | Construct a body whose annotation records result aliasing and
-- everything consumed within it.
mkAliasedBody :: (Attributes lore, CanBeAliased (Op lore)) =>
                 BodyAttr lore -> [Stm (Aliases lore)] -> Result -> Body (Aliases lore)
mkAliasedBody innerlore bnds res =
  Body (mkBodyAliases bnds res, innerlore) bnds res
-- | Compute annotated context and value pattern elements for a pattern
-- bound to the given expression.
mkPatternAliases :: (Attributes lore, Aliased lore, Typed attr) =>
                    PatternT attr -> Exp lore
                 -> ([PatElemT (VarAliases, attr)],
                     [PatElemT (VarAliases, attr)])
mkPatternAliases pat e =
  -- Some part of the pattern may be the context. This does not have
  -- aliases from expAliases, so we use a hack to compute aliases of
  -- the context.
  let als = expAliases e ++ repeat mempty -- In case the pattern has
                                          -- more elements (this
                                          -- implies a type error).
      context_als = mkContextAliases pat e
  in (zipWith annotateBindee (patternContextElements pat) context_als,
      zipWith annotateBindee (patternValueElements pat) als)
  where annotateBindee bindee names =
          bindee `setPatElemLore` (Names' names', patElemAttr bindee)
          -- Only array- and memory-typed bindings can meaningfully alias.
          where names' =
                  case (patElemBindage bindee, patElemRequires bindee) of
                    (BindInPlace {}, _) -> mempty
                    (_, Array {}) -> names
                    (_, Mem _ _) -> names
                    _ -> mempty
-- | Aliases of the context part of a pattern, which 'expAliases' does
-- not cover; special-cased for loops and branches.
mkContextAliases :: (Attributes lore, Aliased lore) =>
                    PatternT attr -> Exp lore
                 -> [Names]
mkContextAliases pat (DoLoop ctxmerge valmerge _ body) =
  let ctx = loopResultContext (map fst ctxmerge) (map fst valmerge)
      init_als = zip mergenames $ map (subExpAliases . snd) $ ctxmerge ++ valmerge
      expand als = als <> S.unions (mapMaybe (`lookup` init_als) (S.toList als))
      merge_als = zip mergenames $
                  map ((`S.difference` mergenames_set) . expand) $
                  bodyAliases body
  in if length ctx == length (patternContextElements pat)
     then map (fromMaybe mempty . flip lookup merge_als . paramName) ctx
     else map (const mempty) $ patternContextElements pat
  where mergenames = map (paramName . fst) $ ctxmerge ++ valmerge
        mergenames_set = S.fromList mergenames
mkContextAliases pat (If _ tbranch fbranch _) =
  take (length $ patternContextNames pat) $
  zipWith (<>) (bodyAliases tbranch) (bodyAliases fbranch)
mkContextAliases pat _ =
  replicate (length $ patternContextElements pat) mempty
-- | Aliasing annotation for a whole body: result aliases and
-- consumption, with locally-bound names filtered out.
mkBodyAliases :: Aliased lore =>
                 [Stm lore]
              -> Result
              -> BodyAliasing
mkBodyAliases bnds res =
  -- We need to remove the names that are bound in bnds from the alias
  -- and consumption sets. We do this by computing the transitive
  -- closure of the alias map (within bnds), then removing anything
  -- bound in bnds.
  let (aliases, consumed) = mkStmsAliases bnds res
      boundNames =
        mconcat $ map (S.fromList . patternNames . stmPattern) bnds
      bound = (`S.member` boundNames)
      aliases' = map (S.filter (not . bound)) aliases
      consumed' = S.filter (not . bound) consumed
  in (map Names' aliases', Names' consumed')
-- | Per-result aliases and total consumption for a statement sequence.
mkStmsAliases :: Aliased lore =>
                 [Stm lore] -> [SubExp]
              -> ([Names], Names)
mkStmsAliases bnds res = delve mempty bnds
  where delve (aliasmap, consumed) [] =
          (map (aliasClosure aliasmap . subExpAliases) res,
           consumed)
        delve (aliasmap, consumed) (bnd:bnds') =
          delve (trackAliases (aliasmap, consumed) bnd) bnds'
        aliasClosure aliasmap names =
          names `S.union` mconcat (map look $ S.toList names)
          where look k = M.findWithDefault mempty k aliasmap
-- | Everything consumed in the given bindings and result (even transitively).
consumedInStms :: Aliased lore => [Stm lore] -> [SubExp] -> Names
consumedInStms bnds res = snd $ mkStmsAliases bnds res
-- | A transitive alias map together with everything consumed so far.
type AliasesAndConsumed = (M.Map VName Names,
                           Names)
-- | Fold one statement into the alias map and consumption set,
-- transitively expanding aliases as we go.
trackAliases :: Aliased lore =>
                AliasesAndConsumed -> Stm lore
             -> AliasesAndConsumed
trackAliases (aliasmap, consumed) bnd =
  let pat = stmPattern bnd
      als = M.fromList $
            zip (patternNames pat) (map addAliasesOfAliases $ patternAliases pat)
      aliasmap' = als <> aliasmap
      consumed' = consumed <> addAliasesOfAliases (consumedInStm bnd)
  in (aliasmap', consumed')
  where addAliasesOfAliases names = names <> aliasesOfAliases names
        aliasesOfAliases = mconcat . map look . S.toList
        look k = M.findWithDefault mempty k aliasmap
-- | Build a let-statement whose pattern and auxiliary information carry
-- the aliasing/consumption induced by the expression.
mkAliasedLetStm :: (Attributes lore, CanBeAliased (Op lore)) =>
                   Pattern lore
                -> StmAux (ExpAttr lore) -> Exp (Aliases lore)
                -> Stm (Aliases lore)
mkAliasedLetStm pat (StmAux cs attr) e =
  Let (addAliasesToPattern pat e)
      (StmAux cs (Names' $ consumedInPattern pat <> consumedInExp e, attr))
      e
-- Construct aliased bindings by delegating to the underlying lore's
-- 'Bindable' instance and then re-attaching aliasing information.
instance (Bindable lore, CanBeAliased (Op lore)) => Bindable (Aliases lore) where
  mkExpAttr pat e =
    let attr = mkExpAttr (removePatternAliases pat) $ removeExpAliases e
    in (Names' $ consumedInPattern pat <> consumedInExp e, attr)
  mkExpPat ctx val e =
    addAliasesToPattern (mkExpPat ctx val $ removeExpAliases e) e
  mkLetNames names e = do
    env <- asksScope removeScopeAliases
    flip runReaderT env $ do
      Let pat attr _ <- mkLetNames names $ removeExpAliases e
      return $ mkAliasedLetStm pat attr e
  mkBody bnds res =
    let Body bodylore _ _ = mkBody (map removeStmAliases bnds) res
    in mkAliasedBody bodylore bnds res
instance Bindable (Aliases lore) => BinderOps (Aliases lore) where
  mkBodyB = bindableMkBodyB
  mkExpAttrB = bindableMkExpAttrB
  mkLetNamesB = bindableMkLetNamesB
| ihc/futhark | src/Futhark/Representation/Aliases.hs | isc | 14,656 | 12 | 20 | 3,983 | 4,067 | 2,102 | 1,965 | -1 | -1 |
{-# LANGUAGE DoAndIfThenElse #-}
module IfThenElseLayout where
-- | Ask the player which cards to exchange, re-prompting until the
-- answer names at most three cards that are all actually in the hand.
askCardsForExchange :: Hand -> IO [Card]
askCardsForExchange h = do
    putStrLn "Which card do you want to exchange? (Max. 3)"
    response <- getLine
    -- Fix: the original checked `all (flip elem h) h` — the hand against
    -- itself, which is vacuously true — so invalid selections were never
    -- rejected.  Validate the *chosen* cards against the hand instead.
    let chosen = readCards response
    if length chosen > 3 || not (all (flip elem h) chosen) then
        askCardsForExchange h
    else
        return chosen
| Pnom/haskell-ast-pretty | Test/examples/IfThenElseLayout.hs | mit | 498 | 0 | 13 | 218 | 100 | 49 | 51 | 9 | 2 |
-- Consider all integer combinations of a^b for 2 ≤ a ≤ 5 and
-- 2 ≤ b ≤ 5:
-- 2^2=4, 2^3=8, 2^4=16, 2^5=32
-- 3^2=9, 3^3=27, 3^4=81, 3^5=243
-- 4^2=16, 4^3=64, 4^4=256, 4^5=1024
-- 5^2=25, 5^3=125, 5^4=625, 5^5=3125
-- If they are then placed in numerical order, with any repeats
-- removed, we get the following sequence of 15 distinct terms:
-- 4, 8, 9, 16, 25, 27, 32, 64, 81, 125, 243, 256, 625, 1024, 3125
-- How many distinct terms are in the sequence generated by a^b for
-- 2 ≤ a ≤ 100 and 2 ≤ b ≤ 100?
module Euler.Problem029
( solution
, expFact
) where
import qualified Data.Set as S
import Math.Primes
-- | Count distinct values of a^b for 2 <= a,b <= cap by collecting the
-- prime factorisations (with exponents scaled by the outer power b) in a
-- set, avoiding computing the huge powers themselves.
solution :: Integral a => a -> Int
solution cap = S.size . foldr S.insert S.empty $ do
  n <- [2..cap]
  m <- [2..cap]
  return $ expFact m $ primeFactorization n
-- | Scale every exponent in a factorisation by @n@, leaving bases alone.
expFact :: Integral a => a -> [(b, a)] -> [(b, a)]
expFact n factors = [ (base, e * n) | (base, e) <- factors ]
| whittle/euler | src/Euler/Problem029.hs | mit | 914 | 0 | 10 | 209 | 193 | 110 | 83 | 12 | 1 |
{-# LANGUAGE PatternGuards #-}
module Plugin.Pl.PrettyPrinter (
prettyDecl,
prettyExpr,
prettyTopLevel,
) where
import Plugin.Pl.Common
import Data.Char
import Data.List (intercalate)
-- | Render a declaration as @name = expr@.
prettyDecl :: Decl -> String
prettyDecl (Define f e) = f ++ " = " ++ prettyExpr e
-- | Render several declarations, separated by semicolons.
prettyDecls :: [Decl] -> String
prettyDecls = intercalate "; " . map prettyDecl
-- | Render an expression, going via the surface-level 'SExpr' form.
prettyExpr :: Expr -> String
prettyExpr = show . toSExpr
-- | Render either a top-level expression or a declaration.
prettyTopLevel :: TopLevel -> String
prettyTopLevel (TLE e) = prettyExpr e
prettyTopLevel (TLD _ d) = prettyDecl d
-- | Surface-syntax tree used only for pretty-printing: it reconstructs
-- lambdas, sections, tuples, lists and enumerations from core 'Expr's.
data SExpr
  = SVar !String
  | SLambda ![Pattern] !SExpr
  | SLet ![Decl] !SExpr
  | SApp !SExpr !SExpr
  | SInfix !String !SExpr !SExpr
  | LeftSection !String !SExpr -- (x +)
  | RightSection !String !SExpr -- (+ x)
  | List ![SExpr]
  | Tuple ![SExpr]
  | Enum !Expr !(Maybe Expr) !(Maybe Expr)
{-# INLINE toSExprHead #-}
-- | Recognise a fully-applied tuple constructor or enumeration function;
-- the argument list @tl@ arrives in reverse application order.
toSExprHead :: String -> [Expr] -> Maybe SExpr
toSExprHead hd tl
  | all (==',') hd, length hd+1 == length tl
  = Just . Tuple . reverse $ map toSExpr tl
  | otherwise = case (hd,reverse tl) of
      ("enumFrom", [e]) -> Just $ Enum e Nothing Nothing
      ("enumFromThen", [e,e']) -> Just $ Enum e (Just e') Nothing
      ("enumFromTo", [e,e']) -> Just $ Enum e Nothing (Just e')
      ("enumFromThenTo", [e,e',e'']) -> Just $ Enum e (Just e') (Just e'')
      _ -> Nothing
-- | Convert a core expression to surface syntax, recognising merged
-- lambdas, literal lists, tuples, enumerations and operator sections.
toSExpr :: Expr -> SExpr
toSExpr (Var _ v) = SVar v
toSExpr (Lambda v e) = case toSExpr e of
  (SLambda vs e') -> SLambda (v:vs) e' -- merge nested lambdas
  e' -> SLambda [v] e'
toSExpr (Let ds e) = SLet ds $ toSExpr e
toSExpr e | Just (hd,tl) <- getHead e, Just se <- toSExprHead hd tl = se
toSExpr e | (ls, tl) <- getList e, tl == nil
  = List $ map toSExpr ls
toSExpr (App e1 e2) = case e1 of
  App (Var Inf v) e0
    -> SInfix v (toSExpr e0) (toSExpr e2)
  Var Inf v | v /= "-" -- (- x) would parse as negation, so no section
    -> LeftSection v (toSExpr e2)
  Var _ "flip" | Var Inf v <- e2, v == "-" -> toSExpr $ Var Pref "subtract"
  App (Var _ "flip") (Var pr v)
    | v == "-" -> toSExpr $ Var Pref "subtract" `App` e2
    | v == "id" -> RightSection "$" (toSExpr e2)
    | Inf <- pr, any (/= ',') v -> RightSection v (toSExpr e2)
  _ -> SApp (toSExpr e1) (toSExpr e2)
-- | Split an application spine into head variable and argument list.
getHead :: Expr -> Maybe (String, [Expr])
getHead (Var _ v) = Just (v, [])
getHead (App e1 e2) = second (e2:) `fmap` getHead e1
getHead _ = Nothing
-- Precedence-aware pretty-printer; parentheses are inserted only where
-- the operator fixity table ('lookupFix') requires them.
instance Show SExpr where
  showsPrec _ (SVar v) = (getPrefName v ++)
  showsPrec p (SLambda vs e) = showParen (p > minPrec) $ ('\\':) .
    foldr (.) id (intersperse (' ':) (map (prettyPrecPattern $ maxPrec+1) vs)) .
    (" -> "++) . showsPrec minPrec e
  showsPrec p (SApp e1 e2) = showParen (p > maxPrec) $
    showsPrec maxPrec e1 . (' ':) . showsPrec (maxPrec+1) e2
  showsPrec _ (LeftSection fx e) = showParen True $
    showsPrec (snd (lookupFix fx) + 1) e . (' ':) . (getInfName fx++)
  showsPrec _ (RightSection fx e) = showParen True $
    (getInfName fx++) . (' ':) . showsPrec (snd (lookupFix fx) + 1) e
  showsPrec _ (Tuple es) = showParen True $
    (concat `id` intersperse ", " (map show es) ++)
  showsPrec _ (List es)
    -- A list of character variables is rendered as a string literal.
    | Just cs <- mapM ((=<<) readM . fromSVar) es = shows (cs::String)
    | otherwise = ('[':) .
        (concat `id` intersperse ", " (map show es) ++) . (']':)
    where fromSVar (SVar str) = Just str
          fromSVar _ = Nothing
  showsPrec _ (Enum fr tn to) = ('[':) . showString (prettyExpr fr) .
    showsMaybe (((',':) . prettyExpr) `fmap` tn) . (".."++) .
    showsMaybe (prettyExpr `fmap` to) . (']':)
    where showsMaybe = maybe id (++)
  showsPrec _ (SLet ds e) = ("let "++) . showString (prettyDecls ds ++ " in ") . shows e
  showsPrec p (SInfix fx e1 e2) = showParen (p > fixity) $
      showsPrec f1 e1 . (' ':) . (getInfName fx++) . (' ':) .
      showsPrec f2 e2 where
    fixity = snd $ lookupFix fx
    (f1, f2) = case fst $ lookupFix fx of
      AssocRight -> (fixity+1, fixity + infixSafe e2 AssocLeft fixity)
      AssocLeft -> (fixity + infixSafe e1 AssocRight fixity, fixity+1)
      AssocNone -> (fixity+1, fixity+1)
    -- This is a little bit awkward, but at least seems to produce no false
    -- results anymore
    infixSafe :: SExpr -> Assoc -> Int -> Int
    infixSafe (SInfix fx'' _ _) assoc fx'
      | lookupFix fx'' == (assoc, fx') = 1
      | otherwise = 0
    infixSafe _ _ _ = 0 -- doesn't matter
-- | Precedence-aware printer for patterns (variables, tuples, cons).
prettyPrecPattern :: Int -> Pattern -> ShowS
prettyPrecPattern _ (PVar v) = showString v
prettyPrecPattern _ (PTuple p1 p2) = showParen True $
  prettyPrecPattern 0 p1 . (", "++) . prettyPrecPattern 0 p2
prettyPrecPattern p (PCons p1 p2) = showParen (p>5) $
  prettyPrecPattern 6 p1 . (':':) . prettyPrecPattern 5 p2
-- | Is this name written with operator (symbolic) syntax?  Handles
-- module-qualified operators such as @Data.Map.!@ by peeling off
-- leading module components one at a time.
isOperator :: String -> Bool
isOperator name =
  case break (== '.') name of
    (whole, "") -> isSymbolic whole
    (prefix, _dot : remainder)
      | isSymbolic prefix   -> isSymbolic remainder
      | isModuleName prefix -> isOperator remainder
      | otherwise           -> False
  where
    isModuleName "" = False
    isModuleName (c0 : cs0) =
      isUpper c0 && all (\ch -> isAlphaNum ch || ch `elem` ['\'', '_']) cs0
    isSymbolic sym =
      sym /= "()" && all (\ch -> isSymbol ch || isPunctuation ch) sym
-- | Operator names stay bare; alphanumeric names get backticks for
-- infix position.
getInfName :: String -> String
getInfName str = if isOperator str then str else "`"++str++"`"
-- | Operators (and tuple constructors) get parentheses for prefix use.
getPrefName :: String -> String
getPrefName str = if isOperator str || ',' `elem` str then "("++str++")" else str
{-
instance Show Assoc where
show AssocLeft = "AssocLeft"
show AssocRight = "AssocRight"
show AssocNone = "AssocNone"
instance Ord Assoc where
AssocNone <= _ = True
_ <= AssocNone = False
AssocLeft <= _ = True
_ <= AssocLeft = False
_ <= _ = True
-}
| jystic/pointfree | Plugin/Pl/PrettyPrinter.hs | mit | 5,634 | 0 | 16 | 1,435 | 2,431 | 1,240 | 1,191 | 159 | 6 |
{-# LANGUAGE ExistentialQuantification, Rank2Types #-}
module Build.Types.Types
(TextShow(..),
HasName(..),
HasLanguage(..),
File(..),
BuildStep(..),
Target(..),
Condition(..),
Language(..),
Provenance(..),
FileType(..),
AnyFile(..),
Task(..),
HeaderFile(..),
SourceFile(..),
ObjectFile(..),
ExecutableFile(..),
LibraryFile(..),
BuildStepType(..),
AnyBuildStep(..),
AnyTarget(..),
ExecutableTarget(..),
LibraryTarget(..),
InvocationBuildStep(..),
AmalgamateFilesBuildStep(..),
CopyFileBuildStep(..),
MakeDirectoryBuildStep(..),
ConditionalBuildStep(..),
ConditionType(..),
AnyCondition(..),
AndCondition(..),
OrCondition(..),
NotCondition(..),
PathExistsCondition(..),
FileExistsCondition(..),
DirectoryExistsCondition(..),
Mode(..),
Project(..),
Defaults(..),
ProjectSpecification(..),
SubprojectSpecification(..),
AnyTargetSpecification(..),
TargetSpecification(..),
ExecutableSpecification(..),
LibrarySpecification(..),
InvocationSpecification(..),
Buildfile(..))
where
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Data.Text as Text
import Control.Lens
import Data.Typeable
-- | Types renderable as 'Text.Text' (a Text-flavoured analogue of 'Show').
class TextShow textShow where
  textShow :: textShow -> Text.Text

-- | Types carrying a lens-accessible name.
class HasName hasName where
  name :: Simple Lens hasName Text.Text

-- | Types associated with a programming 'Language'.
class HasLanguage hasLanguage where
  language :: Simple Lens hasLanguage Language

-- | A file known to the build system.  'fromAnyFile' recovers the
-- concrete type from the existential 'AnyFile' wrapper.
class (TextShow file, Eq file, Ord file, Typeable file) => File file where
  fromAnyFile :: AnyFile -> Maybe file
  fileType :: Getter file FileType
  path :: Simple Lens file Text.Text
  provenance :: Simple Lens file Provenance

-- | A buildable target: enumerates the build steps for a given 'Task',
-- the names of its prerequisite targets, and the files it produces.
class (HasName target, TextShow target, Typeable target) => Target target where
  targetBuildSteps :: Project -> Task -> target -> [AnyBuildStep]
  targetPrerequisites :: Simple Lens target (Set.Set Text.Text)
  targetProducts :: Getter target (Set.Set AnyFile)

-- | A single executable build action with declared inputs and outputs;
-- 'performBuildStep' runs it and reports success.
class (TextShow buildStep, Eq buildStep, Ord buildStep, Typeable buildStep)
    => BuildStep buildStep where
  fromAnyBuildStep :: AnyBuildStep -> Maybe buildStep
  buildStepType :: Getter buildStep BuildStepType
  buildStepInputs :: Getter buildStep (Set.Set AnyFile)
  buildStepOutputs :: Getter buildStep (Set.Set AnyFile)
  performBuildStep :: buildStep -> IO Bool

-- | A runtime-testable condition: 'explainCondition' gives a
-- human-readable description, 'testCondition' evaluates it in IO.
class (TextShow condition, Eq condition, Ord condition, Typeable condition)
    => Condition condition where
  fromAnyCondition :: AnyCondition -> Maybe condition
  conditionType :: Getter condition ConditionType
  explainCondition :: condition -> Text.Text
  testCondition :: condition -> IO Bool

-- | A parsed target specification from a buildfile; 'fromAnyTargetSpecification'
-- recovers the concrete type from the existential wrapper.
class (HasName target, TextShow target, Typeable target)
    => TargetSpecification target where
  fromAnyTargetSpecification :: AnyTargetSpecification -> Maybe target
data AnyFile = forall file . File file => AnyFile file
data AnyTarget = forall target . Target target => AnyTarget target
data AnyBuildStep =
forall buildStep . BuildStep buildStep => AnyBuildStep buildStep
data AnyCondition =
forall condition . Condition condition => AnyCondition condition
data BuildStepType
= InvocationBuildStepType
| AmalgamateFilesBuildStepType
| CopyFileBuildStepType
| MakeDirectoryBuildStepType
| ConditionalBuildStepType
data ConditionType
= AndConditionType
| OrConditionType
| NotConditionType
| PathExistsConditionType
| FileExistsConditionType
| DirectoryExistsConditionType
data Language
= CLanguage
| HaskellLanguage
data Provenance
= InputProvenance
| BuiltProvenance
| SystemProvenance
data FileType
= UnknownFileType
| HeaderFileType Language
| SourceFileType Language
| ObjectFileType
| ExecutableFileType
| LibraryFileType
data Task
= AmalgamationTask
| BinaryTask
| TestTask
| DebugTask
| CleanTask
data HeaderFile =
HeaderFile {
_headerFileLanguage :: Language,
_headerFilePath :: Text.Text,
_headerFileProvenance :: Provenance
}
data SourceFile =
SourceFile {
_sourceFileLanguage :: Language,
_sourceFilePath :: Text.Text,
_sourceFileProvenance :: Provenance
}
data ObjectFile =
ObjectFile {
_objectFilePath :: Text.Text,
_objectFileProvenance :: Provenance
}
data ExecutableFile =
ExecutableFile {
_executableFilePath :: Text.Text,
_executableFileProvenance :: Provenance
}
data LibraryFile =
LibraryFile {
_libraryFilePath :: Text.Text,
_libraryFileProvenance :: Provenance
}
data InvocationBuildStep =
InvocationBuildStep {
_invocationBuildStepExecutable :: ExecutableFile,
_invocationBuildStepParameters :: [Text.Text],
_invocationBuildStepInputs :: Set.Set AnyFile,
_invocationBuildStepOutputs :: Set.Set AnyFile
}
data AmalgamateFilesBuildStep =
AmalgamateFilesBuildStep {
_amalgamateFilesBuildStepOutput :: SourceFile,
_amalgamateFilesBuildStepInputs :: [SourceFile]
}
data CopyFileBuildStep =
CopyFileBuildStep {
_copyFileBuildStepInput :: AnyFile,
_copyFileBuildStepOutputPath :: Text.Text
}
data MakeDirectoryBuildStep =
MakeDirectoryBuildStep {
_makeDirectoryBuildStepPath :: Text.Text
}
data ConditionalBuildStep =
ConditionalBuildStep {
_conditionalBuildStepCondition :: AnyCondition,
_conditionalBuildStepWhenTrue :: [AnyBuildStep],
_conditionalBuildStepWhenFalse :: [AnyBuildStep]
}
data AndCondition =
AndCondition {
_andConditionItems :: [AnyCondition]
}
data OrCondition =
OrCondition {
_orConditionItems :: [AnyCondition]
}
data NotCondition =
NotCondition {
_notConditionItem :: AnyCondition
}
data PathExistsCondition =
PathExistsCondition {
_pathExistsConditionPath :: Text.Text
}
data FileExistsCondition =
FileExistsCondition {
_fileExistsConditionPath :: Text.Text
}
data DirectoryExistsCondition =
DirectoryExistsCondition {
_directoryExistsConditionPath :: Text.Text
}
data ExecutableTarget =
ExecutableTarget {
_executableTargetName :: Text.Text,
_executableTargetPrerequisites :: Set.Set Text.Text,
_executableTargetPrivateHeaders :: Set.Set HeaderFile,
_executableTargetSources :: Set.Set SourceFile,
_executableTargetExtraInvocations :: [InvocationBuildStep]
}
data LibraryTarget =
LibraryTarget {
_libraryTargetName :: Text.Text,
_libraryTargetPrerequisites :: Set.Set Text.Text,
_libraryTargetPublicHeaders :: Set.Set HeaderFile,
_libraryTargetPrivateHeaders :: Set.Set HeaderFile,
_libraryTargetSources :: Set.Set SourceFile,
_libraryTargetExtraInvocations :: [InvocationBuildStep]
}
data Mode
= HelpMode
| TaskMode Task (Maybe Text.Text)
data Project =
Project {
_projectRootPath :: Text.Text,
_projectName :: Text.Text,
_projectTargets :: Map.Map Text.Text AnyTarget
}
data Defaults =
Defaults {
_defaultsTarget :: Maybe Text.Text
}
data InvocationSpecification =
InvocationSpecification {
_invocationSpecificationExecutable :: Text.Text,
_invocationSpecificationParameters :: [Text.Text],
_invocationSpecificationInputs :: [Text.Text],
_invocationSpecificationOutputs :: [Text.Text]
}
data ExecutableSpecification =
ExecutableSpecification {
_executableSpecificationName :: Text.Text,
_executableSpecificationPrerequisites :: Set.Set Text.Text,
_executableSpecificationPrivateHeaders :: Set.Set Text.Text,
_executableSpecificationSources :: Set.Set Text.Text,
_executableSpecificationExtraInvocations :: [InvocationSpecification]
}
data LibrarySpecification =
LibrarySpecification {
_librarySpecificationName :: Text.Text,
_librarySpecificationPrerequisites :: Set.Set Text.Text,
_librarySpecificationPublicHeaders :: Set.Set Text.Text,
_librarySpecificationPrivateHeaders :: Set.Set Text.Text,
_librarySpecificationSources :: Set.Set Text.Text,
_librarySpecificationExtraInvocations :: [InvocationSpecification]
}
data AnyTargetSpecification =
forall target . TargetSpecification target => AnyTargetSpecification target
data ProjectSpecification =
ProjectSpecification {
_projectSpecificationName :: Text.Text,
_projectSpecificationDefaultTarget :: Maybe Text.Text,
_projectSpecificationTargets :: [AnyTargetSpecification],
_projectSpecificationSubprojects :: Set.Set Text.Text
}
data SubprojectSpecification =
SubprojectSpecification {
_subprojectSpecificationDefaultTarget :: Maybe Text.Text,
_subprojectSpecificationTargets :: [AnyTargetSpecification],
_subprojectSpecificationSubprojects :: Set.Set Text.Text
}
data Buildfile
= ProjectBuildfile ProjectSpecification
| SubprojectBuildfile SubprojectSpecification
| IreneKnapp/modern-data | Tools/Build/Haskell/Build/Types/Types.hs | mit | 8,912 | 0 | 10 | 1,688 | 1,935 | 1,140 | 795 | 248 | 0 |
{-# LANGUAGE CPP #-}
-- Copyright © 2008 Melissa E. O'Neill
-- Used without permission
-- | "Genuine Sieve of Eratosthenes" in pure lists. From
-- the paper of the same title by Melissa E. O'Neill,
-- <http://www.cs.hmc.edu/~oneill/papers/Sieve-JFP.pdf>.
-- This is the version with evens struck but without the
-- full wheel, for comparison purposes. The priority queues
-- are custom-crafted: see the comments there for details.
import Data.Word
import DefaultMain
#ifdef USE_MPQ
import MPQ
#else
import PQ
#endif
-- | All primes: 2 followed by sieving the odd candidates only
-- (evens are struck by construction of the candidate list).
primes :: [Word64]
primes = 2 : sieve [3,5..]
-- | Incremental Sieve of Eratosthenes over an arbitrary candidate list.
--
-- A priority queue maps, for each prime p found so far, the next
-- not-yet-passed multiple of p (starting at p*p) to the lazy list of
-- its further multiples.  Each candidate x is either discarded (some
-- queue key has reached x, so x is composite) or emitted as prime.
sieve :: [Word64] -> [Word64]
sieve [] = []
sieve (x0:xs0) =
    x0 : sieve' xs0 (insertprime x0 xs0 empty)
  where
    -- Enqueue a fresh prime p: key p*p, value = its further multiples.
    -- Multiples are built from the remaining candidates, so numbers
    -- absent from the input (the evens) are never enqueued.
    insertprime p xs table = insert (p*p) (map (* p) xs) table
    sieve' [] _ = []
    sieve' (x:xs) table
      -- nextComposite <= x: x is struck (or already passed) — drop it.
      | nextComposite <= x = sieve' xs (adjust table)
      -- No queue entry reaches x: x is prime; start sieving with it.
      | otherwise = x : sieve' xs (insertprime x xs table)
      where
        nextComposite = minKey table
        -- Advance every queue entry whose key has fallen to <= x.
        adjust table'
          | n <= x = adjust (deleteMinAndInsert n' ns table')
          | otherwise = table'
          where
            (n, n':ns) = findMin table'
main :: IO ()
main = defaultMain (Just primes) Nothing
| BartMassey/genuine-sieve | oneill-sieve.hs | mit | 1,180 | 0 | 12 | 285 | 328 | 172 | 156 | 22 | 2 |
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedStrings #-}
-- Translate from the 'Stage1' IR to the 'Flat' IR.
--
-- As the name of the latter suggests, this involves flattening the namepace.
module Trans.Stage1ToFlat (cgrToCgr) where
import Data.Word
import qualified Data.Map as M
import qualified Data.Vector as V
import qualified IR.Common as C
import qualified IR.Flat as Flat
import qualified IR.Name as Name
import qualified IR.Stage1 as Stage1
type NodeMap = M.Map Word64 Flat.Node
cgrToCgr :: Stage1.CodeGenReq -> Flat.CodeGenReq
cgrToCgr Stage1.CodeGenReq{allFiles, reqFiles=inFiles} = Flat.CodeGenReq
{ reqFiles = outFiles
, allNodes
}
where
outFiles = map (reqFileToFile fileMap) inFiles
fileMap = M.fromList
[ (fileId, fileToNodes nodeMap file)
| file@Stage1.File{fileId} <- allFiles
]
allNodes = concatMap snd (M.toList fileMap)
nodeMap = M.fromList [(nodeId, node) | node@Flat.Node{nodeId} <- allNodes]
fileToNodes :: NodeMap -> Stage1.File -> [Flat.Node]
fileToNodes nodeMap Stage1.File{fileNodes, fileId} =
concatMap
(\(unQ, node) ->
nestedToNodes nodeMap fileId (Name.unQToLocal unQ) node []
)
fileNodes
reqFileToFile :: M.Map Word64 [Flat.Node] -> Stage1.ReqFile -> Flat.File
reqFileToFile fileMap Stage1.ReqFile{fileName, file=Stage1.File{fileId}} =
Flat.File
{ nodes = fileMap M.! fileId
, fileName
, fileId
}
-- | Build references for a node's type parameters, pairing each
-- parameter name with its positional index and scope node.
paramsToParams :: NodeMap -> Word64 -> [Name.UnQ] -> [C.TypeParamRef Flat.Node]
paramsToParams nodeMap nodeId names =
    zipWith mkRef [0 ..] names
  where
    -- The scope node is the same for every parameter of this node.
    scope = nodeMap M.! nodeId
    mkRef i name =
        C.TypeParamRef
            { paramName = name
            , paramIndex = i
            , paramScope = scope
            }
applyBrandNode :: C.MapBrand Flat.Node -> Flat.Node -> C.ListBrand Flat.Node
applyBrandNode m Flat.Node{typeParams} = applyBrandParams typeParams m
applyBrandParams :: [C.TypeParamRef Flat.Node] -> C.MapBrand Flat.Node -> C.ListBrand Flat.Node
applyBrandParams params m = C.ListBrand $ map (`applyBrandParam` m) params
applyBrandParam
:: C.TypeParamRef Flat.Node
-> C.MapBrand Flat.Node
-> C.PtrType (C.ListBrand Flat.Node) Flat.Node
applyBrandParam param@C.TypeParamRef{paramIndex, paramScope=Flat.Node{nodeId}} (C.MapBrand m) =
case M.lookup nodeId m of
Nothing -> C.PtrParam param
Just (C.Bind bindings) ->
let binding = bindings V.! paramIndex in
case binding of
C.Unbound -> C.PtrParam param
C.BoundType ty -> applyBrandPtrType ty
type ApplyBrandFn f
= f (C.MapBrand Flat.Node) Flat.Node
-> f (C.ListBrand Flat.Node) Flat.Node
applyBrandCompositeType :: ApplyBrandFn C.CompositeType
applyBrandCompositeType (C.StructType n b) = C.StructType n (applyBrandNode b n)
applyBrandInterfaceType :: ApplyBrandFn C.InterfaceType
applyBrandInterfaceType (C.InterfaceType n b) = C.InterfaceType n (applyBrandNode b n)
applyBrandValue :: ApplyBrandFn C.Value
applyBrandValue = \case
C.VoidValue -> C.VoidValue
C.WordValue v t -> C.WordValue v t
C.PtrValue t p -> C.PtrValue (applyBrandPtrType t) p
-- | Apply a brand (map form -> list form) to a pointer type,
-- recursing into element, interface and composite positions.
applyBrandPtrType :: ApplyBrandFn C.PtrType
applyBrandPtrType ptr =
    case ptr of
        C.ListOf elt     -> C.ListOf (applyBrandType elt)
        C.PrimPtr p      -> C.PrimPtr p
        C.PtrInterface i -> C.PtrInterface (applyBrandInterfaceType i)
        C.PtrComposite c -> C.PtrComposite (applyBrandCompositeType c)
        C.PtrParam p     -> C.PtrParam p
-- | Apply a brand (map form -> list form) to a general type,
-- delegating to the composite/pointer helpers.
applyBrandType :: ApplyBrandFn C.Type
applyBrandType ty =
    case ty of
        C.CompositeType t -> C.CompositeType (applyBrandCompositeType t)
        C.VoidType        -> C.VoidType
        C.WordType t      -> C.WordType t
        C.PtrType t       -> C.PtrType (applyBrandPtrType t)
applyBrandFieldLocType :: ApplyBrandFn C.FieldLocType
applyBrandFieldLocType = \case
C.DataField l t -> C.DataField l t
C.PtrField i t -> C.PtrField i $ applyBrandPtrType t
C.HereField t -> C.HereField $ applyBrandCompositeType t
C.VoidField -> C.VoidField
-- | Generate @'Flat.Node'@s from a 'Stage1.Node' and its local name.
nestedToNodes :: NodeMap -> Word64 -> Name.LocalQ -> Stage1.Node -> [C.TypeParamRef Flat.Node] -> [Flat.Node]
nestedToNodes
nodeMap
thisMod
localName
Stage1.Node
{ nodeCommon = Stage1.NodeCommon{nodeId, nodeNested, nodeParams}
, nodeUnion
}
typeParams
=
mine ++ kids
where
myParams = typeParams ++ paramsToParams nodeMap nodeId (V.toList nodeParams)
kidsNS = Name.localQToNS localName
kids = concatMap
(\(unQ, node) ->
nestedToNodes nodeMap thisMod (Name.mkLocal kidsNS unQ) node myParams
)
nodeNested
name = Name.CapnpQ
{ local = localName
, fileId = thisMod
}
mine = case nodeUnion of
Stage1.NodeEnum enumerants ->
[ Flat.Node
{ name
, nodeId
, typeParams = myParams
, union_ = Flat.Enum enumerants
}
]
Stage1.NodeStruct struct ->
structToNodes nodeMap thisMod nodeId name kidsNS struct myParams
Stage1.NodeInterface iface ->
interfaceToNodes nodeMap thisMod nodeId name kidsNS iface myParams
Stage1.NodeConstant value ->
[ Flat.Node
{ name = Name.CapnpQ
{ local = Name.unQToLocal $ Name.valueName localName
, fileId = thisMod
}
, nodeId
, union_ = Flat.Constant
{ value = applyBrandValue $ C.bothMap
(\Stage1.Node{nodeCommon=Stage1.NodeCommon{nodeId}} -> nodeMap M.! nodeId)
value
}
, typeParams = myParams
}
]
Stage1.NodeOther ->
[ Flat.Node
{ name
, nodeId
, union_ = Flat.Other
, typeParams = myParams
}
]
interfaceToNodes :: NodeMap -> Word64 -> Word64 -> Name.CapnpQ -> Name.NS -> Stage1.Interface -> [C.TypeParamRef Flat.Node] -> [Flat.Node]
interfaceToNodes nodeMap thisMod nodeId name kidsNS Stage1.Interface{ methods, supers } typeParams =
let translateSuper =
applyBrandInterfaceType
. C.bothMap (\Stage1.Node{nodeCommon=Stage1.NodeCommon{nodeId}} -> nodeMap M.! nodeId)
prType = applyBrandCompositeType . C.bothMap
(\Stage1.Node{nodeCommon=Stage1.NodeCommon{nodeId}} ->
nodeMap M.! nodeId
)
in
Flat.Node
{ name
, nodeId
, union_ = Flat.Interface
{ methods =
[ Flat.Method
{ name
, paramType = prType paramType
, resultType = prType resultType
}
| Stage1.Method { name, paramType, resultType } <- methods
]
, supers = map translateSuper supers
}
, typeParams
}
: concatMap (\method -> methodToNodes nodeMap thisMod kidsNS method typeParams) methods
structToNodes :: NodeMap -> Word64 -> Word64 -> Name.CapnpQ -> Name.NS -> Stage1.Struct -> [C.TypeParamRef Flat.Node] -> [Flat.Node]
structToNodes
nodeMap
thisMod
nodeId
name
kidsNS
Stage1.Struct
{ fields
, isGroup
, dataWordCount
, pointerCount
, tagOffset
}
typeParams =
let
mkField fieldUnQ locType =
Flat.Field
{ fieldName = Name.mkSub name fieldUnQ
, fieldLocType = applyBrandFieldLocType $ C.bothMap
(\Stage1.Node{nodeCommon=Stage1.NodeCommon{nodeId}} -> nodeMap M.! nodeId)
locType
}
variants =
[ Flat.Variant
{ field = mkField fieldUnQ locType
, tagValue
}
| Stage1.Field{name=fieldUnQ, locType, tag=Just tagValue} <- fields
]
commonFields =
[ mkField fieldUnQ locType
| Stage1.Field{name=fieldUnQ, locType, tag=Nothing} <- fields
]
fieldNodes =
concatMap (fieldToNodes nodeMap thisMod kidsNS typeParams) fields
commonNode =
Flat.Node
{ name
, nodeId
, union_ = Flat.Struct
{ fields = commonFields
, union =
if null variants then
Nothing
else
Just Flat.Union
{ variants
, tagOffset
}
, isGroup
, dataWordCount
, pointerCount
}
, typeParams
}
in
commonNode : fieldNodes
-- | Generate nodes for a struct field.  Only a field holding an
-- anonymous in-place group produces nodes (the group's struct, named
-- after the field); every other field kind contributes nothing.
fieldToNodes :: NodeMap -> Word64 -> Name.NS -> [C.TypeParamRef Flat.Node] -> Stage1.Field -> [Flat.Node]
fieldToNodes nodeMap thisMod ns typeParams Stage1.Field{name, locType} = case locType of
    C.HereField
        (C.StructType
            struct@Stage1.Node
                { nodeUnion = Stage1.NodeStruct Stage1.Struct{isGroup=True}
                }
            _ -- Type parameters will be the same as the enclosing scope.
        ) -> nestedToNodes nodeMap thisMod (Name.mkLocal ns name) struct typeParams
    _ ->
        []
methodToNodes :: NodeMap -> Word64 -> Name.NS -> Stage1.Method -> [C.TypeParamRef Flat.Node] -> [Flat.Node]
methodToNodes nodeMap thisMod ns Stage1.Method{ name, paramType, resultType } typeParams =
-- If the parameter and result types are anonymous, we need to generate
-- structs for them.
let maybeAnon ty suffix =
case ty of
C.StructType node@Stage1.Node{nodeCommon=Stage1.NodeCommon{nodeParent=Nothing}} _ ->
let localName = Name.mkLocal ns name
kidsNS = Name.localQToNS localName
in
nestedToNodes nodeMap thisMod (Name.mkLocal kidsNS suffix) node typeParams
_ ->
[]
in
maybeAnon paramType "params" ++ maybeAnon resultType "results"
| zenhack/haskell-capnp | cmd/capnpc-haskell/Trans/Stage1ToFlat.hs | mit | 10,737 | 0 | 23 | 3,704 | 2,786 | 1,464 | 1,322 | 227 | 5 |
{-# OPTIONS_GHC -fno-warn-type-defaults #-}
module Main where
import Numeric.MCMC.Slice
-- | Negated two-dimensional Rosenbrock-style density, used as the
-- log-target for the slice sampler.
--
-- The original definition matched only @[x0, x1]@ and crashed with an
-- opaque non-exhaustive-pattern error on any other input; the fallback
-- clause keeps the function's "two coordinates only" contract but
-- reports it with a descriptive message.
rosenbrock :: [Double] -> Double
rosenbrock [x0, x1] = negate (5 * (x1 - x0 ^ 2) ^ 2 + 0.05 * (1 - x0) ^ 2)
rosenbrock xs =
  error ("rosenbrock: expected exactly two coordinates, got " ++ show (length xs))
main :: IO ()
main = withSystemRandom . asGenIO $ \gen -> do
_ <- chain 50 1 [0, 0] rosenbrock gen
mcmc 50 1 [0, 0] rosenbrock gen
| jtobin/speedy-slice | test/Rosenbrock.hs | mit | 336 | 0 | 14 | 74 | 153 | 83 | 70 | 9 | 1 |
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE OverloadedStrings #-}
-- |
-- Module : Text.Template.Inserts.Internal
-- Copyright : Joseph Abrahamson 2014
-- License : MIT
--
-- Maintainer : me@jspha.com
-- Stability : experimental
-- Portability : unknown
--
-- The internal workings of @inserts@. In most cases you don't want to be
-- digging around in this module, but it's useful if you want to somehow analyze
-- or transform the 'Template' type.
--
-- The usual caveat applies: this module is not a public API and is subject to
-- modification without warning.
module Text.Template.Inserts.Internal (
-- * Major types
Template (..), TemplateC (..),
-- ** The purely-Applicative 'Either'
Got (..), gotEither,
-- * Template functions
runTemplate, showTemplate, parseTemplate, templateParser
) where
import Control.Applicative
import Control.Monad
import qualified Data.Attoparsec.ByteString.Char8 as A
import qualified Data.ByteString as S
import qualified Data.ByteString.Builder as Sb
import qualified Data.ByteString.Lazy as Sl
import qualified Data.DList as Dl
import qualified Data.Foldable as F
import Data.Monoid
import Data.String
-- | 'Template' chunks are either 'Literal's or 'Hole's to be filled by a
-- runtime key lookup later.
data TemplateC = Literal Sb.Builder | Hole S.ByteString
instance Show TemplateC where
show (Literal builder) =
"Literal " ++ show (Sl.toStrict (Sb.toLazyByteString builder))
show (Hole bs) = "Hole " ++ show bs
-- Templates are just 'DList's of template chunks. It might be wise to
-- replace the 'DList' with 'Data.Sequence.Sequence' and then keep everything in
-- the Haskell Platform. It'd also allow for a public helper function which
-- takes 'Data.Map.Map's directly.
newtype Template =
Template { unTemplate :: Dl.DList TemplateC }
-- | /O(1)/ appends
instance Monoid Template where
mempty = Template mempty
Template t1 `mappend` Template t2 = Template (mappend t1 t2)
-- | 'Got' is the \"purely 'Applicative'\" 'Either' with
-- @[S.ByteString]@ as its 'Left' type. When both the left and
-- right arguments to '(<*>)' are 'Miss' their errors are `mappend`ed
-- together.
data Got a = Miss (Dl.DList S.ByteString) | Got a
deriving Functor
instance Applicative Got where
pure = Got
Miss e1 <*> Miss e2 = Miss (e1 <> e2)
Miss e <*> _ = Miss e
_ <*> Miss e = Miss e
Got f <*> Got x = Got (f x)
-- | Collapse a 'Got' into a standard 'Either', materialising the
-- accumulated misses as a plain list of keys.
gotEither :: Got a -> Either [S.ByteString] a
gotEither got =
  case got of
    Miss misses -> Left (Dl.toList misses)
    Got value   -> Right value
instance Monoid a => Monoid (Got a) where
mempty = pure mempty
mappend = liftA2 mappend
-- | Outputs either the successfully interpolated template or the list of
-- missing keys. For fast operation, try building the lookup function using
-- @unordered-containers@ @HashMap@s.
runTemplate
:: (S.ByteString -> Maybe S.ByteString)
-> Template -> Either [S.ByteString] Sl.ByteString
runTemplate lookMay =
gotEither . fmap Sb.toLazyByteString
. F.foldMap get
. unTemplate
where
get (Literal b) = pure b
get (Hole name) = Sb.byteString <$> look name
look :: S.ByteString -> Got S.ByteString
look s = maybe (Miss (pure s)) Got (lookMay s)
-- | We can build a lazy 'Sl.ByteString' much more quickly, so if you need
-- to quickly show your templates then this might be nicer than using 'show'
-- directly.  Holes are rendered back in their @{{name}}@ source form.
showTemplate :: Template -> Sl.ByteString
showTemplate t =
  case runTemplate (\s -> Just $ "{{" <> s <> "}}") t of
    -- The lookup function above succeeds for every key, so 'runTemplate'
    -- can never report missing keys here; include them in the message
    -- anyway so a violated invariant is diagnosable, not just "Impossible!".
    Left missing -> error ("showTemplate: impossible missing keys: " ++ show missing)
    Right s -> s
instance Show Template where
show = show . Sl.toStrict . showTemplate
-- | Try to parse a 'S.ByteString' as a 'Template'.
parseTemplate :: S.ByteString -> Either String Template
parseTemplate = A.parseOnly templateParser
-- | Template literals can be embedded directly in Haskell files.
instance IsString Template where
fromString s =
case parseTemplate (fromString s) of
Right a -> a
Left _ -> error ("Could not parse a Template: " ++ show s)
-- | Repeatedly run @gen@, folding each produced value into the
-- accumulator with @step@, until @gen@ fails.  The 'mplus' fallback
-- turns a failure of @gen@ into 'Nothing', which ends the loop and
-- yields the accumulated result.
foldlM :: MonadPlus f => (b -> a -> b) -> b -> f a -> f b
foldlM step acc gen =
  (liftM Just gen `mplus` return Nothing) >>=
    maybe (return acc) (\x -> foldlM step (step acc x) gen)
-- | Run @gen@ until it fails, mapping each result through @f@ and
-- combining the images monoidally, left to right.
foldMonoidM :: (MonadPlus f, Monoid b) => (a -> b) -> f a -> f b
foldMonoidM f = foldlM (\acc x -> acc <> f x) mempty
-- | An @attoparsec@ 'A.Parser' for 'Template's. This is useful if you'd
-- like to embed 'Template's into a more sophisticated, parseable type of
-- your own.
templateParser :: A.Parser Template
templateParser = foldMonoidM (Template . pure) templateChunk
where
templateChunk :: A.Parser TemplateC
templateChunk =
A.choice [ hole, noBraces ]
noBraces :: A.Parser TemplateC
noBraces =
Literal . Sb.byteString <$> A.takeWhile1 (not . (== '{'))
singleBrace :: A.Parser TemplateC
singleBrace =
let build c = Literal (Sb.char8 '{' <> Sb.char8 c)
in build <$> A.try (A.char '{' *> A.satisfy (not . (== '{')))
hole :: A.Parser TemplateC
hole =
"{{" *> A.skipSpace *>
(Hole <$> A.takeWhile1 (\c -> not (A.isSpace c || c == '}')))
<* A.skipSpace <* "}}"
| tel/inserts | src/Text/Template/Inserts/Internal.hs | mit | 5,317 | 0 | 20 | 1,221 | 1,316 | 696 | 620 | 90 | 2 |
module Main
where
import Data.Maybe
import Control.Monad
import EmpireZipper
import CreateTerrain
import Terrain
import GalaxyStats
import DataFunction
import ZipperGalaxy
import Galaxy
import World
import Civilization
import ZipperGalaxyUtils
import Math
import Utils
import qualified Data.Edison.Assoc.StandardMap as E
main :: IO ()
main = do
let g = testRandomGalaxy 22 24
let w = testRandomWorld g 22 5
putStrLn (galaxyStats g)
let tplc = testPlanetCreatingZipper
when (not tplc) $ putStrLn "!!! Warning: zipper test failed!"
case w of
Nothing -> putStrLn "Unable to create world?"
Just jw -> do
mainMenu jw
mainMenu :: World -> IO ()
mainMenu w = do
let g = galaxy w
i <- getMainMenuInput
case i of
Quit -> return ()
Life -> lifeMenu w >>= mainMenu
Zipper -> browseGalaxy' (empires w) (g, Nothing) >> mainMenu w
data MainMenuInput = Zipper | Life | Quit
lifeMenu :: World -> IO World
lifeMenu w = do
i <- getLifeInput w
case i of
QuitLife -> return w
Rounds v -> lifeMenu (timePass v w)
BrowseLife -> browseLife (newEmpireZipper w) >> lifeMenu w
browseLife :: EmpireZipper -> IO ()
browseLife z = do
putStrLn $ empireZipperInfo z
mz <- getBrowseLifeInput z
case mz of
Nothing -> return ()
Just nz -> browseLife nz
-- | Textual summary for the current focus of an 'EmpireZipper':
-- the whole empire list, one empire (with its colonies), or one colony.
empireZipperInfo :: EmpireZipper -> String
empireZipperInfo (emps, Nothing)            = lifeInfo emps
empireZipperInfo (_, Just (emp, Nothing))   = dispEmpire emp ++ "\n" ++ dispColoniesInfo emp
empireZipperInfo (_, Just (_, Just colony)) = dispColony colony
getBrowseLifeInput :: EmpireZipper -> IO (Maybe EmpireZipper)
getBrowseLifeInput z = do
putStrLn "Name of thing to inspect"
c <- getLine
if null c
then return Nothing
else if c == "up"
then return $ Just $ upEZ z
else case tryDownEZ c z of
Nothing -> do
putStrLn "Not found"
getBrowseLifeInput z
Just nz -> do
putStrLn "Found way down"
return $ Just nz
-- | Render every empire in the map as one line of text.
-- 'unlines' is the idiomatic spelling of @concat . map (++ "\n")@
-- and produces exactly the same string.
lifeInfo :: E.FM CivKey Empire -> String
lifeInfo = unlines . E.elements . E.map dispEmpire
data LifeInput = Rounds Int | BrowseLife | QuitLife
getLifeInput :: World -> IO LifeInput
getLifeInput w = do
putStrLn (lifeInfo (empires w))
putStrLn "Type in:"
putStrLn " - Number to run rounds"
putStrLn " - nothing to go back"
putStrLn " - anything else to browse life"
c <- getLine
case reads c of
[(num, _)] -> return (Rounds num)
_ -> return $ if null c then QuitLife else BrowseLife
-- | Run @i@ simulation rounds on the world; each round regenerates the
-- galaxy (factor 0.05) and advances every empire by one time unit.
-- Non-positive @i@ returns the world unchanged.
timePass :: Int -> World -> World
timePass i w | i <= 0 = w
             | otherwise =
    let newgal = regenerateGalaxy 0.05 (galaxy w)
        newemps = E.map (updateEmpire 1) (empires w)
    in timePass (i - 1) (w{galaxy = newgal, empires = newemps})
-- | Advance an empire by @t@ time units: every colony's population
-- grows via 'popgrow'; all other empire state is untouched.
updateEmpire :: Flt -> Empire -> Empire
updateEmpire t empire =
  empire{colonies = E.map (popgrow t) (colonies empire)}
-- | Grow a colony's population by 10% per unit of elapsed time,
-- rounding the result down to a whole number of inhabitants.
popgrow :: Flt -> Colony -> Colony
popgrow dt colony =
    colony{population = grown}
  where
    grown = floor (dt * 1.1 * fromIntegral (population colony))
getMainMenuInput :: IO MainMenuInput
getMainMenuInput = do
putStrLn "What would you like to do?"
putStrLn "l. Life"
putStrLn "g. Browse galaxy"
putStrLn "_. Quit"
s <- getLine
case s of
[] -> getMainMenuInput
(c:cs) -> return $ case c of
'l' -> Life
'g' -> Zipper
_ -> Quit
type ZipperInput a = Maybe (GalaxyZipper a)
getZipperInput :: (Show a) => (GalaxyZipper a -> String) -> GalaxyZipper a -> IO (ZipperInput a)
getZipperInput showfunc z = do
putStrLn $ showfunc z
c <- getLine
let num :: Maybe Int
num = liftM fst $ safeHead (reads c)
case num of
Nothing -> strInput c
Just n -> numInput n
where
strInput c = do
case length c of
1 -> if not (null c)
then case head c of
'q' -> return Nothing
's' -> putStrLn (galaxyStats (galaxyInZipper z)) >> getZipperInput showfunc z
_ -> getZipperInput showfunc z
else getZipperInput showfunc z
_ -> if c == ""
then return $ Just (up z)
else proceed c z
numInput n = do
case tryDownNum (n - 1) z of
Just nz -> return $ Just nz
Nothing -> return $ Just (up z)
proceed c z = case tryDown c z of
Just nz -> return $ Just nz
Nothing -> getZipperInput showfunc z
browseGalaxy :: (Show a) => GalaxyZipper a -> IO ()
browseGalaxy = someInput (getZipperInput (genInfo (\t -> show t)))
-- | Drive @step@ repeatedly, feeding each 'Just' result back in as the
-- next state; stop as soon as @step@ yields 'Nothing'.
someInput :: (a -> IO (Maybe a)) -> a -> IO ()
someInput step = go
  where
    go state = step state >>= maybe (return ()) go
browseGalaxy' :: E.FM CivKey Empire -> GalaxyZipper Terrain -> IO ()
browseGalaxy' e = someInput (getZipperInput (genInfo (terrainInfo e)))
testPlanetCreatingZipper :: Bool
testPlanetCreatingZipper =
let g = testRandomGalaxy 22 24
n1 = map planetname $ map fromJust $ map ZipperGalaxy.satelliteInZipper (habitablePlanetsZ g)
n2 = map planetname (filter sustainsLife (DataFunction.allBodies g))
in n1 == n2
| anttisalonen/starrover | src/Main.hs | mit | 5,371 | 0 | 21 | 1,617 | 1,888 | 912 | 976 | 155 | 9 |
-- | Number of nodes in a general (rose) tree: zero for the empty tree,
-- otherwise one for the root plus the counts of all subtrees.
nNodos :: ArbolG a -> Int
nNodos AVG = 0
nNodos (AG _ hijos) = 1 + naux hijos
-- | Total node count over a forest (list of trees).
naux :: [ArbolG a] -> Int
naux [] = 0
naux (arbol : resto) = nNodos arbol + naux resto
| josegury/HaskellFuntions | Arboles/Int-NNodos.hs | mit | 147 | 0 | 7 | 37 | 105 | 54 | 51 | 6 | 1 |
module Control.Disruptor.EventPoller where
import Control.Disruptor.DataProvider
data PollState = Processing | Gating | Idle
data EventPoller p sequencer sequence gatingSequence a = EventPoller
{ eventPollerDataProvider :: p
, eventPollerSequencer :: sequencer
, eventPollerSequence :: sequence
, eventPollerGatingSequence :: gatingSequence
}
eventPoller :: (DataProvider p a, GetSequence sequence, SetSequence sequence, GetSequence gatingSequence)
=> p
-> sequencer
-> sequence
-> gatingSequence
-> EventPoller p sequencer sequence gatingSequence a
eventPoller = EventPoller
type Handler e a = a -> SequenceId a -> Bool -> IO (Checked e ())
-- NOTE(review): unfinished stub — 'handler' and 'availableSequence' are
-- computed/bound but never used, and the function ends in 'undefined'.
-- Presumably it should feed the events in the range
-- (currentSequence, availableSequence] to 'handler' and advance the
-- poller's sequence; confirm against the LMAX Disruptor EventPoller API.
poll poller handler = do
  currentSequence <- get $ eventPollerSequence poller
  let nextSequence = succ currentSequence
  -- Ask the sequencer for the highest published sequence, bounded by the
  -- gating sequence.
  availableSequence <- (eventPollerSequencer poller) nextSequence =<< get (eventPollerGatingSequence poller)
  undefined
| iand675/disruptor | src/Control/Disruptor/EventPoller.hs | mit | 976 | 0 | 11 | 210 | 234 | 126 | 108 | 21 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-servicediscovery-privatednsnamespace.html
module Stratosphere.Resources.ServiceDiscoveryPrivateDnsNamespace where
import Stratosphere.ResourceImports
-- | Full data type definition for ServiceDiscoveryPrivateDnsNamespace. See
-- 'serviceDiscoveryPrivateDnsNamespace' for a more convenient constructor.
data ServiceDiscoveryPrivateDnsNamespace =
ServiceDiscoveryPrivateDnsNamespace
{ _serviceDiscoveryPrivateDnsNamespaceDescription :: Maybe (Val Text)
, _serviceDiscoveryPrivateDnsNamespaceName :: Val Text
, _serviceDiscoveryPrivateDnsNamespaceVpc :: Val Text
} deriving (Show, Eq)
instance ToResourceProperties ServiceDiscoveryPrivateDnsNamespace where
toResourceProperties ServiceDiscoveryPrivateDnsNamespace{..} =
ResourceProperties
{ resourcePropertiesType = "AWS::ServiceDiscovery::PrivateDnsNamespace"
, resourcePropertiesProperties =
hashMapFromList $ catMaybes
[ fmap (("Description",) . toJSON) _serviceDiscoveryPrivateDnsNamespaceDescription
, (Just . ("Name",) . toJSON) _serviceDiscoveryPrivateDnsNamespaceName
, (Just . ("Vpc",) . toJSON) _serviceDiscoveryPrivateDnsNamespaceVpc
]
}
-- | Constructor for 'ServiceDiscoveryPrivateDnsNamespace' containing required
-- fields as arguments.
serviceDiscoveryPrivateDnsNamespace
:: Val Text -- ^ 'sdprdnName'
-> Val Text -- ^ 'sdprdnVpc'
-> ServiceDiscoveryPrivateDnsNamespace
serviceDiscoveryPrivateDnsNamespace namearg vpcarg =
ServiceDiscoveryPrivateDnsNamespace
{ _serviceDiscoveryPrivateDnsNamespaceDescription = Nothing
, _serviceDiscoveryPrivateDnsNamespaceName = namearg
, _serviceDiscoveryPrivateDnsNamespaceVpc = vpcarg
}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-servicediscovery-privatednsnamespace.html#cfn-servicediscovery-privatednsnamespace-description
sdprdnDescription :: Lens' ServiceDiscoveryPrivateDnsNamespace (Maybe (Val Text))
sdprdnDescription = lens _serviceDiscoveryPrivateDnsNamespaceDescription (\s a -> s { _serviceDiscoveryPrivateDnsNamespaceDescription = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-servicediscovery-privatednsnamespace.html#cfn-servicediscovery-privatednsnamespace-name
sdprdnName :: Lens' ServiceDiscoveryPrivateDnsNamespace (Val Text)
sdprdnName = lens _serviceDiscoveryPrivateDnsNamespaceName (\s a -> s { _serviceDiscoveryPrivateDnsNamespaceName = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-servicediscovery-privatednsnamespace.html#cfn-servicediscovery-privatednsnamespace-vpc
sdprdnVpc :: Lens' ServiceDiscoveryPrivateDnsNamespace (Val Text)
sdprdnVpc = lens _serviceDiscoveryPrivateDnsNamespaceVpc (\s a -> s { _serviceDiscoveryPrivateDnsNamespaceVpc = a })
| frontrowed/stratosphere | library-gen/Stratosphere/Resources/ServiceDiscoveryPrivateDnsNamespace.hs | mit | 2,992 | 0 | 15 | 309 | 370 | 211 | 159 | 36 | 1 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Web.Common
where
import Common (loggerName)
import Config (ServerType(..), BaseConfig(..))
import Control.Applicative ((<$>))
import Control.Monad.Catch (MonadThrow(..), MonadCatch(..))
import Control.Monad.IO.Class (MonadIO(..))
import Control.Monad.Trans.Class (MonadTrans(..))
import Data.Aeson (Value(..))
import Data.Aeson.Types (FromJSON)
import Network.Wai (rawPathInfo, rawQueryString)
import System.Log.Logger (debugM)
import Text.Read (readMaybe)
import Web.Scotty.Internal.Types (ActionT(..))
import qualified Data.ByteString.Char8 as BS
import qualified Data.Text.Lazy as TL
import qualified Error as E
import qualified Web.Scotty.Trans as S
-- | Scotty application monad specialised to this project's 'E.Error' type.
type ScottyM m = S.ScottyT E.Error m
-- | Scotty per-request action monad specialised to this project's 'E.Error' type.
type ActionM m = ActionT E.Error m
-- Lift exception throwing through the scotty action transformer.
instance MonadThrow m => MonadThrow (ActionT E.Error m) where
  throwM = lift . throwM
-- Catch exceptions inside a scotty action by unwrapping and re-wrapping 'ActionT'.
instance MonadCatch m => MonadCatch (ActionT E.Error m) where
  catch (ActionT m) c = ActionT $ catch m (runAM . c)
-- | The pieces of the request URL handed to 'UrlBasedValue' builders.
data UrlInfo = UrlInfo
  { baseUrl :: String -- ^ scheme plus host, as produced by 'getBaseUrl'
  , path :: String    -- ^ raw request path (from WAI 'rawPathInfo')
  , query :: String   -- ^ raw query string (from WAI 'rawQueryString')
  }
-- | A JSON value that depends on where the service is reachable from.
type UrlBasedValue = UrlInfo -> Value
-- | Build an "http(s)://<host>" base URL from the request's Host header,
-- or 'Nothing' when the header is absent.
hostUrl :: MonadIO b => ServerType -> ActionM b (Maybe String)
hostUrl serverType = do
  maybeHost <- S.header "host"
  return $ fmap render maybeHost
  where
    scheme = case serverType of
               Plain -> "http"
               Tls   -> "https"
    render h = scheme ++ "://" ++ TL.unpack h
-- | The service's externally visible base URL: a configured endpoint wins;
-- otherwise fall back to the request's Host header, and fail the request
-- with 400 when neither is available.
getBaseUrl :: (MonadIO b, BaseConfig bc) => bc -> ActionM b String
getBaseUrl config =
  case getEndpoint config of
    Just endpoint -> return endpoint
    Nothing -> do
      maybeHost <- hostUrl (getServerType config)
      maybe (S.raise $ E.badRequest "Host header is required or endpoint should be set")
            return
            maybeHost
-- | Run a 'UrlBasedValue' builder against the current request's URL parts
-- and reply with the resulting JSON.
withHostUrl :: ( Functor b
               , MonadIO b
               , BaseConfig bc)
            => bc -> UrlBasedValue -> ActionM b ()
withHostUrl config buildValue = do
  pathString  <- fmap (BS.unpack . rawPathInfo) S.request
  queryString <- fmap (BS.unpack . rawQueryString) S.request
  url <- getBaseUrl config
  S.json (buildValue (UrlInfo url pathString queryString))
-- | Read a 'Read'-able identifier from a request parameter, raising a 400
-- error when the value does not parse.
parseId :: (MonadIO m, Read a) => TL.Text -> ActionM m a
parseId paramName = do
  raw <- S.param paramName
  maybe complain return (readMaybe raw)
  where
    complain = S.raise $ E.badRequest $
      "Failed to parse ObjectId from " ++ TL.unpack paramName
-- | Read an optional string parameter: 'Just' its value when present,
-- 'Nothing' when the parameter is missing (scotty's 'S.param' raises in
-- that case, which we rescue).
--
-- Cleanup: the original bound the rescued error to an unused name @msg@
-- (an -Wall warning) and wrote the obfuscated @(flip S.rescue) handler $ do@;
-- this is the same computation written directly.
parseMaybeString :: (MonadIO m) => TL.Text -> ActionM m (Maybe String)
parseMaybeString paramName =
    fetch `S.rescue` const (return Nothing)
  where
    fetch = do
      (value :: String) <- S.param paramName
      return $ Just value
-- | Decode the JSON request body, raising a 400 error with the decoder's
-- message on failure; the successfully parsed value is logged at debug level.
parseRequest :: ( Show a
                , FromJSON a
                , MonadIO b)
             => ActionM b a
parseRequest = do
  body <- S.jsonData `S.rescue` (S.raise . E.badRequest . E.message)
  liftIO $ debugM loggerName ("Parsed request body: " ++ show body)
  return body
| VictorDenisov/keystone | src/Web/Common.hs | gpl-2.0 | 2,963 | 0 | 17 | 707 | 1,015 | 538 | 477 | 77 | 3 |
module Language.Haskell.HsColour.Classify
( TokenType(..)
, tokenise
) where
import Data.Char (isSpace, isUpper, isLower, isDigit)
import Data.List
-- | Lex Haskell source code into an annotated token stream, without
-- discarding any characters or layout: lex ('chunk'), merge related
-- lexemes ('glue'), classify each one, then mark definition sites
-- ('markDefs').
tokenise :: String -> [(TokenType,String)]
tokenise = markDefs . map annotate . glue . chunk
  where annotate tok = (classify tok, tok)
-- | Mark definition sites: a 'Varid' at the start of a line (possibly after
-- a leading "> " pair, presumably for literate-Haskell lines) is re-tagged
-- as 'Definition'; the rest of each line is copied unchanged.
markDefs :: [(TokenType, String)] -> [(TokenType, String)]
markDefs [] = []
markDefs ((Varid, s) : rest) = (Definition, s) : continue rest
markDefs ((Varop, ">") : (Space, " ") : (Varid, d) : rest) =
  (Varop, ">") : (Space, " ") : (Definition, d) : continue rest
markDefs rest = continue rest
-- Copy tokens up to the next newline token, then restart 'markDefs' on the
-- following line.  NOTE(review): the case looks exhaustive in practice,
-- since 'span' leaves 'nextLine' either empty or starting with the newline
-- token, but GHC will still warn about the missing pattern.
continue rest
  = let (thisLine, nextLine) = span (/= (Space, "\n")) rest
    in
      case nextLine of
        [] -> thisLine
        ((Space, "\n"):nextLine') -> (thisLine ++ ((Space, "\n") : (markDefs nextLine')))
-- Basic Haskell lexing, except we keep whitespace.
-- Splits the input into lexemes: newlines, runs of horizontal space, nested
-- {- -} comments, and whatever 'Prelude.lex' recognises; "--"-style comments
-- are extended to the end of the line.
chunk :: String -> [String]
chunk [] = []
chunk ('\r':s) = chunk s -- get rid of DOS newline stuff
chunk ('\n':s) = "\n": chunk s
chunk (c:s) | isLinearSpace c
            = (c:ss): chunk rest where (ss,rest) = span isLinearSpace s
chunk ('{':'-':s) = let (com,s') = nestcomment 0 s
                    in ('{':'-':com) : chunk s'
chunk s = case Prelude.lex s of
            -- head/tail are safe here: the earlier equations guarantee s
            -- is non-empty when this fallback fires
            [] -> [head s]: chunk (tail s) -- e.g. inside comment
            ((tok@('-':'-':_),rest):_)
                | all (=='-') tok -> (tok++com): chunk s'
                where (com,s') = eolcomment rest
            ((tok,rest):_) -> tok: chunk rest
-- Horizontal (non-newline) whitespace characters.
isLinearSpace c = c `elem` " \t\f" -- " \t\xa0"
-- Glue sequences of tokens into more useful blobs:
-- backquoted varids, end-of-line comments, tuple constructors like "(,,)",
-- the empty-list constructor "[]", and CPP lines starting with '#'.
--glue (q:".":n:rest) | Char.isUpper (head q) -- qualified names
-- = glue ((q++"."++n): rest)
glue ("`":rest) = -- `varid` -> varop
  case glue rest of
    -- NOTE(review): the inner 'rest' deliberately shadows the outer one
    (qn:"`":rest) -> ("`"++qn++"`"): glue rest
    _ -> "`": glue rest
glue (s:ss) | all (=='-') s && length s >=2 -- eol comment
            = (s++concat c): glue rest
  where (c,rest) = break ('\n'`elem`) ss
glue ("(":ss) = case rest of
                  ")":rest -> ("(" ++ concat tuple ++ ")") : glue rest
                  _ -> "(" : glue ss
  where (tuple,rest) = span (==",") ss
glue ("[":"]":ss) = "[]" : glue ss
glue ("\n":"#":ss)= "\n" : ('#':concat line) : glue rest
  where (line,rest) = break ('\n'`elem`) ss
--glue ("{":"-":ss) = ("{-"++c): glue rest -- nested comment
-- where (c,rest) = nestcomment 0 ss
glue (s:ss) = s: glue ss
glue [] = []
-- Deal with comments.
-- | Consume a nested {- -} comment body given the current nesting depth;
-- returns (comment text consumed, remaining input).  The depth guard
-- @n >= 0@ makes the equations mutually exclusive; unterminated comments
-- simply consume the rest of the input.
nestcomment :: Int -> String -> (String,String)
nestcomment n ('{':'-':ss) | n>=0 = (("{-"++cs),rm)
  where (cs,rm) = nestcomment (n+1) ss
nestcomment n ('-':'}':ss) | n>0 = (("-}"++cs),rm)
  where (cs,rm) = nestcomment (n-1) ss
nestcomment n ('-':'}':ss) | n==0 = ("-}",ss)
nestcomment n (s:ss) | n>=0 = ((s:cs),rm)
  where (cs,rm) = nestcomment n ss
nestcomment n [] = ([],[])
-- | Split an end-of-line comment from the rest of the input: everything up
-- to (but not including) the next newline goes left, with any '\r'
-- characters dropped; the remainder, starting at the newline, goes right.
eolcomment :: String -> (String,String)
eolcomment input = case input of
  s@('\n':_) -> ([], s)
  '\r' : more -> eolcomment more
  c : more -> let (body, rest) = eolcomment more in (c : body, rest)
  [] -> ([], [])
-- | Classification of tokens as lexical entities.
-- 'Definition' is not produced by 'classify' itself but by 'markDefs';
-- 'Cpp' covers preprocessor lines starting with '#'.
data TokenType =
  Space | Keyword | Keyglyph | Layout | Comment | Conid | Varid |
  Conop | Varop | String | Char | Number | Cpp | Error |
  Definition
  deriving (Eq,Show)
-- | Classify a single lexeme.  The guard order matters: comment shapes are
-- recognised before keywords/operators, and keyword lookup happens before
-- the generic constructor/variable/symbol fallbacks.
classify :: String -> TokenType
classify s@(h:t)
    | isSpace h = Space
    | all (=='-') s = Comment
    | "--" `isPrefixOf` s
      && any isSpace s = Comment -- not fully correct
    | "{-" `isPrefixOf` s = Comment
    | s `elem` keywords = Keyword
    | s `elem` keyglyphs = Keyglyph
    | s `elem` layoutchars = Layout
    | isUpper h = Conid
    | s == "[]" = Conid -- the glued empty-list constructor
    | h == '(' && isTupleTail t = Conid -- tuple constructors like "(,,)"
    | h == '#' = Cpp
    | isLower h = Varid
    | h `elem` symbols = Varop
    | h==':' = Conop
    | h=='`' = Varop
    | h=='"' = String
    | h=='\'' = Char
    | isDigit h = Number
    | otherwise = Error
-- Only reachable for the empty string.
classify _ = Space
-- | Recognise the tail of a tuple-constructor lexeme: zero or more commas
-- followed by a single closing parenthesis, e.g. ")", ",)", ",,)".
isTupleTail s = not (null s) && all (== ',') (init s) && last s == ')'
-- Haskell keywords (plus FFI-related words and the wildcard "_"),
-- recognised by 'classify' as 'Keyword'.
keywords =
  ["case","class","data","default","deriving","do","else","forall"
  ,"if","import","in","infix","infixl","infixr","instance","let","module"
  ,"newtype","of","qualified","then","type","where","_"
  ,"foreign","ccall","as","safe","unsafe"]
-- Reserved operators and brackets, recognised as 'Keyglyph'.
keyglyphs =
  ["..","::","=","\\","|","<-","->","@","~","=>","[","]"]
-- Single-character layout/punctuation tokens, recognised as 'Layout'.
layoutchars =
  map (:[]) ";{}(),"
-- Characters that may start a variable operator ('Varop').
symbols =
  "!#$%&*+./<=>?@\\^|-~"
| crackleware/hscolour | Language/Haskell/HsColour/Classify.hs | gpl-2.0 | 4,980 | 0 | 15 | 1,525 | 2,049 | 1,109 | 940 | 107 | 3 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE DeriveFoldable
, DeriveFunctor
, DeriveTraversable
, FlexibleContexts
, FlexibleInstances
, OverloadedStrings
, TypeSynonymInstances
#-}
module Model.Expression.Evaluation ( evaluate
) where
import Control.Monad(msum)
import Control.Monad.Reader(Reader, ask, runReader)
import qualified Data.Text as T
import TextShow(TextShow(showt))
import Model.Expression
import Model.Expression.RecursionSchemas
import Model.Row
import Debug.Trace
-- | Evaluate an expression against a row, with the given data sources
-- available for 'FromSource' lookups.
evaluate :: Row -> [DataSource] -> Expression -> Field
evaluate row sources expr = runReader (eval expr) (row, sources)

-- | Evaluation environment: the current row plus all data sources.
type Eval = Reader (Row, [DataSource])
-- | Evaluate an expression bottom-up via a paramorphism: each node sees both
-- the original sub-expressions and their evaluation actions.
eval :: Expression -> Eval Field
eval = para ev
  where
    ev :: RAlgebra Node (Eval Field)
    ev (Position n) = evalIndex n $ mkError $ "Error en $" `T.append` showt (n + 1)
    ev (NamedPosition name Nothing) = return . mkError $ "Expresión con variable desconocida: " `T.append` name
    ev (NamedPosition name (Just n)) = evalIndex n $ mkError $ "Error en " `T.append` name
    ev (Constant f) = return f
    ev (Unary info (_, v)) = opU info <$> v
    ev (Binary info (_, v1) (_, v2)) = opB info <$> v1 <*> v2
    ev (PrefixBinary info (_, v1) (_, v2)) = opPB info <$> v1 <*> v2
    ev (Cast ft (_, v)) = convert ft <$> v
    ev (Ternary (_, v1) (_, v2) (_, v3)) = ternary <$> v1 <*> v2 <*> v3
    -- ErrorCheck: yield the fallback v2 only when v1 evaluates to an error
    ev (ErrorCheck (_, v1) (_, v2)) =
        v1 >>= (\case False -> v1
                      True -> v2) . isError
    -- FromSource needs the original sub-expressions (not their values),
    -- hence the paramorphism
    ev (FromSource (si, _) (v, _) (n1, _) (n2, _)) = evalFromSource si v n1 n2
    ev (Error m) = return $ mkError m
-- | Look up field @n@ of the current row; yields @inError@ when the field
-- exists but holds an error value, and a bad-index error when @n@ is out of
-- range.
evalIndex :: Int -> Field -> Eval Field
evalIndex n inError = do
  (r, _) <- ask
  return $ case recover r n (mkError $ "Índice erróneo " `T.append` showt (n + 1)) of
             Right v -> if isError v
                        then inError
                        else v
             Left e -> e
-- | Safe list indexing: @recover xs n err@ yields the element at position
-- @n@, or @Left err@ when the index is out of range (negative indices also
-- end up as @Left err@ once the list is exhausted).
recover :: [a] -> Int -> e -> Either e a
recover list idx err = go list idx
  where
    go [] _ = Left err
    go (y:_) 0 = Right y
    go (_:ys) k = go ys (k - 1)
-- | Evaluate a FROM-SOURCE lookup: pick the data source selected by @si@,
-- find the first row whose @n1@-value equals the value of @val@, and return
-- that row's @n2@-value; errors are reported as error fields.
evalFromSource :: Expression -> Expression -> Expression -> Expression -> Eval Field
evalFromSource si val n1 n2 = do
  source <- toInt <$> eval si
  v1 <- eval val
  (_, dss) <- ask
  let t = do
        ds <- recover dss source $ "Fuente errónea: " `T.append` toFormula si
        -- msum keeps the first matching row, if any; lookup rows are
        -- evaluated without access to the data sources themselves
        case msum [
               if v1 == evaluate row [] n1
               then Just $ evaluate row [] n2
               else Nothing
             | row <- ds
             ] of
          Just v -> Right v
          Nothing -> Left $ "No encontrado " `T.append` toString v1
  return $ case t of
             Right v -> v
             Left e -> mkError e
| jvilar/hrows | lib/Model/Expression/Evaluation.hs | gpl-2.0 | 2,826 | 0 | 18 | 947 | 1,067 | 560 | 507 | 66 | 13 |
-- Pattern matching plus guards: a sign function, 0 for zero,
-- -1 for negatives and 1 for positives.
f3 0 = 0
f3 n
  | n < 0 = -1
  | otherwise = 1
| rdnetto/H2V | H2V/tests/pattern_match.hs | gpl-2.0 | 74 | 0 | 6 | 32 | 32 | 17 | 15 | 4 | 2 |
{-# LANGUAGE MultiParamTypeClasses, TypeSynonymInstances, FlexibleInstances #-}
{- |
Module : ./OWL2/CASL2OWL.hs
Description : Comorphism from CASL to OWL2
Copyright : (c) C. Maeder, DFKI GmbH 2012
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Christian.Maeder@dfki.de
Stability : provisional
Portability : non-portable (via Logic.Logic)
-}
module OWL2.CASL2OWL where
import Logic.Logic as Logic
import Logic.Comorphism
import Common.AS_Annotation
import Common.DocUtils
import Common.Result
import Common.Id
import Common.ProofTree
import Common.Utils
import qualified Common.Lib.MapSet as MapSet
import qualified Data.Set as Set
import qualified Data.Map as Map
import Data.List
import Data.Maybe
-- OWL = codomain
import OWL2.Logic_OWL2
import OWL2.MS
import OWL2.AS
import OWL2.ProfilesAndSublogics
import OWL2.ManchesterPrint ()
import OWL2.Morphism
import OWL2.Symbols
import OWL2.Sign as OS
import OWL2.Translate
-- CASL = domain
import CASL.Logic_CASL
import CASL.AS_Basic_CASL
import CASL.Disambiguate
import CASL.Sign as CS
import qualified CASL.MapSentence as MapSen
import CASL.Morphism
import CASL.SimplifySen
import CASL.Sublogic
import CASL.ToDoc
import CASL.Overload
-- | Singleton tag naming the CASL-to-OWL2 comorphism.
data CASL2OWL = CASL2OWL deriving Show

instance Language CASL2OWL
-- | The comorphism instance pairs every logic component of the CASL domain
-- with its OWL2 counterpart (see the per-line comments).
instance Comorphism
    CASL2OWL -- comorphism
    CASL -- lid domain
    CASL_Sublogics -- sublogics domain
    CASLBasicSpec -- Basic spec domain
    CASLFORMULA -- sentence domain
    SYMB_ITEMS -- symbol items domain
    SYMB_MAP_ITEMS -- symbol map items domain
    CASLSign -- signature domain
    CASLMor -- morphism domain
    Symbol -- symbol domain
    RawSymbol -- rawsymbol domain
    ProofTree -- proof tree domain
    OWL2 -- lid codomain
    ProfSub -- sublogics codomain
    OntologyDocument -- Basic spec codomain
    Axiom -- sentence codomain
    SymbItems -- symbol items codomain
    SymbMapItems -- symbol map items codomain
    OS.Sign -- signature codomain
    OWLMorphism -- morphism codomain
    Entity -- symbol codomain
    RawSymb -- rawsymbol codomain
    ProofTree -- proof tree codomain
    where
    sourceLogic CASL2OWL = CASL
    -- only locally filtered subsorting is supported on the CASL side
    sourceSublogic CASL2OWL = caslTop
      { sub_features = LocFilSub }
    targetLogic CASL2OWL = OWL2
    mapSublogic CASL2OWL _ = Just topS
    map_theory CASL2OWL = mapTheory
{- names must be disambiguated as is done in CASL.Qualify or SuleCFOL2SoftFOL.
Ops or preds in the overload relation denote the same objectProperty!
-}
-- | A CASL identifier as an OWL class expression.
toC :: Id -> ClassExpression
toC = Expression . idToIRI

-- | A CASL identifier plus argument position as a numbered object property.
toO :: Id -> Int -> ObjectPropertyExpression
toO i = ObjectProp . idToNumberedIRI i

-- | A class expression with empty annotations.
toACE :: Id -> (Annotations, ClassExpression)
toACE i = ([], toC i)

-- | A singleton expression list-frame bit for the given identifier.
toEBit :: Id -> ListFrameBit
toEBit i = ExpressionBit [toACE i]

-- | A domain- or range-relation frame bit pointing at the given class.
mkDR :: DomainOrRange -> Id -> FrameBit
mkDR dr = ListFrameBit (Just $ DRRelation dr) . toEBit

-- | Build a named axiom about the numbered object property @toO i n@;
-- negative @n@ suppresses the numeric suffix in the sentence name.
mkObjEnt :: String -> Id -> Int -> String -> FrameBit -> Named Axiom
mkObjEnt s i n m = makeNamed (s ++ show i
  ++ (if n < 0 then "" else '_' : show n) ++ m) . PlainAxiom
  (ObjectEntity $ toO i n)

-- | A subclass axiom stating that class @i@ is a subclass of all the given
-- class expressions.
toSubClass :: Id -> [ClassExpression] -> Axiom
toSubClass i = PlainAxiom (ClassEntity $ toC i) . ListFrameBit (Just SubClass)
  . ExpressionBit . map (\ c -> ([], c))
-- | Axioms encoding a multi-argument operation or predicate @i@: one
-- subclass axiom relating @i@ to its (numbered) argument properties, plus
-- domain/range axioms for each argument position.  @mres@ is the result
-- sort for operations and 'Nothing' for predicates.
getPropSens :: Id -> [SORT] -> Maybe SORT -> [Named Axiom]
getPropSens i args mres = let
    ncs = number args
    opOrPred = if isJust mres then "op " else "pred "
  in makeNamed (opOrPred ++ show i)
       (toSubClass i [ObjectJunction IntersectionOf
         $ maybeToList (fmap toC mres)
         ++ map (\ (a, n) -> ObjectValuesFrom SomeValuesFrom
             (toO i n) $ toC a) ncs])
     : concatMap (\ (a, n) -> let mki = mkObjEnt opOrPred i n in
         maybeToList (fmap (mki " domain" . mkDR ADomain) mres)
         ++ [mki " range" $ mkDR ARange a]) ncs
-- | Collect the numbered property IRIs for every key of the map, using one
-- representative element per key to determine the argument count via @f@.
-- NOTE(review): 'Map.foldWithKey' is deprecated in newer containers;
-- 'Map.foldrWithKey' is the drop-in replacement — confirm the supported
-- containers version before switching.
getPropNames :: (a -> [b]) -> MapSet.MapSet Id a -> Set.Set QName
getPropNames f = Map.foldWithKey (\ i s l ->
    case Set.toList s of
      [] -> l
      h : _ -> Set.union l $ Set.fromList
        $ map (idToNumberedIRI i . snd) $ number $ f h)
  Set.empty . MapSet.toMap
-- | Argument-wise common supersorts of several profiles: transpose the
-- argument lists and keep one maximal common supersort per position;
-- fails when some position has no common supersort.
commonType :: CS.Sign f e -> [[SORT]] -> Result [SORT]
commonType csig l =
  case map (keepMaximals csig) $ transpose l of
    hl | all (not . null) hl -> return $ map head hl
    _ -> fail $ "no common types for " ++ show l
-- | A single total operation type covering all given overloads; the result
-- sort is put first so it shares the 'commonType' computation with the
-- argument sorts.
commonOpType :: CS.Sign f e -> Set.Set OpType -> Result OpType
commonOpType csig os = do
  l <- commonType csig $ map (\ o -> opRes o : opArgs o) $ Set.toList os
  case l of
    r : args -> return $ mkTotOpType args r
    _ -> fail $ "no common types for " ++ showDoc os ""
-- | A single predicate type covering all given overloads, computed
-- argument-wise via 'commonType'; fails on an empty argument list.
commonPredType :: CS.Sign f e -> Set.Set PredType -> Result PredType
commonPredType sig preds = do
  argSorts <- commonType sig (map predArgs (Set.toList preds))
  if null argSorts
    then fail ("no common types for " ++ showDoc preds "")
    else return (PredType argSorts)
-- | All common supersorts (each sort counts as its own supersort) of a list
-- of sorts; the empty list yields the empty set.
getCommonSupers :: CS.Sign f e -> [SORT] -> Set.Set SORT
getCommonSupers csig s = let supers t = Set.insert t $ supersortsOf t csig in
  if null s then Set.empty else foldr1 Set.intersection $ map supers s

-- | Only the maximal elements among the common supersorts.
keepMaximals :: CS.Sign f e -> [SORT] -> [SORT]
keepMaximals csig = keepMinimals1 True csig id . Set.toList
  . getCommonSupers csig
{- | Translate a CASL signature into an OWL signature plus named axioms
encoding the sort hierarchy and the typing of operations and predicates:
constants become individuals, single-argument operations and binary
predicates become object properties, unary predicates become subclasses,
and everything else gets per-argument numbered properties. -}
mapSign :: CS.Sign f e -> Result (OS.Sign, [Named Axiom])
mapSign csig = let
  esorts = emptySortSet csig
  srel = sortRel csig
  -- equivalence classes and proper subsort pairs of the sort relation
  (eqs, subss) = eqAndSubsorts False srel
  (isos, rels) = singleAndRelatedSorts srel
  -- pairwise disjointness axioms between unrelated (groups of) sorts
  disjSorts = concatMap (\ l -> case l of
    _ : _ : _ -> [makeNamed ("disjoint " ++ show l) $ mkMisc Disjoint l]
    _ -> []) . sequence $ map (: []) isos ++ map (keepMaximals csig) rels
  ss = sortSet csig
  -- sorts not declared in the possibly-empty sort set get a witness
  nsorts = Set.difference ss esorts
  mkMisc ed l = PlainAxiom (Misc []) $ ListFrameBit (Just $ EDRelation ed)
    $ ExpressionBit $ map toACE l
  eqSorts = map (\ es -> makeNamed ("equal sorts " ++ show es)
    $ mkMisc Equivalent es) eqs
  subSens = map (\ (s, ts) -> makeNamed
    ("subsort " ++ show s ++ " of " ++ show ts) $ toSC s ts) subss
  nonEmptySens = map (\ s -> mkIndi True s [s]) $ Set.toList nsorts
  sortSens = eqSorts ++ disjSorts ++ subSens ++ nonEmptySens
  mkIndi b i ts = makeNamed
    ("individual " ++ show i ++ " of class " ++ showDoc ts "")
    $ PlainAxiom (SimpleEntity $ mkEntity NamedIndividual
      $ idToAnonIRI b i)
    $ ListFrameBit (Just Types) $ ExpressionBit
    $ map toACE ts
  om = opMap csig
  keepMaxs = keepMaximals csig
  mk s i = mkObjEnt s i (-1)
  toSC i = toSubClass i . map toC
  toIris = Set.map idToIRI
  -- split operations: constants / single-argument / the rest
  (cs, ncs) = MapSet.partition (null . opArgs) om
  (sos, os) = MapSet.partition isSingleArgOp ncs
  -- split predicates: propositional / unary / binary / the rest
  (props, nps) = MapSet.partition (null . predArgs) pm
  (sps, rps') = MapSet.partition (isSingle . predArgs) nps
  (bps, ps) = MapSet.partition isBinPredType rps'
  pm = predMap csig
  osig = OS.emptySign
    { concepts = toIris $ Set.unions
        [ ss, MapSet.keysSet sps, MapSet.keysSet props
        , MapSet.keysSet os, MapSet.keysSet ps]
    , objectProperties = Set.unions
        [ toIris $ Set.union (MapSet.keysSet sos) $ MapSet.keysSet bps
        , getPropNames predArgs ps, getPropNames opArgs os ]
    , individuals = toIris $ MapSet.keysSet cs
    }
  in do
  -- constants become individuals typed by their minimal result sorts
  s1 <- Map.foldWithKey (\ i s ml -> do
      l <- ml
      return $ mkIndi False i
        (keepMinimals csig id $ map opRes $ Set.toList s) : l)
    (return sortSens) (MapSet.toMap cs)
  -- single-argument operations become functional object properties
  s2 <- Map.foldWithKey (\ i s ml -> do
      l <- ml
      let sl = Set.toList s
          mki = mk "plain function " i
      case (keepMaxs $ concatMap opArgs sl, keepMaxs $ map opRes sl) of
        ([a], [r]) -> return
          $ [ mki " character" $ ListFrameBit Nothing
              $ ObjectCharacteristics [([], Functional)]
            , mki " domain" $ mkDR ADomain a, mki " range" $ mkDR ARange r]
          ++ l
        (as, rs) -> fail $ "CASL2OWL.mapSign2: " ++ show i ++ " args: "
          ++ show as ++ " resulttypes: " ++ show rs)
    (return s1) (MapSet.toMap sos)
  -- binary predicates become object properties with domain and range
  s3 <- Map.foldWithKey (\ i s ml -> do
      l <- ml
      let mkp = mk "binary predicate " i
      pTy <- commonPredType csig s
      case predArgs pTy of
        [a, r] -> return
          $ [mkp " domain" $ mkDR ADomain a, mkp " range" $ mkDR ARange r]
          ++ l
        ts -> fail $ "CASL2OWL.mapSign3: " ++ show i ++ " types: " ++ show ts)
    (return s2) (MapSet.toMap bps)
  -- unary predicates become subclasses of their argument sort
  s4 <- Map.foldWithKey (\ i s ml ->
      case keepMaxs $ concatMap predArgs $ Set.toList s of
        [r] -> do
          l <- ml
          return $ makeNamed ("plain predicate " ++ show i) (toSC i [r]) : l
        ts -> fail $ "CASL2OWL.mapSign4: " ++ show i ++ " types: " ++ show ts)
    (return s3) (MapSet.toMap sps)
  -- remaining operations: one numbered property per argument position
  s5 <- Map.foldWithKey (\ i s ml -> do
      l <- ml
      ot <- commonOpType csig s
      return $ getPropSens i (opArgs ot) (Just $ opRes ot) ++ l
      ) (return s4) (MapSet.toMap os)
  -- remaining predicates: likewise, without a result sort
  s6 <- Map.foldWithKey (\ i s ml -> do
      l <- ml
      pt <- commonPredType csig s
      return $ getPropSens i (predArgs pt) Nothing ++ l
      ) (return s5) (MapSet.toMap ps)
  return (osig, s6)
{- binary predicates and single argument functions should become
objectProperties.
Serge also turned constructors into concepts.
How to treat multi-argument predicates and functions?
Maybe create tuple concepts?
-}
-- | Translate a CASL theory: disambiguate the signature, map it via
-- 'mapSign', translate sort-generation axioms, and report all other
-- sentences as hints ("not translated").
mapTheory :: (FormExtension f, TermExtension f)
  => (CS.Sign f e, [Named (FORMULA f)]) -> Result (OS.Sign, [Named Axiom])
mapTheory (sig, sens) = do
  let mor = disambigSig sig
      tar = mtarget mor
      -- rename sentences along the disambiguation morphism
      nss = map (mapNamed $ MapSen.mapMorphForm (const id) mor) sens
  (s, l) <- mapSign tar
  ll <- mapM (\ ns -> case sentence ns of
      Sort_gen_ax cs b -> return $ mapSortGenAx cs b
      _ -> flip (hint []) nullRange
        . ("not translated\n" ++) . show . printTheoryFormula
        $ mapNamed (simplifySen (const return) (const id) tar) ns
      ) nss
  return (s, l ++ concat ll)
-- | Translate a sort-generation constraint: each generated sort becomes a
-- class that is the (disjoint, when @b@ holds) union of its constructor
-- images.  NOTE(review): the inner lambda only matches 'Qual_op_name';
-- presumably 'recoverSortGen' guarantees qualified names here — confirm.
mapSortGenAx :: [Constraint] -> Bool -> [Named Axiom]
mapSortGenAx cs b = map (\ (s, as) ->
  let is = map (\ (Qual_op_name n ty _) -> case args_OP_TYPE ty of
        -- nullary constructor: a one-element enumeration
        [] -> ObjectOneOf [idToIRI n]
        -- unary constructor: existential over its object property
        [_] -> ObjectValuesFrom SomeValuesFrom (toO n (-1)) $ toC s
        _ -> toC n) as
  in makeNamed ("generated " ++ show s)
    $ PlainAxiom (ClassEntity $ toC s)
    $ if b && not (isSingle is) then AnnFrameBit [] $ ClassDisjointUnion is
      else ListFrameBit (Just $ EDRelation Equivalent)
        $ ExpressionBit [([], case is of
            [i] -> i
            _ -> ObjectJunction UnionOf is)])
  $ recoverSortGen cs
| gnn/Hets | OWL2/CASL2OWL.hs | gpl-2.0 | 10,667 | 0 | 25 | 2,788 | 3,745 | 1,891 | 1,854 | 244 | 5 |
{- |
Module : $Header$
Description : Normalization w.r.t. associativity and commutativity
Copyright : (c) Immanuel Normann, Uni Bremen 2007
License : GPLv2 or higher, see LICENSE.txt
Maintainer : inormann@jacobs-university.de
Stability : provisional
Portability : portable
-}
module Search.Common.ACINormalization where
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Data.List as List
import Search.Utils.List
import Search.Utils.SetMap
import Text.ParserCombinators.Parsec
-- | Terms over symbols of type @t@: 'ACI' nodes are associative,
-- commutative and idempotent (hence a set of children), 'Sequence' nodes
-- are ordered, and 'Number' is a renamed ('replace'd) symbol position.
data Term t = ACI (Set.Set (Term t)) | Sequence [Term t] | Symbol t | Number Int deriving (Eq,Ord)
{-----------------------------------------------------------------------
-- Show
-}
-- Render ACI children in braces and sequences in brackets.
instance (Show t) => Show (Term t) where
  show (Number n) = show n
  show (Symbol s) = show s
  show (Sequence ts) = "[" ++ (mapShow "," ts) ++ "]"
  show (ACI ts) = "{" ++ (mapShow "," (Set.toList ts)) ++ "}"
{-----------------------------------------------------------------------
-- Parse
-}
-- | Parse a symbol: one or more letters followed by optional digits.
atom :: Parser (Term String)
atom = do ls <- many1 letter
          ds <- many digit
          return (Symbol (ls ++ ds))

-- | Parse a term: "[t1,...,tn]" for sequences, "{t1,...,tn}" for ACI
-- nodes, or a bare 'atom'.
aciParser :: Parser (Term String)
aciParser =
    do char '['
       children <- sepBy1 aciParser (char ',')
       char ']'
       return (Sequence children)
    <|>
    do char '{'
       children <- sepBy1 aciParser (char ',')
       char '}'
       return (ACI (Set.fromList children))
    <|>
    do a <- atom
       return a
-- | Run a parser over a string and print either the parse error or the
-- result (debugging helper).
run :: Show a => Parser a -> String -> IO ()
run p input =
  case parse p "" input of
    Left err -> putStr "parse error at " >> print err
    Right x -> print x
{-----------------------------------------------------------------------
-- Normalize
-}
-- | Structural ordering on terms used for normalization: collections are
-- compared by size first, then element-wise; all symbols compare 'EQ'
-- (only the shape matters, not the symbol names), and numbers compare by
-- value.  Mixed constructors order as Number < Symbol < Sequence < ACI.
compareTerms :: Term t -> Term t -> Ordering
compareTerms (ACI ts1) (ACI ts2) =
    case compare (Set.size ts1) (Set.size ts2)
    of LT -> LT
       GT -> GT
       EQ -> compareListOfTerms (List.sortBy compareTerms (Set.toList ts1)) (List.sortBy compareTerms (Set.toList ts2))
compareTerms (Sequence ts1) (Sequence ts2) =
    case compare (length ts1) (length ts2)
    of LT -> LT
       GT -> GT
       EQ -> compareListOfTerms ts1 ts2
compareTerms (Symbol _) (Symbol _) = EQ -- symbols are interchangeable here
compareTerms (Number m) (Number n) = compare m n
compareTerms (Number _) _ = LT
compareTerms (Symbol _) (Number _) = GT
compareTerms (Symbol _) _ = LT
compareTerms (Sequence _) (ACI _) = LT
compareTerms (Sequence _) _ = GT
compareTerms (ACI _) _ = GT
-- Lexicographic extension of 'compareTerms'.  NOTE(review): only defined
-- for lists of equal length, which both call sites guarantee by comparing
-- sizes first.
compareListOfTerms [] [] = EQ
compareListOfTerms (t1:t1s) (t2:t2s) =
    if (termOrd == EQ) then (compareListOfTerms t1s t2s) else termOrd
    where termOrd = compareTerms t1 t2
{- |
(replace symbList term) renames each symbol in the term by its position number in symbList
if the symbol is member of symbolList otherwise the symbol remains the same; e.g.:
replace ["b","a","c"] {["Q",{"a","b"}],["Q",{"c","b"}]}
-> {["Q",{0,1}],["Q",{0,2}]}
-}
-- | Rename every symbol occurring in @ss@ to its position number in @ss@
-- (see the example in the comment above); symbols not in @ss@ stay as they
-- are.
replace :: (Eq s,Ord s) => [s] -> Term s -> Term s
replace ss (ACI terms) = ACI (Set.map (replace ss) terms)
replace ss (Sequence terms) = Sequence (map (replace ss) terms)
replace _ (Number n) = Number n
replace ss (Symbol s) =
    case lookUpFirstEntry s ss
    of (Just n) -> Number n
       Nothing -> Symbol s
{- |
(hasSymbol term) is true iff any subterm contains a symbol.
-}
-- | True iff the term still contains at least one 'Symbol' anywhere
-- (i.e. not every symbol has been 'replace'd by a 'Number' yet).
--
-- Cleanup: the original bound the unused names @n@ and @s@ in the first
-- two equations, which triggers -Wall unused-binding warnings.
hasSymbol :: Term t -> Bool
hasSymbol (Number _) = False
hasSymbol (Symbol _) = True
hasSymbol (Sequence ts) = any hasSymbol ts
hasSymbol (ACI ts) = any hasSymbol (Set.toList ts)
-- | Keep only the minimal terms (w.r.t. 'compareTerms') among those that
-- still contain symbols; several terms may tie for minimal.
minTerms :: (Eq t,Ord t) => Set.Set (Term t) -> Set.Set (Term t)
minTerms terms =
    let termsWithSymbols = Set.filter hasSymbol terms
    in if Set.null termsWithSymbols
       then Set.empty -- problem if the only term contains no symbols any more! occurs with aciMorphism t1!
       else let (minTerm:_) = List.sortBy compareTerms (Set.toList termsWithSymbols)
                termsAreEqual t1 t2 = (compareTerms t1 t2) == EQ
            in Set.filter (termsAreEqual minTerm) termsWithSymbols
-- | Among candidate substitutions, keep those whose renamed term is minimal
-- w.r.t. 'compareTerms' ('dom' and 'image' come from Search.Utils.SetMap).
minMorphs :: (Eq s,Ord s) => Term s -> [[s]] -> [[s]]
minMorphs term [] = []
minMorphs term substitutions =
    if Set.null minimalTerms then substitutions else Set.toList (image termsToSubs minimalTerms)
    where minimalTerms = (minTerms $ dom termsToSubs)
          termsToSubs = Map.fromList (map (\subs -> (replace subs term,subs)) substitutions)
{- |
prox takes an term and returns a list of the minimal symbols from that term (s. 'minTerms')
-}
-- | The next candidate symbols of a term (cf. the Haddock comment above):
-- for a sequence, recurse into its first symbol-carrying child; for an ACI
-- node, collect candidates from all minimal children.
prox :: (Eq s,Ord s) => Term s -> [s]
prox (Number n) = []
prox (Symbol s) = [s]
prox (Sequence ts) = case (filter hasSymbol ts)
                     of (t:_) -> prox t
                        _ -> []
prox (ACI ts) = List.nub (concat $ Set.toList (Set.map prox (minTerms ts)))
-- | Extend every partial substitution by each of its candidate next
-- symbols (as delivered by 'prox' on the partially renamed term).
dist :: (Eq s,Ord s) => Term s -> [[s]] -> [[s]]
dist term subs = [ sub ++ [sym] | sub <- subs, sym <- prox (replace sub term) ]
-- | The set of all symbols occurring anywhere in a term.
--
-- Cleanup: the original bound the unused name @n@ in the 'Number' equation
-- (an -Wall unused-binding warning).
symbolsOf :: (Ord s) => Term s -> Set.Set s
symbolsOf (Number _) = Set.empty
symbolsOf (Symbol s) = Set.singleton s
symbolsOf (Sequence ts) = Set.unions (map symbolsOf ts)
symbolsOf (ACI ts) = Set.unions (map symbolsOf (Set.toList ts))
-- | Compute the normalizing symbol orderings of a term: starting from the
-- empty substitution, repeatedly extend ('dist') and prune to the minimal
-- candidates ('minMorphs') until every substitution covers all symbols.
aciMorphism :: (Eq s,Ord s) => Term s -> [[s]]
aciMorphism term = acim term [[]]
    where symbolsOfTerm = (symbolsOf term)
          substitutionIsIncomplete sub = Set.isProperSubsetOf (Set.fromList sub) symbolsOfTerm
          -- NOTE(review): the local 'term' deliberately shadows the outer one
          acim term subs =
              if all substitutionIsIncomplete subs
              then acim term (minMorphs term (dist term subs))
              else subs
-- | Parse a term from its textual form and pair it with its normalizing
-- substitutions (see the example session in the comment below).
--
-- Fix: the original discarded the parse error ('error "parse error at "'
-- with the @++ (show err)@ commented out), leaving an unused binding and a
-- useless diagnostic; the error position/message is now included, as the
-- commented-out code intended.
aci :: [Char] -> (Term String, [[String]])
aci input = case (parse aciParser "" input)
            of Left err -> error ("parse error at " ++ show err)
               Right x -> (x, aciMorphism x)
{-
*ACINormalization> aci "{a2,a1,{b3,a2},{a1,a1}}"
({{"a1"},{"a2","b3"},"a1","a2"},[["a1","a2","b3"]])
*ACINormalization> aci "{a2,a1,{b3,a2},[a1,a1]}"
({{"a2","b3"},["a1","a1"],"a1","a2"},[["a1","a2","b3"]])
*ACINormalization> let (x,(p:_)) = aci "{a2,a1,{b3,a2},[a1,a1]}"
*ACINormalization> replace p x
{{1,2},[0,0],0,1}
*ACINormalization> let (x,[p]) = aci "{{[Q,{a,d}],[Q,{c,d}],[R,{a,c}]},{[Q,{b,d}],[R,{a,b}],[R,{b,c}]}}"
*ACINormalization> x
{{["Q",{"a","d"}],["Q",{"c","d"}],["R",{"a","c"}]},{["Q",{"b","d"}],["R",{"a","b"}],["R",{"b","c"}]}}
*ACINormalization> p
["R","b","c","a","Q","d"]
*ACINormalization> replace p x
{{[0,{1,2}],[0,{1,3}],[4,{1,5}]},{[0,{2,3}],[4,{2,5}],[4,{3,5}]}}
NOTE: the ACI normalization as implemented here is probably NOT COMPLETE,
as this example shows:
*ACINormalization> print $ aci "[{a,b,c},{a,c,d}]"
([{"a","b","c"},{"a","c","d"}],[["c","a","b","d"]])
The substitution ["a","c","b","d"] would also make the term minimal!
-}
| nevrenato/Hets_Fork | Search/Common/ACINormalization.hs | gpl-2.0 | 6,705 | 20 | 16 | 1,372 | 2,103 | 1,060 | 1,043 | 114 | 5 |
module Shrdlite.CombinatorParser (
module Shrdlite.CombinatorParser,
module Control.Applicative
) where
import Control.Applicative
import qualified Data.Foldable as F
newtype Parser t a = Parser ([t] -> [([t], a)])

-- | Run a parser, keeping only the results that consumed the whole input.
parse :: Parser t a -> [t] -> [a]
parse (Parser run) input = [ result | (rest, result) <- run input, null rest ]
-- Map over every parse result without touching the remaining input.
instance Functor (Parser t) where
  fmap f (Parser p) = Parser (\ts -> [(ts', f a) | (ts', a) <- p ts])
-- 'pure' consumes nothing; '<*>' runs the function parser, then the
-- argument parser on each leftover input, combining all results.
instance Applicative (Parser t) where
  pure a = Parser (\ts -> [(ts, a)])
  Parser p <*> Parser q =
    Parser (\ts -> [(ts'', f a) | (ts', f) <- p ts, (ts'', a) <- q ts'])
-- Nondeterministic choice: 'empty' always fails, '<|>' collects the
-- results of both alternatives.
instance Alternative (Parser t) where
  empty = Parser (const [])
  Parser p <|> Parser q = Parser (\ts -> p ts ++ q ts)
-- | Match exactly the given token, yielding it and consuming one input
-- element; fails (no results) otherwise.
token :: Eq t => t -> Parser t t
token t = Parser (\ts -> case ts of
                           t':ts' | t == t' -> [(ts', t')]
                           _ -> [])
-- Convenience parsers
-- | Try all given parsers, collecting every alternative's results.
anyof :: [Parser t a] -> Parser t a
anyof = F.asum
-- | Match an exact sequence of tokens, in order, yielding the sequence.
--
-- Cleanup: the original hand-rolled @foldr (\ x -> (<*>) ((:) <$> token x))
-- (pure [])@, which is precisely 'traverse' at the list instance.
tokens :: Eq t => [t] -> Parser t [t]
tokens = traverse token
-- | Build a parser from a lexicon: each value is produced by any of its
-- phrases, where a phrase is matched word by word against the input tokens.
lexicon :: [(a, [String])] -> Parser String a
lexicon alts = anyof [a <$ anyof (map (tokens.words) alt) | (a, alt) <- alts]
| chip2n/tin172-project | haskell/src/Shrdlite/CombinatorParser.hs | gpl-3.0 | 1,191 | 0 | 14 | 329 | 636 | 341 | 295 | 27 | 2 |
#!/usr/bin/env stack
{- stack runghc --verbosity info
--package hledger-lib
--package hledger
--package cmdargs
--package text
-}
{-# LANGUAGE OverloadedStrings, QuasiQuotes #-}
import Control.Arrow (first)
import Data.Maybe
import Data.List
import Data.String.Here
import System.Console.CmdArgs
import Hledger.Cli
-- hledger-budget REPORT-COMMAND [--no-offset] [--no-buckets] [OPTIONS...]
-- | Command-line mode for the @budget@ wrapper: 'hledgerCommandMode' with a
-- long help text (the quasiquoted block below, reproduced verbatim) plus the
-- budgeting flag group; the wrapped report commands from 'actions' are
-- mounted as unnamed sub-modes.
budgetmode :: Mode RawOpts
budgetmode = (hledgerCommandMode
  [here| budget
Perform some subset of reports available in core hledger but process automated
and periodic transactions. Also simplify tree of accounts to ease view of
"budget buckets". People familiar with ledger budgeting
(http://www.ledger-cli.org/3.0/doc/ledger3.html#Budgeting)
may consider this tool as an alias to `ledger --budget`.
FLAGS
With this tool you may either use so called periodic transactions that being
issued with each new period or use a family of approaches with automated
transactions. You may want to look at [budgeting section of
plaintextaccounting](http://plaintextaccounting.org/#budgeting).
Periodic transaction that being interpreted by this tool may look like:
~ monthly from 2017/3
income:salary $-4,000.00
expenses:taxes $1,000
expenses:housing:rent $1,200
expenses:grocery $400
expenses:leisure $200
expenses:health $200
expenses $100
assets:savings
Header of such entries starts with `'~'` (tilde symbol) following by an
interval with an effect period when transactions should be injected.
Effect of declaring such periodic transaction is:
- Transactions will be injected at the beginning of each period. I.e. for
monthly it will always refer to 1st day of month.
- Injected transaction will have inverted amounts to offset existing associated
expenses. I.e. for this example negative balance indicates how much you have
within your budget and positive amounts indicates how far you off from your
budget.
- Set of accounts across of all periodic transactions will form kinda buckets
where rest of the accounts will be sorted into. Each account not mentioned in
any of periodic transaction will be dropped without changing of balance for
parent account. I.e. for this example postings for `expenses:leisure:movie`
will contribute to the balance of `expenses:leisure` only in reports.
Note that beside a periodic transaction all automated transactions will be
handled in a similar way how they are handled in `rewrite` command.
Bucketing
It is very common to have more expense accounts than budget
"envelopes"/"buckets". For this reason all periodic transactions are treated as
a source of information about your budget "buckets".
I.e. example from previous section will build a sub-tree of accounts that look like
assets:savings
expenses
taxes
housing:rent
grocery
leisure
health
income:salary
All accounts used in your transactions journal files will be classified
according to that tree to contribute to an appropriate bucket of budget.
Everything else will be collected under virtual account `<unbucketed>` to give
you an idea of what parts of your accounts tree is not budgeted. For example
`liabilities` will contributed to that entry.
Reports
You can use `budget` command to produce next reports:
- `balance` - the most important one to track how you follow your budget. If
you use month-based budgeting you may want to use `--monthly` and
`--row-total` option to see how you are doing through the months. You also
may find it useful to add `--tree` option to see aggregated totals per
intermediate node of accounts tree.
- `register` - might be useful if you want to see long history (ex. `--weekly`)
that is too wide to fit into your terminal.
- `print` - this is mostly to check what actually happens. But you may use it
if you prefer to generate budget transactions and store it in a separate
journal for some less popular budgeting scheme.
Extra options for reports
You may tweak behavior of this command with additional options `--no-offset` and `--no-bucketing`.
- Don't use these options if your budgeting schema includes both periodic
transactions, and "bucketing". Unless you want to figure out how your
budgeting might look like. You may find helpful values of average column from
report
$ hledger budget -- bal --period 'monthly to last month' --no-offset --average
- Use `--no-offset` and `--no-bucketing` if your schema fully relies on
automated transactions and hand-crafted budgeting transactions. In this mode
only automated transactions will be processed. I.e. when you journal looks
something like
= ^expenses:food
budget:gifts *-1
assets:budget *1
2017/1/1 Budget for Jan
assets:bank $-1000
budget:gifts $200
budget:misc
- Use `--no-bucketing` only if you want to produce a valid journal. For example
when you want to pass it as an input for other `hledger` command. Most people
will find this useless.
Recommendations
- Automated transaction should follow same rules that usual transactions follow
(i.e. keep balance for real and balanced virtual postings).
- Don't change the balance of real asset and liability accounts for which you
usually put assertions. Keep in mind that `hledger` do not apply modification
transactions.
- In periodic transactions to offset your budget use either top-level account
like `Assets` or introduce a "virtual" one like `Assets:Bank:Budget` that
will be a child to the one you want to offset.
|]
  [] -- ungrouped flags
  [("\nBudgeting", budgetFlags), generalflagsgroup2] -- groupped flags
  [] -- hidden flags
  ([], Nothing)
  ) { modeGroupModes = Group
      { groupUnnamed = map fst actions
      , groupNamed = []
      , groupHidden = []
      }
    }
-- | The two flags specific to this wrapper; both are plain boolean options
-- read back in 'budgetWrapper'.
budgetFlags :: [Flag RawOpts]
budgetFlags =
  [ flagNone ["no-buckets"] (setboolopt "no-buckets") "show all accounts besides mentioned in periodic transactions"
  , flagNone ["no-offset"] (setboolopt "no-offset") "do not add up periodic transactions"
  ]
-- | The hledger report commands exposed through this wrapper, each with the
-- budgeting flags injected into its command-line mode.
actions :: [(Mode RawOpts, CliOpts -> Journal -> IO ())]
actions = first injectBudgetFlags <$>
  [ (balancemode, balance)
  , (balancesheetmode, balancesheet)
  , (cashflowmode, cashflow)
  , (incomestatementmode, incomestatement)
  , (registermode, register)
  , (printmode, print')
  ]
-- | Add the "\nBudgeting" flag group to a wrapped command's mode.
injectBudgetFlags :: Mode RawOpts -> Mode RawOpts
injectBudgetFlags = injectFlags "\nBudgeting" budgetFlags
-- maybe lenses will help...
-- | Append flags to the named flag group of a mode, creating the group if
-- it does not exist yet (only the record plumbing is non-trivial here).
injectFlags :: String -> [Flag RawOpts] -> Mode RawOpts -> Mode RawOpts
injectFlags section flags mode0 = mode' where
    mode' = mode0 { modeGroupFlags = groupFlags' }
    groupFlags0 = modeGroupFlags mode0
    groupFlags' = groupFlags0 { groupNamed = namedFlags' }
    namedFlags0 = groupNamed groupFlags0
    namedFlags' =
        -- if a group with this name already exists, extend it; otherwise
        -- prepend a new group
        case ((section ==) . fst) `partition` namedFlags0 of
            ([g], gs) -> (fst g, snd g ++ flags) : gs
            _ -> (section, flags) : namedFlags0
-- | Re-balance a journal's transactions, honouring the CLI's
-- ignore-assertions setting; balancing failures abort via 'error''.
journalBalanceTransactions' :: CliOpts -> Journal -> IO Journal
journalBalanceTransactions' opts j = do
  let assrt = not . ignore_assertions_ $ inputopts_ opts
  either error' return $ journalBalanceTransactions assrt j
-- | Run a subcommand on a journal transformed for budget reporting:
-- modifier transactions are applied, periodic transactions are
-- (optionally) negated and merged in as "Budget transaction"s, the
-- result is re-balanced, and account names are remapped onto the budget
-- buckets (unless --no-buckets was given).
budgetWrapper :: (CliOpts -> Journal -> IO ()) -> CliOpts -> Journal -> IO ()
budgetWrapper f opts' j = do
    -- Apply all modifier transactions to the original transactions, so
    -- journalBalanceTransactions can re-infer balances/prices.
    let applyModifiers = originalTransaction . foldr (flip (.) . runModifierTransaction') id mtxns
        runModifierTransaction' = fmap txnTieKnot . runModifierTransaction Any
        mtxns = jmodifiertxns j
        dates = spanUnion (jdatespan j) (periodAsDateSpan $ period_ $ reportopts_ opts')
        ts' = map applyModifiers $ jtxns j
        -- Unless --no-offset was given, prepend one negated "Budget
        -- transaction" per occurrence of each periodic transaction.
        ts'' | boolopt "no-offset" $ rawopts_ opts' = ts'
             | otherwise = [makeBudget t | pt <- jperiodictxns j, t <- runPeriodicTransaction pt dates] ++ ts'
        makeBudget t = txnTieKnot $ t
            { tdescription = "Budget transaction"
            , tpostings = map makeBudgetPosting $ tpostings t
            }
        makeBudgetPosting p = p { pamount = negate $ pamount p }
    j' <- journalBalanceTransactions' opts' j { jtxns = ts'' }
    -- Remap each posting's account to its nearest enclosing budget
    -- bucket; accounts under no bucket become "<unbucketed>".
    let buckets = budgetBuckets j
        remapAccount "" = "<unbucketed>"
        remapAccount an
          | an `elem` buckets = an
          | otherwise = remapAccount (parentAccountName an)
        remapPosting p = p { paccount = remapAccount $ paccount p, porigin = Just . fromMaybe p $ porigin p }
        remapTxn = mapPostings (map remapPosting)
    let j'' | boolopt "no-buckets" $ rawopts_ opts' = j'
            | null buckets = j'
            | otherwise = j' { jtxns = remapTxn <$> jtxns j' }
    -- Finally feed the transformed journal to the real command.
    f opts' j''
-- | Account names used as budget buckets: every account mentioned in a
-- posting of any periodic transaction, duplicates removed (first
-- occurrence order is kept, as with 'nub').
budgetBuckets :: Journal -> [AccountName]
budgetBuckets j = nub [paccount p | pt <- jperiodictxns j, p <- ptpostings pt]
-- | Transform a transaction's postings with the given function, then
-- re-tie the knot so each posting points back at the updated transaction.
mapPostings :: ([Posting] -> [Posting]) -> (Transaction -> Transaction)
mapPostings f txn = txnTieKnot txn { tpostings = f (tpostings txn) }
-- | Entry point: parse the command line into 'CliOpts' and run the
-- selected subcommand against the user's journal via 'budget'.
main :: IO ()
main = do
    rawopts <- decodeRawOpts <$> processArgs budgetmode
    opts <- rawOptsToCliOpts rawopts
    withJournalDo opts budget
-- | Look up the subcommand named on the command line and run it through
-- 'budgetWrapper'; if it is not one of the supported commands, print the
-- top-level mode instead.
budget :: CliOpts -> Journal -> IO ()
budget opts journal =
    maybe (print budgetmode) run (find matches actions)
  where
    matches = elem (command_ opts) . modeNames . fst
    run (_, action) = budgetWrapper action opts journal
| ony/hledger | bin/hledger-budget.hs | gpl-3.0 | 9,578 | 0 | 15 | 2,027 | 1,256 | 656 | 600 | 85 | 2 |
module Main where
import System.Environment
import Data.List
import Data.Char
--I DID NOT CREATE ANY OF THIS, I FOUND ALL OF IT ON REDDIT AND WANTED TO EXPERIMENT WITH HASKELL
{-
solve n xs = [sum $ zipWith (\a b -> if a == b then a else 0) xs (drop n $ cycle xs)]
solve1 = solve 1
main :: IO ()
main = do
contents <- getContents
print $ show $ solve1 $ read contents
-}
--I DID NOT CREATE ANY OF THIS, I FOUND ALL OF IT ON REDDIT AND WANTED TO EXPERIMENT WITH HASKELL
-- | Read the puzzle input, strip the trailing newline, and print the
-- answers for both parts of the 2017 day-1 captcha.
-- NOTE(review): 'init' and 'head' are partial; this assumes a non-empty
-- input file.
main = do
    contents <- readFile "input.txt"
    let s = init contents -- drop the trailing '\n'
    -- Part 1: compare each digit with its immediate neighbour; appending
    -- the first character makes the comparison wrap around.
    print $ process 1 (s ++ [head s])
    -- Part 2: compare each digit with the one halfway around the circle.
    let half = length s `div` 2
    print $ process half (s ++ take half s)
-- | Sum the digits that are equal to the digit @k@ positions ahead of
-- them (Advent of Code 2017 day 1 "captcha").  The caller extends the
-- input so that the lookahead wraps around.  Uses 'digitToInt' instead
-- of @read . (:[])@: same result on digit characters, without going
-- through the 'Read' parser.
process :: Int -> String -> Int
process k s = sum [digitToInt a | (a, b) <- zip s (drop k s), a == b]
| kevinlmadison/advent_of_code | 3_year_2017/day01/haskell/sol_haskell.hs | gpl-3.0 | 842 | 0 | 13 | 231 | 179 | 92 | 87 | 13 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.DFAReporting.AccountPermissions.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves the list of account permissions.
--
-- /See:/ <https://developers.google.com/doubleclick-advertisers/ Campaign Manager 360 API Reference> for @dfareporting.accountPermissions.list@.
module Network.Google.Resource.DFAReporting.AccountPermissions.List
(
-- * REST Resource
AccountPermissionsListResource
-- * Creating a Request
, accountPermissionsList
, AccountPermissionsList
-- * Request Lenses
, aplXgafv
, aplUploadProtocol
, aplAccessToken
, aplUploadType
, aplProFileId
, aplCallback
) where
import Network.Google.DFAReporting.Types
import Network.Google.Prelude
-- | Servant-style route alias for the @dfareporting.accountPermissions.list@
-- method, which the 'AccountPermissionsList' request conforms to:
-- GET \/dfareporting\/v3.5\/userprofiles\/{profileId}\/accountPermissions
-- plus the standard Google API query parameters.
type AccountPermissionsListResource =
  "dfareporting"
    :> "v3.5"
    :> "userprofiles"
    :> Capture "profileId" (Textual Int64)
    :> "accountPermissions"
    :> QueryParam "$.xgafv" Xgafv
    :> QueryParam "upload_protocol" Text
    :> QueryParam "access_token" Text
    :> QueryParam "uploadType" Text
    :> QueryParam "callback" Text
    :> QueryParam "alt" AltJSON
    :> Get '[JSON] AccountPermissionsListResponse
-- | Request parameters for retrieving the list of account permissions.
--
-- /See:/ 'accountPermissionsList' smart constructor.
data AccountPermissionsList = AccountPermissionsList'
  { _aplXgafv :: !(Maybe Xgafv)
    -- ^ V1 error format.
  , _aplUploadProtocol :: !(Maybe Text)
    -- ^ Upload protocol for media (e.g. \"raw\", \"multipart\").
  , _aplAccessToken :: !(Maybe Text)
    -- ^ OAuth access token.
  , _aplUploadType :: !(Maybe Text)
    -- ^ Legacy upload protocol for media (e.g. \"media\", \"multipart\").
  , _aplProFileId :: !(Textual Int64)
    -- ^ User profile ID associated with this request.
  , _aplCallback :: !(Maybe Text)
    -- ^ JSONP.
  } deriving (Eq, Show, Data, Typeable, Generic)
-- | Build an 'AccountPermissionsList' request for the given user profile
-- ID; every optional field starts out as 'Nothing'.
--
-- Other fields can be set afterwards through the request lenses:
-- 'aplXgafv', 'aplUploadProtocol', 'aplAccessToken', 'aplUploadType',
-- 'aplProFileId', 'aplCallback'.
accountPermissionsList
    :: Int64 -- ^ 'aplProFileId'
    -> AccountPermissionsList
accountPermissionsList pAplProFileId_ = AccountPermissionsList'
  { _aplXgafv = Nothing
  , _aplUploadProtocol = Nothing
  , _aplAccessToken = Nothing
  , _aplUploadType = Nothing
  , _aplProFileId = _Coerce # pAplProFileId_ -- wrap the raw Int64 in 'Textual'
  , _aplCallback = Nothing
  }
-- | V1 error format.
aplXgafv :: Lens' AccountPermissionsList (Maybe Xgafv)
aplXgafv = lens _aplXgafv $ \s a -> s {_aplXgafv = a}
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
aplUploadProtocol :: Lens' AccountPermissionsList (Maybe Text)
aplUploadProtocol = lens _aplUploadProtocol $ \s a -> s {_aplUploadProtocol = a}
-- | OAuth access token.
aplAccessToken :: Lens' AccountPermissionsList (Maybe Text)
aplAccessToken = lens _aplAccessToken $ \s a -> s {_aplAccessToken = a}
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
aplUploadType :: Lens' AccountPermissionsList (Maybe Text)
aplUploadType = lens _aplUploadType $ \s a -> s {_aplUploadType = a}
-- | User profile ID associated with this request.  The field is stored
-- as @Textual Int64@; composing with '_Coerce' exposes it as a plain
-- 'Int64'.
aplProFileId :: Lens' AccountPermissionsList Int64
aplProFileId = lens _aplProFileId (\s a -> s {_aplProFileId = a}) . _Coerce
-- | JSONP
aplCallback :: Lens' AccountPermissionsList (Maybe Text)
aplCallback = lens _aplCallback $ \s a -> s {_aplCallback = a}
-- | Issue the request through the shared DFA Reporting service client.
instance GoogleRequest AccountPermissionsList where
  type Rs AccountPermissionsList = AccountPermissionsListResponse
  type Scopes AccountPermissionsList =
    '["https://www.googleapis.com/auth/dfatrafficking"]
  requestClient AccountPermissionsList'{..} =
    go _aplProFileId _aplXgafv _aplUploadProtocol _aplAccessToken
       _aplUploadType _aplCallback (Just AltJSON) dFAReportingService
    where
      go = buildClient (Proxy :: Proxy AccountPermissionsListResource) mempty
| brendanhay/gogol | gogol-dfareporting/gen/Network/Google/Resource/DFAReporting/AccountPermissions/List.hs | mpl-2.0 | 4,946 | 0 | 18 | 1,155 | 726 | 421 | 305 | 107 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.KMS.CreateGrant
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Adds a grant to a key to specify who can access the key and under what
-- conditions. Grants are alternate permission mechanisms to key policies. For
-- more information about grants, see <http://docs.aws.amazon.com/kms/latest/developerguide/grants.html Grants> in the developer guide. If a grant
-- is absent, access to the key is evaluated based on IAM policies attached to
-- the user. 'ListGrants' 'RetireGrant' 'RevokeGrant'
--
-- <http://docs.aws.amazon.com/kms/latest/APIReference/API_CreateGrant.html>
module Network.AWS.KMS.CreateGrant
(
-- * Request
CreateGrant
-- ** Request constructor
, createGrant
-- ** Request lenses
, cgConstraints
, cgGrantTokens
, cgGranteePrincipal
, cgKeyId
, cgOperations
, cgRetiringPrincipal
-- * Response
, CreateGrantResponse
-- ** Response constructor
, createGrantResponse
-- ** Response lenses
, cgrGrantId
, cgrGrantToken
) where
import Network.AWS.Data (Object)
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.KMS.Types
import qualified GHC.Exts
-- | Parameters of a CreateGrant request; see the request lenses for the
-- meaning of the individual fields.
data CreateGrant = CreateGrant
    { _cgConstraints :: Maybe GrantConstraints
      -- ^ Conditions under which the granted operations are allowed.
    , _cgGrantTokens :: List "GrantTokens" Text
      -- ^ Grant tokens.
    , _cgGranteePrincipal :: Text
      -- ^ Principal given permission to use the key.
    , _cgKeyId :: Text
      -- ^ Key ID or fully specified key ARN.
    , _cgOperations :: List "Operations" GrantOperation
      -- ^ Operations permitted by the grant.
    , _cgRetiringPrincipal :: Maybe Text
      -- ^ Principal given permission to retire the grant.
    } deriving (Eq, Read, Show)
-- | Construct a 'CreateGrant' request from the two required fields; all
-- optional fields start out empty.
--
-- Fields (see the corresponding lenses): 'cgConstraints',
-- 'cgGrantTokens', 'cgGranteePrincipal', 'cgKeyId', 'cgOperations',
-- 'cgRetiringPrincipal'.
createGrant :: Text -- ^ 'cgKeyId'
            -> Text -- ^ 'cgGranteePrincipal'
            -> CreateGrant
createGrant keyId granteePrincipal = CreateGrant
    { _cgConstraints = Nothing
    , _cgGrantTokens = mempty
    , _cgGranteePrincipal = granteePrincipal
    , _cgKeyId = keyId
    , _cgOperations = mempty
    , _cgRetiringPrincipal = Nothing
    }
-- | Specifies the conditions under which the actions specified by the
-- 'Operations' parameter are allowed.
cgConstraints :: Lens' CreateGrant (Maybe GrantConstraints)
cgConstraints = lens _cgConstraints $ \s a -> s { _cgConstraints = a }
-- | For more information, see <http://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#grant_token Grant Tokens>.
cgGrantTokens :: Lens' CreateGrant [Text]
cgGrantTokens = lens _cgGrantTokens (\s a -> s { _cgGrantTokens = a }) . _List
-- | Principal given permission by the grant to use the key identified by
-- the 'keyId' parameter.
cgGranteePrincipal :: Lens' CreateGrant Text
cgGranteePrincipal = lens _cgGranteePrincipal $ \s a -> s { _cgGranteePrincipal = a }
-- | A unique identifier for the customer master key: either a globally
-- unique key ID (e.g. 12345678-1234-1234-1234-123456789012) or the fully
-- specified key ARN (e.g.
-- arn:aws:kms:us-east-1:123456789012:key/12345678-1234-1234-1234-123456789012).
cgKeyId :: Lens' CreateGrant Text
cgKeyId = lens _cgKeyId $ \s a -> s { _cgKeyId = a }
-- | List of operations permitted by the grant: any combination of
-- Decrypt, Encrypt, GenerateDataKey, GenerateDataKeyWithoutPlaintext,
-- ReEncryptFrom, ReEncryptTo, CreateGrant, RetireGrant.
cgOperations :: Lens' CreateGrant [GrantOperation]
cgOperations = lens _cgOperations (\s a -> s { _cgOperations = a }) . _List
-- | Principal given permission to retire the grant. For more
-- information, see 'RetireGrant'.
cgRetiringPrincipal :: Lens' CreateGrant (Maybe Text)
cgRetiringPrincipal = lens _cgRetiringPrincipal $ \s a -> s { _cgRetiringPrincipal = a }
-- | Result of a CreateGrant call.
data CreateGrantResponse = CreateGrantResponse
    { _cgrGrantId :: Maybe Text
      -- ^ Unique grant identifier (usable to revoke the grant).
    , _cgrGrantToken :: Maybe Text
      -- ^ Grant token.
    } deriving (Eq, Ord, Read, Show)
-- | Construct an empty 'CreateGrantResponse'.
--
-- Fields (see the corresponding lenses): 'cgrGrantId', 'cgrGrantToken'.
createGrantResponse :: CreateGrantResponse
createGrantResponse = CreateGrantResponse
    { _cgrGrantId = Nothing
    , _cgrGrantToken = Nothing
    }
-- | Unique grant identifier. You can use the /GrantId/ value to revoke a
-- grant.
cgrGrantId :: Lens' CreateGrantResponse (Maybe Text)
cgrGrantId = lens _cgrGrantId $ \s a -> s { _cgrGrantId = a }
-- | For more information, see <http://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#grant_token Grant Tokens>.
cgrGrantToken :: Lens' CreateGrantResponse (Maybe Text)
cgrGrantToken = lens _cgrGrantToken $ \s a -> s { _cgrGrantToken = a }
instance ToPath CreateGrant where
    toPath = const "/"
instance ToQuery CreateGrant where
    toQuery = const mempty
instance ToHeaders CreateGrant
-- Request body serialisation; key names follow the KMS wire format.
instance ToJSON CreateGrant where
    toJSON CreateGrant{..} = object
        [ "KeyId"             .= _cgKeyId
        , "GranteePrincipal"  .= _cgGranteePrincipal
        , "RetiringPrincipal" .= _cgRetiringPrincipal
        , "Operations"        .= _cgOperations
        , "Constraints"       .= _cgConstraints
        , "GrantTokens"       .= _cgGrantTokens
        ]
instance AWSRequest CreateGrant where
    type Sv CreateGrant = KMS
    type Rs CreateGrant = CreateGrantResponse
    request  = post "CreateGrant"
    response = jsonResponse
instance FromJSON CreateGrantResponse where
    parseJSON = withObject "CreateGrantResponse" $ \o -> CreateGrantResponse
        <$> o .:? "GrantId"
        <*> o .:? "GrantToken"
| romanb/amazonka | amazonka-kms/gen/Network/AWS/KMS/CreateGrant.hs | mpl-2.0 | 6,771 | 0 | 11 | 1,438 | 893 | 536 | 357 | 95 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.StorageTransfer.TransferOperations.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists transfer operations. Operations are ordered by their creation time
-- in reverse chronological order.
--
-- /See:/ <https://cloud.google.com/storage-transfer/docs Storage Transfer API Reference> for @storagetransfer.transferOperations.list@.
module Network.Google.Resource.StorageTransfer.TransferOperations.List
(
-- * REST Resource
TransferOperationsListResource
-- * Creating a Request
, transferOperationsList
, TransferOperationsList
-- * Request Lenses
, tolXgafv
, tolUploadProtocol
, tolAccessToken
, tolUploadType
, tolName
, tolFilter
, tolPageToken
, tolPageSize
, tolCallback
) where
import Network.Google.Prelude
import Network.Google.StorageTransfer.Types
-- | Servant-style route alias for the
-- @storagetransfer.transferOperations.list@ method, which the
-- 'TransferOperationsList' request conforms to.
type TransferOperationsListResource =
  "v1"
    :> Capture "name" Text
    :> QueryParam "filter" Text
    :> QueryParam "$.xgafv" Xgafv
    :> QueryParam "upload_protocol" Text
    :> QueryParam "access_token" Text
    :> QueryParam "uploadType" Text
    :> QueryParam "pageToken" Text
    :> QueryParam "pageSize" (Textual Int32)
    :> QueryParam "callback" Text
    :> QueryParam "alt" AltJSON
    :> Get '[JSON] ListOperationsResponse
-- | Request parameters for listing transfer operations.  Operations are
-- ordered by their creation time in reverse chronological order.
--
-- /See:/ 'transferOperationsList' smart constructor.
data TransferOperationsList = TransferOperationsList'
  { _tolXgafv :: !(Maybe Xgafv)
    -- ^ V1 error format.
  , _tolUploadProtocol :: !(Maybe Text)
    -- ^ Upload protocol for media (e.g. \"raw\", \"multipart\").
  , _tolAccessToken :: !(Maybe Text)
    -- ^ OAuth access token.
  , _tolUploadType :: !(Maybe Text)
    -- ^ Legacy upload protocol for media (e.g. \"media\", \"multipart\").
  , _tolName :: !Text
    -- ^ Not used.
  , _tolFilter :: !Text
    -- ^ Required JSON query text; see 'tolFilter'.
  , _tolPageToken :: !(Maybe Text)
    -- ^ The list page token.
  , _tolPageSize :: !(Maybe (Textual Int32))
    -- ^ The list page size (max 256).
  , _tolCallback :: !(Maybe Text)
    -- ^ JSONP.
  } deriving (Eq, Show, Data, Typeable, Generic)
-- | Build a 'TransferOperationsList' request from the required name and
-- filter; all optional fields start out as 'Nothing'.
--
-- Other fields can be set afterwards through the request lenses:
-- 'tolXgafv', 'tolUploadProtocol', 'tolAccessToken', 'tolUploadType',
-- 'tolName', 'tolFilter', 'tolPageToken', 'tolPageSize', 'tolCallback'.
transferOperationsList
    :: Text -- ^ 'tolName'
    -> Text -- ^ 'tolFilter'
    -> TransferOperationsList
transferOperationsList pTolName_ pTolFilter_ = TransferOperationsList'
  { _tolXgafv = Nothing
  , _tolUploadProtocol = Nothing
  , _tolAccessToken = Nothing
  , _tolUploadType = Nothing
  , _tolName = pTolName_
  , _tolFilter = pTolFilter_
  , _tolPageToken = Nothing
  , _tolPageSize = Nothing
  , _tolCallback = Nothing
  }
-- | V1 error format.
tolXgafv :: Lens' TransferOperationsList (Maybe Xgafv)
tolXgafv = lens _tolXgafv $ \s a -> s {_tolXgafv = a}
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
tolUploadProtocol :: Lens' TransferOperationsList (Maybe Text)
tolUploadProtocol = lens _tolUploadProtocol $ \s a -> s {_tolUploadProtocol = a}
-- | OAuth access token.
tolAccessToken :: Lens' TransferOperationsList (Maybe Text)
tolAccessToken = lens _tolAccessToken $ \s a -> s {_tolAccessToken = a}
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
tolUploadType :: Lens' TransferOperationsList (Maybe Text)
tolUploadType = lens _tolUploadType $ \s a -> s {_tolUploadType = a}
-- | Not used.
tolName :: Lens' TransferOperationsList Text
tolName = lens _tolName $ \s a -> s {_tolName = a}
-- | Required. A list of query parameters specified as JSON text in the
-- form of: @{"projectId":"my_project_id",
-- "jobNames":["jobid1","jobid2",...],
-- "operationNames":["opid1","opid2",...],
-- "transferStatuses":["status1","status2",...]}@.  Since @jobNames@,
-- @operationNames@, and @transferStatuses@ support multiple values, they
-- must be specified with array notation.  @projectId@ is required; the
-- other keys are optional.  Valid values for @transferStatuses@
-- (case-insensitive): IN_PROGRESS, PAUSED, SUCCESS, FAILED, ABORTED.
tolFilter :: Lens' TransferOperationsList Text
tolFilter = lens _tolFilter $ \s a -> s {_tolFilter = a}
-- | The list page token.
tolPageToken :: Lens' TransferOperationsList (Maybe Text)
tolPageToken = lens _tolPageToken $ \s a -> s {_tolPageToken = a}
-- | The list page size. The max allowed value is 256.
tolPageSize :: Lens' TransferOperationsList (Maybe Int32)
tolPageSize = lens _tolPageSize (\s a -> s {_tolPageSize = a}) . mapping _Coerce
-- | JSONP
tolCallback :: Lens' TransferOperationsList (Maybe Text)
tolCallback = lens _tolCallback $ \s a -> s {_tolCallback = a}
-- | Issue the request through the shared Storage Transfer service client.
instance GoogleRequest TransferOperationsList where
  type Rs TransferOperationsList = ListOperationsResponse
  type Scopes TransferOperationsList =
    '["https://www.googleapis.com/auth/cloud-platform"]
  requestClient TransferOperationsList'{..} =
    go _tolName (Just _tolFilter) _tolXgafv _tolUploadProtocol
       _tolAccessToken _tolUploadType _tolPageToken _tolPageSize
       _tolCallback (Just AltJSON) storageTransferService
    where
      go = buildClient (Proxy :: Proxy TransferOperationsListResource) mempty
| brendanhay/gogol | gogol-storage-transfer/gen/Network/Google/Resource/StorageTransfer/TransferOperations/List.hs | mpl-2.0 | 6,498 | 0 | 18 | 1,450 | 966 | 562 | 404 | 134 | 1 |
-- brittany { lconfig_columnAlignMode: { tag: ColumnAlignModeDisabled }, lconfig_indentPolicy: IndentPolicyLeft }
-- Formatter test fixture: an operator definition that returns its first
-- argument.  NOTE(review): the directive above configures brittany for
-- this test; the exact layout of the line below is part of the fixture.
(***) x y = x
| lspitzner/brittany | data/Test409.hs | agpl-3.0 | 128 | 0 | 5 | 16 | 14 | 8 | 6 | 1 | 1 |
module Foundation where
import ClassyPrelude.Yesod
import Yesod.Auth
import Yesod.Auth.Hardcoded
import Yesod.Auth.Message
import Yesod.Core.Types
import Yesod.Default.Util (addStaticContentExternal)
import Database.Persist.Sql (runSqlPool, ConnectionPool)
import Settings.StaticFiles
import Settings
import Model
import Text.Jasmine (minifym)
import Text.Hamlet (hamletFile)
import Network.Wai as W
import qualified Data.Text as T
-- | The foundation datatype for the application: values initialised once
-- before the application starts (settings, static file serving, the
-- database pool, the HTTP manager and the logger) and made available to
-- every handler.
data App = App
    { appSettings :: AppSettings
      -- ^ Runtime configuration loaded at startup.
    , appStatic :: Static
      -- ^ Settings for static file serving.
    , appConnPool :: ConnectionPool
      -- ^ Database connection pool.
    , appHttpManager :: Manager
      -- ^ Shared outbound HTTP connection manager.
    , appLogger :: Logger
      -- ^ Application-wide logger.
    }
-- Expose the application's shared HTTP connection manager.
instance HasHttpManager App where
    getHttpManager = appHttpManager
-- This is where we define all of the routes in our application. For a full
-- explanation of the syntax, please see:
-- http://www.yesodweb.com/book/routing-and-handlers
--
-- Note that this is really half the story; in Application.hs, mkYesodDispatch
-- generates the rest of the code. Please see the linked documentation for an
-- explanation for this split.
-- mkYesodData generates the route datatype and URL rendering here, while
-- dispatch is generated separately in Application.hs.
mkYesodData "App" $(parseRoutesFile "config/routes")
-- | A convenient synonym for creating forms.  A @Form x@ is an MForm
-- computation that, given the CSRF fragment, yields the parse result and
-- the rendered form widget.
type Form x = Html -> MForm (HandlerT App IO) (FormResult x, Widget)
-- | Compute the application root from the request's Host header, falling
-- back to the configured 'appRoot' when the header is absent.  The
-- scheme (http\/https) is copied from the configured root.
approotRequest :: App -> W.Request -> Text
approotRequest master req =
    maybe configuredRoot withHost (requestHeaderHost req)
  where
    configuredRoot = appRoot (appSettings master)
    withHost host = scheme `T.append` decodeUtf8 host
    scheme
      | "https://" `T.isPrefixOf` configuredRoot = "https://"
      | otherwise = "http://"
-- Please see the documentation for the Yesod typeclass; most application
-- behaviour can be configured by overriding methods here.
instance Yesod App where
    -- Base of generated URLs, taken from the application settings.  See:
    -- https://github.com/yesodweb/yesod/wiki/Overriding-approot
    approot = ApprootMaster $ appRoot . appSettings
    -- Session data lives client-side in encrypted cookies; the idle
    -- timeout is 120 minutes.
    makeSessionBackend _ =
        Just <$> defaultClientSessionBackend 120 "config/client_session_key.aes"
    defaultLayout widget = do
        master <- getYesod
        mmsg <- getMessage
        -- The layout is split in two: default-layout renders the body
        -- contents, default-layout-wrapper the entire page.  The final
        -- value passed to hamletToRepHtml cannot be a widget, so the two
        -- pieces are composed here.
        agplWidget <- widgetToPageContent $(widgetFile "agplFooter")
        pc <- widgetToPageContent $ do
            mapM_ (addStylesheet . StaticR)
                [ css_normalize_css
                , css_bootstrap_css
                , css_main_css
                ]
            $(widgetFile "default-layout")
        withUrlRenderer $(hamletFile "templates/default-layout-wrapper.hamlet")
    -- Where to send users who must authenticate first.
    authRoute _ = Just $ AuthR LoginR
    -- Authorization: auth pages, favicon and robots are public; the
    -- moderation routes require a logged-in session; everything else is
    -- open for now.
    isAuthorized (AuthR _) _ = return Authorized
    isAuthorized FaviconR _ = return Authorized
    isAuthorized RobotsR _ = return Authorized
    isAuthorized ModMainR _ = isAdmin
    isAuthorized (ModEditR _) _ = isAdmin
    isAuthorized (ModDeleteR _) _ = isAdmin
    isAuthorized _ _ = return Authorized
    -- Static content is written into the static folder under a name
    -- derived from a hash of the content, so far-future expiry headers
    -- can be used without serving stale files.
    addStaticContent ext mime content = do
        master <- getYesod
        let staticDir = appStaticDir $ appSettings master
        addStaticContentExternal
            minifym
            genFileName
            staticDir
            (StaticR . flip StaticRoute [])
            ext
            mime
            content
      where
        -- Unique, content-addressed file name.
        genFileName lbs = "autogen-" ++ base64md5 lbs
    -- Log everything when configured to (development); otherwise only
    -- warnings and errors.
    shouldLog app _source level =
        appShouldLogAll (appSettings app)
            || level == LevelWarn
            || level == LevelError
    makeLogger = return . appLogger
-- | Authorization check for the moderation routes: any logged-in session
-- is accepted, otherwise the client is asked to authenticate.
isAdmin = do
    authId <- maybeAuthId
    return $ maybe AuthenticationRequired (const Authorized) authId
-- How to run database actions: each action runs against a connection
-- taken from the application's pool.
instance YesodPersist App where
    type YesodPersistBackend App = SqlBackend
    runDB action = getYesod >>= runSqlPool action . appConnPool
instance YesodPersistRunner App where
    getDBRunner = defaultGetDBRunner appConnPool
instance YesodAuth App where
    type AuthId App = Text
    -- After login and after logout, land on the home page...
    loginDest _ = HomeR
    logoutDest _ = HomeR
    -- ...unless a Referer: header is present, which takes precedence.
    redirectToReferer _ = True
    -- The credentials identifier is used directly as the AuthId; no user
    -- table lookup is performed.
    getAuthId = return . Just . credsIdent
    -- Only the configured moderator name may authenticate.
    authenticate Creds{..} = do
        master <- getYesod
        let name = appModeratorName (appSettings master)
        return $
          if credsIdent == name
            then Authenticated name
            else UserError InvalidLogin
    authPlugins _ = [authHardcoded]
    authHttpManager = getHttpManager
    maybeAuthId = lookupSession "_ID"
-- instance YesodAuthPersist App
-- Check credentials against the moderator name/password held in the
-- application settings.  NOTE(review): this is a plain (==) comparison
-- of the stored password; consider a constant-time comparison or hashed
-- storage if this ever guards anything sensitive.
instance YesodAuthHardcoded App where
    validatePassword u p = do
        settings <- appSettings <$> getYesod
        return $ u == appModeratorName settings && p == appModeratorPass settings
    doesUserNameExist u = do
        settings <- appSettings <$> getYesod
        return $ u == appModeratorName settings
-- This instance is required to use forms. You can modify renderMessage to
-- achieve customized and internationalized form validation messages.
-- Here the default (English) form messages are used unchanged.
instance RenderMessage App FormMessage where
    renderMessage _ _ = defaultFormMessage
-- Note: Some functionality previously present in the scaffolding has been
-- moved to documentation in the Wiki. Following are some hopefully helpful
-- links:
--
-- https://github.com/yesodweb/yesod/wiki/Sending-email
-- https://github.com/yesodweb/yesod/wiki/Serve-static-files-from-a-separate-domain
-- https://github.com/yesodweb/yesod/wiki/i18n-messages-in-the-scaffolding
| nek0/yacs | Foundation.hs | agpl-3.0 | 7,452 | 2 | 15 | 1,871 | 1,126 | 598 | 528 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Kubernetes.V1.LocalObjectReference where
import GHC.Generics
import Data.Text
import qualified Data.Aeson
-- | LocalObjectReference contains enough information to let you locate the referenced object inside the same namespace.
data LocalObjectReference = LocalObjectReference
  { name :: Maybe Text -- ^ Name of the referent. More info: http://releases.k8s.io/HEAD/docs/user-guide/identifiers.md#names
  } deriving (Show, Eq, Generic)
-- JSON encoding/decoding is derived generically from the record shape.
instance Data.Aeson.FromJSON LocalObjectReference
instance Data.Aeson.ToJSON LocalObjectReference
| minhdoboi/deprecated-openshift-haskell-api | kubernetes/lib/Kubernetes/V1/LocalObjectReference.hs | apache-2.0 | 717 | 0 | 9 | 89 | 83 | 50 | 33 | 14 | 0 |
-- | Build a greeting that reports the length of the given name.
-- Fixes two typos in the user-facing text ("Please to meet you" ->
-- "Pleased to meet you", "Youre name" -> "Your name").
name2reply :: String -> String
name2reply name =
    "Pleased to meet you, " ++ name ++ ".\n" ++
    "Your name contains " ++ charcount ++ " characters"
    where charcount = show $ length name
-- | Prompt for the user's name on stdin and print the generated reply.
main :: IO ()
main = do
    putStrLn "Greetings once again. What is your name?"
    name <- getLine
    putStrLn (name2reply name)
| EricYT/Haskell | src/real_haskell/chapter-7/callingpure.hs | apache-2.0 | 354 | 0 | 10 | 90 | 98 | 46 | 52 | 11 | 1 |
import Safe (readMay)
import Control.Applicative ((<$>))
import Control.Applicative ((<*>))
-- | Print the result of an age calculation, or an error message when the
-- year input could not be parsed.
displayAge :: Show a => Maybe a -> IO ()
displayAge = maybe reportBadYear reportAge
  where
    reportBadYear = putStrLn "You provided Invalid year"
    reportAge age = putStrLn $ "In 2020, you will be : " ++ show age
calcAge birthYear futureYear = futureYear - birthYear
-- | Ask for a birth year and a future year, then report the age the user
-- will reach; unparseable input is handled through Maybe.
main = do
    putStrLn "Enter your birthyear: "
    birthYearString <- getLine
    putStrLn "Enter some year in future: "
    futureYearString <- getLine
    -- Applicative style: calcAge is lifted over the two Maybe values
    -- produced by readMay; if either parse fails, the result is Nothing.
    -- (<$> applies a plain function inside a functor, <*> applies a
    -- function that is itself wrapped in the functor.)
    let maybeAge = calcAge <$> readMay birthYearString <*> readMay futureYearString
    displayAge maybeAge
| dongarerahul/edx-haskell | main-9.hs | apache-2.0 | 816 | 0 | 12 | 171 | 153 | 77 | 76 | 17 | 2 |
module Main where
import Graphics.Blank
import Life.Engine.QTree
import Life.Display.Canvas
import Life.Types
import Life.Formations
-- Runs Life indefinitely, serving the board through blank-canvas on
-- port 3000.
life :: Config -> Scene -> IO ()
life c b = blankCanvas 3000 $ \dc -> lifeCanvas dc (scene c b :: Board)
-- Runs Life for the specified number of generations, then stops.
lifeX :: Int -> Config -> Scene -> IO ()
lifeX x c s = blankCanvas 3000 $ \dc -> lifeXCanvas x dc (scene c s :: Board)
-- A 160x160 board seeded with three gliders at the origin.
-- NOTE(review): the Bool in the Config presumably toggles board edge
-- wrapping -- confirm against Life.Types.
main = life ((160,160),True) $ gliders3 (0,0)
| ku-fpg/better-life | examples/simulations/QTreeCanvas.hs | bsd-2-clause | 489 | 0 | 9 | 92 | 186 | 101 | 85 | 11 | 1 |
-- Copyright (c) 2012-2019, Christopher Hall <hsw@ms2.hinet.net>
-- Licence BSD2 see LICENSE file
module SendControl where
import qualified Graphics.X11 as X
import qualified Graphics.X11.Xlib.Extras as XE
import qualified Graphics.UI.Gtk as GTK
import Control.Exception( bracket )
import Control.Concurrent( threadDelay )
-- the end-of-line symbol sent by 'sendLine' after each line
eol :: X.KeySym
eol = X.xK_Return
--eol = X.xK_Linefeed
-- delay inserted after each synthetic key event, in microseconds;
-- see 'sendOneKey' for why the pause is needed
sendDelayMicroseconds :: Int
sendDelayMicroseconds = 5000
-- group together the low level X related items
-- needed to send a key event: (display, root window, plug window)
type NativeAccess = (X.Display, X.Window, X.Window)
-- | Send each named key (an X keysym name, converted via
-- 'X.stringToKeysym') to the plug window, with no modifiers.
-- ***TODO*** allow shift, control
-- ***UNTESTED***
send :: GTK.Socket -> [String] -> IO ()
send socket names = withNative socket $ \native ->
    mapM_ (sendOneKey native X.noModMask . X.stringToKeysym) names
-- | Send a single keysym, translating the GTK modifier list into an X
-- modifier mask.
sendKey :: GTK.Socket -> [GTK.Modifier] -> GTK.KeyVal -> IO ()
sendKey socket mods key = withNative socket $ \native ->
    sendOneKey native modMask keysym
  where
    -- the GTK key value is used directly as an X keysym (numeric
    -- conversion only)
    keysym = fromIntegral key :: X.KeySym
    -- NOTE(review): masks are combined with (+); a repeated modifier in
    -- the list would double-count -- (.|.) would be safer.  Confirm the
    -- callers never pass duplicates before changing it.
    modMask = foldl (\acc m -> acc + modToMask m) X.noModMask mods
-- | Send each character of the string as its own key event (keysym plus
-- shift state looked up via 'sym'), followed by the 'eol' key.
sendLine :: GTK.Socket -> String -> IO ()
sendLine socket str = withNative socket $ \native -> do
    mapM_ (uncurry (sendOneKey native) . sym) str
    sendOneKey native X.noModMask eol
-- | Run an action with the low-level X handles needed to send key
-- events: the display, the root window, and the X window of the plug
-- embedded in the given GTK socket.  The display is opened before and
-- closed after the action via 'bracket'.
withNative :: GTK.Socket -> (NativeAccess -> IO ()) -> IO ()
withNative socket run = bracket acquire release run
  where
    acquire :: IO NativeAccess
    acquire = do
        socketId <- GTK.socketGetId socket
        display <- X.openDisplay ""
        let nativeSkt = GTK.fromNativeWindowId socketId :: X.Window
        -- queryTree yields (root, parent, children); the plug is the
        -- first child.  NOTE(review): this pattern match is partial and
        -- fails if the socket has no plug embedded yet -- confirm callers
        -- only run after the plug exists.
        (root, _parent, plugId:_) <- XE.queryTree display nativeSkt
        return (display, root, plugId)
    release :: NativeAccess -> IO ()
    release (display, _root, _window) = X.closeDisplay display
-- | Send one key press/release event pair to the plug window.
-- Needs flush and delay to ensure that the event actually gets sent;
-- the delay appears to be required or the event queue is overloaded
-- and the urxvt ceases to respond to normal key presses.
sendOneKey :: NativeAccess -> X.KeyMask -> X.KeySym -> IO ()
sendOneKey (display, root, window) shift keysym = do
  keycode <- X.keysymToKeycode display keysym
  X.allocaXEvent $ \ke -> do
    XE.setEventType ke X.keyPress
    XE.setKeyEvent ke window root XE.none shift keycode True
    X.sendEvent display window True X.keyPressMask ke
    -- the same event structure is reused for the matching release
    XE.setEventType ke X.keyRelease
    X.sendEvent display window True X.keyReleaseMask ke
  X.flush display -- ensure the key is sent immediately
  threadDelay sendDelayMicroseconds -- must delay otherwise the event queue fails
{-
mapM_ (\n -> do
let k = X.keysymToString (n :: X.KeySym)
let c = fromIntegral n ::Int
putStrLn $ "SYM: " ++ (show (chr c)) ++ " -> " ++ (show k)
) $ take 256 [32..]
exitFailure
-}
-- | Map an ASCII character to the key mask and keysym needed to type it.
-- Since key codes are sent, the corresponding shift state is also needed
-- (e.g. the key codes for 2 and @ are the same); the mapping is probably
-- tied to the US-international layout.  Unsupported characters become a
-- space.
sym :: Char -> (X.KeyMask, X.KeySym)
sym c
  | c >= 'A' && c <= 'Z' = shifted [c]
  | c >= 'a' && c <= 'z' = plain [c]
  | c >= '0' && c <= '9' = plain [c]
  | otherwise =
      case lookup c punctuation of
        Just entry -> entry
        Nothing    -> plain "space"
  where
    shifted name = (X.shiftMask, X.stringToKeysym name)
    plain name   = (X.noModMask, X.stringToKeysym name)
    -- shift state and keysym name for every supported punctuation character
    punctuation =
      [ (' ',  plain   "space")
      , ('!',  shifted "exclam")
      , ('"',  shifted "quotedbl")
      , ('#',  shifted "numbersign")
      , ('$',  shifted "dollar")
      , ('%',  shifted "percent")
      , ('&',  shifted "ampersand")
      , ('\'', plain   "apostrophe")
      , ('(',  shifted "parenleft")
      , (')',  shifted "parenright")
      , ('*',  shifted "asterisk")
      , ('+',  shifted "plus")
      , (',',  plain   "comma")
      , ('-',  plain   "minus")
      , ('.',  plain   "period")
      , ('/',  plain   "slash")
      , (':',  shifted "colon")
      , (';',  plain   "semicolon")
      , ('<',  plain   "less")
      , ('=',  plain   "equal")
      , ('>',  shifted "greater")
      , ('?',  shifted "question")
      , ('@',  shifted "at")
      , ('[',  plain   "bracketleft")
      , ('\\', plain   "backslash")
      , (']',  plain   "bracketright")
      , ('^',  shifted "asciicircum")
      , ('_',  shifted "underscore")
      , ('`',  plain   "grave")
      , ('{',  shifted "braceleft")
      , ('|',  shifted "bar")
      , ('}',  shifted "braceright")
      , ('~',  shifted "asciitilde")
      ]
-- | Convert a GTK modifier to the corresponding X key mask.
-- Only the modifiers below are mapped; everything else (mouse buttons,
-- reserved bits, Super/Hyper/Meta, ...) falls through to 'X.noModMask'.
modToMask :: GTK.Modifier -> X.KeyMask
modToMask GTK.Shift = X.shiftMask
modToMask GTK.Lock = X.lockMask
modToMask GTK.Control = X.controlMask
modToMask GTK.Alt = X.mod1Mask
modToMask GTK.Alt2 = X.mod2Mask
modToMask GTK.Alt3 = X.mod3Mask
modToMask GTK.Alt4 = X.mod4Mask
modToMask GTK.Alt5 = X.mod5Mask
--modToMask GTK.Button1 = 0
--modToMask GTK.Button2 = 0
--modToMask GTK.Button3 = 0
--modToMask GTK.Button4 = 0
--modToMask GTK.Button5 = 0
--modToMask GTK.MODIFIER_RESERVED_13_MASK = 0
--modToMask GTK.MODIFIER_RESERVED_14_MASK = 0
--modToMask GTK.MODIFIER_RESERVED_15_MASK = 0
--modToMask GTK.MODIFIER_RESERVED_16_MASK = 0
--modToMask GTK.MODIFIER_RESERVED_17_MASK = 0
--modToMask GTK.MODIFIER_RESERVED_18_MASK = 0
--modToMask GTK.MODIFIER_RESERVED_19_MASK = 0
--modToMask GTK.MODIFIER_RESERVED_20_MASK = 0
--modToMask GTK.MODIFIER_RESERVED_21_MASK = 0
--modToMask GTK.MODIFIER_RESERVED_22_MASK = 0
--modToMask GTK.MODIFIER_RESERVED_23_MASK = 0
--modToMask GTK.MODIFIER_RESERVED_24_MASK = 0
--modToMask GTK.MODIFIER_RESERVED_25_MASK = 0
--modToMask GTK.Super = 0
--modToMask GTK.Hyper = 0
--modToMask GTK.Meta = 0
--modToMask GTK.MODIFIER_RESERVED_29_MASK = 0
--modToMask GTK.Release = 0
--modToMask GTK.ModifierMask = 0
-- default: everything not handled above has no X mask equivalent
modToMask _ = X.noModMask
| hxw/conlecterm | src/SendControl.hs | bsd-2-clause | 9,670 | 0 | 17 | 1,526 | 2,993 | 1,579 | 1,414 | 160 | 1 |
module FizzBuzzKata.Day6 (fizzbuzz) where
-- | Fizz-buzz with a twist: a number "fizzes" when it is divisible by 3
-- or contains the digit 3, and "buzzes" when divisible by 5 or containing
-- the digit 5; numbers that do both yield \"fizz!buzz!\", all others are
-- shown as themselves.
fizzbuzz :: [Int] -> [String]
fizzbuzz = map describe
    where
        describe :: Int -> String
        describe n = case (fizzy n, buzzy n) of
            (True , True ) -> "fizz!buzz!"
            (True , False) -> "fizz!"
            (False, True ) -> "buzz!"
            (False, False) -> show n
        fizzy :: Int -> Bool
        fizzy n = n `mod` 3 == 0 || '3' `elem` show n
        buzzy :: Int -> Bool
        buzzy n = n `mod` 5 == 0 || '5' `elem` show n
| Alex-Diez/haskell-tdd-kata | old-katas/src/FizzBuzzKata/Day6.hs | bsd-3-clause | 850 | 0 | 11 | 365 | 256 | 133 | 123 | 19 | 1 |
{-# LANGUAGE DeriveGeneric #-}
module Main(main) where
import Prelude hiding (lookup)
import Control.Arrow((>>>), (&&&))
import Control.Exception(try)
import Data.Aeson(decode, encode, FromJSON)
import Data.ByteString.Lazy.Char8(pack, unpack)
import Data.Foldable(Foldable, toList)
import Data.Function(on)
import Data.List(groupBy, intercalate, sort, sortBy)
import Data.Map(lookup, Map)
import Data.Maybe(fromMaybe)
import Data.Set(difference, Set)
import GHC.Generics(Generic)
import Network.HTTP(getRequest, getResponseBody, simpleHTTP)
import qualified Data.Map as Map
import qualified Data.Set as Set
-- | A URL rendered as a plain string.
type URL = String
-- | A set of model names.
type Models = Set String
-- | One entry of the oracle's JSON payload: an object with a "status" field.
data Status = Status { status :: String } deriving (Generic, Show)
instance FromJSON Status
-- | Fetch the current model statuses, diff them against the previously
-- saved snapshot, print the differences and persist the new snapshot.
main :: IO ()
main =
  do
    (newGoods, newBads) <- findGoodsAndBads
    (oldGoods, oldBads) <- findOldGoodsAndBads
    printDiffs newGoods newBads oldGoods oldBads
    modernize newGoods newBads
-- | Ask the status oracle for every model's status and split the model
-- names into (good, bad) sets, where good means the status equals
-- 'goodStatus'.
findGoodsAndBads :: IO (Models, Models)
findGoodsAndBads =
  do
    jsonStr <- simpleHTTP (getRequest statusOracleURL) >>= getResponseBody
    -- model name -> True iff its status is 'goodStatus'
    let modelToStatusMap = jsonStr |> (decodeAsMap >>> (fmap $ status >>> (== goodStatus)))
    -- invert: outcome (True/False) -> set of model names with that outcome
    let statusToModelMap = flipMap modelToStatusMap
    let extractFromBool = (flip lookup statusToModelMap) >>> (fromMaybe Set.empty)
    return (extractFromBool True, extractFromBool False)
  where
    flipMap = Map.toList >>> (scalaGroupBy snd) >>> groupedToMapPair >>> Map.fromList
    groupedToMapPair = fmap (\(a, bs) -> (a, bs |> ((fmap fst) >>> Set.fromList)))
    tee f = fmap $ f &&& id
    decodeAsMap :: String -> Map String Status
    decodeAsMap = pack >>> decode >>> (fromMaybe Map.empty)
    -- Scala-style groupBy: pairs each group's key with the group members
    scalaGroupBy f = sort >>> group >>> pair
      where
        sort = sortBy (compare `on` f)
        group = groupBy ((==) `on` f)
        pair = tee $ head >>> f
-- | Read the previously saved (good, bad) model sets from disk.
-- A missing or unreadable snapshot file is treated as an empty set.
findOldGoodsAndBads :: IO (Models, Models)
findOldGoodsAndBads =
  do
    goods <- slurpSetFrom goodsFile
    bads <- slurpSetFrom badsFile
    return (goods, bads)
  where
    safeReadFile :: String -> IO (Either IOError String)
    safeReadFile = readFile >>> try
    decodeAsStrArr :: String -> Maybe [String]
    decodeAsStrArr = pack >>> decode
    slurpSetFrom filepath =
      do
        jsonStrEither <- safeReadFile filepath
        -- fall back to an empty JSON array when the file cannot be read
        let jsonStr = either (const "[]") id jsonStrEither
        let modelSet = jsonStr |> (decodeAsStrArr >>> maybeFToSet)
        return modelSet
-- | Print which models started working and which stopped working since
-- the last snapshot, one model per line under each heading.
printDiffs :: Models -> Models -> Models -> Models -> IO ()
printDiffs newGoods newBads oldGoods oldBads =
  do
    printUpdate "Newly working models" newGoods oldGoods
    putStrLn ""
    printUpdate "Newly broken models" newBads oldBads
  where
    -- everything in 'news' that was not already in 'olds', sorted
    printUpdate label news olds = putStrLn entry
      where
        entry = label ++ ":\n" ++ diffs
        diffs = olds |> ((difference news) >>> Set.toList >>> sort >>> (intercalate "\n"))
-- | Persist the freshly computed good/bad model sets as JSON arrays so the
-- next run can diff against them.
modernize :: Models -> Models -> IO ()
modernize newGoods newBads =
  do
    persist goodsFile newGoods
    persist badsFile newBads
  where
    persist file = writeFile file . unpack . encode . Set.toList
-- | Endpoint reporting each model's compile status as JSON.
statusOracleURL :: URL
statusOracleURL = "http://localhost:9000/model/statuses.json"
-- | Snapshot file of models that were good on the previous run.
goodsFile :: FilePath
goodsFile = "goods.json"
-- | Snapshot file of models that were bad on the previous run.
badsFile :: FilePath
badsFile = "bads.json"
-- | The status string the oracle uses for a working model.
goodStatus :: String
goodStatus = "compiling"
-- | Collapse an optional container into a set; 'Nothing' becomes the
-- empty set.
maybeFToSet :: (Foldable f, Ord a) => Maybe (f a) -> Set a
maybeFToSet Nothing   = Set.empty
maybeFToSet (Just xs) = Set.fromList (toList xs)
-- | Reverse application: feed a value into a function, reading left to
-- right (like F#'s pipe operator).  Signature added; it matches the
-- previously inferred type, so no caller changes.
(|>) :: a -> (a -> b) -> b
a |> f = f a
| TheBizzle/GallyScraper-Haskell | src/Scraper.hs | bsd-3-clause | 3,532 | 5 | 17 | 730 | 1,139 | 607 | 532 | 88 | 1 |
{-# LANGUAGE EmptyDataDecls, TypeSynonymInstances #-}
{-# OPTIONS_GHC -fcontext-stack44 #-}
---------------------------------------------------------------------------
-- Generated by DB/Direct
---------------------------------------------------------------------------
module DB1.Integer_tbl where
import Database.HaskellDB.DBLayout
---------------------------------------------------------------------------
-- Table type
---------------------------------------------------------------------------
---------------------------------------------------------------------------
-- Table type
---------------------------------------------------------------------------
-- | Row type of the integer_tbl table: columns f01/f03 are nullable,
-- f02/f04 are not.
type Integer_tbl =
  (RecCons F01 (Expr (Maybe Integer))
    (RecCons F02 (Expr Integer)
      (RecCons F03 (Expr (Maybe Integer))
        (RecCons F04 (Expr Integer) RecNil))))
---------------------------------------------------------------------------
-- Table
---------------------------------------------------------------------------
-- | Base-table definition for "integer_tbl" with all four columns.
integer_tbl :: Table Integer_tbl
integer_tbl =
  baseTable "integer_tbl" $
    hdbMakeEntry F01 #
    hdbMakeEntry F02 #
    hdbMakeEntry F03 #
    hdbMakeEntry F04
---------------------------------------------------------------------------
-- Fields
---------------------------------------------------------------------------
---------------------------------------------------------------------------
-- F01 Field
---------------------------------------------------------------------------
-- | Tag for column f01 (nullable integer).
data F01 = F01
instance FieldTag F01 where fieldName _ = "f01"
f01 :: Attr F01 (Maybe Integer)
f01 = mkAttr F01
---------------------------------------------------------------------------
-- F02 Field
---------------------------------------------------------------------------
-- | Tag for column f02 (non-nullable integer).
data F02 = F02
instance FieldTag F02 where fieldName _ = "f02"
f02 :: Attr F02 Integer
f02 = mkAttr F02
---------------------------------------------------------------------------
-- F03 Field
---------------------------------------------------------------------------
-- | Tag for column f03 (nullable integer).
data F03 = F03
instance FieldTag F03 where fieldName _ = "f03"
f03 :: Attr F03 (Maybe Integer)
f03 = mkAttr F03
---------------------------------------------------------------------------
-- F04 Field
---------------------------------------------------------------------------
-- | Tag for column f04 (non-nullable integer).
data F04 = F04
instance FieldTag F04 where fieldName _ = "f04"
f04 :: Attr F04 Integer
f04 = mkAttr F04
| m4dc4p/haskelldb | examples/DB1/Integer_tbl.hs | bsd-3-clause | 2,318 | 0 | 14 | 279 | 340 | 186 | 154 | 31 | 1 |
module Handler.Markdown where
import Import
import Text.Markdown
import Text.Blaze.Html.Renderer.Text (renderHtml)
import Data.Text.Lazy (fromStrict)
-- | PUT handler: render the "markdown" post parameter to HTML and reply
-- with a JSON object of the form {"html": ...}.  Responds with a 400
-- (invalid arguments) when the parameter is missing.
putMarkdownR :: Handler Value
putMarkdownR = lookupPostParam "markdown" >>= maybe missing respond
  where
    missing = invalidArgs ["No Markdown provided"]
    respond input = return $ object
        [ "html" .= renderHtml (markdown def $ fromStrict input)
        ]
module Main where
import FP
import Examples
-- | Program entry point: delegate to the examples driver.
main :: IO ()
main = examplesMain
| davdar/quals | src/Main.hs | bsd-3-clause | 80 | 0 | 6 | 16 | 25 | 15 | 10 | 5 | 1 |
-- | A streaming XML parser, using a method known as SAX. SAX isn't really a
-- standard, but an implementation, so it's just an \"SAX-like\" parser.
-- This module allows you parse an XML document without having to evaluate
-- it as a whole. This is needed for protocols like jabber, which use xml
-- streams for communication.
module Text.XML.HaXml.SAX
( SaxElement(..)
, saxParse
) where
import Text.XML.HaXml.Types
import Text.XML.HaXml.Parse
import Text.XML.HaXml.Lex
import Text.ParserCombinators.PolyState
-- | A single event produced by the SAX-style parser.
data SaxElement
    = SaxDocTypeDecl DocTypeDecl
    -- ^ A doctype declaration occured(\<!DOCTYPE\>)
    | SaxProcessingInstruction ProcessingInstruction
    -- ^ A processing instruction occured (\<??\>)
    | SaxComment String -- ^ A comment occured (\<!-- --\>)
    | SaxElementOpen Name [Attribute] -- ^ An element was opened (\<\>)
    | SaxElementClose Name -- ^ An element was closed (\<\/\>)
    | SaxElementTag Name [Attribute]
    -- ^ An element without content occured (\<\/\>)
    | SaxCharData CharData -- ^ Some string data occured
    | SaxReference Reference -- ^ A reference occured
-- | @saxParse file content@ takes a filename and the string content of that
-- file and generates a stream of @SaxElement@s. If an error occurs, the
-- parsing stops and a string is returned using the @Maybe@ type.
-- The element list is produced lazily (see 'parseStream'), so events are
-- available while the rest of the input is still being consumed.
saxParse :: String -- ^ The filename
         -> String -- ^ The content of the file
         -> ([SaxElement],Maybe String)
         -- ^ A tuple of the parsed elements and @Nothing@, if no
         -- error occured, or @Just@ @String@ if an error occured.
saxParse file content = parseStream sax emptySTs
                                    (xmlLex file content)
-- Repeatedly run parser @p@ over the token stream, threading its state.
-- The result list is built lazily; on the first parse error the list ends
-- and the error message appears in the second component of the pair.
parseStream :: Parser s t a -> s -> [t] -> ([a], Maybe String)
parseStream _ _ [] = ([],Nothing)
parseStream p state toks = case runParser p state toks of
    (Left err, _, _) -> ([],Just err)
    (Right res, nstate, rest) -> (res:moreres, err)
        where (moreres,err) = parseStream p nstate rest
-- | Parse one SAX event, trying each alternative in the listed order and
-- decorating a failure with a summary of what was expected.
sax :: XParser SaxElement
sax = oneOf [ saxelementopen
            , saxelementclose
            , saxprocessinginstruction
            , saxcomment
            , saxdoctypedecl
            , saxreference
            , saxchardata
            ]
      `adjustErr` (++("\nLooking for a SAX event:\n"
                    ++"  elem-open, elem-close, PI, comment, DTD, ref, or chardata"))
-- | Parse an opening or self-closing element tag:
-- "/>" yields a complete (empty) element, ">" just opens one.
saxelementopen :: XParser SaxElement
saxelementopen = do
  tok TokAnyOpen
  (ElemTag n as) <- elemtag
  (( do tok TokEndClose
        return (SaxElementTag n as)) `onFail`
   ( do tok TokAnyClose
        return (SaxElementOpen n as))
   `onFail` fail "missing > or /> in element tag")
-- | Parse a closing tag (\"</name>\") into a 'SaxElementClose' event.
saxelementclose :: XParser SaxElement
saxelementclose = do
  tok TokEndOpen
  closed <- fmap SaxElementClose name
  tok TokAnyClose
  return closed
-- | Parse an XML comment into a 'SaxComment' event.
saxcomment :: XParser SaxElement
saxcomment = fmap SaxComment comment
-- | Parse character data, preferring a CDATA section over plain text.
saxchardata :: XParser SaxElement
saxchardata = fmap SaxCharData (cdsect `onFail` chardata)
-- | Parse an entity/character reference into a 'SaxReference' event.
saxreference :: XParser SaxElement
saxreference = fmap SaxReference reference
-- | Parse a doctype declaration into a 'SaxDocTypeDecl' event.
saxdoctypedecl :: XParser SaxElement
saxdoctypedecl = fmap SaxDocTypeDecl doctypedecl
-- | Parse a processing instruction into a 'SaxProcessingInstruction' event.
saxprocessinginstruction :: XParser SaxElement
saxprocessinginstruction = fmap SaxProcessingInstruction processinginstruction
| FranklinChen/hugs98-plus-Sep2006 | packages/HaXml/src/Text/XML/HaXml/SAX.hs | bsd-3-clause | 3,272 | 16 | 15 | 690 | 670 | 371 | 299 | 65 | 2 |
module ChineseCheckers.ChineseCheckers where
import ChineseCheckers.Table
import Haste.Graphics.Canvas
-- | Look up the content of the square at the given coordinate.
-- Partial: raises an error when the coordinate is not on the table.
squareContent :: Table -> Coord -> Content
squareContent [] _ = error "Table does not contain coordinate"
squareContent (t:ts) (x,y) = case t of
            Square content _ coord | check coord -> content
                                   | otherwise -> squareContent ts (x,y)
    where check (x1,y1) = x1 == x && y1 == y
-- | Put a checker with the given content on the square at the given
-- coordinate.  Raises a descriptive error when the coordinate is not on
-- the table (previously a bare pattern-match failure on the empty list).
putPiece :: Table -> Content -> Coord -> Table
putPiece [] _ _ = error "putPiece: Table does not contain coordinate"
putPiece (t:ts) c (x,y) = case t of
            (Square _ color coord) | check coord -> Square c color coord : ts
                                   | otherwise -> t : putPiece ts c (x,y)
    where check (x1,y1) = x1 == x && y1 == y
-- | Empty the square at the given coordinate, discarding any checker on it.
-- Raises a descriptive error when the coordinate is not on the table
-- (previously a bare pattern-match failure on the empty list).
removePiece :: Table -> Coord -> Table
removePiece [] _ = error "removePiece: Table does not contain coordinate"
removePiece (t:ts) (x,y) = case t of
            (Square _ color coord) | check coord -> Square Empty color coord : ts
                                   | otherwise -> t : removePiece ts (x,y)
    where check (x1,y1) = x1 == x && y1 == y
-- | Move the piece from its original position to the new one: the source
-- content is copied to the target square, then the source is emptied.
movePiece :: Table -> Coord -> Coord -> Table
movePiece t (x1,y1) (x2,y2) = removePiece (putPiece t content (x2,y2)) (x1,y1)
    where content = squareContent t (x1,y1)
-- | Remove every checker of the given colour from the table; the squares
-- themselves keep their own colour.
removePlayer :: Color -> Table -> Table
removePlayer c = map isPlayer
    where isPlayer (Square content col (x,y)) = case content of
            Empty -> Square content col (x,y)
            Piece color | color == c -> Square Empty col (x,y)
                        | otherwise -> Square content col (x,y)
-- | Remove all checkers of every listed colour from the table.
removePlayers :: [Color] -> Table -> Table
removePlayers colors table = foldl (\acc c -> removePlayer c acc) table colors
{-
isReachable :: Coord -> Table -> Table
isReachable c t = filter (isReachable' c) t
isReachable' :: Coord -> Square -> Bool
isReachable' (x,y) (Square _ _ (x1,y1)) = (abs(x-x1) == 2 && abs(y-y1) == 0) || (abs(x-x1) == 1 && abs(y-y1) == 1) || (abs(x-x1) == 2 && abs(y-y1) == 2) || (abs(x-x1) == 4 && abs(y-y1) == 0)
-}
-- | Given a coordinate, canMove generates a list of all possible movable
-- squares from that position: empty adjacent squares plus empty squares
-- one jump away (the jump requires the in-between square to be occupied).
canMove :: Coord -> Table -> Table
canMove (x,y) t = filter (checkusPrimus (x,y)) $ filter isEmpty t
    where checkusPrimus (x,y) (Square c _ (x1,y1)) | (x+4) == x1 && y1 == y && (content (x+2,y) /= Empty) = True
                                                   | (x-4) == x1 && y1 == y && (content (x-2,y) /= Empty) = True
                                                   | (x-2) == x1 && (y+2) == y1 && (content (x-1,y+1) /= Empty) = True
                                                   | (x+2) == x1 && (y+2) == y1 && (content (x+1,y+1) /= Empty) = True
                                                   | (x+2) == x1 && (y-2) == y1 && (content (x+1,y-1) /= Empty) = True
                                                   | (x-2) == x1 && (y-2) == y1 && (content (x-1,y-1) /= Empty) = True
                                                   | abs(x-x1) == 2 && y == y1 = True
                                                   | abs(x-x1) == 1 && abs(y-y1) == 1 = True
--                                                 | x == x1 && abs(y-y1) == 4 = False
                                                   | otherwise = False
          content = squareContent t
-- | True when the step from the first to the second coordinate was a jump
-- over another piece, in which case the same player may move again.
moveAgain :: Coord -> Coord -> Bool
moveAgain (fromX, fromY) (toX, toY) = horizontalJump || diagonalJump
    where horizontalJump = abs (toX - fromX) == 4 && toY == fromY
          diagonalJump   = abs (toX - fromX) == 2 && abs (toY - fromY) == 2
-- | Same as movePlayer but total: returns 'Nothing' when the destination
-- is not reachable from the source square (see 'canMove').
movePlayer' :: Coord -> Coord -> Table -> Maybe Table
movePlayer' c1 c2 t | elem c2 $ map coordinates (canMove c1 t) = Just $ movePiece t c1 c2
                    | otherwise = Nothing
-- | Move a piece between two coordinates.
-- Partial: raises an error when the destination is not reachable.
movePlayer :: Coord -> Coord -> Table -> Table
movePlayer c1 c2 t | elem c2 $ map coordinates (canMove c1 t) = movePiece t c1 c2
                   | otherwise = error "Can't move"
-- | Returns the coordinates of a square.
coordinates :: Square -> Coord
coordinates (Square _ _ coord) = coord
-- | Takes a square and checks if the piece on it is "home", meaning the
-- piece colour matches the square colour.  Empty squares are never home.
pieceHome :: Square -> Bool
pieceHome (Square content col _) = case content of
            Piece c -> c == col
            _ -> False
-- | Checks if a player has all pieces "home".
playerHome :: Color -> Table -> Bool
playerHome c1 t = all pieceHome $ filter playerOnly t
    where playerOnly (Square cont _ _) = case cont of
            Piece col -> col == c1
            -- NOTE(review): 'otherwise' below is a wildcard pattern
            -- binding, not the Prelude guard.
            otherwise -> False
-- | Game is over when every piece on the table has reached its "home".
gameOver :: Table -> Bool
gameOver = all pieceHome
-- | Checks if a square has Empty content (holds no checker).
isEmpty :: Square -> Bool
isEmpty c = case c of
            (Square Empty _ _) -> True
            -- NOTE(review): 'otherwise' is a wildcard binding here.
            otherwise -> False
-- | Takes the current state of the game and an input coordinate,
-- performing a player action.  The first click selects one of the current
-- player's pieces (stored in fromCoord); a second click tries to move the
-- selected piece there.  After a jump the same player keeps the turn,
-- otherwise play rotates to the next player.  Illegal input leaves the
-- state unchanged (apart from the selection).
playerAction :: GameState -> Coord -> GameState
playerAction gs c1 = case fromCoord gs of
            -- no piece selected yet: try to select c1
            Nothing -> GameState {gameTable = gameTable gs
                        , currentPlayer = currentPlayer gs
                        , players = players gs
                        , fromCoord = checkValid c1
                        , playerMoveAgain = playerMoveAgain gs}
            -- a piece at c2 is selected: try to move it to c1
            Just c2 -> case action gs c2 c1 (playerMoveAgain gs) of
                        (Nothing,_) -> GameState {gameTable = gameTable gs
                                        , currentPlayer = currentPlayer gs
                                        , players = players gs
                                        , fromCoord = fromCoord gs
                                        , playerMoveAgain = playerMoveAgain gs}
                        (Just table,b) -> if b
                                           then GameState {gameTable = table
                                                 , currentPlayer = fst . head $ players gs
                                                 , players = players gs
                                                 , fromCoord = Just c1
                                                 , playerMoveAgain = b}
                                           else GameState {gameTable = table
                                                 , currentPlayer = fst . head . tail $ players gs
                                                 , players = tail (players gs) ++ [head $ players gs]
                                                 , fromCoord = Nothing
                                                 , playerMoveAgain = b}
    where
        -- only the current player's own pieces can be selected
        checkValid c | checkPlayer (color $ head (players gs)) (squareContent (gameTable gs) c) = Just c
                     | otherwise = Nothing
        color (s,c) = c
-- | Helper function for playerAction: validate and perform a move from c1
-- to c2 for the current player.  The Bool says whether the player is in a
-- jump sequence, in which case only a further jump is allowed.  Returns
-- the new table (when legal) plus whether the player may move again.
action :: GameState -> Coord -> Coord -> Bool -> (Maybe Table, Bool)
action gs c1 c2 b = case checkPlayer (color $ head (players gs)) (squareContent (gameTable gs) c1) of
            False -> (Nothing,False)
            True | b && moveAgain c1 c2 -> (movePlayer' c1 c2 (gameTable gs), moveAgain c1 c2)
                 | b && not (moveAgain c1 c2) -> (Nothing, False)
                 | otherwise -> (movePlayer' c1 c2 (gameTable gs), moveAgain c1 c2)
--movePlayer' c1 c2 (gameTable gs)
    where color (s,c) = c
-- | All six player colours.
allPlayer :: [Color]
allPlayer = [blue,orange,purple,green,red,yellow]
{-|
    Given a list of player names, initGame associates each player
    with a color and generates the inital game state.
    Only 2, 4 or 6 players are supported; any other count is an error.
-}
initGame :: [String] -> GameState
initGame players = case (length players) of
            2 -> create (zipWith mkPlayer players [blue,orange]) 2
            4 -> create (zipWith mkPlayer players [blue,red,purple,orange]) 4
            6 -> create (zipWith mkPlayer players [blue,red,purple,green,orange,yellow]) 6
            otherwise -> error "Not correct number of players for game to start"
    where mkPlayer a b = (a,b)
          create p i = createProperGame p i
-- | Advance to the next player's turn: rotate the player list by one,
-- clear the selected square and the move-again flag; the table is kept.
rotatePlayer :: GameState -> GameState
rotatePlayer gs = GameState {gameTable = gameTable gs
                        , currentPlayer = fst . head . tail $ players gs
                        , players = tail (players gs) ++ [head $ players gs]
                        , fromCoord = Nothing
                        , playerMoveAgain = False}
-- | Build the initial state for 2, 4 or 6 players: colours that are not
-- in play are removed from the full start table and the first listed
-- player takes the first turn.
-- Partial: any other player count is a pattern-match failure.
createProperGame :: [(String,Color)] -> Int -> GameState
createProperGame p i = case i of
            2 -> GameState {gameTable = removePlayers [red,purple,green,yellow] startTable
                        , currentPlayer = fst . head $ p
                        , players = p
                        , fromCoord = Nothing
                        , playerMoveAgain = False}
            4 -> GameState {gameTable = removePlayers [green,yellow] startTable
                        , currentPlayer = fst . head $ p
                        , players = p
                        , fromCoord = Nothing
                        , playerMoveAgain = False}
            6 -> GameState {gameTable = startTable
                        , currentPlayer = fst . head $ p
                        , players = p
                        , fromCoord = Nothing
                        , playerMoveAgain = False}
-- | Does the content belong to the given player, i.e. is it a piece of
-- the same colour?  An empty square belongs to nobody.
checkPlayer :: Color -> Content -> Bool
checkPlayer wanted content = case content of
    Piece actual -> actual == wanted
    Empty        -> False
{-|
The following functions are only used for testing purposes
They tests the game logic by letting the programmer play the game from stdin/stdout
-}
{-
-- | printing Color
putColor :: Color -> IO ()
putColor = putChar . head . show
-- | Priting cell
putSquare :: Square -> IO ()
putSquare (Square content c _) = case content of
Empty -> putColor c
(Piece color) -> putColor color
-- | Get coordinates from stdin
getCoord :: IO (Coord,Coord)
getCoord = do
putStrLn "From which coordinate do you want to move?"
coord <- getLine
putStrLn "To which coordinate do you want to move?"
coord2 <- getLine
return (read coord :: (Int,Int), read coord2 :: (Int,Int))
-- | Get coordinates and moves a piece belonging to the current player
playerMove :: Color -> Table -> Bool -> IO (Table,Bool)
playerMove col t b = do
(c1,c2) <- getCoord
case (checkPlayer col (squareContent t c1)) of
False -> error "Not your piece!!"
True | (b && moveAgain c1 c2) -> return $ (movePlayer c1 c2 t, moveAgain c1 c2)
| (b && not (moveAgain c1 c2)) -> error "Illegal move!"
| otherwise -> return $ (movePlayer c1 c2 t, moveAgain c1 c2)
-- | Takes a list of player names and creates a new game
startGame :: [String] -> IO ()
startGame player = case (length player) of
2 -> startGame' startTable $ zipWith mkPlayer player [Blue,Red]
4 -> startGame' startTable $ zipWith mkPlayer player [Blue,Red,Pink,Green]
6 -> startGame' startTable $ zipWith mkPlayer player [Blue,Red,Pink,Green,Black,Yellow]
otherwise -> error "Not correct number of players for game to start"
where mkPlayer a b = (a,b)
-- | Game loop
startGame' :: Table -> [(String,Color)] -> IO ()
startGame' t ((s,col):xs) | gameOver t = putStrLn "GAME OVER!"
| otherwise = do
putStrLn $ s ++ "s speltur"
(newTable,again) <- playerMove col t False
putStrLn (show newTable)
case again of
True -> jumpAgain newTable $ ((s,col):xs)
False -> startGame' newTable $ xs ++ [(s,col)]
-- | Help function for the game loop, for when a player can move again
jumpAgain :: Table -> [(String,Color)] -> IO ()
jumpAgain t ((s,col):xs) = do
putStrLn $ s ++ "s speltur!"
(newTable,again) <- playerMove col t True
case again of
True -> jumpAgain newTable ((s,col):xs)
_ -> startGame' newTable $ xs ++ [(s,col)]
-}
| DATx02-16-14/Hastings | src/ChineseCheckers/ChineseCheckers.hs | bsd-3-clause | 13,687 | 0 | 17 | 6,032 | 2,808 | 1,479 | 1,329 | 136 | 4 |
import System.Directory
import System.IO
import Data.List
import Control.Monad
-- | Root of the photo library, relative to the working directory.
path :: String
path = "./images/photothèque"

-- | Prefix a file name with the photo-library root.
addPath :: String -> String
addPath name = concat [path, "/", name]
-- | True for any name except the "." and ".." directory entries.
isNotDots :: String -> Bool
isNotDots name = name `notElem` [".", ".."]
-- | True for any name except the per-directory "referencePage" file.
isNotRef :: String -> Bool
isNotRef = (/= "referencePage")
-- | Is this entry a real directory under 'path' (excluding "." and "..")?
isDirectory :: String -> IO Bool
isDirectory name = do
    isDir <- doesDirectoryExist (addPath name)
    return (isNotDots name && isDir)
-- | Is this entry a plain file, excluding ".", ".." and "referencePage"?
-- The path is interpreted relative to the current working directory.
isFile :: String -> IO Bool
isFile name = do
    exists <- doesFileExist name
    return (isNotDots name && isNotRef name && exists)
-- | For a given folder, return its associated html page reference (the
-- contents of the "referencePage" file minus the trailing newline) and
-- all its picture files.  Temporarily changes the working directory into
-- the folder and back up one level.
getPicsName :: FilePath -> IO ((FilePath, String), [FilePath])
getPicsName p =
    do setCurrentDirectory p
       ref <- readFile "referencePage"
       ctns <- getDirectoryContents "."
       files <- filterM isFile ctns
       setCurrentDirectory ".."
       -- 'init' drops the last character (the trailing newline); this is
       -- what the previous reverse.tail.reverse did, just clearer.
       return ((p, init ref), files)
-- | Pretty-print a list to the file behind handle @h@, one element per
-- line: "[" opens the list, every element is indented, all but the last
-- get a trailing comma, and " ]" closes it.
hPrettyList :: Show a => [a] -> Handle -> IO ()
hPrettyList [] h = hPutStrLn h "[]"
hPrettyList xs h =
  do hPutStr h "["
     -- mapM_ instead of mapM: the [()] result was being discarded anyway
     mapM_ (hPutStrLn h) (liner xs)
     hPutStrLn h " ]"
  where liner [] = []
        liner [e] = [" " ++ show e]
        liner (e:es) = (" " ++ show e ++ ", ") : liner es
-- | Flatten one directory record into a triple per picture:
-- (picture, directory, reference page).
expand :: ((t1, t2), [t]) -> [(t, t1, t2)]
expand ((dir, ref), pics) = [ (pic, dir, ref) | pic <- pics ]
-- | Scan the photo library, pair every picture with its directory's
-- reference page, and generate src/ListOfPics.elm containing both the
-- per-directory listing (picList) and the flattened one (picList2).
main :: IO ()
main = do sd <- getCurrentDirectory
          ctns <- getDirectoryContents path
          direc <- filterM isDirectory ctns
          setCurrentDirectory path
          names <- mapM getPicsName direc
          -- one (picture, directory, referencePage) triple per picture
          let names2 = concat $ map expand names
          --putStrLn (show names2)
          setCurrentDirectory (sd ++ "/src")
          handle <- openFile "ListOfPics.elm" WriteMode
          hPutStrLn handle "module ListOfPics where"
          hPutStrLn handle ""
          hPutStrLn handle "import Dict exposing (..)"
          hPutStrLn handle ""
          hPutStr handle "picList = "
          hPrettyList names handle
          hPutStrLn handle ""
          hPutStr handle "picList2 = "
          hPrettyList names2 handle
          hClose handle
          putStrLn $ "Found " ++ (show.length $ names2) ++ " pictures in "
                     ++ (show.length $ names) ++ " directories"
          setCurrentDirectory sd
          return ()
{-# LANGUAGE NoMonomorphismRestriction #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Curve.Rect
-- Copyright : (c) 2011 Michael Sloan
-- License : BSD-style (see the LICENSE file)
--
-- Maintainer : Michael Sloan <mgsloan@gmail.com>
-- Stability : experimental
-- Portability : GHC only
--
-- Functions to deal with the specific case of (Interval a, Interval a)
module Data.Curve.Rect where
import Data.Curve.Interval
import Data.Curve.Linear
import Data.Curve.Util
import Data.VectorSpace
import Numeric.Rounding
-- | Axis-aligned rectangle with Double extents.
type DRect = (Interval Double, Interval Double)
-- | Axis-aligned rectangle with Float extents.
type FRect = (Interval Float, Interval Float)
-- | The rectangle that is empty on both axes.
emptyR :: (Precision a) => (Interval a, Interval a)
emptyR = (empty, empty)
-- | Centre point of the rectangle (midpoint of each axis interval).
middle :: (Precision a) => (Interval a, Interval a) -> (a, a)
middle = mapT midpoint
-- | Corner of the rectangle, clockwise: 0 = (x1,y1), 1 = (x2,y1),
-- 2 = (x2,y2), 3 = (x1,y2); any other index wraps around modulo 4.
corner :: (Integral a, Precision b) => a -> (Interval b, Interval b) -> (b, b)
corner 0 (I x1 _, I y1 _) = (runDown x1, runDown y1)
corner 1 (I _ x2, I y1 _) = (runUp x2, runDown y1)
corner 2 (I _ x2, I _ y2) = (runUp x2, runUp y2)
corner 3 (I x1 x2, I _ y2) = (runDown x1, runUp y2)
corner x r = corner (x `mod` 4) r
-- TODO: place this in a Line.hs module orso
-- | Line segment (one 'Linear' per axis) between two points.
lineBetween :: (a, a) -> (a, a) -> (Linear a, Linear a)
lineBetween (x1, y1) (x2, y2) = (Linear x1 x2, Linear y1 y2)
-- | Side i of the rectangle, from corner i to corner i+1 (clockwise).
rside :: (Integral a, Precision b) => a -> (Interval b, Interval b) -> (Linear b, Linear b)
rside i r = lineBetween (corner i r) (corner (i + 1) r)
-- | Smallest rectangle containing all the given points.
boundPoints :: (Precision a, Precision a1) => [(a1, a)] -> (Interval a1, Interval a)
boundPoints xs = (fromList $ map fst xs, fromList $ map snd xs)
-- | Rectangle spanned by two opposite corners.
fromCorners :: (Precision a1, Precision a) => (a1, a) -> (a1, a) -> (Interval a1, Interval a)
fromCorners c1 c2 = boundPoints [c1, c2]
-- | Rectangle from a corner point and a size vector (corner + size).
rect :: (Precision a, Precision a1, AdditiveGroup a1, AdditiveGroup a) =>
        (a1, a) -> (a1, a) -> (Interval a1, Interval a)
rect tl sz = fromCorners tl $ tl ^+^ sz
-- | Apply the interval 'clamp' per axis (clamp a point into the rectangle).
clampR :: (Precision a) => (Interval a, Interval a) -> (a, a) -> (a, a)
clampR = zipT clamp
-- | Apply the interval 'extend' per axis (grow to also cover the point).
extendR :: (Precision a) => (Interval a, Interval a) -> (a, a) -> (Interval a, Interval a)
extendR = zipT extend
-- | Extent (size) of the rectangle on each axis.
extentR :: (Precision a) => (Interval a, Interval a) -> (a, a)
extentR = mapT extent
-- | Expand each axis by the given amounts / translate by a vector.
expandR, moveR :: (Precision a) => (a, a) -> (Interval a, Interval a) -> (Interval a, Interval a)
expandR = zipT expand
moveR v = zipT (+) (mapT singleton v)
-- | Degenerate rectangle containing exactly the one given point.
singletonR :: (Precision a) => (a, a) -> (Interval a, Interval a)
singletonR = mapT singleton
| mgsloan/curve | Data/Curve/Rect.hs | bsd-3-clause | 2,533 | 0 | 9 | 503 | 1,073 | 585 | 488 | 41 | 1 |
{-# LANGUAGE TupleSections #-}
module Sgf.List
( BState (..)
, ZipList' (..)
, Index
, indBase
, foldrM
, foldlM
, elemByInd
, elemsByInds
, elemsByNotInds
, indByElem
, indsByElems1
, indsByElems
, indsByNotElems
, elemsOrder1
, elemsOrder
, dropWhileEnd
, splitBy
, foldrMerge
, concatSeq
, zipWith'
, zipMap
, zipFoldM
, listEq
, transp
, shuffleList
)
where
import Data.Maybe (fromMaybe)
import Data.Monoid
import qualified Data.Foldable as F
import qualified Data.Traversable as T
import Control.Applicative
import Control.Monad.State
import Control.Monad.Reader
import System.Random (RandomGen, randomRs)
-- Reimplementation of list indexing part of Data.List using Backward State
-- monad. And some other useful functions for lists using State monads.
--
-- Monadic right fold over a list: the monadic action for the last element
-- runs first, then effects proceed towards the front of the list.
foldrM :: (Monad m) => (a -> b -> m b) -> b -> [a] -> m b
foldrM g z = foldr (\x rest -> rest >>= g x) (return z)
-- Monadic left fold over a list: effects run front to back, threading the
-- accumulator through each step.
foldlM :: (Monad m) => (b -> a -> m b) -> b -> [a] -> m b
foldlM g = go
  where
    go acc []       = return acc
    go acc (x : xs) = g acc x >>= \acc' -> go acc' xs
-- Backward state monad from "The essence of functional programming" by Philip
-- Wadler: the state flows right-to-left through (>>=), i.e. the second
-- computation receives the incoming state and the first one receives the
-- state the second produced.  The lazy let-bindings tie that knot.
newtype BState s a = BState {runBState :: s -> (a, s)}
-- Functor/Applicative instances are required superclasses of Monad since
-- GHC 7.10 (the Applicative-Monad Proposal); without them this module no
-- longer compiles on modern GHC.  They follow the same backward threading
-- as (>>=).
instance Functor (BState s) where
    fmap f (BState m) = BState $ \s ->
        let (x, s') = m s in (f x, s')
instance Applicative (BState s) where
    pure x = BState (\s -> (x, s))
    BState mf <*> BState mx = BState $ \s2 ->
        let (f, s0) = mf s1
            (x, s1) = mx s2
        in  (f x, s0)
instance Monad (BState s) where
    return = pure
    BState m >>= f = BState $ \s2 ->
        let (x, s0)   = m s1
            BState m' = f x
            (x', s1)  = m' s2
        in  (x', s0)
-- Slightly different ZipList: it uses my zipWith' instead of zipWith. Note,
-- that it can't be made an instance of Applicative.
newtype ZipList' a = ZipList' {getZipList' :: [a]}
    deriving (Show)
-- Since base-4.11 'Semigroup' is a superclass of 'Monoid', so the zippy
-- append has to live in '(<>)'; 'mappend' just delegates.  The constraint
-- stays 'Monoid a' (not merely 'Semigroup a') because the elements are
-- combined with 'mappend'.
instance Monoid a => Semigroup (ZipList' a) where
    (ZipList' xs) <> (ZipList' ys)
                  = ZipList' (zipWith' mappend xs ys)
instance Monoid a => Monoid (ZipList' a) where
    mempty  = ZipList' []
    mappend = (<>)
-- Index list.
--
-- Indexes are plain Ints; see indBase for where counting starts.
type Index = Int
-- Start index (lists here are indexed from 1, not 0).
indBase :: Index
indBase = 1
-- FIXME: For v3. Make elemsByInds and indsByElems preserving indexes (keys)
-- order. I.e. elements in resulting list must be on the positions
-- corresponding to their keys. Implement this using zipper. This also should
-- make unnecessary construction like
--
--      \xs -> order >>= flip elemByInd xs
--
-- and speed it up dramatically.
-- FIXME: Versions, which work for several indexes (keys), and may have
-- several results (e.g. indsByElems) should return in Alternative. This allows
-- to not provide special version for infinity lists, because Alternative may
-- be defined in such way, that (<|>) ignores 2nd argument. So, will function
-- work on infinity list or not will be completely defined by Alternative
-- instance definition.
-- Folding functions for use in State monads for indexing list.
--
-- If the whole key list ks satisfies predicate p (hence the name: the
-- entire key list is tested at once), prepend x to the results zs;
-- otherwise leave the accumulator untouched.  The key list is passed
-- through unchanged, so a fold built on this has no fixed point and will
-- not finish on an infinity list.
allElems :: ([a] -> Bool) -> [a] -> b -> [b] -> ([b], [a])
allElems p ks x zs =
    if p ks
        then (x : zs, ks)
        else (zs, ks)
-- If at least one key in ks satisfies predicate p, prepend x to the
-- results zs and delete every matching key from ks, so each key can match
-- at most once.  Unlike allElems this has a fixed point -- once the key
-- list is exhausted the accumulator is discarded and nothing more is
-- collected -- which is what lets folds built on it work on an infinity
-- list.
anyElems :: (a -> Bool) -> [a] -> b -> [b] -> ([b], [a])
anyElems _ [] _ _ = ([], []) -- fixed point.
anyElems p ks x zs
    | null hits = (zs, ks)
    | otherwise = (x : zs, filter (not . p) ks)
  where
    hits = filter p ks
-- Index list by right folding it inside Backward State monad.
--
-- I assume, that keys (or indexes) list is _not_ sorted and for every key
-- (index) i should check every element of keys (indexes) list. Hence, if keys
-- (indexes) list is infinity and key (index) does not present in it (or
-- already have been found and deleted), all below functions will search
-- indefinitely. Though, some of them can work on infinity input list.
--
-- Find all elements with specified indexes. Works on infinity input list.
elemsByInds :: [Index] -> [a] -> [a]
elemsByInds js = fst . flip runBState (indBase, js) . elemsByIndsM
-- The state is (index of the current element, indexes still wanted); found
-- elements accumulate in the result list.
elemsByIndsM :: [a] -> BState (Index, [Index]) [a]
elemsByIndsM = foldrM (\x -> BState . f x) []
  where
    -- fmap on a pair maps its second component only, so (s + 1, ) repacks
    -- the key list returned by anyElems together with the next index.
    f x zs (s, js) = fmap (s + 1, ) $ anyElems (s ==) js x zs
-- Complement to elemsByInds. Find all elements with indexes _not_ in the
-- supplied list. Does not work on infinity input list.
elemsByNotInds :: [Index] -> [a] -> [a]
elemsByNotInds js = fst . flip runBState (indBase, js) . elemsByNotIndsM
elemsByNotIndsM :: [a] -> BState (Index, [Index]) [a]
elemsByNotIndsM = foldrM (\x -> BState . f x) []
  where
    -- Keep x only when its index s is absent from js.  Built on allElems,
    -- which never consumes keys -- that is why there is no fixed point and
    -- this cannot stop early on an infinity list.
    f x zs (s, js) = fmap (s + 1, ) $ allElems (notElem s) js x zs
-- Reverse of elemByInds. Find _first_ index of all elements in the supplied
-- list. Works on infinity input list (each key is deleted after its first
-- match, so the fold reaches anyElems' fixed point once every key has been
-- seen).
indsByElems1 :: (a -> a -> Bool)
             -> [a] -- Elements, which indexes i'm searching for.
             -> [a] -- List, where i'm searching for.
             -> [Index]
indsByElems1 eq ks = fst . flip runBState (indBase, ks) . indsByElemsM1 eq
indsByElemsM1 :: (a -> a -> Bool) -> [a] -> BState (Index, [a]) [Index]
indsByElemsM1 eq = foldrM (\x -> BState . f x) []
  where
    -- Here the collected result is the current index s, not the element.
    f x zs (s, ks) = fmap (s + 1, ) $ anyElems (x `eq`) ks s zs
-- Find _all_ indexes of all elements in the supplied list. Does not work on
-- infinity input list.
indsByElems :: (a -> a -> Bool)
            -> [a] -- Elements, which indexes i'm searching for.
            -> [a] -- List, where i'm searching for.
            -> [Index]
indsByElems eq ks = fst . flip runBState (indBase, ks) . indsByElemsM eq
indsByElemsM :: (a -> a -> Bool) -> [a] -> BState (Index, [a]) [Index]
indsByElemsM eq = foldrM (\x -> BState . f x) []
  where
    -- An empty key list can only happen when ks was empty to begin with
    -- (allElems never consumes keys); fmap (s + 1, ) ([], []) is just
    -- ([], (s + 1, [])) -- the accumulator is discarded.
    f _ _ (s, []) = fmap (s + 1, ) ([], []) -- fixed point.
    f x zs (s, ks) = fmap (s + 1, ) $ allElems p ks s zs
      where p = any (x `eq`)
-- Complement to indsByElems. Find all indexes of elements _not_ in the
-- supplied list. Does not work on infinity input list.
indsByNotElems :: (a -> a -> Bool)
               -> [a] -- Elements, which indexes i'm searching for.
               -> [a] -- List, where i'm searching for.
               -> [Index]
indsByNotElems eq ks = fst . flip runBState (indBase, ks) . indsByNotElemsM eq
indsByNotElemsM :: (a -> a -> Bool) -> [a] -> BState (Index, [a]) [Index]
indsByNotElemsM eq = foldrM (\x -> BState . f x) []
  where
    -- Collect index s when x is equal to none of the keys.
    f x zs (s, ks) = fmap (s + 1, ) $ allElems p ks s zs
      where p = all (not . (x `eq`))
-- Some more specific "instances" of above functions.
--
-- Element at a single index.  Works on infinity list.
elemByInd :: Index -> [a] -> [a]
elemByInd j = elemsByInds [j]
-- First index of a single element.  Works on infinity list.
indByElem :: (a -> a -> Bool)
          -> a   -- Element, which index i'm searching for.
          -> [a] -- List, where i'm searching for.
          -> [Index]
indByElem eq k = indsByElems1 eq [k]
-- All indexes of a single element.  Does not work on infinity list.
-- Not in the module export list; used internally by elemsOrder.
indsByElem :: (a -> a -> Bool)
           -> a   -- Element, which index i'm searching for.
           -> [a] -- List, where i'm searching for.
           -> [Index]
indsByElem eq k = indsByElems eq [k]
-- Convert a list of elements into the list of each element's _first_ index
-- in the "reference" list xs.  Indexes come back in the same order as the
-- searched-for elements ks (not in increasing order, as a plain
-- indsByElems result would).
elemsOrder1 :: (a -> a -> Bool)
            -> [a] -- Elements, which indexes i'm searching for.
            -> [a] -- List, where i'm searching for.
            -> [Index]
elemsOrder1 eq ks xs = concatMap (\k -> indByElem eq k xs) ks
-- Convert a list of elements into the list of _all_ of each element's
-- indexes in the "reference" list xs, grouped and ordered by the
-- searched-for elements ks.
elemsOrder :: (a -> a -> Bool)
           -> [a] -- Elements, which indexes i'm searching for.
           -> [a] -- List, where i'm searching for.
           -> [Index]
elemsOrder eq ks xs = concatMap (\k -> indsByElem eq k xs) ks
-- Sublists.
--
-- Drop the trailing run of elements satisfying p.  Implemented with foldr
-- (like Data.List.dropWhileEnd, available in base >= 4.5): an element is
-- kept as soon as anything after it is kept.
dropWhileEnd :: (a -> Bool) -> [a] -> [a]
dropWhileEnd p = foldr keep []
  where
    keep x kept
        | null kept && p x = []
        | otherwise        = x : kept
-- Split list by list (separator is list of elements). Does not omit separator
-- itself, just leave it as a separate element. Note, that this function
-- can't be implemented using Backward State monad.
--
-- The separator is reversed because foldrM feeds elements to fM starting
-- from the end of the input.  The Reader carries the full (reversed)
-- separator for restarting a match; the State carries the part still
-- expected.
splitBy :: (Alternative f, Eq a) => [a] -> [a] -> [f a]
splitBy ks = let ks' = reverse ks
             in  fst
                 . flip runState ks'
                 . flip runReaderT ks'
                 . splitByM
-- The accumulator always holds at least two cells: it starts as
-- [empty, empty] and every equation of fM rebuilds at least two, so the
-- partial patterns below cannot fail at runtime.
splitByM :: (Alternative f, Eq a) =>
            [a] -> ReaderT [a] (State [a]) [f a]
splitByM xs = do
    (z1 : z2 : zs) <- foldrM fM [empty, empty] xs
    return ((z1 <|> z2) : zs)
  where
    -- z1 is "probably separator", z2 is column behind the separator.
    --fM :: (Alternative f, Eq a) =>
    --      a -> [f a] -> ReaderT [a] (State [a]) [f a]
    fM x (z1 : z2 : zs) = do
        ks <- ask
        cs <- lift get
        let (zs', cs') = f ks cs
        lift (put cs')
        return zs'
      where
        -- Case analysis on (whole separator ks, separator tail cs still
        -- expected).  NOTE(review): the [c] equation falls through to the
        -- next one when its guard fails, and no equation matches
        -- (k : ks, []); the state transitions reset cs to ks whenever it
        -- would empty, which appears to keep that case unreachable --
        -- confirm before refactoring.
        --f :: Eq a => [a] -> [a] -> ([f a], [a])
        f [] _ = (empty : (pure x <|> z2) : zs, [])
        f ks [c]
            | x == c = (empty : empty : (pure x <|> z1) : z2 : zs, ks)
        f (k : ks) (c : cs)
            | x == c = ((pure x <|> z1) : z2 : zs, cs)
            | x == k = (pure x : (z1 <|> z2) : zs, ks)
            | otherwise = (empty : (pure x <|> z1 <|> z2) : zs, k : ks)
-- Folding.
--
-- Apply the monadic function g to every element of the input list.  When g
-- returns True alongside its (monoid) result, mappend that result onto the
-- head of the accumulator; otherwise cons it as a separate element.
foldrMerge :: (Monad m, Monoid b) => (a -> m (b, Bool)) -> [a] -> m [b]
foldrMerge g = foldrM step []
  where
    step x zs = liftM (merge zs) (g x)
    -- Fold one result of g into the accumulator built so far.
    merge :: (Monoid b) => [b] -> (b, Bool) -> [b]
    merge [] (y, _) = [y]
    merge (z : zs) (y, joinHead)
        | joinHead  = (y `mappend` z) : zs
        | otherwise = y : z : zs
-- Sequence a Traversable of monadic list computations, then concatenate
-- the resulting container of lists inside the monad m.
concatSeq :: (T.Traversable t, Monad m) => t (m [a]) -> m [a]
concatSeq ms = do
    xss <- T.sequence ms
    return (F.concat xss)
-- Zipping.
--
-- Like zipWith, but instead of discarding the longer list's tail, append
-- it to the result unchanged.  Requires both lists to share one element
-- type, since leftover elements become part of the result.
zipWith' :: (a -> a -> a) -> [a] -> [a] -> [a]
zipWith' f = go
  where
    go xs       []       = xs
    go []       ys       = ys
    go (x : xs) (y : ys) = f x y : go xs ys
-- FIXME: zipMap should return (Maybe (t b)) and use (StateT s Maybe) to
-- handle failure properly.
-- FIXME: Empty (or not long enough) list of functions is exception for
-- zipMap. To properly fix this, i need a way for function g to somehow
-- continue returning (m b) values without having (a -> b) function. In other
-- words, it either need data constructor for type b (which is impossible) or
-- b should be monoid (then it can use (return mempty)) or it may use last
-- function (a -> b) for all remaining elements (which still will break, if [a
-- -> b] is empty list). So, the only good solution is (Monoid b) constraint,
-- but i'm not sure, that i want it. Other way is implementing monadic zipMap
--
--      zipMapM :: (T.Traversable t, Monad m, Monoid (m b)) =>
--                 [a -> m b] -> t a -> m (t b)
--
-- Note, that (MonadPlus m) is not the right thing here, because mzero is
-- failure, but not a neutral context.
-- "Zippy apply (map)" list of function to some traversable datatype. If list
-- of functions [a -> b] is empty or not long enough, this is error.
zipMap :: (T.Traversable t) => [a -> b] -> t a -> t b
zipMap fs = fst . flip runState fs . T.mapM g
  where
    -- Pop the next function off the state and apply it.  The partial
    -- pattern (f : fs') is the deliberate "too few functions" runtime
    -- error described in the FIXME above.
    g x = do
        (f : fs') <- get
        put fs'
        return (f x)
-- FIXME: Am i really need monadic zipFold ? Or just slightly different
-- zipMap, which folds result (i.e. does not have constraint T.Traversable)?
-- And, in such case, should i use (StateT s Maybe) here to just reflect
-- possible failure, nothing more? In other words, constraint MonadPlus seems
-- useless, since i don't need MonadPlus, i just need failure due to
-- not-equal-length lists (as well as in zipMap), and, hence, why not just use
-- Maybe?
-- Generic list Eq instance or "zippy monadic foldMap". List of monadic
-- functions [a -> m b] is "zippy applied" to other list [a] and results are
-- (right) folded in monad m into monoid b. If length of list of functions
-- [a -> m b] and other list [a] are not equal, return failure context of
-- monad m (mzero). If both have length zero, return mempty in monad m. Note,
-- that there is no sense here in
--
--      (F.Foldable t) => .. -> t a -> ..
--
-- because any foldable can be converted (foldMap-ed) into list, and then
-- applied to this function.
zipFoldM :: (MonadPlus m, Monoid b) => [a -> m b] -> [a] -> m b
zipFoldM fs xs = do
    -- zipFoldrMT consumes one function per element; gs is what is left.
    (y, gs) <- runStateT (zipFoldrMT xs) fs
    -- Leftover functions mean the two lists' lengths differ: fail.
    -- (Too few functions is reported as mzero inside zipFoldrMT itself.)
    case gs of
      [] -> return y
      _  -> mzero
-- I need foldl here, because other zipFoldMT will not work, when either of
-- lists is infinity.
--
-- Left-folding worker: note it mappends results in reverse (right-to-left)
-- order.  Currently unused by zipFoldM, which calls zipFoldrMT instead;
-- kept for the infinity-list behaviour described above.
zipFoldlMT :: (MonadPlus m, Monoid b) => [a] -> StateT [a -> m b] m b
zipFoldlMT = foldlM (\z -> StateT . g z) mempty
  where
    --g :: (MonadPlus m, Monoid b) =>
    --     a -> b -> [a -> m b] -> m (b, [a -> m b])
    -- Ran out of functions before elements: length mismatch, fail.
    g _ _ [] = mzero
    g z x (f : fs) = do
        y <- f x
        return (y `mappend` z, fs)
-- I assume, that (y `mappend` z) works faster, than (z `mappend` y), where z
-- is "long" monoid (e.g. this is true for (++), which is mappend for list).
-- And here i don't want to mappend elements in reverse order (as in
-- zipFoldlMT), which is what happen, if i use (y `mappend` z) in foldl, where
-- z is fold accumulator. So, i use function composition to sequence monoids
-- in correct (foldr's) order and then just apply resulting function to
-- neutral monoid - mempty. See "Using Difference Lists" from "Learn You a
-- Haskell for Great Good" for details.
--
-- The accumulator is therefore a function (b -> b), a difference-list
-- style continuation; applying it to mempty at the end materialises the
-- fold result in left-to-right mappend order.
zipFoldrMT :: (MonadPlus m, Monoid b) => [a] -> StateT [a -> m b] m b
zipFoldrMT xs = do
    h <- foldlM (\z -> StateT . g z) (mempty `mappend`) xs
    return (h mempty)
  where
    --g :: (MonadPlus m, Monoid b) =>
    --     a -> b -> [a -> m b] -> m (b, [a -> m b])
    -- Ran out of functions before elements: length mismatch, fail.
    g _ _ [] = mzero
    g z x (f : fs) = do
        y <- f x
        return (z . (y `mappend`), fs)
-- Generic Eq for lists with a user-supplied element equality eq.  Each
-- element of the first list becomes a check (Just . All . eq x); zipFoldM
-- applies the checks pairwise to the second list and mconcats the All
-- results.  A length mismatch yields Nothing, which is read as False;
-- two empty lists yield mempty (All True).
listEq :: (a -> a -> Bool) -> [a] -> [a] -> Bool
listEq eq xs = getAll . fromMaybe (All False) . zipFoldM checks
  where
    checks = map (\x -> Just . All . eq x) xs
-- Random.
--
-- Pick from a list xs the first occurrences of all elements found in
-- reference list ks. Stops processing xs once all reference elements have
-- been found. Works with an infinity list xs, if it contains all elements
-- from reference list ks. May be used to make random transposition from
-- randomRs output.
transp :: Eq a => [a] -> [a] -> [a]
transp ks = fst . flip runBState ks . transpM
-- The state is the set of reference elements still unseen.
transpM :: Eq a => [a] -> BState [a] [a]
transpM = foldrM (\x -> BState . f x) []
  where
    --f :: a -> [a] -> [a] -> ([a], [a])
    -- No keys left: fixed point, the rest of xs is ignored (this is what
    -- allows an infinity xs).
    f _ _ [] = ([], [])
    f x zs ks
        | x `elem` ks = (x : zs, filter (/= x) ks)
        | otherwise = (zs, ks)
-- Shuffle list elements.
-- Draws random indexes in [1, lx] until each index has appeared at least
-- once; transp keeps only the first occurrence of each, giving a random
-- permutation of 1..lx, and the final bind picks elements in that order.
-- NOTE(review): this draws an unbounded (coupon-collector style) number of
-- randoms rather than doing a Fisher-Yates shuffle -- confirm that is
-- acceptable for the intended use.
shuffleList :: RandomGen g => g -> [a] -> [a]
shuffleList g xs = let lx = length xs
                       ts = transp (take lx [1..]) $ randomRs (1, lx) g
                   in  ts >>= flip elemByInd xs
| sgf-dma/hs-sgf-lib | src/Sgf/List.hs | bsd-3-clause | 16,529 | 0 | 16 | 4,910 | 4,059 | 2,235 | 1,824 | 206 | 3 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE OverloadedStrings #-}
-- | This module contains objects which represent data of Telegram Bot API responses
module Web.Telegram.API.Bot.Data
( -- * Types
User (..)
, ChatMember (..)
, Chat (..)
, Message (..)
, MessageEntity (..)
, PhotoSize (..)
, Audio (..)
, Document (..)
, Game (..)
, Animation (..)
, Sticker (..)
, Video (..)
, Voice (..)
, Venue (..)
, Contact (..)
, Location (..)
, Update (..)
, File (..)
, UserProfilePhotos (..)
, InlineQuery (..)
, ChosenInlineResult (..)
, InlineQueryResult (..)
, InlineKeyboardMarkup (..)
, InlineKeyboardButton (..)
, CallbackGame (..)
, CallbackQuery (..)
, ChatType (..)
, ParseMode (..)
, InputMessageContent (..)
, KeyboardButton (..)
, WebhookInfo (..)
-- * Functions
, inlineKeyboardButton
, keyboardButton
, inlineQueryResultArticle
, inlineQueryResultAudio
, inlineQueryResultContact
, inlineQueryResultDocument
, inlineQueryResultGif
, inlineQueryResultLocation
, inlineQueryResultMpeg4Gif
, inlineQueryResultPhoto
, inlineQueryResultVenue
, inlineQueryResultVideo
, inlineQueryResultVoice
, inlineQueryResultCachedAudio
, inlineQueryResultCachedDocument
, inlineQueryResultCachedGif
, inlineQueryResultCachedMpeg4Gif
, inlineQueryResultGame
, inlineQueryResultCachedPhoto
, inlineQueryResultCachedSticker
, inlineQueryResultCachedVideo
, inlineQueryResultCachedVoice
) where
import Prelude hiding (id)
import Data.Aeson
import Data.Maybe()
import Data.Aeson.Types
import Data.Text (Text)
import qualified Data.Char as Char
import GHC.Generics
import Data.List
import Web.Telegram.API.Bot.JsonExt
-- | This object represents a Telegram user or bot.
data User = User
  {
    user_id :: Int -- ^ Unique identifier for this user or bot
  , user_first_name :: Text -- ^ User‘s or bot’s first name
  , user_last_name :: Maybe Text -- ^ User‘s or bot’s last name
  , user_username :: Maybe Text -- ^ User‘s or bot’s username
  } deriving (Show, Generic)
-- JSON keys are the record fields with the 5-character "user_" prefix dropped.
instance ToJSON User where
  toJSON = toJsonDrop 5
instance FromJSON User where
  parseJSON = parseJsonDrop 5
-- | This object represents a phone contact.
data Contact = Contact
  {
    contact_phone_number :: Text -- ^ Contact's phone number
  , contact_first_name :: Text -- ^ Contact's first name
  , contact_last_name :: Maybe Text -- ^ Contact's last name
  , contact_user_id :: Maybe Int -- ^ Contact's user identifier in Telegram
  } deriving (Show, Generic)
-- JSON keys drop the 8-character "contact_" prefix.
instance ToJSON Contact where
  toJSON = toJsonDrop 8
instance FromJSON Contact where
  parseJSON = parseJsonDrop 8
-- | This object represents a chat.
data Chat = Chat
  {
    chat_id :: Int -- ^ Unique identifier for this chat, not exceeding 1e13 by absolute value
  , chat_type :: ChatType -- ^ Type of chat, can be either 'Private', 'Group', 'Supergroup' or 'Channel'
  , chat_title :: Maybe Text -- ^ Title, for channels and group chats
  , chat_username :: Maybe Text -- ^ Username, for private chats and channels if available
  , chat_first_name :: Maybe Text -- ^ First name of the other party in a private chat
  , chat_last_name :: Maybe Text -- ^ Last name of the other party in a private chat
  , chat_all_members_are_administrators :: Maybe Bool -- ^ True if a group has ‘All Members Are Admins’ enabled.
  } deriving (Show, Generic)
-- JSON keys drop the 5-character "chat_" prefix.
instance ToJSON Chat where
  toJSON = toJsonDrop 5
instance FromJSON Chat where
  parseJSON = parseJsonDrop 5
-- | Type of chat. Serialized as the lowercase strings used on the wire
-- ("private", "group", "supergroup", "channel").
data ChatType = Private
              | Group
              | Supergroup
              | Channel deriving (Show, Generic)
instance ToJSON ChatType where
  toJSON Private = "private"
  toJSON Group = "group"
  toJSON Supergroup = "supergroup"
  toJSON Channel = "channel"
instance FromJSON ChatType where
  parseJSON "private" = pure Private
  parseJSON "group" = pure Group
  parseJSON "supergroup" = pure Supergroup
  parseJSON "channel" = pure Channel
  parseJSON _ = fail "Failed to parse ChatType"
-- | Parse mode for text message.  Serialized as "Markdown" / "HTML".
data ParseMode = Markdown | HTML deriving (Show, Generic)
instance ToJSON ParseMode where
  toJSON Markdown = "Markdown"
  toJSON HTML = "HTML"
instance FromJSON ParseMode where
  parseJSON "Markdown" = pure Markdown
  parseJSON "HTML" = pure HTML
  parseJSON _ = fail "Failed to parse ParseMode"
-- | This object represents one size of a photo or a 'File' / 'Sticker' thumbnail.
data PhotoSize = PhotoSize
  {
    photo_file_id :: Text -- ^ Unique identifier for this file
  , photo_width :: Int -- ^ Photo width
  , photo_height :: Int -- ^ Photo height
  , photo_file_size :: Maybe Int -- ^ File size
  } deriving (Show, Generic)
-- JSON keys drop the 6-character "photo_" prefix.
instance ToJSON PhotoSize where
  toJSON = toJsonDrop 6
instance FromJSON PhotoSize where
  parseJSON = parseJsonDrop 6
-- | This object represents an audio file to be treated as music by the Telegram clients.
data Audio = Audio
  {
    audio_file_id :: Text -- ^ Unique identifier for this file
  , audio_duration :: Int -- ^ Duration of the audio in seconds as defined by sender
  , audio_performer :: Maybe Text -- ^ Performer of the audio as defined by sender or by audio tags
  , audio_title :: Maybe Text -- ^ Title of the audio as defined by sender or by audio tags
  , audio_mime_type :: Maybe Text -- ^ MIME type of the file as defined by sender
  , audio_file_size :: Maybe Int -- ^ File size
  } deriving (Show, Generic)
-- JSON keys drop the 6-character "audio_" prefix.
instance ToJSON Audio where
  toJSON = toJsonDrop 6
instance FromJSON Audio where
  parseJSON = parseJsonDrop 6
-- | This object represents a general file (as opposed to 'PhotoSize', 'Voice' messages and 'Audio' files).
data Document = Document
  {
    doc_file_id :: Text -- ^ Unique file identifier
  , doc_thumb :: Maybe PhotoSize -- ^ Document thumbnail as defined by sender
  , doc_file_name :: Maybe Text -- ^ Original filename as defined by sender
  , doc_mime_type :: Maybe Text -- ^ MIME type of the file as defined by sender
  , doc_file_size :: Maybe Int -- ^ File size
  } deriving (Show, Generic)
-- JSON keys drop the 4-character "doc_" prefix.
instance ToJSON Document where
  toJSON = toJsonDrop 4
instance FromJSON Document where
  parseJSON = parseJsonDrop 4
-- | This object represents a game. Use BotFather to create and edit games, their short names will act as unique identifiers.
data Game = Game
  {
    game_title :: Text -- ^ Title of the game
  , game_description :: Text -- ^ Description of the game
  , game_photo :: [PhotoSize] -- ^ Photo that will be displayed in the game message in chats.
  , game_text :: Maybe Text -- ^ Brief description of the game or high scores included in the game message. Can be automatically edited to include current high scores for the game when the bot calls setGameScore, or manually edited using editMessageText. 0-4096 characters.
  , game_text_entities :: Maybe [MessageEntity] -- ^ Special entities that appear in text, such as usernames, URLs, bot commands, etc.
  , game_animation :: Maybe Animation -- ^ Animation that will be displayed in the game message in chats. Upload via BotFather
  } deriving (Show, Generic)
-- JSON keys drop the 5-character "game_" prefix.
instance ToJSON Game where
  toJSON = toJsonDrop 5
instance FromJSON Game where
  parseJSON = parseJsonDrop 5
-- | This object represents an animation file to be displayed in the message containing a game.
data Animation = Animation
  {
    anim_file_id :: Text -- ^ Unique file identifier
  , anim_thumb :: Maybe PhotoSize -- ^ Animation thumbnail as defined by sender
  , anim_file_name :: Maybe Text -- ^ Original animation filename as defined by sender
  , anim_mime_type :: Maybe Text -- ^ MIME type of the file as defined by sender
  , anim_file_size :: Maybe Int -- ^ File size
  } deriving (Show, Generic)
-- JSON keys drop the 5-character "anim_" prefix.
instance ToJSON Animation where
  toJSON = toJsonDrop 5
instance FromJSON Animation where
  parseJSON = parseJsonDrop 5
-- | This object represents a sticker.
data Sticker = Sticker
  {
    sticker_file_id :: Text -- ^ Unique identifier for this file
  , sticker_width :: Int -- ^ Sticker width
  , sticker_height :: Int -- ^ Sticker height
  , sticker_thumb :: Maybe PhotoSize -- ^ Sticker thumbnail in .webp or .jpg format
  , sticker_emoji :: Maybe Text -- ^ Emoji associated with the sticker
  , sticker_file_size :: Maybe Int -- ^ File size
  } deriving (Show, Generic)
-- JSON keys drop the 8-character "sticker_" prefix.
instance ToJSON Sticker where
  toJSON = toJsonDrop 8
instance FromJSON Sticker where
  parseJSON = parseJsonDrop 8
-- | This object represents a video file.
data Video = Video
  {
    video_file_id :: Text -- ^ Unique identifier for this file
  , video_width :: Int -- ^ Video width as defined by sender
  , video_height :: Int -- ^ Video height as defined by sender
  , video_duration :: Int -- ^ Duration of the video in seconds as defined by sender
  , video_thumb :: Maybe PhotoSize -- ^ Video thumbnail
  , video_mime_type :: Maybe Text -- ^ MIME type of a file as defined by sender
  , video_file_size :: Maybe Int -- ^ File size
  } deriving (Show, Generic)
-- JSON keys drop the 6-character "video_" prefix.
instance ToJSON Video where
  toJSON = toJsonDrop 6
instance FromJSON Video where
  parseJSON = parseJsonDrop 6
-- | This object represents a voice note.
data Voice = Voice
  {
    voice_file_id :: Text -- ^ Unique identifier for this file
  , voice_duration :: Int -- ^ Duration of the audio in seconds as defined by sender
  , voice_mime_type :: Maybe Text -- ^ MIME type of the file as defined by sender
  , voice_file_size :: Maybe Int -- ^ File size
  } deriving (Show, Generic)
-- JSON keys drop the 6-character "voice_" prefix.
instance ToJSON Voice where
  toJSON = toJsonDrop 6
instance FromJSON Voice where
  parseJSON = parseJsonDrop 6
-- | This object represents an incoming inline query. When the user sends an empty query, your bot could return some default or trending results.
data InlineQuery = InlineQuery
  {
    query_id :: Text -- ^ Unique identifier for this query
  , query_from :: User -- ^ Sender
  , query_location :: Maybe Location -- ^ Sender location, only for bots that request user location
  , query_query :: Text -- ^ Text of the query
  , query_offset :: Text -- ^ Offset of the results to be returned, can be controlled by the bot
  } deriving (Show, Generic)
-- JSON keys drop the 6-character "query_" prefix.
instance ToJSON InlineQuery where
  toJSON = toJsonDrop 6
instance FromJSON InlineQuery where
  parseJSON = parseJsonDrop 6
-- | This object represents a result of an inline query that was chosen by the user and sent to their chat partner.
data ChosenInlineResult = ChosenInlineResult
  {
    chosen_result_id :: Text -- ^ The unique identifier for the result that was chosen
  , chosen_from :: User -- ^ The user that chose the result
  , chosen_location :: Maybe Location -- ^ Sender location, only for bots that require user location
  , chosen_inline_message_id :: Maybe Text -- ^ Identifier of the sent inline message. Available only if there is an inline keyboard attached to the message. Will be also received in callback queries and can be used to edit the message.
  , chosen_query :: Text -- ^ The query that was used to obtain the result
  } deriving (Show, Generic)
-- JSON keys drop the 7-character "chosen_" prefix.
instance ToJSON ChosenInlineResult where
  toJSON = toJsonDrop 7
instance FromJSON ChosenInlineResult where
  parseJSON = parseJsonDrop 7
-- | This object represents the content of a message to be sent as a result of an inline query.
-- Several constructors share the "imc_" field prefix (e.g. imc_latitude
-- appears in both the location and venue variants).
data InputMessageContent =
  -- | Represents the content of a text message to be sent as the result of an inline query.
  InputTextMessageContent
  {
    imc_message_text :: Text -- ^ Text of the message to be sent, 1-4096 characters
  , imc_parse_mode :: Maybe ParseMode -- ^ Send 'Markdown' or 'HTML', if you want Telegram apps to show bold, italic, fixed-width text or inline URLs in your bot's message.
  , imc_disable_web_page_preview :: Maybe Bool -- ^ Disables link previews for links in the sent message
  }
  -- | Represents the content of a location message to be sent as the result of an inline query.
  | InputLocationMessageContent
  {
    imc_latitude :: Float -- ^ Latitude of the location in degrees
  , imc_longitude :: Float -- ^ Longitude of the location in degrees
  }
  -- | Represents the content of a venue message to be sent as the result of an inline query.
  | InputVenueMessageContent
  {
    imc_latitude :: Float -- ^ Latitude of the location in degrees
  , imc_longitude :: Float -- ^ Longitude of the location in degrees
  , imc_title :: Text -- ^ Name of the venue
  , imc_address :: Text -- ^ Address of the venue
  , imc_foursquare_id :: Maybe Text -- ^ Foursquare identifier of the venue, if known
  }
  -- | Represents the content of a contact message to be sent as the result of an inline query.
  | InputContactMessageContent
  {
    imc_phone_number :: Text -- ^ Contact's phone number
  , imc_first_name :: Text -- ^ Contact's first name
  , imc_last_name :: Maybe Text -- ^ Contact's last name
  } deriving (Show, Generic)
-- JSON keys drop the 4-character "imc_" prefix.
instance ToJSON InputMessageContent where
  toJSON = toJsonDrop 4
instance FromJSON InputMessageContent where
  parseJSON = parseJsonDrop 4
data InlineQueryResult =
-- | Represents a link to an article or web page.
InlineQueryResultArticle
{
iq_res_id :: Text -- ^ Unique identifier for this result, 1-64 Bytes
, iq_res_title :: Maybe Text -- ^ Title of the result
, iq_res_input_message_content :: Maybe InputMessageContent -- ^ Content of the message to be sent
, iq_res_reply_markup :: Maybe InlineKeyboardMarkup -- ^ Inline keyboard attached to the message
, iq_res_url :: Maybe Text -- ^ URL of the result
, iq_res_hide_url :: Maybe Bool -- ^ Pass True, if you don't want the URL to be shown in the message
, iq_res_description :: Maybe Text -- ^ Short description of the result
, iq_res_thumb_url :: Maybe Text -- ^ Url of the thumbnail for the result
, iq_res_thumb_width :: Maybe Int -- ^ Thumbnail width
, iq_res_thumb_height :: Maybe Int -- ^ Thumbnail height
}
-- | Represents a link to a photo. By default, this photo will be sent by the user with optional caption. Alternatively, you can use input_message_content to send a message with the specified content instead of the photo.
| InlineQueryResultPhoto
{
iq_res_id :: Text -- ^ Unique identifier for this result, 1-64 bytes
, iq_res_photo_url :: Text -- ^ A valid URL of the photo. Photo must be in jpeg format. Photo size must not exceed 5MB
, iq_res_thumb_url :: Maybe Text -- ^ URL of the thumbnail for the photo
, iq_res_photo_width :: Maybe Int -- ^ Optional. Width of the photo
, iq_res_photo_height :: Maybe Int -- ^ Optional. Height of the photo
, iq_res_title :: Maybe Text -- ^ Title for the result
, iq_res_description :: Maybe Text -- ^ Short description of the result
, iq_res_caption :: Maybe Text -- ^ Caption of the photo to be sent, 0-200 characters
, iq_res_reply_markup :: Maybe InlineKeyboardMarkup -- ^ Inline keyboard attached to the message
, iq_res_input_message_content :: Maybe InputMessageContent -- ^ Content of the message to be sent instead of the photo
}
-- | Represents a link to an animated GIF file. By default, this animated GIF file will be sent by the user with optional caption. Alternatively, you can provide message_text to send it instead of the animation.
| InlineQueryResultGif
{
iq_res_id :: Text -- ^ Unique identifier for this result, 1-64 bytes
, iq_res_gif_url :: Text -- ^ A valid URL for the GIF file. File size must not exceed 1MB
, iq_res_gif_width :: Maybe Int -- ^ Width of the GIF
, iq_res_gif_height :: Maybe Int -- ^ Height of the GIF
, iq_res_thumb_url :: Maybe Text -- ^ URL of the static thumbnail for the result (jpeg or gif)
, iq_res_title :: Maybe Text -- ^ Title for the result
, iq_res_caption :: Maybe Text -- ^ Caption of the GIF file to be sent, 0-200 characters
, iq_res_reply_markup :: Maybe InlineKeyboardMarkup -- ^ Inline keyboard attached to the message
, iq_res_input_message_content :: Maybe InputMessageContent -- ^ Content of the message to be sent instead of the GIF animation
}
-- | Represents a link to a video animation (H.264/MPEG-4 AVC video without sound). By default, this animated MPEG-4 file will be sent by the user with optional caption. Alternatively, you can provide message_text to send it instead of the animation.
| InlineQueryResultMpeg4Gif
{
iq_res_id :: Text -- ^ Unique identifier for this result, 1-64 bytes
, iq_res_mpeg4_url :: Text -- ^ A valid URL for the MP4 file. File size must not exceed 1MB
, iq_res_mpeg4_width :: Maybe Int -- ^ Video width
, iq_res_mpeg4_height :: Maybe Int -- ^ Video height
, iq_res_thumb_url :: Maybe Text -- ^ URL of the static thumbnail (jpeg or gif) for the result
, iq_res_title :: Maybe Text -- ^ Title for the result
, iq_res_caption :: Maybe Text -- ^ Caption of the MPEG-4 file to be sent, 0-200 characters
, iq_res_reply_markup :: Maybe InlineKeyboardMarkup -- ^ Inline keyboard attached to the message
, iq_res_input_message_content :: Maybe InputMessageContent -- ^ Content of the message to be sent instead of the video animation
}
-- | Represents link to a page containing an embedded video player or a video file.
| InlineQueryResultVideo
{
iq_res_id :: Text -- ^ Unique identifier for this result, 1-64 bytes
, iq_res_video_url :: Text -- ^ A valid URL for the embedded video player or video file
, iq_res_mime_type :: Text -- ^ Mime type of the content of video url, “text/html” or “video/mp4”
, iq_res_thumb_url :: Maybe Text -- ^ URL of the thumbnail (jpeg only) for the video
, iq_res_title :: Maybe Text -- ^ Title for the result
, iq_res_caption :: Maybe Text -- ^ Caption of the video to be sent, 0-200 characters
, iq_res_video_width :: Maybe Int -- ^ Video width
, iq_res_video_height :: Maybe Int -- ^ Video height
, iq_res_video_duration :: Maybe Int -- ^ Video duration in seconds
, iq_res_description :: Maybe Text -- ^ Short description of the result
, iq_res_reply_markup :: Maybe InlineKeyboardMarkup -- ^ Inline keyboard attached to the message
, iq_res_input_message_content :: Maybe InputMessageContent -- ^ Content of the message to be sent instead of the video
}
-- | Represents a link to an mp3 audio file. By default, this audio file will be sent by the user. Alternatively, you can use input_message_content to send a message with the specified content instead of the audio.
| InlineQueryResultAudio
{
iq_res_id :: Text -- ^ Unique identifier for this result, 1-64 bytes
, iq_res_audio_url :: Text -- ^ A valid URL for the audio file
, iq_res_title :: Maybe Text -- ^ Title
, iq_res_caption :: Maybe Text -- ^ Caption, 0-200 characters
, iq_res_performer :: Maybe Text -- ^ Performer
, iq_res_audio_duration :: Maybe Int -- ^ Audio duration in seconds
, iq_res_reply_markup :: Maybe InlineKeyboardMarkup -- ^ Inline keyboard attached to the message
, iq_res_input_message_content :: Maybe InputMessageContent -- ^ Content of the message to be sent instead of the audio
}
-- | Represents a link to a voice recording in an .ogg container encoded with OPUS. By default, this voice recording will be sent by the user. Alternatively, you can use input_message_content to send a message with the specified content instead of the voice message.
| InlineQueryResultVoice
{
iq_res_id :: Text -- ^ Unique identifier for this result, 1-64 bytes
, iq_res_voice_url :: Text -- ^ A valid URL for the voice recording
, iq_res_title :: Maybe Text -- ^ Recording title
, iq_res_caption :: Maybe Text -- ^ Caption, 0-200 characters
, iq_res_voice_duration :: Maybe Int -- ^ Recording duration in seconds
, iq_res_reply_markup :: Maybe InlineKeyboardMarkup -- ^ Inline keyboard attached to the message
, iq_res_input_message_content :: Maybe InputMessageContent -- ^ Content of the message to be sent instead of the voice recording
}
-- | Represents a link to a file. By default, this file will be sent by the user with an optional caption. Alternatively, you can use input_message_content to send a message with the specified content instead of the file. Currently, only .PDF and .ZIP files can be sent using this method.
| InlineQueryResultDocument
{
iq_res_id :: Text -- ^ Unique identifier for this result, 1-64 bytes
, iq_res_title :: Maybe Text -- ^ Title for the result
, iq_res_caption :: Maybe Text -- ^ Caption of the document to be sent, 0-200 characters
, iq_res_document_url :: Text -- ^ A valid URL for the file
, iq_res_mime_type :: Text -- ^ Mime type of the content of the file, either “application/pdf” or “application/zip”
, iq_res_description :: Maybe Text -- ^ Short description of the result
, iq_res_reply_markup :: Maybe InlineKeyboardMarkup -- ^ Inline keyboard attached to the message
, iq_res_input_message_content :: Maybe InputMessageContent -- ^ Content of the message to be sent instead of the file
, iq_res_thumb_url :: Maybe Text -- ^ URL of the thumbnail (jpeg only) for the file
, iq_res_thumb_width :: Maybe Int -- ^ Thumbnail width
, iq_res_thumb_height :: Maybe Int -- ^ Thumbnail height
}
-- | Represents a location on a map. By default, the location will be sent by the user. Alternatively, you can use input_message_content to send a message with the specified content instead of the location.
| InlineQueryResultLocation
{
iq_res_id :: Text -- ^ Unique identifier for this result, 1-64 Bytes
, iq_res_latitude :: Float -- ^ Location latitude in degrees
, iq_res_longitude :: Float -- ^ Location longitude in degrees
, iq_res_title :: Maybe Text -- ^ Location title
, iq_res_reply_markup :: Maybe InlineKeyboardMarkup -- ^ Inline keyboard attached to the message
, iq_res_input_message_content :: Maybe InputMessageContent -- ^ Content of the message to be sent instead of the location
, iq_res_thumb_url :: Maybe Text -- ^ Url of the thumbnail for the result
, iq_res_thumb_width :: Maybe Int -- ^ Thumbnail width
, iq_res_thumb_height :: Maybe Int -- ^ Thumbnail height
}
-- | Represents a venue. By default, the venue will be sent by the user. Alternatively, you can use input_message_content to send a message with the specified content instead of the venue.
| InlineQueryResultVenue
{
iq_res_id :: Text -- ^ Unique identifier for this result, 1-64 Bytes
, iq_res_latitude :: Float -- ^ Latitude of the venue location in degrees
, iq_res_longitude :: Float -- ^ Longitude of the venue location in degrees
, iq_res_title :: Maybe Text -- ^ Title of the venue
, iq_res_address :: Text -- ^ Address of the venue
, iq_res_foursquare_id :: Maybe Text -- ^ Foursquare identifier of the venue if known
, iq_res_reply_markup :: Maybe InlineKeyboardMarkup -- ^ Inline keyboard attached to the message
, iq_res_input_message_content :: Maybe InputMessageContent -- ^ Content of the message to be sent instead of the venue
, iq_res_thumb_url :: Maybe Text -- ^ Url of the thumbnail for the result
, iq_res_thumb_width :: Maybe Int -- ^ Thumbnail width
, iq_res_thumb_height :: Maybe Int -- ^ Thumbnail height
}
-- | Represents a contact with a phone number. By default, this contact will be sent by the user. Alternatively, you can use input_message_content to send a message with the specified content instead of the contact.
| InlineQueryResultContact
{
iq_res_id :: Text -- ^ Unique identifier for this result, 1-64 Bytes
, iq_res_phone_number :: Text -- ^ Contact's phone number
, iq_res_first_name :: Text -- ^ Contact's first name
, iq_res_last_name :: Maybe Text -- ^ Contact's last name
, iq_res_reply_markup :: Maybe InlineKeyboardMarkup -- ^ Inline keyboard attached to the message
, iq_res_input_message_content :: Maybe InputMessageContent -- ^ Content of the message to be sent instead of the contact
, iq_res_thumb_url :: Maybe Text -- ^ Url of the thumbnail for the result
, iq_res_thumb_width :: Maybe Int -- ^ Thumbnail width
, iq_res_thumb_height :: Maybe Int -- ^ Thumbnail height
}
-- | Represents a Game.
| InlineQueryResultGame
{
iq_res_id :: Text -- ^ Unique identifier for this result, 1-64 bytes
, iq_res_game_short_name :: Text -- ^ Short name of the game
, iq_res_reply_markup :: Maybe InlineKeyboardMarkup -- ^ Inline keyboard attached to the message
}
-- | Represents a link to a photo stored on the Telegram servers. By default, this photo will be sent by the user with an optional caption. Alternatively, you can use input_message_content to send a message with the specified content instead of the photo.
| InlineQueryResultCachedPhoto
{
iq_res_id :: Text -- ^ Unique identifier for this result, 1-64 bytes
, iq_res_photo_file_id :: Text -- ^ A valid file identifier of the photo
, iq_res_title :: Maybe Text -- ^ Title for the result
, iq_res_description :: Maybe Text -- ^ Short description of the result
, iq_res_caption :: Maybe Text -- ^ Caption of the photo to be sent, 0-200 characters
, iq_res_reply_markup :: Maybe InlineKeyboardMarkup -- ^ Inline keyboard attached to the message
, iq_res_input_message_content :: Maybe InputMessageContent -- ^ Content of the message to be sent instead of the photo
}
-- | Represents a link to an animated GIF file stored on the Telegram servers. By default, this animated GIF file will be sent by the user with an optional caption. Alternatively, you can use input_message_content to send a message with specified content instead of the animation.
| InlineQueryResultCachedGif
{
iq_res_id :: Text -- ^ Unique identifier for this result, 1-64 bytes
, iq_res_gif_file_id :: Text -- ^ A valid file identifier for the GIF file
, iq_res_title :: Maybe Text -- ^ Title for the result
, iq_res_caption :: Maybe Text -- ^ Caption of the GIF file to be sent, 0-200 characters
, iq_res_reply_markup :: Maybe InlineKeyboardMarkup -- ^ An Inline keyboard attached to the message
, iq_res_input_message_content :: Maybe InputMessageContent -- ^ Content of the message to be sent instead of the GIF animation
}
-- | Represents a link to a video animation (H.264/MPEG-4 AVC video without sound) stored on the Telegram servers. By default, this animated MPEG-4 file will be sent by the user with an optional caption. Alternatively, you can use input_message_content to send a message with the specified content instead of the animation.
| InlineQueryResultCachedMpeg4Gif
{
iq_res_id :: Text -- ^ Unique identifier for this result, 1-64 bytes
, iq_res_mpeg4_file_id :: Text -- ^ A valid file identifier for the MP4 file
, iq_res_title :: Maybe Text -- ^ Title for the result
, iq_res_caption :: Maybe Text -- ^ Caption of the MPEG-4 file to be sent, 0-200 characters
, iq_res_reply_markup :: Maybe InlineKeyboardMarkup -- ^ An Inline keyboard attached to the message
, iq_res_input_message_content :: Maybe InputMessageContent -- ^ Content of the message to be sent instead of the video animation
}
-- | Represents a link to a sticker stored on the Telegram servers. By default, this sticker will be sent by the user. Alternatively, you can use input_message_content to send a message with the specified content instead of the sticker.
| InlineQueryResultCachedSticker
{
iq_res_id :: Text -- ^ Unique identifier for this result, 1-64 bytes
, iq_res_sticker_file_id :: Text -- ^ A valid file identifier of the sticker
, iq_res_reply_markup :: Maybe InlineKeyboardMarkup -- ^ An Inline keyboard attached to the message
, iq_res_input_message_content :: Maybe InputMessageContent -- ^ Content of the message to be sent instead of the sticker
}
-- | Represents a link to a file stored on the Telegram servers. By default, this file will be sent by the user with an optional caption. Alternatively, you can use input_message_content to send a message with the specified content instead of the file. Currently, only pdf-files and zip archives can be sent using this method.
| InlineQueryResultCachedDocument
{
iq_res_id :: Text -- ^ Unique identifier for this result, 1-64 bytes
, iq_res_title :: Maybe Text -- ^ Title for the result
, iq_res_document_file_id :: Text -- ^ A valid file identifier for the file
, iq_res_description :: Maybe Text -- ^ Short description of the result
, iq_res_caption :: Maybe Text -- ^ Caption of the document to be sent, 0-200 characters
, iq_res_reply_markup :: Maybe InlineKeyboardMarkup -- ^ An Inline keyboard attached to the message
, iq_res_input_message_content :: Maybe InputMessageContent -- ^ Content of the message to be sent instead of the file
}
-- | Represents a link to a video file stored on the Telegram servers. By default, this video file will be sent by the user with an optional caption. Alternatively, you can use input_message_content to send a message with the specified content instead of the video.
| InlineQueryResultCachedVideo
{
iq_res_id :: Text -- ^ Unique identifier for this result, 1-64 bytes
, iq_res_video_file_id :: Text -- ^ A valid file identifier for the video file
, iq_res_title :: Maybe Text -- ^ Title for the result
, iq_res_description :: Maybe Text -- ^ Short description of the result
, iq_res_caption :: Maybe Text -- ^ Caption of the video to be sent, 0-200 characters
, iq_res_reply_markup :: Maybe InlineKeyboardMarkup -- ^ An Inline keyboard attached to the message
, iq_res_input_message_content :: Maybe InputMessageContent -- ^ Content of the message to be sent instead of the video
}
-- | Represents a link to a voice message stored on the Telegram servers. By default, this voice message will be sent by the user. Alternatively, you can use input_message_content to send a message with the specified content instead of the voice message.
| InlineQueryResultCachedVoice
{
iq_res_id :: Text -- ^ Unique identifier for this result, 1-64 bytes
, iq_res_voice_file_id :: Text -- ^ A valid file identifier for the voice message
, iq_res_title :: Maybe Text -- ^ Voice message title
, iq_res_caption :: Maybe Text -- ^ Caption, 0-200 characters
, iq_res_reply_markup :: Maybe InlineKeyboardMarkup -- ^ An Inline keyboard attached to the message
  , iq_res_input_message_content :: Maybe InputMessageContent -- ^ Content of the message to be sent instead of the voice message
}
-- | Represents a link to an mp3 audio file stored on the Telegram servers. By default, this audio file will be sent by the user. Alternatively, you can use input_message_content to send a message with the specified content instead of the audio.
| InlineQueryResultCachedAudio
{
iq_res_id :: Text -- ^ Unique identifier for this result, 1-64 bytes
, iq_res_audio_file_id :: Text -- ^ A valid file identifier for the audio file
, iq_res_caption :: Maybe Text -- ^ Caption, 0-200 characters
, iq_res_reply_markup :: Maybe InlineKeyboardMarkup -- ^ An Inline keyboard attached to the message
  , iq_res_input_message_content :: Maybe InputMessageContent -- ^ Content of the message to be sent instead of the audio
} deriving (Show, Generic)
-- | Strip a leading @\"Cached\"@ (6 characters) from a constructor name;
-- names without that prefix are returned unchanged.
dropCached :: String -> String
dropCached name
  | "Cached" `isPrefixOf` name = drop 6 name
  | otherwise                  = name
-- | Turn an @InlineQueryResult*@ constructor name into its JSON \"type\" tag:
-- the 17-character @\"InlineQueryResult\"@ prefix and any @\"Cached\"@ prefix
-- are stripped and the remainder is lower-cased.  The Mpeg4Gif variants are
-- spelled out explicitly because their tag contains an underscore.
tagModifier :: String -> String
tagModifier name = case name of
  "InlineQueryResultMpeg4Gif"       -> "mpeg4_gif"
  "InlineQueryResultCachedMpeg4Gif" -> "mpeg4_gif"
  _                                 -> map Char.toLower (dropCached (drop 17 name))
-- | Generic aeson options for 'InlineQueryResult': record fields drop the
-- 7-character @iq_res_@ prefix, 'Nothing' fields are omitted from the output,
-- and the constructor is encoded into a \"type\" tag field (e.g. @\"photo\"@,
-- @\"mpeg4_gif\"@) computed by 'tagModifier'.
inlineQueryJSONOptions :: Options
inlineQueryJSONOptions = defaultOptions {
    fieldLabelModifier     = drop 7
  , omitNothingFields      = True
    -- 'contentsFieldName' is only consulted for constructors that do not use
    -- record syntax.  It was previously 'undefined', which would crash
    -- encoding if such a constructor were ever added; a concrete name is
    -- equivalent for the current (all-record) constructors and safe forever.
  , sumEncoding            = TaggedObject { tagFieldName = "type", contentsFieldName = "contents" }
  , constructorTagModifier = tagModifier
  }
-- | Encoding goes through the generic machinery with
-- 'inlineQueryJSONOptions', so the constructor ends up in a \"type\" field.
instance ToJSON InlineQueryResult where
  toJSON = genericToJSON inlineQueryJSONOptions

-- | Decoding mirrors 'toJSON', dispatching on the \"type\" field.
instance FromJSON InlineQueryResult where
  parseJSON = genericParseJSON inlineQueryJSONOptions
-- | Minimal 'InlineQueryResultArticle' from id, title and message content;
-- all optional fields are 'Nothing'.
inlineQueryResultArticle :: Text -> Text -> InputMessageContent -> InlineQueryResult
inlineQueryResultArticle id title content = InlineQueryResultArticle id (Just title) (Just content) Nothing Nothing Nothing Nothing Nothing Nothing Nothing

-- | Minimal 'InlineQueryResultPhoto' from id, photo URL and thumbnail URL.
inlineQueryResultPhoto :: Text -> Text -> Text -> InlineQueryResult
inlineQueryResultPhoto id photoUrl thumbUlr = InlineQueryResultPhoto id photoUrl (Just thumbUlr) Nothing Nothing Nothing Nothing Nothing Nothing Nothing

-- | Minimal 'InlineQueryResultGif' from id, GIF URL and thumbnail URL.
inlineQueryResultGif :: Text -> Text -> Text -> InlineQueryResult
inlineQueryResultGif id gifUrl thumbUrl = InlineQueryResultGif id gifUrl Nothing Nothing (Just thumbUrl) Nothing Nothing Nothing Nothing

-- | Minimal 'InlineQueryResultMpeg4Gif' from id, MP4 URL and thumbnail URL.
inlineQueryResultMpeg4Gif :: Text -> Text -> Text -> InlineQueryResult
inlineQueryResultMpeg4Gif id mpeg4Url thumbUrl = InlineQueryResultMpeg4Gif id mpeg4Url Nothing Nothing (Just thumbUrl) Nothing Nothing Nothing Nothing

-- | Minimal 'InlineQueryResultVideo' from id, video URL, MIME type,
-- thumbnail URL and title.
inlineQueryResultVideo :: Text -> Text -> Text -> Text -> Text -> InlineQueryResult
inlineQueryResultVideo id videoUrl mimeType thumbUrl title = InlineQueryResultVideo id videoUrl mimeType (Just thumbUrl) (Just title) Nothing Nothing Nothing Nothing Nothing Nothing Nothing

-- | Minimal 'InlineQueryResultAudio' from id, audio URL and title.
inlineQueryResultAudio :: Text -> Text -> Text -> InlineQueryResult
inlineQueryResultAudio id audioUrl title = InlineQueryResultAudio id audioUrl (Just title) Nothing Nothing Nothing Nothing Nothing

-- | Minimal 'InlineQueryResultVoice' from id, voice URL and title.
inlineQueryResultVoice :: Text -> Text -> Text -> InlineQueryResult
inlineQueryResultVoice id voiceUrl title = InlineQueryResultVoice id voiceUrl (Just title) Nothing Nothing Nothing Nothing

-- | Minimal 'InlineQueryResultDocument' from id, title, document URL and
-- MIME type.
inlineQueryResultDocument :: Text -> Text -> Text -> Text -> InlineQueryResult
inlineQueryResultDocument id title docUrl mimeType = InlineQueryResultDocument id (Just title) Nothing docUrl mimeType Nothing Nothing Nothing Nothing Nothing Nothing

-- | Minimal 'InlineQueryResultLocation' from id, latitude, longitude and
-- title.
inlineQueryResultLocation :: Text -> Float -> Float -> Text -> InlineQueryResult
inlineQueryResultLocation id lat lon title = InlineQueryResultLocation id lat lon (Just title) Nothing Nothing Nothing Nothing Nothing

-- | Minimal 'InlineQueryResultVenue' from id, coordinates, title and address.
inlineQueryResultVenue :: Text -> Float -> Float -> Text -> Text -> InlineQueryResult
inlineQueryResultVenue id lat lon title address = InlineQueryResultVenue id lat lon (Just title) address Nothing Nothing Nothing Nothing Nothing Nothing

-- | Minimal 'InlineQueryResultContact' from id, phone number and first name.
inlineQueryResultContact :: Text -> Text -> Text -> InlineQueryResult
inlineQueryResultContact id phoneNumber firstName = InlineQueryResultContact id phoneNumber firstName Nothing Nothing Nothing Nothing Nothing Nothing

-- | Minimal 'InlineQueryResultGame' from id and game short name.
inlineQueryResultGame :: Text -> Text -> InlineQueryResult
inlineQueryResultGame id gameShortName = InlineQueryResultGame id gameShortName Nothing

-- | Minimal 'InlineQueryResultCachedPhoto' from id and photo file id.
inlineQueryResultCachedPhoto :: Text -> Text -> InlineQueryResult
inlineQueryResultCachedPhoto id fileId = InlineQueryResultCachedPhoto id fileId Nothing Nothing Nothing Nothing Nothing

-- | Minimal 'InlineQueryResultCachedGif' from id and GIF file id.
inlineQueryResultCachedGif :: Text -> Text -> InlineQueryResult
inlineQueryResultCachedGif id fileId = InlineQueryResultCachedGif id fileId Nothing Nothing Nothing Nothing

-- | Minimal 'InlineQueryResultCachedMpeg4Gif' from id and MP4 file id.
inlineQueryResultCachedMpeg4Gif :: Text -> Text -> InlineQueryResult
inlineQueryResultCachedMpeg4Gif id fileId = InlineQueryResultCachedMpeg4Gif id fileId Nothing Nothing Nothing Nothing

-- | Minimal 'InlineQueryResultCachedSticker' from id and sticker file id.
inlineQueryResultCachedSticker :: Text -> Text -> InlineQueryResult
inlineQueryResultCachedSticker id fileId = InlineQueryResultCachedSticker id fileId Nothing Nothing

-- | Minimal 'InlineQueryResultCachedDocument' from id, document file id and
-- title.
inlineQueryResultCachedDocument :: Text -> Text -> Text -> InlineQueryResult
inlineQueryResultCachedDocument id fileId title = InlineQueryResultCachedDocument id (Just title) fileId Nothing Nothing Nothing Nothing

-- | Minimal 'InlineQueryResultCachedVideo' from id, video file id and title.
inlineQueryResultCachedVideo :: Text -> Text -> Text -> InlineQueryResult
inlineQueryResultCachedVideo id fileId title = InlineQueryResultCachedVideo id fileId (Just title) Nothing Nothing Nothing Nothing

-- | Minimal 'InlineQueryResultCachedVoice' from id, voice file id and title.
inlineQueryResultCachedVoice :: Text -> Text -> Text -> InlineQueryResult
inlineQueryResultCachedVoice id fileId title = InlineQueryResultCachedVoice id fileId (Just title) Nothing Nothing Nothing

-- | Minimal 'InlineQueryResultCachedAudio' from id and audio file id.
inlineQueryResultCachedAudio :: Text -> Text -> InlineQueryResult
inlineQueryResultCachedAudio id fileId = InlineQueryResultCachedAudio id fileId Nothing Nothing Nothing
-- | An inline keyboard attached to a message.
data InlineKeyboardMarkup = InlineKeyboardMarkup
  {
    inline_keyboard :: [[InlineKeyboardButton]] -- ^ Rows of buttons; each inner list is one row of the keyboard
  } deriving (FromJSON, ToJSON, Show, Generic)

-- | One button of an inline keyboard.  Besides the label, at most one of the
-- optional action fields is expected to be set.
data InlineKeyboardButton = InlineKeyboardButton
  {
    ikb_text :: Text -- ^ Label text shown on the button
  , ikb_url :: Maybe Text -- ^ Optional URL associated with the button
  , ikb_callback_data :: Maybe Text -- ^ Optional data sent back in a callback query when the button is pressed
  , ikb_switch_inline_query :: Maybe Text -- ^ Optional inline query to switch to
  , ikb_callback_game :: Maybe CallbackGame -- ^ Optional game callback payload
  , ikb_switch_inline_query_current_chat :: Maybe Text -- ^ If set, pressing the button will insert the bot‘s username and the specified inline query in the current chat's input field. Can be empty, in which case only the bot’s username will be inserted.
  } deriving (Show, Generic)

-- | JSON field names drop the 4-character @ikb_@ prefix.
instance ToJSON InlineKeyboardButton where
  toJSON = toJsonDrop 4

instance FromJSON InlineKeyboardButton where
  parseJSON = parseJsonDrop 4

-- | A button with the given label and no action fields set.
inlineKeyboardButton :: Text -> InlineKeyboardButton
inlineKeyboardButton buttonText =
  InlineKeyboardButton buttonText Nothing Nothing Nothing Nothing Nothing
-- | Placeholder for a game callback payload; currently carries no fields.
data CallbackGame = CallbackGame
  {
  } deriving (Show, Generic)

instance ToJSON CallbackGame where
  toJSON = toJsonDrop 3

instance FromJSON CallbackGame where
  parseJSON = parseJsonDrop 3

-- | An incoming callback query from a callback button in an inline keyboard.
-- If the button was attached to a message sent by the bot, 'cq_message' is
-- present; if it was sent via the bot in inline mode, 'cq_inline_message_id'
-- is present instead.
data CallbackQuery = CallbackQuery
  {
    cq_id :: Text -- ^ Unique identifier for this query
  , cq_from :: User -- ^ Sender of the query
  , cq_message :: Maybe Message -- ^ Message the originating button was attached to, if available
  , cq_inline_message_id :: Maybe Text -- ^ Identifier of the inline message the button was attached to, if any
  , cq_chat_instance :: Text -- ^ Chat-instance identifier
  , cq_data :: Maybe Text -- ^ Data associated with the callback button, if any
  , cq_game_short_name :: Maybe Text -- ^ Short name of the game to be returned, if any
  } deriving (Show, Generic)

-- | JSON field names drop the 3-character @cq_@ prefix.
instance ToJSON CallbackQuery where
  toJSON = toJsonDrop 3

instance FromJSON CallbackQuery where
  parseJSON = parseJsonDrop 3
-- | This object represents an incoming update.
-- Only one of the optional parameters can be present in any given update.
data Update = Update
  {
    update_id :: Int -- ^ The update's unique identifier. Update identifiers start from a certain positive number and increase sequentially. This ID becomes especially handy if you’re using 'setWebhooks', since it allows you to ignore repeated updates or to restore the correct update sequence, should they get out of order.
  , message :: Maybe Message -- ^ New incoming message of any kind — text, photo, sticker, etc.
  , edited_message :: Maybe Message -- ^ New version of a message that is known to the bot and was edited
  , channel_post :: Maybe Message -- ^ New incoming channel post of any kind — text, photo, sticker, etc.
  , edited_channel_post :: Maybe Message -- ^ New version of a channel post that is known to the bot and was edited
  , inline_query :: Maybe InlineQuery -- ^ New incoming inline query
  , chosen_inline_result :: Maybe ChosenInlineResult -- ^ The result of an inline query that was chosen by a user and sent to their chat partner
  , callback_query :: Maybe CallbackQuery -- ^ This object represents an incoming callback query from a callback button in an inline keyboard. If the button that originated the query was attached to a message sent by the bot, the field message will be presented. If the button was attached to a message sent via the bot (in inline mode), the field inline_message_id will be presented.
  } deriving (FromJSON, ToJSON, Show, Generic)

-- | This object represents a point on the map.
data Location = Location
  {
    longitude :: Float -- ^ Longitude as defined by sender
  , latitude :: Float -- ^ Latitude as defined by sender
  } deriving (FromJSON, ToJSON, Show, Generic)
-- | This object represents a file ready to be downloaded. The file can be downloaded via the link
-- @https://api.telegram.org/file/bot<token>/<file_path>@. It is guaranteed that the link will be valid
-- for at least 1 hour. When the link expires, a new one can be requested by calling 'getFile'.
--
-- Maximum file size to download is 20 MB
data File = File
  {
    file_id :: Text -- ^ Unique identifier for this file
  , file_size :: Maybe Int -- ^ File size, if known
  , file_path :: Maybe Text -- ^ File path. Use @https://api.telegram.org/file/bot<token>/<file_path>@ to get the file.
  } deriving (FromJSON, ToJSON, Show, Generic)

-- | This object represents a user's profile pictures.
data UserProfilePhotos = UserProfilePhotos
  {
    total_count :: Int -- ^ Total number of profile pictures the target user has
  , photos :: [[PhotoSize]] -- ^ Requested profile pictures (in up to 4 sizes each)
  } deriving (FromJSON, ToJSON, Show, Generic)

-- | Information about one member of a chat.
data ChatMember = ChatMember
  {
    cm_user :: User -- ^ Information about the user
  , cm_status :: Text -- ^ The member's status in the chat. Can be “creator”, “administrator”, “member”, “left” or “kicked”
  } deriving (Show, Generic)

-- | JSON field names drop the 3-character @cm_@ prefix.
instance ToJSON ChatMember where
  toJSON = toJsonDrop 3

instance FromJSON ChatMember where
  parseJSON = parseJsonDrop 3
-- | This object represents a message.
data Message = Message
  {
    message_id :: Int -- ^ Unique message identifier
  , from :: Maybe User -- ^ Sender, can be empty for messages sent to channels
  , date :: Int -- ^ Date the message was sent in Unix time
  , chat :: Chat -- ^ Conversation the message belongs to
  , forward_from :: Maybe User -- ^ For forwarded messages, sender of the original message
  , forward_from_chat :: Maybe Chat -- ^ For messages forwarded from a channel, information about the original channel
  , forward_from_message_id :: Maybe Int -- ^ For forwarded channel posts, identifier of the original message in the channel
  , forward_date :: Maybe Int -- ^ For forwarded messages, date the original message was sent in Unix time
  , reply_to_message :: Maybe Message -- ^ For replies, the original message. Note that the 'Message' object in this field will not contain further 'reply_to_message' fields even if it itself is a reply.
  , edit_date :: Maybe Int -- ^ Date the message was last edited in Unix time
  , text :: Maybe Text -- ^ For text messages, the actual UTF-8 text of the message
  , entities :: Maybe [MessageEntity] -- ^ For text messages, special entities like usernames, URLs, bot commands, etc. that appear in the text
  , audio :: Maybe Audio -- ^ Message is an audio file, information about the file
  , document :: Maybe Document -- ^ Message is a general file, information about the file
  , game :: Maybe Game -- ^ Message is a game, information about the game
  , photo :: Maybe [PhotoSize] -- ^ Message is a photo, available sizes of the photo
  , sticker :: Maybe Sticker -- ^ Message is a sticker, information about the sticker
  , video :: Maybe Video -- ^ Message is a video, information about the video
  , voice :: Maybe Voice -- ^ Message is a voice message, information about the file
  , caption :: Maybe Text -- ^ Caption for the photo or video
  , contact :: Maybe Contact -- ^ Message is a shared contact, information about the contact
  , location :: Maybe Location -- ^ Message is a shared location, information about the location
  , venue :: Maybe Venue -- ^ Message is a venue, information about the venue
  , new_chat_member :: Maybe User -- ^ A new member was added to the group, information about them (this member may be the bot itself)
  , left_chat_member :: Maybe User -- ^ A member was removed from the group, information about them (this member may be the bot itself)
  , new_chat_title :: Maybe Text -- ^ A chat title was changed to this value
  , new_chat_photo :: Maybe [PhotoSize] -- ^ A chat photo was changed to this value
  , delete_chat_photo :: Maybe Bool -- ^ Service message: the chat photo was deleted
  , group_chat_created :: Maybe Bool -- ^ Service message: the group has been created
  , supergroup_chat_created :: Maybe Bool -- ^ Service message: the supergroup has been created
  , channel_chat_created :: Maybe Bool -- ^ Service message: the channel has been created
  , migrate_to_chat_id :: Maybe Int -- ^ The group has been migrated to a supergroup with the specified identifier, not exceeding 1e13 by absolute value
  , migrate_from_chat_id :: Maybe Int -- ^ The supergroup has been migrated from a group with the specified identifier, not exceeding 1e13 by absolute value
  , pinned_message :: Maybe Message -- ^ Specified message was pinned. Note that the Message object in this field will not contain further reply_to_message fields even if it is itself a reply.
  } deriving (FromJSON, ToJSON, Show, Generic)
-- | This object represents one special entity in a text message. For example, hashtags, usernames, URLs, etc.
data MessageEntity = MessageEntity
  {
    me_type :: Text -- ^ Type of the entity. Can be mention (@username), hashtag, bot_command, url, email, bold (bold text), italic (italic text), code (monowidth string), pre (monowidth block), text_link (for clickable text URLs), text_mention (for users without usernames)
  , me_offset :: Int -- ^ Offset in UTF-16 code units to the start of the entity
  , me_length :: Int -- ^ Length of the entity in UTF-16 code units
  , me_url :: Maybe Text -- ^ For “text_link” only, url that will be opened after user taps on the text
  , me_user :: Maybe User -- ^ For “text_mention” only, the mentioned user
  } deriving (Show, Generic)

-- | JSON field names drop the 3-character @me_@ prefix.
instance ToJSON MessageEntity where
  toJSON = toJsonDrop 3

instance FromJSON MessageEntity where
  parseJSON = parseJsonDrop 3

-- | This object represents a venue.
data Venue = Venue
  {
    venue_location :: Location -- ^ Venue location
  , venue_title :: Text -- ^ Name of the venue
  , venue_address :: Text -- ^ Address of the venue
  , venue_foursquare_id :: Maybe Text -- ^ Foursquare identifier of the venue
  } deriving (Show, Generic)

-- | JSON field names drop the 6-character @venue_@ prefix.
instance ToJSON Venue where
  toJSON = toJsonDrop 6

instance FromJSON Venue where
  parseJSON = parseJsonDrop 6
-- | One button of a custom reply keyboard.
data KeyboardButton = KeyboardButton
  {
    kb_text :: Text -- ^ Text of the button. If none of the optional fields are used, it will be sent to the bot as a message when the button is pressed
  , kb_request_contact :: Maybe Bool -- ^ If True, the user's phone number will be sent as a contact when the button is pressed. Available in private chats only
  , kb_request_location :: Maybe Bool -- ^ If True, the user's current location will be sent when the button is pressed. Available in private chats only
  } deriving (Show, Generic)

-- | JSON field names drop the 3-character @kb_@ prefix.
instance ToJSON KeyboardButton where
  toJSON = toJsonDrop 3

instance FromJSON KeyboardButton where
  parseJSON = parseJsonDrop 3

-- | A keyboard button with the given text and no request flags set.
keyboardButton :: Text -> KeyboardButton
keyboardButton buttonText = KeyboardButton buttonText Nothing Nothing
-- | Information about the current status of a webhook.
data WebhookInfo = WebhookInfo
  {
    whi_url :: Text -- ^ Webhook URL, may be empty if webhook is not set up
  , whi_has_custom_certificate :: Bool -- ^ True, if a custom certificate was provided for webhook certificate checks
  , whi_pending_update_count :: Int -- ^ Number of updates awaiting delivery
  , whi_last_error_date :: Maybe Int -- ^ Unix time for the most recent error that happened when trying to deliver an update via webhook
  , whi_last_error_message :: Maybe Text -- ^ Error message in human-readable format for the most recent error that happened when trying to deliver an update via webhook
  , whi_max_connections :: Maybe Int -- ^ Maximum allowed number of simultaneous HTTPS connections to the webhook for update delivery
  , whi_allowed_updates :: Maybe [Text] -- ^ A list of update types the bot is subscribed to. Defaults to all update types
  } deriving (Show, Generic)

-- | JSON field names drop the 4-character @whi_@ prefix.
instance ToJSON WebhookInfo where
  toJSON = toJsonDrop 4

instance FromJSON WebhookInfo where
  parseJSON = parseJsonDrop 4
module Gtk.SamplesSpec (main, spec) where
import Test.Hspec
main :: IO ()
main = hspec spec

-- | Placeholder spec for the Gtk samples.  The original example asserted
-- @True `shouldBe` False@, which contradicted its own description and made
-- this scaffold suite fail unconditionally.  A passing tautology keeps the
-- scaffold green; replace it with real expectations as samples grow.
spec :: Spec
spec = do
  describe "someFunction" $ do
    it "should work fine" $ do
      True `shouldBe` True
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.GHC
-- Copyright : Isaac Jones 2003-2007
-- License : BSD3
--
-- Maintainer : cabal-devel@haskell.org
-- Portability : portable
--
-- This is a fairly large module. It contains most of the GHC-specific code for
-- configuring, building and installing packages. It also exports a function
-- for finding out what packages are already installed. Configuring involves
-- finding the @ghc@ and @ghc-pkg@ programs, finding what language extensions
-- this version of ghc supports and returning a 'Compiler' value.
--
-- 'getInstalledPackages' involves calling the @ghc-pkg@ program to find out
-- what packages are installed.
--
-- Building is somewhat complex as there is quite a bit of information to take
-- into account. We have to build libs and programs, possibly for profiling and
-- shared libs. We have to support building libraries that will be usable by
-- GHCi and also ghc's @-split-objs@ feature. We have to compile any C files
-- using ghc. Linking, especially for @split-objs@ is remarkably complex,
-- partly because there tend to be 1,000's of @.o@ files and this can often be
-- more than we can pass to the @ld@ or @ar@ programs in one go.
--
-- Installing for libs and exes involves finding the right files and copying
-- them to the right places. One of the more tricky things about this module is
-- remembering the layout of files in the build directory (which is not
-- explicitly documented) and thus what search dirs are used for various kinds
-- of files.
module Distribution.Simple.GHC (
getGhcInfo,
configure, getInstalledPackages, getPackageDBContents,
buildLib, buildExe,
replLib, replExe,
startInterpreter,
installLib, installExe,
libAbiHash,
hcPkgInfo,
registerPackage,
componentGhcOptions,
getLibDir,
isDynamic,
getGlobalPackageDB,
pkgRoot
) where
import qualified Distribution.Simple.GHC.IPI641 as IPI641
import qualified Distribution.Simple.GHC.IPI642 as IPI642
import qualified Distribution.Simple.GHC.Internal as Internal
import Distribution.Simple.GHC.ImplInfo
import Distribution.PackageDescription as PD
( PackageDescription(..), BuildInfo(..), Executable(..), Library(..)
, allExtensions, libModules, exeModules
, hcOptions, hcSharedOptions, hcProfOptions )
import Distribution.InstalledPackageInfo
( InstalledPackageInfo )
import qualified Distribution.InstalledPackageInfo as InstalledPackageInfo
( InstalledPackageInfo_(..) )
import Distribution.Simple.PackageIndex (InstalledPackageIndex)
import qualified Distribution.Simple.PackageIndex as PackageIndex
import Distribution.Simple.LocalBuildInfo
( LocalBuildInfo(..), ComponentLocalBuildInfo(..)
, absoluteInstallDirs, depLibraryPaths )
import qualified Distribution.Simple.Hpc as Hpc
import Distribution.Simple.InstallDirs hiding ( absoluteInstallDirs )
import Distribution.Simple.BuildPaths
import Distribution.Simple.Utils
import Distribution.Package
( PackageName(..) )
import qualified Distribution.ModuleName as ModuleName
import Distribution.Simple.Program
( Program(..), ConfiguredProgram(..), ProgramConfiguration
, ProgramSearchPath
, rawSystemProgramStdout, rawSystemProgramStdoutConf
, getProgramInvocationOutput, requireProgramVersion, requireProgram
, userMaybeSpecifyPath, programPath, lookupProgram, addKnownProgram
, ghcProgram, ghcPkgProgram, hsc2hsProgram, ldProgram )
import qualified Distribution.Simple.Program.HcPkg as HcPkg
import qualified Distribution.Simple.Program.Ar as Ar
import qualified Distribution.Simple.Program.Ld as Ld
import qualified Distribution.Simple.Program.Strip as Strip
import Distribution.Simple.Program.GHC
import Distribution.Simple.Setup
( toFlag, fromFlag, fromFlagOrDefault, configCoverage, configDistPref )
import qualified Distribution.Simple.Setup as Cabal
( Flag(..) )
import Distribution.Simple.Compiler
( CompilerFlavor(..), CompilerId(..), Compiler(..), compilerVersion
, PackageDB(..), PackageDBStack, AbiTag(..) )
import Distribution.Version
( Version(..), anyVersion, orLaterVersion )
import Distribution.System
( Platform(..), OS(..) )
import Distribution.Verbosity
import Distribution.Text
( display )
import Distribution.Utils.NubList
( NubListR, overNubListR, toNubListR )
import Language.Haskell.Extension (Extension(..), KnownExtension(..))
import Control.Monad ( unless, when )
import Data.Char ( isDigit, isSpace )
import Data.List
import qualified Data.Map as M ( fromList )
import Data.Maybe ( catMaybes )
#if __GLASGOW_HASKELL__ < 710
import Data.Monoid ( Monoid(..) )
#endif
import Data.Version ( showVersion )
import System.Directory
( doesFileExist, getAppUserDataDirectory, createDirectoryIfMissing )
import System.FilePath ( (</>), (<.>), takeExtension,
takeDirectory, replaceExtension,
splitExtension, isRelative )
import qualified System.Info
-- -----------------------------------------------------------------------------
-- Configuring
-- | Find and configure a GHC toolchain: @ghc@ itself, a version-matched
-- @ghc-pkg@, and a matching @hsc2hs@ (the latter only registered as a known
-- program, not configured).  Returns the populated 'Compiler' record, the
-- compiler's target platform (if it could be determined from @ghc --info@),
-- and the updated program db with gcc/ld configured as well.
configure :: Verbosity -> Maybe FilePath -> Maybe FilePath
          -> ProgramConfiguration
          -> IO (Compiler, Maybe Platform, ProgramConfiguration)
configure verbosity hcPath hcPkgPath conf0 = do
  (ghcProg, ghcVersion, conf1) <-
    requireProgramVersion verbosity ghcProgram
      (orLaterVersion (Version [6,4] []))
      (userMaybeSpecifyPath "ghc" hcPath conf0)
  let implInfo = ghcVersionImplInfo ghcVersion

  -- This is slightly tricky, we have to configure ghc first, then we use the
  -- location of ghc to help find ghc-pkg in the case that the user did not
  -- specify the location of ghc-pkg directly:
  (ghcPkgProg, ghcPkgVersion, conf2) <-
    requireProgramVersion verbosity ghcPkgProgram {
      programFindLocation = guessGhcPkgFromGhcPath ghcProg
    }
    anyVersion (userMaybeSpecifyPath "ghc-pkg" hcPkgPath conf1)

  -- ghc and ghc-pkg ship in lock step; a version mismatch indicates a broken
  -- installation, so refuse to continue.
  when (ghcVersion /= ghcPkgVersion) $ die $
       "Version mismatch between ghc and ghc-pkg: "
    ++ programPath ghcProg ++ " is version " ++ display ghcVersion ++ " "
    ++ programPath ghcPkgProg ++ " is version " ++ display ghcPkgVersion

  -- Likewise we try to find the matching hsc2hs program.
  let hsc2hsProgram' = hsc2hsProgram {
        programFindLocation = guessHsc2hsFromGhcPath ghcProg
      }
      conf3 = addKnownProgram hsc2hsProgram' conf2

  languages  <- Internal.getLanguages verbosity implInfo ghcProg
  extensions <- Internal.getExtensions verbosity implInfo ghcProg

  ghcInfo <- Internal.getGhcInfo verbosity implInfo ghcProg
  let ghcInfoMap = M.fromList ghcInfo

  let comp = Compiler {
        compilerId         = CompilerId GHC ghcVersion,
        compilerAbiTag     = NoAbiTag,
        compilerCompat     = [],
        compilerLanguages  = languages,
        compilerExtensions = extensions,
        compilerProperties = ghcInfoMap
      }
      compPlatform = Internal.targetPlatform ghcInfo
      conf4 = Internal.configureToolchain implInfo ghcProg ghcInfoMap conf3 -- configure gcc and ld
  return (comp, compPlatform, conf4)
-- | Given something like /usr/local/bin/ghc-6.6.1(.exe) we try and find
-- the corresponding tool; e.g. if the tool is ghc-pkg, we try looking
-- for a versioned or unversioned ghc-pkg in the same dir, that is:
--
-- > /usr/local/bin/ghc-pkg-ghc-6.6.1(.exe)
-- > /usr/local/bin/ghc-pkg-6.6.1(.exe)
-- > /usr/local/bin/ghc-pkg(.exe)
--
guessToolFromGhcPath :: Program -> ConfiguredProgram
                     -> Verbosity -> ProgramSearchPath
                     -> IO (Maybe FilePath)
guessToolFromGhcPath tool ghcProg verbosity searchpath
  = do let toolname          = programName tool
           path              = programPath ghcProg
           dir               = takeDirectory path
           -- e.g. "-6.6.1" from ".../ghc-6.6.1"; empty for plain "ghc"
           versionSuffix     = takeVersionSuffix (dropExeExtension path)
           guessNormal       = dir </> toolname <.> exeExtension
           guessGhcVersioned = dir </> (toolname ++ "-ghc" ++ versionSuffix)
                                   <.> exeExtension
           guessVersioned    = dir </> (toolname ++ versionSuffix)
                                   <.> exeExtension
           -- prefer the most specifically versioned name; only try the bare
           -- name when ghc itself carried no version suffix
           guesses | null versionSuffix = [guessNormal]
                   | otherwise          = [guessGhcVersioned,
                                           guessVersioned,
                                           guessNormal]
       info verbosity $ "looking for tool " ++ toolname
         ++ " near compiler in " ++ dir
       exists <- mapM doesFileExist guesses
       case [ file | (file, True) <- zip guesses exists ] of
         -- If we can't find it near ghc, fall back to the usual
         -- method.
         []     -> programFindLocation tool verbosity searchpath
         (fp:_) -> do info verbosity $ "found " ++ toolname ++ " in " ++ fp
                      return (Just fp)
  where takeVersionSuffix :: FilePath -> String
        takeVersionSuffix = takeWhileEndLE isSuffixChar

        isSuffixChar :: Char -> Bool
        isSuffixChar c = isDigit c || c == '.' || c == '-'

        -- strip ".exe" (or whatever 'exeExtension' is) so the version suffix
        -- is taken from the real basename
        dropExeExtension :: FilePath -> FilePath
        dropExeExtension filepath =
          case splitExtension filepath of
            (filepath', extension) | extension == exeExtension -> filepath'
                                   | otherwise                 -> filepath
-- | Given something like /usr/local/bin/ghc-6.6.1(.exe) we try and find a
-- corresponding ghc-pkg, we try looking for both a versioned and unversioned
-- ghc-pkg in the same dir, that is:
--
-- > /usr/local/bin/ghc-pkg-ghc-6.6.1(.exe)
-- > /usr/local/bin/ghc-pkg-6.6.1(.exe)
-- > /usr/local/bin/ghc-pkg(.exe)
--
guessGhcPkgFromGhcPath :: ConfiguredProgram
                       -> Verbosity -> ProgramSearchPath -> IO (Maybe FilePath)
guessGhcPkgFromGhcPath ghcProg verbosity searchpath =
    guessToolFromGhcPath ghcPkgProgram ghcProg verbosity searchpath
-- | Given something like /usr/local/bin/ghc-6.6.1(.exe) we try and find a
-- corresponding hsc2hs, we try looking for both a versioned and unversioned
-- hsc2hs in the same dir, that is:
--
-- > /usr/local/bin/hsc2hs-ghc-6.6.1(.exe)
-- > /usr/local/bin/hsc2hs-6.6.1(.exe)
-- > /usr/local/bin/hsc2hs(.exe)
--
guessHsc2hsFromGhcPath :: ConfiguredProgram
                       -> Verbosity -> ProgramSearchPath -> IO (Maybe FilePath)
guessHsc2hsFromGhcPath ghcProg verbosity searchpath =
    guessToolFromGhcPath hsc2hsProgram ghcProg verbosity searchpath
-- | Query the given ghc for its @ghc --info@ key\/value pairs.
--
-- NOTE(review): the @Just version@ binding is a partial pattern — it assumes
-- 'ghcProg' was configured with a known version (true for programs produced
-- by 'configure'); an unversioned program would crash with a pattern-match
-- failure here.
getGhcInfo :: Verbosity -> ConfiguredProgram -> IO [(String, String)]
getGhcInfo verbosity ghcProg = Internal.getGhcInfo verbosity implInfo ghcProg
  where
    Just version = programVersion ghcProg
    implInfo = ghcVersionImplInfo version
-- | Given a single package DB, return all installed packages.
-- | Given a single package DB, return all installed packages.
getPackageDBContents :: Verbosity -> PackageDB -> ProgramConfiguration
                     -> IO InstalledPackageIndex
getPackageDBContents verbosity packagedb conf =
    getInstalledPackages' verbosity [packagedb] conf
      >>= \pkgss -> toPackageIndex verbosity pkgss conf
-- | Given a package DB stack, return all installed packages.
-- | Given a package DB stack, return all installed packages.
--
-- Validates the stack (global db first, no env-var interference) before
-- querying, and patches up the @rts@ package entry on the way out.
getInstalledPackages :: Verbosity -> PackageDBStack -> ProgramConfiguration
                     -> IO InstalledPackageIndex
getInstalledPackages verbosity packagedbs conf = do
  checkPackageDbEnvVar
  checkPackageDbStack packagedbs
  pkgss <- getInstalledPackages' verbosity packagedbs conf
  index <- toPackageIndex verbosity pkgss conf
  return $! hackRtsPackage index

  where
    -- Strip the bogus mingw include dir from the single registered rts
    -- package, if there is exactly one; anything else is left untouched.
    hackRtsPackage index =
      case PackageIndex.lookupPackageName index (PackageName "rts") of
        [(_,[rts])]
           -> PackageIndex.insert (removeMingwIncludeDir rts) index
        _  -> index -- No (or multiple) ghc rts package is registered!!
                    -- Feh, whatever, the ghc test suite does some crazy stuff.
-- | Given a list of @(PackageDB, InstalledPackageInfo)@ pairs, produce a
-- @PackageIndex@. Helper function used by 'getPackageDBContents' and
-- 'getInstalledPackages'.
-- | Given a list of @(PackageDB, InstalledPackageInfo)@ pairs, produce a
-- @PackageIndex@. Helper function used by 'getPackageDBContents' and
-- 'getInstalledPackages'.
toPackageIndex :: Verbosity
               -> [(PackageDB, [InstalledPackageInfo])]
               -> ProgramConfiguration
               -> IO InstalledPackageIndex
toPackageIndex verbosity pkgss conf = do
  -- On Windows, various fields have $topdir/foo rather than full
  -- paths. We need to substitute the right value in so that when
  -- we, for example, call gcc, we have proper paths to give it.
  topDir <- getLibDir' verbosity ghcProg
  let indices = [ PackageIndex.fromList (map (Internal.substTopDir topDir) pkgs)
                | (_, pkgs) <- pkgss ]
  return $! (mconcat indices)

  where
    -- Partial pattern: assumes ghc has already been configured into 'conf'
    -- (which callers in this module guarantee); crashes otherwise.
    Just ghcProg = lookupProgram ghcProgram conf
-- | Ask the configured ghc for its library directory (@ghc --print-libdir@),
-- with trailing whitespace removed.
getLibDir :: Verbosity -> LocalBuildInfo -> IO FilePath
getLibDir verbosity lbi = do
    output <- rawSystemProgramStdoutConf verbosity ghcProgram
                (withPrograms lbi) ["--print-libdir"]
    return (dropWhileEndLE isSpace output)
-- | Like 'getLibDir', but for an already-configured ghc program rather than
-- a 'LocalBuildInfo'.
getLibDir' :: Verbosity -> ConfiguredProgram -> IO FilePath
getLibDir' verbosity ghcProg = do
    output <- rawSystemProgramStdout verbosity ghcProg ["--print-libdir"]
    return (dropWhileEndLE isSpace output)
-- | Return the 'FilePath' to the global GHC package database.
-- | Return the 'FilePath' to the global GHC package database.
getGlobalPackageDB :: Verbosity -> ConfiguredProgram -> IO FilePath
getGlobalPackageDB verbosity ghcProg = do
    output <- rawSystemProgramStdout verbosity ghcProg
                ["--print-global-package-db"]
    return (dropWhileEndLE isSpace output)
-- | Guard against the @GHC_PACKAGE_PATH@ environment variable, which would
-- interfere with the package dbs we pass explicitly.  Delegates to
-- 'Internal.checkPackageDbEnvVar' — presumably that dies when the variable
-- is set; confirm there.
checkPackageDbEnvVar :: IO ()
checkPackageDbEnvVar =
    Internal.checkPackageDbEnvVar "GHC" "GHC_PACKAGE_PATH"
-- | Validate a package db stack for GHC: the global db must come first and
-- must not be repeated.  Clause order matters — the first equation accepts
-- the valid shape, the remaining two diagnose the two failure modes.
checkPackageDbStack :: PackageDBStack -> IO ()
checkPackageDbStack (GlobalPackageDB:rest)
  | GlobalPackageDB `notElem` rest = return ()
-- Global db missing entirely:
checkPackageDbStack rest
  | GlobalPackageDB `notElem` rest =
      die $ "With current ghc versions the global package db is always used "
         ++ "and must be listed first. This ghc limitation may be lifted in "
         ++ "future, see http://hackage.haskell.org/trac/ghc/ticket/5977"
-- Global db present but not first, or listed more than once:
checkPackageDbStack _ =
  die $ "If the global package db is specified, it must be "
     ++ "specified first and cannot be specified multiple times"
-- GHC < 6.10 put "$topdir/include/mingw" in rts's installDirs. This
-- breaks when you want to use a different gcc, so we need to filter
-- it out.
-- GHC < 6.10 put "$topdir/include/mingw" in rts's installDirs. This
-- breaks when you want to use a different gcc, so we need to filter
-- it out.
removeMingwIncludeDir :: InstalledPackageInfo -> InstalledPackageInfo
removeMingwIncludeDir pkg =
    pkg { InstalledPackageInfo.includeDirs = keep }
  where
    keep = [ dir | dir <- InstalledPackageInfo.includeDirs pkg
                 , not ("mingw" `isSuffixOf` dir) ]
-- | Get the packages from specific PackageDBs, not cumulative.
--
-- | Get the packages from specific PackageDBs, not cumulative.
--
getInstalledPackages' :: Verbosity -> [PackageDB] -> ProgramConfiguration
                      -> IO [(PackageDB, [InstalledPackageInfo])]
-- ghc >= 6.9 supports @ghc-pkg dump@, which yields each db's contents
-- directly.
getInstalledPackages' verbosity packagedbs conf
  | ghcVersion >= Version [6,9] [] =
  sequence
    [ do pkgs <- HcPkg.dump (hcPkgInfo conf) verbosity packagedb
         return (packagedb, pkgs)
    | packagedb <- packagedbs ]
  where
    -- Partial patterns: by the time we are called, ghc must have been
    -- configured (with a version) into 'conf'.
    Just ghcProg = lookupProgram ghcProgram conf
    Just ghcVersion = programVersion ghcProg

-- Fallback for ghc < 6.9: scrape the db file names out of @ghc-pkg list@
-- and parse each package file with the appropriate Read instance.
getInstalledPackages' verbosity packagedbs conf = do
    str <- rawSystemProgramStdoutConf verbosity ghcPkgProgram conf ["list"]
    -- The database file names are the output lines ending in ':'.  Skip
    -- blank lines first: 'last' is partial and would crash on an empty line.
    let pkgFiles = [ init line | line <- lines str
                   , not (null line), last line == ':' ]
        dbFile packagedb = case (packagedb, pkgFiles) of
          (GlobalPackageDB, global:_)      -> return $ Just global
          (UserPackageDB,  _global:user:_) -> return $ Just user
          -- if the user db is not listed, there simply is none:
          (UserPackageDB,  _global:_)      -> return $ Nothing
          (SpecificPackageDB specific, _)  -> return $ Just specific
          _ -> die "cannot read ghc-pkg package listing"
    pkgFiles' <- mapM dbFile packagedbs
    sequence [ withFileContents file $ \content -> do
                  pkgs <- readPackages file content
                  return (db, pkgs)
             | (db , Just file) <- zip packagedbs pkgFiles' ]
  where
    -- Depending on the version of ghc we use a different type's Read
    -- instance to parse the package file and then convert.
    -- It's a bit yuck. But that's what we get for using Read/Show.
    readPackages
      | ghcVersion >= Version [6,4,2] []
      = \file content -> case reads content of
          [(pkgs, _)] -> return (map IPI642.toCurrent pkgs)
          _           -> failToRead file
      | otherwise
      = \file content -> case reads content of
          [(pkgs, _)] -> return (map IPI641.toCurrent pkgs)
          _           -> failToRead file
    Just ghcProg = lookupProgram ghcProgram conf
    Just ghcVersion = programVersion ghcProg
    failToRead file = die $ "cannot read ghc package database " ++ file
-- -----------------------------------------------------------------------------
-- Building
-- | Build a library with GHC.
--
buildLib, replLib :: Verbosity -> Cabal.Flag (Maybe Int)
                  -> PackageDescription -> LocalBuildInfo
                  -> Library -> ComponentLocalBuildInfo -> IO ()
buildLib = buildOrReplLib False
-- | Load a library into GHCi; shares its implementation with 'buildLib'.
replLib = buildOrReplLib True
-- | Shared worker for 'buildLib' and 'replLib': compile (or load into GHCi,
-- when the 'Bool' is True) a library in all of the enabled ways (vanilla,
-- profiling, shared, GHCi), build its C sources, and link the resulting
-- archives / shared object.
buildOrReplLib :: Bool -> Verbosity -> Cabal.Flag (Maybe Int)
               -> PackageDescription -> LocalBuildInfo
               -> Library -> ComponentLocalBuildInfo -> IO ()
buildOrReplLib forRepl verbosity numJobs pkg_descr lbi lib clbi = do
  libName <- case componentLibraries clbi of
             [libName] -> return libName
             [] -> die "No library name found when building library"
             _ -> die "Multiple library names found when building library"
  let libTargetDir = buildDir lbi
      whenVanillaLib forceVanilla =
        when (forceVanilla || withVanillaLib lbi)
      whenProfLib = when (withProfLib lbi)
      whenSharedLib forceShared =
        when (forceShared || withSharedLib lbi)
      whenGHCiLib = when (withGHCiLib lbi && withVanillaLib lbi)
      ifReplLib = when forRepl
      comp = compiler lbi
      ghcVersion = compilerVersion comp
      implInfo = getImplInfo comp
      (Platform _hostArch hostOS) = hostPlatform lbi
      hole_insts = map (\(k,(p,n)) -> (k,(InstalledPackageInfo.packageKey p,n))) (instantiatedWith lbi)
  (ghcProg, _) <- requireProgram verbosity ghcProgram (withPrograms lbi)
  let runGhcProg = runGHC verbosity ghcProg comp

  libBi <- hackThreadedFlag verbosity
             comp (withProfLib lbi) (libBuildInfo lib)

  let isGhcDynamic        = isDynamic comp
      dynamicTooSupported = supportsDynamicToo comp
      doingTH = EnableExtension TemplateHaskell `elem` allExtensions libBi
      -- TH needs object code of the compiler's own linkage flavour available
      -- at compile time, so force the matching way:
      forceVanillaLib = doingTH && not isGhcDynamic
      forceSharedLib  = doingTH && isGhcDynamic
      -- TH always needs default libs, even when building for profiling

  -- Determine if program coverage should be enabled and if so, what
  -- '-hpcdir' should be.
  let isCoverageEnabled = fromFlag $ configCoverage $ configFlags lbi
      -- Component name. Not 'libName' because that has the "HS" prefix
      -- that GHC gives Haskell libraries.
      cname = display $ PD.package $ localPkgDescr lbi
      distPref = fromFlag $ configDistPref $ configFlags lbi
      hpcdir way
        | isCoverageEnabled = toFlag $ Hpc.mixDir distPref way cname
        | otherwise = mempty

  createDirectoryIfMissingVerbose verbosity True libTargetDir
  -- TODO: do we need to put hs-boot files into place for mutually recursive
  -- modules?
  let cObjs       = map (`replaceExtension` objExtension) (cSources libBi)
      baseOpts    = componentGhcOptions verbosity lbi libBi clbi libTargetDir
      vanillaOpts = baseOpts `mappend` mempty {
                      ghcOptMode         = toFlag GhcModeMake,
                      ghcOptNumJobs      = numJobs,
                      ghcOptPackageKey   = toFlag (pkgKey lbi),
                      ghcOptSigOf        = hole_insts,
                      ghcOptInputModules = toNubListR $ libModules lib,
                      ghcOptHPCDir       = hpcdir Hpc.Vanilla
                    }
      profOpts    = vanillaOpts `mappend` mempty {
                      ghcOptProfilingMode = toFlag True,
                      ghcOptHiSuffix      = toFlag "p_hi",
                      ghcOptObjSuffix     = toFlag "p_o",
                      ghcOptExtra         = toNubListR $ hcProfOptions GHC libBi,
                      ghcOptHPCDir        = hpcdir Hpc.Prof
                    }
      sharedOpts  = vanillaOpts `mappend` mempty {
                      ghcOptDynLinkMode = toFlag GhcDynamicOnly,
                      ghcOptFPic        = toFlag True,
                      ghcOptHiSuffix    = toFlag "dyn_hi",
                      ghcOptObjSuffix   = toFlag "dyn_o",
                      ghcOptExtra       = toNubListR $ hcSharedOptions GHC libBi,
                      ghcOptHPCDir      = hpcdir Hpc.Dyn
                    }
      linkerOpts  = mempty {
                      ghcOptLinkOptions    = toNubListR $ PD.ldOptions libBi,
                      ghcOptLinkLibs       = toNubListR $ extraLibs libBi,
                      ghcOptLinkLibPath    = toNubListR $ extraLibDirs libBi,
                      ghcOptLinkFrameworks = toNubListR $ PD.frameworks libBi,
                      ghcOptInputFiles     = toNubListR
                                             [libTargetDir </> x | x <- cObjs]
                    }
      replOpts    = vanillaOpts {
                      ghcOptExtra        = overNubListR
                                           Internal.filterGhciFlags $
                                           (ghcOptExtra vanillaOpts),
                      ghcOptNumJobs      = mempty
                    }
                    `mappend` linkerOpts
                    `mappend` mempty {
                      ghcOptMode         = toFlag GhcModeInteractive,
                      ghcOptOptimisation = toFlag GhcNoOptimisation
                    }
      -- single-pass vanilla+shared build via -dynamic-too:
      vanillaSharedOpts = vanillaOpts `mappend` mempty {
                      ghcOptDynLinkMode  = toFlag GhcStaticAndDynamic,
                      ghcOptDynHiSuffix  = toFlag "dyn_hi",
                      ghcOptDynObjSuffix = toFlag "dyn_o",
                      ghcOptHPCDir       = hpcdir Hpc.Dyn
                    }

  unless (forRepl || null (libModules lib)) $
    do let vanilla = whenVanillaLib forceVanillaLib (runGhcProg vanillaOpts)
           shared  = whenSharedLib  forceSharedLib  (runGhcProg sharedOpts)
           useDynToo = dynamicTooSupported &&
                       (forceVanillaLib || withVanillaLib lbi) &&
                       (forceSharedLib  || withSharedLib  lbi) &&
                       null (hcSharedOptions GHC libBi)
       if useDynToo
          then do
              runGhcProg vanillaSharedOpts
              case (hpcdir Hpc.Dyn, hpcdir Hpc.Vanilla) of
                (Cabal.Flag dynDir, Cabal.Flag vanillaDir) -> do
                    -- When the vanilla and shared library builds are done
                    -- in one pass, only one set of HPC module interfaces
                    -- are generated. This set should suffice for both
                    -- static and dynamically linked executables. We copy
                    -- the modules interfaces so they are available under
                    -- both ways.
                    copyDirectoryRecursive verbosity dynDir vanillaDir
                _ -> return ()
          else if isGhcDynamic
            -- build the way matching the compiler's own linkage first, so
            -- that TH can load the objects it needs:
            then do shared;  vanilla
            else do vanilla; shared
       whenProfLib (runGhcProg profOpts)

  -- build any C sources
  unless (null (cSources libBi)) $ do
    info verbosity "Building C Sources..."
    sequence_
      [ do let baseCcOpts    = Internal.componentCcGhcOptions verbosity implInfo
                               lbi libBi clbi libTargetDir filename
               vanillaCcOpts = if isGhcDynamic
                               -- Dynamic GHC requires C sources to be built
                               -- with -fPIC for REPL to work. See #2207.
                               then baseCcOpts { ghcOptFPic = toFlag True }
                               else baseCcOpts
               profCcOpts    = vanillaCcOpts `mappend` mempty {
                                 ghcOptProfilingMode = toFlag True,
                                 ghcOptObjSuffix     = toFlag "p_o"
                               }
               sharedCcOpts  = vanillaCcOpts `mappend` mempty {
                                 ghcOptFPic        = toFlag True,
                                 ghcOptDynLinkMode = toFlag GhcDynamicOnly,
                                 ghcOptObjSuffix   = toFlag "dyn_o"
                               }
               odir          = fromFlag (ghcOptObjDir vanillaCcOpts)
           createDirectoryIfMissingVerbose verbosity True odir
           -- skip C files whose object file is already up to date:
           let runGhcProgIfNeeded ccOpts = do
                 needsRecomp <- checkNeedsRecompilation filename ccOpts
                 when needsRecomp $ runGhcProg ccOpts
           runGhcProgIfNeeded vanillaCcOpts
           unless forRepl $
             whenSharedLib forceSharedLib (runGhcProgIfNeeded sharedCcOpts)
           unless forRepl $ whenProfLib (runGhcProgIfNeeded profCcOpts)
      | filename <- cSources libBi]

  -- TODO: problem here is we need the .c files built first, so we can load them
  -- with ghci, but .c files can depend on .h files generated by ghc by ffi
  -- exports.

  ifReplLib $ do
    when (null (libModules lib)) $ warn verbosity "No exposed modules"
    ifReplLib (runGhcProg replOpts)

  -- link:
  unless forRepl $ do
    info verbosity "Linking..."
    let cProfObjs   = map (`replaceExtension` ("p_" ++ objExtension))
                      (cSources libBi)
        cSharedObjs = map (`replaceExtension` ("dyn_" ++ objExtension))
                      (cSources libBi)
        cid = compilerId (compiler lbi)
        vanillaLibFilePath = libTargetDir </> mkLibName           libName
        profileLibFilePath = libTargetDir </> mkProfLibName       libName
        sharedLibFilePath  = libTargetDir </> mkSharedLibName cid libName
        ghciLibFilePath    = libTargetDir </> Internal.mkGHCiLibName libName
        libInstallPath = libdir $ absoluteInstallDirs pkg_descr lbi NoCopyDest
        sharedLibInstallPath = libInstallPath </> mkSharedLibName cid libName

    stubObjs <- fmap catMaybes $ sequence
      [ findFileWithExtension [objExtension] [libTargetDir]
          (ModuleName.toFilePath x ++"_stub")
      | ghcVersion < Version [7,2] [] -- ghc-7.2+ does not make _stub.o files
      , x <- libModules lib ]
    stubProfObjs <- fmap catMaybes $ sequence
      [ findFileWithExtension ["p_" ++ objExtension] [libTargetDir]
          (ModuleName.toFilePath x ++"_stub")
      | ghcVersion < Version [7,2] [] -- ghc-7.2+ does not make _stub.o files
      , x <- libModules lib ]
    stubSharedObjs <- fmap catMaybes $ sequence
      [ findFileWithExtension ["dyn_" ++ objExtension] [libTargetDir]
          (ModuleName.toFilePath x ++"_stub")
      | ghcVersion < Version [7,2] [] -- ghc-7.2+ does not make _stub.o files
      , x <- libModules lib ]

    hObjs     <- Internal.getHaskellObjects implInfo lib lbi
                      libTargetDir objExtension True
    hProfObjs <-
      if (withProfLib lbi)
              then Internal.getHaskellObjects implInfo lib lbi
                      libTargetDir ("p_" ++ objExtension) True
              else return []
    hSharedObjs <-
      if (withSharedLib lbi)
              then Internal.getHaskellObjects implInfo lib lbi
                      libTargetDir ("dyn_" ++ objExtension) False
              else return []

    unless (null hObjs && null cObjs && null stubObjs) $ do
      rpaths <- getRPaths lbi clbi

      let staticObjectFiles =
                 hObjs
              ++ map (libTargetDir </>) cObjs
              ++ stubObjs
          profObjectFiles =
                 hProfObjs
              ++ map (libTargetDir </>) cProfObjs
              ++ stubProfObjs
          ghciObjFiles =
                 hObjs
              ++ map (libTargetDir </>) cObjs
              ++ stubObjs
          dynamicObjectFiles =
                 hSharedObjs
              ++ map (libTargetDir </>) cSharedObjs
              ++ stubSharedObjs
          -- After the relocation lib is created we invoke ghc -shared
          -- with the dependencies spelled out as -package arguments
          -- and ghc invokes the linker with the proper library paths
          ghcSharedLinkArgs =
              mempty {
                ghcOptShared             = toFlag True,
                ghcOptDynLinkMode        = toFlag GhcDynamicOnly,
                ghcOptInputFiles         = toNubListR dynamicObjectFiles,
                ghcOptOutputFile         = toFlag sharedLibFilePath,
                -- For dynamic libs, Mac OS/X needs to know the install location
                -- at build time. This only applies to GHC < 7.8 - see the
                -- discussion in #1660.
                ghcOptDylibName          = if (hostOS == OSX
                                               && ghcVersion < Version [7,8] [])
                                            then toFlag sharedLibInstallPath
                                            else mempty,
                ghcOptPackageKey         = toFlag (pkgKey lbi),
                ghcOptNoAutoLinkPackages = toFlag True,
                ghcOptPackageDBs         = withPackageDB lbi,
                ghcOptPackages           = toNubListR $
                                           Internal.mkGhcOptPackages clbi ,
                ghcOptLinkLibs           = toNubListR $ extraLibs libBi,
                ghcOptLinkLibPath        = toNubListR $ extraLibDirs libBi,
                ghcOptLinkFrameworks     = toNubListR $ PD.frameworks libBi,
                ghcOptRPaths             = rpaths
              }

      info verbosity (show (ghcOptPackages ghcSharedLinkArgs))

      whenVanillaLib False $ do
        Ar.createArLibArchive verbosity lbi vanillaLibFilePath staticObjectFiles

      whenProfLib $ do
        Ar.createArLibArchive verbosity lbi profileLibFilePath profObjectFiles

      whenGHCiLib $ do
        (ldProg, _) <- requireProgram verbosity ldProgram (withPrograms lbi)
        Ld.combineObjectFiles verbosity ldProg
          ghciLibFilePath ghciObjFiles

      whenSharedLib False $
        runGhcProg ghcSharedLinkArgs
-- | Start a REPL without loading any source files.
-- | Start a REPL without loading any source files.
startInterpreter :: Verbosity -> ProgramConfiguration -> Compiler
                 -> PackageDBStack -> IO ()
startInterpreter verbosity conf comp packageDBs = do
  checkPackageDbStack packageDBs
  (ghcProg, _) <- requireProgram verbosity ghcProgram conf
  runGHC verbosity ghcProg comp $
    mempty { ghcOptMode       = toFlag GhcModeInteractive
           , ghcOptPackageDBs = packageDBs
           }
-- | Build an executable with GHC.
--
buildExe, replExe :: Verbosity -> Cabal.Flag (Maybe Int)
                  -> PackageDescription -> LocalBuildInfo
                  -> Executable -> ComponentLocalBuildInfo -> IO ()
buildExe = buildOrReplExe False
-- | Load an executable into GHCi; shares its implementation with 'buildExe'.
replExe = buildOrReplExe True
-- | Shared worker for 'buildExe' and 'replExe': compile an executable (or
-- load it into GHCi when the 'Bool' is True), including any C sources and
-- the final link step.
buildOrReplExe :: Bool -> Verbosity -> Cabal.Flag (Maybe Int)
               -> PackageDescription -> LocalBuildInfo
               -> Executable -> ComponentLocalBuildInfo -> IO ()
buildOrReplExe forRepl verbosity numJobs _pkg_descr lbi
  exe@Executable { exeName = exeName', modulePath = modPath } clbi = do

  (ghcProg, _) <- requireProgram verbosity ghcProgram (withPrograms lbi)
  let comp       = compiler lbi
      implInfo   = getImplInfo comp
      runGhcProg = runGHC verbosity ghcProg comp

  exeBi <- hackThreadedFlag verbosity
             comp (withProfExe lbi) (buildInfo exe)

  -- exeNameReal, the name that GHC really uses (with .exe on Windows)
  let exeNameReal = exeName' <.>
                    (if takeExtension exeName' /= ('.':exeExtension)
                       then exeExtension
                       else "")

  let targetDir = (buildDir lbi) </> exeName'
  let exeDir    = targetDir </> (exeName' ++ "-tmp")
  createDirectoryIfMissingVerbose verbosity True targetDir
  createDirectoryIfMissingVerbose verbosity True exeDir
  -- TODO: do we need to put hs-boot files into place for mutually recursive
  -- modules?  FIX: what about exeName.hi-boot?

  -- Determine if program coverage should be enabled and if so, what
  -- '-hpcdir' should be.
  let isCoverageEnabled = fromFlag $ configCoverage $ configFlags lbi
      distPref = fromFlag $ configDistPref $ configFlags lbi
      hpcdir way
        | isCoverageEnabled = toFlag $ Hpc.mixDir distPref way exeName'
        | otherwise = mempty

  -- build executables

  srcMainFile <- findFile (exeDir : hsSourceDirs exeBi) modPath
  rpaths      <- getRPaths lbi clbi

  let isGhcDynamic        = isDynamic comp
      dynamicTooSupported = supportsDynamicToo comp
      isHaskellMain = elem (takeExtension srcMainFile) [".hs", ".lhs"]
      -- a non-Haskell main file is compiled as another C source:
      cSrcs         = cSources exeBi ++ [srcMainFile | not isHaskellMain]
      cObjs         = map (`replaceExtension` objExtension) cSrcs
      baseOpts   = (componentGhcOptions verbosity lbi exeBi clbi exeDir)
                    `mappend` mempty {
                      ghcOptMode         = toFlag GhcModeMake,
                      ghcOptInputFiles   = toNubListR
                                           [ srcMainFile | isHaskellMain],
                      ghcOptInputModules = toNubListR
                                           [ m | not isHaskellMain, m <- exeModules exe]
                    }
      staticOpts = baseOpts `mappend` mempty {
                      ghcOptDynLinkMode    = toFlag GhcStaticOnly,
                      ghcOptHPCDir         = hpcdir Hpc.Vanilla
                   }
      profOpts   = baseOpts `mappend` mempty {
                      ghcOptProfilingMode  = toFlag True,
                      ghcOptHiSuffix       = toFlag "p_hi",
                      ghcOptObjSuffix      = toFlag "p_o",
                      ghcOptExtra          = toNubListR $ hcProfOptions GHC exeBi,
                      ghcOptHPCDir         = hpcdir Hpc.Prof
                    }
      dynOpts    = baseOpts `mappend` mempty {
                      ghcOptDynLinkMode    = toFlag GhcDynamicOnly,
                      ghcOptHiSuffix       = toFlag "dyn_hi",
                      ghcOptObjSuffix      = toFlag "dyn_o",
                      ghcOptExtra          = toNubListR $
                                             hcSharedOptions GHC exeBi,
                      ghcOptHPCDir         = hpcdir Hpc.Dyn
                    }
      dynTooOpts = staticOpts `mappend` mempty {
                      ghcOptDynLinkMode    = toFlag GhcStaticAndDynamic,
                      ghcOptDynHiSuffix    = toFlag "dyn_hi",
                      ghcOptDynObjSuffix   = toFlag "dyn_o",
                      ghcOptHPCDir         = hpcdir Hpc.Dyn
                    }
      linkerOpts = mempty {
                      ghcOptLinkOptions    = toNubListR $ PD.ldOptions exeBi,
                      ghcOptLinkLibs       = toNubListR $ extraLibs exeBi,
                      ghcOptLinkLibPath    = toNubListR $ extraLibDirs exeBi,
                      ghcOptLinkFrameworks = toNubListR $ PD.frameworks exeBi,
                      ghcOptInputFiles     = toNubListR
                                             [exeDir </> x | x <- cObjs]
                   }
      dynLinkerOpts = mempty {
                      ghcOptRPaths         = rpaths
                   }
      replOpts   = baseOpts {
                      ghcOptExtra          = overNubListR
                                             Internal.filterGhciFlags
                                             (ghcOptExtra baseOpts)
                   }
                   -- For a normal compile we do separate invocations of ghc for
                   -- compiling as for linking. But for repl we have to do just
                   -- the one invocation, so that one has to include all the
                   -- linker stuff too, like -l flags and any .o files from C
                   -- files etc.
                   `mappend` linkerOpts
                   `mappend` mempty {
                      ghcOptMode           = toFlag GhcModeInteractive,
                      ghcOptOptimisation   = toFlag GhcNoOptimisation
                   }
      commonOpts  | withProfExe lbi = profOpts
                  | withDynExe  lbi = dynOpts
                  | otherwise       = staticOpts
      compileOpts | useDynToo = dynTooOpts
                  | otherwise = commonOpts
      withStaticExe = (not $ withProfExe lbi) && (not $ withDynExe lbi)

      -- For building exe's that use TH with -prof or -dynamic we actually have
      -- to build twice, once without -prof/-dynamic and then again with
      -- -prof/-dynamic. This is because the code that TH needs to run at
      -- compile time needs to be the vanilla ABI so it can be loaded up and run
      -- by the compiler.
      -- With dynamic-by-default GHC the TH object files loaded at compile-time
      -- need to be .dyn_o instead of .o.
      doingTH = EnableExtension TemplateHaskell `elem` allExtensions exeBi
      -- Should we use -dynamic-too instead of compiling twice?
      useDynToo = dynamicTooSupported && isGhcDynamic
                  && doingTH && withStaticExe
                  && null (hcSharedOptions GHC exeBi)
      compileTHOpts | isGhcDynamic = dynOpts
                    | otherwise    = staticOpts
      compileForTH
        | forRepl      = False
        | useDynToo    = False
        | isGhcDynamic = doingTH && (withProfExe lbi || withStaticExe)
        | otherwise    = doingTH && (withProfExe lbi || withDynExe lbi)
      linkOpts = commonOpts `mappend`
                 linkerOpts `mappend`
                 mempty { ghcOptLinkNoHsMain = toFlag (not isHaskellMain) } `mappend`
                 (if withDynExe lbi then dynLinkerOpts else mempty)

  -- Build static/dynamic object files for TH, if needed.
  when compileForTH $
    runGhcProg compileTHOpts { ghcOptNoLink  = toFlag True
                             , ghcOptNumJobs = numJobs }

  unless forRepl $
    runGhcProg compileOpts { ghcOptNoLink  = toFlag True
                           , ghcOptNumJobs = numJobs }

  -- build any C sources
  unless (null cSrcs) $ do
    info verbosity "Building C Sources..."
    sequence_
      [ do let opts = (Internal.componentCcGhcOptions verbosity implInfo lbi exeBi
                          clbi exeDir filename) `mappend` mempty {
                        ghcOptDynLinkMode   = toFlag (if withDynExe lbi
                                                        then GhcDynamicOnly
                                                        else GhcStaticOnly),
                        ghcOptProfilingMode = toFlag (withProfExe lbi)
                      }
               odir = fromFlag (ghcOptObjDir opts)
           createDirectoryIfMissingVerbose verbosity True odir
           -- only recompile C files whose object file is out of date:
           needsRecomp <- checkNeedsRecompilation filename opts
           when needsRecomp $
             runGhcProg opts
      | filename <- cSrcs ]

  -- TODO: problem here is we need the .c files built first, so we can load them
  -- with ghci, but .c files can depend on .h files generated by ghc by ffi
  -- exports.
  when forRepl $ runGhcProg replOpts

  -- link:
  unless forRepl $ do
    info verbosity "Linking..."
    runGhcProg linkOpts { ghcOptOutputFile = toFlag (targetDir </> exeNameReal) }
-- | Returns True if the modification date of the given source file is newer than
-- the object file we last compiled for it, or if no object file exists yet.
checkNeedsRecompilation :: FilePath -> GhcOptions -> IO Bool
checkNeedsRecompilation filename opts =
    filename `moreRecentFile` getObjectFileName filename opts
-- | Finds the object file name of the given source file
getObjectFileName :: FilePath -> GhcOptions -> FilePath
getObjectFileName filename opts =
    objDir </> replaceExtension filename objSuffix
  where
    objDir    = fromFlag (ghcOptObjDir opts)
    objSuffix = fromFlagOrDefault "o" (ghcOptObjSuffix opts)
-- | Calculate the RPATHs for the component we are building.
--
-- Calculates relative RPATHs when 'relocatable' is set.
getRPaths :: LocalBuildInfo
          -> ComponentLocalBuildInfo -- ^ Component we are building
          -> IO (NubListR FilePath)
getRPaths lbi clbi | supportRPaths hostOS = do
    libraryPaths <- depLibraryPaths False (relocatable lbi) lbi clbi
    -- Relative paths are anchored at the loaded binary's own location
    -- using the platform's dynamic-linker variable:
    let hostPref = case hostOS of
                     OSX -> "@loader_path"
                     _   -> "$ORIGIN"
        relPath p = if isRelative p then hostPref </> p else p
        rpaths    = toNubListR (map relPath libraryPaths)
    return rpaths
  where
    (Platform _ hostOS) = hostPlatform lbi

    -- The list of RPath-supported operating systems below reflects the
    -- platforms on which Cabal's RPATH handling is tested. It does _NOT_
    -- reflect whether the OS supports RPATH.
    -- E.g. when this comment was written, the *BSD operating systems were
    -- untested with regards to Cabal RPATH handling, and were hence set to
    -- 'False', while those operating systems themselves do support RPATH.
    supportRPaths Linux       = True
    supportRPaths Windows     = False
    supportRPaths OSX         = True
    supportRPaths FreeBSD     = False
    supportRPaths OpenBSD     = False
    supportRPaths NetBSD      = False
    supportRPaths DragonFly   = False
    supportRPaths Solaris     = False
    supportRPaths AIX         = False
    supportRPaths HPUX        = False
    supportRPaths IRIX        = False
    supportRPaths HaLVM       = False
    supportRPaths IOS         = False
    supportRPaths Ghcjs       = False
    supportRPaths (OtherOS _) = False
    -- Do _not_ add a default case so that we get a warning here when a new OS
    -- is added.

-- On unsupported platforms, emit no RPATHs at all.
getRPaths _ _ = return mempty
-- | Filter the "-threaded" flag when profiling as it does not
-- work with ghc-6.8 and older.
-- | Filter the "-threaded" flag when profiling as it does not
-- work with ghc-6.8 and older.
hackThreadedFlag :: Verbosity -> Compiler -> Bool -> BuildInfo -> IO BuildInfo
hackThreadedFlag verbosity comp prof bi
  | prof && compilerVersion comp < Version [6, 10] []
         && "-threaded" `elem` hcOptions GHC bi = do
      warn verbosity $ "The ghc flag '-threaded' is not compatible with "
                    ++ "profiling in ghc-6.8 and older. It will be disabled."
      -- Drop the flag from the GHC option lists only; other compilers'
      -- option lists are passed through untouched.
      let dropThreaded (hc, opts)
            | hc == GHC = (hc, filter (/= "-threaded") opts)
            | otherwise = (hc, opts)
      return bi { options = map dropThreaded (options bi) }
  | otherwise = return bi
-- | Extracts a String representing a hash of the ABI of a built
-- library. It can fail if the library has not yet been built.
--
libAbiHash :: Verbosity -> PackageDescription -> LocalBuildInfo
           -> Library -> ComponentLocalBuildInfo -> IO String
libAbiHash verbosity _pkg_descr lbi lib clbi = do
  libBi <- hackThreadedFlag verbosity
             (compiler lbi) (withProfLib lbi) (libBuildInfo lib)
  let
      comp        = compiler lbi
      vanillaArgs =
        (componentGhcOptions verbosity lbi libBi clbi (buildDir lbi))
        `mappend` mempty {
          ghcOptMode         = toFlag GhcModeAbiHash,
          ghcOptPackageKey   = toFlag (pkgKey lbi),
          ghcOptInputModules = toNubListR $ exposedModules lib
        }
      sharedArgs = vanillaArgs `mappend` mempty {
                       ghcOptDynLinkMode = toFlag GhcDynamicOnly,
                       ghcOptFPic        = toFlag True,
                       ghcOptHiSuffix    = toFlag "dyn_hi",
                       ghcOptObjSuffix   = toFlag "dyn_o",
                       ghcOptExtra       = toNubListR $ hcSharedOptions GHC libBi
                   }
      profArgs   = vanillaArgs `mappend` mempty {
                       ghcOptProfilingMode = toFlag True,
                       ghcOptHiSuffix      = toFlag "p_hi",
                       ghcOptObjSuffix     = toFlag "p_o",
                       ghcOptExtra         = toNubListR $ hcProfOptions GHC libBi
                   }
      -- Query the interface files of whichever way was actually built,
      -- preferring vanilla, then shared, then profiling:
      ghcArgs = if withVanillaLib lbi then vanillaArgs
           else if withSharedLib  lbi then sharedArgs
           else if withProfLib    lbi then profArgs
           else error "libAbiHash: Can't find an enabled library way"
  --
  (ghcProg, _) <- requireProgram verbosity ghcProgram (withPrograms lbi)
  hash <- getProgramInvocationOutput verbosity (ghcInvocation ghcProg comp ghcArgs)
  -- ghc prints the hash as the first whitespace-delimited word:
  return (takeWhile (not . isSpace) hash)
-- | Compute the 'GhcOptions' for compiling a component.  Delegates to the
-- implementation shared with the other GHC-like backends in
-- "Distribution.Simple.GHC.Internal".
componentGhcOptions :: Verbosity -> LocalBuildInfo
                    -> BuildInfo -> ComponentLocalBuildInfo -> FilePath
                    -> GhcOptions
componentGhcOptions = Internal.componentGhcOptions
-- -----------------------------------------------------------------------------
-- Installing
-- |Install executables for GHC.
installExe :: Verbosity
           -> LocalBuildInfo
           -> InstallDirs FilePath -- ^Where to copy the files to
           -> FilePath  -- ^Build location
           -> (FilePath, FilePath) -- ^Executable (prefix,suffix)
           -> PackageDescription
           -> Executable
           -> IO ()
installExe verbosity lbi installDirs buildPref
           (progprefix, progsuffix) _pkg exe = do
  let binDir = bindir installDirs
  createDirectoryIfMissingVerbose verbosity True binDir
  let exeFileName = exeName exe <.> exeExtension
      -- user-configured prefix/suffix applied to the installed name only:
      fixedExeBaseName = progprefix ++ exeName exe ++ progsuffix
      installBinary dest = do
        -- '<.> exeExtension' is a no-op where exeExtension is empty (unix),
        -- and appends ".exe" on Windows.
        installExecutableFile verbosity
          (buildPref </> exeName exe </> exeFileName)
          (dest <.> exeExtension)
        when (stripExes lbi) $
          Strip.stripExe verbosity (hostPlatform lbi) (withPrograms lbi)
                         (dest <.> exeExtension)
  installBinary (binDir </> fixedExeBaseName)
-- |Install for ghc, .hi, .a and, if --with-ghci given, .o
installLib :: Verbosity
           -> LocalBuildInfo
           -> FilePath  -- ^install location
           -> FilePath  -- ^install location for dynamic libraries
           -> FilePath  -- ^Build location
           -> PackageDescription
           -> Library
           -> ComponentLocalBuildInfo
           -> IO ()
installLib verbosity lbi targetDir dynlibTargetDir builtDir _pkg lib clbi = do
    -- interface files first:
    whenVanilla $ copyModuleFiles "hi"
    whenProf    $ copyModuleFiles "p_hi"
    whenShared  $ copyModuleFiles "dyn_hi"
    -- then the built library files themselves:
    whenVanilla $ mapM_ (copyOrdinary builtDir targetDir)       vanillaLibNames
    whenProf    $ mapM_ (copyOrdinary builtDir targetDir)       profileLibNames
    whenGHCi    $ mapM_ (copyOrdinary builtDir targetDir)       ghciLibNames
    whenShared  $ mapM_ (copyShared   builtDir dynlibTargetDir) sharedLibNames
  where
    -- Copy one library file.  Shared objects may be stripped first and are
    -- installed with the executable bit; everything else is an ordinary copy.
    copyFile' isShared srcDir dstDir file = do
      let src = srcDir </> file
          dst = dstDir </> file
      createDirectoryIfMissingVerbose verbosity True dstDir
      if isShared
        then do when (stripLibs lbi) $ Strip.stripLib verbosity
                       (hostPlatform lbi) (withPrograms lbi) src
                installExecutableFile verbosity src dst
        else installOrdinaryFile verbosity src dst

    copyOrdinary = copyFile' False
    copyShared   = copyFile' True

    -- Copy every interface file with the given suffix for the library's modules.
    copyModuleFiles ext =
      findModuleFiles [builtDir] [ext] (libModules lib)
        >>= installOrdinaryFiles verbosity targetDir

    cid             = compilerId (compiler lbi)
    libNames        = componentLibraries clbi
    vanillaLibNames = map mkLibName              libNames
    profileLibNames = map mkProfLibName          libNames
    ghciLibNames    = map Internal.mkGHCiLibName libNames
    sharedLibNames  = map (mkSharedLibName cid)  libNames

    -- There is something to install if the library has modules or C sources.
    hasLib = not $ null (libModules lib)
                && null (cSources (libBuildInfo lib))

    whenVanilla = when (hasLib && withVanillaLib lbi)
    whenProf    = when (hasLib && withProfLib    lbi)
    whenGHCi    = when (hasLib && withGHCiLib    lbi)
    whenShared  = when (hasLib && withSharedLib  lbi)
-- -----------------------------------------------------------------------------
-- Registering

-- | Collect the version-dependent quirks of the configured ghc-pkg program.
--
-- Calls 'error' with a descriptive message if ghc-pkg has not been
-- configured or its version could not be determined.  (The previous code
-- used irrefutable @Just … =@ where-patterns, which failed with an opaque
-- pattern-match error in the same situations.)
hcPkgInfo :: ProgramConfiguration -> HcPkg.HcPkgInfo
hcPkgInfo conf = HcPkg.HcPkgInfo { HcPkg.hcPkgProgram    = ghcPkgProg
                                 , HcPkg.noPkgDbStack    = v < [6,9]
                                 , HcPkg.noVerboseFlag   = v < [6,11]
                                 , HcPkg.flagPackageConf = v < [7,5]
                                 , HcPkg.useSingleFileDb = v < [7,9]
                                 }
  where
    v = versionBranch ver
    ghcPkgProg = maybe (error "hcPkgInfo: the ghc-pkg program is not configured")
                       id (lookupProgram ghcPkgProgram conf)
    ver        = maybe (error "hcPkgInfo: the version of ghc-pkg is unknown")
                       id (programVersion ghcPkgProg)
-- | Register (or re-register) a package with ghc-pkg in the given
-- package database stack, using the supplied installed-package metadata.
registerPackage
  :: Verbosity
  -> InstalledPackageInfo
  -> PackageDescription
  -> LocalBuildInfo
  -> Bool
  -> PackageDBStack
  -> IO ()
registerPackage verbosity installedPkgInfo _pkg lbi _inplace packageDbs =
    -- 'reregister' replaces any existing registration for this package.
    HcPkg.reregister (hcPkgInfo $ withPrograms lbi) verbosity
      packageDbs (Right installedPkgInfo)
-- | Compute the directory that @${pkgroot}@ resolves to for the given
-- package database.
pkgRoot :: Verbosity -> LocalBuildInfo -> PackageDB -> IO FilePath
pkgRoot verbosity lbi db = case db of
    GlobalPackageDB ->
      -- The global db lives next to the compiler installation; ask ghc.
      let Just ghcProg = lookupProgram ghcProgram (withPrograms lbi)
      in fmap takeDirectory (getGlobalPackageDB verbosity ghcProg)

    UserPackageDB -> do
      appDir <- getAppUserDataDirectory "ghc"
      let ver     = compilerVersion (compiler lbi)
          subdir  = System.Info.arch ++ '-':System.Info.os
                      ++ '-':showVersion ver
          rootDir = appDir </> subdir
      -- We must create the root directory for the user package database if it
      -- does not yet exists. Otherwise '${pkgroot}' will resolve to a
      -- directory at the time of 'ghc-pkg register', and registration will
      -- fail.
      createDirectoryIfMissing True rootDir
      return rootDir

    SpecificPackageDB fp -> return (takeDirectory fp)
-- -----------------------------------------------------------------------------
-- Utils

-- | Whether this GHC was built as a dynamically-linked compiler
-- (reads the "GHC Dynamic" compiler property).
isDynamic :: Compiler -> Bool
isDynamic = Internal.ghcLookupProperty "GHC Dynamic"

-- | Whether this GHC can build vanilla and dynamic ways in one pass
-- (reads the "Support dynamic-too" compiler property).
supportsDynamicToo :: Compiler -> Bool
supportsDynamicToo = Internal.ghcLookupProperty "Support dynamic-too"
| typelead/epm | Cabal/Distribution/Simple/GHC.hs | bsd-3-clause | 50,490 | 0 | 23 | 15,460 | 9,563 | 5,018 | 4,545 | 795 | 17 |
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeFamilies #-}
module Stanag.MessageAcknowledgement where
import Ivory.Language
import Stanag.Packing
-- | Shared storage cell for the message-acknowledgement instance,
-- initialised to zero.
messageAcknowledgementInstance :: MemArea (Stored Uint32)
messageAcknowledgementInstance = area "messageAcknowledgementInstance" (Just (ival 0))

-- Splice in the Ivory definitions generated from the .ivory source file.
[ivoryFile|Stanag/MessageAcknowledgement.ivory|]
| GaloisInc/loi | Stanag/MessageAcknowledgement.hs | bsd-3-clause | 406 | 0 | 9 | 40 | 63 | 37 | 26 | 10 | 1 |
{-# OPTIONS -fno-spec-constr #-}
{-# LANGUAGE CPP, UndecidableInstances #-}
#include "fusion-phases.h"
-- | Unvectorised parallel arrays.
--
-- * These operators may be used directly by unvectorised client programs.
--
-- * They are also used by the "Data.Array.Parallel.Lifted.Combinators"
-- module to define the closure converted versions that vectorised code
-- uses.
--
-- * In general, the operators here are all unsafe and don't do bounds checks.
-- The lifted versions also don't check that each of the argument arrays
-- have the same length.
-- TODO:
-- Export unsafe versions from Data.Array.Parallel.PArray.Unsafe, and ensure
-- this module exports safe wrappers. We want to use the unsafe versions in
-- D.A.P.Lifted.Combinators for performance reasons, but the user facing PArray
-- functions should all be safe. In particular, the vectoriser guarantees
-- that all arrays passed to lifted functions will have the same length, but
-- the user may not obey this restriction.
--
module Data.Array.Parallel.PArray
( PArray, PA
, valid
, nf
, typeRep
-- * Constructors
, empty
, singleton, singletonl
, replicate, replicatel, replicates, replicates'
, append, appendl
, concat, concatl
, unconcat
, nestUSegd
-- * Projections
, length, lengthl -- length from D.A.P.PArray.PData.Base
, index, indexl
, extract, extracts, extracts'
, slice, slicel
, takeUSegd
-- * Pack and Combine
, pack, packl
, packByTag
, combine2
-- * Enumerations
, enumFromTo, enumFromTol -- from D.A.P.PArray.Scalar
-- * Tuples
, zip, zipl
, zip3
, zip4
, zip5
, unzip, unzipl
, unzip3
, unzip4
, unzip5
-- * Conversions
, fromVector, toVector
, fromList, toList
, fromUArray, toUArray -- from D.A.P.PArray.Scalar
, fromUArray2) -- from D.A.P.PArray.Scalar
where
import Data.Array.Parallel.Trace
import Data.Array.Parallel.PArray.PData
import Data.Array.Parallel.PArray.PRepr
import Data.Array.Parallel.PArray.Scalar
import Data.Array.Parallel.PArray.Reference.Convert
import GHC.Exts (Int(I#), (==#), (+#))
import Data.Vector (Vector)
import Data.Array.Parallel.Base (Tag)
import qualified Data.Array.Parallel.Pretty as T
import qualified Data.Array.Parallel.Array as A
import qualified Data.Array.Parallel.Unlifted as U
import qualified Data.Vector as V
import qualified Data.Array.Parallel.PArray.Reference as R
import qualified Data.Array.Parallel.PArray.Reference.Convert as R
import Data.Typeable hiding ( typeRep )
import qualified Prelude as P
import Prelude hiding
( length, replicate, concat
, enumFromTo
, zip, zip3, unzip, unzip3)
-- Pretty ---------------------------------------------------------------------
-- | Physical pretty-printing: show the array length, then the payload
-- nested underneath it.
instance PA a => T.PprPhysical (PArray a) where
  pprp (PArray n# pdata)
    =     ( T.text "PArray " T.<+> T.int (I# n#))
    T.$+$ ( T.nest 4
          $ pprpDataPA pdata)

-- | Similarity test used when comparing against the reference implementation.
instance PA a => Similar a where
  similar = similarPA

instance PA a => R.PprPhysical1 a where
  pprp1 = pprpPA
-- Array -----------------------------------------------------------------------
-- Generic interface to PArrays.
--
-- NOTE:
--  The toVector conversion is defined by looking up every index instead of
--  using the bulk fromVectorPA function.
--  We do this to convert arrays of type (PArray Void) properly, as although a
--  (PArray Void) has an intrinsic length, a (PData Void) does not. If we try
--  to use the fromVectorPA function at this type we'll just get an `error`.
--  Arrays of type PArray Void aren't visible in the user API, but during
--  debugging we need to be able to print them out with the implied length.
--
instance PA e => A.Array PArray e where
  valid      = valid
  singleton  = singleton
  append     = append
  length     = length

  index (PArray _ pdata) ix
             = indexPA pdata ix

  -- Element-by-element conversion; see the NOTE above for why this does not
  -- use the bulk conversion.
  toVector arr = V.map (A.index arr) $ V.enumFromTo 0 (A.length arr - 1)

  fromVector = fromVector
-- Operators ==================================================================
-- Each of these operators is wrapped in withRef functions so that we can
-- compare their outputs to the reference implementation.
-- See D.A.P.Reference for details.
-- Basics ---------------------------------------------------------------------
-- | Equality is defined element-wise, via conversion to boxed 'Vector's.
instance (Eq a, PA a) => Eq (PArray a) where
  (==) (PArray _ xs) (PArray _ ys) = toVectorPA xs == toVectorPA ys
  (/=) (PArray _ xs) (PArray _ ys) = toVectorPA xs /= toVectorPA ys

-- | Check that an array has a valid internal representation.
valid :: PA a => PArray a -> Bool
valid (PArray n# darr1)
  =  validPA darr1
  && coversPA True darr1 (I# n#)   -- the physical data must cover all n elements
{-# INLINE_PA valid #-}

-- | Force an array to normal form.
nf :: PA a => PArray a -> ()
nf (PArray _ d)
  = nfPA d
{-# INLINE_PA nf #-}

-- | Get the type of a thing.
typeRep :: PA a => a -> TypeRep
typeRep x = typeRepPA x
-- Empty ---------------------------------------------------------------------
-- | O(1). An empty array.
empty :: PA a => PArray a
empty
  = withRef1 "empty" R.empty     -- cross-checked against the reference impl
  $ PArray 0# emptyPA
{-# INLINE_PA empty #-}

-- Singleton ------------------------------------------------------------------
-- | O(1). Produce an array containing a single element.
singleton :: PA a => a -> PArray a
singleton x
  = traceOp (OpSingleton (typeRepPA x))
  $ withRef1 "singleton" (R.singleton x)
  $ PArray 1# (replicatePA 1 x)
{-# INLINE_PA singleton #-}

-- | O(n). Produce an array of singleton arrays.
singletonl :: PA a => PArray a -> PArray (PArray a)
singletonl arr@(PArray n# pdata)
  = traceOp (OpSingletonL (typeRepDataPA pdata) (I# n#))
  $ withRef2 "singletonl" (R.singletonl (toRef1 arr))
  -- one segment of length 1 per source element
  $ replicatel_ (replicate_ (length arr) 1) arr
{-# INLINE_PA singletonl #-}

-- Replicate ------------------------------------------------------------------
-- | O(n). Define an array of the given size, that maps all elements to the same value.
--   We require the replication count to be > 0 so that it's easier to maintain
--   the validPR invariants for nested arrays.
replicate :: PA a => Int -> a -> PArray a
replicate n x
  = traceOp (OpReplicate (typeRepPA x) n)
  $ withRef1 "replicate" (R.replicate n x)
  $ replicate_ n x
{-# INLINE_PA replicate #-}

-- | Worker for 'replicate', without tracing or reference checking.
replicate_ :: PA a => Int -> a -> PArray a
replicate_ (I# n#) x
  = PArray n# (replicatePA (I# n#) x)
{-# INLINE_PA replicate_ #-}
-- | O(sum lengths). Lifted replicate.
replicatel :: PA a => PArray Int -> PArray a -> PArray (PArray a)
replicatel reps arr@(PArray n# pdata)
= traceOp (OpReplicateL (typeRepDataPA pdata)
(I# n#))
$ withRef2 "replicatel" (R.replicatel (toRef1 reps) (toRef1 arr))
$ replicatel_ reps arr
replicatel_ :: PA a => PArray Int -> PArray a -> PArray (PArray a)
replicatel_ (PArray n# (PInt lens)) (PArray _ pdata)
= if n# ==# 0# then empty else
let !segd = U.lengthsToSegd lens
!vsegd = U.promoteSegdToVSegd segd
!pdata' = replicatesPA segd pdata
!pdatas' = singletondPA pdata'
in PArray n# $ mkPNestedPA vsegd pdatas' segd pdata'
{-# INLINE_PA replicatel_ #-}
-- | O(sum lengths). Segmented replicate.
replicates :: PA a => U.Segd -> PArray a -> PArray a
replicates segd arr@(PArray _ pdata)
= traceOp (OpReplicateS (typeRepDataPA pdata)
(U.elementsSegd segd))
$ withRef1 "replicates" (R.replicates segd (toRef1 arr))
$ let !(I# n#) = U.elementsSegd segd
in PArray n# $ replicatesPA segd pdata
{-# INLINE_PA replicates #-}
-- | O(sum lengths). Wrapper for segmented replicate that takes replication counts
-- and uses them to build the `U.Segd`.
replicates' :: PA a => PArray Int -> PArray a -> PArray a
replicates' (PArray _ (PInt reps)) arr
= replicates (U.lengthsToSegd reps) arr
{-# INLINE_PA replicates' #-}
-- Append ---------------------------------------------------------------------
-- | Append two arrays.
append :: PA a => PArray a -> PArray a -> PArray a
append arr1@(PArray n1# pdata1) arr2@(PArray n2# pdata2)
  = traceOp (OpAppend (typeRepDataPA pdata1)
                      (I# n1#) (I# n2#) (I# (n1# +# n2#)))
  $ withRef1 "append" (R.append (toRef1 arr1) (toRef1 arr2))
  -- result length is the sum of the two input lengths
  $ PArray (n1# +# n2#) (appendPA pdata1 pdata2)
{-# INLINE_PA append #-}

-- | Lifted append.
--   Both arrays must have the same length
appendl :: PA a => PArray (PArray a) -> PArray (PArray a) -> PArray (PArray a)
appendl arr1@(PArray n# pdata1) arr2@(PArray _ pdata2)
  = traceOp (OpAppendL (typeRepDataPA pdata1)
                       (I# n#))
  $ withRef2 "appendl" (R.appendl (toRef2 arr1) (toRef2 arr2))
  $ PArray n# $ appendlPA pdata1 pdata2
{-# INLINE_PA appendl #-}
-- Concat ---------------------------------------------------------------------
-- | Concatenate a nested array.
concat :: PA a => PArray (PArray a) -> PArray a
concat arr@(PArray n# pdata)
= let pdata' = concatPA pdata
!(I# n2#) = lengthPA pdata'
in traceOp (OpConcat (typeRepDataPA pdata)
(I# n#) (I# n2#))
$ withRef1 "concat" (R.concat (toRef2 arr))
$ PArray n2# pdata'
{-# INLINE_PA concat #-}
-- | Lifted concat.
concatl :: PA a => PArray (PArray (PArray a)) -> PArray (PArray a)
concatl arr@(PArray n# pdata)
= traceOp (OpConcatL (typeRepDataPA pdata)
(I# n#))
$ withRef2 "concatl" (R.concatl (toRef3 arr))
$ PArray n# $ concatlPA pdata
{-# INLINE_PA concatl #-}
-- | Impose a nesting structure on a flat array
unconcat :: (PA a, PA b) => PArray (PArray a) -> PArray b -> PArray (PArray b)
unconcat (PArray n# pdata1) (PArray _ pdata2)
= traceOp (OpUnconcat (typeRepDataPA pdata1) (I# n#))
$ PArray n# $ unconcatPA pdata1 pdata2
{-# INLINE_PA unconcat #-}
-- | Create a nested array from a segment descriptor and some flat data.
-- The segment descriptor must represent as many elements as present
-- in the flat data array, else `error`
nestUSegd :: PA a => U.Segd -> PArray a -> PArray (PArray a)
nestUSegd segd (PArray n# pdata)
| U.elementsSegd segd == I# n#
, I# n2# <- U.lengthSegd segd
= PArray n2#
$ PNested (U.promoteSegdToVSegd segd) (singletondPA pdata) segd pdata
| otherwise
= error $ unlines
[ "Data.Array.Parallel.PArray.nestUSegd: number of elements defined by "
++ "segment descriptor and data array do not match"
, " length of segment desciptor = " ++ show (U.elementsSegd segd)
, " length of data array = " ++ show (I# n#) ]
{-# INLINE_PA nestUSegd #-}
-- Projections ---------------------------------------------------------------
-- | Take the length of some arrays.
lengthl :: PA a => PArray (PArray a) -> PArray Int
lengthl arr@(PArray n# (PNested vsegd _ _ _))
= traceOp (OpLengthL (I# n#))
$ withRef1 "lengthl" (R.lengthl (toRef2 arr))
$ PArray n# $ PInt $ U.takeLengthsOfVSegd vsegd
{-# INLINE_PA lengthl #-}
-- | O(1). Lookup a single element from the source array.
index :: PA a => PArray a -> Int -> a
index (PArray _ arr) ix
= traceOp (OpIndex)
$ indexPA arr ix
{-# INLINE_PA index #-}
-- | O(len indices). Lookup a several elements from several source arrays
indexl :: PA a => PArray (PArray a) -> PArray Int -> PArray a
indexl (PArray n# darr) (PArray _ ixs)
= traceOp (OpIndexL (I# n#))
$ PArray n# (indexlPA darr ixs)
{-# INLINE_PA indexl #-}
-- | Extract a range of elements from an array.
extract :: PA a => PArray a -> Int -> Int -> PArray a
extract (PArray _ arr) start len@(I# len#)
= traceOp (OpExtract (I# len#))
$ PArray len# (extractPA arr start len)
{-# INLINE_PA extract #-}
-- | Segmented extract.
extracts :: PA a => Vector (PArray a) -> U.SSegd -> PArray a
extracts arrs ssegd
= traceOp (OpExtractS (U.sum $ U.lengthsOfSSegd ssegd))
$ let pdatas = fromVectordPA $ V.map (\(PArray _ vec) -> vec) arrs
!(I# n#) = (U.sum $ U.lengthsOfSSegd ssegd)
in PArray n#
(extractssPA pdatas ssegd)
{-# INLINE_PA extracts #-}
-- | Wrapper for `extracts` that takes arrays of sources, starts and lengths of
-- the segments, and uses these to build the `U.SSegd`.
-- TODO: The lengths of the sources, starts and lengths arrays must be the same,
-- but this is not checked.
-- All sourceids must point to valid data arrays.
-- Segments must be within their corresponding source array.
extracts'
:: PA a
=> Vector (PArray a)
-> PArray Int -- ^ id of source array for each segment.
-> PArray Int -- ^ starting index of each segment in its source array.
-> PArray Int -- ^ length of each segment.
-> PArray a
extracts' arrs (PArray _ (PInt sources)) (PArray _ (PInt starts)) (PArray _ (PInt lengths))
= let segd = U.lengthsToSegd lengths
ssegd = U.mkSSegd starts sources segd
in extracts arrs ssegd
{-# INLINE_PA extracts' #-}
-- | Extract a range of elements from an arrary.
-- Like `extract` but with the parameters in a different order.
slice :: PA a => Int -> Int -> PArray a -> PArray a
slice start len@(I# len#) (PArray _ darr)
= traceOp (OpSlice len)
$ PArray len# (extractPA darr start len)
{-# INLINE_PA slice #-}
-- | Extract some slices from some arrays.
-- The arrays of starting indices and lengths must themselves
-- have the same length.
slicel :: PA a => PArray Int -> PArray Int -> PArray (PArray a) -> PArray (PArray a)
slicel (PArray n# sliceStarts) (PArray _ sliceLens) (PArray _ darr)
= traceOp (OpSliceL (I# n#))
$ PArray n# (slicelPA sliceStarts sliceLens darr)
{-# INLINE_PA slicel #-}
-- | Take the segment descriptor from a nested array and demote it to a
-- plain Segd. This is unsafe because it can cause index space overflow.
takeUSegd :: PArray (PArray a) -> U.Segd
takeUSegd (PArray _ pdata)
= takeSegdPD pdata
{-# INLINE_PA takeUSegd #-}
-- Pack and Combine -----------------------------------------------------------
-- | Select the elements of an array that have their tag set to True.
pack :: PA a => PArray a -> PArray Bool -> PArray a
pack arr@(PArray _ xs) flags@(PArray len# (PBool sel2))
  = traceOp (OpPack (I# len#))
  $ withRef1 "pack" (R.pack (toRef1 arr) (toRef1 flags))
  $ let darr' = packByTagPA xs (U.tagsSel2 sel2) 1

        -- The selector knows how many elements are set to '1',
        -- so we can use this for the length of the resulting array.
        !(I# m#) = U.elementsSel2_1 sel2

    in  PArray m# darr'
{-# INLINE_PA pack #-}
-- | Lifted pack.
packl :: PA a => PArray (PArray a) -> PArray (PArray Bool) -> PArray (PArray a)
packl xss@(PArray n# xdata@(PNested _ _ segd _))
fss@(PArray _ fdata)
= traceOp (OpPackL (I# n#))
$ withRef2 "packl" (R.packl (toRef2 xss) (toRef2 fss))
$ let
-- Concatenate both arrays to get the flat data.
-- Although the virtual segmentation should be the same,
-- the physical segmentation of both arrays may be different.
xdata_flat = concatPA xdata
PBool sel = concatPA fdata
tags = U.tagsSel2 sel
-- Count how many elements go into each segment.
segd' = U.lengthsToSegd $ U.count_s segd tags 1
-- Build the result array
vsegd' = U.promoteSegdToVSegd segd'
flat' = packByTagPA xdata_flat tags 1
pdatas' = singletondPA flat'
in PArray n# (PNested vsegd' pdatas' segd' flat')
{-# INLINE_PA packl #-}
-- | Filter an array based on some tags.
packByTag :: PA a => PArray a -> U.Array Tag -> Tag -> PArray a
packByTag arr@(PArray n# darr) tags tag
= traceOp (OpPackByTag (I# n#))
$ withRef1 "packByTag" (R.packByTag (toRef1 arr) tags tag)
$ let darr' = packByTagPA darr tags tag
!(I# n2#) = lengthPA darr'
in PArray n2# darr'
{-# INLINE_PA packByTag #-}
-- | Combine two arrays based on a selector.
combine2 :: forall a. PA a => U.Sel2 -> PArray a -> PArray a -> PArray a
combine2 sel arr1@(PArray _ darr1) arr2@(PArray _ darr2)
= traceOp (OpCombine2 (U.elementsSel2_0 sel + U.elementsSel2_1 sel))
$ withRef1 "combine2" (R.combine2 sel (toRef1 arr1) (toRef1 arr2))
$ let darr' = combine2PA sel darr1 darr2
!(I# n#) = lengthPA darr'
in PArray n# darr'
{-# INLINE_PA combine2 #-}
-- Tuples ---------------------------------------------------------------------
-- | O(1). Zip a pair of arrays into an array of pairs.
--   The two arrays must have the same length, else `error`.
zip :: PArray a -> PArray b -> PArray (a, b)
zip (PArray n# pdata1) (PArray _ pdata2)
  = traceOp (OpZip (I# n#))
  $ PArray n# $ zipPD pdata1 pdata2
{-# INLINE_PA zip #-}

-- | Lifted zip.
zipl :: (PA a, PA b)
     => PArray (PArray a) -> PArray (PArray b) -> PArray (PArray (a, b))
zipl (PArray n# xs) (PArray _ ys)
  = traceOp (OpZipL (I# n#))
  $ PArray n# $ ziplPA xs ys
{-# INLINE_PA zipl #-}

-- | O(1). Zip three arrays.
--   All arrays must have the same length, else `error`.
zip3 :: PArray a -> PArray b -> PArray c -> PArray (a, b, c)
zip3 (PArray n# pdata1) (PArray _ pdata2) (PArray _ pdata3)
  = PArray n# $ zip3PD pdata1 pdata2 pdata3
{-# INLINE_PA zip3 #-}

-- | O(1). Zip four arrays.
--   All arrays must have the same length, else `error`.
zip4 :: PArray a -> PArray b -> PArray c -> PArray d -> PArray (a, b, c, d)
zip4 (PArray n# pdata1) (PArray _ pdata2) (PArray _ pdata3) (PArray _ pdata4)
  = PArray n# $ zip4PD pdata1 pdata2 pdata3 pdata4
{-# INLINE_PA zip4 #-}

-- | O(1). Zip five arrays.
--   All arrays must have the same length, else `error`.
zip5 :: PArray a -> PArray b -> PArray c -> PArray d -> PArray e -> PArray (a, b, c, d, e)
zip5 (PArray n# pdata1) (PArray _ pdata2) (PArray _ pdata3) (PArray _ pdata4) (PArray _ pdata5)
  = PArray n# $ zip5PD pdata1 pdata2 pdata3 pdata4 pdata5
{-# INLINE_PA zip5 #-}

-- | O(1). Unzip an array of pairs into a pair of arrays.
unzip :: PArray (a, b) -> (PArray a, PArray b)
unzip (PArray n# (PTuple2 xs ys))
  = (PArray n# xs, PArray n# ys)
{-# INLINE_PA unzip #-}

-- | O(1). Unzip an array of triples into a triple of arrays.
unzip3 :: PArray (a, b, c) -> (PArray a, PArray b, PArray c)
unzip3 (PArray n# (PTuple3 xs ys zs))
  = (PArray n# xs, PArray n# ys, PArray n# zs)
{-# INLINE_PA unzip3 #-}

-- | O(1). Unzip an array of 4-tuples into four arrays.
unzip4 :: PArray (a, b, c, d) -> (PArray a, PArray b, PArray c, PArray d)
unzip4 (PArray n# (PTuple4 ws xs ys zs))
  = (PArray n# ws, PArray n# xs, PArray n# ys, PArray n# zs)
{-# INLINE_PA unzip4 #-}

-- | O(1). Unzip an array of 5-tuples into five arrays.
unzip5 :: PArray (a, b, c, d, e) -> (PArray a, PArray b, PArray c, PArray d, PArray e)
unzip5 (PArray n# (PTuple5 vs ws xs ys zs))
  = (PArray n# vs, PArray n# ws, PArray n# xs, PArray n# ys, PArray n# zs)
{-# INLINE_PA unzip5 #-}

-- | Lifted unzip
unzipl :: PArray (PArray (a, b)) -> PArray (PArray a, PArray b)
unzipl (PArray n# pdata)
  -- NOTE(review): this traces 'OpZipL', which looks like a copy-paste from
  -- 'zipl' — confirm whether a dedicated unzip trace op was intended.
  = traceOp (OpZipL (I# n#))
  $ PArray n# $ unziplPD pdata
{-# INLINE_PA unzipl #-}
-- Conversions ----------------------------------------------------------------
-- | Convert a `Vector` to a `PArray`
fromVector :: PA a => Vector a -> PArray a
fromVector vec
  = let !(I# n#) = V.length vec   -- force the length up front for the unboxed field
    in  PArray n# (fromVectorPA vec)
{-# INLINE_PA fromVector #-}

-- | Convert a `PArray` to a `Vector`
toVector :: PA a => PArray a -> Vector a
toVector (PArray _ arr)
  = toVectorPA arr
{-# INLINE_PA toVector #-}

-- | Convert a list to a `PArray`.
fromList :: PA a => [a] -> PArray a
fromList xx
  = let !(I# n#) = P.length xx    -- O(n): lists carry no cached length
    in  PArray n# (fromVectorPA $ V.fromList xx)
{-# INLINE_PA fromList #-}

-- | Convert a `PArray` to a list.
toList :: PA a => PArray a -> [a]
toList (PArray _ arr)
  = V.toList $ toVectorPA arr
{-# INLINE_PA toList #-}
| mainland/dph | dph-lifted-vseg/Data/Array/Parallel/PArray.hs | bsd-3-clause | 20,403 | 16 | 15 | 4,977 | 5,563 | 2,854 | 2,709 | -1 | -1 |
-- #!/usr/bin/env runghc
-- (,st') in parse
-- {-# LANGUAGE TupleSections #-}
-- {-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE CPP #-}
-- {-# OPTIONS_GHC -cpp -DPiForallInstalled #-}
--------------------------------------------------------------------
-- |
-- Copyright : (c) Andreas Reuleaux 2015
-- License : BSD2
-- Maintainer: Andreas Reuleaux <rx@a-rx.info>
-- Stability : experimental
-- Portability: non-portable
--
-- This module tests Pire's parser, forget, and untie functionality
--------------------------------------------------------------------
module ParserTests.Nat where
import Test.Tasty
import Test.Tasty.HUnit
import Pire.Syntax.Ws
import Pire.Syntax
import Pire.Parser.Expr
import Pire.Forget
import Pire.Untie
import Pire.Parser.ParseUtils
import Pire.Parser.PiParseUtils
-- import Pire.Untie
#ifdef PiForallInstalled
import qualified PiForall.Parser as P
#endif
-- main' "nat"
-- | Unit tests for parsing natural-number literals.
--
-- The @tail [ undefined, ... ]@ pattern lets every real test case start
-- with a leading comma; 'tail' drops the placeholder before the tests run.
natU = testGroup "parsing nats - unit tests" $ tail
  [
    undefined

  , let s = "7{-foo-}"
    in
      testGroup ("nat '"++s++"'") $ tail [
        undefined
        -- the plain parser discards the trailing comment ...
      , testCase ("parsing nat '"++s++"'")
          $ parse nat s @?= Nat 7
        -- ... while the whitespace-aware parser keeps it.
      , testCase ("parsing nat_ '"++s++"'")
          $ parse nat_ s @?= Nat_ 7 "7" (Ws "{-foo-}")
        -- 'forget' must round-trip back to the plain representation.
      , testCase ("parse & forget nat_ '"++s++"'")
          $ (forget $ parseP nat_ s) @?= (parseP nat s)
#ifdef PiForallInstalled
        -- 'untie' must agree with the PiForall reference parser.
      , testCase ("parse & untie nat_ '"++s++"'")
          $ (untie $ parseP nat_ s) @?= (piParseP P.natenc s)
      , testCase ("parse & untie expr_ '"++s++"'")
          $ (untie $ parseP expr_ s) @?= (piParseP P.expr s)
      , testCase ("parse & untie nat '"++s++"'")
          $ (untie $ parse nat s) @?= (piParse P.natenc s)
#endif
      ]

  , let s = "\\ x . 7 "
    in
      testGroup (">>= for Paren: \""++s++"\"") $ tail [
        undefined
        -- make sure, the bind op for Nat/Nat_ is defined
        -- trivial equality
      , testCase ("parsing expr '"++s++"'")
          $ (parseP expr s) @?= (parseP expr s)
      , testCase ("parsing expr_ '"++s++"'")
          $ (parseP expr_ s) @?= (parseP expr_ s)
#ifdef PiForallInstalled
#endif
      ]
  ]
| reuleaux/pire | tests/ParserTests/Nat.hs | bsd-3-clause | 2,294 | 0 | 18 | 618 | 529 | 290 | 239 | 31 | 1 |
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Main (main{-,module Plugins.API-}) where
--import Shell
--import Plugins.API
{-
import Paths_tpm
import Control.Monad
import Control.Monad.Trans
import System.Environment
import System.Directory
import System.FilePath
-}
{-import qualified Shell.Commands.PCR as PCR
import qualified Shell.Commands.Admin as ADM
import qualified Shell.Commands.Session as SHL
import qualified Shell.Commands.Capability as CAP
import qualified Shell.Commands.Key as KEY
import qualified Shell.Commands.Storage as STO -}
{-
import Prelude hiding (writeFile,readFile)
import Data.ByteString (readFile,writeFile)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as LBS
import Data.Binary (encode,decode)
import Control.Exception (tryJust, catch, IOException, SomeException)
-}
import TPM
{-
import OpenSSL (withOpenSSL)
import OpenSSL.EVP.Cipher
-}
import Appraiser
import Attestation
import Measurer
import PrivacyCA
import Provisioning
import Demo3Shared
import InterpreterTypes
import ProtoMain
import CAProtoMain
import AttesterMain
import AppMain
import Protocol
import HttpChannel
import AppraiserProtocol
import AttesterProtocol
--import CAMain
--import AttMain
--import AMain
--import BMain
--import ScottyCA
import qualified AbstractedCommunication as ELFIJERLFWIEJF
import qualified TestChannel as WOIEJF
import qualified TestHttpChannelA as WEOIJFWAPOJF
import qualified TestHttpChannelB as EIJFWSAOERFJ
--import qualified TestHttpChannelAMain as FEIJFEIJF
--import qualified TestHttpChannelBMain as EIFJEIF
{-
cmd_log :: (TPM t) => ShellCmd (State t)
cmd_log = ShellCmd ["log", "l"]
"Enable or disable logging on TPM command strings"
"internal command"
docmd
where docmd "toggle" = getLogging >>= setLogging . not >> showlog
docmd "on" = setLogging True >> showlog
docmd "off" = setLogging False >> showlog
docmd "" = showlog
docmd s = shellPutStrLn "Usage: log [on|off|info]"
showlog = getLogging >>= \l -> shellPutStrLn ("Logging is " ++ msg l)
msg True = "on"
msg False = "off"
envplugs = liftIO $ (getEnv "TPM_PLUGINS") `catch` (\(_:: SomeException) -> return "")
envpaths = liftIO $ (getEnv "TPM_PATHS") `catch` (\(_:: SomeException) -> return "")
breakon c [] = []
breakon c s = case break (== c) s of
(a,[]) -> [a]
(a,c:b) -> a:(breakon c b)
plugins = do
env <- liftM (breakon ':') envplugs
home <- getHomeDirectory
datd <- getDataFileName ""
return (env ++ [home++"/.tpm"] ++ [datd])
paths = do
env <- liftM (breakon ':') envpaths
home <- getHomeDirectory
datd <- getDataFileName ""
return (env ++ [home++"/.tpm"] ++ [datd])
history = do
home <- getHomeDirectory
createDirectoryIfMissing True (home </> ".tpm")
return $ Just (home </> ".tpm" </> "history")
loadstate :: IO ([(String,TPM_KEY)], [(String,TPM_STORED_DATA)])
loadstate = do
home <- getHomeDirectory
createDirectoryIfMissing True (home </> ".tpm")
let file = home </> ".tpm" </> "state"
res <- tryJust (\(e :: IOException) -> return $ Just ()) (readFile file)
case res of
Left _ -> return ([],[])
Right r -> return $ decode (LBS.fromChunks [r])
savestate :: [(String,TPM_KEY)] -> [(String,TPM_STORED_DATA)] -> IO ()
savestate keys sealed = do
home <- getHomeDirectory
createDirectoryIfMissing True (home </> ".tpm")
let file = home </> ".tpm" </> "state"
writeFile file (BS.concat $ LBS.toChunks (encode (keys,sealed)))
return ()
close = do
tpm <- getTPM
sta <- shellGetState
case session sta of
Nothing -> return ()
Just sh -> do shellPutStr "Closing active session..."
liftIO $ tpm_session_close tpm sh
shellPutStrLn "done"
mapM_ (flushkey tpm) (loaded sta)
save <- liftIO $ savestate (keys sta) (sealed sta)
shellPutStrLn $ "Exiting TPM interpreter. Goodbye."
where flushkey tpm (name,key) = do
liftIO $ tpm_flushspecific tpm key tpm_rt_key
shellPutStrLn $ "Evicted loaded key " ++ name ++ " from TPM."
shell tpm = do
plugpaths <- plugins
srcpaths <- paths
hist <- history
(kys,sld) <- loadstate
let state = initial tpm kys sld
let shell' = initialShell state
return $ shell' { pluginDirs = plugpaths
, cmdPrefix = ""
, prompt = "tpm> "
, greetMsg = Just "Welcome to TPM"
, defaultEval = ""
, cmdBuiltin = [ exitCommand ["quit","exit",":q",":e"]
, helpCommand ["help","?"]
, cmd_log ] ++
PCR.allcmds ++
ADM.allcmds ++
SHL.allcmds ++
CAP.allcmds ++
STO.allcmds ++
KEY.allcmds
, closeShell = close
, historyFile = hist
}
-}
-- | Program entry point.  Currently just prints a banner; the original
-- OpenSSL/TPM shell start-up is kept (commented out) for reference.
main :: IO ()
main = do {-withOpenSSL $ do
  let tpm = tpm_socket "/var/run/tpm/tpmd_socket:0"
  shell' <- shell tpm
  runShell shell' -}
  -- 'putStrLn' already has type IO (); the previous trailing 'return ()'
  -- was redundant.
  putStrLn "project main"
| armoredsoftware/protocol | tpm/mainline/project/Main.hs | bsd-3-clause | 5,457 | 0 | 8 | 1,545 | 118 | 84 | 34 | 26 | 1 |
module Tests ( tests ) where
import qualified Distribution.TestSuite as TS
import Tests.TestUtils
import Tests.KleenexParser
import Tests.Coding
import Tests.Regression
import Tests.KFoldApproximation
import Tests.OneApproximation
-- | All test groups handed to the Cabal test-suite runner.  Each group is
-- enabled (the 'True' flag) and carries a display name for reporting.
tests :: IO [TS.Test]
tests = return [ simpleGroup True "Coding" codingTests
               , simpleGroup True "Regression" regressionTests
               , simpleGroup True "Kleenex parsing" kleenexParserTests
               , simpleGroup True "Iterative Approximations test" oneApproximationTests
               , simpleGroup True "K-fold Approximations test" kApproximationTests
               ]
| diku-kmc/repg | test/Tests.hs | mit | 694 | 0 | 7 | 200 | 118 | 66 | 52 | 14 | 1 |
module Main where
import Data.Int (Int16, Int32, Int64, Int8)
import Data.Word (Word16, Word32, Word64, Word8)
import Foreign.C.String (CString, withCString)
import Foreign.C.Types (CInt (..), CSize (..))
import Foreign.Ptr (FunPtr, Ptr)
--struct FooBar
-- |
-- yoy
--typedef foobar_thing_cb = {- bar :: -} Ptr FooBar -> {- user_data :: -} Ptr () -> ()
-- |
-- Set the callback for the `${thing}` event. Pass NULL to unset.
--
--foreign import ccall foobar_callback_thing :: {- bar :: -} Ptr FooBar -> {- callback :: -} Ptr foobar_thing_cb -> {- user_data :: -} Ptr () -> {- result :: -} ()
-- |
-- yoy
--typedef foobar_bleh_thing_cb = {- bar :: -} Ptr FooBar -> {- user_data :: -} Ptr () -> ()
-- |
-- Set the callback for the `${bleh_thing}` event. Pass NULL to unset.
--
--foreign import ccall foobar_callback_bleh_thing :: {- bar :: -} Ptr FooBar -> {- callback :: -} Ptr foobar_bleh_thing_cb -> {- user_data :: -} Ptr () -> {- result :: -} ()
-- |
-- Heyo haha.
--typedef foobar_bleh_otherthing_cb = {- bar :: -} Ptr FooBar -> {- fluff :: -} Word32 -> {- user_data :: -} Ptr () -> <unresolved>
-- |
-- Set the callback for the `${bleh_otherthing}` event. Pass NULL to unset.
--
--foreign import ccall foobar_callback_bleh_otherthing :: {- bar :: -} Ptr FooBar -> {- callback :: -} Ptr foobar_bleh_otherthing_cb -> {- result :: -} ()
--foreign import ccall foobar_iterate :: {- bar :: -} Ptr FooBar -> {- user_data :: -} <unresolved> -> {- result :: -} <unresolved>
--foreign import ccall foobar_iterate2 :: {- bar :: -} Ptr FooBar -> {- user_data :: -} <unresolved> -> {- result :: -} <unresolved>
| iphydf/apidsl | test/event/event.exp.hs | gpl-3.0 | 1,733 | 0 | 6 | 411 | 115 | 83 | 32 | 6 | 0 |
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Main
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Main (main) where
import Test.Tasty
import Test.AWS.Redshift
import Test.AWS.Redshift.Internal
-- | Test entry point: one top-level group with a sub-group per
-- test source (generated tests and internal fixtures).
main :: IO ()
main = defaultMain tree
  where
    tree =
      testGroup "Redshift"
        [ testGroup "tests"    tests
        , testGroup "fixtures" fixtures
        ]
| olorin/amazonka | amazonka-redshift/test/Main.hs | mpl-2.0 | 537 | 0 | 8 | 103 | 76 | 47 | 29 | 9 | 1 |
{-# LANGUAGE CPP, NoImplicitPrelude #-}
{-# OPTIONS -Wall #-}
module Language.Paraiso.OM
(
OM(..), makeOM
) where
import qualified Data.Vector as V
import Language.Paraiso.Name
import Language.Paraiso.OM.Builder (Builder, buildKernel)
import Language.Paraiso.OM.Graph
import Language.Paraiso.OM.DynValue (DynValue)
import NumericPrelude
-- | POM is Primordial Orthotope Machine.
data OM vector gauge anot
  = OM
    { omName :: Name,                                -- ^ the machine's name
      setup :: Setup vector gauge anot,              -- ^ static variables and root annotation
      kernels :: V.Vector (Kernel vector gauge anot) -- ^ the kernels built for this machine
    }
    deriving (Show)
-- | An 'OM' is addressed by its machine name.
instance Nameable (OM v g a) where
  name = omName
-- | create a POM easily and consistently.
makeOM ::
     Name -- ^The machine name.
  -> a -- ^The annotation at the root level.
  -> [Named DynValue] -- ^The list of static variables.
  -> [Named (Builder v g a ())] -- ^The list of pair of the kernel name and its builder.
  -> OM v g a -- ^The result.
makeOM name0 a0 vars0 kerns
  = OM {
    omName = name0,
    setup = setup0,
    -- Every named builder is run against the one shared setup, so all
    -- kernels see the same static variables.
    kernels = V.fromList $ map (\(Named n b) -> buildKernel setup0 n b) kerns
  }
  where
    -- Setup shared by all kernels: static variables plus root annotation.
    setup0 = Setup { staticValues = V.fromList vars0, globalAnnotation = a0 } | nushio3/Paraiso | Language/Paraiso/OM.hs | bsd-3-clause | 1,301 | 0 | 13 | 390 | 309 | 182 | 127 | 31 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-| Unittests for ganeti-htools.
-}
{-
Copyright (C) 2009, 2010, 2011, 2012, 2013 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Test.Ganeti.HTools.Node
( testHTools_Node
, Node.Node(..)
, setInstanceSmallerThanNode
, genNode
, genOnlineNode
, genEmptyOnlineNode
, genNodeList
, genUniqueNodeList
) where
import Test.QuickCheck
import Test.HUnit
import Control.Monad
import qualified Data.Map as Map
import qualified Data.Graph as Graph
import Data.List
import Test.Ganeti.TestHelper
import Test.Ganeti.TestCommon
import Test.Ganeti.TestHTools
import Test.Ganeti.HTools.Instance ( genInstanceSmallerThanNode
, genInstanceList
, genInstanceOnNodeList)
import Ganeti.BasicTypes
import qualified Ganeti.HTools.Loader as Loader
import qualified Ganeti.HTools.Container as Container
import qualified Ganeti.HTools.Instance as Instance
import qualified Ganeti.HTools.Node as Node
import qualified Ganeti.HTools.Types as Types
import qualified Ganeti.HTools.Graph as HGraph
{-# ANN module "HLint: ignore Use camelCase" #-}
-- * Arbitrary instances
-- | Generates an arbitrary node based on sizing information.
genNode :: Maybe Int -- ^ Minimum node size in terms of units
        -> Maybe Int -- ^ Maximum node size (when Nothing, bounded
                     -- just by the max... constants)
        -> Gen Node.Node
genNode min_multiplier max_multiplier = do
  -- Lower bounds per resource; no minimum means zero.
  let (base_mem, base_dsk, base_cpu, base_spindles) =
        case min_multiplier of
          Just mm -> (mm * Types.unitMem,
                      mm * Types.unitDsk,
                      mm * Types.unitCpu,
                      mm)
          Nothing -> (0, 0, 0, 0)
      -- Upper bounds; no maximum falls back to the global max* constants.
      (top_mem, top_dsk, top_cpu, top_spindles) =
        case max_multiplier of
          Just mm -> (mm * Types.unitMem,
                      mm * Types.unitDsk,
                      mm * Types.unitCpu,
                      mm)
          Nothing -> (maxMem, maxDsk, maxCpu, maxSpindles)
  name <- genFQDN
  -- Draw totals first, then the free/used figures inside those totals,
  -- so e.g. mem_f <= mem_t and mem_n <= mem_t - mem_f hold by construction.
  mem_t <- choose (base_mem, top_mem)
  mem_f <- choose (base_mem, mem_t)
  mem_n <- choose (0, mem_t - mem_f)
  dsk_t <- choose (base_dsk, top_dsk)
  dsk_f <- choose (base_dsk, dsk_t)
  cpu_t <- choose (base_cpu, top_cpu)
  cpu_n <- choose (base_cpu, cpu_t)
  offl <- arbitrary
  spindles <- choose (base_spindles, top_spindles)
  let n = Node.create name (fromIntegral mem_t) mem_n mem_f
          (fromIntegral dsk_t) dsk_f (fromIntegral cpu_t) cpu_n offl spindles
          0 0 False
      n' = Node.setPolicy nullIPolicy n
  -- Rebuild the peer map so the node's derived/cached data is consistent.
  return $ Node.buildPeers n' Container.empty
-- | Helper function to generate a sane node: online, passing the N+1
-- check, and with some disk, memory, CPU and spindle capacity left.
genOnlineNode :: Gen Node.Node
genOnlineNode = arbitrary `suchThat` usable
  where
    usable n = and [ not (Node.offline n)
                   , not (Node.failN1 n)
                   , Node.availDisk n > 0
                   , Node.availMem n > 0
                   , Node.availCpu n > 0
                   , Node.tSpindles n > 0
                   ]
-- | Helper function to generate a sane empty node with consistent
-- internal data.
genEmptyOnlineNode :: Gen Node.Node
genEmptyOnlineNode =
  (do node <- arbitrary
      -- With no instances, free memory is everything except what the
      -- node itself uses; reservations are therefore zero.
      let fmem = truncate (Node.tMem node) - Node.nMem node
      let node' = node { Node.offline = False
                       , Node.fMem = fmem
                       , Node.fMemForth = fmem
                       , Node.pMem = fromIntegral fmem / Node.tMem node
                       , Node.pMemForth = fromIntegral fmem / Node.tMem node
                       , Node.rMem = 0
                       , Node.rMemForth = 0
                       , Node.pRem = 0
                       , Node.pRemForth = 0
                       }
      return node') `suchThat` (\ n -> not (Node.failN1 n) &&
                                       Node.availDisk n > 0 &&
                                       Node.availMem n > 0 &&
                                       Node.availCpu n > 0 &&
                                       Node.tSpindles n > 0)
-- | Generate a node with exclusive storage enabled.
genExclStorNode :: Gen Node.Node
genExclStorNode = do
  n <- genOnlineNode
  -- Free spindles are bounded by the node's total, and the
  -- "forthcoming" figure is bounded by the free figure.
  fs <- choose (Types.unitSpindle, Node.tSpindles n)
  fsForth <- choose (Types.unitSpindle, fs)
  -- Under exclusive storage the disk percentages track spindles.
  let pd = fromIntegral fs / fromIntegral (Node.tSpindles n)::Double
  let pdForth = fromIntegral fsForth / fromIntegral (Node.tSpindles n)::Double
  return n { Node.exclStorage = True
           , Node.fSpindles = fs
           , Node.fSpindlesForth = fsForth
           , Node.pDsk = pd
           , Node.pDskForth = pdForth
           }
-- | Generate a node with exclusive storage possibly enabled
-- (50/50 between a plain online node and an exclusive-storage one).
genMaybeExclStorNode :: Gen Node.Node
genMaybeExclStorNode = oneof [genOnlineNode, genExclStorNode]

-- and a random node
instance Arbitrary Node.Node where
  arbitrary = genNode Nothing Nothing
-- | Node list generator.
-- Given a node generator, create a random length node list. Note that "real"
-- clusters always have at least one node, so we don't generate empty node
-- lists here.
genNodeList :: Gen Node.Node -> Gen Node.List
genNodeList ngen = fmap (snd . Loader.assignIndices) names_nodes
    -- Names must be pairwise distinct, otherwise index assignment
    -- would silently merge nodes.
    where names_nodes = (fmap . map) (\n -> (Node.name n, n)) nodes
          nodes = listOf1 ngen `suchThat`
                  ((\ns -> ns == nub ns) . map Node.name)
-- | Node list generator where node names are unique
genUniqueNodeList :: Gen Node.Node -> Gen (Node.List, Types.NameAssoc)
genUniqueNodeList ngen = (do
  nl <- genNodeList ngen
  let na = (fst . Loader.assignIndices) $
           map (\n -> (Node.name n, n)) (Container.elems nl)
  return (nl, na)) `suchThat`
  -- Retry until the name association is as large as the list,
  -- i.e. no two nodes collapsed onto the same name.
  (\(nl, na) -> Container.size nl == Map.size na)
-- | Generate a node list, an instance list, and a node graph.
-- We choose instances with nodes contained in the node list.
genNodeGraph :: Gen (Maybe Graph.Graph, Node.List, Instance.List)
genNodeGraph = do
  -- At least two nodes, so instances can have distinct pri/sec nodes.
  nl <- genNodeList genOnlineNode `suchThat` ((2<=).Container.size)
  il <- genInstanceList (genInstanceOnNodeList nl)
  return (Node.mkNodeGraph nl il, nl, il)
-- * Test cases
-- | Setting an alias changes the alias and nothing else (name kept).
prop_setAlias :: Node.Node -> String -> Bool
prop_setAlias node name =
  let newnode = Node.setAlias node name
  in Node.name newnode == Node.name node &&
     Node.alias newnode == name

-- | The offline flag round-trips through the setter.
prop_setOffline :: Node.Node -> Bool -> Property
prop_setOffline node status =
  let newnode = Node.setOffline node status
  in Node.offline newnode ==? status

-- | The xMem figure round-trips through the setter.
prop_setXmem :: Node.Node -> Int -> Property
prop_setXmem node xm =
  let newnode = Node.setXmem node xm
  in Node.xMem newnode ==? xm

-- | Setting mcpu updates the node's policy vcpu ratio.
prop_setMcpu :: Node.Node -> Double -> Property
prop_setMcpu node mc =
  let newnode = Node.setMcpu node mc
  in Types.iPolicyVcpuRatio (Node.iPolicy newnode) ==? mc
-- Check if adding an instance that consumes exactly all reserved
-- memory does not raise an N+1 error
prop_addPri_NoN1Fail :: Property
prop_addPri_NoN1Fail =
  forAll genMaybeExclStorNode $ \node ->
  forAll (genInstanceSmallerThanNode node) $ \inst ->
  -- Size the instance to use exactly the free-but-unreserved memory.
  let inst' = inst { Instance.mem = Node.fMem node - Node.rMem node }
  in (Node.addPri node inst' /=? Bad Types.FailN1)
-- | Check that an instance add with too high memory or disk will be
-- rejected.
prop_addPriFM :: Node.Node -> Instance.Instance -> Property
prop_addPriFM node inst =
  Instance.mem inst >= Node.fMem node && not (Node.failN1 node) &&
  Instance.usesMemory inst ==>
  (Node.addPri node inst'' ==? Bad Types.FailMem)
  -- Shrink every resource to fit the node, then restore the original
  -- (oversized) memory, so FailMem is the only possible failure.
  where inst' = setInstanceSmallerThanNode node inst
        inst'' = inst' { Instance.mem = Instance.mem inst }
-- | Check that adding a primary instance with too much disk fails
-- with type FailDisk.
prop_addPriFD :: Instance.Instance -> Property
prop_addPriFD inst =
  forAll (genNode (Just 1) Nothing) $ \node ->
  forAll (elements Instance.localStorageTemplates) $ \dt ->
  Instance.dsk inst >= Node.fDsk node && not (Node.failN1 node) ==>
  -- Shrink to fit, then restore the original (oversized) disk so
  -- FailDisk is the only expected failure.
  let inst' = setInstanceSmallerThanNode node inst
      inst'' = inst' { Instance.dsk = Instance.dsk inst
                     , Instance.diskTemplate = dt }
  in (Node.addPri node inst'' ==? Bad Types.FailDisk)
-- | Check if an instance exceeds a spindles limit or has no spindles set.
hasInstTooManySpindles :: Instance.Instance -> Int -> Bool
hasInstTooManySpindles inst sp_lim =
  -- An instance without spindle information counts as exceeding any limit.
  maybe True (> sp_lim) (Instance.getTotalSpindles inst)
-- | Check that adding a primary instance with too many spindles fails
-- with type FailSpindles (when exclusive storage is enabled).
prop_addPriFS :: Instance.Instance -> Property
prop_addPriFS inst =
  forAll genExclStorNode $ \node ->
  forAll (elements Instance.localStorageTemplates) $ \dt ->
  hasInstTooManySpindles inst (Node.fSpindles node) &&
    not (Node.failN1 node) ==>
  -- Shrink to fit, then restore the original disks so the spindle
  -- excess is the only remaining violation.
  let inst' = setInstanceSmallerThanNode node inst
      inst'' = inst' { Instance.disks = Instance.disks inst
                     , Instance.diskTemplate = dt }
  in (Node.addPri node inst'' ==? Bad Types.FailSpindles)
-- | Check that adding a primary instance with too many VCPUs fails
-- with type FailCPU.
prop_addPriFC :: Property
prop_addPriFC =
  forAll (choose (1, maxCpu)) $ \extra ->
  forAll genMaybeExclStorNode $ \node ->
  forAll (arbitrary `suchThat` Instance.notOffline
                    `suchThat` (not . Instance.forthcoming)) $ \inst ->
  -- Exceed the available CPU count by a strictly positive amount.
  let inst' = setInstanceSmallerThanNode node inst
      inst'' = inst' { Instance.vcpus = Node.availCpu node + extra }
  in case Node.addPri node inst'' of
       Bad Types.FailCPU -> passTest
       v -> failTest $ "Expected OpFail FailCPU, but got " ++ show v
-- | Check that an instance add with too high memory or disk will be
-- rejected.
prop_addSec :: Node.Node -> Instance.Instance -> Int -> Property
prop_addSec node inst pdx =
  -- Precondition: at least one of — not enough unreserved memory (for
  -- online instances), not enough disk, or too many spindles when
  -- exclusive storage is on — while the node itself is N+1-healthy.
  ((Instance.mem inst >= (Node.fMem node - Node.rMem node) &&
    not (Instance.isOffline inst)) ||
   Instance.dsk inst >= Node.fDsk node ||
   (Node.exclStorage node &&
    hasInstTooManySpindles inst (Node.fSpindles node))) &&
  not (Node.failN1 node) ==>
  isBad (Node.addSec node inst pdx)
-- | Check that an offline instance with reasonable disk size but
-- extra mem/cpu can always be added.
prop_addOfflinePri :: NonNegative Int -> NonNegative Int -> Property
prop_addOfflinePri (NonNegative extra_mem) (NonNegative extra_cpu) =
  forAll genMaybeExclStorNode $ \node ->
  forAll (genInstanceSmallerThanNode node) $ \inst ->
  let inst' = inst { Instance.runSt = Types.StatusOffline
                   , Instance.mem = Node.availMem node + extra_mem
                   , Instance.vcpus = Node.availCpu node + extra_cpu }
  -- addPriEx True overrides the usual mem/cpu limits for offline instances.
  in case Node.addPriEx True node inst' of
       Ok _ -> passTest
       v -> failTest $ "Expected OpGood, but got: " ++ show v
-- | Check that an offline instance with reasonable disk size but
-- extra mem/cpu can always be added.
prop_addOfflineSec :: NonNegative Int -> NonNegative Int
                   -> Types.Ndx -> Property
prop_addOfflineSec (NonNegative extra_mem) (NonNegative extra_cpu) pdx =
  forAll genMaybeExclStorNode $ \node ->
  forAll (genInstanceSmallerThanNode node) $ \inst ->
  -- Secondary nodes only make sense for DRBD, hence the template.
  let inst' = inst { Instance.runSt = Types.StatusOffline
                   , Instance.mem = Node.availMem node + extra_mem
                   , Instance.vcpus = Node.availCpu node + extra_cpu
                   , Instance.diskTemplate = Types.DTDrbd8 }
  in case Node.addSec node inst' pdx of
       Ok _ -> passTest
       v -> failTest $ "Expected OpGood/OpGood, but got: " ++ show v
-- | Checks for memory reservation changes.
-- Adding an auto-balanced secondary must grow rMem by exactly the
-- instance's memory; a non-auto-balanced one must not reserve anything;
-- removing either restores the original reservation.
prop_rMem :: Instance.Instance -> Property
prop_rMem inst =
  not (Instance.isOffline inst) && not (Instance.forthcoming inst) ==>
  -- TODO Should we also require ((> Types.unitMem) . Node.fMemForth) ?
  forAll (genMaybeExclStorNode `suchThat` ((> Types.unitMem) . Node.fMem)) $
    \node ->
  -- ab = auto_balance, nb = non-auto_balance
  -- we use -1 as the primary node of the instance
  let inst' = inst { Instance.pNode = -1, Instance.autoBalance = True
                   , Instance.diskTemplate = Types.DTDrbd8 }
      inst_ab = setInstanceSmallerThanNode node inst'
      inst_nb = inst_ab { Instance.autoBalance = False }
      -- now we have the two instances, identical except the
      -- autoBalance attribute
      orig_rmem = Node.rMem node
      inst_idx = Instance.idx inst_ab
      node_add_ab = Node.addSec node inst_ab (-1)
      node_add_nb = Node.addSec node inst_nb (-1)
      node_del_ab = liftM (`Node.removeSec` inst_ab) node_add_ab
      node_del_nb = liftM (`Node.removeSec` inst_nb) node_add_nb
  in case (node_add_ab, node_add_nb, node_del_ab, node_del_nb) of
       (Ok a_ab, Ok a_nb,
        Ok d_ab, Ok d_nb) ->
         counterexample "Consistency checks failed" $
           Node.rMem a_ab > orig_rmem &&
           Node.rMem a_ab - orig_rmem == Instance.mem inst_ab &&
           Node.rMem a_nb == orig_rmem &&
           Node.rMem d_ab == orig_rmem &&
           Node.rMem d_nb == orig_rmem &&
           -- this is not related to rMem, but as good a place to
           -- test as any
           inst_idx `elem` Node.sList a_ab &&
           inst_idx `notElem` Node.sList d_ab
       x -> failTest $ "Failed to add/remove instances: " ++ show x
-- | Check mdsk setting.
-- After changing the minimum-disk ratio, all derived disk figures
-- (loDsk, availDisk) must stay within the node's physical bounds.
prop_setMdsk :: Node.Node -> SmallRatio -> Bool
prop_setMdsk node mx =
  Node.loDsk node' >= 0 &&
  fromIntegral (Node.loDsk node') <= Node.tDsk node &&
  Node.availDisk node' >= 0 &&
  Node.availDisk node' <= Node.fDsk node' &&
  fromIntegral (Node.availDisk node') <= Node.tDsk node' &&
  Node.mDsk node' == mx'
  where node' = Node.setMdsk node mx'
        SmallRatio mx' = mx
-- Check tag maps
-- | Adding tags and then deleting the same tags is a no-op.
prop_tagMaps_idempotent :: Property
prop_tagMaps_idempotent =
  forAll genTags $ \tags ->
    let m = Map.empty
    in Node.delTags (Node.addTags m tags) tags ==? m

-- | Tags already present in the map are rejected when re-added.
prop_tagMaps_reject :: Property
prop_tagMaps_reject =
  forAll (genTags `suchThat` (not . null)) $ \tags ->
    let m = Node.addTags Map.empty tags
    in all (\t -> Node.rejectAddTags m [t]) tags
-- | Every default display field must have a known header and must
-- render to a known value for any node.
prop_showField :: Node.Node -> Property
prop_showField node =
  forAll (elements Node.defaultFields) $ \ field ->
  fst (Node.showHeader field) /= Types.unknownField &&
  Node.showField node field /= Types.unknownField
-- | Grouping must preserve every node exactly once, produce unique
-- group UUIDs, put each node in the group carrying its own UUID, and
-- be non-empty exactly when the input is.
prop_computeGroups :: [Node.Node] -> Bool
prop_computeGroups nodes =
  let ng = Node.computeGroups nodes
      onlyuuid = map fst ng
  in length nodes == sum (map (length . snd) ng) &&
     all (\(guuid, ns) -> all ((== guuid) . Node.group) ns) ng &&
     length (nub onlyuuid) == length onlyuuid &&
     (null nodes || not (null ng))
-- Check idempotence of add/remove operations
-- | Removing a just-added primary instance restores the node exactly.
prop_addPri_idempotent :: Property
prop_addPri_idempotent =
  forAll genMaybeExclStorNode $ \node ->
  forAll (genInstanceSmallerThanNode node) $ \inst ->
  case Node.addPri node inst of
    Ok node' -> Node.removePri node' inst ==? node
    _ -> failTest "Can't add instance"

-- | Same for secondary instances (DRBD only, with a synthetic primary
-- index distinct from this node).
prop_addSec_idempotent :: Property
prop_addSec_idempotent =
  forAll genMaybeExclStorNode $ \node ->
  forAll (genInstanceSmallerThanNode node) $ \inst ->
  let pdx = Node.idx node + 1
      inst' = Instance.setPri inst pdx
      inst'' = inst' { Instance.diskTemplate = Types.DTDrbd8 }
  in case Node.addSec node inst'' pdx of
       Ok node' -> Node.removeSec node' inst'' ==? node
       _ -> failTest "Can't add instance"
-- | Check that no graph is created on an empty node list.
case_emptyNodeList :: Assertion
case_emptyNodeList =
  assertEqual "" Nothing $ Node.mkNodeGraph emptynodes emptyinstances
  where emptynodes = Container.empty :: Node.List
        emptyinstances = Container.empty :: Instance.List
-- | Check that the number of vertices of a nodegraph is equal to the number of
-- nodes in the original node list.
prop_numVertices :: Property
prop_numVertices =
  forAll genNodeGraph $ \(graph, nl, _) ->
  (fmap numvertices graph ==? Just (Container.size nl))
  where numvertices = length . Graph.vertices

-- | Check that the number of edges of a nodegraph is equal to twice the number
-- of instances with secondary nodes in the original instance list.
-- (Each such instance contributes one directed edge in each direction.)
prop_numEdges :: Property
prop_numEdges =
  forAll genNodeGraph $ \(graph, _, il) ->
  (fmap numedges graph ==? Just (numwithsec il * 2))
  where numedges = length . Graph.edges
        numwithsec = length . filter Instance.hasSecondary . Container.elems

-- | Check that a node graph is colorable.
prop_nodeGraphIsColorable :: Property
prop_nodeGraphIsColorable =
  forAll genNodeGraph $ \(graph, _, _) ->
  fmap HGraph.isColorable graph ==? Just True
-- | Check that each edge in a nodegraph is an instance.
prop_instanceIsEdge :: Property
prop_instanceIsEdge =
  forAll genNodeGraph $ \(graph, _, il) ->
  fmap (\g -> all (`isEdgeOn` g) (iwithsec il)) graph ==? Just True
  -- An instance "is an edge" when both directed (pri,sec) pairs
  -- appear among the graph's edges.
  where i `isEdgeOn` g = iEdges i `intersect` Graph.edges g == iEdges i
        iEdges i = [ (Instance.pNode i, Instance.sNode i)
                   , (Instance.sNode i, Instance.pNode i)]
        iwithsec = filter Instance.hasSecondary . Container.elems

-- | Check that each instance in an edge in the resulting nodegraph.
prop_edgeIsInstance :: Property
prop_edgeIsInstance =
  forAll genNodeGraph $ \(graph, _, il) ->
  fmap (all (`isInstanceIn` il).Graph.edges) graph ==? Just True
  -- An edge "is an instance" when some instance's node pair matches
  -- it in either orientation.
  where e `isInstanceIn` il = any (`hasNodes` e) (Container.elems il)
        i `hasNodes` (v1,v2) =
          Instance.allNodes i `elem` permutations [v1,v2]
-- | List of tests for the Node module.
-- 'testSuite' is a Template Haskell splice; every quoted name below
-- must be a prop_*/case_* definition in this module.
testSuite "HTools/Node"
            [ 'prop_setAlias
            , 'prop_setOffline
            , 'prop_setMcpu
            , 'prop_setXmem
            , 'prop_addPriFM
            , 'prop_addPriFD
            , 'prop_addPriFS
            , 'prop_addPriFC
            , 'prop_addPri_NoN1Fail
            , 'prop_addSec
            , 'prop_addOfflinePri
            , 'prop_addOfflineSec
            , 'prop_rMem
            , 'prop_setMdsk
            , 'prop_tagMaps_idempotent
            , 'prop_tagMaps_reject
            , 'prop_showField
            , 'prop_computeGroups
            , 'prop_addPri_idempotent
            , 'prop_addSec_idempotent
            , 'case_emptyNodeList
            , 'prop_numVertices
            , 'prop_numEdges
            , 'prop_nodeGraphIsColorable
            , 'prop_edgeIsInstance
            , 'prop_instanceIsEdge
            ]
| dimara/ganeti | test/hs/Test/Ganeti/HTools/Node.hs | bsd-2-clause | 19,615 | 2 | 28 | 4,822 | 4,860 | 2,568 | 2,292 | 358 | 3 |
--------------------------------------------------------------------
-- |
-- Module : MediaWiki.API.Action.Unblock
-- Description : Representing Unblock requests.
-- Copyright : (c) Sigbjorn Finne, 2008
-- License : BSD3
--
-- Maintainer: Sigbjorn Finne <sof@forkIO.com>
-- Stability : provisional
-- Portability: portable
--
-- Representing Unblock requests.
--
--------------------------------------------------------------------
module MediaWiki.API.Action.Unblock where
import MediaWiki.API.Types
import MediaWiki.API.Utils
-- | Parameters of a MediaWiki @action=unblock@ request; either the
-- block id or the user name identifies the block to lift.
data UnblockRequest
 = UnblockRequest
    { ublkId       :: Maybe String   -- ^ id of the block to remove
    , ublkUser     :: Maybe UserName -- ^ user name to unblock
    , ublkToken    :: Maybe Token    -- ^ edit token for the action
    , ublkGetToken :: Bool           -- ^ request a token instead of acting
    , ublkReason   :: Maybe String   -- ^ reason shown in the block log
    }
-- | Unblock must be POSTed; fields serialise as optional key/value
-- pairs ('mbOpt') and boolean flags ('optB').
instance APIRequest UnblockRequest where
  isPostable _ = True
  showReq r =
    [ mbOpt "id" id (ublkId r)
    , mbOpt "user" id (ublkUser r)
    , mbOpt "token" id (ublkToken r)
    , optB "gettoken" (ublkGetToken r)
    , mbOpt "reason" id (ublkReason r)
    ]
-- | A request with nothing set; callers fill in the fields they need.
emptyUnblockRequest :: UnblockRequest
emptyUnblockRequest = UnblockRequest
  { ublkId = Nothing
  , ublkUser = Nothing
  , ublkToken = Nothing
  , ublkGetToken = False
  , ublkReason = Nothing
  }
| neobrain/neobot | mediawiki/MediaWiki/API/Action/Unblock.hs | bsd-3-clause | 1,219 | 0 | 9 | 246 | 229 | 135 | 94 | 25 | 1 |
module SrcLocPretty where
import SrcLoc1
import PrettyPrint
-- | Pretty-printing of source locations as @file:line,column@
-- (using the pretty-printer's @<>@ to join the pieces).
instance Printable SrcLoc where
  ppi (SrcLoc f _ l c) = f<>":"<>l<>','<>c
  wrap = ppi

-- | Render just @line_column@, e.g. for building identifiers.
shLineCol (SrcLoc _ _ l c) = show l++"_"++show c
| forste/haReFork | tools/base/AST/SrcLocPretty.hs | bsd-3-clause | 204 | 0 | 9 | 43 | 93 | 48 | 45 | 7 | 1 |
{-+
Knot-tying definitions for the base syntax to Stratego translation.
-}
module Base2Stratego where
import BaseStruct2Stratego(transP,transD,transE,showId)
import Syntax(HsPatI(..),HsDeclI(..),HsExpI(..)) -- recursive base syntax
--transPat :: HsPat -> P
-- | Translate a pattern, unwrapping the knot-tied 'Pat' constructor.
transPat (Pat p) = transP showId transPat p

-- | Translate a list of declarations.
transDecs ds = map transDec ds

--transDec :: HsDecl -> D
-- | Translate a declaration; the three 'bad' arguments stand for
-- syntax categories the translation does not support yet.
transDec (Dec d) = transD showId transExp transPat transDecs bad bad bad d

--transExp :: HsExp -> E
-- | Translate an expression (two unsupported categories here).
transExp (Exp e) = transE showId transExp transPat transDecs bad bad e

-- | Placeholder for not-yet-implemented translations.
bad x = error "Base2Stratego: not yet"
| forste/haReFork | tools/Phugs/Base2Stratego.hs | bsd-3-clause | 574 | 0 | 7 | 89 | 157 | 87 | 70 | 8 | 1 |
import Sudoku
import Control.Exception
import System.Environment
import Control.Parallel.Strategies
import Control.DeepSeq
import Data.Maybe
-- <<main
-- | Solve all sudoku puzzles from the given file, splitting the work
-- into two halves evaluated in parallel, then print the solved count.
main :: IO ()
main = do
  [f] <- getArgs
  file <- readFile f

  let puzzles = lines file
      -- Static partitioning: one spark per half of the input.
      (as,bs) = splitAt (length puzzles `div` 2) puzzles -- <1>

      solutions = runEval $ do
        -- 'force' evaluates each half to normal form (not just WHNF),
        -- so the real solving happens inside the spark.
        as' <- rpar (force (map solve as))  -- <2>
        bs' <- rpar (force (map solve bs))  -- <2>
        -- Wait for both sparks before combining results.
        rseq as'                            -- <3>
        rseq bs'                            -- <3>
        return (as' ++ bs')                 -- <4>

  print (length (filter isJust solutions))
-- >>
| seahug/parconc-examples | sudoku2.hs | bsd-3-clause | 710 | 0 | 18 | 290 | 216 | 110 | 106 | 19 | 1 |
-- | Free regs map for x86_64
module RegAlloc.Linear.X86_64.FreeRegs
where
import X86.Regs
import RegClass
import Reg
import Panic
import Platform
import Data.Word
import Data.Bits
-- | Bitmask of free registers: bit /n/ set means real register /n/
-- is available for allocation.
newtype FreeRegs = FreeRegs Word64
    deriving Show

-- | No registers free.
noFreeRegs :: FreeRegs
noFreeRegs = FreeRegs 0
-- | Mark a single real register as free again by setting its bit.
releaseReg :: RealReg -> FreeRegs -> FreeRegs
releaseReg (RealRegSingle n) (FreeRegs f)
        = FreeRegs (f `setBit` n)

releaseReg _ _
        = panic "RegAlloc.Linear.X86_64.FreeRegs.releaseReg: no reg"
-- | All allocatable registers of the platform start out free.
initFreeRegs :: Platform -> FreeRegs
initFreeRegs platform
        = foldr releaseReg noFreeRegs (allocatableRegs platform)
-- | Lazily list the free registers of the requested class, walking
-- the bitmask one bit at a time (register number m tracks the shift).
getFreeRegs :: Platform -> RegClass -> FreeRegs -> [RealReg] -- lazily
getFreeRegs platform cls (FreeRegs f) = go f 0

    where go 0 _ = []
          go n m
            -- Emit register m when its bit is set and its class matches.
            | n .&. 1 /= 0 && classOfRealReg platform (RealRegSingle m) == cls
            = RealRegSingle m : (go (n `shiftR` 1) $! (m+1))

            | otherwise
            = go (n `shiftR` 1) $! (m+1)
            -- ToDo: there's no point looking through all the integer registers
            -- in order to find a floating-point one.
-- | Mark a single real register as taken by clearing its bit.
allocateReg :: RealReg -> FreeRegs -> FreeRegs
allocateReg (RealRegSingle r) (FreeRegs f)
        = FreeRegs (f `clearBit` r)

allocateReg _ _
        = panic "RegAlloc.Linear.X86_64.FreeRegs.allocateReg: no reg"
| ghc-android/ghc | compiler/nativeGen/RegAlloc/Linear/X86_64/FreeRegs.hs | bsd-3-clause | 1,339 | 0 | 14 | 310 | 395 | 211 | 184 | 33 | 2 |
-- This used lots of memory, and took a long time to compile, with GHC 6.12:
-- http://www.haskell.org/pipermail/glasgow-haskell-users/2010-May/018835.html
{-# LANGUAGE FlexibleContexts, TypeFamilies #-}
module IndTypesPerf where
import IndTypesPerfMerge
-- | Single strict-Int record, the target of the merge below.
data Rec1 = Rec1 !Int

-- No top-level signature on purpose: the inferred context over
-- 'merge'/'Tagged' is what this compile-performance test exercises.
mkRec1 v = mk $ merge v () where mk (Tagged i :* ()) = Rec1 i
| ryantm/ghc | testsuite/tests/indexed-types/should_compile/IndTypesPerf.hs | bsd-3-clause | 345 | 0 | 10 | 55 | 64 | 34 | 30 | 7 | 1 |
module ShouldSucceed where
-- !!! tests the deduction of contexts.
-- | Always returns the empty list: the helper is only ever applied
-- to @x@ itself, and @x == x@ selects the empty branch. The point of
-- the test is the deduced @(Eq a)@ context, not the result.
f :: (Eq a) => a -> [a]
f x = g x
  where
    g y
      | y == x    = []
      | otherwise = [y]
| wxwxwwxxx/ghc | testsuite/tests/typecheck/should_compile/tc040.hs | bsd-3-clause | 157 | 0 | 9 | 50 | 67 | 38 | 29 | 4 | 2 |
{-# LANGUAGE FlexibleInstances, MultiParamTypeClasses #-}
module SCInstances () where
import Control.Applicative
import Control.Monad
import Data.Bits (shift, xor)
import Data.Int (Int64)
import qualified Data.Map as M
import qualified Data.Sequence as Seq
import qualified Data.Text as T
import Data.Word (Word32)
import qualified Test.SmallCheck.Series as SCS
import qualified HotDB.Core.Node as N
import qualified HotDB.Core.Operation as O
-- | Int64 series built from a pair of Integer series: high word in
-- the upper 32 bits, low word merged in.
-- NOTE(review): the merge uses `xor` rather than (.|.), so a negative
-- or large low word perturbs the high bits — presumably fine for test
-- data generation, but confirm this was intentional.
instance Monad m => SCS.Serial m Int64 where
  series = mkInt64 <$> SCS.series
    where mkInt64 (msi, lsi) = (shift (fromInteger msi) 32) `xor` fromInteger lsi
-- | Word32 series: 0 plus the naturals up to the current depth.
-- NOTE(review): @d >= 0@ holds for all usual depths, so the first
-- alternative always contributes the 0 element.
instance Monad m => SCS.Serial m Word32 where
  series = (SCS.generate $ \d -> if d >= 0 then pure 0 else empty) <|> nats
    where nats = SCS.generate $ \d -> let ud = fromIntegral d in [1..ud]
-- | Text values are packed from the String series.
instance Monad m => SCS.Serial m T.Text where
  series = T.pack <$> SCS.series

-- | Node sequences come from the list series.
instance Monad m => SCS.Serial m (Seq.Seq N.Node) where
  series = Seq.fromList <$> SCS.series

-- | Node maps come from the association-list series.
instance Monad m => SCS.Serial m (M.Map T.Text N.Node) where
  series = M.fromList <$> SCS.series
-- | Enumerate every 'N.Node' constructor, one depth level down.
instance Monad m => SCS.Serial m N.Node where
  series = SCS.decDepth $
    SCS.cons0 N.EmptyNode SCS.\/
    SCS.cons1 N.RootNode SCS.\/
    SCS.cons1 N.BoolNode SCS.\/
    SCS.cons1 N.IntNode SCS.\/
    SCS.cons1 N.DoubleNode SCS.\/
    SCS.cons1 N.TextNode SCS.\/
    SCS.cons1 N.MapNode

-- | Enumerate every 'O.Operation' constructor, one depth level down.
instance Monad m => SCS.Serial m O.Operation where
  series = SCS.decDepth $
    SCS.cons0 O.NoOp SCS.\/
    SCS.cons2 O.RootSet SCS.\/
    SCS.cons1 O.IntInc SCS.\/
    SCS.cons3 O.TextInsert SCS.\/
    SCS.cons1 O.TextDelete SCS.\/
    SCS.cons3 O.SequenceSet SCS.\/
    SCS.cons3 O.SequenceInsert SCS.\/
    SCS.cons1 O.SequenceDelete SCS.\/
    SCS.cons3 O.MapSet SCS.\/
    SCS.cons1 O.MapUnset
| jjwchoy/hotdb | tests/SCInstances.hs | mit | 1,930 | 0 | 17 | 491 | 660 | 345 | 315 | 46 | 0 |
module LC where
import Data.Char (ord)
import Data.List (foldl')
-- | Convert an Excel-style column label ("A".."Z", "AA", ...) to its
-- 1-based column number, reading the label as a base-26 numeral.
columnToNumber :: String -> Int
columnToNumber = foldl' step 0
  where
    -- Horner's rule: shift the accumulator one base-26 digit left and
    -- add the current letter's value ('A' == 1 .. 'Z' == 26).
    step acc c = acc * 26 + (ord c - 64)
| AriaFallah/leetcode | haskell/ExcelSheetColumn.hs | mit | 245 | 0 | 13 | 51 | 131 | 72 | 59 | 6 | 1 |
{- |
Module : DataAssociation.APriori
Description : Implements /A-Priory/ Large Itemsets extraction.
License : MIT
Stability : development
__A-Priory__ Large 'Itemset's extraction. See __2.1__ in <http://rakesh.agrawal-family.com/papers/vldb94apriori.pdf>.
Defines the __APriori__ instance of 'LargeItemsetsExtractor'.
-}
module DataAssociation.APriori where
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
import Data.Maybe (maybeToList)
import Control.Arrow
import DataAssociation
import DataAssociation.Definitions
import DataAssociation.Utils
import DataAssociation.Abstract
-----------------------------------------------------------------------------
-- | Cache of itemset supports, together with the minimum support it
-- was computed for: the map holds everything with support >= that value.
data AprioriCache set it = AprioriCache (Map (set it) Float) Float deriving (Show, Read)

-- | A cache that knows nothing; threshold 1 means any realistic
-- request falls below it and triggers a recomputation.
emptyAprioriCache :: AprioriCache set it
emptyAprioriCache = AprioriCache Map.empty 1
-- | select Large itemsets from given cache
aprioriCached :: (Ord (set it), Ord it, Itemset set it) =>
    AprioriCache set it
    -> [set it]     -- ^ transactions
    -> MinSupport
    -> (AprioriCache set it, Map (set it) Float) -- ^ cache and __large__ itemsets
aprioriCached c@(AprioriCache cache cMinsup) transactions ms@(MinSupport minsup) =
    -- A request below the cached threshold cannot be served from the
    -- cache: recompute and remember the new (lower) threshold.
    -- Map.union is left-biased, so fresh supports win over stale ones.
    if minsup < cMinsup then let result = fst $ runApriori ms transactions
                             in (AprioriCache (Map.union cache result) minsup, result)
                        else (c, Map.filter (>= minsup) cache)
-- | This might be dangerous, because (MinSupport 0) is used.
-- With zero minimum support every itemset occurring in the data is
-- enumerated, which can be combinatorially large.
mkAprioriCache :: (Ord (set it), Ord it, Itemset set it) =>
    [set it]               -- ^ /transactions/
    -> AprioriCache set it -- ^ cache
mkAprioriCache = flip AprioriCache 0 . fst .runApriori (MinSupport 0)
-----------------------------------------------------------------------------
-- | The __APriori__ instance. Defined in "DataAssociation.APriori". Based on 'apriori'.
instance (Ord (set it), Ord it, Itemset set it) =>
    LargeItemsetsExtractor set it where
        findLargeItemsets minsup = fst . runApriori minsup

-- Bootstraps 'apriori'' with the size-1 large itemsets as seeds.
runApriori minsup@(MinSupport msup) rawdata = apriori' minsup tr seeds Map.empty []
    where tr = (rawdata, length rawdata)
          -- Support of every single item across all transactions.
          itemsSup = sortingGroupBy id
                                    (calculateSupport (snd tr) . length)
                                    (concatMap listItems rawdata)
          -- Keep items meeting the (inclusive) minimum support.
          satisfying = filter ((>= msup) . snd) itemsSup
          -- itemsets of size 1 with sufficient support
          seeds = Map.fromList $ map ((newItemset . (:[]) . fst) &&& snd) satisfying
-----------------------------------------------------------------------------
-- | generate Large itemsets with a-priory algorithm. (Figure 1 in the article)
-- Thin wrapper over 'apriori'' that starts with empty accumulators and
-- discards the debug data.
apriori :: (Ord (set it), Ord it, Itemset set it) =>
    MinSupport
    -> ([set it], Int)    -- ^ /transactions/ and their count
    -> Map (set it) Float -- ^ seeds L_{k-1} with the corresponding support
    -> Map (set it) Float -- ^ __large__ itemsets
apriori minsup transactionsWithSize seeds =
    fst $ apriori' minsup transactionsWithSize seeds Map.empty []
-----------------------------------------------------------------------------
-- | inner `apriori` implementation with debugging capacity.
-- Repeatedly joins/prunes the previous level's large itemsets,
-- counts candidate supports over the transactions, and keeps the
-- candidates meeting the minimum support, until a level is empty.
apriori' :: (Ord (set it), Ord it, Itemset set it) =>
            MinSupport
         -> ([set it], Int)           -- ^ /transactions/ and their count
         -> Map (set it) Float        -- ^ seeds L_{k-1} with the corresponding support
         -> Map (set it) Float        -- ^ __large__ itemsets accumulator
         -> [AprioriDebugData set it] -- ^ debug data accumulator
         -> (Map (set it) Float, [AprioriDebugData set it ]) -- ^ (__large__ itemsets, debug data)
apriori' mSup@(MinSupport minsup) tr@(transactions, transactionsSize) seeds acc debugAcc =
    if Map.null next then (acc, reverse debugAcc)
                     else apriori' mSup tr next (Map.union acc next) (dd:debugAcc)
    -- Fix: an itemset is /large/ when its support is >= minsup. The
    -- previous strict filter (> minsup) dropped itemsets exactly at the
    -- threshold, disagreeing with the size-1 seed filter in 'runApriori'
    -- and with the cache filter in 'aprioriCached' (both use >=).
    where next = Map.filter (>= minsup) cCount
          -- Candidate supports, normalised by the transaction count.
          cCount = Map.map (calculateSupport transactionsSize) $
                        countSupported transactions candidates
          (joined, pruned) = aprioriGen' $ Map.keys seeds
          candidates = pruned
          dd = AprioriDebugData (Map.assocs seeds) joined pruned
-- | APriori debug data container: one value per generation step.
data AprioriDebugData set it = AprioriDebugData {
    dSeeds :: [(set it, Float)] -- ^ debug: apriori seeds
  , dJoin  :: [set it]          -- ^ debug: apriori joined
  , dPrune :: [set it]          -- ^ debug: apriori pruned
  }
-----------------------------------------------------------------------------
-- | Apriori Candidate Generation. Generates the L_{k} /candidates/ from L_{k-1} (see 2.1.1 in the article).
-- Consists of `aprioryGenJoin` and `aprioryGenPrune`.
aprioriGen :: (Itemset set it, Ord it) => [set it] -- ^ L_{k-1}
                                       -> [set it] -- ^ L_{k} /candidates/

-- | Inner Apriori Candidate Generation with debugging capacity.
aprioriGen' :: (Itemset set it, Ord it) => [set it] -- ^ L_{k-1}
                                        -> ([set it], [set it]) -- ^ results of (join, prune)

-- | Apriori Candidate Generation: Join.
aprioriGenJoin :: (Itemset set it, Ord it) => [set it] -- ^ L_{k-1}
                                           -> [set it] -- ^ L_{k} /candidates/

-- | Apriori Candidate Generation: Prune.
aprioriGenPrune :: (Itemset set it) => [set it] -- ^ L_{k-1}
                                    -> [set it] -- ^ L_{k} /candidates/
                                    -> [set it] -- ^ L_{k} /candidates/

-- 'preservingArg' threads the join result into the prune step while
-- also keeping it for the debug output.
aprioriGen' sets = (joined, pruned)
    where ((_, joined), pruned) = preservingArg (uncurry aprioriGenPrune)
                                . preservingArg aprioriGenJoin $ sets

aprioriGen = snd . aprioriGen'

-- Join: merge two seeds that agree on all but their last (sorted)
-- element; the ordering guard keeps each unordered pair from being
-- produced twice.
aprioriGenJoin seeds = do p <- seeds
                          q <- seeds
                          (diff1, diff2) <- maybeToList $ lastElementDifference p q -- oneElementDifference -- TODO ASK
                          if diff1 < diff2 then return $ insertItem diff2 p
                                           else []

-- Prune: keep only candidates all of whose (k-1)-subsets are seeds.
aprioriGenPrune seeds generated = do g <- generated
                                     [g | all (`elem` seeds) (allSubsetsOneShorter g)]
-----------------------------------------------------------------------------
{- | Compare two itemsets that should differ in exactly one element.

     Returns Just ( the element only in the first set
                  , the element only in the second set )
     when
        1. the two sets have the same length
        2. n-1 elements are shared
        3. exactly one element differs
     and Nothing otherwise.
-}
oneElementDifference :: (Itemset set it) => set it -> set it -> Maybe (it, it)
oneElementDifference x y
  | setSize x == setSize y
  , [onlyInX] <- itemsetDiff x y
  -- The counterpart element is looked up lazily, matching the
  -- original's use of 'head' on the reverse difference.
  = Just (onlyInX, head (itemsetDiff y x))
  | otherwise
  = Nothing
{- | returns Just ( element contained in the first argument and not the second
                  , element contained in the second argument and not the first)
     if
        1. the two sets have the same length
        2. first @n-1@ (sorted) elements of both are the same
        3. last elements differ
     returns Nothing otherwise
-}
lastElementDifference :: (Itemset set it) => set it -> set it -> Maybe (it, it)
lastElementDifference x y =
    if sameLength && xInit == yInit
        then Just (xLast, yLast)
        else Nothing
    -- splitInit separates a set into its first n-1 elements and its last.
    where sameLength = setSize x == setSize y
          (xInit, xLast) = splitInit x
          (yInit, yLast) = splitInit y | fehu/min-dat--a-priori | core/src/DataAssociation/APriori.hs | mit | 7,907 | 1 | 15 | 2,227 | 1,668 | 898 | 770 | -1 | -1 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module KMeans where
import Data.Default
import Data.List
import qualified Data.Map as M
-- | Operations k-means needs from a point representation: a metric and
-- a way to average a cluster into its centre.
class (Default v, Ord v) => Vector v where
  distance :: v -> v -> Double  -- ^ metric between two vectors
  centroid :: [v] -> v          -- ^ centre of mass of the given vectors

-- | Entities @e@ that can be mapped into the vector space @v@ used for
-- clustering.
class Vector v => Vectorizable e v where
  toVector :: e -> v
-- | Points in the Euclidean plane with the usual distance.
instance Vector (Double, Double) where
  distance (x1, y1) (x2, y2) = sqrt ((x2 - x1) ** 2 + (y2 - y1) ** 2)
  -- NOTE(review): the centroid of an empty list is (NaN, NaN) (0/0);
  -- callers are expected to pass non-empty clusters.
  centroid points =
    let (sumX, sumY) = foldr (\(x, y) (accX, accY) -> (x + accX, y + accY)) (0.0, 0.0) points
        count        = fromIntegral (length points)
    in (sumX / count, sumY / count)
-- | Points that already are plane vectors convert via 'id'.
instance Vectorizable (Double, Double) (Double, Double) where
  toVector = id
-- | Assign every point to its nearest centroid, returning a map from
-- centroid to the points of its cluster (centroids with no points map
-- to the empty list).
clusterAssignmentPhase :: (Vector v, Vectorizable e v) => [v] -> [e] -> M.Map v [e]
clusterAssignmentPhase centroids points = foldr assign emptyClusters points
  where
    emptyClusters = M.fromList (zip centroids (repeat []))
    -- Prepend the point to the cluster of the closest centroid; on a
    -- distance tie 'minimumBy' keeps the first centroid in the list.
    assign p clusters =
      let nearest = minimumBy (\c1 c2 -> compare (distance c1 (toVector p))
                                                 (distance c2 (toVector p)))
                              centroids
      in M.adjust (p :) nearest clusters
-- | For every cluster, pair its old centroid (the map key) with the
-- centroid recomputed from the cluster's members.
newCentroidPhase :: (Vector v, Vectorizable e v) => M.Map v [e] -> [(v, v)]
newCentroidPhase clusters = M.toList (M.map (centroid . map toVector) clusters)
{- kMeans :: (Vector v, Vectorizable e v) => (Int -> [e] -> [v]) -> [e] -> [v]
-}
| evge-niy/learn_yesod | src/KMeans.hs | mit | 1,510 | 0 | 20 | 529 | 537 | 290 | 247 | 29 | 1 |
{-# LANGUAGE PatternSynonyms #-}
-- For HasCallStack compatibility
{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module JSDOM.Generated.SVGGradientElement
(pattern SVG_SPREADMETHOD_UNKNOWN, pattern SVG_SPREADMETHOD_PAD,
pattern SVG_SPREADMETHOD_REFLECT, pattern SVG_SPREADMETHOD_REPEAT,
getGradientUnits, getGradientTransform, getSpreadMethod,
SVGGradientElement(..), gTypeSVGGradientElement,
IsSVGGradientElement, toSVGGradientElement)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
-- Spread-method enumeration constants mirrored from the SVG spec's
-- SVGGradientElement.SVG_SPREADMETHOD_* values.
pattern SVG_SPREADMETHOD_UNKNOWN = 0
pattern SVG_SPREADMETHOD_PAD = 1
pattern SVG_SPREADMETHOD_REFLECT = 2
pattern SVG_SPREADMETHOD_REPEAT = 3

-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGGradientElement.gradientUnits Mozilla SVGGradientElement.gradientUnits documentation>
--
-- Reads the JS @gradientUnits@ property of the element.
getGradientUnits ::
                 (MonadDOM m, IsSVGGradientElement self) =>
                   self -> m SVGAnimatedEnumeration
getGradientUnits self
  = liftDOM
      (((toSVGGradientElement self) ^. js "gradientUnits") >>=
         fromJSValUnchecked)

-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGGradientElement.gradientTransform Mozilla SVGGradientElement.gradientTransform documentation>
--
-- Reads the JS @gradientTransform@ property of the element.
getGradientTransform ::
                     (MonadDOM m, IsSVGGradientElement self) =>
                       self -> m SVGAnimatedTransformList
getGradientTransform self
  = liftDOM
      (((toSVGGradientElement self) ^. js "gradientTransform") >>=
         fromJSValUnchecked)

-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGGradientElement.spreadMethod Mozilla SVGGradientElement.spreadMethod documentation>
--
-- Reads the JS @spreadMethod@ property; compare the result against the
-- SVG_SPREADMETHOD_* patterns above.
getSpreadMethod ::
                (MonadDOM m, IsSVGGradientElement self) =>
                  self -> m SVGAnimatedEnumeration
getSpreadMethod self
  = liftDOM
      (((toSVGGradientElement self) ^. js "spreadMethod") >>=
         fromJSValUnchecked)
| ghcjs/jsaddle-dom | src/JSDOM/Generated/SVGGradientElement.hs | mit | 2,668 | 0 | 11 | 431 | 545 | 330 | 215 | 47 | 1 |
module Errors where
import Control.Applicative
-- | A hand-rolled ExceptT monad transformer: a computation in the
-- underlying monad @m@ that yields either an error @e@ or a value @a@.
data ExceptT e m a = ExceptT {
  runExceptT :: m (Either e a)  -- ^ unwrap to the underlying action
}
-- | Map over the result, through both the inner monad and the Either.
instance Functor m => Functor (ExceptT e m) where
  fmap f (ExceptT inner) = ExceptT (fmap (fmap f) inner)

-- | Runs both underlying actions; Either's (<*>) makes a Left win.
instance Applicative m => Applicative (ExceptT e m) where
  pure a = ExceptT (pure (Right a))
  ExceptT mf <*> ExceptT mx = ExceptT (liftA2 (<*>) mf mx)

-- | Short-circuits on the first Left.
instance Monad m => Monad (ExceptT e m) where
  return a = ExceptT (return (Right a))
  ExceptT mx >>= f = ExceptT $ do
    outcome <- mx
    case outcome of
      Left err  -> return (Left err)
      Right val -> runExceptT (f val)

-- | Promote a pure Either into the transformer.
liftEither :: Monad m => Either e a -> ExceptT e m a
liftEither = ExceptT . return

-- | Promote an action of the underlying monad (it always succeeds).
lift :: Functor m => m a -> ExceptT e m a
lift = ExceptT . fmap Right

-- | Abort with the given error.
throwE :: Monad m => e -> ExceptT e m a
throwE = liftEither . Left

-- | Run an action; on failure, run the recovery function, which may
-- change the error type.
catchE :: Monad m => ExceptT e m a -> (e -> ExceptT c m a) -> ExceptT c m a
catchE action recover = ExceptT $ do
  outcome <- runExceptT action
  case outcome of
    Left failure  -> runExceptT (recover failure)
    Right success -> return (Right success)
---
-- | Domain-level errors surfaced by the API handlers: user lookup,
-- signup validation, posting, and follow/delete failures.
-- NOTE(review): "Missmatch" is a typo, but renaming the constructor
-- would break pattern matches elsewhere in the project.
data Error = NoSuchUser |
             NullId |
             EmailAlreadyTaken |
             NullEmail |
             NullName |
             NullPassword |
             NullPasswordConfirmation |
             PasswordConfirmationMissmatch |
             NullMessage |
             NullFollowerId |
             InvalidDelete |
             InvalidFollow |
             InvalidPassword
| lkania/Haskitter | src/Errors.hs | mit | 1,484 | 0 | 13 | 491 | 524 | 263 | 261 | 37 | 2 |
import System.Random
import Control.Monad.State
-- | A State action producing one random value while threading the
-- generator through the State monad.
randomSt :: (RandomGen g, Random a) => State g a
randomSt = state random
module SanitySpec
( spec
) where
import Test.Hspec
-- | Smoke test: a single trivially-true expectation that proves the
-- test harness itself runs.
spec :: Spec
spec =
  describe "sanity" $
    it "should be sane" $
      True `shouldBe` True
| ianagbip1oti/ProC | test/SanitySpec.hs | mit | 150 | 0 | 8 | 40 | 44 | 25 | 19 | 5 | 1 |
-- Joe Loser
-- CS4450 HW4
-- 11/3/14
import PropLogic
import Control.Applicative
import Control.Monad
import Data.List (nub)
-- #1 - returns the set of all free variables in a proposition,
-- or Nothing when the input fails to parse.
freeVars :: String -> Maybe [Var]
freeVars str = fmap (nub . collect) (parse str)
  where
    -- Gather every atom occurrence; 'nub' removes duplicates afterwards.
    collect (Atom v)    = [v]
    collect (Not p)     = collect p
    collect (Imply a b) = collect a ++ collect b
    collect (Or a b)    = collect a ++ collect b
    collect (And a b)   = collect a ++ collect b
    collect (Iff a b)   = collect a ++ collect b
-- #2 - evaluates a proposition given an assignment of truth-values to
-- free variables; Nothing on a parse error or an unbound variable.
evalProp :: [(Var, Bool)] -> String -> Maybe Bool
evalProp assoc str = parse str >>= eval
  where
    eval (Atom v)    = lookup v assoc
    eval (Not p)     = fmap not (eval p)
    eval (Imply a b) = (<=) <$> eval a <*> eval b  -- False <= x models implication
    eval (Or a b)    = (||) <$> eval a <*> eval b
    eval (And a b)   = (&&) <$> eval a <*> eval b
    eval (Iff a b)   = eval (And (Imply a b) (Imply b a))
-- #3 - determines whether a proposition is a tautology (true under every
-- possible free variable truth-assignment).
--
-- Rewritten: the original bound @parsed = parse str@ and never used it
-- (parsing already happens inside 'freeVars'/'evalProp'), and built the
-- assignment lists through a replicate/zip/liftM2 pipeline that reduces
-- to mapping 'zip' over the truth table.
isTautology :: String -> Maybe Bool
isTautology str = do
  fs <- freeVars str  -- Nothing on parse errors
  -- One association list per row of the truth table over the free vars.
  let assignments = map (zip fs) (truthTable (length fs))
  -- Mirrors the original semantics: a row evaluating to Nothing counts
  -- as a counterexample (/= Just True), not as failure of the whole check.
  return (all (\row -> evalProp row str == Just True) assignments)
-- | All 2^n rows of truth values for @n@ variables.  Rows are produced
-- False-first, matching the order of the original recursion
-- (all False-prefixed rows before all True-prefixed ones).
truthTable :: Int -> [[Bool]]
truthTable 0 = [[]]
truthTable n = [bit : rest | bit <- [False, True], rest <- truthTable (n - 1)]
| JoeLoser/CS4450-Principles-of-Programming | homeworks/hw4/hw4_jrlmwc.hs | mit | 2,818 | 4 | 15 | 684 | 841 | 423 | 418 | 50 | 7 |
import Distribution.PackageDescription
import Distribution.Simple
import Distribution.Simple.LocalBuildInfo
import Distribution.Simple.UserHooks
import Distribution.Simple.Setup
import System.Info
import System.Directory
import System.Environment
import System.Process
import System.FilePath
import Data.Maybe
-- Cabal entry point: hook the configure step so the build can find the
-- Go shared library, and build that library right after configure.
main = defaultMainWithHooks simpleUserHooks { confHook = odenConfHook
                                            , postConf = odenPostConf }
-- | Prepend the current working directory to a BuildInfo's
-- extra-lib-dirs, so the freshly built Go shared library is found at
-- link time.
addLibDirsToBuildInfo :: BuildInfo -> IO BuildInfo
addLibDirsToBuildInfo info = do
  cwd <- getCurrentDirectory
  return info { extraLibDirs = cwd : extraLibDirs info }

-- | Patch only the @oden-exe@ executable; all others pass through.
addLibDirsToOdenExe :: [Executable] -> IO [Executable]
addLibDirsToOdenExe = mapM patchExe
  where
    patchExe exe =
      if exeName exe == "oden-exe"
        then do
          newInfo <- addLibDirsToBuildInfo (buildInfo exe)
          return exe { buildInfo = newInfo }
        else return exe

-- | Patch every test suite's build info.
addLibDirsToTests :: [TestSuite] -> IO [TestSuite]
addLibDirsToTests = mapM patchSuite
  where
    patchSuite suite = do
      newInfo <- addLibDirsToBuildInfo (testBuildInfo suite)
      return suite { testBuildInfo = newInfo }
-- | Configure hook: run the default hook, then add the working
-- directory to the library/executable/test-suite lib dirs.
--
-- Fixed: the original used @fromJust (library packageDescription)@,
-- which crashes for packages without a library section; the Maybe is
-- now handled explicitly and the library is patched only when present.
odenConfHook :: (GenericPackageDescription, HookedBuildInfo)
             -> ConfigFlags
             -> IO LocalBuildInfo
odenConfHook (description, buildInfo) flags = do
  localBuildInfo <- confHook simpleUserHooks (description, buildInfo) flags
  let packageDescription = localPkgDescr localBuildInfo
  libWithLibDirs <- case library packageDescription of
    Nothing -> return Nothing
    Just lib -> do
      newInfo <- addLibDirsToBuildInfo (libBuildInfo lib)
      return (Just lib { libBuildInfo = newInfo })
  executablesWithLibDirs <- addLibDirsToOdenExe (executables packageDescription)
  testSuitesWithLibDirs <- addLibDirsToTests (testSuites packageDescription)
  return $ localBuildInfo {
    localPkgDescr = packageDescription {
      library = libWithLibDirs,
      executables = executablesWithLibDirs,
      testSuites = testSuitesWithLibDirs
    }
  }
-- | Shared-library extension for the current platform.
--
-- Fixed: 'System.Info.os' reports Windows as @"mingw32"@ (never
-- @"windows"@), so the original @.dll@ branch was unreachable and
-- Windows silently fell through to @.so@.
ext :: String
ext = case os of
  "darwin"  -> ".dylib"
  "mingw32" -> ".dll"
  _         -> ".so"
-- | Post-configure hook: build the Go importer as a C shared library
-- (with GOPATH pointed at the in-tree go/ directory) and stream the
-- build output to stdout.
odenPostConf :: Args -> ConfigFlags -> PackageDescription -> LocalBuildInfo -> IO ()
odenPostConf _ _ _ _ = do
  cwd <- getCurrentDirectory
  setEnv "GOPATH" (cwd </> "go")
  let outputPath = cwd </> ("libimporter" ++ ext)
      goBuild = shell ("go build -buildmode=c-shared -o " ++ outputPath ++ " oden/cmd/importer")
  putStrLn ("Compiling Go dynamic library to " ++ outputPath)
  readCreateProcess goBuild "" >>= putStr
| oden-lang/oden | Setup.hs | mit | 2,521 | 3 | 14 | 499 | 628 | 319 | 309 | 59 | 3 |
{- arch-tag: Path tests main file
Copyright (C) 2004-2011 John Goerzen <jgoerzen@complete.org>
All rights reserved.
For license and copyright information, see the file LICENSE
-}
module Pathtest(tests) where
import Test.HUnit
import System.Path
import System.FilePath (pathSeparator)
sep = map (\c -> if c == '/' then pathSeparator else c)
-- Table-driven tests for 'absNormPath': each row gives a base dir, a
-- relative path, and the expected normalised result (Nothing when the
-- path escapes the filesystem root).  'sep' translates the fixtures to
-- the platform separator on both sides of the comparison.
test_absNormPath =
    let f base' p' exp' = TestLabel (show (base, p)) $ TestCase $ exp @=? absNormPath base p
          where base = sep base'
                p = sep p'
                exp = fmap sep exp'
        f2 = f "/usr/1/2"   -- common base for the ".." traversal rows
    in
    [
     f "/" "" (Just "/")
    ,f "/usr/test" "" (Just "/usr/test")
    ,f "/usr/test" ".." (Just "/usr")
    ,f "/usr/1/2" "/foo/bar" (Just "/foo/bar")
    ,f2 "jack/./.." (Just "/usr/1/2")
    ,f2 "jack///../foo" (Just "/usr/1/2/foo")
    ,f2 "../bar" (Just "/usr/1/bar")
    ,f2 "../" (Just "/usr/1")
    ,f2 "../.." (Just "/usr")
    ,f2 "../../" (Just "/usr")
    ,f2 "../../.." (Just "/")
    ,f2 "../../../" (Just "/")
    ,f2 "../../../.." Nothing   -- walking above "/" is rejected
    ]

-- Same table shape for 'secureAbsNormPath': any result that would land
-- outside the base directory must be Nothing.
test_secureAbsNormPath =
    let f base' p' exp' = TestLabel (show (base, p)) $ TestCase $ exp @=? secureAbsNormPath base p
          where base = sep base'
                p = sep p'
                exp = fmap sep exp'
        f2 = f "/usr/1/2"
    in
    [
     f "/" "" (Just "/")
    ,f "/usr/test" "" (Just "/usr/test")
    ,f "/usr/test" ".." Nothing            -- escapes the base
    ,f "/usr/1/2" "/foo/bar" Nothing       -- absolute path outside base
    ,f "/usr/1/2" "/usr/1/2" (Just "/usr/1/2")
    ,f "/usr/1/2" "/usr/1/2/foo/bar" (Just "/usr/1/2/foo/bar")
    ,f2 "jack/./.." (Just "/usr/1/2")
    ,f2 "jack///../foo" (Just "/usr/1/2/foo")
    ,f2 "../bar" Nothing
    ,f2 "../" Nothing
    ,f2 "../.." Nothing
    ,f2 "../../" Nothing
    ,f2 "../../.." Nothing
    ,f2 "../../../" Nothing
    ,f2 "../../../.." Nothing
    ]

-- Tests for 'splitExt': split a path into (stem, extension); only the
-- final component's last dot counts, dots in directory names do not.
test_splitExt =
    let f inp' exp' = TestCase $ exp @=? splitExt inp
          where inp = sep inp'
                exp = (\(x,y) -> (sep x, y)) exp'
    in [
        f "" ("", "")
       ,f "/usr/local" ("/usr/local", "")
       ,f "../foo.txt" ("../foo", ".txt")
       ,f "../bar.txt.gz" ("../bar.txt", ".gz")   -- only the last extension splits
       ,f "foo.txt/bar" ("foo.txt/bar", "")       -- dot in a directory is ignored
       ,f "foo.txt/bar.bz" ("foo.txt/bar", ".bz")
       ]

-- | All path tests, grouped by the function under test.
tests = TestList [TestLabel "splitExt" (TestList test_splitExt)
                 ,TestLabel "absNormPath" (TestList test_absNormPath)
                 ,TestLabel "secureAbsNormPath" (TestList test_secureAbsNormPath)
                 ]
| haskellbr/missingh | missingh-all/testsrc/Pathtest.hs | mit | 2,835 | 0 | 15 | 1,053 | 799 | 412 | 387 | 61 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveGeneric #-}
module DataBuilder where
import GHC.Generics
import Data.Text
import qualified Data.Text.Lazy.Builder as B
import qualified Data.Text.Lazy.Builder.Int as B
import qualified Data.Text.Lazy.Builder.RealFloat as B
import Data.Monoid((<>), mconcat)
-- | A person, identified by first and last name.
data Person = Person { firstName :: String, lastName :: String }
  deriving (Show, Ord, Eq, Generic)

-- | A client with the id type left abstract.
-- NOTE(review): 'Individual' has no 'clientName' field, so that record
-- accessor is partial, and 'duty'/'person' are partial as well.
data Client i = GovOrg { clientId :: i, clientName :: String }
              | Company { clientId :: i, clientName :: String , person :: Person, duty :: String }
              | Individual { clientId :: i, person :: Person }
  deriving (Show, Ord, Eq, Generic)

-- | A purchasable product.  The 'id' field shadows 'Prelude.id'.
data Product = Product { id :: Integer, name :: String, price :: Double, descr :: String } deriving (Show, Ord, Eq, Generic)

-- | A purchase: the buying client plus the products bought.
data Purchase = Purchase { client :: Client Integer, products :: [Product] } deriving (Show, Ord, Eq, Generic)
-- | Escape the serialisation metacharacters (newline, comma, and both
-- parentheses) by prefixing each with a backslash, converting the
-- String to Text in the process.  Equivalent to the original chain of
-- 'Data.Text.replace' calls: every escaped target is a single
-- character, so a per-character rewrite produces the same output.
escapeString :: String -> Text
escapeString = pack . concatMap escapeChar
  where
    escapeChar '\n' = "\\n"
    escapeChar ','  = "\\,"
    escapeChar '('  = "\\("
    escapeChar ')'  = "\\)"
    escapeChar c    = [c]
-- | Serialise a person as @person(first,last)@ with escaped fields.
personToText :: Person -> B.Builder
personToText (Person firstN lastN) =
  "person(" <> field firstN <> B.singleton ',' <> field lastN <> B.singleton ')'
  where field = B.fromText . escapeString

-- | Serialise a client; the first tuple element tags the constructor
-- (gov/com/ind).
clientToText :: Client Integer -> B.Builder
clientToText client = case client of
    GovOrg cid cname ->
      "client(gov," <> B.decimal cid <> comma <> field cname <> close
    Company cid cname cperson cduty ->
      "client(com," <> B.decimal cid <> comma <> field cname <> comma
                    <> personToText cperson <> comma <> field cduty <> close
    Individual cid cperson ->
      "client(ind," <> B.decimal cid <> comma <> personToText cperson <> close
  where
    comma = B.singleton ','
    close = B.singleton ')'
    field = B.fromText . escapeString

-- | Serialise a product as @product(id,name,price,descr)@.
productToText :: Product -> B.Builder
productToText (Product pid pname pprice pdescr) =
  "product(" <> B.decimal pid <> comma <> field pname <> comma
             <> B.realFloat pprice <> comma <> field pdescr <> B.singleton ')'
  where
    comma = B.singleton ','
    field = B.fromText . escapeString

-- | Comma-join the serialised products (empty builder for no products).
productsToText :: [Product] -> B.Builder
productsToText []       = mempty
productsToText (p : ps) =
  foldl (\acc q -> acc <> B.singleton ',' <> productToText q)
        (productToText p) ps

-- | Serialise a purchase as @purchase(client,[products])@.
purchaseToText :: Purchase -> B.Builder
purchaseToText (Purchase buyer items) =
  "purchase(" <> clientToText buyer <> ",[" <> productsToText items <> "])"
| hnfmr/beginning_haskell | chapter10/DataBuilder.hs | mit | 2,491 | 0 | 14 | 553 | 912 | 478 | 434 | 44 | 3 |
{-
ghci c:\Users\Thomas\Documents\GitHub\practice\pe\nonvisualstudio\haskell\Spec\Problem0010.Spec.hs
c:\Users\Thomas\Documents\GitHub\practice\pe\nonvisualstudio\haskell\Implementation\Problem0010.hs
c:\Users\Thomas\Documents\GitHub\practice\utilities\nonvisualstudio\haskell\Implementation\TestAbstract.hs
-}
-- :r :q :set +s for times
module Problem0010Tests where
import Test.HUnit
import System.IO
import TestAbstract
import Problem0010
-- HUnit cases for Problem 10 (sum of primes below a bound).
testCases = TestList
  [
   -- sample from the problem statement: primes below 10 sum to 17
   TestCase $ easyAssertEqual "sumPrimesBelow" sumPrimesBelow 10 17,
   -- the actual Project Euler answer for the 2,000,000 bound
   TestCase $ easyAssertEqual "sumPrimesBelow" sumPrimesBelow 2000000 142913828922
  ]

-- | Run all cases and print the summary counts.
tests = runTestTT testCases
| Sobieck00/practice | pe/nonvisualstudio/haskell/OldWork/Spec/Problem0010.Spec.hs | mit | 684 | 0 | 8 | 90 | 70 | 39 | 31 | 10 | 1 |
module Darcs.Patch.Debug ( PatchDebug(..) )where
import Darcs.Patch.Witnesses.Ordered ( FL, RL )
-- | PatchDebug is a hook class for temporarily adding debug information.
-- To use it, add any methods that are required, implement those methods
-- where needed, and then make it available in the relevant contexts.
-- For example it can be temporarily added as a superclass of `Patchy`.
-- The advantage of having it here already is that everything is
-- (or should be) declared as an instance of it, so you can use
-- defaulting or just leave out declarations of instance methods and
-- code will still compile.
class PatchDebug p where
  -- | A dummy method so we can export/import PatchDebug(..) without
  -- triggering warnings.  The default implementation makes instance
  -- declarations bodyless.
  patchDebugDummy :: p wX wY -> ()
  patchDebugDummy _ = ()

-- Blanket instances for forward and reverse patch lists, relying
-- entirely on the default method.
instance PatchDebug p => PatchDebug (FL p)
instance PatchDebug p => PatchDebug (RL p)
| DavidAlphaFox/darcs | src/Darcs/Patch/Debug.hs | gpl-2.0 | 884 | 0 | 8 | 161 | 116 | 66 | 50 | 7 | 0 |
{-| Implementation of the generic daemon functionality.
-}
{-
Copyright (C) 2011, 2012 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Ganeti.Daemon
( DaemonOptions(..)
, OptType
, CheckFn
, PrepFn
, MainFn
, defaultOptions
, oShowHelp
, oShowVer
, oNoDaemonize
, oNoUserChecks
, oDebug
, oPort
, oBindAddress
, oSyslogUsage
, parseArgs
, parseAddress
, cleanupSocket
, describeError
, genericMain
) where
import Control.Concurrent
import Control.Exception
import Control.Monad
import Data.Maybe (fromMaybe, listToMaybe)
import Data.Word
import GHC.IO.Handle (hDuplicateTo)
import Network.BSD (getHostName)
import qualified Network.Socket as Socket
import System.Console.GetOpt
import System.Directory
import System.Exit
import System.Environment
import System.IO
import System.IO.Error (isDoesNotExistError, modifyIOError, annotateIOError)
import System.Posix.Directory
import System.Posix.Files
import System.Posix.IO
import System.Posix.Process
import System.Posix.Types
import System.Posix.Signals
import Ganeti.Common as Common
import Ganeti.Logging
import Ganeti.Runtime
import Ganeti.BasicTypes
import Ganeti.Utils
import qualified Ganeti.Constants as C
import qualified Ganeti.Ssconf as Ssconf
-- * Constants

-- | \/dev\/null path, used to redirect the daemon's standard fds.
devNull :: FilePath
devNull = "/dev/null"
-- | Prefix a daemon-startup failure message.  Used on two separate
-- paths: when reporting over the fork pipe and when printing directly
-- to stderr in foreground mode.
daemonStartupErr :: String -> String
daemonStartupErr msg = "Error when starting the daemon process: " ++ msg
-- * Data types

-- | Command line options structure.
data DaemonOptions = DaemonOptions
  { optShowHelp     :: Bool              -- ^ Just show the help
  , optShowVer      :: Bool              -- ^ Just show the program version
  , optShowComp     :: Bool              -- ^ Just show the completion info
  , optDaemonize    :: Bool              -- ^ Whether to daemonize or not
  , optPort         :: Maybe Word16      -- ^ Override for the network port
  , optDebug        :: Bool              -- ^ Enable debug messages
  , optNoUserChecks :: Bool              -- ^ Ignore user checks
  , optBindAddress  :: Maybe String      -- ^ Override for the bind address
  , optSyslogUsage  :: Maybe SyslogUsage -- ^ Override for Syslog usage
  }

-- | Default values for the command line options (daemonize, no
-- overrides).
defaultOptions :: DaemonOptions
defaultOptions = DaemonOptions
  { optShowHelp     = False
  , optShowVer      = False
  , optShowComp     = False
  , optDaemonize    = True
  , optPort         = Nothing
  , optDebug        = False
  , optNoUserChecks = False
  , optBindAddress  = Nothing
  , optSyslogUsage  = Nothing
  }

-- | Hooks the generic CLI machinery into our options record.
instance StandardOptions DaemonOptions where
  helpRequested = optShowHelp
  verRequested = optShowVer
  compRequested = optShowComp
  requestHelp o = o { optShowHelp = True }
  requestVer o = o { optShowVer = True }
  requestComp o = o { optShowComp = True }

-- | Abbreviation for the option type.
type OptType = GenericOptType DaemonOptions

-- | Check function type: runs before daemonization; a Left exit code
-- terminates the process, a Right value is passed on to the prepare
-- function.
type CheckFn a = DaemonOptions -> IO (Either ExitCode a)

-- | Prepare function type: runs after daemonization, receiving the
-- check result.
type PrepFn a b = DaemonOptions -> a -> IO b

-- | Main execution function type: receives both the check and prepare
-- results.
type MainFn a b = DaemonOptions -> a -> b -> IO ()
-- * Command line options

-- | Run in the foreground instead of detaching (-f).
oNoDaemonize :: OptType
oNoDaemonize =
  (Option "f" ["foreground"]
   (NoArg (\ opts -> Ok opts { optDaemonize = False }))
   "Don't detach from the current terminal",
   OptComplNone)

-- | Enable debug logging (-d).
oDebug :: OptType
oDebug =
  (Option "d" ["debug"]
   (NoArg (\ opts -> Ok opts { optDebug = True }))
   "Enable debug messages",
   OptComplNone)

-- | Skip the runtime user/group sanity checks.
oNoUserChecks :: OptType
oNoUserChecks =
  (Option "" ["no-user-checks"]
   (NoArg (\ opts -> Ok opts { optNoUserChecks = True }))
   "Ignore user checks",
   OptComplNone)

-- | Override the network port; the Int argument is only used for the
-- help text's default value.
oPort :: Int -> OptType
oPort def =
  (Option "p" ["port"]
   (reqWithConversion (tryRead "reading port")
    (\port opts -> Ok opts { optPort = Just port }) "PORT")
   ("Network port (default: " ++ show def ++ ")"),
   OptComplInteger)

-- | Override the bind address.
oBindAddress :: OptType
oBindAddress =
  (Option "b" ["bind"]
   (ReqArg (\addr opts -> Ok opts { optBindAddress = Just addr })
    "ADDR")
   "Bind address (default depends on cluster configuration)",
   OptComplInetAddr)

-- | Override the syslog usage mode ('no', 'yes' or 'only').
oSyslogUsage :: OptType
oSyslogUsage =
  (Option "" ["syslog"]
   (reqWithConversion syslogUsageFromRaw
    (\su opts -> Ok opts { optSyslogUsage = Just su })
    "SYSLOG")
   ("Enable logging to syslog (except debug \
    \messages); one of 'no', 'yes' or 'only' [" ++ C.syslogUsage ++
    "]"),
   OptComplChoices ["yes", "no", "only"])
-- | Generic options, appended to every daemon's option list by
-- 'parseArgs' (help, version, and shell-completion output).
genericOpts :: [OptType]
genericOpts = [ oShowHelp
              , oShowVer
              , oShowComp
              ]
-- | Annotates and transforms IOErrors into a Result type. This can be
-- used in the error handler argument to 'catch', for example.
ioErrorToResult :: String -> IOError -> IO (Result a)
ioErrorToResult description exc =
  return (Bad (description ++ ": " ++ show exc))
-- | Small wrapper over getArgs and 'parseOpts', always appending the
-- generic help/version/completion options.
parseArgs :: String -> [OptType] -> IO (DaemonOptions, [String])
parseArgs cmd options = do
  cmd_args <- getArgs
  parseOpts defaultOptions cmd_args cmd (options ++ genericOpts) []

-- * Daemon-related functions

-- | PID file mode: owner read/write only.
pidFileMode :: FileMode
pidFileMode = unionFileModes ownerReadMode ownerWriteMode

-- | PID file open flags: no controlling terminal, and no truncation
-- (the file is overwritten only after the lock has been taken).
pidFileFlags :: OpenFileFlags
pidFileFlags = defaultFileFlags { noctty = True, trunc = False }

-- | Writes a PID file and locks it; throws if the file is already
-- locked by another process.  NOTE: with POSIX fcntl locks, closing the
-- returned Fd releases the lock, so it must stay open for the daemon's
-- lifetime.
writePidFile :: FilePath -> IO Fd
writePidFile path = do
  fd <- openFd path ReadWrite (Just pidFileMode) pidFileFlags
  setLock fd (WriteLock, AbsoluteSeek, 0, 0)
  my_pid <- getProcessID
  _ <- fdWrite fd (show my_pid ++ "\n")
  return fd
-- | Helper function to ensure a socket doesn't exist. Should only be
-- called once we have locked the pid file successfully.  A missing
-- socket is fine; any other unlink error propagates.
cleanupSocket :: FilePath -> IO ()
cleanupSocket socketPath =
  catchJust missingIsFine (removeLink socketPath) (\_ -> return ())
  where missingIsFine e = if isDoesNotExistError e then Just () else Nothing

-- | Sets up a daemon's environment: working directory, umask, and a new
-- session (detaching from the controlling terminal).
setupDaemonEnv :: FilePath -> FileMode -> IO ()
setupDaemonEnv cwd umask = do
  changeWorkingDirectory cwd
  void $ setFileCreationMask umask
  void createSession
-- | Cleanup function, performing all the operations that need to be done prior
-- to shutting down a daemon.  Currently: remove the PID file.
finalCleanup :: FilePath -> IO ()
finalCleanup = removeFile

-- | Signal handler for the termination signal.
handleSigTerm :: ThreadId -> IO ()
handleSigTerm mainTID =
  -- Throw termination exception to the main thread, so that the daemon is
  -- actually stopped in the proper way, executing all the functions waiting on
  -- "finally" statement.
  Control.Exception.throwTo mainTID ExitSuccess

-- | Signal handler for reopening log files (SIGHUP), by re-running the
-- fd setup against the same log path.
handleSigHup :: FilePath -> IO ()
handleSigHup path = do
  setupDaemonFDs (Just path)
  logInfo "Reopening log files after receiving SIGHUP"
-- | Sets up a daemon's standard file descriptors: stdin reads from
-- \/dev\/null, while stdout and stderr append to the given log file
-- (or \/dev\/null when none is given).
setupDaemonFDs :: Maybe FilePath -> IO ()
setupDaemonFDs logfile = do
  nullIn  <- openFile devNull ReadMode
  nullOut <- openFile (fromMaybe devNull logfile) AppendMode
  hDuplicateTo nullIn stdin
  mapM_ (hDuplicateTo nullOut) [stdout, stderr]
  -- The originals can be closed once duplicated onto the std handles.
  mapM_ hClose [nullIn, nullOut]
-- | Computes the default (wildcard) bind address for a given family.
defaultBindAddr :: Int            -- ^ The port we want
                -> Socket.Family  -- ^ The cluster IP family
                -> Result (Socket.Family, Socket.SockAddr)
defaultBindAddr port Socket.AF_INET =
  Ok (Socket.AF_INET,
      Socket.SockAddrInet (fromIntegral port) Socket.iNADDR_ANY)
defaultBindAddr port Socket.AF_INET6 =
  Ok (Socket.AF_INET6,
      Socket.SockAddrInet6 (fromIntegral port) 0 Socket.iN6ADDR_ANY 0)
defaultBindAddr _ fam = Bad $ "Unsupported address family: " ++ show fam

-- | Based on the options, compute the socket address to use for the
-- daemon.  Without an explicit bind address, the cluster's primary IP
-- family (from ssconf) selects the wildcard address.
parseAddress :: DaemonOptions  -- ^ Command line options
             -> Int            -- ^ Default port for this daemon
             -> IO (Result (Socket.Family, Socket.SockAddr))
parseAddress opts defport = do
  let port = maybe defport fromIntegral $ optPort opts
  def_family <- Ssconf.getPrimaryIPFamily Nothing
  case optBindAddress opts of
    Nothing -> return (def_family >>= defaultBindAddr port)
    Just saddr -> Control.Exception.catch
                    (resolveAddr port saddr)
                    (ioErrorToResult $ "Invalid address " ++ saddr)

-- | Environment variable to override the assumed host name of the
-- current node (consulted by 'isMaster').
vClusterHostNameEnvVar :: String
vClusterHostNameEnvVar = "GANETI_HOSTNAME"
-- | Resolve this host's fully-qualified domain name, falling back to
-- the bare hostname when reverse resolution yields nothing.
getFQDN :: IO String
getFQDN = do
  hostname <- getHostName
  addrInfos <- Socket.getAddrInfo Nothing (Just hostname) Nothing
  case fmap Socket.addrAddress (listToMaybe addrInfos) of
    Nothing -> return hostname
    Just addr -> do
      (fqdn, _) <- Socket.getNameInfo [] True False addr
      return (fromMaybe hostname fqdn)

-- | Returns if the current node is the master node.
isMaster :: IO Bool
isMaster = do
  let ignoreIOError :: IOError -> IO (Maybe String)
      ignoreIOError _ = return Nothing
  -- The override env var (vcluster support) takes precedence over the
  -- real FQDN; a missing variable raises an IOError we swallow.
  overrideName <- Control.Exception.catch
                    (fmap Just (getEnv vClusterHostNameEnvVar))
                    ignoreIOError
  curNode <- maybe getFQDN return overrideName
  masterNode <- Ssconf.getMasterNode Nothing
  -- An unreadable ssconf master-node entry means "not master".
  return $ case masterNode of
    Ok name -> curNode == name
    Bad _   -> False
-- | Ensures that the daemon runs on the right node (and exits
-- gracefully if it doesn't): master-only daemons terminate with
-- 'exitNotmaster' on non-master nodes.
ensureNode :: GanetiDaemon -> IO ()
ensureNode daemon = do
  is_master <- isMaster
  when (daemonOnlyOnMaster daemon && not is_master) $ do
    putStrLn "Not master, exiting."
    exitWith (ExitFailure C.exitNotmaster)

-- | Run an I\/O action that might throw an I\/O error, under a
-- handler that will simply annotate and re-throw the exception.
describeError :: String -> Maybe Handle -> Maybe FilePath -> IO a -> IO a
describeError descr hndl fpath =
  modifyIOError (\e -> annotateIOError e descr hndl fpath)
-- | Run an I\/O action as a daemon.
--
-- WARNING: this only works in single-threaded mode (either using the
-- single-threaded runtime, or using the multi-threaded one but with
-- only one OS thread, i.e. -N1).
--
-- Protocol: the parent opens a pipe, double-forks the daemon, and then
-- reads the pipe; the child reports startup failures by writing to the
-- write end, so an empty read means successful startup.
daemonize :: FilePath -> (Maybe Fd -> IO ()) -> IO ()
daemonize logfile action = do
  (rpipe, wpipe) <- createPipe
  -- first fork
  _ <- forkProcess $ do
    -- in the child
    closeFd rpipe
    let wpipe' = Just wpipe
    setupDaemonEnv "/" (unionFileModes groupModes otherModes)
    -- fd setup failures are reported to the parent over the pipe
    setupDaemonFDs (Just logfile) `Control.Exception.catch`
      handlePrepErr False wpipe'
    _ <- installHandler lostConnection (Catch (handleSigHup logfile)) Nothing
    -- second fork, launches the actual child code; standard
    -- double-fork technique
    _ <- forkProcess (action wpipe')
    exitImmediately ExitSuccess
  -- back in the parent: wait for the child's verdict on the pipe
  closeFd wpipe
  hndl <- fdToHandle rpipe
  errors <- hGetContents hndl
  ecode <- if null errors
             then return ExitSuccess
             else do
               hPutStrLn stderr $ daemonStartupErr errors
               return $ ExitFailure C.exitFailure
  exitImmediately ecode
-- | Generic daemon startup: argument parsing, node/user checks and the
-- pre-fork check function all run in the foreground; only then does the
-- process daemonize (unless -f was given) and run prepare + main.
genericMain :: GanetiDaemon -- ^ The daemon we're running
            -> [OptType]    -- ^ The available options
            -> CheckFn a    -- ^ Check function
            -> PrepFn a b   -- ^ Prepare function
            -> MainFn a b   -- ^ Execution function
            -> IO ()
genericMain daemon options check_fn prep_fn exec_fn = do
  let progname = daemonName daemon
  (opts, args) <- parseArgs progname options
  ensureNode daemon
  exitUnless (null args) "This program doesn't take any arguments"
  unless (optNoUserChecks opts) $ do
    runtimeEnts <- getEnts
    ents <- exitIfBad "Can't find required user/groups" runtimeEnts
    verifyDaemonUser daemon ents
  syslog <- case optSyslogUsage opts of
              Nothing -> exitIfBad "Invalid cluster syslog setting" $
                         syslogUsageFromRaw C.syslogUsage
              Just v -> return v
  log_file <- daemonLogFile daemon
  -- run the check function and optionally exit if it returns an exit code
  check_result <- check_fn opts
  check_result' <- case check_result of
                     Left code -> exitWith code
                     Right v -> return v
  let processFn = if optDaemonize opts
                    then daemonize log_file
                    else \action -> action Nothing
  processFn $ innerMain daemon opts syslog check_result' prep_fn exec_fn
-- | Full prepare function.
--
-- This is executed after daemonization, and sets up both the log
-- files (a generic functionality) and the custom prepare function of
-- the daemon.  It also writes/locks the PID file and installs the
-- SIGTERM handler targeting the current (main) thread.
fullPrep :: GanetiDaemon  -- ^ The daemon we're running
         -> DaemonOptions -- ^ The options structure, filled from the cmdline
         -> SyslogUsage   -- ^ Syslog mode
         -> a             -- ^ Check results
         -> PrepFn a b    -- ^ Prepare function
         -> IO (FilePath, b)
fullPrep daemon opts syslog check_result prep_fn = do
  -- In foreground mode log to an explicit file; when daemonized the std
  -- fds already point at the log file, so no extra file is needed.
  logfile <- if optDaemonize opts
               then return Nothing
               else liftM Just $ daemonLogFile daemon
  pidfile <- daemonPidFile daemon
  let dname = daemonName daemon
  setupLogging logfile dname (optDebug opts) True False syslog
  _ <- describeError "writing PID file; already locked?"
         Nothing (Just pidfile) $ writePidFile pidfile
  logNotice $ dname ++ " daemon startup"
  prep_res <- prep_fn opts check_result
  tid <- myThreadId
  _ <- installHandler sigTERM (Catch $ handleSigTerm tid) Nothing
  return (pidfile, prep_res)

-- | Inner daemon function.
--
-- This is executed after daemonization.  Prepare failures are reported
-- back over the fork pipe (if any) via 'handlePrepErr'; the PID file is
-- removed when the main function finishes, however it finishes.
innerMain :: GanetiDaemon  -- ^ The daemon we're running
          -> DaemonOptions -- ^ The options structure, filled from the cmdline
          -> SyslogUsage   -- ^ Syslog mode
          -> a             -- ^ Check results
          -> PrepFn a b    -- ^ Prepare function
          -> MainFn a b    -- ^ Execution function
          -> Maybe Fd      -- ^ Error reporting function
          -> IO ()
innerMain daemon opts syslog check_result prep_fn exec_fn fd = do
  (pidFile, prep_result) <- fullPrep daemon opts syslog check_result prep_fn
                              `Control.Exception.catch` handlePrepErr True fd
  -- no error reported, we should now close the fd
  maybeCloseFd fd
  finally (exec_fn opts check_result prep_result) (finalCleanup pidFile)
-- | Daemon prepare error handling function: report the error (over the
-- fork pipe when available, otherwise on stderr), optionally log it,
-- and terminate with exit code 1.
handlePrepErr :: Bool -> Maybe Fd -> IOError -> IO a
handlePrepErr logging_setup fd err = do
  let msg = show err
  case fd of
    -- explicitly writing to the fd directly, since when forking it's
    -- better (safer) than trying to convert this into a full handle
    Just fd' -> void (fdWrite fd' msg)
    Nothing  -> hPutStrLn stderr (daemonStartupErr msg)
  when logging_setup $ logError msg
  exitWith (ExitFailure 1)

-- | Close a file descriptor if present; a no-op for 'Nothing'.
maybeCloseFd :: Maybe Fd -> IO ()
maybeCloseFd = maybe (return ()) closeFd
| badp/ganeti | src/Ganeti/Daemon.hs | gpl-2.0 | 15,931 | 0 | 16 | 3,631 | 3,416 | 1,770 | 1,646 | 324 | 4 |
{-# OPTIONS -w -O0 #-}
{- |
Module : ATC/ProofTree.der.hs
Description : generated Typeable, ShATermConvertible instances
Copyright : (c) DFKI Bremen 2008
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Christian.Maeder@dfki.de
Stability : provisional
Portability : non-portable(overlapping Typeable instances)
Automatic derivation of instances via DrIFT-rule Typeable, ShATermConvertible
for the type(s):
'Common.ProofTree.ProofTree'
-}
{-
Generated by 'genRules' (automatic rule generation for DrIFT). Don't touch!!
dependency files:
Common/ProofTree.hs
-}
module ATC.ProofTree () where
import ATerm.Lib
import Common.ProofTree
import Data.Typeable
{-! for Common.ProofTree.ProofTree derive : Typeable !-}
{-! for Common.ProofTree.ProofTree derive : ShATermConvertible !-}
| nevrenato/Hets_Fork | ATC/ProofTree.der.hs | gpl-2.0 | 816 | 0 | 4 | 126 | 29 | 20 | 9 | 5 | 0 |
module GRPAgentBinding
( reprogram
, act
, reinforcement
)
where
import GRPStats
import GRPCommon
--This file is currently DEAD CODE. Compilation is done by calling ghc and execution is done by calling the generated executables.
--Documentation of that procedure is in GRPHeadless
--The best bet to get this up and running probably is plugin.
--When one of these three is called for the first time, eval initial state and inject. That is, when compilation is needed.
--Afterwards, store State to AgentStats
-- | Reprogramming stub: the agent is returned untouched, with an empty
-- compiler report (see the module note — this path is currently dead code).
reprogram :: AgentStats -> (AgentStats, String)
reprogram agent = (agent, "")
-- | Action stub: the input is ignored and the agent produces no output.
act :: AgentStats -> Input -> (AgentStats, Output)
act agent _input = (agent, [])
-- | Reinforcement stub: both the score and the message are ignored.
reinforcement :: AgentStats -> Int -> String -> AgentStats
reinforcement agent _score _msg = agent
--In Agent:
-- reprogram :: [StdGen] -> State -> [String] -> (String, State)
-- act :: [StdGen] -> State -> Input -> (Output, State)
-- reinforcement :: [StdGen] -> State -> Int -> String -> State
--TODO: What about safety assertions? What about compiler feedback and fitness eval? Who takes care of stripping source code of it's prefix and reattaching it?
--create Object file. Load it. Evaluate Initial state. All as required.
--compile :: AgentStats -> AgentStats
--Unload a Agent's object file.
--unload
| vektordev/GP | src/GRPAgentBinding.hs | gpl-2.0 | 1,298 | 0 | 7 | 257 | 130 | 81 | 49 | 12 | 1 |
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances, RecordWildCards #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Sound.Tidal.Show (show, showAll, draw, drawLine, drawLineSz) where
import Sound.Tidal.Pattern
import Data.List (intercalate, sortOn)
import Data.Ratio (numerator, denominator)
import Data.Maybe (fromMaybe, isJust)
import qualified Data.Map.Strict as Map
-- | A pattern is rendered by listing the events of its first cycle.
instance (Show a) => Show (Pattern a) where
  show = showPattern (Arc 0 1)
-- | Render the events of a pattern within the given arc, one event per
-- line, right-padding the part strings so the values line up.
showPattern :: Show a => Arc -> Pattern a -> String
showPattern a p = intercalate "\n" evStrings
  where evs = map showEvent $ sortOn part $ queryArc p a
        maxPartLength :: Int
        -- Seeded with 0 so a pattern with no events in the arc renders
        -- as "" instead of crashing on 'maximum' of an empty list.
        maxPartLength = foldr max 0 $ map (length . fst) evs
        evString :: (String, String) -> String
        evString ev = ((replicate (maxPartLength - (length (fst ev))) ' ')
                       ++ fst ev
                       ++ snd ev
                      )
        evStrings = map evString evs
-- | Render one event as a (part, value) pair of strings. Analog events
-- (no whole) are shown as @~part~|@; events with a whole are shown as
-- @(part)|@, annotated with the whole's start/stop when they differ
-- from the part's.
showEvent :: Show a => Event a -> (String, String)
showEvent (Event _ Nothing a e) = ("~" ++ show a ++ "~|", show e)
showEvent (Event _ (Just (Arc ws we)) a@(Arc ps pe) e) =
  (prefix ++ "(" ++ show a ++ ")" ++ suffix ++ "|", show e)
  where prefix = if ws == ps then "" else prettyRat ws ++ "-"
        suffix = if we == pe then "" else "-" ++ prettyRat we
-- | Show everything, including each event's context.
showAll :: Show a => Arc -> Pattern a -> String
showAll a p = intercalate "\n" (map show (sortOn part (queryArc p a)))
-- | Contexts are shown as their raw list of source positions.
instance Show Context where
  show (Context cs) = show cs
-- | Values are rendered with a lightweight type tag: strings are quoted,
-- floats get an \'f\' suffix, rationals an \'r\' suffix; ints, booleans
-- and lists use their plain 'show'.
instance Show Value where
  show (VS s) = ('"':s) ++ "\""
  show (VI i) = show i
  show (VF f) = show f ++ "f"
  show (VR r) = show r ++ "r"
  show (VB b) = show b
  show (VX xs) = show xs
-- | Control maps render as a comma-separated @name: value@ listing.
instance {-# OVERLAPPING #-} Show ControlMap where
  show m = intercalate ", " $ map (\(name, v) -> name ++ ": " ++ show v) $ Map.toList m
-- | Arcs render as @start>stop@ using compact rationals.
instance {-# OVERLAPPING #-} Show Arc where
  show (Arc s e) = prettyRat s ++ ">" ++ prettyRat e
-- | An event renders as its context followed by the concatenated part
-- and value strings produced by 'showEvent'.
instance {-# OVERLAPPING #-} Show a => Show (Event a) where
  show e = show (context e) ++ uncurry (++) (showEvent e)
-- | Pretty-print a rational as an integer part (omitted when it is zero
-- and a positive fraction follows) plus a compact fractional part.
prettyRat :: Rational -> String
prettyRat r
  | unit == 0 && frac > 0 = fracPart
  | otherwise = show unit ++ fracPart
  where
    unit = floor r :: Int
    frac = r - toRational unit
    fracPart = showFrac (numerator frac) (denominator frac)
-- | Render a fraction compactly: a single vulgar-fraction glyph when one
-- exists, otherwise superscript-numerator/subscript-denominator for
-- single digits, otherwise a plain @n/d@ string. A zero numerator
-- renders as the empty string.
showFrac :: Integer -> Integer -> String
showFrac 0 _ = ""
showFrac n d =
  case lookup (n, d) vulgar of
    Just glyph -> glyph
    Nothing -> case (superscript n, subscript d) of
                 (Just n', Just d') -> n' ++ d'
                 _ -> show n ++ "/" ++ show d
  where
    vulgar = [ ((1,2),"½"), ((1,3),"⅓"), ((2,3),"⅔"), ((1,4),"¼")
             , ((3,4),"¾"), ((1,5),"⅕"), ((2,5),"⅖"), ((3,5),"⅗")
             , ((4,5),"⅘"), ((1,6),"⅙"), ((5,6),"⅚"), ((1,7),"⅐")
             , ((1,8),"⅛"), ((3,8),"⅜"), ((5,8),"⅝"), ((7,8),"⅞")
             , ((1,9),"⅑"), ((1,10),"⅒")
             ]
    superscript = digitGlyph "⁰¹²³⁴⁵⁶⁷⁸⁹"
    subscript = digitGlyph "₀₁₂₃₄₅₆₇₈₉"
    -- Single digits only; anything outside 0..9 falls back to "n/d".
    digitGlyph glyphs k
      | 0 <= k && k <= 9 = Just [glyphs !! fromInteger k]
      | otherwise = Nothing
-- | The number of equal steps a cycle must be divided into so that every
-- onset boundary of the pattern's first cycle lands on a step: the lcm
-- of the denominators of all event part start/stop positions.
stepcount :: Pattern a -> Int
stepcount pat = fromIntegral $ eventSteps $ concatMap (\ev -> [start ev, stop ev]) $ map part $ filter eventHasOnset $ queryArc pat (Arc 0 1)
  where eventSteps xs = foldr lcm 1 $ map denominator xs
-- | A rendered pattern drawing: cycle count, width in characters, and
-- the drawn text itself.
data Render = Render Int Int String
-- | Renders show with a cycle-count header; drawings wider than 1024
-- characters are refused.
instance Show Render where
  show (Render cyc i render) | i <= 1024 = "\n[" ++ show cyc ++ (if cyc == 1 then " cycle" else " cycles") ++ "]\n" ++ render
                             | otherwise = "That pattern is too complex to draw."
-- | Draw a character pattern, packing as many cycles side by side as fit
-- in the default width of 78 characters.
drawLine :: Pattern Char -> Render
drawLine = drawLineSz 78
-- | Draw a character pattern into the given width, stacking consecutive
-- cycles horizontally for as long as they fit.
drawLineSz :: Int -> Pattern Char -> Render
drawLineSz sz pat = joinCycles sz $ drawCycles pat
  where
    -- An infinite list of single-cycle drawings: cycle 0, then the
    -- pattern rotated left by one cycle, and so on.
    drawCycles :: Pattern Char -> [Render]
    drawCycles pat' = (draw pat'):(drawCycles $ rotL 1 pat')
    -- Concatenate drawings horizontally while the remaining width
    -- allows, padding each drawing's lines to its own width so the
    -- columns stay aligned; a drawing that no longer fits ends the run.
    joinCycles :: Int -> [Render] -> Render
    joinCycles _ [] = Render 0 0 ""
    joinCycles n ((Render cyc l s):cs) | l > n = Render 0 0 ""
                                       | otherwise = Render (cyc+cyc') (l + l' + 1) $ intercalate "\n" $ map (\(a,b) -> a ++ b) lineZip
      where
        (Render cyc' l' s') = joinCycles (n-l-1) cs
        linesN = max (length $ lines s) (length $ lines s')
        lineZip = take linesN $
                  zip (lines s ++ (repeat $ replicate l ' '))
                      (lines s' ++ (repeat $ replicate l' ' '))
    -- where maximum (map (length . head . (++ [""]) . lines) cs)
-- | Draw one cycle of a character pattern as ASCII art: one text row per
-- overlap level, each prefixed with \'|\'; an event's value marks its
-- onset and \'-\' fills the rest of its span.
draw :: Pattern Char -> Render
draw pat = Render 1 s $ (intercalate "\n" $ map ((\x -> ('|':x)) .drawLevel) ls)
  where ls = levels pat
        -- number of character columns used to draw one cycle
        s = stepcount pat
        rs = toRational s
        drawLevel :: [Event Char] -> String
        drawLevel [] = replicate s ' '
        drawLevel (e:es) = map f $ take s $ zip (drawLevel es ++ repeat ' ') (drawEvent e ++ repeat ' ')
        -- overlay two layers, preferring a drawn character over a blank
        f (' ', x) = x
        f (x, _) = x
        drawEvent :: Event Char -> String
        drawEvent ev = (replicate (floor $ rs * evStart) ' ')
                       ++ (value ev:(replicate ((floor $ rs * (evStop - evStart)) - 1) '-'))
          where evStart = start $ wholeOrPart ev
                evStop = stop $ wholeOrPart ev
{-
fitsWhole :: Event b -> [Event b] -> Bool
fitsWhole event events =
not $ any (\event' -> isJust $ subArc (wholeOrPart event) (wholeOrPart event')) events
addEventWhole :: Event b -> [[Event b]] -> [[Event b]]
addEventWhole e [] = [[e]]
addEventWhole e (level:ls)
| isAnalog e = level:ls
| fitsWhole e level = (e:level) : ls
| otherwise = level : addEventWhole e ls
arrangeEventsWhole :: [Event b] -> [[Event b]]
arrangeEventsWhole = foldr addEventWhole []
levelsWhole :: Eq a => Pattern a -> [[Event a]]
levelsWhole pat = arrangeEventsWhole $ sortOn' ((\Arc{..} -> 0 - (stop - start)) . wholeOrPart) (defragParts $ queryArc pat (Arc 0 1))
sortOn' :: Ord a => (b -> a) -> [b] -> [b]
sortOn' f = map snd . sortOn fst . map (\x -> let y = f x in y `seq` (y, x))
-}
-- | True iff the given event's part overlaps none of the events already
-- placed on the level.
fits :: Event b -> [Event b] -> Bool
fits (Event _ _ part' _) events =
  all (\e -> not (isJust (subArc part' (part e)))) events
-- | Place an event on the first level where it fits without overlap,
-- opening a fresh level at the bottom when no existing one accepts it.
addEvent :: Event b -> [[Event b]] -> [[Event b]]
addEvent e lvls =
  case lvls of
    [] -> [[e]]
    (level:rest)
      | fits e level -> (e : level) : rest
      | otherwise -> level : addEvent e rest
-- | Greedily stack a list of events into levels of pairwise
-- non-overlapping parts.
arrangeEvents :: [Event b] -> [[Event b]]
arrangeEvents = foldr addEvent []
-- | Split a pattern's first cycle into levels of non-overlapping event
-- parts, one drawn row per level.
levels :: Eq a => Pattern a -> [[Event a]]
-- levels pat = arrangeEvents $ sortOn' ((\Arc{..} -> stop - start) . part) (defragParts $ queryArc pat (Arc 0 1))
levels pat = arrangeEvents $ reverse $ defragParts $ queryArc pat (Arc 0 1)
| d0kt0r0/Tidal | src/Sound/Tidal/Show.hs | gpl-3.0 | 7,077 | 1 | 17 | 2,063 | 2,671 | 1,332 | 1,339 | 145 | 21 |
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE FlexibleContexts #-}
-- |
-- Copyright : (c) 2010-2012 Benedikt Schmidt & Simon Meier
-- License : GPL v3 (see LICENSE)
--
-- Maintainer : Simon Meier <iridcode@gmail.com>
-- Portability : GHC only
--
-- The constraint reduction rules, which are not enforced as invariants in
-- "Theory.Constraint.Solver.Reduction".
--
-- A goal represents a possible application of a rule that may result in
-- multiple cases or even non-termination (if applied repeatedly). These goals
-- are computed as the list of 'openGoals'. See
-- "Theory.Constraint.Solver.ProofMethod" for the public interface to solving
-- goals and the implementation of heuristics.
module Theory.Constraint.Solver.Goals (
Usefulness(..)
, AnnotatedGoal
, openGoals
, solveGoal
, plainOpenGoals
) where
-- import Debug.Trace
import Prelude hiding (id, (.))
import qualified Data.ByteString.Char8 as BC
import qualified Data.DAG.Simple as D (reachableSet)
-- import Data.Foldable (foldMap)
import qualified Data.Map as M
import qualified Data.Monoid as Mono
import qualified Data.Set as S
import Control.Basics
import Control.Category
import Control.Monad.Disj
import Control.Monad.State (gets)
import Control.Monad.Trans.State.Lazy hiding (get,gets)
import Control.Monad.Trans.FastFresh -- GHC7.10 needs: hiding (get,gets)
import Control.Monad.Trans.Reader -- GHC7.10 needs: hiding (get,gets)
import Extension.Data.Label as L
import Theory.Constraint.Solver.Contradictions (substCreatesNonNormalTerms)
import Theory.Constraint.Solver.Reduction
import Theory.Constraint.System
import Theory.Tools.IntruderRules (mkDUnionRule, isDExpRule, isDPMultRule, isDEMapRule)
import Theory.Model
import Utils.Misc (twoPartitions)
------------------------------------------------------------------------------
-- Extracting Goals
------------------------------------------------------------------------------
-- | A rough ranking of how promising solving a goal is, used by the
-- proof-method heuristics to order open goals.
data Usefulness =
    Useful
  -- ^ A goal that is likely to result in progress.
  | LoopBreaker
  -- ^ A goal that is delayed to avoid immediate termination.
  | ProbablyConstructible
  -- ^ A goal that is likely to be constructible by the adversary.
  | CurrentlyDeducible
  -- ^ A message that is deducible for the current solution.
  deriving (Show, Eq, Ord)
-- | Goals annotated with their number and usefulness.
type AnnotatedGoal = (Goal, (Integer, Usefulness))
-- Instances
------------
-- | The list of goals that must be solved before a solution can be extracted.
-- Each goal is annotated with its age and an indicator for its usefulness.
-- Runs in the list monad: one result per goal that is still open.
openGoals :: System -> [AnnotatedGoal]
openGoals sys = do
    (goal, status) <- M.toList $ get sGoals sys
    let solved = get gsSolved status
    -- check whether the goal is still open
    guard $ case goal of
      ActionG i (kFactView -> Just (UpK, m)) ->
        if get sDiffSystem sys
          -- In a diff proof, all action goals need to be solved.
          then not (solved)
          else
            not $ solved
            -- message variables are not solved, except if the node already exists in the system -> facilitates finding contradictions
            || (isMsgVar m && Nothing == M.lookup i (get sNodes sys)) || sortOfLNTerm m == LSortPub
            -- handled by 'insertAction'
            || isPair m || isInverse m || isProduct m --- || isXor m
            || isUnion m || isNullaryPublicFunction m
      ActionG _ _ -> not solved
      PremiseG _ _ -> not solved
      -- Technically the 'False' disj would be a solvable goal. However, we
      -- have a separate proof method for this, i.e., contradictions.
      DisjG (Disj []) -> False
      DisjG _ -> not solved
      ChainG c p ->
        case kFactView (nodeConcFact c sys) of
          Just (DnK, viewTerm2 -> FUnion args) ->
            -- do not solve Union conclusions if they contain only known msg vars
            not solved && not (allMsgVarsKnownEarlier c args)
          -- open chains for msg vars are only solved if N5'' is applicable
          Just (DnK, m) | isMsgVar m -> (not solved) &&
                                        (chainToEquality m c p)
                        | otherwise -> not solved
          fa -> error $ "openChainGoals: impossible fact: " ++ show fa
      -- FIXME: Split goals may be duplicated, we always have to check
      -- explicitly if they still exist.
      SplitG idx -> splitExists (get sEqStore sys) idx
    -- rank the still-open goal for the heuristics
    let
      useful = case goal of
        _ | get gsLoopBreaker status -> LoopBreaker
        ActionG i (kFactView -> Just (UpK, m))
          -- if there are KU-guards then all knowledge goals are useful
          | hasKUGuards -> Useful
          | currentlyDeducible i m -> CurrentlyDeducible
          | probablyConstructible m -> ProbablyConstructible
        _ -> Useful
    return (goal, (get gsNr status, useful))
  where
    existingDeps = rawLessRel sys
    hasKUGuards =
        any ((KUFact `elem`) . guardFactTags) $ S.toList $ get sFormulas sys
    -- does every literal of the term satisfy the sort predicate?
    checkTermLits :: (LSort -> Bool) -> LNTerm -> Bool
    checkTermLits p =
        Mono.getAll . foldMap (Mono.All . p . sortOfLit)
    -- KU goals of messages that are likely to be constructible by the
    -- adversary. These are terms that do not contain a fresh name or a fresh
    -- name variable. For protocols without loops they are very likely to be
    -- constructible. For protocols with loops, such terms have to be given
    -- similar priority as loop-breakers.
    probablyConstructible m = checkTermLits (LSortFresh /=) m
                              && not (containsPrivate m)
    -- KU goals of messages that are currently deducible. Either because they
    -- are composed of public names only and do not contain private function
    -- symbols or because they can be extracted from a sent message using
    -- unpairing or inversion only.
    currentlyDeducible i m = (checkTermLits (LSortPub ==) m
                              && not (containsPrivate m))
                          || extractible i m
    extractible i m = or $ do
        (j, ru) <- M.toList $ get sNodes sys
        -- We cannot deduce a message from a last node.
        guard (not $ isLast sys j)
        let derivedMsgs = concatMap toplevelTerms $
                [ t | Fact OutFact _ [t] <- get rConcs ru] <|>
                [ t | Just (DnK, t) <- kFactView <$> get rConcs ru]
        -- m is deducible from j without an immediate contradiction
        -- if it is a derived message of 'ru' and the dependency does
        -- not make the graph cyclic.
        return $ m `elem` derivedMsgs &&
            not (j `S.member` D.reachableSet [i] existingDeps)
    toplevelTerms t@(viewTerm2 -> FPair t1 t2) =
        t : toplevelTerms t1 ++ toplevelTerms t2
    toplevelTerms t@(viewTerm2 -> FInv t1) = t : toplevelTerms t1
    toplevelTerms t = [t]
    allMsgVarsKnownEarlier (i,_) args = (all isMsgVar args) &&
        (all (`elem` earlierMsgVars) args)
      where earlierMsgVars = do (j, _, t) <- allKUActions sys
                                guard $ isMsgVar t && alwaysBefore sys j i
                                return t
    -- check whether we have a chain that fits N5'' (an open chain between an
    -- equality rule and a simple msg var conclusion that exists as a K up
    -- previously) which needs to be resolved even if it is an open chain
    chainToEquality :: LNTerm -> NodeConc -> NodePrem -> Bool
    chainToEquality t_start conc p = is_msg_var && is_equality && ku_before
      where
        -- check whether it is a msg var
        is_msg_var = isMsgVar t_start
        -- and whether we do have an equality rule instance at the end
        is_equality = isIEqualityRule $ nodeRule (fst p) sys
        -- get all KU-facts with the same msg var
        ku_start = filter (\x -> (fst x) == t_start) $
                   map (\(i, _, m) -> (m, i)) $ allKUActions sys
        -- and check whether any of them happens before the KD-conclusion
        ku_before = any (\(_, x) -> alwaysBefore sys x (fst conc)) ku_start
-- | The list of all open goals left together with their status.
plainOpenGoals :: System -> [(Goal, GoalStatus)]
plainOpenGoals = filter (unsolved . snd) . M.toList . L.get sGoals
  where
    unsolved (GoalStatus solved _ _) = not solved
------------------------------------------------------------------------------
-- Solving 'Goal's
------------------------------------------------------------------------------
-- | @solveGoal rules goal@ enumerates all possible cases of how this goal
-- could be solved in the context of the given @rules@. For each case, a
-- sensible case-name is returned.
solveGoal :: Goal -> Reduction String
solveGoal goal = do
    -- mark before solving, as representation might change due to unification
    markGoalAsSolved "directly" goal
    rules <- askM pcRules
    -- dispatch on the goal's shape to the specialised solver below
    case goal of
      ActionG i fa -> solveAction (nonSilentRules rules) (i, fa)
      PremiseG p fa ->
           solvePremise (get crProtocol rules ++ get crConstruct rules) p fa
      ChainG c p -> solveChain (get crDestruct rules) (c, p)
      SplitG i -> solveSplit i
      DisjG disj -> solveDisjunction disj
-- The following functions are internal to 'solveGoal'. Use them with great
-- care.
-- | CR-rule *S_at*: solve an action goal.
solveAction :: [RuleAC] -- ^ All rules labelled with an action
            -> (NodeId, LNFact) -- ^ The action we are looking for.
            -> Reduction String -- ^ A sensible case name.
solveAction rules (i, fa@(Fact _ ann _)) = do
    mayRu <- M.lookup i <$> getM sNodes
    showRuleCaseName <$> case mayRu of
      -- the node does not exist yet: instantiate a rule for it
      Nothing -> case fa of
                 (Fact KUFact _ [m@(viewTerm2 -> FXor ts)]) -> do
                     partitions <- disjunctionOfList $ twoPartitions ts
                     case partitions of
                       -- empty second half: coerce the whole XOR term up
                       (_, []) -> do
                           let ru = Rule (IntrInfo CoerceRule) [kdFact m] [fa] [fa] []
                           modM sNodes (M.insert i ru)
                           insertGoal (PremiseG (i, PremIdx 0) (kdFact m)) False
                           return ru
                       -- otherwise construct the XOR from the two halves
                       (a', b') -> do
                           let a = fAppAC Xor a'
                           let b = fAppAC Xor b'
                           let ru = Rule (IntrInfo (ConstrRule $ BC.pack "_xor")) [(kuFact a),(kuFact b)] [fa] [fa] []
                           modM sNodes (M.insert i ru)
                           mapM_ requiresKU [a, b] *> return ru
                 _ -> do
                     ru <- labelNodeId i (annotatePrems <$> rules) Nothing
                     act <- disjunctionOfList $ get rActs ru
                     void (solveFactEqs SplitNow [Equal fa act])
                     return ru
      -- the node already exists: unify the goal with one of its actions
      Just ru -> do unless (fa `elem` get rActs ru) $ do
                        act <- disjunctionOfList $ get rActs ru
                        void (solveFactEqs SplitNow [Equal fa act])
                    return ru
  where
    -- If the fact in the action goal has annotations, then consider annotated
    -- versions of intruder rules (this allows high or low priority intruder knowledge
    -- goals to propagate to intruder knowledge of subterms)
    annotatePrems ru@(Rule ri ps cs as nvs) =
        if not (S.null ann) && isIntruderRule ru then
            Rule ri (annotateFact ann <$> ps) cs (annotateFact ann <$> as) nvs
        else ru
    -- demand that t is adversary-known strictly before node i
    requiresKU t = do
        j <- freshLVar "vk" LSortNode
        let faKU = kuFact t
        insertLess j i
        void (insertAction j faKU)
-- | CR-rules *DG_{2,P}* and *DG_{2,d}*: solve a premise with a direct edge
-- from a unifying conclusion or using a destruction chain.
--
-- Note that *In*, *Fr*, and *KU* facts are solved directly when adding a
-- 'ruleNode'.
--
solvePremise :: [RuleAC] -- ^ All rules with a non-K-fact conclusion.
             -> NodePrem -- ^ Premise to solve.
             -> LNFact -- ^ Fact required at this premise.
             -> Reduction String -- ^ Case name to use.
solvePremise rules p faPrem
  | isKDFact faPrem = do
      -- KD premise: introduce a fresh "receive" node that learns some
      -- message from the network, and connect it by a chain constraint.
      iLearn <- freshLVar "vl" LSortNode
      mLearn <- varTerm <$> freshLVar "t" LSortMsg
      let concLearn = kdFact mLearn
          premLearn = outFact mLearn
          -- !! Make sure that you construct the correct rule!
          ruLearn = Rule (IntrInfo IRecvRule) [premLearn] [concLearn] [] []
          cLearn = (iLearn, ConcIdx 0)
          pLearn = (iLearn, PremIdx 0)
      modM sNodes (M.insert iLearn ruLearn)
      insertChain cLearn p
      solvePremise rules pLearn premLearn
  | otherwise = do
      -- ordinary premise: instantiate a fresh rule and connect one of its
      -- conclusions to the premise by a direct edge
      (ru, c, faConc) <- insertFreshNodeConc rules
      insertEdges [(c, faConc, faPrem, p)]
      return $ showRuleCaseName ru
-- | CR-rule *DG2_chain*: solve a chain constraint, either by closing it
-- with a direct edge or by extending it with one destruction step.
solveChain :: [RuleAC] -- ^ All destruction rules.
           -> (NodeConc, NodePrem) -- ^ The chain to extend by one step.
           -> Reduction String -- ^ Case name to use.
solveChain rules (c, p) = do
    faConc <- gets $ nodeConcFact c
    (do -- solve it by a direct edge
        cRule <- gets $ nodeRule (nodeConcNode c)
        pRule <- gets $ nodeRule (nodePremNode p)
        faPrem <- gets $ nodePremFact p
        contradictoryIf (forbiddenEdge cRule pRule)
        insertEdges [(c, faConc, faPrem, p)]
        let mPrem = case kFactView faConc of
                      Just (DnK, m') -> m'
                      _ -> error $ "solveChain: impossible"
            caseName (viewTerm -> FApp o _) = showFunSymName o
            caseName (viewTerm -> Lit l) = showLitName l
        contradictoryIf (illegalCoerce pRule mPrem)
        return (caseName mPrem)
     `disjunction`
     -- extend it with one step
     case kFactView faConc of
       Just (DnK, viewTerm2 -> FUnion args) ->
           do -- If the chain starts at a union message, we
              -- compute the applicable destruction rules directly.
              i <- freshLVar "vr" LSortNode
              let rus = map (ruleACIntrToRuleACInst . mkDUnionRule args)
                            (filter (not . isMsgVar) args)
              -- NOTE: We rely on the check that the chain is open here.
              ru <- disjunctionOfList rus
              modM sNodes (M.insert i ru)
              -- FIXME: Do we have to add the PremiseG here so it
              -- marked as solved?
              let v = PremIdx 0
              faPrem <- gets $ nodePremFact (i,v)
              extendAndMark i ru v faPrem faConc
       Just (DnK, m) ->
           do -- If the chain does not start at a union message,
              -- the usual *DG2_chain* extension is perfomed.
              -- But we ignore open chains, as we only resolve
              -- open chains with a direct chain
              contradictoryIf (isMsgVar m)
              cRule <- gets $ nodeRule (nodeConcNode c)
              (i, ru) <- insertFreshNode rules (Just cRule)
              contradictoryIf (forbiddenEdge cRule ru)
              -- This requires a modified chain constraint def:
              -- path via first destruction premise of rule ...
              (v, faPrem) <- disjunctionOfList $ take 1 $ enumPrems ru
              extendAndMark i ru v faPrem faConc
       _ -> error "solveChain: not a down fact"
     )
  where
    -- connect the chain's conclusion to the new node's premise, mark the
    -- premise goal solved, and re-open the chain from the new conclusion
    extendAndMark :: NodeId -> RuleACInst -> PremIdx -> LNFact -> LNFact
       -> Control.Monad.Trans.State.Lazy.StateT System
          (Control.Monad.Trans.FastFresh.FreshT
           (DisjT (Control.Monad.Trans.Reader.Reader ProofContext))) String
    extendAndMark i ru v faPrem faConc = do
        insertEdges [(c, faConc, faPrem, (i, v))]
        markGoalAsSolved "directly" (PremiseG (i, v) faPrem)
        insertChain (i, ConcIdx 0) p
        return (showRuleCaseName ru)
    -- contradicts normal form condition:
    -- no edge from dexp to dexp KD premise, no edge from dpmult
    -- to dpmult KD premise, and no edge from dpmult to demap KD premise
    -- (this condition replaces the exp/noexp tags)
    -- no more than the allowed consecutive rule applications
    forbiddenEdge :: RuleACInst -> RuleACInst -> Bool
    forbiddenEdge cRule pRule = isDExpRule cRule && isDExpRule pRule ||
                                isDPMultRule cRule && isDPMultRule pRule ||
                                isDPMultRule cRule && isDEMapRule pRule ||
                                (getRuleName cRule == getRuleName pRule)
                                 && (getRemainingRuleApplications cRule == 1)
    -- Contradicts normal form condition N2:
    -- No coerce of a pair of inverse.
    illegalCoerce pRule mPrem = isCoerceRule pRule && isPair mPrem ||
                                isCoerceRule pRule && isInverse mPrem ||
                                -- Also: Coercing of products is unnecessary, since the protocol is *-restricted.
                                isCoerceRule pRule && isProduct mPrem
-- | Solve an equation split. There is no corresponding CR-rule in the rule
-- system on paper because there we eagerly split over all variants of a rule.
-- In practice, this is too expensive and we therefore use the equation store
-- to delay these splits.
solveSplit :: SplitId -> Reduction String
solveSplit x = do
    split <- gets ((`performSplit` x) . get sEqStore)
    let errMsg = error "solveSplit: inexistent split-id"
    store <- maybe errMsg disjunctionOfList split
    -- FIXME: Simplify this interaction with the equation store
    hnd <- getMaudeHandle
    substCheck <- gets (substCreatesNonNormalTerms hnd)
    -- re-simplify the store for the chosen case; an inconsistent store
    -- means this case is contradictory
    store' <- simp hnd substCheck store
    contradictoryIf (eqsIsFalse store')
    sEqStore =: store'
    return "split"
-- | CR-rule *S_disj*: solve a disjunction of guarded formulas using a case
-- distinction.
--
-- In contrast to the paper, we use n-ary disjunctions and also split over all
-- of them at once.
solveDisjunction :: Disj LNGuarded -> Reduction String
solveDisjunction disj = do
    (caseIdx, formula) <- disjunctionOfList (zip [(1::Int)..] (getDisj disj))
    insertFormula formula
    return ("case_" ++ show caseIdx)
| tamarin-prover/tamarin-prover | lib/theory/src/Theory/Constraint/Solver/Goals.hs | gpl-3.0 | 19,112 | 0 | 30 | 6,292 | 3,861 | 1,993 | 1,868 | 253 | 14 |
import Data.List
-- | A binary tree of Ints; 'Empty' marks a missing child.
data Tree = Empty | Node Tree Int Tree deriving (Show, Eq)
--input:
--first line length of each tree
--middle 2*N elements of tree
--after tree is the number of swaps
--following the number of swaps is the depth at which to swap
--understood 1 at start of tree (not mentioned anywhere)
-- | Read the tree description and swap depths from stdin, run each swap
-- in turn, and print the inorder traversal after every swap. The
-- implicit root value 1 is prepended before parsing the node list.
main :: IO()
main = do
  _ <- getLine
  tmp <- fmap ((map words). lines) getContents
  let
    nodes = map read $ concat $ head $ split_args ([["1"]] ++ tmp)
    swaps = tail $ map read $ concat $ head $ tail $ split_args tmp
  mapM_ putStrLn $ map (unwords. map show) $ run_swaps swaps $ buildTree $ convert_args nodes
-- | Split the input groups into @[tree-part, swap-part]@: the swap part
-- is the maximal trailing run of groups with fewer than two tokens (the
-- swap count and depths), and the tree part is everything before it.
-- Rewritten from a manual reverse scan built on partial 'last'/'init'
-- to a 'takeWhile' over the reversed list; behaviour is unchanged.
split_args :: [[String]] -> [[[String]]]
split_args args = [front, back]
  where
    -- trailing short groups, restored to their original order
    back = reverse $ takeWhile ((< 2) . length) $ reverse args
    front = take (length args - length back) args
-- | Apply each swap depth in turn, collecting the inorder traversal
-- after every swap. Swaps are cumulative: each one operates on the tree
-- produced by the previous swap.
run_swaps :: [Int] -> Tree -> [[Int]]
run_swaps [] _ = [[]]
run_swaps [s] tree = [inorder $ swap_DF tree s 1]
run_swaps (s:ss) tree = (\x -> [inorder x] ++ run_swaps ss x)
                        $ swap_DF tree s 1
--detect and pad empty places so build tree will work
--take 15 [2^j-1 | j <- [1..]]
-- | Walk the level-order node list and, wherever a node is -1 (absent),
-- splice in two more -1 entries where its children would appear, so
-- that every slot down to the last complete level is filled.
convert_args :: [Int] -> [Int]
convert_args nodes = pad nodes 0
  where
    pad nodes cur
      | cur > node_max (length nodes) = nodes
      -- absent node: insert placeholders for its two children at their
      -- level-order positions, then move on
      | nodes !! cur == -1 = pad (take ((cur+1)*2-1) nodes
                                  ++ [-1,-1]
                                  ++ drop ((cur+1)*2-1) nodes)
                                 (cur+1)
      | otherwise = pad nodes (cur+1)
    -- index of the last node of the deepest complete level (0 for
    -- implausibly large inputs, which stops the padding early)
    node_max n_len = if n_len > 2^14
                     then 0
                     else fromIntegral (2^(floor $ logBase 2 (fromIntegral n_len))-1)
--nodes come in level order as list
--[1,2,3,4] becomes r rl rr rll (and would have a lot of -1) [1,2,3,4,-1,-1,-1]
--tree not actually symmetric, so need a new way to build
-- | Build the tree from the padded level-order list: the head is the
-- root, and the tail is partitioned into left- and right-subtree node
-- lists using the 'isLeft' mask.
buildTree [] = Empty
buildTree nodes = if head nodes == -1
                  then Empty
                  else Node (buildTree (separateLR left (tail nodes)))
                            (head nodes)
                            (buildTree (separateLR right (tail nodes)))
  where
    left = isLeft (length nodes - 1) 0
    right = map not left
-- | Keep the elements whose positional flag is True; stops at the end
-- of the shorter of the two lists.
separateLR :: [Bool] -> [a] -> [a]
separateLR flags xs = [x | (keep, x) <- zip flags xs, keep]
-- | Mask marking which level-order positions belong to the left subtree:
-- at each level there are 2^power left slots followed by 2^power right
-- slots; the final level is truncated to the remaining length.
isLeft :: Int -> Int -> [Bool]
isLeft len power
  | len <= half = take len block
  | otherwise = block ++ isLeft (len - 2 * half) (power + 1)
  where
    half = 2 ^ power
    block = replicate half True ++ replicate half False
-- | Mirror a node's children. Now total: previously the match had no
-- 'Empty' case and crashed on an empty tree; mirroring 'Empty' is the
-- identity.
swap :: Tree -> Tree
swap Empty = Empty
swap (Node left value right) = Node right value left
-- Partial accessors: all three crash on 'Empty'; callers guard with
-- explicit (== Empty) checks before using them.
get_left (Node left _ _ ) = left
get_right (Node _ _ right) = right
get_value (Node _ value _ ) = value
-- | The non-Empty children of a node, left before right; an 'Empty'
-- tree has none.
get_children :: Tree -> [Tree]
get_children Empty = []
get_children (Node l _ r) = filter (/= Empty) [l, r]
-- | Level-order (breadth-first) traversal of the node values.
-- NOTE(review): calling this on an 'Empty' root crashes, since
-- 'get_value' is partial — confirm callers always pass a non-empty tree.
traverseBF :: Tree -> [Int]
traverseBF tree = tbf [tree]
  where
    -- emit one level's values, then recurse on all their children
    tbf :: [Tree] -> [Int]
    tbf [] = []
    tbf level = map get_value level ++ (tbf $ concat $ map get_children level)
--swaps also occur at multiples of swap depth
-- | Swap the left and right subtrees at every node whose depth
-- (root = 1) is a multiple of the requested swap depth, recursing into
-- the whole tree either way.
swap_DF :: Tree -> Int -> Int -> Tree
swap_DF cur depth c_depth
  | cur == Empty = Empty
  | c_depth `mod` depth == 0 = Node (swap_DF (get_right cur) depth (c_depth + 1))
                                    (get_value cur)
                                    (swap_DF (get_left cur) depth (c_depth + 1))
  | otherwise = Node (swap_DF (get_left cur) depth (c_depth + 1))
                     (get_value cur)
                     (swap_DF (get_right cur) depth (c_depth + 1))
-- | c_depth < depth = Node (swap_DF (get_left cur) depth (c_depth + 1))
--                          (get_value cur)
--                          (swap_DF (get_right cur) depth (c_depth + 1))
-- | otherwise = Empty
inorder :: Tree -> [Int]
inorder Empty = []
inorder (Node l v r) = inorder l ++ [v] ++ inorder r | woodsjc/hackerrank-challenges | swap-nodes.hs | gpl-3.0 | 4,423 | 102 | 25 | 1,550 | 1,594 | 838 | 756 | 79 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Content.ShippingSettings.Getsupportedcarriers
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves supported carriers and carrier services for an account.
--
-- /See:/ <https://developers.google.com/shopping-content Content API for Shopping Reference> for @content.shippingsettings.getsupportedcarriers@.
module Network.Google.Resource.Content.ShippingSettings.Getsupportedcarriers
(
-- * REST Resource
ShippingSettingsGetsupportedcarriersResource
-- * Creating a Request
, shippingSettingsGetsupportedcarriers
, ShippingSettingsGetsupportedcarriers
-- * Request Lenses
, ssgMerchantId
) where
import Network.Google.Prelude
import Network.Google.ShoppingContent.Types
-- | A resource alias for @content.shippingsettings.getsupportedcarriers@ method which the
-- 'ShippingSettingsGetsupportedcarriers' request conforms to.
-- Corresponds to @GET content/v2/{merchantId}/supportedCarriers@.
type ShippingSettingsGetsupportedcarriersResource =
     "content" :>
       "v2" :>
         Capture "merchantId" (Textual Word64) :>
           "supportedCarriers" :>
             QueryParam "alt" AltJSON :>
               Get '[JSON]
                 ShippingSettingsGetSupportedCarriersResponse
-- | Retrieves supported carriers and carrier services for an account.
--
-- /See:/ 'shippingSettingsGetsupportedcarriers' smart constructor.
newtype ShippingSettingsGetsupportedcarriers = ShippingSettingsGetsupportedcarriers'
    { _ssgMerchantId :: Textual Word64 -- ^ merchant (account) id the request targets
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ShippingSettingsGetsupportedcarriers' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ssgMerchantId'
shippingSettingsGetsupportedcarriers
    :: Word64 -- ^ 'ssgMerchantId'
    -> ShippingSettingsGetsupportedcarriers
shippingSettingsGetsupportedcarriers pSsgMerchantId_ =
    ShippingSettingsGetsupportedcarriers'
    { _ssgMerchantId = _Coerce # pSsgMerchantId_
    }
-- | The ID of the account for which to retrieve the supported carriers.
ssgMerchantId :: Lens' ShippingSettingsGetsupportedcarriers Word64
ssgMerchantId
  = lens _ssgMerchantId
      (\ s a -> s{_ssgMerchantId = a})
      . _Coerce
-- | Generated request wiring: the response type, the required OAuth
-- scope, and the client built from the resource alias above.
instance GoogleRequest
         ShippingSettingsGetsupportedcarriers where
        type Rs ShippingSettingsGetsupportedcarriers =
             ShippingSettingsGetSupportedCarriersResponse
        type Scopes ShippingSettingsGetsupportedcarriers =
             '["https://www.googleapis.com/auth/content"]
        requestClient
          ShippingSettingsGetsupportedcarriers'{..}
          = go _ssgMerchantId (Just AltJSON)
              shoppingContentService
          where go
                  = buildClient
                      (Proxy ::
                         Proxy ShippingSettingsGetsupportedcarriersResource)
                      mempty
| rueshyna/gogol | gogol-shopping-content/gen/Network/Google/Resource/Content/ShippingSettings/Getsupportedcarriers.hs | mpl-2.0 | 3,568 | 0 | 12 | 747 | 316 | 192 | 124 | 57 | 1 |
{-# LANGUAGE OverloadedStrings, RecordWildCards #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Model.Permission
( module Model.Permission.Types
-- , permissionVIEW
, permissionPRIVATE
, readPermission
, readRelease
, dataPermission4
, canReadData2
, accessJSON
-- * Checking permissioned objects
, checkPermission
, PermissionResponse (..)
-- * New
, requestAccess
, Permissioned
, mkPermissioned
) where
import Data.Monoid ((<>))
import qualified JSON
import Model.Release.Types
import Model.Permission.Types
-- | Represents a permissioned object. The constructor is not exported: use
-- 'mkPermissioned' and 'requestAccess' instead.
data Permissioned a = Permissioned
  { unsafeAccess :: a
    -- ^ The wrapped object; \"unsafe\" because reading it directly
    -- bypasses the permission check performed by 'requestAccess'.
  , grantedPermission :: Permission
    -- ^ The permission granted on the object at wrapping time.
  }
-- | Smart constructor for 'Permissioned'.
--
-- The caller supplies a projection from the object to the permission
-- granted on it, mirroring how the existing code already maps objects to
-- permissions.  If database queries ever return 'Permissioned' values
-- directly, this function becomes obsolete.
mkPermissioned :: (a -> Permission) -> a -> Permissioned a
mkPermissioned permissionOf obj =
  Permissioned { unsafeAccess = obj, grantedPermission = permissionOf obj }
-- | How to get access to a permissioned object: the object is unwrapped
-- only when the requested permission does not exceed the granted one.
-- This makes a concrete concept out of an existing pattern in the
-- codebase; a later design could couple the access request to the action
-- that needs it.
requestAccess
  :: Permission
  -- ^ Requested permission
  -> Permissioned a
  -- ^ Wrapped object
  -> Maybe a
  -- ^ The unwrapped object, when the request is within the grant
requestAccess wanted wrapped
  | wanted <= grantedPermission wrapped = Just (unsafeAccess wrapped)
  | otherwise                           = Nothing
-- |Level at which things become visible. TODO: use this somewhere?
-- permissionVIEW :: Permission
-- permissionVIEW = PermissionPUBLIC
-- |Alias for READ. Grants full access to private data, bypassing consent permissions.
permissionPRIVATE :: Permission
permissionPRIVATE = PermissionREAD
-- |The necessary permission level to read a data object with the given
-- release.  Equivalent to the SQL function read_permission.  EXCERPTS is
-- treated the same as SHARED.
readPermission :: Release -> Permission
readPermission rel =
  case rel of
    ReleasePUBLIC   -> PermissionPUBLIC
    ReleaseSHARED   -> PermissionSHARED
    ReleaseEXCERPTS -> PermissionSHARED
    ReleasePRIVATE  -> permissionPRIVATE
-- |The most restrictive data release level that the current user may
-- access under the given permission.  Equivalent to the SQL function
-- read_release.  Inverse of 'readPermission' modulo the meaning of
-- @Nothing@.
readRelease :: Permission -> Maybe Release
readRelease perm =
  case perm of
    PermissionNONE   -> Nothing
    PermissionPUBLIC -> Just ReleasePUBLIC
    PermissionSHARED -> Just ReleaseSHARED
    _                -> Just ReleasePRIVATE
-- |The effective permission for data objects with the given attributes,
-- collapsing ineffective permissions to NONE: the user's volume-level
-- permission counts only if it reaches the level the release requires.
releasePermission :: Release -> Permission -> Permission
releasePermission effectiveRelease userVolumePerm =
  if userVolumePerm >= readPermission effectiveRelease
    then userVolumePerm
    else PermissionNONE
-- | Compute the permission the current user effectively holds on a data
-- object, given projections for the object's effective release and for
-- the user's role/policy on the containing volume.  Restricted public and
-- shared viewers are checked against the *private* effective release;
-- every other role is checked against the *public* effective release with
-- its policy stripped via 'extractPermissionIgnorePolicy'.
dataPermission4 :: (a -> EffectiveRelease) -> (a -> VolumeRolePolicy) -> a -> Permission
dataPermission4 getObjEffectiveRelease getCurrentUserVolumeRole obj =
  let
    effRelease = getObjEffectiveRelease obj
  in
    case getCurrentUserVolumeRole obj of
      RolePublicViewer PublicRestrictedPolicy ->
        releasePermission (effRelPrivate effRelease) PermissionPUBLIC
      RoleSharedViewer SharedRestrictedPolicy ->
        releasePermission (effRelPrivate effRelease) PermissionSHARED
      -- other levels that behave more like private (options: none, shared, read, edit, admin) ?
      rp ->
        releasePermission (effRelPublic effRelease) (extractPermissionIgnorePolicy rp)
-- | True when the current user's effective permission on the object (per
-- 'dataPermission4') is anything above 'PermissionNONE'.
canReadData2 :: (a -> EffectiveRelease) -> (a -> VolumeRolePolicy) -> a -> Bool
canReadData2 effReleaseOf volumeRoleOf =
  (> PermissionNONE) . dataPermission4 effReleaseOf volumeRoleOf
-- | Serialise an 'Access' as a JSON object with @site@ and @member@
-- fields (RecordWildCards brings accessSite'/accessMember' into scope).
accessJSON :: JSON.ToObject o => Access -> o
accessJSON Access{..} =
  "site" JSON..= accessSite'
    <> "member" JSON..= accessMember'
-- | Responses to 'checkPermission'
data PermissionResponse a
  = PermissionGranted a
  -- ^ Whatever you wanted, you got it! Carries the object itself.
  | PermissionDenied
  -- ^ No. The requested permission exceeded the grant.
-- | Decorate some permissioned object with a permission response.
-- Grants whenever the object's permission is at least the requested one.
-- TODO: Maybe replace with requestAccess
checkPermission
  :: (a -> Permission) -- ^ Extract the object's permission rules
  -> a -- ^ The object in question
  -> Permission -- ^ The requested permission
  -> PermissionResponse a
  -- ^ The object decorated with the permission response
checkPermission grantedPermsOf subject wanted =
  case compare (grantedPermsOf subject) wanted of
    LT -> PermissionDenied
    _  -> PermissionGranted subject
| databrary/databrary | src/Model/Permission.hs | agpl-3.0 | 5,151 | 0 | 12 | 897 | 708 | 388 | 320 | 79 | 3 |
-- Copyright 2020 Google LLC
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
{-# LANGUAGE OverloadedStrings #-}
module Main
( main,
)
where
import Bazel.Query
import Bazel.Name (parseLabel)
import GHC.Stack
import Test.Framework (defaultMain, Test, testGroup)
import Test.Framework.Providers.HUnit (testCase)
import Test.HUnit ((@?=), (@?=), assertFailure)
-- | Assert that a query is non-empty and renders to exactly the given
-- string; fails if 'nonEmptyQuery' collapses the query to nothing.
assertRender :: HasCallStack => Query -> String -> IO ()
assertRender q s = case nonEmptyQuery q of
  Nothing -> assertFailure $ "Query unexpectedly rendered as empty: " ++ show q
  Just q' -> renderQueryStr q' @?= s
-- | Assert that a query collapses to the empty query; fails (showing the
-- rendered text) if it does not.
assertEmpty :: HasCallStack => Query -> IO ()
assertEmpty q = case nonEmptyQuery q of
  Nothing -> return ()
  Just q' -> assertFailure $ "Expected query to be empty; got: "
    ++ renderQueryStr q'
-- | Rendering tests for the Bazel query EDSL: one case per combinator,
-- each pinning the exact rendered query string.
main :: IO ()
main =
  defaultMain
    [ testCase "word" $ do
        let q = word "foo"
        assertRender q "foo",
      testCase "string literal" $
        assertRender "foo" "foo",
      testCase "fun" $ do
        let q = fun "foo" [intArg 42, "some_arg"]
        assertRender q "foo(42, some_arg)",
      testCase "allpaths" $ do
        let q = allpaths "//foo/bar" "//hee/haw"
        assertRender q "allpaths(//foo/bar, //hee/haw)",
      testCase "attr" $ do
        let q = attr "srcs" "//package/file" "//package:*"
        assertRender q "attr(srcs, //package/file, //package:*)",
      testCase "deps" $ do
        let q = deps "//foo"
        assertRender q "deps(//foo)",
      testCase "depsUpto" $ do
        let q = depsUpto "//foo" 3
        assertRender q "deps(//foo, 3)",
      testCase "filter" $ do
        let q = filterQ "\\.cc$" (deps "//foo")
        assertRender q "filter(\\.cc$, deps(//foo))",
      testCase "kind" $ do
        let q = kind "haskell_" "//haskell/tools/..."
        assertRender q "kind(haskell_, //haskell/tools/...)",
      testCase "somepaths" $ do
        let q = somepaths "//foo/bar" "//hee/haw"
        assertRender q "somepaths(//foo/bar, //hee/haw)",
      -- Nested query arguments render inside the enclosing call.
      testCase "queryArg" $ do
        let q1 = fun "attr" [intArg 42, "some_arg"]
            q2 = strFun "kind" ["haskell_", queryArg q1]
        assertRender q2 "kind(haskell_, attr(42, some_arg))",
      -- Binary set operators parenthesise their operands.
      testGroup "combine" $
        let q1 = "//some/path/..."
            q2 = "//some/path/internal/..."
         in [ testCase "intersection" $
                assertRender (q1 <^> q2)
                  "(//some/path/... ^ //some/path/internal/...)",
              testCase "union" $
                assertRender (q1 <+> q2)
                  "(//some/path/... + //some/path/internal/...)",
              testCase "except" $
                assertRender (q1 <-> q2)
                  "(//some/path/... - //some/path/internal/...)"
            ],
      testCase "let-in" $ do
        let q =
              letIn ("v", "foo/...") $
                allpaths (var "v") "//common" <^> var "v"
        assertRender q
          "let v = foo/... in (allpaths($v, //common) ^ $v)",
      -- n-ary intersection/union render as right-nested binary operators.
      testGroup "intersection"
        [ testCase "empty" $ assertEmpty (intersection []),
          testCase "group of 3" $
            assertRender (intersection [word "x", word "y", word "z"])
              "(y ^ (z ^ x))"
        ],
      testGroup "union"
        [ testCase "empty" $ assertEmpty (union []),
          testCase "group of 3" $
            assertRender (union [word "x", word "y", word "z"])
              "(y + (z + x))"
        ],
      testCase "labelToQuery" $ do
        let label = parseLabel "//some/path:target"
        (assertRender . labelToQuery) label "//some/path:target",
      emptyQueryTests
    ]
-- | How the 'empty' query propagates through each combinator: it is an
-- identity for union, annihilates intersection, is dropped from function
-- arguments, and interacts with let-bindings as shown case by case.
emptyQueryTests :: Test
emptyQueryTests = testGroup "empty"
  [ testGroup "union"
    [ testCase "self identity" $ assertEmpty (empty <+> empty),
      testCase "left identity" $ assertRender (empty <+> word "x") "x",
      testCase "right identity" $ assertRender ("x" <+> empty) "x",
      testCase "left-associated" $
        assertRender (("x" <+> empty) <+> "y") "(x + y)",
      testCase "right-associated" $
        assertRender ("x" <+> (empty <+> "y")) "(x + y)"
    ],
    testGroup "intersect"
    [ testCase "self identity" $ assertEmpty (empty <^> empty),
      testCase "left empty" $ assertEmpty (empty <^> "x"),
      testCase "right empty" $ assertEmpty ("x" <^> empty),
      testCase "left-associated" $ assertEmpty (("x" <^> empty) <^> "y"),
      testCase "right-associated" $ assertEmpty ("x" <^> (empty <^> "y"))
    ],
    testGroup "except"
    [ testCase "self identity" $ assertEmpty (empty <-> empty),
      testCase "left empty" $ assertEmpty (empty <-> word "x"),
      testCase "right identity" $ assertRender ("x" <-> empty) "x",
      testCase "left-associated" $
        assertRender (("x" <-> empty) <-> "y") "(x - y)",
      testCase "right-associated" $
        assertRender ("x" <-> ("y" <-> empty)) "(x - y)"
    ],
    testCase "fun" $
      assertEmpty $ fun "foo" [intArg 42, queryArg empty],
    testGroup "bind"
    [ testCase "binding to empty" $
        assertEmpty $ letIn ("v", empty) (var "v"),
      testCase "unused binding" $
        assertRender
          (letIn ("v", empty) $ letIn ("w", "x") (var "w"))
          "let w = x in $w",
      testCase "union with binding to empty" $
        assertRender
          (letIn ("v", empty) $ letIn ("w", "x") (var "w" <+> var "v"))
          "let w = x in $w",
      testCase "unused binding, empty result" $
        assertEmpty $ letIn ("v", "x") empty,
      testCase "binding to intersection with empty" $
        assertEmpty $ letIn ("v", "x" <^> empty) (var "v")
    ]
  ]
| google/hrepl | bazel/Bazel/QueryTest.hs | apache-2.0 | 6,194 | 0 | 17 | 1,801 | 1,584 | 787 | 797 | -1 | -1 |
module AlecSequences.A273191 (a273191) where
import AlecSequences.A273190 (a273190)
import Helpers.ListHelpers (runLengths)
-- | 1-indexed accessor into 'a273191_list'.
-- NOTE(review): partial for n < 1 ('!!' with a negative index); callers
-- are assumed to pass n >= 1, per OEIS offset convention — confirm.
a273191 :: Int -> Int
a273191 n = a273191_list !! (n - 1)
-- Infinite list built from A273190 over n = 0,1,2,...; 'runLengths'
-- presumably yields the lengths of runs of equal terms — confirm against
-- Helpers.ListHelpers.
a273191_list :: [Int]
a273191_list = runLengths $ map a273190 [0..]
| peterokagey/haskellOEIS | src/AlecSequences/A273191.hs | apache-2.0 | 252 | 0 | 7 | 35 | 84 | 48 | 36 | 7 | 1 |
module Helper.Name (snakeCase, camelCase) where
import Prelude
import Data.Char
import Data.List.Split
-- | Join the dot-separated components of a path with underscores,
-- e.g. @\"a.b.c\"@ becomes @\"a_b_c\"@.
snakeCase :: FilePath -> String
snakeCase fp =
  -- splitOn never yields an empty list, so the [] case is unreachable.
  case splitOn "." fp of
    []         -> ""
    (p : rest) -> p ++ concatMap ('_' :) rest
-- | Capitalise the first letter of each dot-separated component of a
-- path and concatenate them, e.g. @\"foo.bar\"@ becomes @\"FooBar\"@.
camelCase :: FilePath -> String
camelCase fp = concatMap capitalise (splitOn "." fp)
  where
    capitalise (c : cs) = toUpper c : cs
    capitalise s        = s
| fabianbergmark/APIs | src/Helper/Name.hs | bsd-2-clause | 406 | 0 | 9 | 98 | 168 | 90 | 78 | 14 | 2 |
module EqExercises where
-- | A trivial wrapper around 'Integer', used to practise writing an
-- 'Eq' instance by hand.
data TisAnInteger = TisAn Integer

-- Two wrapped values are equal exactly when the underlying integers are.
instance Eq TisAnInteger where
  TisAn a == TisAn b = a == b
| OCExercise/haskellbook-solutions | chapters/chapter06/exercises/scratch/eq_exercises.hs | bsd-2-clause | 132 | 0 | 8 | 27 | 49 | 27 | 22 | 4 | 0 |
{-# LANGUAGE ConstraintKinds #-}
module Opaleye.Internal.Column where
import Data.String
import qualified Opaleye.Internal.HaskellDB.PrimQuery as HPQ
-- | A column of a @Query@, of type @pgType@. For example 'Column'
-- @PGInt4@ is an @int4@ column and a 'Column' @PGText@ is a @text@
-- column.
--
-- Do not use the 'Show' instance of 'Column'. It is considered
-- deprecated and will be removed in version 0.7.
--
-- Internally just an untyped 'HPQ.PrimExpr'; @pgType@ is a phantom
-- parameter used only for compile-time checking.
newtype Column pgType = Column HPQ.PrimExpr deriving Show
-- | Only used within a 'Column', to indicate that it can be @NULL@.
-- For example, a 'Column' ('Nullable' @PGText@) can be @NULL@ but a
-- 'Column' @PGText@ cannot.
--
-- Purely a type-level tag; the single nullary constructor is never
-- used at the value level.
data Nullable a = Nullable
-- | Project out the untyped primitive expression inside a 'Column'.
unColumn :: Column a -> HPQ.PrimExpr
unColumn (Column primExpr) = primExpr
-- | Treat a 'Column' as though it were of a different type.  If such a
-- treatment is not valid then Postgres may fail with an error at SQL run
-- time.  Only the phantom type changes; the wrapped expression is passed
-- through untouched.
unsafeCoerceColumn :: Column a -> Column b
unsafeCoerceColumn (Column primExpr) = Column primExpr
-- | Cast a column to any other type.  Implements Postgres's @::@ or
-- @CAST( ... AS ... )@ operations.  This is safe for some conversions,
-- such as uuid to text.
unsafeCast :: String -> Column a -> Column b
unsafeCast typeName (Column e) = Column (HPQ.CastExpr typeName e)
-- | Project a named field out of a composite-typed column, wrapping the
-- result as a column of the field's (caller-asserted) type.
unsafeCompositeField :: Column a -> String -> Column b
unsafeCompositeField (Column compositeExpr) name =
  Column (HPQ.CompositeExpr compositeExpr name)
-- | Lift a primitive binary operator onto typed columns.  The result
-- type is unconstrained; callers are responsible for choosing it.
binOp :: HPQ.BinOp -> Column a -> Column b -> Column c
binOp op (Column e) (Column e') = Column (HPQ.BinExpr op e e')
-- | Lift a primitive unary operator onto typed columns.
unOp :: HPQ.UnOp -> Column a -> Column b
unOp op (Column e) = Column (HPQ.UnExpr op e)
-- For import order reasons we can't make the return type PGBool
-- | SQL @CASE@: the first alternative whose condition holds wins;
-- otherwise the fallback value is used.
unsafeCase_ :: [(Column pgBool, Column a)] -> Column a -> Column a
unsafeCase_ alts (Column otherwise_) = Column (HPQ.CaseExpr (unColumns alts) otherwise_)
  where unColumns = map (\(Column e, Column e') -> (e, e'))
-- | If-then-else expressed as a single-alternative @CASE@.
unsafeIfThenElse :: Column pgBool -> Column a -> Column a -> Column a
unsafeIfThenElse cond t f = unsafeCase_ [(cond, t)] f
-- | Untyped greater-than comparison (result claimed boolean by callers).
unsafeGt :: Column a -> Column a -> Column pgBool
unsafeGt = binOp (HPQ.:>)
-- | Untyped equality comparison (result claimed boolean by callers).
unsafeEq :: Column a -> Column a -> Column pgBool
unsafeEq = binOp (HPQ.:==)
-- | Column types that can be built from integer literals.
-- 'pgFromInteger' defaults to 'sqlFromInteger'; instances need only
-- define the latter.
class PGNum a where
  pgFromInteger :: Integer -> Column a
  pgFromInteger = sqlFromInteger
  sqlFromInteger :: Integer -> Column a
type SqlNum = PGNum
-- | Numeric operations on columns, mapped to the corresponding SQL
-- operators via 'binOp' / 'unOp'.
instance SqlNum a => Num (Column a) where
  fromInteger = pgFromInteger
  (*) = binOp (HPQ.:*)
  (+) = binOp (HPQ.:+)
  (-) = binOp (HPQ.:-)
  abs = unOp HPQ.OpAbs
  negate = unOp HPQ.OpNegate
-- We can't use Postgres's 'sign' function because it returns only a
-- numeric or a double
  signum c = unsafeCase_ [(c `unsafeGt` 0, 1), (c `unsafeEq` 0, 0)] (-1)
-- | Column types that can be built from rational literals.
-- 'pgFromRational' defaults to 'sqlFromRational'.
class PGFractional a where
  pgFromRational :: Rational -> Column a
  pgFromRational = sqlFromRational
  sqlFromRational :: Rational -> Column a
type SqlFractional = PGFractional
-- | Division on columns, mapped to the SQL @/@ operator.
instance (SqlNum a, SqlFractional a) => Fractional (Column a) where
  fromRational = sqlFromRational
  (/) = binOp (HPQ.:/)
-- | A dummy typeclass whose instances support integral operations.
-- It has no methods; it exists only as a constraint marker.
class PGIntegral a
type SqlIntegral = PGIntegral
-- | Column types that can be built from string literals.
-- 'pgFromString' defaults to 'sqlFromString'.
class PGString a where
  pgFromString :: String -> Column a
  pgFromString = sqlFromString
  sqlFromString :: String -> Column a
type SqlString = PGString
-- | Enables @OverloadedStrings@ literals for string-like columns.
instance SqlString a => IsString (Column a) where
  fromString = sqlFromString
| WraithM/haskell-opaleye | src/Opaleye/Internal/Column.hs | bsd-3-clause | 3,461 | 0 | 11 | 657 | 959 | 511 | 448 | -1 | -1 |
-- CIS 194, Spring 2015
--
-- Test cases for HW 01
module HW01Tests where
import HW01
import Testing
-- Exercise 1 -----------------------------------------
-- Check lastDigit/dropLastDigit on small literals, including the
-- zero and single-digit boundary cases.
ex1Tests :: [Test]
ex1Tests = [ testF1 "lastDigit" lastDigit
             [(123, 3), (1234, 4), (5, 5), (10, 0), (0, 0)]
           , testF1 "dropLastDigit" dropLastDigit
             [(123, 12), (1234, 123), (5, 0), (10, 1), (0,0)]
           ]
-- Exercise 2 -----------------------------------------
-- toRevDigits: note 0 and negatives map to the empty list.
ex2Tests :: [Test]
ex2Tests = [ testF1 "toRevDigits" toRevDigits
             [(12340, [0,4,3,2,1]), (1, [1]), (0, []), (-17, [])]
           ]
-- Exercise 3 -----------------------------------------
ex3Tests :: [Test]
ex3Tests = [ testF1 "doubleEveryOther" doubleEveryOther
             [([], []), ([1], [1]), ([1, 1], [1, 2])]
           ]
-- Exercise 4 -----------------------------------------
ex4Tests :: [Test]
ex4Tests = [ testF1 "sumDigits" sumDigits
             [([], 0), ([10, 5, 18, 4] , 19)]
           ]
-- Exercise 5 -----------------------------------------
-- Luhn checksum validation: one valid and one invalid card number.
ex5Tests :: [Test]
ex5Tests = [ testF1 "luhn" luhn
             [(5594589764218858, True), (1234567898765432 , False)]
           ]
-- Exercise 6 -----------------------------------------
-- Towers of Hanoi: the full move list for two discs.
ex6Tests :: [Test]
ex6Tests = [ testF4 "hanoi" hanoi
             [(2, "a", "b", "c", [("a","c"), ("a","b"), ("c","b")])]
           ]
-- All Tests ------------------------------------------
-- Concatenation of every exercise's test list, in exercise order.
allTests :: [Test]
allTests = concat [ ex1Tests
                  , ex2Tests
                  , ex3Tests
                  , ex4Tests
                  , ex5Tests
                  , ex6Tests
                  ]
| ThomWright/CIS194 | test/HW01Tests.hs | bsd-3-clause | 1,641 | 0 | 10 | 477 | 496 | 315 | 181 | 30 | 1 |
#define IncludedshiftIndicesRight
{-@ automatic-instances shiftIndicesRight @-}
{-@ shiftIndicesRight
  :: lo:INat
  -> hi:Integer
  -> x:RString
  -> input:RString
  -> target:RString
  -> { map (shiftStringRight target x input) (makeIndices input target lo hi) == makeIndices (x <+> input) target (stringLen x + lo) (stringLen x + hi) }
  / [if hi < lo then 0 else hi-lo]
  @-}
-- | Liquid Haskell proof, by induction on the range size (hi - lo):
-- shifting every match index of @target@ in @input@ right by
-- @stringLen x@ yields exactly the match indices of @target@ in
-- @x <+> input@ over the correspondingly shifted range.
shiftIndicesRight :: Integer -> Integer -> RString -> RString -> RString -> Proof
-- Base case: empty range, nothing to prove.
shiftIndicesRight lo hi x input target
  | hi < lo
  = trivial
-- Singleton range where lo is a match: both sides are one-element lists.
shiftIndicesRight lo hi x input target
  | lo == hi, isGoodIndex input target lo
  = map (shiftStringRight target x input) (makeIndices input target lo hi)
  ==. map (shiftStringRight target x input) (lo `C` makeIndices input target (lo+1) hi)
  ==. map (shiftStringRight target x input) (lo `C` N)
  ==. (shiftStringRight target x input lo) `C` (map (shiftStringRight target x input) N)
  ==. (stringLen x + lo) `C` N
  ==. (stringLen x + lo) `C` makeIndices (x <+> input) target (stringLen x + lo + 1) (stringLen x + hi)
  ==. makeIndices (x <+> input) target (stringLen x + lo) (stringLen x + hi)
       ? isGoodIxConcatFront input x target lo
  *** QED
-- Singleton range where lo is not a match: both sides are empty.
shiftIndicesRight lo hi x input target
  | lo == hi
  = map (shiftStringRight target x input) (makeIndices input target lo hi)
  ==. map (shiftStringRight target x input) (makeIndices input target (lo+1) hi)
  ==. map (shiftStringRight target x input) N
  ==. makeIndices (x <+> input) target (stringLen x + lo + 1) (stringLen x + hi)
  ==. makeIndices (x <+> input) target (stringLen x + lo) (stringLen x + hi)
       ? isGoodIxConcatFront input x target lo
  *** QED
-- Inductive step, lo is a match: cons the shifted head onto the
-- recursively-shifted tail.
shiftIndicesRight lo hi x input target
  | isGoodIndex input target lo
  = map (shiftStringRight target x input) (makeIndices input target lo hi)
  ==. map (shiftStringRight target x input) (lo `C` makeIndices input target (lo+1) hi)
  ==. (shiftStringRight target x input lo) `C` (map (shiftStringRight target x input) (makeIndices input target (lo+1) hi))
  ==. (shift (stringLen x) lo) `C` (makeIndices ((<+>) x input) target (stringLen x + (lo+1)) (stringLen x + hi))
       ? shiftIndicesRight (lo+1) hi x input target
  ==. (stringLen x + lo) `C` (makeIndices ((<+>) x input) target (stringLen x + (lo+1)) (stringLen x + hi))
  ==. makeIndices ((<+>) x input) target (stringLen x + lo) (stringLen x + hi)
       ? isGoodIxConcatFront input x target lo
  *** QED
-- Inductive step, lo is not a match: recurse and carry the index lemma.
shiftIndicesRight lo hi x input target
  = shiftIndicesRight (lo+1) hi x input target
  &&& isGoodIxConcatFront input x target lo
{- automatic-instances isGoodIxConcatFront @-}
{- AUTO INSTANCES FAILS -}
{-@ isGoodIxConcatFront
  :: input:RString -> input':RString -> tg:RString -> i:INat
  -> {(isGoodIndex input tg i) <=> (isGoodIndex (input' <+> input) tg (stringLen input' + i))
  } @-}
-- | Lemma: index @i@ is a valid match of @tg@ in @input@ iff
-- @stringLen input' + i@ is a valid match of @tg@ in @input' <+> input@.
-- Proved by unfolding isGoodIndex and applying subStringConcatFront.
isGoodIxConcatFront :: RString -> RString -> RString -> Integer -> Proof
isGoodIxConcatFront input input' tg i
  = isGoodIndex input tg i
  ==. (subString input i (stringLen tg) == tg
      && i + stringLen tg <= stringLen input
      && 0 <= i)
  ==. (subString input i (stringLen tg) == tg
      && (stringLen input' + i) + stringLen tg <= stringLen (input' <+> input)
      && 0 <= i)
  ==. (subString (input' <+> input) (stringLen input' + i) (stringLen tg) == tg
      && (stringLen input' + i) + stringLen tg <= stringLen (input' <+> input)
      && 0 <= (stringLen input' + i))
      ? (subStringConcatFront input input' (stringLen tg) i *** QED)
  ==. isGoodIndex (input' <+> input) tg (stringLen input' + i)
  *** QED
| nikivazou/verified_string_matching | src/AutoProofs/shiftIndicesRight.hs | bsd-3-clause | 3,591 | 15 | 37 | 775 | 1,324 | 674 | 650 | 53 | 1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.