code
stringlengths 5
1.03M
| repo_name
stringlengths 5
90
| path
stringlengths 4
158
| license
stringclasses 15
values | size
int64 5
1.03M
| n_ast_errors
int64 0
53.9k
| ast_max_depth
int64 2
4.17k
| n_whitespaces
int64 0
365k
| n_ast_nodes
int64 3
317k
| n_ast_terminals
int64 1
171k
| n_ast_nonterminals
int64 1
146k
| loc
int64 -1
37.3k
| cycloplexity
int64 -1
1.31k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|
module Data.TrieMap.Representation (Repr(..), genRepr, genOptRepr, genOrdRepr) where
import Data.TrieMap.Representation.Class
import Data.TrieMap.Representation.Instances ()
import Data.TrieMap.Representation.TH
|
lowasser/TrieMap
|
Data/TrieMap/Representation.hs
|
bsd-3-clause
| 212
| 0
| 5
| 14
| 49
| 34
| 15
| 4
| 0
|
module Arhelk.Core.Rule(
Rule
, RuleM
, proposeMany
, propose
, imply
, implyNothing
, implyMap
, runRule
) where
import Control.Monad.Reader
import Control.Monad.Trans.RSS.Strict
import Control.Monad.Writer
import Lens.Simple
-- | Rule computation: a reader over the base value @a@ and a writer
-- accumulating hypotheses @b@, implemented on top of 'RSST' with unit
-- state over 'Identity'. The instances are lifted from the underlying
-- transformer via newtype deriving.
newtype RuleM a b c = RuleM { unRuleM :: RSST a b () Identity c }
  deriving (Functor, Applicative, Monad, MonadReader a, MonadWriter b)

-- | A rule reads a base value of type @a@ and emits a list of
-- hypotheses of the same type.
type Rule a = RuleM a [a] ()
-- | Propose a single hypothesis to the following actions. Every
-- hypothesis produced by the inner action has the given value written
-- through the setter (wrapped in 'Just').
--
-- Examples:
--
-- >>> propose fieldA valueA $ imply fieldB valueB
-- [{ fieldA = Just valueA, fieldB = Just valueB }]
--
-- >>> propose fieldA valueA (imply fieldB valueB1 >> imply fieldB valueB2)
-- [{ fieldA = Just valueA, fieldB = Just valueB1 }, { fieldA = Just valueA, fieldB = Just valueB2 }]
propose :: Setter a a' b (Maybe b') -> b' -> RuleM r [a] c -> RuleM r [a'] c
propose field value = proposeMany field [value]
-- | Propose multiple hypotheses to the following actions. Each value is
-- combined with each hypothesis the inner action produces, written
-- through the setter (wrapped in 'Just').
--
-- Examples:
--
-- >>> proposeMany fieldA [valueA1, valueA2] $ imply fieldB valueB
-- [{ fieldA = Just valueA1, fieldB = Just valueB }, { fieldA = Just valueA2, fieldB = Just valueB }]
--
-- >>> proposeMany fieldA [valueA1, valueA2] (imply fieldB valueB1 >> imply fieldB valueB2)
-- [{ fieldA = Just valueA1, fieldB = Just valueB1 }, { fieldA = Just valueA1, fieldB = Just valueB2 }, { fieldA = Just valueA2, fieldB = Just valueB1 }, { fieldA = Just valueA2, fieldB = Just valueB2 }]
proposeMany :: Setter a a' b (Maybe b') -> [b'] -> RuleM r [a] c -> RuleM r [a'] c
proposeMany field values (RuleM inner) = do
  env <- ask
  -- Run the inner rule locally to capture the hypotheses it emits.
  let Identity (result, _, hyps) = runRSST inner env ()
  -- Cross product: each proposed value against each inner hypothesis,
  -- in the same order as the original (values outermost).
  tell [set field (Just v) h | v <- values, h <- hyps]
  return result
-- | Generate a new hypothesis by writing the given value (wrapped in
-- 'Just') through the setter into the base value from the environment.
--
-- >>> imply fieldA valueA
-- { fieldA = Just valueA, fieldB = Nothing, ... }
imply :: Setter a a' b (Maybe b') -> b' -> RuleM a [a'] ()
imply field value = ask >>= \base -> tell [set field (Just value) base]
-- | Emit the base value itself, unmodified, as a hypothesis.
--
-- >>> implyNothing
-- { fieldA = Nothing, fieldB = Nothing, ... }
implyNothing :: RuleM a [a] ()
implyNothing = ask >>= \base -> tell [base]
-- | Transform all hypotheses produced by the given rule.
--
-- @implyMap f rule@ applies __f__ to each hypothesis produced by
-- __rule__ and concatenates the results.
implyMap :: (a -> [a']) -> RuleM r [a] c -> RuleM r [a'] c
implyMap f (RuleM inner) = do
  env <- ask
  -- Run the inner rule locally to capture its hypotheses.
  let Identity (result, _, hyps) = runRSST inner env ()
  tell (concatMap f hyps)
  return result
-- | Run a rule against the 'mempty' base value and return all produced
-- hypotheses.
runRule :: Monoid a => Rule a -> [a]
runRule rule = hyps
  where
    Identity (_, _, hyps) = runRSST (unRuleM rule) mempty ()
|
Teaspot-Studio/arhelk-core
|
src/Arhelk/Core/Rule.hs
|
bsd-3-clause
| 2,880
| 0
| 14
| 597
| 698
| 380
| 318
| 40
| 1
|
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE DeriveDataTypeable #-}
-- Create a source distribution tarball
module Stack.SDist
( getSDistTarball
, checkSDistTarball
, checkSDistTarball'
) where
import qualified Codec.Archive.Tar as Tar
import qualified Codec.Archive.Tar.Entry as Tar
import qualified Codec.Compression.GZip as GZip
import Control.Applicative
import Control.Concurrent.Execute (ActionContext(..))
import Control.Monad (unless, void, liftM)
import Control.Monad.Catch
import Control.Monad.IO.Class
import Control.Monad.Logger
import Control.Monad.Reader (MonadReader, asks)
import Control.Monad.Trans.Control (liftBaseWith)
import Control.Monad.Trans.Resource
import qualified Data.ByteString as S
import qualified Data.ByteString.Lazy as L
import Data.Data (Data, Typeable, cast, gmapT)
import Data.Either (partitionEithers)
import Data.List
import Data.List.NonEmpty (NonEmpty)
import qualified Data.List.NonEmpty as NE
import qualified Data.Map.Strict as Map
import Data.Maybe (fromMaybe)
import Data.Monoid ((<>))
import qualified Data.Set as Set
import qualified Data.Text as T
import qualified Data.Text.Lazy as TL
import qualified Data.Text.Lazy.Encoding as TLE
import Data.Time.Clock.POSIX
import Distribution.Package (Dependency (..))
import qualified Distribution.PackageDescription.Check as Check
import Distribution.PackageDescription.PrettyPrint (showGenericPackageDescription)
import Distribution.Version (simplifyVersionRange, orLaterVersion, earlierVersion)
import Distribution.Version.Extra
import Network.HTTP.Client.Conduit (HasHttpManager)
import Path
import Path.IO hiding (getModificationTime, getPermissions)
import Prelude -- Fix redundant import warnings
import Stack.Build (mkBaseConfigOpts)
import Stack.Build.Execute
import Stack.Build.Installed
import Stack.Build.Source (loadSourceMap, getDefaultPackageConfig)
import Stack.Build.Target
import Stack.Constants
import Stack.Package
import Stack.Types
import Stack.Types.Internal
import System.Directory (getModificationTime, getPermissions)
import qualified System.FilePath as FP
-- | Special exception to throw when you want to fail because of bad results
-- of package check.
data CheckException
  = CheckException (NonEmpty Check.PackageCheck)
  deriving (Typeable)

instance Exception CheckException

instance Show CheckException where
  show (CheckException xs) =
    "Package check reported the following errors:\n" ++
    (intercalate "\n" . fmap show . NE.toList $ xs)

-- | Constraint synonym bundling everything the sdist operations need:
-- IO, reader access to the environment, HTTP, logging, lifted control,
-- and masking for exception-safe resource handling.
type M env m = (MonadIO m,MonadReader env m,HasHttpManager env,MonadLogger m,MonadBaseControl IO m,MonadMask m,HasLogLevel env,HasEnvConfig env,HasTerminal env)
-- | Given the path to a local package, creates its source
-- distribution tarball.
--
-- While this yields a 'FilePath', the name of the tarball, this
-- tarball is not written to the disk and instead yielded as a lazy
-- bytestring.
getSDistTarball
  :: M env m
  => Maybe PvpBounds -- ^ Override Config value
  -> Path Abs Dir -- ^ Path to local package
  -> m (FilePath, L.ByteString) -- ^ Filename and tarball contents
getSDistTarball mpvpBounds pkgDir = do
  config <- asks getConfig
  let pvpBounds = fromMaybe (configPvpBounds config) mpvpBounds
      -- When PVP bounds are requested, the .cabal file inside the
      -- tarball is rewritten by getCabalLbs rather than copied.
      tweakCabal = pvpBounds /= PvpBoundsNone
      pkgFp = toFilePath pkgDir
  lp <- readLocalPackage pkgDir
  $logInfo $ "Getting file list for " <> T.pack pkgFp
  (fileList, cabalfp) <- getSDistFileList lp
  $logInfo $ "Building sdist tarball for " <> T.pack pkgFp
  files <- normalizeTarballPaths (lines fileList)
  -- NOTE: Could make this use lazy I/O to only read files as needed
  -- for upload (both GZip.compress and Tar.write are lazy).
  -- However, it seems less error prone and more predictable to read
  -- everything in at once, so that's what we're doing for now:
  let tarPath isDir fp = either error id
        (Tar.toTarPath isDir (pkgId FP.</> fp))
      packWith f isDir fp = liftIO $ f (pkgFp FP.</> fp) (tarPath isDir fp)
      packDir = packWith Tar.packDirectoryEntry True
      packFile fp
        -- The package's own .cabal file may be replaced by the
        -- PVP-bounds-tweaked version; everything else is packed as-is.
        | tweakCabal && isCabalFp fp = do
            lbs <- getCabalLbs pvpBounds $ toFilePath cabalfp
            return $ Tar.fileEntry (tarPath False fp) lbs
        | otherwise = packWith packFileEntry False fp
      isCabalFp fp = toFilePath pkgDir FP.</> fp == toFilePath cabalfp
      tarName = pkgId FP.<.> "tar.gz"
      pkgId = packageIdentifierString (packageIdentifier (lpPackage lp))
  dirEntries <- mapM packDir (dirsFromFiles files)
  fileEntries <- mapM packFile files
  return (tarName, GZip.compress (Tar.write (dirEntries ++ fileEntries)))
-- | Get the PVP bounds-enabled version of the given cabal file:
-- read it, add missing lower/upper version bounds to every dependency
-- (based on the source map and installed packages), and render it back.
getCabalLbs :: M env m => PvpBounds -> FilePath -> m L.ByteString
getCabalLbs pvpBounds fp = do
  bs <- liftIO $ S.readFile fp
  (_warnings, gpd) <- readPackageUnresolvedBS Nothing bs
  (_, _, _, _, sourceMap) <- loadSourceMap AllowNoTargets defaultBuildOptsCLI
  menv <- getMinimalEnvOverride
  (installedMap, _, _, _) <- getInstalled menv GetInstalledOpts
    { getInstalledProfiling = False
    , getInstalledHaddock = False
    }
    sourceMap
  -- Generic traversal: rewrite every Dependency node in the package
  -- description, leaving everything else untouched.
  let gpd' = gtraverseT (addBounds sourceMap installedMap) gpd
  return $ TLE.encodeUtf8 $ TL.pack $ showGenericPackageDescription gpd'
  where
    addBounds :: SourceMap -> InstalledMap -> Dependency -> Dependency
    addBounds sourceMap installedMap dep@(Dependency cname range) =
      case lookupVersion (fromCabalPackageName cname) of
        -- Unknown package: leave the dependency alone.
        Nothing -> dep
        Just version -> Dependency cname $ simplifyVersionRange
          -- Only add a bound when the range does not already have one.
          $ (if toAddUpper && not (hasUpper range) then addUpper version else id)
          $ (if toAddLower && not (hasLower range) then addLower version else id)
          range
      where
        -- Prefer the source map (local, then upstream), falling back
        -- to whatever is installed.
        lookupVersion name =
          case Map.lookup name sourceMap of
            Just (PSLocal lp) -> Just $ packageVersion $ lpPackage lp
            Just (PSUpstream version _ _ _ _) -> Just version
            Nothing ->
              case Map.lookup name installedMap of
                Just (_, installed) -> Just (installedVersion installed)
                Nothing -> Nothing
        -- PVP-style upper bound: strictly below the next major version.
        addUpper version = intersectVersionRanges
          (earlierVersion $ toCabalVersion $ nextMajorVersion version)
        addLower version = intersectVersionRanges
          (orLaterVersion (toCabalVersion version))
        (toAddLower, toAddUpper) =
          case pvpBounds of
            PvpBoundsNone -> (False, False)
            PvpBoundsUpper -> (False, True)
            PvpBoundsLower -> (True, False)
            PvpBoundsBoth -> (True, True)
-- | Traverse a data type: every immediate subterm that is of type @b@
-- is rewritten with @f@; all other subterms are recursed into.
gtraverseT :: (Data a,Typeable b) => (Typeable b => b -> b) -> a -> a
gtraverseT f = gmapT step
  where
    step x =
      case cast x of
        -- A node of the target type: apply f, then cast the result
        -- back (falling back to the original on a failed cast).
        Just b -> fromMaybe x (cast (f b))
        -- Any other node: keep traversing downwards.
        Nothing -> gtraverseT f x
-- | Read in a 'LocalPackage' config. This makes some default decisions
-- about 'LocalPackage' fields that might not be appropriate for other
-- use-cases.
readLocalPackage :: M env m => Path Abs Dir -> m LocalPackage
readLocalPackage pkgDir = do
  cabalfp <- findOrGenerateCabalFile pkgDir
  config <- getDefaultPackageConfig
  (warnings,package) <- readPackage config cabalfp
  mapM_ (printCabalFileWarning cabalfp) warnings
  return LocalPackage
    { lpPackage = package
    , lpWanted = False -- HACK: makes it so that sdist output goes to a log instead of a file.
    , lpDir = pkgDir
    , lpCabalFile = cabalfp
    -- NOTE: these aren't the 'correct' values, but aren't used in
    -- the usage of this function in this module.
    , lpTestDeps = Map.empty
    , lpBenchDeps = Map.empty
    , lpTestBench = Nothing
    , lpForceDirty = False
    , lpDirtyFiles = Nothing
    , lpNewBuildCache = Map.empty
    , lpFiles = Set.empty
    , lpComponents = Set.empty
    , lpUnbuildable = Set.empty
    }
-- | Returns a newline-separated list of paths, and the absolute path to the .cabal file.
--
-- Works by driving @cabal sdist --list-sources@ through the build
-- execution machinery inside a temporary directory.
getSDistFileList :: M env m => LocalPackage -> m (String, Path Abs File)
getSDistFileList lp =
  withSystemTempDir (stackProgName <> "-sdist") $ \tmpdir -> do
    menv <- getMinimalEnvOverride
    let bopts = defaultBuildOpts
    let boptsCli = defaultBuildOptsCLI
    baseConfigOpts <- mkBaseConfigOpts boptsCli
    (_, _mbp, locals, _extraToBuild, _sourceMap) <- loadSourceMap NeedTargets boptsCli
    -- Capture a run-in-IO function so the callback below can run
    -- actions of the outer monad, discarding their results.
    runInBase <- liftBaseWith $ \run -> return (void . run)
    withExecuteEnv menv bopts boptsCli baseConfigOpts locals
      [] [] [] -- provide empty list of globals. This is a hack around custom Setup.hs files
      $ \ee ->
      withSingleContext runInBase ac ee task Nothing (Just "sdist") $ \_package cabalfp _pkgDir cabal _announce _console _mlogFile -> do
        let outFile = toFilePath tmpdir FP.</> "source-files-list"
        -- Ask Cabal to write the list of source files to outFile.
        cabal False ["sdist", "--list-sources", outFile]
        contents <- liftIO (readFile outFile)
        return (contents, cabalfp)
  where
    package = lpPackage lp
    -- Minimal action context / task so the execution machinery will
    -- run our single "sdist" step for this local package.
    ac = ActionContext Set.empty
    task = Task
      { taskProvides = PackageIdentifier (packageName package) (packageVersion package)
      , taskType = TTLocal lp
      , taskConfigOpts = TaskConfigOpts
        { tcoMissing = Set.empty
        , tcoOpts = \_ -> ConfigureOpts [] []
        }
      , taskPresent = Map.empty
      , taskAllInOne = True
      }
-- | Normalize the file paths reported by Cabal, dropping (with a
-- warning) any path that resolves to outside the package directory.
normalizeTarballPaths :: M env m => [FilePath] -> m [FilePath]
normalizeTarballPaths fps = do
  -- TODO: consider whether erroring out is better - otherwise the
  -- user might upload an incomplete tar?
  unless (null outsideDir) $
    $logWarn $ T.concat
      [ "Warning: These files are outside of the package directory, and will be omitted from the tarball: "
      , T.pack (show outsideDir)]
  return files
  where
    -- Lefts are paths escaping the package dir; Rights are kept.
    (outsideDir, files) = partitionEithers (map pathToEither fps)
    pathToEither fp = maybe (Left fp) Right (normalizePath fp)
-- | Normalize a relative path: resolve every @dir/..@ pair and reject
-- any path that would escape upward (i.e. a ".." survives resolution).
normalizePath :: FilePath -> Maybe FilePath
normalizePath fp = FP.joinPath <$> resolve (FP.splitDirectories (FP.normalise fp))
  where
    resolve [] = Just []
    -- A leading ".." means the path points above the package root.
    resolve ("..":_) = Nothing
    -- "dir/.." cancels out; keep resolving the remainder.
    resolve (_:"..":rest) = resolve rest
    resolve (part:rest) = (part :) <$> resolve rest
-- | All ancestor directories of the given files (excluding "."),
-- sorted ascending. Used to emit directory entries before file entries
-- in the tarball.
dirsFromFiles :: [FilePath] -> [FilePath]
dirsFromFiles files = Set.toAscList (Set.delete "." collected)
  where
    collected = foldl' insertParents Set.empty files
    insertParents acc file = addAncestors acc (FP.takeDirectory file)
    -- Walk up via takeDirectory until we hit a directory already seen
    -- (takeDirectory is a fixpoint at ".", so this terminates).
    addAncestors acc dir
      | dir `Set.member` acc = acc
      | otherwise = addAncestors (Set.insert dir acc) (FP.takeDirectory dir)
-- | Check package in given tarball. This will log all warnings
-- and will throw an exception in case of critical errors.
--
-- Note that we temporarily decompress the archive to analyze it.
checkSDistTarball :: (MonadIO m, MonadMask m, MonadThrow m, MonadLogger m, MonadReader env m, HasEnvConfig env)
                  => Path Abs File -- ^ Absolute path to tarball
                  -> m ()
checkSDistTarball tarball = withTempTarGzContents tarball $ \pkgDir' -> do
  pkgDir <- (pkgDir' </>) `liftM`
    (parseRelDir . FP.takeBaseName . FP.takeBaseName . toFilePath $ tarball)
  --                  ^ drop ".tar"    ^ drop ".gz"
  cabalfp <- findOrGenerateCabalFile pkgDir
  name <- parsePackageNameFromFilePath cabalfp
  config <- getDefaultPackageConfig
  (gdesc, pkgDesc) <- readPackageDescriptionDir config pkgDir
  $logInfo $
    "Checking package '" <> packageNameText name <> "' for common mistakes"
  let pkgChecks = Check.checkPackage gdesc (Just pkgDesc)
  fileChecks <- liftIO $ Check.checkPackageFiles pkgDesc (toFilePath pkgDir)
  let checks = pkgChecks ++ fileChecks
      (errors, warnings) =
        -- Cabal's "build impossible" / "dist inexcusable" checks are
        -- fatal; everything else is only logged as a warning.
        let criticalIssue (Check.PackageBuildImpossible _) = True
            criticalIssue (Check.PackageDistInexcusable _) = True
            criticalIssue _ = False
        in partition criticalIssue checks
  unless (null warnings) $
    $logWarn $ "Package check reported the following warnings:\n" <>
      T.pack (intercalate "\n" . fmap show $ warnings)
  case NE.nonEmpty errors of
    Nothing -> return ()
    Just ne -> throwM $ CheckException ne
-- | Version of 'checkSDistTarball' that first saves lazy bytestring to
-- temporary directory and then calls 'checkSDistTarball' on it.
checkSDistTarball' :: (MonadIO m, MonadMask m, MonadThrow m, MonadLogger m, MonadReader env m, HasEnvConfig env)
                   => String -- ^ Tarball name
                   -> L.ByteString -- ^ Tarball contents as a byte string
                   -> m ()
checkSDistTarball' name bytes = withSystemTempDir "stack" $ \tpath -> do
  npath <- (tpath </>) `liftM` parseRelFile name
  liftIO $ L.writeFile (toFilePath npath) bytes
  checkSDistTarball npath
-- | Decompress and unpack the .tar.gz into a fresh temporary
-- directory, hand that directory to the action, and clean up after.
withTempTarGzContents :: (MonadIO m, MonadMask m, MonadThrow m)
                      => Path Abs File -- ^ Location of tarball
                      -> (Path Abs Dir -> m a) -- ^ Perform actions given dir with tarball contents
                      -> m a
withTempTarGzContents apath f = withSystemTempDir "stack" $ \tpath -> do
  archive <- liftIO $ L.readFile (toFilePath apath)
  liftIO . Tar.unpack (toFilePath tpath) . Tar.read . GZip.decompress $ archive
  f tpath
--------------------------------------------------------------------------------
-- Copy+modified from the tar package to avoid issues with lazy IO ( see
-- https://github.com/commercialhaskell/stack/issues/1344 )

-- | Build a tar entry for a regular file, reading its content
-- strictly (the point of this copy; see the issue link above) and
-- carrying over the executable bit and modification time.
packFileEntry :: FilePath -- ^ Full path to find the file on the local disk
              -> Tar.TarPath -- ^ Path to use for the tar Entry in the archive
              -> IO Tar.Entry
packFileEntry filepath tarpath = do
  mtime <- getModTime filepath
  perms <- getPermissions filepath
  content <- S.readFile filepath
  let size = fromIntegral (S.length content)
  return (Tar.simpleEntry tarpath (Tar.NormalFile (L.fromStrict content) size)) {
    Tar.entryPermissions = if executable perms then Tar.executableFilePermissions
                                               else Tar.ordinaryFilePermissions,
    Tar.entryTime = mtime
  }
-- | Modification time of a file, as integral seconds since the POSIX
-- epoch (truncated with 'floor').
getModTime :: FilePath -> IO Tar.EpochTime
getModTime = fmap (floor . utcTimeToPOSIXSeconds) . getModificationTime
|
phadej/stack
|
src/Stack/SDist.hs
|
bsd-3-clause
| 14,896
| 0
| 19
| 3,775
| 3,586
| 1,879
| 1,707
| 266
| 10
|
import Data.Bits (xor)
-- | Sum, over all triples of bit positions (i, j, k) in [0 .. 62] with
-- non-zero i xor j xor k, of the product of the per-position weights
-- (n `div` 2^i + 1) `div` 2.
solve :: Integer -> Integer
solve n = sum contributions
  where
    weight i = (n `div` (2 ^ i) + 1) `div` 2
    -- Pair each position with its weight once, avoiding repeated (!!).
    table = [(i, weight i) | i <- [0 .. 62 :: Int]]
    contributions =
      [ a * b * c
      | (i, a) <- table
      , (j, b) <- table
      , (k, c) <- table
      , (i `xor` j `xor` k) /= 0
      ]
-- | Print the answer for the fixed problem input, reduced modulo
-- 1234567890.
main :: IO ()
main = print answer
  where
    answer = solve 123456787654321 `mod` 1234567890
|
foreverbell/project-euler-solutions
|
src/509.hs
|
bsd-3-clause
| 335
| 0
| 13
| 90
| 211
| 121
| 90
| 7
| 1
|
{-# LANGUAGE InstanceSigs #-}
-- Also see FAlgebra.hs for an alternative exploration
-- http://programmers.stackexchange.com/questions/242795/what-is-the-free-monad-interpreter-pattern
-- http://www.haskellforall.com/2012/07/purify-code-using-free-monads.html
-- https://ocharles.org.uk/blog/posts/2016-01-26-transformers-free-monads-mtl-laws.html
-- http://degoes.net/articles/modern-fp
-- | Free - An exploration of free monads. Think of this as a light-weight way
-- to create a DSL. We pull our code into a data type and use a 'Free' monad to
-- provide the supporting infrastructure to make it easier to work with. Once
-- done, we can define different interpreters to operate over our DSL as we
-- like. One that adds logging, one that runs against the production database,
-- one that mocks the database.
module Free where
-- | First DSL attempt. Non-composable!
data DSL1
  = Get1 String        -- ^ fetch by key
  | Set1 String String -- ^ store key/value
-- | Second DSL: add in 'next' type parameter for composing operations.
data DSL2 next
  = Get String (String -> next) -- ^ fetch; the continuation receives the value
  | Set String String next      -- ^ store key/value, then continue
  | End -- terminator / nil
-- | Run a get and then set the fetched value against a new key. Note
-- how each composed operation adds a layer to the type.
getset1 :: DSL2 (DSL2 (DSL2 next))
getset1 = Get "key1" (\fetched -> Set "key2" fetched End)
-- Two observations:
-- (1) The 'next' parameter makes DSL2 a Functor - and there is only
-- one sensible instance.
-- (2) DSL2 is annoying to work with because composition of operators
-- is reflected into the type; we would rather build a type-level list
-- so the value-level program does not grow its type.
-- Fixing (1): map over whatever comes "next" in each constructor.
instance Functor DSL2 where
  fmap g (Get key cont) = Get key (g . cont)
  fmap g (Set key val cont) = Set key val (g cont)
  fmap _ End = End
-- Fixing (2): This is where a 'free monad' comes in - giving us a type level
-- list.
data Free f a
  = Free (f (Free f a)) -- ^ one layer of the functor wrapping the rest
  | Return a            -- ^ the final result
-- We can now give this type to getset - no growth!
getset2 :: Free DSL2 next
getset2 = Free $ Get "key1" $ \fetched -> Free $ Set "key2" fetched $ Free End
-- Simple value
example1 :: Free DSL2 ()
example1 = Return ()

-- The smallest non-trivial program: just the terminator.
example2 :: Free DSL2 next
example2 = Free End
-- lets make example2 explicit in how the types are instantiated
example3 :: Free DSL2 next
example3 = Free x
  -- We instantiate DSL2 next as:
  where x = End :: DSL2 (Free DSL2 next)

-- Same game with a 'Set': each layer is DSL2 applied to the remainder.
example4 :: Free DSL2 next
example4 = Free s
  where s = Set "key" "val" (Free e) :: DSL2 (Free DSL2 next)
        e = End :: DSL2 (Free DSL2 next)
-- One problem still - the above is very annoying to work with! So let's
-- construct a monad to avoid the boilerplate.

-- Map under every layer of the functor until we reach the result.
instance Functor f => Functor (Free f) where
  fmap :: (a -> b) -> Free f a -> Free f b
  fmap g (Free inner) = Free (fmap g <$> inner)
  fmap g (Return x) = Return (g x)
-- Push the application through each functor layer on the left.
instance Functor f => Applicative (Free f) where
  pure :: a -> Free f a
  pure = Return
  (<*>) :: Free f (a -> b) -> Free f a -> Free f b
  Free inner <*> arg = Free ((<*> arg) <$> inner)
  Return g <*> arg = fmap g arg
-- Bind substitutes the continuation for every Return leaf.
instance Functor f => Monad (Free f) where
  (>>=) :: Free f a -> (a -> Free f b) -> Free f b
  Free inner >>= k = Free ((>>= k) <$> inner)
  Return x >>= k = k x
  return :: a -> Free f a
  return = pure
-- nicer again using monad
getset3 :: Free DSL2 next
getset3 = do
  key1 <- Free $ Get "key1" Return
  Free $ Set "key2" key1 (Return ())
  Free End
-- unpacking: nicer again using monad (getset3 desugared to >>= / >>)
getset3' :: Free DSL2 next
getset3' =
  Free (Get "key1" Return) >>= \key1 ->
  Free (Set "key2" key1 (Return ())) >>
  Free End
-- A set followed by termination, sequenced with (>>).
example5 :: Free DSL2 next
example5 = Free (Set "key2" "val" (Return ())) >> Free End
-- (>>) a b = a >>= \_ -> b
-- Free $ fmap (>>= \_ -> (Free End)) (Set "key2" "val" (Return ()))
-- Fixing (3): wrapping with Free and Return is mechanical - lifting an
-- action into 'Free' always looks the same, so capture it once.
liftFree :: Functor f => f a -> Free f a
liftFree = Free . fmap Return
-- Using (3) we can write nicer versions of our DSL.
-- (Fix: these were missing top-level type signatures, leaving their
-- types to inference; the signatures below are the inferred ones.)

-- | Fetch the value stored under @key@.
get :: String -> Free DSL2 String
get key = liftFree $ Get key id

-- | Store @val@ under @key@.
set :: String -> String -> Free DSL2 ()
set key val = liftFree $ Set key val ()

-- | Terminate the program.
end :: Free DSL2 a
end = liftFree $ End
-- Finally: the same program as getset2/getset3, in plain do-notation
-- with the smart constructors.
getset4 :: Free DSL2 a
getset4 = do
  key1 <- get "key1"
  set "key2" key1
  end
|
dterei/Scraps
|
haskell/freemtl/Free.hs
|
bsd-3-clause
| 4,193
| 0
| 13
| 963
| 1,101
| 573
| 528
| 69
| 1
|
{-# LANGUAGE PatternGuards #-}
module IRTS.CodegenJava (codegenJava) where
import Idris.Core.TT hiding (mkApp)
import IRTS.CodegenCommon
import IRTS.Java.ASTBuilding
import IRTS.Java.JTypes
import IRTS.Java.Mangling
import IRTS.Java.Pom (pomString)
import IRTS.Lang
import IRTS.Simplified
import IRTS.System
import Util.System
import Control.Applicative hiding (Const)
import Control.Arrow
import Control.Monad
import Control.Monad.Error
import qualified Control.Monad.Trans as T
import Control.Monad.Trans.State
import Data.List (foldl', isSuffixOf)
import qualified Data.Text as T
import qualified Data.Text.IO as TIO
import qualified Data.Vector.Unboxed as V
import Language.Java.Parser
import Language.Java.Pretty
import Language.Java.Syntax hiding (Name)
import qualified Language.Java.Syntax as J
import System.Directory
import System.Exit
import System.FilePath
import System.IO
import System.Process
-----------------------------------------------------------------------
-- Main function

-- | Entry point of the Java backend: compile the given declarations to
-- the requested output type. Each output type builds on the previous
-- one: raw sources -> Maven project -> compiled classes / jar.
codegenJava :: [(Name, SExp)] -> -- initialization of globals
               [(Name, SDecl)] -> -- decls
               FilePath -> -- output file name
               [String] -> -- headers
               [String] -> -- libs
               OutputType ->
               IO ()
codegenJava globalInit defs out hdrs libs exec =
  withTgtDir exec out (codegenJava' exec)
  where
    codegenJava' :: OutputType -> FilePath -> IO ()
    -- Raw: just emit the .java source file.
    codegenJava' Raw tgtDir = do
      srcDir <- prepareSrcDir exec tgtDir
      generateJavaFile globalInit defs hdrs srcDir out
    -- MavenProject: raw sources plus a generated pom.xml.
    codegenJava' MavenProject tgtDir = do
      codegenJava' Raw tgtDir
      generatePom tgtDir out libs
    -- Object: compile via Maven and keep the .class files.
    codegenJava' Object tgtDir = do
      codegenJava' MavenProject tgtDir
      invokeMvn tgtDir "compile"
      copyClassFiles tgtDir out
      cleanUpTmp tgtDir
    -- Anything else: package an executable jar.
    codegenJava' _ tgtDir = do
      codegenJava' MavenProject tgtDir
      invokeMvn tgtDir "package";
      copyJar tgtDir out
      makeJarExecutable out
      cleanUpTmp tgtDir
-----------------------------------------------------------------------
-- Compiler IO

-- | Choose the directory a given output type builds in and run the
-- supplied action on it: raw output builds next to the target file, a
-- Maven project builds in the target path itself, and everything else
-- builds inside a temporary directory.
withTgtDir :: OutputType -> FilePath -> (FilePath -> IO ()) -> IO ()
withTgtDir outTy out action =
  case outTy of
    Raw          -> action (dropFileName out)
    MavenProject -> createDirectoryIfMissing False out >> action out
    _            -> withTempdir (takeBaseName out) action
-- | Ensure the Java source directory exists. Raw output compiles in
-- place; all other output types use the Maven layout @src/main/java@
-- below the target directory.
prepareSrcDir :: OutputType -> FilePath -> IO FilePath
prepareSrcDir Raw tgtDir = return tgtDir
prepareSrcDir _ tgtDir = do
  createDirectoryIfMissing True srcDir
  return srcDir
  where
    srcDir = tgtDir </> "src" </> "main" </> "java"
-- | File name of the generated .java source, derived from the output
-- target's class name ('mkClassName' failures abort via 'error').
javaFileName :: FilePath -> FilePath -> FilePath
javaFileName srcDir out =
  case mkClassName out of
    Left err          -> error err
    Right (Ident cls) -> srcDir </> cls <.> "java"
-- | Render the compilation unit for all definitions and write it to
-- the computed .java file ('error' aborts on code generation failure).
generateJavaFile :: [(Name, SExp)] -> -- initialization of globals
                    [(Name, SDecl)] -> -- definitions
                    [String] -> -- headers
                    FilePath -> -- Source dir
                    FilePath -> -- output target
                    IO ()
generateJavaFile globalInit defs hdrs srcDir out = do
  let code = either error
             (prettyPrint)-- flatIndent . prettyPrint)
             (evalStateT (mkCompilationUnit globalInit defs hdrs out) mkCodeGenEnv)
  writeFile (javaFileName srcDir out) code
-- | Location of the generated Maven @pom.xml@ inside the target dir.
pomFileName :: FilePath -> FilePath
pomFileName = (</> "pom.xml")
-- | Write the Maven POM for the build. The class name is derived from
-- the output target ('mkClassName' failures abort via 'error').
generatePom :: FilePath -> -- tgt dir
               FilePath -> -- output target
               [String] -> -- libs
               IO ()
generatePom tgtDir out libs = writeFile (pomFileName tgtDir) pomContents
  where
    Ident clsName = either error id (mkClassName out)
    pomContents = pomString clsName (takeBaseName out) libs
-- | Run a single Maven goal (e.g. @compile@, @package@, @clean@)
-- against the generated POM, aborting via 'error' with Maven's
-- combined output when it exits non-zero.
invokeMvn :: FilePath -> String -> IO ()
invokeMvn tgtDir command = do
  mvnCmd <- getMvn
  let args = ["-f", pomFileName tgtDir]
  (exit, mvout, err) <- readProcessWithExitCode mvnCmd (args ++ [command]) ""
  when (exit /= ExitSuccess) $
    error ("FAILURE: " ++ mvnCmd ++ " " ++ command ++ "\n" ++ err ++ mvout)
-- | Directory Maven compiles @.class@ files into.
classFileDir :: FilePath -> FilePath
classFileDir = (</> ("target" </> "classes"))
-- | Copy every compiled @.class@ file from Maven's output directory
-- into the directory of the requested output target.
copyClassFiles :: FilePath -> FilePath -> IO ()
copyClassFiles tgtDir out = do
  entries <- getDirectoryContents (classFileDir tgtDir)
  let classFiles =
        [ classFileDir tgtDir </> e
        | e <- entries
        , takeExtension e == ".class"
        ]
  mapM_ copyOne classFiles
  where
    copyOne src = copyFile src (takeDirectory out </> takeFileName src)
-- | Path of the jar Maven builds for the given output target.
jarFileName :: FilePath -> FilePath -> FilePath
jarFileName tgtDir out = tgtDir </> "target" </> jarName
  where
    jarName = takeBaseName out <.> "jar"
-- | Copy the jar Maven built into the requested output location.
copyJar :: FilePath -> FilePath -> IO ()
copyJar tgtDir out = copyFile (jarFileName tgtDir out) out
-- | Turn the jar into a self-executing file by prepending the shell
-- stub 'jarHeader' and setting the owner-executable bit.
--
-- NOTE(review): the content is read fully (Data.Text.IO.hGetContents
-- is strict) before the same path is reopened for writing, so the
-- read/write sequence is safe; however the jar's binary bytes are
-- round-tripped through Text - presumably relying on the handle's
-- encoding preserving them, verify against the runtime behaviour.
makeJarExecutable :: FilePath -> IO ()
makeJarExecutable out = do
  handle <- openBinaryFile out ReadMode
  contents <- TIO.hGetContents handle
  hClose handle
  -- Rewrite the file with the launcher script in front of the jar.
  handle <- openBinaryFile out WriteMode
  TIO.hPutStr handle (T.append (T.pack jarHeader) contents)
  hFlush handle
  hClose handle
  perms <- getPermissions out
  setPermissions out (setOwnerExecutable True perms)
-- | Delete the generated @pom.xml@ from the target directory.
removePom :: FilePath -> IO ()
removePom = removeFile . pomFileName
-- | Run @mvn clean@ and then drop the generated POM.
cleanUpTmp :: FilePath -> IO ()
cleanUpTmp tgtDir = invokeMvn tgtDir "clean" >> removePom tgtDir
-----------------------------------------------------------------------
-- Jar and Pom infrastructure

-- | Shell stub prepended to the jar so it can be executed directly:
-- it locates itself, picks a @java@ binary (preferring @$JAVA_HOME@),
-- and replaces the shell with @java -jar@ on this very file.
jarHeader :: String
jarHeader =
     "#!/usr/bin/env sh\n"
  ++ "MYSELF=`which \"$0\" 2>/dev/null`\n"
  ++ "[ $? -gt 0 -a -f \"$0\" ] && MYSELF=\"./$0\"\n"
  ++ "java=java\n"
  ++ "if test -n \"$JAVA_HOME\"; then\n"
  ++ " java=\"$JAVA_HOME/bin/java\"\n"
  ++ "fi\n"
  -- BUG FIX: the exec line was missing its trailing newline, so
  -- "exit 1" was glued onto it ("...\"$@\"exit 1"), corrupting the
  -- fallback exit line of the generated launcher script.
  ++ "exec \"$java\" $java_args -jar $MYSELF \"$@\"\n"
  ++ "exit 1\n"
-----------------------------------------------------------------------
-- Code generation environment

-- | State threaded through Java code generation.
data CodeGenerationEnv
  = CodeGenerationEnv
  { globalVariables :: [(Name, ArrayIndex)] -- ^ global name -> slot in the global context array
  , localVariables :: [[(Int, Ident)]]      -- ^ stack of scopes, each mapping variable id -> Java identifier
  , localVarCounter :: Int                  -- ^ next fresh local variable number
  }
-- | Code generation monad: state over 'Either' for fatal errors.
type CodeGeneration = StateT (CodeGenerationEnv) (Either String)

-- | Empty starting environment: no globals, no scopes, counter at 0.
mkCodeGenEnv :: CodeGenerationEnv
mkCodeGenEnv = CodeGenerationEnv [] [] 0
-- | Resolve an 'LVar' to its code-generation position: locals map to a
-- Java 'Ident', globals to an 'ArrayIndex' into the global context.
-- Unknown variables fail via 'throwError'.
varPos :: LVar -> CodeGeneration (Either ArrayIndex Ident)
varPos (Loc i) = do
  locals <- gets (concat . localVariables)
  maybe (throwError $ "Invalid local variable id: " ++ show i)
        (return . Right)
        (lookup i locals)
varPos (Glob name) = do
  globals <- gets globalVariables
  maybe (throwError $ "Invalid global variable id: " ++ show name)
        (return . Left)
        (lookup name globals)
-- | Open a fresh (empty) local-variable scope.
pushScope :: CodeGeneration ()
pushScope = modify $ \env -> env { localVariables = [] : localVariables env }
-- | Discard the innermost local-variable scope; when no scopes remain
-- afterwards, the local-variable counter resets to zero.
--
-- NOTE(review): 'tail' is partial - popping with no open scope would
-- crash; presumably callers always pair this with 'pushScope'.
popScope :: CodeGeneration ()
popScope = do
  env <- get
  let lVars = tail $ localVariables env
  let vC = if null lVars then 0 else localVarCounter env
  put $ env { localVariables = tail (localVariables env)
            , localVarCounter = vC }
-- | Allocate a code-generation slot for a variable and record it in
-- the environment: locals get the next numbered local 'Ident' in the
-- innermost scope, globals get the next 'ArrayIndex' in the global
-- context. Returns the allocated position.
setVariable :: LVar -> CodeGeneration (Either ArrayIndex Ident)
setVariable (Loc i) = do
  env <- get
  let lVars = localVariables env
  -- Fresh identifier from the running per-function counter.
  let getter = localVar $ localVarCounter env
  -- Record the binding in the innermost scope ('head'/'tail' assume a
  -- scope was pushed beforehand).
  let lVars' = ((i, getter) : head lVars) : tail lVars
  put $ env { localVariables = lVars'
            , localVarCounter = 1 + localVarCounter env}
  return (Right getter)
setVariable (Glob n) = do
  env <- get
  let gVars = globalVariables env
  -- Next free slot in the global context array.
  let getter = globalContext @! length gVars
  let gVars' = (n, getter):gVars
  put (env { globalVariables = gVars' })
  return (Left getter)
-- | Open a scope pre-populated with the function parameters, numbered
-- from zero, and advance the local-variable counter past them.
pushParams :: [Ident] -> CodeGeneration ()
pushParams paramNames = modify $ \env ->
  env { localVariables = numbered : localVariables env
      , localVarCounter = length numbered + localVarCounter env
      }
  where
    numbered = zip [0 ..] paramNames
-- | Collapse indentation: every pair of consecutive spaces is removed
-- (an odd trailing space of a run survives).
flatIndent :: String -> String
flatIndent = go
  where
    go s = case s of
      ' ' : ' ' : rest -> go rest
      c : rest         -> c : go rest
      []               -> []
-----------------------------------------------------------------------
-- Maintaining control structures over code blocks
-- | Hooks used to wrap generated statement blocks with control structure.
data BlockPostprocessor
  = BlockPostprocessor
    { ppInnerBlock :: [BlockStmt] -> Exp -> CodeGeneration [BlockStmt] -- ^ consume the block's result expression (return it, assign it, discard it, ...)
    , ppOuterBlock :: [BlockStmt] -> CodeGeneration [BlockStmt] -- ^ wrap the finished block (e.g. in a try/catch)
    }
-- | Postprocess a bare expression: run the inner hook on an empty block,
-- then the outer wrapper.
ppExp :: BlockPostprocessor -> Exp -> CodeGeneration [BlockStmt]
ppExp pp e = do
  inner <- ppInnerBlock pp [] e
  ppOuterBlock pp inner
-- | Postprocessor that returns the result expression from the generated method.
addReturn :: BlockPostprocessor
addReturn =
  BlockPostprocessor
    { ppInnerBlock = \ stmts resultExp -> return (stmts ++ [jReturn resultExp])
    , ppOuterBlock = return
    }
-- | Postprocessor that discards the result expression, keeping only the
-- statements evaluated for their side effects.
ignoreResult :: BlockPostprocessor
ignoreResult =
  BlockPostprocessor
    { ppInnerBlock = \ stmts _ -> return stmts
    , ppOuterBlock = return
    }
-- | Keep the inner-result handling but drop any outer wrapping (used when the
-- caller wraps the block itself, e.g. around the branches of a switch).
ignoreOuter :: BlockPostprocessor -> BlockPostprocessor
ignoreOuter pp = pp { ppOuterBlock = return }
-- | Replace the inner hook so the result expression is thrown, wrapped in a
-- new Java RuntimeException (used for Idris runtime errors).
throwRuntimeException :: BlockPostprocessor -> BlockPostprocessor
throwRuntimeException pp =
  pp
  { ppInnerBlock =
      (\ blk exp -> return $
         blk ++ [ BlockStmt $ Throw
                    ( InstanceCreation
                      []
                      (toClassType runtimeExceptionType)
                      [exp]
                      Nothing
                    )
                ]
      )
  }
-- | Wrap the finished block in a Java try/catch that catches any Exception
-- "ex" and rethrows it as a RuntimeException (via 'throwRuntimeException').
rethrowAsRuntimeException :: BlockPostprocessor -> BlockPostprocessor
rethrowAsRuntimeException pp =
  pp
  { ppOuterBlock =
      (\ blk -> do
         -- Body of the catch clause: rethrow the caught exception.
         ex <- ppInnerBlock (throwRuntimeException pp) [] (ExpName $ J.Name [Ident "ex"])
         ppOuterBlock pp
           $ [ BlockStmt $ Try
                 (Block blk)
                 [Catch (FormalParam [] exceptionType False (VarId (Ident "ex"))) $
                    Block ex
                 ]
                 Nothing
             ]
      )
  }
-----------------------------------------------------------------------
-- File structure
-- | Assemble the whole Java compilation unit: standard runtime imports plus
-- any user headers (non-".h" entries, interpreted as Java class names), and
-- the single top-level class generated from the program.
mkCompilationUnit :: [(Name, SExp)] -> [(Name, SDecl)] -> [String] -> FilePath -> CodeGeneration CompilationUnit
mkCompilationUnit globalInit defs hdrs out = do
  clsName <- mkClassName out
  CompilationUnit Nothing ( [ ImportDecl False idrisRts True
                            , ImportDecl True idrisPrelude True
                            , ImportDecl False bigInteger False
                            , ImportDecl False runtimeException False
                            ] ++ otherHdrs
                          )
    <$> mkTypeDecl clsName globalInit defs
  where
    idrisRts = J.Name $ map Ident ["org", "idris", "rts"]
    idrisPrelude = J.Name $ map Ident ["org", "idris", "rts", "Prelude"]
    bigInteger = J.Name $ map Ident ["java", "math", "BigInteger"]
    runtimeException = J.Name $ map Ident ["java", "lang", "RuntimeException"]
    -- Split dotted header strings into qualified Java names; C headers
    -- (".h" suffix) are meaningless here and are dropped.
    otherHdrs = map ( (\ name -> ImportDecl False name False)
                      . J.Name
                      . map (Ident . T.unpack)
                      . T.splitOn (T.pack ".")
                      . T.pack)
                $ filter (not . isSuffixOf ".h") hdrs
-----------------------------------------------------------------------
-- Main class
-- | The single public top-level class holding all generated code.  Call
-- targets inside the definitions are prefixed with the class name so
-- references stay valid, and unchecked-cast warnings are suppressed.
mkTypeDecl :: Ident -> [(Name, SExp)] -> [(Name, SDecl)] -> CodeGeneration [TypeDecl]
mkTypeDecl name globalInit defs =
  (\ body -> [ClassTypeDecl $ ClassDecl [ Public
                                        , Annotation $ SingleElementAnnotation
                                            (jName "SuppressWarnings")
                                            (EVVal . InitExp $ jString "unchecked")
                                        ]
                                        name
                                        []
                                        Nothing
                                        []
                                        body])
    <$> mkClassBody globalInit (map (second (prefixCallNamespaces name)) defs)
-- | Build the class body: global-context declarations first, then the
-- compiled definitions with same-named inner classes merged and a JVM main
-- method prepended when the program has an Idris Main.main.
mkClassBody :: [(Name, SExp)] -> [(Name, SDecl)] -> CodeGeneration ClassBody
mkClassBody globalInit defs = do
  globals <- mkGlobalContext globalInit
  decls <- mapM mkDecl defs
  return . ClassBody . (globals ++) . addMainMethod . mergeInnerClasses $ decls
-- | Declare the static array backing Idris global variables together with a
-- static initializer block that fills it.
--
-- Each global is registered by 'mkUpdate' itself (its inner-block
-- postprocessor calls 'setVariable').  The previous code additionally called
-- @setVariable (Glob name)@ here, which allocated two context slots per
-- global, so the generated array indices ran past the end of the backing
-- array created below (sized to exactly @length initExps@).
mkGlobalContext :: [(Name, SExp)] -> CodeGeneration [Decl]
mkGlobalContext [] = return []
mkGlobalContext initExps = do
  pushScope
  varInit <- mapM (\ (name, exp) -> mkUpdate ignoreResult (Glob name) exp) initExps
  popScope
  return [ MemberDecl $ FieldDecl [Private, Static, Final]
                        (array objectType)
                        [ VarDecl (VarId $ globalContextID). Just . InitExp
                          $ ArrayCreate objectType [jInt $ length initExps] 0
                        ]
         , InitDecl True (Block $ concat varInit)
         ]
-- | Prepend a JVM-style @public static void main@ entry point when the
-- compiled program defines an Idris @Main.main@; otherwise leave the
-- declarations untouched.  (The unused local binding @innerMainMethod@ from
-- the original has been removed.)
addMainMethod :: [Decl] -> [Decl]
addMainMethod decls
  | findMain decls = mkMainMethod : decls
  | otherwise = decls
  where
    -- Look for the inner class generated for the "Main" namespace and check
    -- whether it contains a mangled "main" method.
    findMain ((MemberDecl (MemberClassDecl (ClassDecl _ name _ _ _ (ClassBody body)))):_)
      | name == mangle' (sUN "Main") = findMainMethod body
    findMain (_:decls) = findMain decls
    findMain [] = False

    findMainMethod ((MemberDecl (MethodDecl _ _ _ name [] _ _)):_)
      | name == mangle' (sUN "main") = True
    findMainMethod (_:decls) = findMainMethod decls
    findMainMethod [] = False
-- | The JVM entry point: forwards the command-line arguments to the Idris
-- runtime, then invokes the compiled runMain wrapper.
mkMainMethod :: Decl
mkMainMethod =
  simpleMethod
    [Public, Static]
    Nothing
    "main"
    [FormalParam [] (array stringType) False (VarId $ Ident "args")]
    $ Block [ BlockStmt . ExpStmt
              $ call "idris_initArgs" [ (threadType ~> "currentThread") []
                                      , jConst "args"
                                      ]
            , BlockStmt . ExpStmt $ call (mangle' (sMN 0 "runMain")) []
            ]
-----------------------------------------------------------------------
-- Inner classes (idris namespaces)
-- | Fuse inner classes that share a name (one is generated per namespace
-- segment) into a single class, merging their bodies recursively.
mergeInnerClasses :: [Decl] -> [Decl]
mergeInnerClasses = foldl' mergeInner []
  where
    -- Insert decl' into the accumulated list, merging with an existing
    -- same-named inner class if there is one.
    mergeInner ((decl@(MemberDecl (MemberClassDecl (ClassDecl priv name targs ext imp (ClassBody body))))):decls)
               decl'@(MemberDecl (MemberClassDecl (ClassDecl _ name' _ ext' imp' (ClassBody body'))))
      | name == name' =
        (MemberDecl $ MemberClassDecl $
           ClassDecl priv
                     name
                     targs
                     (mplus ext ext')
                     (imp ++ imp')
                     (ClassBody $ mergeInnerClasses (body ++ body')))
        : decls
      | otherwise = decl:(mergeInner decls decl')
    mergeInner (decl:decls) decl' = decl:(mergeInner decls decl')
    mergeInner [] decl' = [decl']
-- | Compile one top-level definition.  Namespaced names become nested static
-- inner classes (one level per namespace segment); plain functions become
-- public static methods returning Object.
mkDecl :: (Name, SDecl) -> CodeGeneration Decl
mkDecl ((NS n (ns:nss)), decl) =
  (\ name body ->
     MemberDecl $ MemberClassDecl $ ClassDecl [Public, Static] name [] Nothing [] body)
  <$> mangle (UN ns)
  <*> mkClassBody [] [(NS n nss, decl)]
mkDecl (_, SFun name params stackSize body) = do
  (Ident methodName) <- mangle name
  methodParams <- mapM mkFormalParam params
  -- Parameters occupy the first stack slots of a fresh scope.
  paramNames <- mapM mangle params
  pushParams paramNames
  methodBody <- mkExp addReturn body
  popScope
  return $
    simpleMethod [Public, Static] (Just objectType) methodName methodParams
      (Block methodBody)
-- | A final Object-typed Java parameter for the given (mangled) Idris name.
mkFormalParam :: Name -> CodeGeneration FormalParam
mkFormalParam name = do
  ident <- mangle name
  return $ FormalParam [Final] objectType False (VarId ident)
-----------------------------------------------------------------------
-- Expressions
-- | Compile a simple expression and use the given continuation to postprocess
-- the resulting value.
mkExp :: BlockPostprocessor -> SExp -> CodeGeneration [BlockStmt]
-- Variables
mkExp pp (SV var) =
  (Nothing <>@! var) >>= ppExp pp
-- Applications
mkExp pp (SApp pushTail name args) =
  mkApp pushTail name args >>= ppExp pp
-- Bindings
mkExp pp (SLet var newExp inExp) =
  mkLet pp var newExp inExp
mkExp pp (SUpdate var@(Loc i) newExp) = -- can only update locals
  mkUpdate pp var newExp
-- NOTE(review): an SUpdate on a non-local falls through to plain evaluation
-- of the right-hand side (the update itself is dropped) — confirm intended.
mkExp pp (SUpdate var newExp) =
  mkExp pp newExp
-- Objects
mkExp pp (SCon conId _ args) =
  mkIdrisObject conId args >>= ppExp pp
-- Case expressions
mkExp pp (SCase var alts) = mkCase pp True var alts
mkExp pp (SChkCase var alts) = mkCase pp False var alts
-- Projections
mkExp pp (SProj var i) =
  mkProjection var i >>= ppExp pp
-- Constants
mkExp pp (SConst c) =
  ppExp pp $ mkConstant c
-- Foreign function calls
mkExp pp (SForeign lang resTy text params) =
  mkForeign pp lang resTy text params
-- Primitive functions
mkExp pp (SOp LFork [arg]) =
  (mkThread arg) >>= ppExp pp
-- LPar is a parallelism hint: compiled as plain evaluation of its argument.
mkExp pp (SOp LPar [arg]) =
  (Nothing <>@! arg) >>= ppExp pp
-- LNoOp yields its last argument unchanged.
mkExp pp (SOp LNoOp args) =
  (Nothing <>@! (last args)) >>= ppExp pp
mkExp pp (SOp LNullPtr args) =
  ppExp pp $ Lit Null
mkExp pp (SOp op args) =
  (mkPrimitiveFunction op args) >>= ppExp pp
-- Empty expressions
mkExp pp (SNothing) = ppExp pp $ Lit Null
-- Errors
mkExp pp (SError err) = ppExp (throwRuntimeException pp) (jString err)
-----------------------------------------------------------------------
-- Variable access
-- | Read a variable as a Java expression, optionally casting it to a given
-- Java type.  Globals become context-array accesses; locals become plain
-- name references.
(<>@!) :: Maybe J.Type -> LVar -> CodeGeneration Exp
(<>@!) Nothing var = do
  pos <- varPos var
  return $ case pos of
    Left idx    -> ArrayAccess idx
    Right ident -> ExpName (J.Name [ident])
(<>@!) (Just castTo) var = fmap (castTo <>) (Nothing <>@! var)
-----------------------------------------------------------------------
-- Application (wrap method calls in tail call closures)
-- | Compile a function application.  Tail calls are wrapped in a closure for
-- trampolining; direct calls unwrap any tail-call closure in the result.
mkApp :: Bool -> Name -> [LVar] -> CodeGeneration Exp
mkApp True name args = mkMethodCallClosure name args
mkApp False name args = do
  methodName <- mangleFull name
  params <- mapM (Nothing <>@!) args
  return $ (idrisClosureType ~> "unwrapTailCall") [call methodName params]
-- | Wrap a (mangled) method call in a closure object so it can be forced
-- later (tail-call trampolining, thread forking).
mkMethodCallClosure :: Name -> [LVar] -> CodeGeneration Exp
mkMethodCallClosure name args = do
  mangled <- mangleFull name
  argExps <- mapM (Nothing <>@!) args
  return $ closure (call mangled argExps)
-----------------------------------------------------------------------
-- Updates (change context array) and Let bindings (Update, execute)
-- | Compile @exp@, store its result in @var@ (allocating storage via
-- 'setVariable'), then hand the variable's value on to the original
-- postprocessor.
mkUpdate :: BlockPostprocessor -> LVar -> SExp -> CodeGeneration [BlockStmt]
mkUpdate pp var exp =
  mkExp
    ( pp
      { ppInnerBlock =
          (\ blk rhs -> do
             pos <- setVariable var
             vExp <- Nothing <>@! var
             -- Append the assignment, then continue with the stored value.
             ppInnerBlock pp (blk ++ [pos @:= rhs]) vExp
          )
      }
    ) exp
-- | Let binding: store the bound expression in a local slot, then compile the
-- body in sequence.  Only locals may be let-bound.
mkLet :: BlockPostprocessor -> LVar -> SExp -> SExp -> CodeGeneration [BlockStmt]
mkLet pp var@(Loc pos) newExp inExp =
  mkUpdate (pp { ppInnerBlock =
                   (\ blk _ -> do
                      inBlk <- mkExp pp inExp
                      return (blk ++ inBlk)
                   )
               }
           ) var newExp
mkLet _ (Glob _) _ _ = T.lift $ Left "Cannot let bind to global variable"
-----------------------------------------------------------------------
-- Object creation
-- | Construct an IdrisObject: constructor tag first, then the field values.
mkIdrisObject :: Int -> [LVar] -> CodeGeneration Exp
mkIdrisObject conId args = do
  argExps <- mapM (Nothing <>@!) args
  return $ InstanceCreation [] (toClassType idrisObjectType) (jInt conId : argExps) Nothing
-----------------------------------------------------------------------
-- Case expressions
-- | Compile a case expression.  Default-only cases collapse to their branch;
-- constant cases become an if/else chain of equality tests; constructor cases
-- become a Java switch on the constructor id.  @checked@ tells whether the
-- scrutinee is statically known to be an IdrisObject.
mkCase :: BlockPostprocessor -> Bool -> LVar -> [SAlt] -> CodeGeneration [BlockStmt]
mkCase pp checked var cases
  | isDefaultOnlyCase cases = mkDefaultMatch pp cases
  | isConstCase cases = do
      ifte <- mkConstMatch (ignoreOuter pp) (\ pp -> mkDefaultMatch pp cases) var cases
      ppOuterBlock pp [BlockStmt ifte]
  | otherwise = do
      switchExp <- mkGetConstructorId checked var
      matchBlocks <- mkConsMatch (ignoreOuter pp) (\ pp -> mkDefaultMatch pp cases) var cases
      ppOuterBlock pp [BlockStmt $ Switch switchExp matchBlocks]
-- | True when the alternatives (ignoring leading defaults) match constants.
isConstCase :: [SAlt] -> Bool
isConstCase alts =
  case alts of
    SConstCase _ _ : _    -> True
    SDefaultCase _ : rest -> isConstCase rest
    _                     -> False
-- | True when there is at most a single default alternative.
isDefaultOnlyCase :: [SAlt] -> Bool
isDefaultOnlyCase alts =
  case alts of
    []               -> True
    [SDefaultCase _] -> True
    _                -> False
-- | Compile the default branch of a case expression; when no default
-- alternative exists, emit a thrown "Non-exhaustive pattern" error instead.
mkDefaultMatch :: BlockPostprocessor -> [SAlt] -> CodeGeneration [BlockStmt]
mkDefaultMatch pp alts =
  case alts of
    SDefaultCase branchExpression : _ -> do
      pushScope
      compiled <- mkExp pp branchExpression
      popScope
      return compiled
    _ : rest -> mkDefaultMatch pp rest
    [] -> ppExp (throwRuntimeException pp) (jString "Non-exhaustive pattern")
-- | Java boolean expression testing whether @var@ equals the constant @c@,
-- choosing the comparison appropriate for the constant's type: primitive
-- equality via the runtime's LEq, array contents via Arrays.equals, string
-- equality via LStrEq, and Object.equals otherwise.
mkMatchConstExp :: LVar -> Const -> CodeGeneration Exp
mkMatchConstExp var c
  | isPrimitive cty =
    (\ var -> (primFnType ~> opName (LEq undefined)) [var, jc] ~==~ jInt 1)
    <$> (Just cty <>@! var)
  | isArray cty =
    (\ var -> (arraysType ~> "equals") [var, jc])
    <$> (Just cty <>@! var)
  | isString cty =
    (\ var -> ((primFnType ~> opName (LStrEq)) [var, jc] ~==~ jInt 1))
    <$> (Just cty <>@! var)
  | otherwise =
    (\ var -> (var ~> "equals") [jc])
    <$> (Just cty <>@! var)
  where
    cty = constType c
    jc = mkConstant c
-- | Build an if/else chain testing each constant alternative in order; the
-- final else branch is supplied by @getDefaultStmts@.
mkConstMatch :: BlockPostprocessor ->
                (BlockPostprocessor -> CodeGeneration [BlockStmt]) ->
                LVar ->
                [SAlt] ->
                CodeGeneration Stmt
mkConstMatch pp getDefaultStmts var ((SConstCase constant branchExpression):cases) = do
  matchExp <- mkMatchConstExp var constant
  -- Each branch is compiled in its own variable scope.
  pushScope
  branchBlock <- mkExp pp branchExpression
  popScope
  otherBranches <- mkConstMatch pp getDefaultStmts var cases
  return
    $ IfThenElse matchExp (StmtBlock $ Block branchBlock) otherBranches
-- Non-constant alternatives (defaults) are skipped here; they are handled by
-- the default continuation below.
mkConstMatch pp getDefaultStmts var (c:cases) = mkConstMatch pp getDefaultStmts var cases
mkConstMatch pp getDefaultStmts _ [] = do
  defaultBlock <- getDefaultStmts pp
  return $ StmtBlock (Block defaultBlock)
-- | Expression reading the constructor tag of @var@.  When the scrutinee is
-- not statically known to be an IdrisObject (checked = False), guard with an
-- instanceof test and yield -1 for non-objects so no switch case matches.
mkGetConstructorId :: Bool -> LVar -> CodeGeneration Exp
mkGetConstructorId True var =
  (\ var -> ((idrisObjectType <> var) ~> "getConstructorId") [])
  <$> (Nothing <>@! var)
mkGetConstructorId False var =
  (\ var match ->
     Cond (InstanceOf var (toRefType idrisObjectType)) match (jInt (-1))
  )
  <$> (Nothing <>@! var)
  <*> mkGetConstructorId True var
-- | Build the Java switch blocks for the constructor alternatives; the
-- trailing default block is supplied by @getDefaultStmts@.
mkConsMatch :: BlockPostprocessor ->
               (BlockPostprocessor -> CodeGeneration [BlockStmt]) ->
               LVar ->
               [SAlt] ->
               CodeGeneration [SwitchBlock]
mkConsMatch pp getDefaultStmts var ((SConCase parentStackPos consIndex _ params branchExpression):cases) = do
  -- Each branch binds the constructor fields in its own scope.
  pushScope
  caseBranch <- mkCaseBinding pp var parentStackPos params branchExpression
  popScope
  otherBranches <- mkConsMatch pp getDefaultStmts var cases
  return $
    (SwitchBlock (SwitchCase $ jInt consIndex) caseBranch):otherBranches
-- Non-constructor alternatives are handled by the default continuation.
mkConsMatch pp getDefaultStmts var (c:cases) = mkConsMatch pp getDefaultStmts var cases
mkConsMatch pp getDefaultStmts _ [] = do
  defaultBlock <- getDefaultStmts pp
  return $
    [SwitchBlock Default defaultBlock]
-- | Compile a constructor branch by desugaring its field bindings into a
-- chain of let bindings: field i is projected from the scrutinee into stack
-- slot (stackStart + i) before the branch body runs.
mkCaseBinding :: BlockPostprocessor -> LVar -> Int -> [Name] -> SExp -> CodeGeneration [BlockStmt]
mkCaseBinding pp var stackStart params branchExpression =
  mkExp pp (toLetIn var stackStart params branchExpression)
  where
    toLetIn :: LVar -> Int -> [Name] -> SExp -> SExp
    toLetIn var stackStart members start =
      foldr
        (\ pos inExp -> SLet (Loc (stackStart + pos)) (SProj var pos) inExp)
        start
        [0.. (length members - 1)]
-----------------------------------------------------------------------
-- Projection (retrieve the n-th field of an object)
-- | Read field @memberNr@ of the IdrisObject stored in @var@ via its
-- getData() array.
mkProjection :: LVar -> Int -> CodeGeneration Exp
mkProjection var memberNr = do
  objExp <- Just idrisObjectType <>@! var
  return . ArrayAccess $ ((objExp ~> "getData") []) @! memberNr
-----------------------------------------------------------------------
-- Constants
-- | Java array-initializer literal whose elements are the compiled constants
-- obtained by lifting each vector element through @elemToConst@.
mkConstantArray :: (V.Unbox a) => J.Type -> (a -> Const) -> V.Vector a -> Exp
mkConstantArray cty elemToConst elems =
  ArrayCreateInit
    cty
    0
    (ArrayInit . map (InitExp . mkConstant . elemToConst) $ V.toList elems)
-- | Java literal for an Idris constant.  Numbers and bit vectors are cast to
-- their Java carrier type; big integers go through BigInteger (B64 via
-- BigInteger.longValue since Java has no unsigned 64-bit literal); type
-- constants become class literals.
mkConstant :: Const -> Exp
mkConstant c@(I x) = constType c <> (Lit . Word $ toInteger x)
mkConstant c@(BI x) = bigInteger (show x)
mkConstant c@(Fl x) = constType c <> (Lit . Double $ x)
mkConstant c@(Ch x) = constType c <> (Lit . Char $ x)
mkConstant c@(Str x) = constType c <> (Lit . String $ x)
mkConstant c@(B8 x) = constType c <> (Lit . Word $ toInteger x)
mkConstant c@(B16 x) = constType c <> (Lit . Word $ toInteger x)
mkConstant c@(B32 x) = constType c <> (Lit . Word $ toInteger x)
mkConstant c@(B64 x) = (bigInteger (show c) ~> "longValue") []
mkConstant c@(B8V x) = mkConstantArray (constType c) B8 x
mkConstant c@(B16V x) = mkConstantArray (constType c) B16 x
mkConstant c@(B32V x) = mkConstantArray (constType c) B32 x
mkConstant c@(B64V x) = mkConstantArray (constType c) B64 x
mkConstant c@(AType x) = ClassLit (Just $ box (constType c))
mkConstant c@(StrType ) = ClassLit (Just $ stringType)
mkConstant c@(PtrType ) = ClassLit (Just $ objectType)
mkConstant c@(VoidType ) = ClassLit (Just $ voidType)
mkConstant c@(Forgot ) = ClassLit (Just $ objectType)
-----------------------------------------------------------------------
-- Foreign function calls
-- | Compile a foreign function call.  C foreign calls are treated as static
-- Java calls; Java calls are static method calls, instance method calls
-- (first argument is the receiver), or constructor invocations.
--
-- The previous version dispatched with pattern guards of the form
-- @| callType <- FStatic@; a variable pattern in a pattern guard always
-- succeeds, so every LANG_JAVA call was compiled as a static call and the
-- FObject/FConstructor branches were unreachable.  Dispatching with 'case'
-- fixes that.
mkForeign :: BlockPostprocessor -> FLang -> FType -> String -> [(FType, LVar)] -> CodeGeneration [BlockStmt]
mkForeign pp (LANG_C) resTy text params = mkForeign pp (LANG_JAVA FStatic) resTy text params
mkForeign pp (LANG_JAVA callConv) resTy text params =
  case callConv of
    FStatic -> do
      method <- liftParsed (parser name text)
      args <- foreignVarAccess params
      wrapReturn resTy (call method args)
    FObject -> do
      method <- liftParsed (parser ident text)
      -- The first foreign argument is the receiver object.
      (tgt:args) <- foreignVarAccess params
      wrapReturn resTy ((tgt ~> (show $ pretty method)) args)
    FConstructor -> do
      clsTy <- liftParsed (parser classType text)
      args <- foreignVarAccess params
      wrapReturn resTy (InstanceCreation [] clsTy args Nothing)
  where
    -- Translate each argument, casting it to its declared foreign type.
    foreignVarAccess args =
      mapM (\ (fty, var) -> (foreignType fty <>@! var)) args
    -- Exceptions escaping the foreign side become RuntimeExceptions.
    pp' = rethrowAsRuntimeException pp
    -- Unit-returning calls execute for effect only and yield null.
    wrapReturn FUnit exp =
      ((ppInnerBlock pp') [BlockStmt $ ExpStmt exp] (Lit Null)) >>= ppOuterBlock pp'
    wrapReturn _ exp =
      ((ppInnerBlock pp') [] exp) >>= ppOuterBlock pp'
-----------------------------------------------------------------------
-- Primitive functions
-- | Call the runtime's implementation of a primitive operation, casting each
-- argument to the source type the operation expects.
mkPrimitiveFunction :: PrimFn -> [LVar] -> CodeGeneration Exp
mkPrimitiveFunction op args = do
  argExps <- sequence $ zipWith (\ arg ty -> Just ty <>@! arg) args (sourceTypes op)
  return $ (primFnType ~> opName op) argExps
-- | Fork a thread evaluating the argument: build a closure over the EVAL
-- wrapper and call its fork() method.
mkThread :: LVar -> CodeGeneration Exp
mkThread arg = do
  evalClosure <- mkMethodCallClosure (sMN 0 "EVAL") [arg]
  return $ (evalClosure ~> "fork") []
|
ctford/Idris-Elba-dev
|
src/IRTS/CodegenJava.hs
|
bsd-3-clause
| 26,975
| 0
| 23
| 6,777
| 8,480
| 4,324
| 4,156
| 571
| 5
|
module Board.Naive
( NaiveBoard(..)
) where
import Board.Common
import Types
import Data.Array
import Data.Maybe
-- | Simple board representation: an 8x8 array of fields, indexed from (1,1).
newtype NaiveBoard = NaiveBoard (Array Coord Field)
  deriving (Show)
-- | Straightforward 'Board' implementation backed by the array.
instance Board NaiveBoard where
  board0 = NaiveBoard $ listArray ((1,1), (8,8)) initialPosition
  -- A move onto its own square leaves the board unchanged.
  updateBoard b (Move from to) | from == to = b
  -- Otherwise the piece travels from 'from' to 'to'; each square keeps its
  -- own colour, only the piece occupancy changes.
  updateBoard (NaiveBoard b) (Move from to) =
    let Field fromColor fromPiece = b ! from
        Field toColor _ = b ! to
    in NaiveBoard $ b //
       [ (to, Field toColor fromPiece)
       , (from, Field fromColor Nothing)
       ]
  fieldIsEmpty (NaiveBoard b) c = isNothing $ fPiece $ b ! c
  fieldColor (NaiveBoard b) c = fColor $ b ! c
  -- NOTE(review): 'head' is partial — this assumes the requested piece is
  -- always present on the board; confirm callers guarantee that.
  pieceCoord (NaiveBoard b) p color = head
    [ coord
    | coord <- indices b
    , Just piece <- [fPiece $ b ! coord]
    , piecePlayer piece == p
    , pieceColor piece == color
    ]
  piecesCoords (NaiveBoard b) p =
    [ coord
    | coord <- indices b
    , Just piece <- [fPiece $ b ! coord]
    , piecePlayer piece == p
    ]
|
sphynx/hamisado
|
Board/Naive.hs
|
bsd-3-clause
| 1,050
| 0
| 12
| 305
| 414
| 213
| 201
| 30
| 0
|
import Control.Concurrent
import Control.Exception (finally)
import Control.Monad.Reader
import qualified Data.ByteString.Char8 as S
import qualified Data.ByteString.Lazy.Char8 as L
import qualified OpenSSL as SSL
import qualified Network.Socket as Net
import Network.Socket (SockAddr)
import System.Environment (getArgs)
import System.IO
import Text.Read (readMaybe)
import Data.IterIO
import Data.IterIO.Http
import HttpServer
-- | Shorthand for the lazy 'L.ByteString' used throughout this example.
type L = L.ByteString
-- | Entry point: parse an optional port argument (default 8000), start a
-- logging thread, and serve HTTP connections until the server finishes.
main :: IO ()
main = do
  args <- getArgs
  let port = case args of
        -- 'readMaybe' instead of the partial 'read': a malformed argument
        -- now fails with a clear message instead of a lazily-raised
        -- "Prelude.read: no parse" deep inside server startup.
        [portS] -> maybe (error $ "bad port: " ++ portS) fromInteger (readMaybe portS)
        [] -> 8000
        _ -> error "bad args"
  -- create a channel to accept log messages
  logChan <- newChan
  -- process log messages in a separate thread
  _ <- forkIO $ forever $ do
    line <- readChan logChan
    hPutStrLn stderr line
  -- information about the application
  let app = defaultApp { appLog = Just logChan }
  -- handle HTTP connections
  Net.withSocketsDo $ SSL.withOpenSSL $ do
    server <- mkHttpServer port Nothing
    runUntilFinished $ runHttpServer server handleHttpReq (runConn app)
--
-- Request-handling environment
--
-- | Application-wide settings; logging is disabled when 'appLog' is Nothing.
data App = App { appLog :: Maybe (Chan String) }
-- | App with logging disabled.
defaultApp :: App
defaultApp = App Nothing
-- | Per-connection environment.
-- NOTE(review): 'connRequests' is initialised to 0 and never read or updated
-- in this file — confirm whether it is still needed.
data Conn = Conn { connAddr :: SockAddr
                 , connApp :: App
                 , connRequests :: Int
                 }
-- | Monad for request handling: connection environment over IO.
type ConnM = ReaderT Conn IO
-- | Iteratee over lazy ByteStrings in the connection monad.
type ConnI = Iter L ConnM
-- | Run a connection action with a fresh environment for the given peer.
runConn :: App -> SockAddr -> ConnM a -> IO a
runConn app addr action =
  runReaderT action
    Conn { connAddr = addr
         , connApp = app
         , connRequests = 0
         }
--
-- Request-handling helpers
--
-- | Address of the connected peer, from the per-connection environment.
getPeerAddr :: ConnI SockAddr
getPeerAddr = lift $ asks connAddr
-- | Send a line to the application's log channel; a no-op when logging is
-- disabled.
warn :: String -> ConnI ()
warn msg = do
  logChannel <- lift $ asks (appLog . connApp)
  maybe (return ()) (\ch -> liftIO $ writeChan ch msg) logChannel
--
-- Request handling
--
-- | Handle one HTTP request: log the request line and answer 404 for every
-- path (this example server serves nothing).
handleHttpReq :: HttpReq s -> Iter L ConnM (HttpResp ConnM)
handleHttpReq httpReq = do
  addr <- getPeerAddr
  -- Pure value: bind with 'let' instead of the 'x <- return e' anti-pattern.
  let resp = resp404 httpReq
  warn $ showReqLine addr httpReq resp
  return resp
--
-- Utilities
--
-- | Run an action on its own thread and block until it finishes (normally or
-- by exception); the semaphore is signalled either way via 'finally'.
runUntilFinished :: IO () -> IO ()
runUntilFinished action = do
  done <- newQSem 0
  _ <- forkIO (action `finally` signalQSem done)
  waitQSem done
-- | One-line log record: peer address, method, path, and response status.
showReqLine :: (Monad m) => SockAddr -> HttpReq s -> HttpResp m -> String
showReqLine addr req resp =
  unwords [ show addr
          , S.unpack (reqMethod req)
          , S.unpack (reqPath req)
          , "->"
          , showStatus (respStatus resp)
          ]
-- | Render a status as "code description", e.g. "404 Not Found".
showStatus :: HttpStatus -> String
showStatus (HttpStatus code desc) = concat [show code, " ", S.unpack desc]
|
scslab/iterIO
|
Examples/httpServer.hs
|
bsd-3-clause
| 2,749
| 6
| 14
| 813
| 823
| 425
| 398
| 68
| 3
|
{-# LANGUAGE AllowAmbiguousTypes #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
-- | Test.Pos.Cbor.CborSpec specification
module Test.Pos.Crypto.CborSpec
( spec
) where
import Universum
import Crypto.Hash (Blake2b_224, Blake2b_256)
import Test.Hspec (Spec, describe)
import Pos.Binary.Class
import qualified Pos.Crypto as Crypto
import Test.Pos.Binary.Helpers (U, binaryTest)
import Test.Pos.Crypto.Arbitrary ()
-- | CBOR round-trip ('Bi') tests for every serialisable crypto type,
-- instantiated at concrete type arguments via 'binaryTest'.
spec :: Spec
spec =
    describe "Cbor.Bi instances" $ do
        describe "Crypto" $ do
            describe "Hashing" $ do
                binaryTest @(Crypto.Hash Word64)
            describe "Signing" $ do
                describe "Bi instances" $ do
                    binaryTest @Crypto.SecretKey
                    binaryTest @Crypto.PublicKey
                    binaryTest @(Crypto.Signature ())
                    binaryTest @(Crypto.Signature U)
                    binaryTest @(Crypto.ProxyCert Int32)
                    binaryTest @(Crypto.ProxySecretKey Int32)
                    binaryTest @(Crypto.ProxySecretKey U)
                    binaryTest @(Crypto.ProxySignature Int32 Int32)
                    binaryTest @(Crypto.ProxySignature U U)
                    binaryTest @(Crypto.Signed Bool)
                    binaryTest @(Crypto.Signed U)
                    binaryTest @Crypto.RedeemSecretKey
                    binaryTest @Crypto.RedeemPublicKey
                    binaryTest @(Crypto.RedeemSignature Bool)
                    binaryTest @(Crypto.RedeemSignature U)
                    binaryTest @Crypto.Threshold
                    binaryTest @Crypto.VssPublicKey
                    binaryTest @Crypto.PassPhrase
                    binaryTest @Crypto.VssKeyPair
                    binaryTest @Crypto.Secret
                    binaryTest @Crypto.DecShare
                    binaryTest @Crypto.EncShare
                    binaryTest @Crypto.SecretProof
                    binaryTest @Crypto.HDAddressPayload
                    binaryTest @(Crypto.AbstractHash Blake2b_224 U)
                    binaryTest @(Crypto.AbstractHash Blake2b_256 U)
                    binaryTest @(AsBinary Crypto.VssPublicKey)
                    binaryTest @(AsBinary Crypto.Secret)
                    binaryTest @(AsBinary Crypto.DecShare)
                    binaryTest @(AsBinary Crypto.EncShare)
|
input-output-hk/pos-haskell-prototype
|
crypto/test/Test/Pos/Crypto/CborSpec.hs
|
mit
| 2,592
| 0
| 21
| 969
| 573
| 269
| 304
| -1
| -1
|
module Penguin where
-- | The habitats a penguin may live in.
data WherePenguinsLive =
    Galapagos
  | Antarctica
  | Australia
  | SouthAfrica
  | SouthAmerica
  deriving (Eq, Show)
-- | A penguin, identified solely by where it lives.
data Penguin =
  Peng WherePenguinsLive
  deriving (Eq, Show)
-- | True exactly for 'SouthAfrica'.
isSouthAfrica :: WherePenguinsLive -> Bool
isSouthAfrica habitat = habitat == SouthAfrica
-- | Extract a penguin's habitat.
gimmeWhereTheyLive :: Penguin -> WherePenguinsLive
gimmeWhereTheyLive p = case p of
  Peng habitat -> habitat
-- | Does this penguin live in the Galapagos?
galapagosPenguin :: Penguin -> Bool
galapagosPenguin p = case p of
  Peng Galapagos -> True
  _              -> False
-- | Does this penguin live in Antarctica?
antarcticPenguin :: Penguin -> Bool
antarcticPenguin p = case p of
  Peng Antarctica -> True
  _               -> False
-- | True for penguins living in either Antarctica or the Galapagos.
antarcticOrGalapagosPenguin :: Penguin -> Bool
antarcticOrGalapagosPenguin p =
  gimmeWhereTheyLive p `elem` [Galapagos, Antarctica]
|
brodyberg/Notes
|
ProjectRosalind.hsproj/LearnHaskell/lib/HaskellBook/Penguin.hs
|
mit
| 800
| 0
| 7
| 164
| 197
| 106
| 91
| 26
| 1
|
-- Aside from sharing license info as a value itself, this is here so that our code will be recognized as FLO by LibreJS http://www.gnu.org/software/librejs
module Handler.JsLicense where
import Import
import qualified Data.Text as T
import Yesod.Form.Jquery
data Lib =
Lib { libName :: Text
, libRoute :: Text
, libLicenseName :: Text
, libLicenseRoute :: Text
, libOrigName :: Text
, libOrigRoute :: Text
}
getJsLicenseR :: Handler Html
getJsLicenseR = do
app <- getYesod
render <- getUrlRender
let jqueryUrl = either render id $ urlJqueryJs app
unMin lib = maybe lib (`T.append` "js") $ T.stripSuffix "min.js" . fst . T.breakOnEnd "?" $ lib
libs :: [Lib]
libs =
[ Lib "jquery.min.js" jqueryUrl "Expat License" "http://www.jclark.com/xml/copying.txt" "jquery.js" (unMin jqueryUrl)
, Lib "bootstrap.min.js" (render $ StaticR js_bootstrap_min_js) "Expat License" "http://www.jclark.com/xml/copying.txt" "bootstrap.js" (render $ StaticR js_bootstrap_js)
, Lib "modernizr.js" (render $ StaticR js_modernizr_js) "Expat License" "http://www.jclark.com/xml/copying.txt" "modernizr.js" (render $ StaticR js_modernizr_js)
, Lib "include.js" "https://browserid.org/include.js" "Mozilla Public License Version 2.0" "http://www.mozilla.org/MPL/2.0/" "include.orig.js" "https://login.persona.org/include.orig.js"
, Lib "jquery.jqplot.min.js" (render $ StaticR js_jquery_jqplot_min_js) "Expat License" "http://www.jclark.com/xml/copying.txt" "jquery.jqplot.js" (render $ StaticR js_jquery_jqplot_js)
, Lib "jqplot.logAxisRenderer.min.js" (render $ StaticR js_plugins_jqplot_logAxisRenderer_min_js) "Expat License" "http://www.jclark.com/xml/copying.txt" "jqplot.logAxisRenderer.js" (render $ StaticR js_plugins_jqplot_logAxisRenderer_js)
]
defaultLayout $ do
snowdriftTitle "Javascript Licenses"
[whamlet|
<h1>Javascript Licenses
<table .table id="jslicense-labels1">
$forall lib <- libs
<tr>
<td>
<a href=#{libRoute lib}>
#{libName lib}
<td>
<a href=#{libLicenseRoute lib}>
#{libLicenseName lib}
<td>
<a href=#{libOrigRoute lib}>
#{libOrigName lib}
|]
|
chreekat/snowdrift
|
Handler/JsLicense.hs
|
agpl-3.0
| 2,565
| 0
| 15
| 753
| 379
| 201
| 178
| -1
| -1
|
module Main where
-- | Advance one step along the Fibonacci sequence: given (F(n), F(n+1))
-- produce (F(n+1), F(n+2)).
fibNextPair :: (Integer, Integer) -> (Integer, Integer)
fibNextPair (x, y) = (y, x + y)

-- | The pair (F(n), F(n+1)) of consecutive Fibonacci numbers, with
-- F(1) = F(2) = 1.  Non-positive arguments are clamped to the base case;
-- previously any n < 1 caused unbounded recursion.
fibNthPair :: Integer -> (Integer, Integer)
fibNthPair n
  | n <= 1    = (1, 1)
  | otherwise = fibNextPair (fibNthPair (n - 1))

-- | The n-th Fibonacci number (1, 1, 2, 3, 5, ...).
fib :: Integer -> Integer
fib = fst . fibNthPair
|
nmcl/scratch
|
haskell/fib_pair.hs
|
apache-2.0
| 306
| 0
| 9
| 84
| 120
| 69
| 51
| 8
| 1
|
{-# LANGUAGE TupleSections #-}
module Main where
import Test
import Config
import Benchmark
import Prelude hiding (catch)
import Data.List
import Data.Maybe
import Control.Arrow
import Control.Monad
import System.Environment
import System.Console.CmdArgs (cmdArgs, getVerbosity, Verbosity(..))
-- Process command line options, prepare selected test programs for benchmarking
-- or verification
--
-- | Parse command-line options and return the configuration together with
-- the tests selected by it (all tests when no name prefixes were given).
processArgs :: IO (Config, [Test])
processArgs = do
  -- NOTE(review): 'allTests' is applied to 'undefined' here, so it must not
  -- force its Config argument when only titles/descriptions are needed —
  -- confirm against the Test module.
  testInfo <- map (title &&& description) `fmap` allTests undefined
  config <- cmdArgs $ defaultConfig testInfo
  tests <- filter (selected config) `fmap` allTests config
  --
  return (config, tests)
  where
    -- A test is selected when any requested prefix matches its title;
    -- with no prefixes, everything is selected.
    selected a = case cfgArgs a of
      [] -> const True
      ps -> \x -> any (\p -> p `isPrefixOf` title x) ps
-- Verify results with the chosen backend, turning exceptions into failures.
-- Pass back the tests which succeeded.
--
-- | Verify each test with the chosen backend and return only the tests whose
-- result was 'Ok' or 'Skipped'.
runVerify :: Config -> [Test] -> IO [Test]
runVerify cfg tests = do
  outcomes <- forM tests $ \t -> do
    r <- verifyTest cfg t
    return (t, r)
  return [ t | (t, r) <- outcomes, r `elem` [Ok, Skipped] ]
-- Run criterion timing tests in the chosen backend
--
-- | Run criterion benchmarks for the given tests, translating our
-- configuration into criterion's own command-line flags via 'withArgs'.
runTiming :: Config -> [Test] -> IO ()
runTiming cfg tests = do
  verbose <- getVerbosity
  unless (verbose == Quiet) $ putStrLn ""
  -- Empty strings are harmless placeholders for unset options.
  let args = [ maybe "" (\ci -> "--ci=" ++ show ci) (cfgConfidence cfg)
             , maybe "" (\r -> "--resamples=" ++ show r) (cfgResamples cfg)
             , maybe "" (\f -> "--summary=" ++ f) (cfgSummaryFile cfg)
             , if cfgPerformGC cfg then "-g" else "-G"
             , case verbose of
                 Loud -> "--verbose"
                 Quiet -> "--quiet"
                 Normal -> ""
             ]
  --
  withArgs args
    . runBenchmark
    . catMaybes
    $ map (benchmarkTest cfg) tests
-- Main
-- ====
-- | Parse options, verify the selected tests, then benchmark those that
-- passed verification (skipped entirely in verify-only mode or when nothing
-- passed).
main :: IO ()
main = do
  (config, tests) <- processArgs
  verified <- runVerify config tests
  --
  let skipTiming = null verified || cfgVerify config
  unless skipTiming $ runTiming config verified
|
wilbowma/accelerate
|
accelerate-examples/src/Main.hs
|
bsd-3-clause
| 2,074
| 0
| 15
| 574
| 646
| 344
| 302
| 47
| 4
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE RecursiveDo #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
{-# OPTIONS_GHC -fspecialise-aggressively #-}
module Reflex.Dom.Main where
import Prelude hiding (concat, mapM, mapM_, sequence, sequence_)
import Reflex.Adjustable.Class
import Reflex.Class
import Reflex.Dom.Builder.Immediate
import Reflex.Dom.Class
import Reflex.Host.Class
import Reflex.PerformEvent.Base
import Reflex.PostBuild.Base
import Reflex.Spider (Global, Spider, SpiderHost, runSpiderHost)
import Reflex.TriggerEvent.Base
import Reflex.TriggerEvent.Class
#ifdef PROFILE_REFLEX
import Reflex.Profiled
#endif
import Control.Concurrent
import Control.Lens
import Control.Monad
import Control.Monad.Reader hiding (forM, forM_, mapM, mapM_, sequence, sequence_)
import Control.Monad.Ref
import Data.ByteString (ByteString)
import Data.Dependent.Sum (DSum (..))
import Data.Foldable (for_)
import Data.IORef
import Data.Maybe
import Data.Monoid ((<>))
import Data.Text (Text)
import qualified Data.Text as T
import Data.Text.Encoding
import GHCJS.DOM
import GHCJS.DOM.Document
import GHCJS.DOM.Element
import GHCJS.DOM.Node
import GHCJS.DOM.NonElementParentNode
import GHCJS.DOM.Types (JSM)
import qualified GHCJS.DOM.Types as DOM
#ifdef PROFILE_REFLEX
import Reflex.Profiled
#endif
{-# INLINE mainHydrationWidgetWithHead #-}
-- | Hydrate a page whose head and body were pre-rendered by the given widgets.
mainHydrationWidgetWithHead :: (forall x. HydrationWidget x ()) -> (forall x. HydrationWidget x ()) -> JSM ()
mainHydrationWidgetWithHead = mainHydrationWidgetWithHead'
{-# INLINABLE mainHydrationWidgetWithHead' #-}
-- | Warning: `mainHydrationWidgetWithHead'` is provided only as performance tweak. It is expected to disappear in future releases.
mainHydrationWidgetWithHead' :: HydrationWidget () () -> HydrationWidget () () -> JSM ()
mainHydrationWidgetWithHead' = mainHydrationWidgetWithSwitchoverAction' (pure ())
{-# INLINE mainHydrationWidgetWithSwitchoverAction #-}
-- | Like 'mainHydrationWidgetWithHead', but runs the given action at the
-- moment the app switches from hydration to immediate DOM building.
mainHydrationWidgetWithSwitchoverAction :: JSM () -> (forall x. HydrationWidget x ()) -> (forall x. HydrationWidget x ()) -> JSM ()
mainHydrationWidgetWithSwitchoverAction = mainHydrationWidgetWithSwitchoverAction'
{-# INLINABLE mainHydrationWidgetWithSwitchoverAction' #-}
-- | Monomorphic worker for 'mainHydrationWidgetWithSwitchoverAction';
-- provided only as a performance tweak and expected to disappear in future
-- releases.
mainHydrationWidgetWithSwitchoverAction' :: JSM () -> HydrationWidget () () -> HydrationWidget () () -> JSM ()
mainHydrationWidgetWithSwitchoverAction' switchoverAction headWidget bodyWidget =
  runHydrationWidgetWithHeadAndBody switchoverAction $ \appendHead appendBody ->
    appendHead headWidget >> appendBody bodyWidget
{-# INLINABLE attachHydrationWidget #-}
-- | Set up the reflex hosts for a hydrating app: runs the widget builder,
-- fires the post-build trigger, and schedules the delayed hydration pass
-- (via RecursiveDo: 'delayedAction' is referenced before it is defined) that
-- replays the recorded per-root runners, flips the app into immediate mode,
-- and finally runs the switchover action.
attachHydrationWidget
  :: JSM ()
  -> JSContextSingleton ()
  -> ( Event DomTimeline ()
       -> IORef HydrationMode
       -> Maybe (IORef [(Node, HydrationRunnerT DomTimeline (DomCoreWidget ()) ())])
       -> EventChannel
       -> PerformEventT DomTimeline DomHost (a, IORef (Maybe (EventTrigger DomTimeline ())))
     )
  -> IO (a, FireCommand DomTimeline DomHost)
attachHydrationWidget switchoverAction jsSing w = do
  hydrationMode <- liftIO $ newIORef HydrationMode_Hydrating
  rootNodesRef <- liftIO $ newIORef []
  events <- newChan
  runDomHost $ flip runTriggerEventT events $ mdo
    (syncEvent, fireSync) <- newTriggerEvent
    ((result, postBuildTriggerRef), fc@(FireCommand fire)) <- lift $ hostPerformEventT $ do
      a <- w syncEvent hydrationMode (Just rootNodesRef) events
      _ <- runWithReplace (return ()) $ delayedAction <$ syncEvent
      pure a
    mPostBuildTrigger <- readRef postBuildTriggerRef
    lift $ forM_ mPostBuildTrigger $ \postBuildTrigger -> fire [postBuildTrigger :=> Identity ()] $ return ()
    liftIO $ fireSync ()
    rootNodes <- liftIO $ readIORef rootNodesRef
    let delayedAction = do
          -- Hydrate roots in registration order (they were consed on).
          for_ (reverse rootNodes) $ \(rootNode, runner) -> do
            let hydrate = runHydrationRunnerT runner Nothing rootNode events
            void $ runWithJSContextSingleton (runPostBuildT hydrate never) jsSing
          liftIO $ writeIORef hydrationMode HydrationMode_Immediate
          runWithJSContextSingleton (DOM.liftJSM switchoverAction) jsSing
    pure (result, fc)
-- | A widget that hydrates pre-rendered DOM.
type HydrationWidget x a = HydrationDomBuilderT HydrationDomSpace DomTimeline (DomCoreWidget x) a
-- | A widget that isn't attached to any particular part of the DOM hierarchy
type FloatingWidget x = TriggerEventT DomTimeline (DomCoreWidget x)
-- | The common widget base: post-build over a JS context over perform-event.
type DomCoreWidget x = PostBuildT DomTimeline (WithJSContextSingleton x (PerformEventT DomTimeline DomHost))
{-# INLINABLE runHydrationWidgetWithHeadAndBody #-}
-- | Run a hydration app given access to the document's head and body.
-- The caller receives two "append" functions; each builds a hydration widget
-- against the corresponding existing DOM node and records its hydration
-- runner so 'attachHydrationWidget' can replay it at switchover time.
runHydrationWidgetWithHeadAndBody
  :: JSM ()
  -> (   (forall c. HydrationWidget () c -> FloatingWidget () c) -- "Append to head" --TODO: test invoking this more than once
      -> (forall c. HydrationWidget () c -> FloatingWidget () c) -- "Append to body" --TODO: test invoking this more than once
      -> FloatingWidget () ()
     )
  -> JSM ()
runHydrationWidgetWithHeadAndBody switchoverAction app = withJSContextSingletonMono $ \jsSing -> do
  globalDoc <- currentDocumentUnchecked
  headElement <- getHeadUnchecked globalDoc
  bodyElement <- getBodyUnchecked globalDoc
  (events, fc) <- liftIO . attachHydrationWidget switchoverAction jsSing $ \switchover hydrationMode hydrationResult events -> do
    (postBuild, postBuildTriggerRef) <- newEventWithTriggerRef
    -- Build a hydration widget under node 'n', then record the accumulated
    -- delayed hydration actions (if a result ref was supplied).
    let hydrateDom :: DOM.Node -> HydrationWidget () c -> FloatingWidget () c
        hydrateDom n w = do
          delayed <- liftIO $ newIORef $ pure ()
          unreadyChildren <- liftIO $ newIORef 0
          lift $ do
            let builderEnv = HydrationDomBuilderEnv
                  { _hydrationDomBuilderEnv_document = globalDoc
                  , _hydrationDomBuilderEnv_parent = Left $ toNode n
                  , _hydrationDomBuilderEnv_unreadyChildren = unreadyChildren
                  , _hydrationDomBuilderEnv_commitAction = pure ()
                  , _hydrationDomBuilderEnv_hydrationMode = hydrationMode
                  , _hydrationDomBuilderEnv_switchover = switchover
                  , _hydrationDomBuilderEnv_delayed = delayed
                  }
            a <- runHydrationDomBuilderT w builderEnv events
            forM_ hydrationResult $ \hr -> do
              res <- liftIO $ readIORef delayed
              -- Prepend; 'attachHydrationWidget' reverses before replay.
              liftIO $ modifyIORef' hr ((n, res) :)
            pure a
    runWithJSContextSingleton (runPostBuildT (runTriggerEventT (app (hydrateDom $ toNode headElement) (hydrateDom $ toNode bodyElement)) events) postBuild) jsSing
    return (events, postBuildTriggerRef)
  liftIO $ processAsyncEvents events fc
{-# INLINE mainWidget #-}
-- | Attach the given widget to the document body and run it.
mainWidget :: (forall x. Widget x ()) -> JSM ()
mainWidget w = mainWidget' w
{-# INLINABLE mainWidget' #-}
-- | Warning: `mainWidget'` is provided only as performance tweak. It is expected to disappear in future releases.
mainWidget' :: Widget () () -> JSM ()
mainWidget' w = withJSContextSingletonMono $ \jsSing -> do
  bodyEl <- getBodyUnchecked =<< currentDocumentUnchecked
  attachWidget bodyEl jsSing w
--TODO: The x's should be unified here
{-# INLINABLE mainWidgetWithHead #-}
-- | Run one widget in the document head and another in the document body.
mainWidgetWithHead :: (forall x. Widget x ()) -> (forall x. Widget x ()) -> JSM ()
mainWidgetWithHead h b = withJSContextSingletonMono $ \jsSing -> do
  doc <- currentDocumentUnchecked
  headEl <- getHeadUnchecked doc
  attachWidget headEl jsSing h
  bodyEl <- getBodyUnchecked doc
  attachWidget bodyEl jsSing b
{-# INLINABLE mainWidgetWithCss #-}
-- | Like 'mainWidget', but first splices the given CSS into the document
-- head. NOTE(review): the CSS is inserted unescaped (see upstream TODO).
mainWidgetWithCss :: ByteString -> (forall x. Widget x ()) -> JSM ()
mainWidgetWithCss css w = withJSContextSingleton $ \jsSing -> do
  doc <- currentDocumentUnchecked
  headEl <- getHeadUnchecked doc
  setInnerHTML headEl ("<style>" <> T.unpack (decodeUtf8 css) <> "</style>") --TODO: Fix this
  bodyEl <- getBodyUnchecked doc
  attachWidget bodyEl jsSing w
-- | The Reflex timeline for interacting with the DOM
-- When built with PROFILE_REFLEX this is the profiled wrapper applied to
-- 'Spider'; otherwise it is plain 'Spider'.
type DomTimeline =
#ifdef PROFILE_REFLEX
  ProfiledTimeline
#endif
  Spider
-- | The ReflexHost the DOM lives in
-- When built with PROFILE_REFLEX the host monad is wrapped in 'ProfiledM'.
type DomHost =
#ifdef PROFILE_REFLEX
  ProfiledM
#endif
  (SpiderHost Global)
-- | Execute a computation in the DOM host monad; unwraps the profiling
-- layer first when PROFILE_REFLEX is enabled.
runDomHost :: DomHost a -> IO a
runDomHost = runSpiderHost
#ifdef PROFILE_REFLEX
  . runProfiledM
#endif
-- | An immediate-mode DOM-building widget over 'DomTimeline'.
type Widget x = ImmediateDomBuilderT DomTimeline (DomCoreWidget x)
{-# INLINABLE attachWidget #-}
-- | Attach a widget beneath the given root element, discarding the host's
-- fire command (see 'attachWidget'' to keep it).
attachWidget :: DOM.IsElement e => e -> JSContextSingleton x -> Widget x a -> JSM a
attachWidget rootElement wv w = fmap fst (attachWidget' rootElement wv w)
{-# INLINABLE runImmediateWidgetWithHeadAndBody #-}
-- | Run an immediate-mode app given access to the document's head and body.
-- Both widgets are first built into detached document fragments; only after
-- the whole build completes are the fragments swapped into the live head and
-- body elements, replacing their previous contents.
runImmediateWidgetWithHeadAndBody
  :: (   (forall c. Widget () c -> FloatingWidget () c) -- "Append to head"
      -> (forall c. Widget () c -> FloatingWidget () c) -- "Append to body"
      -> FloatingWidget () ()
     )
  -> JSM ()
runImmediateWidgetWithHeadAndBody app = withJSContextSingletonMono $ \jsSing -> do
  globalDoc <- currentDocumentUnchecked
  headElement <- getHeadUnchecked globalDoc
  bodyElement <- getBodyUnchecked globalDoc
  headFragment <- createDocumentFragment globalDoc
  bodyFragment <- createDocumentFragment globalDoc
  (events, fc) <- liftIO . attachImmediateWidget $ \hydrationMode events -> do
    (postBuild, postBuildTriggerRef) <- newEventWithTriggerRef
    -- Build a widget into the given (off-DOM) fragment.
    let go :: forall c. DOM.DocumentFragment -> Widget () c -> FloatingWidget () c
        go df w = do
          unreadyChildren <- liftIO $ newIORef 0
          delayed <- liftIO $ newIORef $ pure ()
          let builderEnv = HydrationDomBuilderEnv
                { _hydrationDomBuilderEnv_document = globalDoc
                , _hydrationDomBuilderEnv_parent = Left $ toNode df
                , _hydrationDomBuilderEnv_unreadyChildren = unreadyChildren
                , _hydrationDomBuilderEnv_commitAction = pure () --TODO: possibly `replaceElementContents n f`
                , _hydrationDomBuilderEnv_hydrationMode = hydrationMode
                , _hydrationDomBuilderEnv_switchover = never
                , _hydrationDomBuilderEnv_delayed = delayed
                }
          lift $ runHydrationDomBuilderT w builderEnv events
    runWithJSContextSingleton (runPostBuildT (runTriggerEventT (app (go headFragment) (go bodyFragment)) events) postBuild) jsSing
    return (events, postBuildTriggerRef)
  -- Swap the fully-built fragments into the live document.
  replaceElementContents headElement headFragment
  replaceElementContents bodyElement bodyFragment
  liftIO $ processAsyncEvents events fc
-- | Warning: `mainWidgetWithHead'` is provided only as performance tweak. It is expected to disappear in future releases.
--
-- The head and body widgets may depend on each other's results: the 'rec'
-- block ties the knot lazily, so neither widget may force the other's
-- result strictly during building.
mainWidgetWithHead' :: (a -> Widget () b, b -> Widget () a) -> JSM ()
mainWidgetWithHead' (h, b) = runImmediateWidgetWithHeadAndBody $ \appendHead appendBody -> do
  rec hOut <- appendHead $ h bOut
      bOut <- appendBody $ b hOut
  pure ()
-- | Clear out an element and append the given fragment as its new contents.
replaceElementContents :: DOM.IsElement e => e -> DOM.DocumentFragment -> JSM ()
replaceElementContents e df = do
  setInnerHTML e ("" :: String)
  void $ appendChild e df
{-# INLINABLE attachWidget' #-}
-- | Build a widget into a fresh, detached document fragment and then swap
-- the fragment into the given root element in one step. Returns the widget's
-- result together with the host's fire command.
attachWidget' :: DOM.IsElement e => e -> JSContextSingleton x -> Widget x a -> JSM (a, FireCommand DomTimeline DomHost)
attachWidget' rootElement jsSing w = do
  doc <- getOwnerDocumentUnchecked rootElement
  df <- createDocumentFragment doc
  ((a, events), fc) <- liftIO . attachImmediateWidget $ \hydrationMode events -> do
    (postBuild, postBuildTriggerRef) <- newEventWithTriggerRef
    unreadyChildren <- liftIO $ newIORef 0
    delayed <- liftIO $ newIORef $ pure ()
    let builderEnv = HydrationDomBuilderEnv
          { _hydrationDomBuilderEnv_document = toDocument doc
          , _hydrationDomBuilderEnv_parent = Left $ toNode df
          , _hydrationDomBuilderEnv_unreadyChildren = unreadyChildren
          , _hydrationDomBuilderEnv_commitAction = return () --TODO
          , _hydrationDomBuilderEnv_switchover = never
          , _hydrationDomBuilderEnv_delayed = delayed
          , _hydrationDomBuilderEnv_hydrationMode = hydrationMode
          }
    a <- runWithJSContextSingleton (runPostBuildT (runHydrationDomBuilderT w builderEnv events) postBuild) jsSing
    return ((a, events), postBuildTriggerRef)
  -- Only after the build completes do we touch the live DOM.
  replaceElementContents rootElement df
  liftIO $ processAsyncEvents events fc
  return (a, fc)
-- | Channel over which asynchronously-fired event triggers (with their
-- completion callbacks) are delivered to the host; drained by
-- 'processAsyncEvents'.
type EventChannel = Chan [DSum (EventTriggerRef DomTimeline) TriggerInvocation]
{-# INLINABLE attachImmediateWidget #-}
-- | Host a widget builder in immediate mode (no hydration): sets the mode
-- ref to 'HydrationMode_Immediate', allocates the async event channel,
-- builds the widget on the DOM host, and fires the post-build trigger once
-- (if anything subscribed to it).
attachImmediateWidget
  :: (   IORef HydrationMode
      -> EventChannel
      -> PerformEventT DomTimeline DomHost (a, IORef (Maybe (EventTrigger DomTimeline ())))
     )
  -> IO (a, FireCommand DomTimeline DomHost)
attachImmediateWidget w = do
  hydrationMode <- liftIO $ newIORef HydrationMode_Immediate
  events <- newChan
  runDomHost $ do
    ((result, postBuildTriggerRef), fc@(FireCommand fire)) <- hostPerformEventT $ w hydrationMode events
    mPostBuildTrigger <- readRef postBuildTriggerRef
    -- 'Nothing' means no subscriber ever demanded the post-build event.
    forM_ mPostBuildTrigger $ \postBuildTrigger -> fire [postBuildTrigger :=> Identity ()] $ return ()
    return (result, fc)
-- | Fork a background thread that forever drains the event channel, fires
-- the still-live triggers on the DOM host in a single frame, and then runs
-- each invocation's completion callback.
processAsyncEvents :: EventChannel -> FireCommand DomTimeline DomHost -> IO ()
processAsyncEvents events (FireCommand fire) = void $ forkIO $ forever $ do
  ers <- readChan events
  _ <- runDomHost $ do
    mes <- liftIO $ forM ers $ \(EventTriggerRef er :=> TriggerInvocation a _) -> do
      me <- readIORef er
      -- A 'Nothing' trigger ref means the subscriber went away; skip it.
      return $ fmap (\e -> e :=> Identity a) me
    _ <- fire (catMaybes mes) $ return ()
    -- Callbacks run after the frame, even for skipped triggers.
    liftIO $ forM_ ers $ \(_ :=> TriggerInvocation _ cb) -> cb
  return ()
-- | Run a reflex-dom application inside of an existing DOM element with the given ID
mainWidgetInElementById :: Text -> (forall x. Widget x ()) -> JSM ()
mainWidgetInElementById eid w = withJSContextSingleton $ \jsSing -> do
  doc <- currentDocumentUnchecked
  rootEl <- getElementByIdUnchecked doc eid
  attachWidget rootEl jsSing w
-- | Input supplied to a 'runApp'' application.
newtype AppInput t = AppInput
  { _appInput_window :: Window t -- ^ the wrapped browser window
  }
-- | Output produced by a 'runApp'' application.
newtype AppOutput t = AppOutput --TODO: Add quit event
  { _appOutput_windowConfig :: WindowConfig t -- ^ configuration applied to the window wrapper
  }
-- | Run an app that has access to the browser window. The 'rec' block ties
-- a lazy knot: the widget's 'AppOutput' configures the window wrapper whose
-- result feeds back into the widget as 'AppInput', so the output must not be
-- forced strictly during building.
runApp' :: (t ~ DomTimeline) => (forall x. AppInput t -> Widget x (AppOutput t)) -> JSM ()
runApp' app = withJSContextSingleton $ \jsSing -> do
  doc <- currentDocumentUnchecked
  body <- getBodyUnchecked doc
  win <- getDefaultViewUnchecked doc
  rec o <- attachWidget body jsSing $ do
        w <- wrapWindow win $ _appOutput_windowConfig o
        app $ AppInput
          { _appInput_window = w
          }
  return ()
{-# DEPRECATED attachWidget'' "Use 'attachImmediateWidget . const' instead" #-}
{-# INLINABLE attachWidget'' #-}
-- | Deprecated shim: run a widget builder that ignores the hydration mode.
attachWidget'' :: (EventChannel -> PerformEventT DomTimeline DomHost (a, IORef (Maybe (EventTrigger DomTimeline ())))) -> IO (a, FireCommand DomTimeline DomHost)
attachWidget'' w = attachImmediateWidget (\_hydrationMode -> w)
|
ryantrinkle/reflex-dom
|
reflex-dom-core/src/Reflex/Dom/Main.hs
|
bsd-3-clause
| 15,010
| 0
| 29
| 2,787
| 3,814
| 1,937
| 1,877
| 266
| 1
|
module HeadersByteString (headers) where
import Common (pathTo, rechunkBS)
import Criterion.Main (bench, bgroup, nf, nfIO)
import Criterion.Types (Benchmark)
import HeadersByteString.Atto (request, response)
import Network.Wai.Handler.Warp.RequestHeader (parseHeaderLines)
import qualified Data.Attoparsec.ByteString.Char8 as B
import qualified Data.Attoparsec.ByteString.Lazy as BL
import qualified Data.ByteString.Char8 as B
-- | Benchmark group comparing strict ('B') and lazy ('BL') attoparsec
-- header parsing (plus warp's parser) on sample HTTP request/response files.
headers :: IO Benchmark
headers = do
  req  <- B.readFile =<< pathTo "http-request.txt"
  resp <- B.readFile =<< pathTo "http-response.txt"
  let reql  = rechunkBS 4 req
      respl = rechunkBS 4 resp
      strictGroup =
        [ bench "request" $ nf (B.parseOnly request) req
        , bench "warp" $ nfIO (parseHeaderLines [req])
        , bench "response" $ nf (B.parseOnly response) resp
        ]
      lazyGroup =
        [ bench "request" $ nf (BL.parse request) reql
        , bench "response" $ nf (BL.parse response) respl
        ]
  return $ bgroup "headers"
    [ bgroup "B" strictGroup
    , bgroup "BL" lazyGroup
    ]
|
beni55/attoparsec
|
benchmarks/HeadersByteString.hs
|
bsd-3-clause
| 999
| 0
| 16
| 202
| 315
| 171
| 144
| 23
| 1
|
{-# OPTIONS_GHC -Wall #-}
module Type.Environment
( Environment
, initialize
, getType, freshDataScheme, ctorNames
, addValues
, instantiateType
)
where
import qualified Control.Monad.State as State
import qualified Data.List as List
import Data.Map ((!))
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified AST.Type as T
import qualified AST.Variable as V
import qualified AST.Module as Module
import Type.Type
-- | Map from type-constructor name to its representation 'Type'.
type TypeDict = Map.Map String Type
-- | Map from type-variable name to the fresh 'Variable' created for it.
type VarDict = Map.Map String Variable
-- | Everything the type checker knows about names in scope.
data Environment = Environment
    { _constructor :: Map.Map String (IO (Int, [Variable], [Type], Type))
      -- ^ per data constructor: an action producing (arity, fresh type
      -- variables, argument types, result type)
    , _types :: TypeDict
    , _value :: TypeDict
    }
-- | Build the initial environment from the modules' union types.
-- Two-stage construction: the type dictionary must exist first because
-- 'makeConstructors' consults '_types' when building constructor schemes.
initialize :: [Module.CanonicalUnion] -> IO Environment
initialize datatypes =
  do  types <- makeTypes datatypes
      let env =
            Environment
              { _constructor = Map.empty
              , _value = Map.empty
              , _types = types
              }
      return $ env { _constructor = makeConstructors env datatypes }
-- | Build the dictionary of type-constructor representations: one entry per
-- imported union type followed by one entry per built-in type.
makeTypes :: [Module.CanonicalUnion] -> IO TypeDict
makeTypes datatypes =
  do  imported <- mapM fromUnion datatypes
      builtin <- mapM fromBuiltin builtins
      return (Map.fromList (imported ++ builtin))
  where
    fromUnion :: (V.Canonical, Module.UnionInfo V.Canonical) -> IO (String, Type)
    fromUnion (name, _) =
      do  atom <- mkAtom name
          return (V.toString name, VarN atom)

    fromBuiltin :: (String, Int) -> IO (String, Type)
    fromBuiltin (name, _) =
      do  atom <- mkAtom (V.builtin name)
          return (name, VarN atom)
-- | Names and kinds (number of type arguments) of the built-in types:
-- tuples 0..9, @List@, and the nullary primitives.
builtins :: [(String, Int)]
builtins =
  tupleTypes ++ ofKind 1 ["List"] ++ ofKind 0 ["Int","Float","Char","String","Bool"]
  where
    tupleTypes = [ ("_Tuple" ++ show n, n) | n <- [0..9] ]
    ofKind n = map (\name -> (name, n))
-- | Build the constructor dictionary: built-in list constructors, tuple
-- constructors, and one entry per user-defined data constructor. Each entry
-- is an IO action that instantiates fresh type variables on every use.
makeConstructors
    :: Environment
    -> [Module.CanonicalUnion]
    -> Map.Map String (IO (Int, [Variable], [Type], Type))
makeConstructors env datatypes =
    Map.fromList builtins
  where
    -- Apply the List type constructor to an element type.
    list t =
      (_types env ! "List") <| t

    -- Create 'numTVars' fresh variables and feed them to 'tipe' to get the
    -- constructor's argument types and result type.
    inst :: Int -> ([Type] -> ([Type], Type)) -> IO (Int, [Variable], [Type], Type)
    inst numTVars tipe =
      do  vars <- mapM (\_ -> mkVar Nothing) [1..numTVars]
          let (args, result) = tipe (map (VarN) vars)
          return (length args, vars, args, result)

    tupleCtor n =
      let name = "_Tuple" ++ show n
      in  (name, inst n $ \vs -> (vs, foldl (<|) (_types env ! name) vs))

    -- NOTE: this local 'builtins' shadows the top-level kinds table.
    builtins :: [ (String, IO (Int, [Variable], [Type], Type)) ]
    builtins =
      [ ("[]", inst 1 $ \ [t] -> ([], list t))
      , ("::", inst 1 $ \ [t] -> ([t, list t], list t))
      ] ++ map tupleCtor [0..9]
        ++ concatMap (ctorToType env) datatypes
-- | Turn one union type into instantiation actions, one per constructor.
-- Each action instantiates the constructor's argument types and its result
-- type (the union applied to its type variables) with a shared fresh-var
-- dictionary, so repeated variables stay identified.
ctorToType
    :: Environment
    -> (V.Canonical, Module.UnionInfo V.Canonical)
    -> [(String, IO (Int, [Variable], [Type], Type))]
ctorToType env (name, (tvars, ctors)) =
  zip (map (V.toString . fst) ctors) (map inst ctors)
  where
    inst :: (V.Canonical, [T.Canonical]) -> IO (Int, [Variable], [Type], Type)
    inst ctor =
      do  ((args, tipe), dict) <- State.runStateT (go ctor) Map.empty
          return (length args, Map.elems dict, args, tipe)

    go :: (V.Canonical, [T.Canonical]) -> State.StateT VarDict IO ([Type], Type)
    go (_, args) =
      do  types <- mapM (instantiator env) args
          returnType <- instantiator env (T.App (T.Type name) (map T.Var tvars))
          return (types, returnType)
-- ACCESS TYPES

-- | Look up a key in one of the environment's dictionaries, crashing with a
-- descriptive message when it is missing.
get :: (Environment -> Map.Map String a) -> Environment -> String -> a
get subDict env key =
  case Map.lookup key (subDict env) of
    Just value ->
        value
    Nothing ->
        error ("Could not find type constructor `" ++ key ++ "` while checking types.")
-- | Look up a type-constructor representation by name (crashes if unknown).
getType :: Environment -> String -> Type
getType env name =
  get _types env name
-- | Look up the fresh-instantiation action for a data constructor
-- (crashes if the constructor is unknown).
freshDataScheme :: Environment -> String -> IO (Int, [Variable], [Type], Type)
freshDataScheme env name =
  get _constructor env name
-- | All data constructor names known to the environment.
ctorNames :: Environment -> [String]
ctorNames =
  Map.keys . _constructor
-- UPDATE ENVIRONMENT

-- | Extend the environment's value dictionary with the given bindings.
-- Later entries override earlier ones with the same name.
addValues :: Environment -> [(String, Variable)] -> Environment
addValues env newValues =
  env { _value = List.foldl' insertValue (_value env) newValues }
  where
    insertValue dict (name, var) =
      Map.insert name (VarN var) dict
-- INSTANTIATE TYPES

-- | Translate a canonical source type into a constraint 'Type', returning
-- the variables present in the final fresh-variable dictionary.
instantiateType :: Environment -> T.Canonical -> VarDict -> IO ([Variable], Type)
instantiateType env sourceType dict =
  fmap rearrange (State.runStateT (instantiator env sourceType) dict)
  where
    rearrange (tipe, dict') =
      (Map.elems dict', tipe)
-- | Instantiate a source type with no alias variables in scope.
instantiator :: Environment -> T.Canonical -> State.StateT VarDict IO Type
instantiator env =
  instantiatorHelp env Set.empty
-- | Worker for 'instantiator': translate a canonical type while tracking
-- which variable names belong to an enclosing alias (those become
-- 'PlaceHolder's instead of fresh variables).
instantiatorHelp :: Environment -> Set.Set String -> T.Canonical -> State.StateT VarDict IO Type
instantiatorHelp env aliasVars sourceType =
  let
    go =
      instantiatorHelp env aliasVars
  in
  case sourceType of
    T.Lambda t1 t2 ->
        (==>) <$> go t1 <*> go t2

    T.Var name ->
        if Set.member name aliasVars then
            return (PlaceHolder name)
        else
            -- Reuse a previously created variable for this name, or make a
            -- fresh one and remember it in the state dictionary.
            do  dict <- State.get
                case Map.lookup name dict of
                  Just variable ->
                      return (VarN variable)
                  Nothing ->
                      do  variable <- State.liftIO (mkNamedVar name)
                          State.put (Map.insert name variable dict)
                          return (VarN variable)

    T.Aliased name args aliasType ->
        -- Instantiate the alias arguments first; the expansion's own type
        -- variables are handled per the 'Filled'/'Holey' distinction.
        do  targs <- mapM (\(arg,tipe) -> (,) arg <$> go tipe) args
            realType <-
              case aliasType of
                T.Filled tipe ->
                    instantiatorHelp env Set.empty tipe
                T.Holey tipe ->
                    instantiatorHelp env (Set.fromList (map fst args)) tipe
            return (AliasN name targs realType)

    T.Type name ->
        case Map.lookup (V.toString name) (_types env) of
          Just tipe ->
              return tipe
          Nothing ->
              error $
                "Could not find type constructor `" ++
                V.toString name ++ "` while checking types."

    T.App func args ->
        do  tfunc <- go func
            targs <- mapM go args
            return $ foldl (<|) tfunc targs

    T.Record fields ext ->
        do  tfields <- traverse go (Map.fromList fields)
            -- Missing extension means a closed record.
            text <-
              case ext of
                Nothing ->
                    return $ TermN EmptyRecord1
                Just extType ->
                    go extType
            return $ TermN (Record1 tfields text)
|
mgold/Elm
|
src/Type/Environment.hs
|
bsd-3-clause
| 6,804
| 0
| 21
| 2,172
| 2,372
| 1,249
| 1,123
| 166
| 11
|
{-# LANGUAGE OverloadedStrings #-}
import Network.Wai
import Network.Wai.Handler.Webkit
import Network.HTTP.Types
-- | Serve 'app' in a native WebKit window titled "Sample App".
main :: IO ()
main = run "Sample App" app
-- | Respond to every request with a static HTML greeting.
app :: Application
app _request =
    return (responseLBS status200 [("Content-Type", "text/html")] "<h1>Hello World!</h1>")
|
beni55/wai
|
wai-handler-webkit/sample.hs
|
mit
| 272
| 2
| 8
| 36
| 80
| 41
| 39
| 8
| 1
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeFamilies #-}
-- Module : Network.AWS.ElastiCache.Waiters
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
module Network.AWS.ElastiCache.Waiters where
import Network.AWS.ElastiCache.DescribeCacheClusters
import Network.AWS.ElastiCache.DescribeReplicationGroups
import Network.AWS.ElastiCache.Types
import Network.AWS.Waiters
-- | Poll 'DescribeCacheClusters' until every cluster reports @available@;
-- a cluster entering any terminal or failed state aborts the wait.
cacheClusterAvailable :: Wait DescribeCacheClusters
cacheClusterAvailable = Wait
    { _waitName      = "CacheClusterAvailable"
    , _waitAttempts  = 60
    , _waitDelay     = 30
    , _waitAcceptors =
        matchAll "available" AcceptSuccess
            (folding (concatOf dccrCacheClusters) . ccCacheClusterStatus . _Just)
        : map failOn ["deleted", "deleting", "incompatible-network", "restore-failed"]
    }
  where
    -- Any of these statuses means the wait can never succeed.
    failOn st =
        matchAny st AcceptFailure
            (folding (concatOf dccrCacheClusters) . ccCacheClusterStatus . _Just)
-- | Poll 'DescribeCacheClusters' until the cluster is gone (the API returns
-- @CacheClusterNotFound@); an active-lifecycle status aborts the wait.
cacheClusterDeleted :: Wait DescribeCacheClusters
cacheClusterDeleted = Wait
    { _waitName      = "CacheClusterDeleted"
    , _waitAttempts  = 60
    , _waitDelay     = 30
    , _waitAcceptors =
        matchError "CacheClusterNotFound" AcceptSuccess
        : map failOn ["creating", "modifying", "rebooting"]
    }
  where
    failOn st =
        matchAny st AcceptFailure
            (folding (concatOf dccrCacheClusters) . ccCacheClusterStatus . _Just)
-- | Poll 'DescribeReplicationGroups' until every group reports @available@;
-- a group entering any terminal or failed state aborts the wait.
replicationGroupAvailable :: Wait DescribeReplicationGroups
replicationGroupAvailable = Wait
    { _waitName      = "ReplicationGroupAvailable"
    , _waitAttempts  = 60
    , _waitDelay     = 30
    , _waitAcceptors =
        matchAll "available" AcceptSuccess
            (folding (concatOf drgrReplicationGroups) . rgStatus . _Just)
        : map failOn ["deleted", "deleting", "incompatible-network", "restore-failed"]
    }
  where
    failOn st =
        matchAny st AcceptFailure
            (folding (concatOf drgrReplicationGroups) . rgStatus . _Just)
-- | Poll 'DescribeReplicationGroups' until the group is gone (the API
-- returns @ReplicationGroupNotFoundFault@); an active-lifecycle status
-- aborts the wait.
replicationGroupDeleted :: Wait DescribeReplicationGroups
replicationGroupDeleted = Wait
    { _waitName      = "ReplicationGroupDeleted"
    , _waitAttempts  = 60
    , _waitDelay     = 30
    , _waitAcceptors =
        matchError "ReplicationGroupNotFoundFault" AcceptSuccess
        : map failOn ["creating", "modifying", "rebooting"]
    }
  where
    failOn st =
        matchAny st AcceptFailure
            (folding (concatOf drgrReplicationGroups) . rgStatus . _Just)
|
kim/amazonka
|
amazonka-elasticache/gen/Network/AWS/ElastiCache/Waiters.hs
|
mpl-2.0
| 3,888
| 0
| 14
| 935
| 700
| 382
| 318
| 65
| 1
|
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.Install
-- Copyright : (c) 2005 David Himmelstrup
-- 2007 Bjorn Bringert
-- 2007-2010 Duncan Coutts
-- License : BSD-like
--
-- Maintainer : cabal-devel@haskell.org
-- Stability : provisional
-- Portability : portable
--
-- High level interface to package installation.
-----------------------------------------------------------------------------
module Distribution.Client.Install (
-- * High-level interface
install,
-- * Lower-level interface that allows to manipulate the install plan
makeInstallContext,
makeInstallPlan,
processInstallPlan,
InstallArgs,
InstallContext,
-- * Prune certain packages from the install plan
pruneInstallPlan
) where
import Data.List
( isPrefixOf, unfoldr, nub, sort, (\\) )
import qualified Data.Set as S
import Data.Maybe
( isJust, fromMaybe, mapMaybe, maybeToList )
import Control.Exception as Exception
( Exception(toException), bracket, catches
, Handler(Handler), handleJust, IOException, SomeException )
#ifndef mingw32_HOST_OS
import Control.Exception as Exception
( Exception(fromException) )
#endif
import System.Exit
( ExitCode(..) )
import Distribution.Compat.Exception
( catchIO, catchExit )
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative
( (<$>) )
#endif
import Control.Monad
( forM_, when, unless )
import System.Directory
( getTemporaryDirectory, doesDirectoryExist, doesFileExist,
createDirectoryIfMissing, removeFile, renameDirectory )
import System.FilePath
( (</>), (<.>), equalFilePath, takeDirectory )
import System.IO
( openFile, IOMode(AppendMode), hClose )
import System.IO.Error
( isDoesNotExistError, ioeGetFileName )
import Distribution.Client.Targets
import Distribution.Client.Configure
( chooseCabalVersion )
import Distribution.Client.Dependency
import Distribution.Client.Dependency.Types
( Solver(..) )
import Distribution.Client.FetchUtils
import qualified Distribution.Client.Haddock as Haddock (regenerateHaddockIndex)
import Distribution.Client.IndexUtils as IndexUtils
( getSourcePackages, getInstalledPackages )
import qualified Distribution.Client.InstallPlan as InstallPlan
import Distribution.Client.InstallPlan (InstallPlan)
import Distribution.Client.Setup
( GlobalFlags(..)
, ConfigFlags(..), configureCommand, filterConfigureFlags
, ConfigExFlags(..), InstallFlags(..) )
import Distribution.Client.Config
( defaultCabalDir, defaultUserInstall )
import Distribution.Client.Sandbox.Timestamp
( withUpdateTimestamps )
import Distribution.Client.Sandbox.Types
( SandboxPackageInfo(..), UseSandbox(..), isUseSandbox
, whenUsingSandbox )
import Distribution.Client.Tar (extractTarGzFile)
import Distribution.Client.Types as Source
import Distribution.Client.BuildReports.Types
( ReportLevel(..) )
import Distribution.Client.SetupWrapper
( setupWrapper, SetupScriptOptions(..), defaultSetupScriptOptions )
import qualified Distribution.Client.BuildReports.Anonymous as BuildReports
import qualified Distribution.Client.BuildReports.Storage as BuildReports
( storeAnonymous, storeLocal, fromInstallPlan, fromPlanningFailure )
import qualified Distribution.Client.InstallSymlink as InstallSymlink
( symlinkBinaries )
import qualified Distribution.Client.PackageIndex as SourcePackageIndex
import qualified Distribution.Client.Win32SelfUpgrade as Win32SelfUpgrade
import qualified Distribution.Client.World as World
import qualified Distribution.InstalledPackageInfo as Installed
import Distribution.Client.Compat.ExecutablePath
import Distribution.Client.JobControl
import Distribution.Utils.NubList
import Distribution.Simple.Compiler
( CompilerId(..), Compiler(compilerId), compilerFlavor
, CompilerInfo(..), compilerInfo, PackageDB(..), PackageDBStack )
import Distribution.Simple.Program (ProgramConfiguration,
defaultProgramConfiguration)
import qualified Distribution.Simple.InstallDirs as InstallDirs
import qualified Distribution.Simple.PackageIndex as PackageIndex
import Distribution.Simple.PackageIndex (InstalledPackageIndex)
import Distribution.Simple.Setup
( haddockCommand, HaddockFlags(..)
, buildCommand, BuildFlags(..), emptyBuildFlags
, toFlag, fromFlag, fromFlagOrDefault, flagToMaybe, defaultDistPref )
import qualified Distribution.Simple.Setup as Cabal
( Flag(..)
, copyCommand, CopyFlags(..), emptyCopyFlags
, registerCommand, RegisterFlags(..), emptyRegisterFlags
, testCommand, TestFlags(..), emptyTestFlags )
import Distribution.Simple.Utils
( createDirectoryIfMissingVerbose, rawSystemExit, comparing
, writeFileAtomic, withTempFile , withUTF8FileContents )
import Distribution.Simple.InstallDirs as InstallDirs
( PathTemplate, fromPathTemplate, toPathTemplate, substPathTemplate
, initialPathTemplateEnv, installDirsTemplateEnv )
import Distribution.Package
( PackageIdentifier(..), PackageId, packageName, packageVersion
, Package(..), PackageFixedDeps(..), PackageKey
, Dependency(..), thisPackageVersion, InstalledPackageId, installedPackageId )
import qualified Distribution.PackageDescription as PackageDescription
import Distribution.PackageDescription
( PackageDescription, GenericPackageDescription(..), Flag(..)
, FlagName(..), FlagAssignment )
import Distribution.PackageDescription.Configuration
( finalizePackageDescription )
import Distribution.ParseUtils
( showPWarning )
import Distribution.Version
( Version, VersionRange, foldVersionRange )
import Distribution.Simple.Utils as Utils
( notice, info, warn, debug, debugNoWrap, die
, intercalate, withTempDirectory )
import Distribution.Client.Utils
( determineNumJobs, inDir, mergeBy, MergeResult(..)
, tryCanonicalizePath )
import Distribution.System
( Platform, OS(Windows), buildOS )
import Distribution.Text
( display )
import Distribution.Verbosity as Verbosity
( Verbosity, showForCabal, normal, verbose )
import Distribution.Simple.BuildPaths ( exeExtension )
--TODO:
-- * assign flags to packages individually
-- * complain about flags that do not apply to any package given as target
-- so flags do not apply to dependencies, only listed, can use flag
-- constraints for dependencies
-- * only record applicable flags in world file
-- * allow flag constraints
-- * allow installed constraints
-- * allow flag and installed preferences
-- * change world file to use cabal section syntax
-- * allow persistent configure flags for each package individually
-- ------------------------------------------------------------
-- * Top level user actions
-- ------------------------------------------------------------
-- | Installs the packages needed to satisfy a list of dependencies.
--
-- Three phases: build the install context from the targets, compute an
-- install plan, and (unless planning failed) execute the plan. Planning
-- failures are reported for build-report purposes before dying.
install
  :: Verbosity
  -> PackageDBStack
  -> [Repo]
  -> Compiler
  -> Platform
  -> ProgramConfiguration
  -> UseSandbox
  -> Maybe SandboxPackageInfo
  -> GlobalFlags
  -> ConfigFlags
  -> ConfigExFlags
  -> InstallFlags
  -> HaddockFlags
  -> [UserTarget]
  -> IO ()
install verbosity packageDBs repos comp platform conf useSandbox mSandboxPkgInfo
  globalFlags configFlags configExFlags installFlags haddockFlags
  userTargets0 = do

    installContext <- makeInstallContext verbosity args (Just userTargets0)
    planResult     <- foldProgress logMsg (return . Left) (return . Right) =<<
                      makeInstallPlan verbosity args installContext

    case planResult of
      Left message -> do
        reportPlanningFailure verbosity args installContext message
        die' message
      Right installPlan ->
        processInstallPlan verbosity args installContext installPlan
  where
    args :: InstallArgs
    args = (packageDBs, repos, comp, platform, conf, useSandbox, mSandboxPkgInfo,
            globalFlags, configFlags, configExFlags, installFlags,
            haddockFlags)

    -- Sandbox failures get an extra hint appended to the error message.
    die' message = die (message ++ if isUseSandbox useSandbox
                                   then installFailedInSandbox else [])
    -- TODO: use a better error message, remove duplication.
    installFailedInSandbox =
      "\nNote: when using a sandbox, all packages are required to have "
      ++ "consistent dependencies. "
      ++ "Try reinstalling/unregistering the offending packages or "
      ++ "recreating the sandbox."
    logMsg message rest = debugNoWrap verbosity message >> rest
-- TODO: Make InstallContext a proper data type with documented fields.
-- | Common context for makeInstallPlan and processInstallPlan.
type InstallContext = ( InstalledPackageIndex, SourcePackageDb
                      , [UserTarget], [PackageSpecifier SourcePackage] )

-- TODO: Make InstallArgs a proper data type with documented fields or just get
-- rid of it completely.
-- | Initial arguments given to 'install' or 'makeInstallContext'.
type InstallArgs = ( PackageDBStack
                   , [Repo]
                   , Compiler
                   , Platform
                   , ProgramConfiguration
                   , UseSandbox
                   , Maybe SandboxPackageInfo
                   , GlobalFlags
                   , ConfigFlags
                   , ConfigExFlags
                   , InstallFlags
                   , HaddockFlags )
-- | Make an install context given install arguments.
-- 'Nothing' for the targets deliberately yields an empty target list; an
-- empty 'Just' list is interpreted as "install the current directory".
makeInstallContext :: Verbosity -> InstallArgs -> Maybe [UserTarget]
                   -> IO InstallContext
makeInstallContext verbosity
  (packageDBs, repos, comp, _, conf,_,_,
   globalFlags, _, _, _, _) mUserTargets = do

    installedPkgIndex <- getInstalledPackages verbosity comp packageDBs conf
    sourcePkgDb <- getSourcePackages verbosity repos

    (userTargets, pkgSpecifiers) <- case mUserTargets of
      Nothing ->
        -- We want to distinguish between the case where the user has given an
        -- empty list of targets on the command-line and the case where we
        -- specifically want to have an empty list of targets.
        return ([], [])
      Just userTargets0 -> do
        -- For install, if no target is given it means we use the current
        -- directory as the single target.
        let userTargets | null userTargets0 = [UserTargetLocalDir "."]
                        | otherwise         = userTargets0

        pkgSpecifiers <- resolveUserTargets verbosity
                         (fromFlag $ globalWorldFile globalFlags)
                         (packageIndex sourcePkgDb)
                         userTargets
        return (userTargets, pkgSpecifiers)

    return (installedPkgIndex, sourcePkgDb, userTargets, pkgSpecifiers)
-- | Make an install plan given install context and install arguments.
--
-- Picks the dependency solver requested by the configure flags, then runs
-- the resolver over the installed and source package indexes.
makeInstallPlan :: Verbosity -> InstallArgs -> InstallContext
                -> IO (Progress String String InstallPlan)
makeInstallPlan verbosity
  (_, _, comp, platform, _, _, mSandboxPkgInfo,
   _, configFlags, configExFlags, installFlags,
   _)
  (installedPkgIndex, sourcePkgDb,
   _, pkgSpecifiers) = do
    let requestedSolver = fromFlag (configSolver configExFlags)
    solver <- chooseSolver verbosity requestedSolver (compilerInfo comp)
    notice verbosity "Resolving dependencies..."
    let plan = planPackages comp platform mSandboxPkgInfo solver
                 configFlags configExFlags installFlags
                 installedPkgIndex sourcePkgDb pkgSpecifiers
    return plan
-- | Given an install plan, perform the actual installations.
--
-- First checks (and possibly prints) the plan; then, unless this is a dry
-- run or the plan has nothing ready to install, performs the installations
-- and runs the post-install actions on the resulting plan.
processInstallPlan :: Verbosity -> InstallArgs -> InstallContext
                   -> InstallPlan
                   -> IO ()
processInstallPlan verbosity
  args@(_,_, comp, _, _, _, _, _, _, _, installFlags, _)
  (installedPkgIndex, sourcePkgDb,
   userTargets, pkgSpecifiers) installPlan = do
  let isDryRun  = fromFlag (installDryRun installFlags)
      planEmpty = null (InstallPlan.ready installPlan)
  checkPrintPlan verbosity comp installedPkgIndex installPlan sourcePkgDb
    installFlags pkgSpecifiers
  when (not isDryRun && not planEmpty) $ do
    completedPlan <- performInstallations verbosity
                       args installedPkgIndex installPlan
    postInstallActions verbosity args userTargets completedPlan
-- ------------------------------------------------------------
-- * Installation planning
-- ------------------------------------------------------------
-- | Run the dependency solver over the installed and source package
-- indexes to produce an 'InstallPlan' for the requested targets,
-- translating the various command-line flags into resolver parameters
-- (preferences, constraints, stanzas, sandbox policy, etc).
planPackages :: Compiler
             -> Platform
             -> Maybe SandboxPackageInfo
             -> Solver
             -> ConfigFlags
             -> ConfigExFlags
             -> InstallFlags
             -> InstalledPackageIndex
             -> SourcePackageDb
             -> [PackageSpecifier SourcePackage]
             -> Progress String String InstallPlan
planPackages comp platform mSandboxPkgInfo solver
             configFlags configExFlags installFlags
             installedPkgIndex sourcePkgDb pkgSpecifiers =
        resolveDependencies
          platform (compilerInfo comp)
          solver
          resolverParams
    >>= if onlyDeps then pruneInstallPlan pkgSpecifiers else return
  where
    -- The resolver parameters are built up by composing modifiers onto
    -- the standard install policy; the function closest to
    -- 'standardInstallPolicy' is applied first.
    resolverParams =
        setMaxBackjumps (if maxBackjumps < 0 then Nothing
                                             else Just maxBackjumps)
      . setIndependentGoals independentGoals
      . setReorderGoals reorderGoals
      . setAvoidReinstalls avoidReinstalls
      . setShadowPkgs shadowPkgs
      . setStrongFlags strongFlags
      . setPreferenceDefault (if upgradeDeps then PreferAllLatest
                                             else PreferLatestForSelected)
      . removeUpperBounds allowNewer
      . addPreferences
          -- preferences from the config file or command line
          [ PackageVersionPreference name ver
          | Dependency name ver <- configPreferences configExFlags ]
      . addConstraints
          -- version constraints from the config file or command line
          (map userToPackageConstraint (configExConstraints configExFlags))
      . addConstraints
          --FIXME: this just applies all flags to all targets which
          -- is silly. We should check if the flags are appropriate
          [ PackageConstraintFlags (pkgSpecifierTarget pkgSpecifier) flags
          | let flags = configConfigurationsFlags configFlags
          , not (null flags)
          , pkgSpecifier <- pkgSpecifiers ]
      . addConstraints
          [ PackageConstraintStanzas (pkgSpecifierTarget pkgSpecifier) stanzas
          | pkgSpecifier <- pkgSpecifiers ]
      . maybe id applySandboxInstallPolicy mSandboxPkgInfo
      . (if reinstall then reinstallTargets else id)
      $ standardInstallPolicy
        installedPkgIndex sourcePkgDb pkgSpecifiers
    -- Optional stanzas (tests/benchmarks) to enable for all the targets.
    stanzas = concat
        [ if testsEnabled then [TestStanzas] else []
        , if benchmarksEnabled then [BenchStanzas] else []
        ]
    testsEnabled = fromFlagOrDefault False $ configTests configFlags
    benchmarksEnabled = fromFlagOrDefault False $ configBenchmarks configFlags
    -- Plain flag lookups; all of these are set by the command line or
    -- config-file defaults.
    reinstall        = fromFlag (installReinstall         installFlags)
    reorderGoals     = fromFlag (installReorderGoals      installFlags)
    independentGoals = fromFlag (installIndependentGoals  installFlags)
    avoidReinstalls  = fromFlag (installAvoidReinstalls   installFlags)
    shadowPkgs       = fromFlag (installShadowPkgs        installFlags)
    strongFlags      = fromFlag (installStrongFlags       installFlags)
    maxBackjumps     = fromFlag (installMaxBackjumps      installFlags)
    upgradeDeps      = fromFlag (installUpgradeDeps       installFlags)
    onlyDeps         = fromFlag (installOnlyDeps          installFlags)
    allowNewer       = fromFlag (configAllowNewer configExFlags)
-- | Remove the provided targets from the install plan.
pruneInstallPlan :: Package pkg => [PackageSpecifier pkg] -> InstallPlan
                 -> Progress String String InstallPlan
pruneInstallPlan pkgSpecifiers =
  -- TODO: this is a general feature and should be moved to D.C.Dependency
  -- Also, the InstallPlan.remove should return info more precise to the
  -- problem, rather than the very general PlanProblem type.
  either (Fail . explain) Done
  . InstallPlan.remove isRequestedTarget
  where
    targetNames = map pkgSpecifierTarget pkgSpecifiers

    -- True for the packages the user explicitly asked for; those are the
    -- ones removed when only dependencies are wanted.
    isRequestedTarget pkg = packageName pkg `elem` targetNames

    -- Explain why removal failed: some of the targets are themselves
    -- needed as dependencies of the remaining goals.
    explain :: [InstallPlan.PlanProblem] -> String
    explain problems =
         "Cannot select only the dependencies (as requested by the "
      ++ "'--only-dependencies' flag), "
      ++ (case stillNeeded of
            [pkgid] -> "the package " ++ display pkgid ++ " is "
            _       -> "the packages "
                       ++ intercalate ", " (map display stillNeeded) ++ " are ")
      ++ "required by a dependency of one of the other targets."
      where
        stillNeeded =
          nub [ depid
              | InstallPlan.PackageMissingDeps _ depids <- problems
              , depid <- depids
              , packageName depid `elem` targetNames ]
-- ------------------------------------------------------------
-- * Informational messages
-- ------------------------------------------------------------
-- | Perform post-solver checks of the install plan and print it if
-- either requested or needed.
checkPrintPlan :: Verbosity
               -> Compiler
               -> InstalledPackageIndex
               -> InstallPlan
               -> SourcePackageDb
               -> InstallFlags
               -> [PackageSpecifier SourcePackage]
               -> IO ()
checkPrintPlan verbosity comp installed installPlan sourcePkgDb
  installFlags pkgSpecifiers = do
  -- User targets that are already installed.
  let preExistingTargets =
        [ p | let tgts = map pkgSpecifierTarget pkgSpecifiers,
              InstallPlan.PreExisting p <- InstallPlan.toList installPlan,
              packageName p `elem` tgts ]
  -- If there's nothing to install, we print the already existing
  -- target packages as an explanation.
  when nothingToInstall $
    notice verbosity $ unlines $
         "All the requested packages are already installed:"
       : map (display . packageId) preExistingTargets
      ++ ["Use --reinstall if you want to reinstall anyway."]
  let lPlan = linearizeInstallPlan comp installed installPlan
  -- Are any packages classified as reinstalls?
  let reinstalledPkgs = concatMap (extractReinstalls . snd) lPlan
  -- Packages that are already broken.
  let oldBrokenPkgs =
          map Installed.installedPackageId
        . PackageIndex.reverseDependencyClosure installed
        . map (Installed.installedPackageId . fst)
        . PackageIndex.brokenPackages
        $ installed
  -- Already-broken packages and the reinstall victims themselves are not
  -- counted as *newly* broken below.
  let excluded = reinstalledPkgs ++ oldBrokenPkgs
  -- Packages that are reverse dependencies of replaced packages are very
  -- likely to be broken. We exclude packages that are already broken.
  let newBrokenPkgs =
        filter (\ p -> not (Installed.installedPackageId p `elem` excluded))
               (PackageIndex.reverseDependencyClosure installed reinstalledPkgs)
  let containsReinstalls = not (null reinstalledPkgs)
  let breaksPkgs = not (null newBrokenPkgs)
  -- Force at least 'verbose' output for a dangerous (reinstalling) plan
  -- that was not explicitly forced with --force-reinstalls.
  let adaptedVerbosity
        | containsReinstalls && not overrideReinstall = verbosity `max` verbose
        | otherwise = verbosity
  -- We print the install plan if we are in a dry-run or if we are confronted
  -- with a dangerous install plan.
  when (dryRun || containsReinstalls && not overrideReinstall) $
    printPlan (dryRun || breaksPkgs && not overrideReinstall)
      adaptedVerbosity lPlan sourcePkgDb
  -- If the install plan is dangerous, we print various warning messages. In
  -- particular, if we can see that packages are likely to be broken, we even
  -- bail out (unless installation has been forced with --force-reinstalls).
  when containsReinstalls $ do
    if breaksPkgs
      then do
        (if dryRun || overrideReinstall then warn verbosity else die) $ unlines $
            "The following packages are likely to be broken by the reinstalls:"
          : map (display . Installed.sourcePackageId) newBrokenPkgs
          ++ if overrideReinstall
               then if dryRun then [] else
                 ["Continuing even though the plan contains dangerous reinstalls."]
               else
                 ["Use --force-reinstalls if you want to install anyway."]
      else unless dryRun $ warn verbosity
             "Note that reinstalls are always dangerous. Continuing anyway..."
  where
    nothingToInstall = null (InstallPlan.ready installPlan)
    dryRun = fromFlag (installDryRun installFlags)
    overrideReinstall = fromFlag (installOverrideReinstall installFlags)
-- | Flatten an install plan into the linear order in which packages would
-- be installed, pairing each package with its 'PackageStatus'.
--
-- To advance the plan we pretend each package installs successfully (see
-- the FIXME below); the resulting list is used only for display/analysis.
linearizeInstallPlan :: Compiler
                     -> InstalledPackageIndex
                     -> InstallPlan
                     -> [(ReadyPackage, PackageStatus)]
linearizeInstallPlan comp installedPkgIndex plan =
    unfoldr next plan
  where
    -- Take one ready package off the plan, emit it, and mark it completed
    -- so its dependents become ready in the next step.
    next plan' = case InstallPlan.ready plan' of
      [] -> Nothing
      (pkg:_) -> Just ((pkg, status), plan'')
        where
          pkgid = installedPackageId pkg
          status = packageStatus comp installedPkgIndex pkg
          plan'' = InstallPlan.completed pkgid
                     (BuildOk DocsNotTried TestsNotTried
                       (Just $ Installed.emptyInstalledPackageInfo
                        { Installed.sourcePackageId = packageId pkg
                        , Installed.installedPackageId = pkgid }))
                     (InstallPlan.processing [pkg] plan')
          --FIXME: This is a bit of a hack,
          -- pretending that each package is installed
          -- It's doubly a hack because the installed package ID
          -- didn't get updated...
-- | How a package about to be installed relates to what is already
-- installed.
data PackageStatus = NewPackage -- ^ no version of this package is installed
                   | NewVersion [Version] -- ^ other installed versions exist
                   | Reinstall [InstalledPackageId] [PackageChange]
                   -- ^ this exact version is installed: the ids that would
                   -- be replaced plus the dependency changes involved

-- | A change in a package dependency between the currently-installed and
-- the newly-configured version of a package.
type PackageChange = MergeResult PackageIdentifier PackageIdentifier
-- | The installed-package ids that a 'Reinstall' would replace; empty for
-- the 'NewPackage' and 'NewVersion' statuses.
extractReinstalls :: PackageStatus -> [InstalledPackageId]
extractReinstalls status = case status of
  Reinstall ipids _ -> ipids
  _                 -> []
-- | Classify a to-be-installed package against the installed package
-- index: brand new, a new version, or a reinstall of an existing version
-- (in which case the dependency changes are computed too).
packageStatus :: Compiler -> InstalledPackageIndex -> ReadyPackage -> PackageStatus
packageStatus _comp installedPkgIndex cpkg =
  case PackageIndex.lookupPackageName installedPkgIndex
                                      (packageName cpkg) of
    [] -> NewPackage
    -- Some versions are installed; check whether this exact version is.
    ps -> case filter ((== packageId cpkg)
                       . Installed.sourcePackageId) (concatMap snd ps) of
      [] -> NewVersion (map fst ps)
      pkgs@(pkg:_) -> Reinstall (map Installed.installedPackageId pkgs)
                               (changes pkg cpkg)
  where
    -- Diff of (source) package dependencies between the installed copy
    -- and the newly configured one, keeping only entries that differ.
    changes :: Installed.InstalledPackageInfo
            -> ReadyPackage
            -> [MergeResult PackageIdentifier PackageIdentifier]
    changes pkg pkg' =
      filter changed
      $ mergeBy (comparing packageName)
        -- get dependencies of installed package (convert to source pkg ids via
        -- index)
        (nub . sort . concatMap
         (maybeToList . fmap Installed.sourcePackageId .
          PackageIndex.lookupInstalledPackageId installedPkgIndex) .
         Installed.depends $ pkg)
        -- get dependencies of configured package
        (nub . sort . depends $ pkg')
    -- A dependency present on both sides only counts when the versions
    -- differ; one present on a single side always counts.
    changed (InBoth pkgid pkgid') = pkgid /= pkgid'
    changed _ = True
-- | Print the linearized install plan, one package per line. At verbose
-- level each entry also shows the reason (new/new version/reinstall),
-- non-default flags, enabled stanzas and dependency changes.
printPlan :: Bool -- is dry run
          -> Verbosity
          -> [(ReadyPackage, PackageStatus)]
          -> SourcePackageDb
          -> IO ()
printPlan dryRun verbosity plan sourcePkgDb = case plan of
  [] -> return ()
  pkgs
    | verbosity >= Verbosity.verbose -> notice verbosity $ unlines $
        ("In order, the following " ++ wouldWill ++ " be installed:")
      : map showPkgAndReason pkgs
    | otherwise -> notice verbosity $ unlines $
        ("In order, the following " ++ wouldWill
         ++ " be installed (use -v for more details):")
      : map showPkg pkgs
  where
    wouldWill | dryRun = "would"
              | otherwise = "will"
    -- Terse form: package id, plus a note if a newer version exists.
    showPkg (pkg, _) = display (packageId pkg) ++
                       showLatest (pkg)
    -- Verbose form: also flags, stanzas and the status/reason.
    showPkgAndReason (pkg', pr) = display (packageId pkg') ++
          showLatest pkg' ++
          showFlagAssignment (nonDefaultFlags pkg') ++
          showStanzas (stanzas pkg') ++ " " ++
          case pr of
            NewPackage -> "(new package)"
            NewVersion _ -> "(new version)"
            Reinstall _ cs -> "(reinstall)" ++ case cs of
                [] -> ""
                diff -> " changes: " ++ intercalate ", " (map change diff)
    -- Annotate with the newest version available in the source index, if
    -- the one being installed is older.
    showLatest :: ReadyPackage -> String
    showLatest pkg = case mLatestVersion of
        Just latestVersion ->
            if packageVersion pkg < latestVersion
            then (" (latest: " ++ display latestVersion ++ ")")
            else ""
        Nothing -> ""
      where
        mLatestVersion :: Maybe Version
        mLatestVersion = case SourcePackageIndex.lookupPackageName
                                (packageIndex sourcePkgDb)
                                (packageName pkg) of
            [] -> Nothing
            x -> Just $ packageVersion $ last x
    toFlagAssignment :: [Flag] -> FlagAssignment
    toFlagAssignment = map (\ f -> (flagName f, flagDefault f))
    -- Only show flags that differ from the package's declared defaults.
    nonDefaultFlags :: ReadyPackage -> FlagAssignment
    nonDefaultFlags (ReadyPackage spkg fa _ _) =
      let defaultAssignment =
            toFlagAssignment
             (genPackageFlags (Source.packageDescription spkg))
      in fa \\ defaultAssignment
    stanzas :: ReadyPackage -> [OptionalStanza]
    stanzas (ReadyPackage _ _ sts _) = sts
    showStanzas :: [OptionalStanza] -> String
    showStanzas = concatMap ((' ' :) . showStanza)
    showStanza TestStanzas = "*test"
    showStanza BenchStanzas = "*bench"
    -- FIXME: this should be a proper function in a proper place
    showFlagAssignment :: FlagAssignment -> String
    showFlagAssignment = concatMap ((' ' :) . showFlagValue)
    showFlagValue (f, True) = '+' : showFlagName f
    showFlagValue (f, False) = '-' : showFlagName f
    showFlagName (FlagName f) = f
    -- Render one dependency change of a reinstall.
    change (OnlyInLeft pkgid) = display pkgid ++ " removed"
    change (InBoth pkgid pkgid') = display pkgid ++ " -> "
      ++ display (packageVersion pkgid')
    change (OnlyInRight pkgid') = display pkgid' ++ " added"
-- ------------------------------------------------------------
-- * Post installation stuff
-- ------------------------------------------------------------
-- | Report a solver failure. This works slightly differently to
-- 'postInstallActions', as (by definition) we don't have an install plan.
reportPlanningFailure :: Verbosity -> InstallArgs -> InstallContext -> String -> IO ()
reportPlanningFailure verbosity
  (_, _, comp, platform, _, _, _
  ,_, configFlags, _, installFlags, _)
  (_, sourcePkgDb, _, pkgSpecifiers)
  message = do
  -- Do nothing unless --report-planning-failure was given.
  when reportFailure $ do
    -- Only create reports for explicitly named packages
    let pkgids =
          filter (SourcePackageIndex.elemByPackageId (packageIndex sourcePkgDb)) $
          mapMaybe theSpecifiedPackage pkgSpecifiers
        buildReports = BuildReports.fromPlanningFailure platform (compilerId comp)
                       pkgids (configConfigurationsFlags configFlags)
    when (not (null buildReports)) $
      info verbosity $
        "Solver failure will be reported for "
        ++ intercalate "," (map display pkgids)
    -- Save reports
    BuildReports.storeLocal (compilerInfo comp)
      (fromNubList $ installSummaryFile installFlags) buildReports platform
    -- Save solver log
    case logFile of
      Nothing -> return ()
      Just template -> forM_ pkgids $ \pkgid ->
        -- The solver's failure message is written to the per-package log
        -- file location derived from the --build-log template.
        let env = initialPathTemplateEnv pkgid dummyPackageKey
                    (compilerInfo comp) platform
            path = fromPathTemplate $ substPathTemplate env template
        in writeFile path message
  where
    reportFailure = fromFlag (installReportPlanningFailure installFlags)
    logFile = flagToMaybe (installLogFile installFlags)
    -- A PackageKey is calculated from the transitive closure of
    -- dependencies, but when the solver fails we don't have that.
    -- So we fail.
    dummyPackageKey = error "reportPlanningFailure: package key not available"
-- | If a 'PackageSpecifier' refers to a single package, return Just that package.
theSpecifiedPackage :: Package pkg => PackageSpecifier pkg -> Maybe PackageId
theSpecifiedPackage (SpecificSourcePackage pkg) = Just (packageId pkg)
theSpecifiedPackage (NamedPackage name constraints) =
  case constraints of
    -- A named package is only "specific" if it is pinned to exactly one
    -- version by a single version constraint on the same name.
    [PackageConstraintVersion name' versionRange]
      | name == name' -> PackageIdentifier name <$> exactVersion versionRange
    _ -> Nothing
  where
    -- A version range picks out a single version only for "== v"; every
    -- other range constructor collapses to Nothing.
    exactVersion :: VersionRange -> Maybe Version
    exactVersion = foldVersionRange
        Nothing           -- "-any"
        Just              -- "== v"
        (\_ -> Nothing)   -- "> v"
        (\_ -> Nothing)   -- "< v"
        (\_ _ -> Nothing) -- union
        (\_ _ -> Nothing) -- intersection
-- | Various stuff we do after successful or unsuccessfully installing a bunch
-- of packages. This includes:
--
-- * build reporting, local and remote
-- * symlinking binaries
-- * updating indexes
-- * updating world file
-- * error reporting
--
postInstallActions :: Verbosity
                   -> InstallArgs
                   -> [UserTarget]
                   -> InstallPlan
                   -> IO ()
postInstallActions verbosity
  (packageDBs, _, comp, platform, conf, useSandbox, mSandboxPkgInfo
  ,globalFlags, configFlags, _, installFlags, _)
  targets installPlan = do
  -- Record the named targets in the world file (unless --one-shot).
  unless oneShot $
    World.insert verbosity worldFile
      --FIXME: does not handle flags
      [ World.WorldPkgInfo dep []
      | UserTargetNamed dep <- targets ]
  -- Store build reports locally, and optionally anonymously/in detail
  -- depending on the configured reporting level.
  let buildReports = BuildReports.fromInstallPlan installPlan
  BuildReports.storeLocal (compilerInfo comp) (fromNubList $ installSummaryFile installFlags) buildReports
    (InstallPlan.planPlatform installPlan)
  when (reportingLevel >= AnonymousReports) $
    BuildReports.storeAnonymous buildReports
  when (reportingLevel == DetailedReports) $
    storeDetailedBuildReports verbosity logsDir buildReports
  -- Refresh the haddock index, symlink executables, report any failures
  -- and update sandbox add-source timestamps.
  regenerateHaddockIndex verbosity packageDBs comp platform conf useSandbox
    configFlags installFlags installPlan
  symlinkBinaries verbosity comp configFlags installFlags installPlan
  printBuildFailures installPlan
  updateSandboxTimestampsFile useSandbox mSandboxPkgInfo
    comp platform installPlan
  where
    reportingLevel = fromFlag (installBuildReports installFlags)
    logsDir = fromFlag (globalLogsDir globalFlags)
    oneShot = fromFlag (installOneShot installFlags)
    worldFile = fromFlag $ globalWorldFile globalFlags
-- | Copy detailed build reports (report + build log) into the per-remote-repo
-- reports directory under the user's .cabal dir. Reports are only kept for
-- packages from remote repos whose outcome is likely to have produced a log.
storeDetailedBuildReports :: Verbosity -> FilePath
                          -> [(BuildReports.BuildReport, Maybe Repo)] -> IO ()
storeDetailedBuildReports verbosity logsDir reports = sequence_
  [ do dotCabal <- defaultCabalDir
       let logFileName = display (BuildReports.package report) <.> "log"
           logFile = logsDir </> logFileName
           reportsDir = dotCabal </> "reports" </> remoteRepoName remoteRepo
           reportFile = reportsDir </> logFileName
       handleMissingLogFile $ do
         buildLog <- readFile logFile
         createDirectoryIfMissing True reportsDir -- FIXME
         writeFile reportFile (show (BuildReports.show report, buildLog))
  -- Only reports coming from a remote repo (Left) qualify.
  | (report, Just Repo { repoKind = Left remoteRepo }) <- reports
  , isLikelyToHaveLogFile (BuildReports.installOutcome report) ]
  where
    -- Outcomes for which a build log should exist on disk.
    isLikelyToHaveLogFile BuildReports.ConfigureFailed {} = True
    isLikelyToHaveLogFile BuildReports.BuildFailed {} = True
    isLikelyToHaveLogFile BuildReports.InstallFailed {} = True
    isLikelyToHaveLogFile BuildReports.InstallOk {} = True
    isLikelyToHaveLogFile _ = False
    -- A missing log file is only worth a warning, not a failure.
    handleMissingLogFile = Exception.handleJust missingFile $ \ioe ->
      warn verbosity $ "Missing log file for build report: "
                       ++ fromMaybe "" (ioeGetFileName ioe)
    missingFile ioe
      | isDoesNotExistError ioe = Just ioe
    missingFile _ = Nothing
-- | Regenerate the per-user (or per-sandbox) haddock documentation index
-- after an install, but only when an index file was requested and new
-- documentation was actually installed.
regenerateHaddockIndex :: Verbosity
                       -> [PackageDB]
                       -> Compiler
                       -> Platform
                       -> ProgramConfiguration
                       -> UseSandbox
                       -> ConfigFlags
                       -> InstallFlags
                       -> InstallPlan
                       -> IO ()
regenerateHaddockIndex verbosity packageDBs comp platform conf useSandbox
                       configFlags installFlags installPlan
  | haddockIndexFileIsRequested && shouldRegenerateHaddockIndex = do
  defaultDirs <- InstallDirs.defaultInstallDirs
                   (compilerFlavor comp)
                   (fromFlag (configUserInstall configFlags))
                   True
  let indexFileTemplate = fromFlag (installHaddockIndex installFlags)
      indexFile = substHaddockIndexFileName defaultDirs indexFileTemplate
  notice verbosity $
    "Updating documentation index " ++ indexFile
  --TODO: might be nice if the install plan gave us the new InstalledPackageInfo
  installedPkgIndex <- getInstalledPackages verbosity comp packageDBs conf
  Haddock.regenerateHaddockIndex verbosity installedPkgIndex conf indexFile
  | otherwise = return ()
  where
    haddockIndexFileIsRequested =
      fromFlag (installDocumentation installFlags)
      && isJust (flagToMaybe (installHaddockIndex installFlags))
    -- We want to regenerate the index if some new documentation was actually
    -- installed. Since the index can be only per-user or per-sandbox (see
    -- #1337), we don't do it for global installs or special cases where we're
    -- installing into a specific db.
    shouldRegenerateHaddockIndex = (isUseSandbox useSandbox || normalUserInstall)
                                   && someDocsWereInstalled installPlan
      where
        someDocsWereInstalled = any installedDocs . InstallPlan.toList
        normalUserInstall = (UserPackageDB `elem` packageDBs)
                            && all (not . isSpecificPackageDB) packageDBs
        installedDocs (InstallPlan.Installed _ (BuildOk DocsOk _ _)) = True
        installedDocs _ = False
        isSpecificPackageDB (SpecificPackageDB _) = True
        isSpecificPackageDB _ = False
    -- Expand the haddock index path template with the compiler, platform
    -- and (absolute) install-dir variables.
    substHaddockIndexFileName defaultDirs = fromPathTemplate
                                          . substPathTemplate env
      where
        env = env0 ++ installDirsTemplateEnv absoluteDirs
        env0 = InstallDirs.compilerTemplateEnv (compilerInfo comp)
               ++ InstallDirs.platformTemplateEnv platform
               ++ InstallDirs.abiTemplateEnv (compilerInfo comp) platform
        absoluteDirs = InstallDirs.substituteInstallDirTemplates
                         env0 templateDirs
        templateDirs = InstallDirs.combineInstallDirs fromFlagOrDefault
                         defaultDirs (configInstallDirs configFlags)
-- | Create symlinks to the installed executables in the configured bindir,
-- warning (but not failing) about any that could not be created because a
-- non-cabal-managed file already exists there.
symlinkBinaries :: Verbosity
                -> Compiler
                -> ConfigFlags
                -> InstallFlags
                -> InstallPlan -> IO ()
symlinkBinaries verbosity comp configFlags installFlags plan = do
  failed <- InstallSymlink.symlinkBinaries comp configFlags installFlags plan
  case failed of
    [] -> return ()
    -- Singular message for exactly one clash, plural otherwise.
    [(_, exe, path)] ->
      warn verbosity $
        "could not create a symlink in " ++ bindir ++ " for "
        ++ exe ++ " because the file exists there already but is not "
        ++ "managed by cabal. You can create a symlink for this executable "
        ++ "manually if you wish. The executable file has been installed at "
        ++ path
    exes ->
      warn verbosity $
        "could not create symlinks in " ++ bindir ++ " for "
        ++ intercalate ", " [ exe | (_, exe, _) <- exes ]
        ++ " because the files exist there already and are not "
        ++ "managed by cabal. You can create symlinks for these executables "
        ++ "manually if you wish. The executable files have been installed at "
        ++ intercalate ", " [ path | (_, _, path) <- exes ]
  where
    bindir = fromFlag (installSymlinkBinDir installFlags)
-- | If any packages in the plan failed, die with a message listing each
-- failed package and a human-readable reason for its failure.
printBuildFailures :: InstallPlan -> IO ()
printBuildFailures plan =
  case [ (pkg, reason)
       | InstallPlan.Failed pkg reason <- InstallPlan.toList plan ] of
    [] -> return ()
    failed -> die . unlines
            $ "Error: some packages failed to install:"
            : [ display (packageId pkg) ++ printFailureReason reason
              | (pkg, reason) <- failed ]
  where
    -- One suffix phrase per 'BuildFailure' constructor.
    printFailureReason reason = case reason of
      DependentFailed pkgid -> " depends on " ++ display pkgid
        ++ " which failed to install."
      DownloadFailed e -> " failed while downloading the package."
        ++ showException e
      UnpackFailed e -> " failed while unpacking the package."
        ++ showException e
      ConfigureFailed e -> " failed during the configure step."
        ++ showException e
      BuildFailed e -> " failed during the building phase."
        ++ showException e
      TestsFailed e -> " failed during the tests phase."
        ++ showException e
      InstallFailed e -> " failed during the final install step."
        ++ showException e
      -- This will never happen, but we include it for completeness
      PlanningFailed -> " failed during the planning phase."
    showException e = " The exception was:\n " ++ show e ++ maybeOOM e
-- On Windows there is no SIGKILL, so no out-of-memory hint is possible.
#ifdef mingw32_HOST_OS
    maybeOOM _ = ""
#else
    -- Exit code 9 / -9 usually means the process was killed (e.g. by the
    -- OOM killer), so add a hint about memory exhaustion.
    maybeOOM e = maybe "" onExitFailure (fromException e)
    onExitFailure (ExitFailure n)
      | n == 9 || n == -9 =
        "\nThis may be due to an out-of-memory condition."
    onExitFailure _ = ""
#endif
-- | If we're working inside a sandbox and some add-source deps were installed,
-- update the timestamps of those deps.
updateSandboxTimestampsFile :: UseSandbox -> Maybe SandboxPackageInfo
                            -> Compiler -> Platform -> InstallPlan
                            -> IO ()
updateSandboxTimestampsFile (UseSandbox sandboxDir)
  (Just (SandboxPackageInfo _ _ _ allAddSourceDeps))
  comp platform installPlan =
  withUpdateTimestamps sandboxDir (compilerId comp) platform $ \_ -> do
    -- Collect the local source directories of all successfully installed
    -- packages, then keep only those registered as add-source deps.
    let allInstalled = [ pkg | InstallPlan.Installed pkg _
                        <- InstallPlan.toList installPlan ]
        allSrcPkgs = [ pkg | ReadyPackage pkg _ _ _ <- allInstalled ]
        allPaths = [ pth | LocalUnpackedPackage pth
                    <- map packageSource allSrcPkgs]
    allPathsCanonical <- mapM tryCanonicalizePath allPaths
    -- '$!' forces the filtered list before the timestamps file is written.
    return $! filter (`S.member` allAddSourceDeps) allPathsCanonical
-- Not in a sandbox, or no sandbox package info: nothing to update.
updateSandboxTimestampsFile _ _ _ _ _ = return ()
-- ------------------------------------------------------------
-- * Actually do the installations
-- ------------------------------------------------------------
-- | Miscellaneous installation options that don't fit elsewhere: an
-- optional command used to gain root privileges for the install step, and
-- an optional preferred version of the Cabal library to build with.
data InstallMisc = InstallMisc {
    rootCmd    :: Maybe FilePath,
    libVersion :: Maybe Version
  }
-- | If logging is enabled, contains location of the log file and the verbosity
-- level for logging.
--
-- The function computes the log file path for a given package id and key.
type UseLogFile = Maybe (PackageIdentifier -> PackageKey -> FilePath, Verbosity)
-- | Drive the actual installations: set up job control, locks and rate
-- limits, then execute the install plan, fetching/unpacking/building each
-- ready package in turn (possibly in parallel).
performInstallations :: Verbosity
                     -> InstallArgs
                     -> InstalledPackageIndex
                     -> InstallPlan
                     -> IO InstallPlan
performInstallations verbosity
  (packageDBs, _, comp, _, conf, useSandbox, _,
   globalFlags, configFlags, configExFlags, installFlags, haddockFlags)
  installedPkgIndex installPlan = do
  -- With 'install -j' it can be a bit hard to tell whether a sandbox is used.
  whenUsingSandbox useSandbox $ \sandboxDir ->
    when parallelInstall $
      notice verbosity $ "Notice: installing into a sandbox located at "
                         ++ sandboxDir
  jobControl <- if parallelInstall then newParallelJobControl
                                   else newSerialJobControl
  buildLimit <- newJobLimit numJobs
  fetchLimit <- newJobLimit (min numJobs numFetchJobs)
  installLock <- newLock -- serialise installation
  cacheLock <- newLock -- serialise access to setup exe cache
  -- Per-package pipeline: fetch, unpack locally, then configure/build/
  -- install the unpacked package.
  executeInstallPlan verbosity comp jobControl useLogFile installPlan $ \rpkg ->
    -- Calculate the package key (ToDo: Is this right for source install)
    let pkg_key = readyPackageKey comp rpkg in
    installReadyPackage platform cinfo configFlags
                        rpkg $ \configFlags' src pkg pkgoverride ->
      fetchSourcePackage verbosity fetchLimit src $ \src' ->
        installLocalPackage verbosity buildLimit
                            (packageId pkg) src' distPref $ \mpath ->
          installUnpackedPackage verbosity buildLimit installLock numJobs pkg_key
                                 (setupScriptOptions installedPkgIndex cacheLock)
                                 miscOptions configFlags' installFlags haddockFlags
                                 cinfo platform pkg pkgoverride mpath useLogFile
  where
    platform = InstallPlan.planPlatform installPlan
    cinfo = InstallPlan.planCompiler installPlan
    numJobs = determineNumJobs (installNumJobs installFlags)
    -- Cap concurrent downloads independently of the build job count.
    numFetchJobs = 2
    parallelInstall = numJobs >= 2
    distPref = fromFlagOrDefault (useDistPref defaultSetupScriptOptions)
               (configDistPref configFlags)
    -- Options for running each package's Setup.hs.
    setupScriptOptions index lock = SetupScriptOptions {
      useCabalVersion = chooseCabalVersion configExFlags
                        (libVersion miscOptions),
      useCompiler = Just comp,
      usePlatform = Just platform,
      -- Hack: we typically want to allow the UserPackageDB for finding the
      -- Cabal lib when compiling any Setup.hs even if we're doing a global
      -- install. However we also allow looking in a specific package db.
      usePackageDB = if UserPackageDB `elem` packageDBs
                       then packageDBs
                       else let (db@GlobalPackageDB:dbs) = packageDBs
                            in db : UserPackageDB : dbs,
      --TODO: use Ord instance:
      -- insert UserPackageDB packageDBs
      usePackageIndex = if UserPackageDB `elem` packageDBs
                          then Just index
                          else Nothing,
      useProgramConfig = conf,
      useDistPref = distPref,
      useLoggingHandle = Nothing,
      useWorkingDir = Nothing,
      forceExternalSetupMethod = parallelInstall,
      useWin32CleanHack = False,
      setupCacheLock = Just lock
    }
    reportingLevel = fromFlag (installBuildReports installFlags)
    logsDir = fromFlag (globalLogsDir globalFlags)
    -- Should the build output be written to a log file instead of stdout?
    useLogFile :: UseLogFile
    useLogFile = fmap ((\f -> (f, loggingVerbosity)) . substLogFileName)
                 logFileTemplate
      where
        installLogFile' = flagToMaybe $ installLogFile installFlags
        defaultTemplate = toPathTemplate $ logsDir </> "$pkgid" <.> "log"
        -- If the user has specified --remote-build-reporting=detailed, use the
        -- default log file location. If the --build-log option is set, use the
        -- provided location. Otherwise don't use logging, unless building in
        -- parallel (in which case the default location is used).
        logFileTemplate :: Maybe PathTemplate
        logFileTemplate
          | useDefaultTemplate = Just defaultTemplate
          | otherwise = installLogFile'
        -- If the user has specified --remote-build-reporting=detailed or
        -- --build-log, use more verbose logging.
        loggingVerbosity :: Verbosity
        loggingVerbosity | overrideVerbosity = max Verbosity.verbose verbosity
                         | otherwise = verbosity
        useDefaultTemplate :: Bool
        useDefaultTemplate
          | reportingLevel == DetailedReports = True
          | isJust installLogFile' = False
          | parallelInstall = True
          | otherwise = False
        overrideVerbosity :: Bool
        overrideVerbosity
          | reportingLevel == DetailedReports = True
          | isJust installLogFile' = True
          | parallelInstall = False
          | otherwise = False
    -- Expand the log file path template for a particular package.
    substLogFileName :: PathTemplate -> PackageIdentifier -> PackageKey -> FilePath
    substLogFileName template pkg pkg_key = fromPathTemplate
                                          . substPathTemplate env
                                          $ template
      where env = initialPathTemplateEnv (packageId pkg) pkg_key
                  (compilerInfo comp) platform
    miscOptions = InstallMisc {
      rootCmd = if fromFlag (configUserInstall configFlags)
                   || (isUseSandbox useSandbox)
                  then Nothing -- ignore --root-cmd if --user
                               -- or working inside a sandbox.
                  else flagToMaybe (installRootCmd installFlags),
      libVersion = flagToMaybe (configCabalVersion configExFlags)
    }
-- | Walk the install plan, spawning an install job for every ready package
-- and collecting results until the plan is exhausted, updating the plan
-- with each success or failure as it comes in.
executeInstallPlan :: Verbosity
                   -> Compiler
                   -> JobControl IO (PackageId, PackageKey, BuildResult)
                   -> UseLogFile
                   -> InstallPlan
                   -> (ReadyPackage -> IO BuildResult)
                   -> IO InstallPlan
executeInstallPlan verbosity comp jobCtl useLogFile plan0 installPkg =
    tryNewTasks 0 plan0
  where
    -- Spawn jobs for every ready package; when nothing is ready and no
    -- tasks remain outstanding the plan is finished.
    tryNewTasks taskCount plan = do
      case InstallPlan.ready plan of
        [] | taskCount == 0 -> return plan
           | otherwise -> waitForTasks taskCount plan
        pkgs -> do
          sequence_
            [ do info verbosity $ "Ready to install " ++ display pkgid
                 spawnJob jobCtl $ do
                   buildResult <- installPkg pkg
                   return (packageId pkg, pkg_key, buildResult)
            | pkg <- pkgs
            , let pkgid = packageId pkg
                  pkg_key = readyPackageKey comp pkg ]
          let taskCount' = taskCount + length pkgs
              plan' = InstallPlan.processing pkgs plan
          waitForTasks taskCount' plan'
    -- Block on one finished job, report it, fold it into the plan, and
    -- look for newly-ready packages.
    waitForTasks taskCount plan = do
      info verbosity $ "Waiting for install task to finish..."
      (pkgid, pkg_key, buildResult) <- collectJob jobCtl
      printBuildResult pkgid pkg_key buildResult
      let taskCount' = taskCount-1
          plan' = updatePlan pkgid buildResult plan
      tryNewTasks taskCount' plan'
    updatePlan :: PackageIdentifier -> BuildResult -> InstallPlan -> InstallPlan
    updatePlan pkgid (Right buildSuccess) =
      InstallPlan.completed (Source.fakeInstalledPackageId pkgid) buildSuccess
    updatePlan pkgid (Left buildFailure) =
      InstallPlan.failed (Source.fakeInstalledPackageId pkgid) buildFailure depsFailure
      where
        depsFailure = DependentFailed pkgid
        -- So this first pkgid failed for whatever reason (buildFailure).
        -- All the other packages that depended on this pkgid, which we
        -- now cannot build, we mark as failing due to 'DependentFailed'
        -- which kind of means it was not their fault.
    -- Print build log if something went wrong, and 'Installed $PKGID'
    -- otherwise.
    printBuildResult :: PackageId -> PackageKey -> BuildResult -> IO ()
    printBuildResult pkgid pkg_key buildResult = case buildResult of
        (Right _) -> notice verbosity $ "Installed " ++ display pkgid
        (Left _) -> do
          notice verbosity $ "Failed to install " ++ display pkgid
          when (verbosity >= normal) $
            case useLogFile of
              Nothing -> return ()
              Just (mkLogFileName, _) -> do
                let logName = mkLogFileName pkgid pkg_key
                putStr $ "Build log ( " ++ logName ++ " ):\n"
                printFile logName
    printFile :: FilePath -> IO ()
    printFile path = readFile path >>= putStr
-- | Call an installer for an 'SourcePackage' but override the configure
-- flags with the ones given by the 'ReadyPackage'. In particular the
-- 'ReadyPackage' specifies an exact 'FlagAssignment' and exactly
-- versioned package dependencies. So we ignore any previous partial flag
-- assignment or dependency constraints and use the new ones.
--
-- NB: when updating this function, don't forget to also update
-- 'configurePackage' in D.C.Configure.
installReadyPackage :: Platform -> CompilerInfo
                       -> ConfigFlags
                       -> ReadyPackage
                       -> (ConfigFlags -> PackageLocation (Maybe FilePath)
                                       -> PackageDescription
                                       -> PackageDescriptionOverride -> a)
                       -> a
installReadyPackage platform cinfo configFlags
  (ReadyPackage (SourcePackage _ gpkg source pkgoverride)
  flags stanzas deps)
  installPkg = installPkg configFlags {
    configConfigurationsFlags = flags,
    -- We generate the legacy constraints as well as the new style precise deps.
    -- In the end only one set gets passed to Setup.hs configure, depending on
    -- the Cabal version we are talking to.
    configConstraints = [ thisPackageVersion (packageId deppkg)
                        | deppkg <- deps ],
    configDependencies = [ (packageName (Installed.sourcePackageId deppkg),
                            Installed.installedPackageId deppkg)
                         | deppkg <- deps ],
    -- Use '--exact-configuration' if supported.
    configExactConfiguration = toFlag True,
    -- Benchmarks are never built during install; tests only when the
    -- 'TestStanzas' stanza was selected by the solver.
    configBenchmarks = toFlag False,
    configTests = toFlag (TestStanzas `elem` stanzas)
  } source pkg pkgoverride
  where
    -- Finalize with the exact flag assignment; by this point the solver has
    -- guaranteed a consistent configuration, so failure here is a bug.
    pkg = case finalizePackageDescription flags
           (const True)
           platform cinfo [] (enableStanzas stanzas gpkg) of
      Left _ -> error "finalizePackageDescription ReadyPackage failed"
      Right (desc, _) -> desc
-- | Ensure the package's source is available locally, downloading it first
-- if necessary (within the fetch job limit), then run the installer on the
-- concrete local location.
fetchSourcePackage
  :: Verbosity
  -> JobLimit
  -> PackageLocation (Maybe FilePath)
  -> (PackageLocation FilePath -> IO BuildResult)
  -> IO BuildResult
fetchSourcePackage verbosity fetchLimit src installPkg =
    checkFetched src >>= maybe fetchThenInstall installPkg
  where
    -- Not yet available locally: download (errors become 'DownloadFailed'),
    -- then hand the fetched location to the installer.
    fetchThenInstall =
      onFailure DownloadFailed $ do
        loc <- withJobLimit fetchLimit $ fetchPackage verbosity src
        installPkg loc
-- | Dispatch on the kind of local package location: an already-unpacked
-- directory is installed in place, while every flavour of tarball is first
-- unpacked via 'installLocalTarballPackage'.
installLocalPackage
  :: Verbosity
  -> JobLimit
  -> PackageIdentifier -> PackageLocation FilePath -> FilePath
  -> (Maybe FilePath -> IO BuildResult)
  -> IO BuildResult
installLocalPackage verbosity jobLimit pkgid location distPref installPkg =
    case location of
      LocalUnpackedPackage dir           -> installPkg (Just dir)
      LocalTarballPackage  tarballPath   -> unpackFirst tarballPath
      RemoteTarballPackage _ tarballPath -> unpackFirst tarballPath
      RepoTarballPackage _ _ tarballPath -> unpackFirst tarballPath
  where
    -- All tarball variants are handled identically once we have the path.
    unpackFirst tarballPath =
      installLocalTarballPackage verbosity jobLimit
        pkgid tarballPath distPref installPkg
-- | Unpack a package tarball into a fresh temporary directory, sanity-check
-- that the expected @.cabal@ file is present, work around pre-generated
-- @dist@ directories from @cabal sdist@, and then run the installer on the
-- unpacked tree. Unpack errors are reported as 'UnpackFailed'.
installLocalTarballPackage
  :: Verbosity
  -> JobLimit
  -> PackageIdentifier -> FilePath -> FilePath
  -> (Maybe FilePath -> IO BuildResult)
  -> IO BuildResult
installLocalTarballPackage verbosity jobLimit pkgid
                           tarballPath distPref installPkg = do
  tmp <- getTemporaryDirectory
  withTempDirectory verbosity tmp (display pkgid) $ \tmpDirPath ->
    onFailure UnpackFailed $ do
      let relUnpackedPath = display pkgid
          absUnpackedPath = tmpDirPath </> relUnpackedPath
          descFilePath = absUnpackedPath
                     </> display (packageName pkgid) <.> "cabal"
      withJobLimit jobLimit $ do
        info verbosity $ "Extracting " ++ tarballPath
                      ++ " to " ++ tmpDirPath ++ "..."
        extractTarGzFile tmpDirPath relUnpackedPath tarballPath
        exists <- doesFileExist descFilePath
        -- 'unless' rather than 'when (not ...)', consistent with the rest
        -- of this module.
        unless exists $
          die $ "Package .cabal file not found: " ++ show descFilePath
        maybeRenameDistDir absUnpackedPath
      installPkg (Just absUnpackedPath)

  where
    -- 'cabal sdist' puts pre-generated files in the 'dist'
    -- directory. This fails when a nonstandard build directory name
    -- is used (as is the case with sandboxes), so we need to rename
    -- the 'dist' dir here.
    --
    -- TODO: 'cabal get happy && cd sandbox && cabal install ../happy' still
    -- fails even with this workaround. We probably can live with that.
    maybeRenameDistDir :: FilePath -> IO ()
    maybeRenameDistDir absUnpackedPath = do
      let distDirPath    = absUnpackedPath </> defaultDistPref
          distDirPathTmp = absUnpackedPath </> (defaultDistPref ++ "-tmp")
          distDirPathNew = absUnpackedPath </> distPref
      distDirExists <- doesDirectoryExist distDirPath
      when (distDirExists
            && (not $ distDirPath `equalFilePath` distDirPathNew)) $ do
        -- NB: we need to handle the case when 'distDirPathNew' is a
        -- subdirectory of 'distDirPath' (e.g. the former is
        -- 'dist/dist-sandbox-3688fbc2' and the latter is 'dist').
        debug verbosity $ "Renaming '" ++ distDirPath ++ "' to '"
          ++ distDirPathTmp ++ "'."
        renameDirectory distDirPath distDirPathTmp
        when (distDirPath `isPrefixOf` distDirPathNew) $
          createDirectoryIfMissingVerbose verbosity False distDirPath
        debug verbosity $ "Renaming '" ++ distDirPathTmp ++ "' to '"
          ++ distDirPathNew ++ "'."
        renameDirectory distDirPathTmp distDirPathNew
-- | Drive the full Setup.hs lifecycle for one unpacked package:
-- configure, build, (optionally) haddock, (optionally) test, then copy and
-- register under the install lock. Each phase maps its failures to the
-- corresponding 'BuildFailure' constructor via 'onFailure'.
installUnpackedPackage
  :: Verbosity
  -> JobLimit
  -> Lock
  -> Int
  -> PackageKey
  -> SetupScriptOptions
  -> InstallMisc
  -> ConfigFlags
  -> InstallFlags
  -> HaddockFlags
  -> CompilerInfo
  -> Platform
  -> PackageDescription
  -> PackageDescriptionOverride
  -> Maybe FilePath -- ^ Directory to change to before starting the installation.
  -> UseLogFile -- ^ File to log output to (if any)
  -> IO BuildResult
installUnpackedPackage verbosity buildLimit installLock numJobs pkg_key
                       scriptOptions miscOptions
                       configFlags installFlags haddockFlags
                       cinfo platform pkg pkgoverride workingDir useLogFile = do
  -- Override the .cabal file if necessary
  case pkgoverride of
    Nothing -> return ()
    Just pkgtxt -> do
      let descFilePath = fromMaybe "." workingDir
                     </> display (packageName pkgid) <.> "cabal"
      info verbosity $
        "Updating " ++ display (packageName pkgid) <.> "cabal"
                    ++ " with the latest revision from the index."
      writeFileAtomic descFilePath pkgtxt
  -- Make sure that we pass --libsubdir etc to 'setup configure' (necessary if
  -- the setup script was compiled against an old version of the Cabal lib).
  configFlags' <- addDefaultInstallDirs configFlags
  -- Filter out flags not supported by the old versions of the Cabal lib.
  let configureFlags :: Version -> ConfigFlags
      configureFlags = filterConfigureFlags configFlags' {
        configVerbosity = toFlag verbosity'
      }
  -- Path to the optional log file.
  mLogPath <- maybeLogPath
  -- Configure phase
  onFailure ConfigureFailed $ withJobLimit buildLimit $ do
    when (numJobs > 1) $ notice verbosity $
      "Configuring " ++ display pkgid ++ "..."
    setup configureCommand configureFlags mLogPath
    -- Build phase
    onFailure BuildFailed $ do
      when (numJobs > 1) $ notice verbosity $
        "Building " ++ display pkgid ++ "..."
      setup buildCommand' buildFlags mLogPath
      -- Doc generation phase
      -- NOTE(review): haddock failures are deliberately non-fatal; they are
      -- recorded in 'docsResult' rather than aborting the install.
      docsResult <- if shouldHaddock
        then (do setup haddockCommand haddockFlags' mLogPath
                 return DocsOk)
               `catchIO`   (\_ -> return DocsFailed)
               `catchExit` (\_ -> return DocsFailed)
        else return DocsNotTried
      -- Tests phase
      onFailure TestsFailed $ do
        when (testsEnabled && PackageDescription.hasTests pkg) $
          setup Cabal.testCommand testFlags mLogPath
        let testsResult | testsEnabled = TestsOk
                        | otherwise = TestsNotTried
        -- Install phase
        onFailure InstallFailed $ criticalSection installLock $ do
          -- Capture installed package configuration file
          maybePkgConf <- maybeGenPkgConf mLogPath
          -- Actual installation
          withWin32SelfUpgrade verbosity pkg_key configFlags cinfo platform pkg $ do
            case rootCmd miscOptions of
              (Just cmd) -> reexec cmd
              Nothing -> do
                setup Cabal.copyCommand copyFlags mLogPath
                when shouldRegister $ do
                  setup Cabal.registerCommand registerFlags mLogPath
          return (Right (BuildOk docsResult testsResult maybePkgConf))

  where
    pkgid = packageId pkg
    buildCommand' = buildCommand defaultProgramConfiguration
    buildFlags _ = emptyBuildFlags {
      buildDistPref = configDistPref configFlags,
      buildVerbosity = toFlag verbosity'
    }
    shouldHaddock = fromFlag (installDocumentation installFlags)
    haddockFlags' _ = haddockFlags {
      haddockVerbosity = toFlag verbosity',
      haddockDistPref = configDistPref configFlags
    }
    testsEnabled = fromFlag (configTests configFlags)
                   && fromFlagOrDefault False (installRunTests installFlags)
    testFlags _ = Cabal.emptyTestFlags {
      Cabal.testDistPref = configDistPref configFlags
    }
    copyFlags _ = Cabal.emptyCopyFlags {
      Cabal.copyDistPref = configDistPref configFlags,
      Cabal.copyDest = toFlag InstallDirs.NoCopyDest,
      Cabal.copyVerbosity = toFlag verbosity'
    }
    shouldRegister = PackageDescription.hasLibs pkg
    registerFlags _ = Cabal.emptyRegisterFlags {
      Cabal.regDistPref = configDistPref configFlags,
      Cabal.regVerbosity = toFlag verbosity'
    }
    -- When logging to a file, the per-package verbosity is carried in the
    -- second component of 'useLogFile'.
    verbosity' = maybe verbosity snd useLogFile
    tempTemplate name = name ++ "-" ++ display pkgid

    addDefaultInstallDirs :: ConfigFlags -> IO ConfigFlags
    addDefaultInstallDirs configFlags' = do
      defInstallDirs <- InstallDirs.defaultInstallDirs flavor userInstall False
      return $ configFlags' {
          configInstallDirs = fmap Cabal.Flag .
                              InstallDirs.substituteInstallDirTemplates env $
                              InstallDirs.combineInstallDirs fromFlagOrDefault
                              defInstallDirs (configInstallDirs configFlags)
          }
      where
        CompilerId flavor _ = compilerInfoId cinfo
        env = initialPathTemplateEnv pkgid pkg_key cinfo platform
        userInstall = fromFlagOrDefault defaultUserInstall
                      (configUserInstall configFlags')

    -- Run 'setup register --gen-pkg-config' into a temp file and parse the
    -- resulting InstalledPackageInfo; only for packages with a library.
    maybeGenPkgConf :: Maybe FilePath
                    -> IO (Maybe Installed.InstalledPackageInfo)
    maybeGenPkgConf mLogPath =
      if shouldRegister then do
        tmp <- getTemporaryDirectory
        withTempFile tmp (tempTemplate "pkgConf") $ \pkgConfFile handle -> do
          hClose handle
          let registerFlags' version = (registerFlags version) {
                Cabal.regGenPkgConf = toFlag (Just pkgConfFile)
              }
          setup Cabal.registerCommand registerFlags' mLogPath
          withUTF8FileContents pkgConfFile $ \pkgConfText ->
            case Installed.parseInstalledPackageInfo pkgConfText of
              Installed.ParseFailed perror -> pkgConfParseFailed perror
              Installed.ParseOk warns pkgConf -> do
                unless (null warns) $
                  warn verbosity $ unlines (map (showPWarning pkgConfFile) warns)
                return (Just pkgConf)
      else return Nothing

    pkgConfParseFailed :: Installed.PError -> IO a
    pkgConfParseFailed perror =
      die $ "Couldn't parse the output of 'setup register --gen-pkg-config':"
            ++ show perror

    -- Compute (and prepare the directory for) the per-package log file,
    -- removing any stale log from a previous attempt.
    maybeLogPath :: IO (Maybe FilePath)
    maybeLogPath =
      case useLogFile of
        Nothing -> return Nothing
        Just (mkLogFileName, _) -> do
          let logFileName = mkLogFileName (packageId pkg) pkg_key
              logDir      = takeDirectory logFileName
          unless (null logDir) $ createDirectoryIfMissing True logDir
          logFileExists <- doesFileExist logFileName
          when logFileExists $ removeFile logFileName
          return (Just logFileName)

    -- Run one Setup.hs command, appending its output to the log file (if
    -- any); the handle is opened/closed around each command via 'bracket'.
    setup cmd flags mLogPath =
      Exception.bracket
      (maybe (return Nothing)
             (\path -> Just `fmap` openFile path AppendMode) mLogPath)
      (maybe (return ()) hClose)
      (\logFileHandle ->
        setupWrapper verbosity
          scriptOptions { useLoggingHandle = logFileHandle
                        , useWorkingDir = workingDir }
          (Just pkg)
          cmd flags [])

    reexec cmd = do
      -- look for our own executable file and re-exec ourselves using a helper
      -- program like sudo to elevate privileges:
      self <- getExecutablePath
      weExist <- doesFileExist self
      if weExist
        then inDir workingDir $
               rawSystemExit verbosity cmd
                 [self, "install", "--only"
                 ,"--verbose=" ++ showForCabal verbosity]
        else die $ "Unable to find cabal executable at: " ++ self
-- helper
-- | Run a build action, converting synchronous 'IOException's and
-- 'ExitCode' exceptions into a 'Left' 'BuildFailure' built with the given
-- constructor. Any other exception type propagates unchanged.
onFailure :: (SomeException -> BuildFailure) -> IO BuildResult -> IO BuildResult
onFailure result action = catches action [ioHandler, exitHandler]
  where
    ioHandler   = Handler (asFailure :: IOException -> IO BuildResult)
    exitHandler = Handler (asFailure :: ExitCode    -> IO BuildResult)
    asFailure :: Exception e => e -> IO BuildResult
    asFailure = return . Left . result . toException
-- ------------------------------------------------------------
-- * Weird windows hacks
-- ------------------------------------------------------------
-- | On Windows a running executable cannot be overwritten in place, so when
-- installing a package whose executables might include the running cabal
-- binary we go through 'Win32SelfUpgrade.possibleSelfUpgrade'. On every
-- other OS this is the identity on the action (see the guard below).
withWin32SelfUpgrade :: Verbosity
                     -> PackageKey
                     -> ConfigFlags
                     -> CompilerInfo
                     -> Platform
                     -> PackageDescription
                     -> IO a -> IO a
withWin32SelfUpgrade _ _ _ _ _ _ action | buildOS /= Windows = action
withWin32SelfUpgrade verbosity pkg_key configFlags cinfo platform pkg action = do
  defaultDirs <- InstallDirs.defaultInstallDirs
                   compFlavor
                   (fromFlag (configUserInstall configFlags))
                   (PackageDescription.hasLibs pkg)
  Win32SelfUpgrade.possibleSelfUpgrade verbosity
    (exeInstallPaths defaultDirs) action
  where
    pkgid = packageId pkg
    (CompilerId compFlavor _) = compilerInfoId cinfo
    -- Absolute install paths of all buildable executables of this package,
    -- including any configured program prefix/suffix.
    exeInstallPaths defaultDirs =
      [ InstallDirs.bindir absoluteDirs </> exeName <.> exeExtension
      | exe <- PackageDescription.executables pkg
      , PackageDescription.buildable (PackageDescription.buildInfo exe)
      , let exeName = prefix ++ PackageDescription.exeName exe ++ suffix
            prefix = substTemplate prefixTemplate
            suffix = substTemplate suffixTemplate ]
      where
        fromFlagTemplate = fromFlagOrDefault (InstallDirs.toPathTemplate "")
        prefixTemplate = fromFlagTemplate (configProgPrefix configFlags)
        suffixTemplate = fromFlagTemplate (configProgSuffix configFlags)
        templateDirs = InstallDirs.combineInstallDirs fromFlagOrDefault
                         defaultDirs (configInstallDirs configFlags)
        absoluteDirs = InstallDirs.absoluteInstallDirs
                         pkgid pkg_key
                         cinfo InstallDirs.NoCopyDest
                         platform templateDirs
        substTemplate = InstallDirs.fromPathTemplate
                      . InstallDirs.substPathTemplate env
          where env = InstallDirs.initialPathTemplateEnv pkgid pkg_key cinfo platform
|
DavidAlphaFox/ghc
|
libraries/Cabal/cabal-install/Distribution/Client/Install.hs
|
bsd-3-clause
| 65,104
| 0
| 31
| 18,305
| 12,303
| 6,395
| 5,908
| 1,137
| 12
|
{-# LANGUAGE TypeFamilies #-}
----------------------------------------------------------------------------
-- |
-- Module : Haddock.Interface.Rename
-- Copyright : (c) Simon Marlow 2003-2006,
-- David Waern 2006-2009
-- License : BSD-like
--
-- Maintainer : haddock@projects.haskell.org
-- Stability : experimental
-- Portability : portable
-----------------------------------------------------------------------------
module Haddock.Interface.Rename (renameInterface) where
import Data.Traversable (mapM)
import Haddock.GhcUtils
import Haddock.Types
import Bag (emptyBag)
import GHC hiding (NoLink)
import Name
import NameSet
import Coercion
import Control.Applicative
import Control.Monad hiding (mapM)
import Data.List
import qualified Data.Map as Map hiding ( Map )
import Prelude hiding (mapM)
-- | Rename an interface: rewrite every 'Name' in the export items and docs
-- to a 'DocName' pointing at the best known defining/exporting module, and
-- (optionally) warn about names with no link destination.
renameInterface :: DynFlags -> LinkEnv -> Bool -> Interface -> ErrMsgM Interface
renameInterface dflags renamingEnv warnings iface =
  -- first create the local env, where every name exported by this module
  -- is mapped to itself, and everything else comes from the global renaming
  -- env
  let localEnv = foldl fn renamingEnv (ifaceVisibleExports iface)
        where fn env name = Map.insert name (ifaceMod iface) env
      -- rename names in the exported declarations to point to things that
      -- are closer to, or maybe even exported by, the current module.
      (renamedExportItems, missingNames1)
        = runRnFM localEnv (renameExportItems (ifaceExportItems iface))
      (rnDocMap, missingNames2) = runRnFM localEnv (mapM renameDoc (ifaceDocMap iface))
      (rnArgMap, missingNames3) = runRnFM localEnv (mapM (mapM renameDoc) (ifaceArgMap iface))
      (finalModuleDoc, missingNames4)
        = runRnFM localEnv (renameDocumentation (ifaceDoc iface))
      -- combine the missing names and filter out the built-ins, which would
      -- otherwise always be missing.
      missingNames = nub $ filter isExternalName  -- XXX: isExternalName filters out too much
                    (missingNames1 ++ missingNames2 ++ missingNames3 ++ missingNames4)
      -- filter out certain built in type constructors using their string
      -- representation. TODO: use the Name constants from the GHC API.
      --      strings = filter (`notElem` ["()", "[]", "(->)"])
      --                (map pretty missingNames)
      strings = map (pretty dflags) . filter (\n -> not (isSystemName n || isBuiltInSyntax n)) $ missingNames
  in do
    -- report things that we couldn't link to. Only do this for non-hidden
    -- modules.
    unless (OptHide `elem` ifaceOptions iface || null strings || not warnings) $
      tell ["Warning: " ++ moduleString (ifaceMod iface) ++
            ": could not find link destinations for:\n"++
            unwords ("   " : strings) ]
    return $ iface { ifaceRnDoc = finalModuleDoc,
                     ifaceRnDocMap = rnDocMap,
                     ifaceRnArgMap = rnArgMap,
                     ifaceRnExportItems = renamedExportItems }
--------------------------------------------------------------------------------
-- Monad for renaming
--
-- The monad does two things for us: it passes around the environment for
-- renaming, and it returns a list of names which couldn't be found in
-- the environment.
--------------------------------------------------------------------------------
-- | The renaming monad: a reader of the ambient name-lookup function,
-- combined with a writer collecting the names that could not be found
-- (the @[Name]@ component). See 'runRnFM'.
newtype RnM a =
  RnM { unRn :: (Name -> (Bool, DocName)) -- name lookup function
             -> (a,[Name])
      }

-- NOTE: these three instances are mutually dependent ('return' delegates to
-- 'returnRn', while 'pure' and 'fmap' go back through Monad); keep them
-- together when editing.
instance Monad RnM where
  (>>=) = thenRn
  return = returnRn

instance Functor RnM where
  fmap f x = do a <- x; return (f a)

instance Applicative RnM where
  pure = return
  (<*>) = ap
-- | Lift a pure value into 'RnM'; no missing names are reported.
returnRn :: a -> RnM a
returnRn a = RnM (\_ -> (a, []))

-- | Sequence two renaming computations: thread the same lookup function
-- through both and concatenate their lists of unresolved names.
thenRn :: RnM a -> (a -> RnM b) -> RnM b
m `thenRn` k = RnM $ \lkp ->
  let (a, out1) = unRn m lkp
      (b, out2) = unRn (k a) lkp
  in (b, out1 ++ out2)

-- | Retrieve the ambient name-lookup function.
getLookupRn :: RnM (Name -> (Bool, DocName))
getLookupRn = RnM (\lkp -> (lkp, []))

-- | Record a name that could not be resolved in the environment.
outRn :: Name -> RnM ()
outRn name = RnM (\_ -> ((), [name]))
-- | Look up a 'Name'. If the environment does not know it, the name is
-- additionally recorded as missing via 'outRn'; either way the lookup
-- function's 'DocName' result is returned.
lookupRn :: Name -> RnM DocName
lookupRn name = do
  lkp <- getLookupRn
  let (found, docName) = lkp name
  unless found (outRn name)
  return docName
-- | Run a renaming computation against a 'LinkEnv'. Names present in the
-- environment resolve to 'Documented'; absent names resolve to
-- 'Undocumented' (and are collected as missing by 'lookupRn').
runRnFM :: LinkEnv -> RnM a -> (a,[Name])
runRnFM env rn = unRn rn resolve
  where
    resolve n =
      case Map.lookup n env of
        Just mdl -> (True,  Documented n mdl)
        Nothing  -> (False, Undocumented n)
--------------------------------------------------------------------------------
-- Renaming
--------------------------------------------------------------------------------
-- | Rename a single 'Name' (alias for 'lookupRn').
rename :: Name -> RnM DocName
rename = lookupRn

-- | Rename a located 'Name', preserving the source location.
renameL :: Located Name -> RnM (Located DocName)
renameL = mapM rename

renameExportItems :: [ExportItem Name] -> RnM [ExportItem DocName]
renameExportItems = mapM renameExportItem

-- | Rename the declaration doc together with its per-argument docs.
renameDocForDecl :: DocForDecl Name -> RnM (DocForDecl DocName)
renameDocForDecl (doc, fnArgsDoc) =
  (,) <$> renameDocumentation doc <*> renameFnArgsDoc fnArgsDoc

renameDocumentation :: Documentation Name -> RnM (Documentation DocName)
renameDocumentation (Documentation mDoc mWarning) =
  Documentation <$> mapM renameDoc mDoc <*> mapM renameDoc mWarning

-- | Raw doc strings carry no 'Name's, so there is nothing to rename.
renameLDocHsSyn :: LHsDocString -> RnM LHsDocString
renameLDocHsSyn = return

-- | Rename every 'Name' occurring inside any traversable doc structure.
renameDoc :: Traversable t => t Name -> RnM (t DocName)
renameDoc = traverse rename

renameFnArgsDoc :: FnArgsDoc Name -> RnM (FnArgsDoc DocName)
renameFnArgsDoc = mapM renameDoc

renameLType :: LHsType Name -> RnM (LHsType DocName)
renameLType = mapM renameType

-- | Kinds are represented as types, so kind renaming is type renaming.
renameLKind :: LHsKind Name -> RnM (LHsKind DocName)
renameLKind = renameLType

renameMaybeLKind :: Maybe (LHsKind Name) -> RnM (Maybe (LHsKind DocName))
renameMaybeLKind = traverse renameLKind
-- | Rename every 'Name' inside a type AST node, rebuilding the same
-- constructor with renamed sub-terms. Structure, literals and wrappers are
-- preserved unchanged.
renameType :: HsType Name -> RnM (HsType DocName)
renameType t = case t of
  HsForAllTy expl extra tyvars lcontext ltype -> do
    tyvars'   <- renameLTyVarBndrs tyvars
    lcontext' <- renameLContext lcontext
    ltype'    <- renameLType ltype
    return (HsForAllTy expl extra tyvars' lcontext' ltype')

  HsTyVar n -> return . HsTyVar =<< rename n
  HsBangTy b ltype -> return . HsBangTy b =<< renameLType ltype

  HsAppTy a b -> do
    a' <- renameLType a
    b' <- renameLType b
    return (HsAppTy a' b')

  HsFunTy a b -> do
    a' <- renameLType a
    b' <- renameLType b
    return (HsFunTy a' b')

  HsListTy ty -> return . HsListTy =<< renameLType ty
  HsPArrTy ty -> return . HsPArrTy =<< renameLType ty
  HsIParamTy n ty -> liftM (HsIParamTy n) (renameLType ty)
  HsEqTy ty1 ty2 -> liftM2 HsEqTy (renameLType ty1) (renameLType ty2)

  HsTupleTy b ts -> return . HsTupleTy b =<< mapM renameLType ts

  HsOpTy a (w, L loc op) b -> do
    op' <- rename op
    a'  <- renameLType a
    b'  <- renameLType b
    return (HsOpTy a' (w, L loc op') b')

  HsParTy ty -> return . HsParTy =<< renameLType ty

  HsKindSig ty k -> do
    ty' <- renameLType ty
    k' <- renameLKind k
    return (HsKindSig ty' k')

  HsDocTy ty doc -> do
    ty' <- renameLType ty
    doc' <- renameLDocHsSyn doc
    return (HsDocTy ty' doc')

  HsTyLit x -> return (HsTyLit x)

  HsWrapTy a b              -> HsWrapTy a <$> renameType b
  HsRecTy a                 -> HsRecTy <$> mapM renameConDeclFieldField a
  HsCoreTy a                -> pure (HsCoreTy a)
  HsExplicitListTy  a b     -> HsExplicitListTy  a <$> mapM renameLType b
  HsExplicitTupleTy a b     -> HsExplicitTupleTy a <$> mapM renameLType b
  HsQuasiQuoteTy a          -> HsQuasiQuoteTy <$> renameHsQuasiQuote a
  -- Splices should not survive to this point, hence the hard error.
  HsSpliceTy _ _            -> error "renameType: HsSpliceTy"
  HsWildcardTy              -> pure HsWildcardTy
  HsNamedWildcardTy a       -> HsNamedWildcardTy <$> rename a
-- | Rename the quoter name inside a quasi-quote; the span and quoted text
-- are kept unchanged.
renameHsQuasiQuote :: HsQuasiQuote Name -> RnM (HsQuasiQuote DocName)
renameHsQuasiQuote (HsQuasiQuote quoter sp quote) = do
  quoter' <- rename quoter
  return (HsQuasiQuote quoter' sp quote)
-- | Rename the explicit type-variable binders; the implicit kind variables
-- are deliberately left as an 'error' thunk (they are never consulted for
-- 'DocName' ASTs).
renameLTyVarBndrs :: LHsTyVarBndrs Name -> RnM (LHsTyVarBndrs DocName)
renameLTyVarBndrs (HsQTvs { hsq_kvs = _, hsq_tvs = tvs })
  = do { tvs' <- mapM renameLTyVarBndr tvs
       ; return (HsQTvs { hsq_kvs = error "haddock:renameLTyVarBndrs", hsq_tvs = tvs' }) }
                -- This is rather bogus, but I'm not sure what else to do

-- | Rename one type-variable binder, including its kind annotation if any.
renameLTyVarBndr :: LHsTyVarBndr Name -> RnM (LHsTyVarBndr DocName)
renameLTyVarBndr (L loc (UserTyVar n))
  = do { n' <- rename n
       ; return (L loc (UserTyVar n')) }
renameLTyVarBndr (L loc (KindedTyVar (L lv n) kind))
  = do { n' <- rename n
       ; kind' <- renameLKind kind
       ; return (L loc (KindedTyVar (L lv n') kind')) }
-- | Rename every type in a located context, keeping the location.
renameLContext :: Located [LHsType Name] -> RnM (Located [LHsType DocName])
renameLContext (L loc context) = L loc <$> mapM renameLType context
-- | Rename an instance head: the class name, the kind arguments, the type
-- arguments, and the instance-specific payload.
renameInstHead :: InstHead Name -> RnM (InstHead DocName)
renameInstHead (clsName, kinds, tys, instType) =
    (,,,) <$> rename clsName
          <*> mapM renameType kinds
          <*> mapM renameType tys
          <*> renameInstType instType
  where
    renameInstType (ClassInst cs) = ClassInst <$> mapM renameType cs
    renameInstType (TypeInst ts)  = TypeInst  <$> traverse renameType ts
    renameInstType (DataInst dd)  = DataInst  <$> renameTyClD dd
-- | Rename a located declaration, keeping the location.
renameLDecl :: LHsDecl Name -> RnM (LHsDecl DocName)
renameLDecl (L loc d) = L loc <$> renameDecl d
renameDecl :: HsDecl Name -> RnM (HsDecl DocName)
renameDecl decl = case decl of
TyClD d -> do
d' <- renameTyClD d
return (TyClD d')
SigD s -> do
s' <- renameSig s
return (SigD s')
ForD d -> do
d' <- renameForD d
return (ForD d')
InstD d -> do
d' <- renameInstD d
return (InstD d')
_ -> error "renameDecl"
-- | Apply any renaming function underneath a source location.
renameLThing :: (a Name -> RnM (a DocName)) -> Located (a Name) -> RnM (Located (a DocName))
renameLThing fn (L loc x) = L loc <$> fn x
-- | Rename a type/class declaration. For class declarations the method
-- bodies, collected docs and free variables are dropped (replaced by empty
-- or placeholder values) since Haddock does not render them from here.
renameTyClD :: TyClDecl Name -> RnM (TyClDecl DocName)
renameTyClD d = case d of
--  TyFamily flav lname ltyvars kind tckind -> do
  FamDecl { tcdFam = decl } -> do
    decl' <- renameFamilyDecl decl
    return (FamDecl { tcdFam = decl' })

  SynDecl { tcdLName = lname, tcdTyVars = tyvars, tcdRhs = rhs, tcdFVs = _fvs } -> do
    lname'    <- renameL lname
    tyvars'   <- renameLTyVarBndrs tyvars
    rhs'     <- renameLType rhs
    return (SynDecl { tcdLName = lname', tcdTyVars = tyvars', tcdRhs = rhs', tcdFVs = placeHolderNames })

  DataDecl { tcdLName = lname, tcdTyVars = tyvars, tcdDataDefn = defn, tcdFVs = _fvs } -> do
    lname'    <- renameL lname
    tyvars'   <- renameLTyVarBndrs tyvars
    defn'     <- renameDataDefn defn
    return (DataDecl { tcdLName = lname', tcdTyVars = tyvars', tcdDataDefn = defn', tcdFVs = placeHolderNames })

  ClassDecl { tcdCtxt = lcontext, tcdLName = lname, tcdTyVars = ltyvars
            , tcdFDs = lfundeps, tcdSigs = lsigs, tcdATs = ats, tcdATDefs = at_defs } -> do
    lcontext' <- renameLContext lcontext
    lname'    <- renameL lname
    ltyvars'  <- renameLTyVarBndrs ltyvars
    lfundeps' <- mapM renameLFunDep lfundeps
    lsigs'    <- mapM renameLSig lsigs
    ats'      <- mapM (renameLThing renameFamilyDecl) ats
    at_defs'  <- mapM renameLTyFamDefltEqn at_defs
    -- we don't need the default methods or the already collected doc entities
    return (ClassDecl { tcdCtxt = lcontext', tcdLName = lname', tcdTyVars = ltyvars'
                      , tcdFDs = lfundeps', tcdSigs = lsigs', tcdMeths= emptyBag
                      , tcdATs = ats', tcdATDefs = at_defs', tcdDocs = [], tcdFVs = placeHolderNames })

  where
    -- Rename both sides of a functional dependency, re-wrapping each name
    -- with an empty location.
    renameLFunDep (L loc (xs, ys)) = do
      xs' <- mapM rename (map unLoc xs)
      ys' <- mapM rename (map unLoc ys)
      return (L loc (map noLoc xs', map noLoc ys'))

    renameLSig (L loc sig) = return . L loc =<< renameSig sig
-- | Rename a type/data family declaration: its flavour info, name, binders
-- and optional kind signature.
renameFamilyDecl :: FamilyDecl Name -> RnM (FamilyDecl DocName)
renameFamilyDecl (FamilyDecl { fdInfo = info, fdLName = lname
                             , fdTyVars = ltyvars, fdKindSig = tckind }) = do
    info'    <- renameFamilyInfo info
    lname'   <- renameL lname
    ltyvars' <- renameLTyVarBndrs ltyvars
    tckind'  <- renameMaybeLKind tckind
    return (FamilyDecl { fdInfo = info', fdLName = lname'
                       , fdTyVars = ltyvars', fdKindSig = tckind' })
renameFamilyInfo :: FamilyInfo Name -> RnM (FamilyInfo DocName)
renameFamilyInfo DataFamily = return DataFamily
renameFamilyInfo OpenTypeFamily = return OpenTypeFamily
renameFamilyInfo (ClosedTypeFamily eqns)
= do { eqns' <- mapM renameLTyFamInstEqn eqns
; return $ ClosedTypeFamily eqns' }
-- | Rename a data definition: context, kind signature and constructors.
-- The new/data flag and C-type annotation pass through untouched.
renameDataDefn :: HsDataDefn Name -> RnM (HsDataDefn DocName)
renameDataDefn (HsDataDefn { dd_ND = nd, dd_ctxt = lcontext, dd_cType = cType
                           , dd_kindSig = k, dd_cons = cons }) = do
    lcontext' <- renameLContext lcontext
    k'        <- renameMaybeLKind k
    cons'     <- mapM (mapM renameCon) cons
    -- I don't think we need the derivings, so we return Nothing
    return (HsDataDefn { dd_ND = nd, dd_ctxt = lcontext', dd_cType = cType
                       , dd_kindSig = k', dd_cons = cons', dd_derivs = Nothing })
renameCon :: ConDecl Name -> RnM (ConDecl DocName)
renameCon decl@(ConDecl { con_names = lnames, con_qvars = ltyvars
, con_cxt = lcontext, con_details = details
, con_res = restype, con_doc = mbldoc }) = do
lnames' <- mapM renameL lnames
ltyvars' <- renameLTyVarBndrs ltyvars
lcontext' <- renameLContext lcontext
details' <- renameDetails details
restype' <- renameResType restype
mbldoc' <- mapM renameLDocHsSyn mbldoc
return (decl { con_names = lnames', con_qvars = ltyvars', con_cxt = lcontext'
, con_details = details', con_res = restype', con_doc = mbldoc' })
where
renameDetails (RecCon (L l fields)) = do
fields' <- mapM renameConDeclFieldField fields
return (RecCon (L l fields'))
renameDetails (PrefixCon ps) = return . PrefixCon =<< mapM renameLType ps
renameDetails (InfixCon a b) = do
a' <- renameLType a
b' <- renameLType b
return (InfixCon a' b')
renameResType (ResTyH98) = return ResTyH98
renameResType (ResTyGADT l t) = return . ResTyGADT l =<< renameLType t
-- | Rename one record-field group: field names, field type and any
-- attached documentation string.
renameConDeclFieldField :: LConDeclField Name -> RnM (LConDeclField DocName)
renameConDeclFieldField (L l (ConDeclField names t doc)) =
  fmap (L l) $
    ConDeclField <$> mapM renameL names
                 <*> renameLType t
                 <*> mapM renameLDocHsSyn doc
renameSig :: Sig Name -> RnM (Sig DocName)
renameSig sig = case sig of
TypeSig lnames ltype _ -> do
lnames' <- mapM renameL lnames
ltype' <- renameLType ltype
return (TypeSig lnames' ltype' PlaceHolder)
PatSynSig lname (flag, qtvs) lreq lprov lty -> do
lname' <- renameL lname
qtvs' <- renameLTyVarBndrs qtvs
lreq' <- renameLContext lreq
lprov' <- renameLContext lprov
lty' <- renameLType lty
return $ PatSynSig lname' (flag, qtvs') lreq' lprov' lty'
FixSig (FixitySig lnames fixity) -> do
lnames' <- mapM renameL lnames
return $ FixSig (FixitySig lnames' fixity)
MinimalSig src s -> MinimalSig src <$> traverse renameL s
-- we have filtered out all other kinds of signatures in Interface.Create
_ -> error "expected TypeSig"
renameForD :: ForeignDecl Name -> RnM (ForeignDecl DocName)
renameForD (ForeignImport lname ltype co x) = do
lname' <- renameL lname
ltype' <- renameLType ltype
return (ForeignImport lname' ltype' co x)
renameForD (ForeignExport lname ltype co x) = do
lname' <- renameL lname
ltype' <- renameLType ltype
return (ForeignExport lname' ltype' co x)
renameInstD :: InstDecl Name -> RnM (InstDecl DocName)
renameInstD (ClsInstD { cid_inst = d }) = do
d' <- renameClsInstD d
return (ClsInstD { cid_inst = d' })
renameInstD (TyFamInstD { tfid_inst = d }) = do
d' <- renameTyFamInstD d
return (TyFamInstD { tfid_inst = d' })
renameInstD (DataFamInstD { dfid_inst = d }) = do
d' <- renameDataFamInstD d
return (DataFamInstD { dfid_inst = d' })
renameClsInstD :: ClsInstDecl Name -> RnM (ClsInstDecl DocName)
renameClsInstD (ClsInstDecl { cid_overlap_mode = omode
, cid_poly_ty =ltype, cid_tyfam_insts = lATs
, cid_datafam_insts = lADTs }) = do
ltype' <- renameLType ltype
lATs' <- mapM (mapM renameTyFamInstD) lATs
lADTs' <- mapM (mapM renameDataFamInstD) lADTs
return (ClsInstDecl { cid_overlap_mode = omode
, cid_poly_ty = ltype', cid_binds = emptyBag
, cid_sigs = []
, cid_tyfam_insts = lATs', cid_datafam_insts = lADTs' })
renameTyFamInstD :: TyFamInstDecl Name -> RnM (TyFamInstDecl DocName)
renameTyFamInstD (TyFamInstDecl { tfid_eqn = eqn })
= do { eqn' <- renameLTyFamInstEqn eqn
; return (TyFamInstDecl { tfid_eqn = eqn'
, tfid_fvs = placeHolderNames }) }
-- | Rename one type-family instance equation. The pattern binders are not
-- needed afterwards, so the 'HsWB' wrapper is rebuilt with placeholders.
renameLTyFamInstEqn :: LTyFamInstEqn Name -> RnM (LTyFamInstEqn DocName)
renameLTyFamInstEqn (L loc (TyFamEqn { tfe_tycon = tc, tfe_pats = pats_w_bndrs, tfe_rhs = rhs }))
  = do { tc' <- renameL tc
       ; pats' <- mapM renameLType (hswb_cts pats_w_bndrs)
       ; rhs' <- renameLType rhs
       ; return (L loc (TyFamEqn { tfe_tycon = tc'
                                 , tfe_pats = HsWB pats' PlaceHolder PlaceHolder PlaceHolder
                                 , tfe_rhs = rhs' })) }
-- | Rename a default equation of an associated type family; unlike
-- 'renameLTyFamInstEqn' the patterns here are type-variable binders.
renameLTyFamDefltEqn :: LTyFamDefltEqn Name -> RnM (LTyFamDefltEqn DocName)
renameLTyFamDefltEqn (L loc (TyFamEqn { tfe_tycon = tc, tfe_pats = tvs, tfe_rhs = rhs }))
  = do { tc'  <- renameL tc
       ; tvs' <- renameLTyVarBndrs tvs
       ; rhs' <- renameLType rhs
       ; return (L loc (TyFamEqn { tfe_tycon = tc'
                                 , tfe_pats = tvs'
                                 , tfe_rhs = rhs' })) }
-- | Rename a data-family instance: the family name, the instance patterns
-- (binders replaced by placeholders afterwards) and the data definition.
renameDataFamInstD :: DataFamInstDecl Name -> RnM (DataFamInstDecl DocName)
renameDataFamInstD (DataFamInstDecl { dfid_tycon = tc, dfid_pats = pats_w_bndrs, dfid_defn = defn })
  = do { tc' <- renameL tc
       ; pats' <- mapM renameLType (hswb_cts pats_w_bndrs)
       ; defn' <- renameDataDefn defn
       ; return (DataFamInstDecl { dfid_tycon = tc'
                                 , dfid_pats
                                     = HsWB pats' PlaceHolder PlaceHolder PlaceHolder
                                 , dfid_defn = defn', dfid_fvs = placeHolderNames }) }
renameExportItem :: ExportItem Name -> RnM (ExportItem DocName)
renameExportItem item = case item of
ExportModule mdl -> return (ExportModule mdl)
ExportGroup lev id_ doc -> do
doc' <- renameDoc doc
return (ExportGroup lev id_ doc')
ExportDecl decl doc subs instances fixities splice -> do
decl' <- renameLDecl decl
doc' <- renameDocForDecl doc
subs' <- mapM renameSub subs
instances' <- forM instances $ \(L l inst, idoc) -> do
inst' <- renameInstHead inst
idoc' <- mapM renameDoc idoc
return (L l inst', idoc')
fixities' <- forM fixities $ \(name, fixity) -> do
name' <- lookupRn name
return (name', fixity)
return (ExportDecl decl' doc' subs' instances' fixities' splice)
ExportNoDecl x subs -> do
x' <- lookupRn x
subs' <- mapM lookupRn subs
return (ExportNoDecl x' subs')
ExportDoc doc -> do
doc' <- renameDoc doc
return (ExportDoc doc')
renameSub :: (Name, DocForDecl Name) -> RnM (DocName, DocForDecl DocName)
renameSub (n,doc) = do
n' <- rename n
doc' <- renameDocForDecl doc
return (n', doc')
-- 'DocName' carries no renamer- or typechecker-phase information, so all
-- of the corresponding AST extension fields collapse to 'PlaceHolder'.
type instance PostRn DocName NameSet  = PlaceHolder
type instance PostRn DocName Fixity   = PlaceHolder
type instance PostRn DocName Bool     = PlaceHolder
type instance PostRn DocName [Name]   = PlaceHolder

type instance PostTc DocName Kind     = PlaceHolder
type instance PostTc DocName Type     = PlaceHolder
type instance PostTc DocName Coercion = PlaceHolder
|
DavidAlphaFox/ghc
|
utils/haddock/haddock-api/src/Haddock/Interface/Rename.hs
|
bsd-3-clause
| 19,822
| 0
| 17
| 4,799
| 6,172
| 3,082
| 3,090
| 384
| 24
|
{-| @/proc/stat@ data collector.
-}
{-
Copyright (C) 2013 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Ganeti.DataCollectors.CPUload
( dcName
, dcVersion
, dcFormatVersion
, dcCategory
, dcKind
, dcReport
, dcUpdate
) where
import qualified Control.Exception as E
import Data.Attoparsec.Text.Lazy as A
import Data.Text.Lazy (pack, unpack)
import qualified Text.JSON as J
import qualified Data.Sequence as Seq
import System.Posix.Unistd (getSysVar, SysVar(ClockTick))
import qualified Ganeti.BasicTypes as BT
import qualified Ganeti.Constants as C
import Ganeti.Cpu.LoadParser(cpustatParser)
import Ganeti.DataCollectors.Types
import Ganeti.Utils
import Ganeti.Cpu.Types
-- | The default path of the CPU status file.
-- It is hardcoded because it is not likely to change.
defaultFile :: FilePath
defaultFile = C.statFile
-- | The buffer size of the values kept in the map.
bufferSize :: Int
bufferSize = C.cpuavgloadBufferSize
-- | The window size of the values that will export the average load.
windowSize :: Integer
windowSize = toInteger C.cpuavgloadWindowSize
-- | The default setting for the maximum amount of not parsed character to
-- print in case of error.
-- It is set to use most of the screen estate on a standard 80x25 terminal.
-- TODO: add the possibility to set this with a command line parameter.
defaultCharNum :: Int
defaultCharNum = 80*20
-- | The name of this data collector.
dcName :: String
dcName = "cpu-avg-load"
-- | The version of this data collector.
dcVersion :: DCVersion
dcVersion = DCVerBuiltin
-- | The version number for the data format of this data collector.
dcFormatVersion :: Int
dcFormatVersion = 1
-- | The category of this data collector.
dcCategory :: Maybe DCCategory
dcCategory = Nothing
-- | The kind of this data collector.
dcKind :: DCKind
dcKind = DCKPerf
-- | The data exported by the data collector, taken from the default location.
--
-- A missing collector state is treated as an empty load buffer.
dcReport :: Maybe CollectorData -> IO DCReport
dcReport colData = buildDCReport loadData
  where
    loadData =
      case colData of
        Just (CPULoadData v) -> v
        Nothing              -> Seq.empty
-- | Data stored by the collector in mond's memory.
type Buffer = Seq.Seq (Integer, [Int])
-- | Compute the load from a CPU.
--
-- The load is the sum of every per-mode tick counter of the CPU.
computeLoad :: CPUstat -> Int
computeLoad cpuData = sum (map ($ cpuData) counters)
  where
    counters = [ csUser, csNice, csSystem, csIowait, csIrq
               , csSoftirq, csSteal, csGuest, csGuestNice ]
-- | Reads and Computes the load for each CPU.
--
-- Reads the @/proc/stat@-style file at the given path, parses the
-- per-CPU counters with 'cpustatParser' and returns one summed load
-- figure per CPU together with the collection timestamp.
dcCollectFromFile :: FilePath -> IO (Integer, [Int])
dcCollectFromFile inputFile = do
  -- Read the whole file, turning any IOError into a fatal error message.
  contents <-
    ((E.try $ readFile inputFile) :: IO (Either IOError String)) >>=
    exitIfBad "reading from file" . either (BT.Bad . show) BT.Ok
  -- Parse the counters; on failure show at most 'defaultCharNum'
  -- characters of the unconsumed input to keep the error readable.
  cpustatData <-
    case A.parse cpustatParser $ pack contents of
      A.Fail unparsedText contexts errorMessage -> exitErr $
        show (Prelude.take defaultCharNum $ unpack unparsedText) ++ "\n"
        ++ show contexts ++ "\n" ++ errorMessage
      A.Done _ cpustatD -> return cpustatD
  -- NOTE(review): 'getCurrentTime' is the one from Ganeti.Utils (it
  -- yields an 'Integer'), not Data.Time's -- confirm its unit (seconds)
  -- against Ganeti.Utils before relying on it.
  now <- getCurrentTime
  let timestamp = now :: Integer
  return (timestamp, map computeLoad cpustatData)
-- | Returns the collected data in the appropriate type.
--
-- A single sample read from the default file, wrapped as a one-element
-- buffer.
dcCollect :: IO Buffer
dcCollect = fmap Seq.singleton (dcCollectFromFile defaultFile)
-- | Formats data for JSON transformation.
--
-- The head of the input list is the overall load average and the tail
-- holds the per-CPU averages; an empty input yields a report for zero
-- CPUs.  Uses @length xs@ directly instead of the original redundant
-- @length l - 1@ on the as-pattern.
formatData :: [Double] -> CPUavgload
formatData [] = CPUavgload (0 :: Int) [] (0 :: Double)
formatData (x:xs) = CPUavgload (length xs) xs x
-- | Update a Map Entry.
--
-- Prepends the freshly collected sample(s); once the buffer has reached
-- 'bufferSize' the oldest entry is dropped to keep its length bounded.
updateEntry :: Buffer -> Buffer -> Buffer
updateEntry newBuffer mapEntry
  | Seq.length mapEntry < bufferSize = newBuffer Seq.>< mapEntry
  | otherwise                        = newBuffer Seq.>< Seq.drop 1 mapEntry
-- | Updates the given Collector data.
--
-- Collects a fresh sample and merges it into the existing buffer (or
-- starts a new buffer when no previous state exists).  The result is
-- forced to WHNF before being returned to avoid accumulating thunks in
-- mond's long-lived state.
dcUpdate :: Maybe CollectorData -> IO CollectorData
dcUpdate mcd = do
  collected <- dcCollect
  let merged = case mcd of
        Nothing                  -> collected
        Just (CPULoadData old) -> updateEntry collected old
  merged `seq` return (CPULoadData merged)
-- | Computes the average load for every CPU and the overall from data read
-- from the map.
--
-- Only the buffer prefix whose timestamps compare greater than @w@ is
-- considered (new samples are prepended by 'updateEntry', so the prefix
-- holds the newest entries).  The result is the counter increase between
-- the newest and oldest sample in that window, normalised by the elapsed
-- time and the clock-tick rate.
--
-- NOTE(review): @w@ is the window *size* at the call site, yet it is
-- compared directly against absolute timestamps here; it looks like the
-- intended cutoff is @now - w@ -- confirm against the callers.
computeAverage :: Buffer -> Integer -> Integer -> [Double]
computeAverage s w ticks =
  let window = Seq.takeWhileL ((> w) . fst) s
      -- An empty window (seen from either end) yields no averages.
      go Seq.EmptyL _ = []
      go _ Seq.EmptyR = []
      go (leftmost Seq.:< _) (_ Seq.:> rightmost) = do
        let (timestampL, listL) = leftmost
            (timestampR, listR) = rightmost
            -- Per-CPU counter increase over the window.
            work = zipWith (-) listL listR
            -- Total elapsed ticks over the window.
            -- NOTE(review): this is 0 when both samples share a
            -- timestamp, making the division below produce
            -- Infinity/NaN -- confirm distinct timestamps are
            -- guaranteed upstream.
            overall = (timestampL - timestampR) * ticks
        map (\x -> fromIntegral x / fromIntegral overall) work
  in go (Seq.viewl window) (Seq.viewr window)
-- | This function computes the JSON representation of the CPU load.
buildJsonReport :: Buffer -> IO J.JSValue
buildJsonReport v = do
  ticks <- getSysVar ClockTick
  return . J.showJSON . formatData $ computeAverage v windowSize ticks
-- | This function computes the DCReport for the CPU load.
buildDCReport :: Buffer -> IO DCReport
buildDCReport v =
  buildJsonReport v
    >>= buildReport dcName dcVersion dcFormatVersion dcCategory dcKind
|
kawamuray/ganeti
|
src/Ganeti/DataCollectors/CPUload.hs
|
gpl-2.0
| 5,899
| 0
| 20
| 1,268
| 1,252
| 665
| 587
| 111
| 3
|
-----------------------------------------------------------------------------
-- |
-- Module : Control.Monad.Writer.Lazy
-- Copyright : (c) Andy Gill 2001,
-- (c) Oregon Graduate Institute of Science and Technology, 2001
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : libraries@haskell.org
-- Stability : experimental
-- Portability : non-portable (multi-param classes, functional dependencies)
--
-- Lazy writer monads.
--
-- Inspired by the paper
-- /Functional Programming with Overloading and Higher-Order Polymorphism/,
-- Mark P Jones (<http://web.cecs.pdx.edu/~mpj/pubs/springschool.html>)
-- Advanced School of Functional Programming, 1995.
-----------------------------------------------------------------------------
module Control.Monad.Writer.Lazy (
-- * MonadWriter class
MonadWriter(..),
listens,
censor,
-- * The Writer monad
Writer,
runWriter,
execWriter,
mapWriter,
-- * The WriterT monad transformer
WriterT(WriterT),
runWriterT,
execWriterT,
mapWriterT,
module Control.Monad,
module Control.Monad.Fix,
module Control.Monad.Trans,
module Data.Monoid,
) where
import Control.Monad.Writer.Class
import Control.Monad.Trans
import Control.Monad.Trans.Writer.Lazy (
Writer, runWriter, execWriter, mapWriter,
WriterT(WriterT), runWriterT, execWriterT, mapWriterT)
import Control.Monad
import Control.Monad.Fix
import Data.Monoid
|
johanneshilden/principle
|
public/mtl-2.2.1/Control/Monad/Writer/Lazy.hs
|
bsd-3-clause
| 1,514
| 0
| 6
| 296
| 166
| 120
| 46
| 26
| 0
|
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="hr-HR">
<title>JSON View</title>
<maps>
<homeID>jsonview</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
thc202/zap-extensions
|
addOns/jsonview/src/main/javahelp/help_hr_HR/helpset_hr_HR.hs
|
apache-2.0
| 959
| 77
| 66
| 156
| 407
| 206
| 201
| -1
| -1
|
-- | A description of the platform we're compiling for.
--
module Platform (
Platform(..),
Arch(..),
OS(..),
ArmISA(..),
ArmISAExt(..),
ArmABI(..),
target32Bit,
isARM,
osElfTarget,
osMachOTarget,
platformUsesFrameworks,
platformBinariesAreStaticLibs,
)
where
-- | Contains enough information for the native code generator to emit
-- code for this platform.
data Platform
= Platform {
platformArch :: Arch,
platformOS :: OS,
-- Word size in bytes (i.e. normally 4 or 8,
-- for 32bit and 64bit platforms respectively)
platformWordSize :: {-# UNPACK #-} !Int,
platformUnregisterised :: Bool,
platformHasGnuNonexecStack :: Bool,
platformHasIdentDirective :: Bool,
platformHasSubsectionsViaSymbols :: Bool
}
deriving (Read, Show, Eq)
-- | Architectures that the native code generator knows about.
-- TODO: It might be nice to extend these constructors with information
-- about what instruction set extensions an architecture might support.
--
data Arch
= ArchUnknown
| ArchX86
| ArchX86_64
| ArchPPC
| ArchPPC_64
| ArchSPARC
| ArchARM
{ armISA :: ArmISA
, armISAExt :: [ArmISAExt]
, armABI :: ArmABI
}
| ArchARM64
| ArchAlpha
| ArchMipseb
| ArchMipsel
| ArchJavaScript
deriving (Read, Show, Eq)
-- | Whether the architecture belongs to the ARM family (32- or 64-bit).
isARM :: Arch -> Bool
isARM arch = case arch of
  ArchARM {} -> True
  ArchARM64  -> True
  _          -> False
-- | Operating systems that the native code generator knows about.
-- Having OSUnknown should produce a sensible default, but no promises.
data OS
= OSUnknown
| OSLinux
| OSDarwin
| OSiOS
| OSSolaris2
| OSMinGW32
| OSFreeBSD
| OSDragonFly
| OSOpenBSD
| OSNetBSD
| OSKFreeBSD
| OSHaiku
| OSOsf3
| OSQNXNTO
| OSAndroid
deriving (Read, Show, Eq)
-- | ARM Instruction Set Architecture, Extensions and ABI
--
data ArmISA
= ARMv5
| ARMv6
| ARMv7
deriving (Read, Show, Eq)
data ArmISAExt
= VFPv2
| VFPv3
| VFPv3D16
| NEON
| IWMMX2
deriving (Read, Show, Eq)
data ArmABI
= SOFT
| SOFTFP
| HARD
deriving (Read, Show, Eq)
-- | Whether the platform uses a 32-bit (4-byte) word size.
target32Bit :: Platform -> Bool
target32Bit = (== 4) . platformWordSize
-- | This predicates tells us whether the OS supports ELF-like shared libraries.
osElfTarget :: OS -> Bool
osElfTarget os = os `elem` elfTargets
  where
    -- Every OS known to use ELF object files and shared libraries.
    -- OSOsf3 is deliberately left out: that may not be accurate, but
    -- treating it as non-ELF is the safe choice (see below).
    elfTargets =
      [ OSLinux, OSFreeBSD, OSDragonFly, OSOpenBSD, OSNetBSD
      , OSSolaris2, OSKFreeBSD, OSHaiku, OSAndroid ]

-- Defaulting to False is safe; it means don't rely on any
-- ELF-specific functionality.  It is important to have a default for
-- portability, otherwise we have to answer this question for every
-- new platform we compile on (even unreg).
-- | This predicate tells us whether the OS support Mach-O shared libraries.
osMachOTarget :: OS -> Bool
osMachOTarget os = case os of
  OSDarwin -> True
  _        -> False
-- | Whether the OS uses Apple-style frameworks for libraries.
osUsesFrameworks :: OS -> Bool
osUsesFrameworks os = case os of
  OSDarwin -> True
  OSiOS    -> True
  _        -> False

-- | 'osUsesFrameworks' lifted to a whole 'Platform'.
platformUsesFrameworks :: Platform -> Bool
platformUsesFrameworks platform = osUsesFrameworks (platformOS platform)
-- | Whether "binaries" on the OS are really static libraries (iOS).
osBinariesAreStaticLibs :: OS -> Bool
osBinariesAreStaticLibs os = case os of
  OSiOS -> True
  _     -> False

-- | 'osBinariesAreStaticLibs' lifted to a whole 'Platform'.
platformBinariesAreStaticLibs :: Platform -> Bool
platformBinariesAreStaticLibs platform =
  osBinariesAreStaticLibs (platformOS platform)
|
lukexi/ghc-7.8-arm64
|
compiler/utils/Platform.hs
|
bsd-3-clause
| 4,213
| 0
| 9
| 1,317
| 701
| 413
| 288
| 110
| 1
|
{-# LANGUAGE DeriveFunctor #-}
module Data.Trie.Naive
( Trie
, singleton
, singletonString
, lookup
, parser
, fromList
, fromListAppend
, fromStringList
) where
import Prelude hiding (lookup)
import Data.Semigroup (Semigroup)
import Data.Word (Word8)
import Data.Map (Map)
import Data.Bifunctor (second)
import Packed.Bytes (Bytes)
import qualified Data.Char
import qualified GHC.OldList as L
import qualified Packed.Bytes.Parser as P
import qualified Packed.Bytes as B
import qualified Data.Semigroup as SG
import qualified Data.Map.Strict as M
-- | A naive trie keyed by byte strings: an optional value stored at
-- this node plus children indexed by the next byte of the key.
data Trie a = Trie (Maybe a) (Map Word8 (Trie a))
  deriving (Functor)

-- | Tries combine by merging; values stored under the same key are
-- combined with the value type's '<>'.
instance Semigroup a => Semigroup (Trie a) where
  (<>) = append

instance Semigroup a => Monoid (Trie a) where
  mempty = Trie Nothing M.empty
  mappend = (SG.<>)

-- | Merge two tries recursively.
append :: Semigroup a => Trie a -> Trie a -> Trie a
append (Trie v1 m1) (Trie v2 m2) = Trie
  -- 'Option' lifts the 'Maybe' values into a Semigroup so all four
  -- Just/Nothing combinations are handled uniformly.
  (SG.getOption (SG.Option v1 SG.<> SG.Option v2))
  (M.unionWith append m1 m2)
-- | A trie containing exactly one key/value pair.
singleton :: Bytes -> a -> Trie a
singleton k v = B.foldr step leaf k
  where
    leaf         = Trie (Just v) M.empty
    step b child = Trie Nothing (M.singleton b child)

-- | Like 'singleton', but with the key given as a 'String'.
singletonString :: String -> a -> Trie a
singletonString k v = L.foldr step leaf k
  where
    leaf         = Trie (Just v) M.empty
    step c child = Trie Nothing (M.singleton (c2w c) child)
-- | Look up the value stored under the given key, if any.
lookup :: Bytes -> Trie a -> Maybe a
lookup k t0 =
  case B.foldr lookupStep (Just t0) k of
    Just (Trie v _) -> v
    Nothing         -> Nothing

-- | Descend one byte deeper, giving up as soon as a branch is missing.
lookupStep :: Word8 -> Maybe (Trie a) -> Maybe (Trie a)
lookupStep _ Nothing             = Nothing
lookupStep w (Just (Trie _ m)) = M.lookup w m
-- | Turn a trie of parsers into a parser that dispatches byte by byte:
-- as soon as a node stores a parser it is run, otherwise one byte is
-- consumed to pick the next branch (failing on a missing branch).
parser :: Trie (P.Parser a) -> P.Parser a
parser (Trie stored children) =
  case stored of
    Just p  -> p
    Nothing -> P.any >>= \w -> maybe P.failure parser (M.lookup w children)
-- | Build a trie from key/value pairs; for duplicate keys the first
-- value in the list wins.
fromList :: [(Bytes,a)] -> Trie a
fromList pairs =
  fmap SG.getFirst (fromListAppend (map (second SG.First) pairs))

-- | Build a trie from key/value pairs, combining duplicates with '<>'.
fromListAppend :: Semigroup a => [(Bytes,a)] -> Trie a
fromListAppend = foldMap (\(k, v) -> singleton k v)

-- | 'fromList' with 'String' keys.
fromStringList :: [(String,a)] -> Trie a
fromStringList pairs =
  fmap SG.getFirst (fromStringListAppend (map (second SG.First) pairs))

-- | 'fromListAppend' with 'String' keys.
fromStringListAppend :: Semigroup a => [(String,a)] -> Trie a
fromStringListAppend = foldMap (\(k, v) -> singletonString k v)
-- | Convert a character to its byte value (code points above 255 wrap).
c2w :: Char -> Word8
c2w ch = fromIntegral (Data.Char.ord ch)
|
sdiehl/ghc
|
testsuite/tests/codeGen/should_run/T15038/common/Data/Trie/Naive.hs
|
bsd-3-clause
| 2,220
| 0
| 13
| 435
| 958
| 504
| 454
| 62
| 3
|
module DC () where
data Foo a = F a a a
z :: Foo Int
z = F 1 2 3
|
mightymoose/liquidhaskell
|
tests/pos/datacon1.hs
|
bsd-3-clause
| 70
| 0
| 6
| 27
| 41
| 23
| 18
| 4
| 1
|
module A2 where
import C2
main = (sumSquares [1 .. 4]) + (anotherFun [1 .. 4])
|
kmate/HaRe
|
old/testing/liftToToplevel/A2_AstOut.hs
|
bsd-3-clause
| 80
| 0
| 8
| 17
| 40
| 24
| 16
| 3
| 1
|
{-# LANGUAGE RankNTypes, NamedWildCards #-}
-- See Trac #11098
module NamedWildcardExplicitForall where
foo :: forall _a . _a -> _a -- _a is a type variable
foo = not
bar :: _a -> _a -- _a is a named wildcard
bar = not
baz :: forall _a . _a -> _b -> (_a, _b) -- _a is a variable, _b is a wildcard
baz x y = (not x, not y)
qux :: _a -> (forall _a . _a -> _a) -> _a -- the _a bound by forall is a tyvar
qux x f = let _ = f 7 in not x -- the other _a are wildcards
|
olsner/ghc
|
testsuite/tests/partial-sigs/should_fail/NamedWildcardExplicitForall.hs
|
bsd-3-clause
| 526
| 0
| 9
| 174
| 143
| 80
| 63
| 10
| 1
|
-- |
-- Module : SpriteClip.Render
-- Description :
-- Copyright : (c) Jonatan H Sundqvist, 2015
-- License : MIT
-- Maintainer : Jonatan H Sundqvist
-- Stability : experimental|stable
-- Portability : POSIX (not sure)
--
-- Created September 7 2015
-- TODO | - UI and graphics settings
-- - Factor out logic (and settings, input mode, etc.) (should not be embedded in rendering functions)
-- SPEC | -
-- -
--------------------------------------------------------------------------------------------------------------------------------------------
-- GHC Pragmas
--------------------------------------------------------------------------------------------------------------------------------------------
{-# LANGUAGE OverloadedStrings #-}
--------------------------------------------------------------------------------------------------------------------------------------------
-- API
--------------------------------------------------------------------------------------------------------------------------------------------
module SpriteClip.Render (module SpriteClip.Render,
module Render,
module Palette,
module Southpaw.Picasso.RenderUtils,
module Cairo) where
--------------------------------------------------------------------------------------------------------------------------------------------
-- We'll need these
--------------------------------------------------------------------------------------------------------------------------------------------
import Data.Complex
import Data.Maybe
import qualified Data.Set as S
import Text.Printf
import Control.Monad (forM_, when, liftM)
import Control.Lens
import qualified Graphics.Rendering.Cairo as Cairo
import qualified Graphics.UI.Gtk as Gtk
import Southpaw.Picasso.Palette (alphapart, redpart, greenpart, bluepart)
import qualified Southpaw.Picasso.Palette as Palette
import qualified Southpaw.Picasso.Render as Render
import Southpaw.Picasso.RenderUtils hiding (grid, closePath)
import qualified Southpaw.Cartesian.Plane.BoundingBox as BBox
import Southpaw.Cartesian.Plane.Utilities
import SpriteClip.Types
import SpriteClip.Lenses
import qualified SpriteClip.Logic as Logic
--------------------------------------------------------------------------------------------------------------------------------------------
-- Functions
--------------------------------------------------------------------------------------------------------------------------------------------
-- | Renders a frame
--
-- Draws the sprite sheet, every sprite section (including the one being
-- dragged out while a pin is set), the alignment guides, the cursor
-- axes and the debug HUD, in that order.
render :: AppState -> Cairo.Render ()
render appstate = do
  --
  canvasSize <- imageSurfaceSize sheet'
  -- Render.imageWithClip (const $ const $ Render.circle (appstate ^. mouse) 120) (50:+50) (appstate ^. sheet)
  Render.image (100:+100) (appstate ^. sheet)
  --
  -- Render.image (canvasSize*0.5) (appstate ^. sheet) -- Render sprite sheet
  -- Cairo.liftIO $ print (length $ take 5 sections)
  forM_ sections' (section mouse') -- Render sprite sections with markers
  alignments sections' mouse'      -- Render alignments
  axes mouse' canvasSize           -- Render cursor axes
  debugHUD appstate                -- Render debug overlay
  where
    -- Finished cutouts plus, while a pin is set, the in-progress
    -- selection rectangle spanned from the pin to the cursor.
    -- TODO: Refactor
    sections' = (appstate ^. cutouts) ++ maybe [] (\click -> [BBox.fromCorners click mouse']) (appstate ^. pin)
    -- Effective cursor position: optionally grid-snapped (right shift
    -- held) and/or squared (right control held).
    -- TODO: Rename (eg. cursor or pointer)
    mouse' = Logic.applyOptionals [(gridsnap, dotmap $ Logic.nearestMultiple 20.0), (square, smallest)] (appstate ^. mouse)
    sheet' = appstate ^. sheet
    gridsnap = S.member "shift_r" $ appstate ^. keys   -- Snap-to-grid modifier held?
    square = S.member "control_r" $ appstate ^. keys   -- Square-selection modifier held?
    -- Collapse a vector to a square using its smaller component.
    smallest (re:+im) = min re im :+ min re im
-- | Draws a single sprite-sheet section: its outline plus its markers.
-- TODO: Options
section :: Complex Double -> BoundingBox Double -> Cairo.Render ()
section cursor box = do
  -- Outline
  Render.rectangle (_centre box) (_size box)
  choose Palette.magenta
  Cairo.stroke
  -- Markers
  sectionMarkers box cursor
-- | Renders the markers for a sheet section (represented as a 'BoundingBox'):
-- corner, mid-edge and centre handles, drawn larger while hovered.
-- TODO: Options
-- TODO: Move out hover logic (?)
-- TODO: Refactor
sectionMarkers :: BoundingBox Double -> Complex Double -> Cairo.Render ()
sectionMarkers bbox mouse = forM_ markerBunches $ \ (radius, colour, offsets) ->
  forM_ offsets $ \pt -> do
    choose colour
    Render.circle (centre pt) (r radius pt)
    Cairo.fill
  where
    -- Marker scale follows the section's smaller dimension, clamped to
    -- [0.3, 1.6].
    scale = max 0.3 $ min 1.6 (let szx:+szy = _size bbox in min (abs szx) (abs szy)/120.0)
    -- Does the cursor lie on the marker?
    focused r = Logic.within (scale*r) mouse . centre
    -- Marker radius, enlarged by 40% while hovered.
    r r' pt = scale*r'*(if focused r' pt then 1.4 else 1.0)
    (rectx:+recty) = _size bbox
    -- Map a unit offset (presumably -1..1 per axis -- TODO confirm
    -- against Logic.corners et al.) to an absolute marker centre.
    centre (dx:+dy) = _centre bbox + ((dx*rectx/2):+(dy*recty/2))
    markerBunches = [(12, Palette.orange & alphapart .~ 0.88, Logic.corners),
                     (10, Palette.peru & alphapart .~ 0.88, Logic.midline),
                     ( 7, Palette.plum & alphapart .~ 0.88, Logic.centre)]
-- | Renders the alignments for a sprite section (represented as a 'BoundingBox'):
-- dashed guide lines from existing sections towards the cursor, each
-- anchored by a small crosshair.
-- TODO: Don't assume the point represents the mouse
alignments :: [BoundingBox Double] -> Complex Double -> Cairo.Render ()
alignments bboxes mouse@(mx:+my) = do
  -- TODO: Implement actual snapping
  -- TODO: Coalesce duplicate snaps
  forM_ (Logic.uniqueAlignments 0.0 6.0 bboxes mouse) (uncurry referenceline)
  -- TODO: Save and push state to prevent interference, rather than resetting manually
  Cairo.newPath        -- Clear any leftover path
  Cairo.setDash [] 0.0 -- Disable dashes
  where
    -- Solid black crosshair marking the anchor point of a guide.
    crosshairs (fr:+om) = Cairo.setLineWidth 1.0 >> Cairo.setDash [] 0.0 >> choose Palette.black >> Render.crosshairs (fr:+om) (22:+22) >> Cairo.stroke
    -- Dash/colour setup shared by both guide orientations.
    cairosetup d = choose (markTheSpot d) >> Cairo.setLineWidth 1 >> Cairo.setDash [8, 8] 0
    -- Grey for guides at a non-zero distance, black for exact matches.
    markTheSpot d = let i = if d /= 0.0 then 0.4 else 0.0 in (i, i, i, 1.0)
    -- Draw one dashed guide from the anchor towards the cursor's
    -- x (Vertical) or y (Horizontal) coordinate, then its crosshair.
    referenceline (Vertical d) (fr:+om) = cairosetup d >> Render.line (fr:+om) (fr:+my) >> Cairo.stroke >> crosshairs (fr:+om)
    referenceline (Horizontal d) (fr:+om) = cairosetup d >> Render.line (fr:+om) (mx:+om) >> Cairo.stroke >> crosshairs (fr:+om)
-- | Renders the X and Y axes meeting at the given origin point.
-- TODO: Options
-- TODO: Hide mouse, use axes instead
-- TODO: Use crosshairs with 'hollow middle' at the origin point (?)
axes :: Complex Double -> Complex Double -> Cairo.Render ()
axes (ox:+oy) (width:+height) = do
  Cairo.setLineWidth 1
  Cairo.setDash [5, 5] 14
  strokeAxis Palette.green (0, oy) (width, oy)  -- Horizontal axis
  strokeAxis Palette.red   (ox, 0) (ox, height) -- Vertical axis
  where
    -- Stroke one straight segment in the given colour.
    strokeAxis colour (x0, y0) (x1, y1) = do
      choose colour
      Cairo.moveTo x0 y0
      Cairo.lineTo x1 y1
      Cairo.stroke
-- | Renders the debug overlay (HUD): the cursor position and, when a
-- selection is pinned, the size of the current selection.
--
-- The unused local binding @msize@ from the original has been removed.
debugHUD :: AppState -> Cairo.Render ()
debugHUD appstate = do
  -- Debug info (HUD)
  Cairo.moveTo 20 20
  choose Palette.darkblue
  Cairo.setFontSize 16
  -- Cairo.fontOptionsSetAntilias Cairo.AntialiasDefault
  Cairo.selectFontFace ("Helvetica" :: String) Cairo.FontSlantNormal Cairo.FontWeightNormal
  Cairo.showText $ (printf "Mouse=(%.02f, %.02f)" mx my :: String)
  maybe pass (\(px:+py) -> Cairo.showText (printf " | Size=(%.02f, %.02f)" (abs $ px-mx) (abs $ py-my) :: String)) (appstate ^. pin)
  where
    (mx:+my) = appstate ^. mouse
    pass = return ()
|
SwiftsNamesake/SpriteClip
|
src/SpriteClip/Render.hs
|
mit
| 7,562
| 0
| 15
| 1,517
| 1,735
| 930
| 805
| 90
| 3
|
module Control.Monad.Activatable (
-- * The 'MonadActivatable' class
MonadActivatable(..),
switching', activateWith, activate,
-- * The 'Activatable' monad
Activatable, runActivatable, finalizeActivatable,
-- * The 'ActivatableT' monad transformer
ActivatableT(), runActivatableT, finalizeActivatableT,
-- * Activation-related types
ActivationError(..), Switched(..)
) where
import Control.Monad.Trans.Activatable hiding (switching')
import Control.Monad.Activatable.Class
|
antalsz/hs-to-coq
|
src/lib/Control/Monad/Activatable.hs
|
mit
| 492
| 0
| 5
| 62
| 84
| 59
| 25
| 10
| 0
|
module FromToExperiments where
|
NickAger/LearningHaskell
|
HaskellProgrammingFromFirstPrinciples/Chapter23.hsproj/FromToExperiments.hs
|
mit
| 35
| 0
| 2
| 7
| 4
| 3
| 1
| 1
| 0
|
{-# LANGUAGE BangPatterns, DeriveDataTypeable, DeriveGeneric, FlexibleInstances, MultiParamTypeClasses #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module Com.Mysql.Cj.Mysqlx.Protobuf.Frame.Scope (Scope(..)) where
import Prelude ((+), (/), (.))
import qualified Prelude as Prelude'
import qualified Data.Typeable as Prelude'
import qualified GHC.Generics as Prelude'
import qualified Data.Data as Prelude'
import qualified Text.ProtocolBuffers.Header as P'
data Scope = GLOBAL
| LOCAL
deriving (Prelude'.Read, Prelude'.Show, Prelude'.Eq, Prelude'.Ord, Prelude'.Typeable, Prelude'.Data, Prelude'.Generic)
instance P'.Mergeable Scope
instance Prelude'.Bounded Scope where
minBound = GLOBAL
maxBound = LOCAL
instance P'.Default Scope where
defaultValue = GLOBAL
toMaybe'Enum :: Prelude'.Int -> P'.Maybe Scope
toMaybe'Enum 1 = Prelude'.Just GLOBAL
toMaybe'Enum 2 = Prelude'.Just LOCAL
toMaybe'Enum _ = Prelude'.Nothing
instance Prelude'.Enum Scope where
fromEnum GLOBAL = 1
fromEnum LOCAL = 2
toEnum
= P'.fromMaybe (Prelude'.error "hprotoc generated code: toEnum failure for type Com.Mysql.Cj.Mysqlx.Protobuf.Frame.Scope") .
toMaybe'Enum
succ GLOBAL = LOCAL
succ _ = Prelude'.error "hprotoc generated code: succ failure for type Com.Mysql.Cj.Mysqlx.Protobuf.Frame.Scope"
pred LOCAL = GLOBAL
pred _ = Prelude'.error "hprotoc generated code: pred failure for type Com.Mysql.Cj.Mysqlx.Protobuf.Frame.Scope"
instance P'.Wire Scope where
wireSize ft' enum = P'.wireSize ft' (Prelude'.fromEnum enum)
wirePut ft' enum = P'.wirePut ft' (Prelude'.fromEnum enum)
wireGet 14 = P'.wireGetEnum toMaybe'Enum
wireGet ft' = P'.wireGetErr ft'
wireGetPacked 14 = P'.wireGetPackedEnum toMaybe'Enum
wireGetPacked ft' = P'.wireGetErr ft'
instance P'.GPB Scope
instance P'.MessageAPI msg' (msg' -> Scope) Scope where
getVal m' f' = f' m'
instance P'.ReflectEnum Scope where
reflectEnum = [(1, "GLOBAL", GLOBAL), (2, "LOCAL", LOCAL)]
reflectEnumInfo _
= P'.EnumInfo
(P'.makePNF (P'.pack ".Mysqlx.Notice.Frame.Scope") [] ["Com", "Mysql", "Cj", "Mysqlx", "Protobuf", "Frame"] "Scope")
["Com", "Mysql", "Cj", "Mysqlx", "Protobuf", "Frame", "Scope.hs"]
[(1, "GLOBAL"), (2, "LOCAL")]
instance P'.TextType Scope where
tellT = P'.tellShow
getT = P'.getRead
|
naoto-ogawa/h-xproto-mysql
|
src/Com/Mysql/Cj/Mysqlx/Protobuf/Frame/Scope.hs
|
mit
| 2,332
| 0
| 11
| 368
| 638
| 352
| 286
| 52
| 1
|
{-
ghci c:\Users\Thomas\Documents\GitHub\practice\pe\nonvisualstudio\haskell\Spec\Problem0011.Spec.hs
c:\Users\Thomas\Documents\GitHub\practice\pe\nonvisualstudio\haskell\Implementation\Problem0011.hs
c:\Users\Thomas\Documents\GitHub\practice\utilities\nonvisualstudio\haskell\Implementation\TestAbstract.hs
-}
-- :r :q :set +s for times
module Problem0011Tests where
import Test.HUnit
import System.IO
import TestAbstract
import Problem0011
testCases = TestList
[
TestCase $ assertEqual "maxOfAllSetsOfFour testSpace1 should return 5832." 5832 (maxOfAllSetsOfFour testSpace1),
TestCase $ assertEqual "maxOfAllSetsOfFour testSpace2 should return 6561." 6561 (maxOfAllSetsOfFour testSpace2),
TestCase $ assertEqual "maxOfAllVerticalAndDiagonalTuples testSpace1 should return 1080." 1080 (maxOfAllVerticalAndDiagonalTuples testSpace1),
TestCase $ assertEqual "maxOfAllVerticalAndDiagonalTuples testSpace2 should return 6561." 6561 (maxOfAllVerticalAndDiagonalTuples testSpace2),
TestCase $ assertEqual "maxOfHorizontalMaxProducts testSpace1 should return 5832." 5832 (maxOfHorizontalMaxProducts testSpace1),
TestCase $ assertEqual "maxOfHorizontalMaxProducts testSpace2 should return 6561." 6561 (maxOfHorizontalMaxProducts testSpace2),
TestCase $ assertEqual "constructVerticalTuples" [(0,1,1,2),(0,1,1,5),(0,2,4,6),(0,2,4,7),(1,3,5,9), (1,1,2,3),(1,1,5,9),(2,4,6,9),(2,4,7,9),(3,5,9,8)] (constructVerticalTuples testSpace1),
TestCase $ assertEqual "constructDiagonalTuples" [(0,1,4,7),(0,2,4,9),(1,1,6,9),(1,4,7,8)] (constructDiagonalTuples testSpace1),
TestCase $ assertEqual "constructDiagonalTuplesLToR" [(1,2,4,5),(0,2,1,2),(3,4,6,9),(2,4,5,3)] (constructDiagonalTuplesLToR testSpace1),
TestCase $ assertEqual "constructAllVerticalAndDiagonalTuples" [(0,1,1,2),(0,1,1,5),(0,2,4,6),(0,2,4,7),(1,3,5,9),(1,1,2,3),(1,1,5,9),(2,4,6,9),(2,4,7,9),(3,5,9,8),(0,1,4,7),(0,2,4,9),(1,1,6,9),(1,4,7,8),(1,2,4,5),(0,2,1,2),(3,4,6,9),(2,4,5,3)] (constructAllVerticalAndDiagonalTuples testSpace1),
TestCase $ easyAssertEqual "maxOfListOf4WideTuples" maxOfListOf4WideTuples [(2,2,2,30),(1,2,3,4),(1,2,3,4)] 240,
TestCase $ easyAssertEqual "maxOfListOf4WideTuples" maxOfListOf4WideTuples [(2,2,2,30),(1,2,3,4),(1,2,3,4),(10,10,10,10)] 10000,
TestCase $ easyAssertEqual "multiply4WideTuple" multiply4WideTuple (1,2,3,4) 24,
TestCase $ easyAssertEqual "multiply4WideTuple" multiply4WideTuple (2,2,2,30) 240,
TestCase $ easyAssertEqual "maxProductOf4ElementsIn" maxProductOf4ElementsIn [1,2,3,4,5,6,7,8,9,9,9,9] 6561,
TestCase $ easyAssertEqual "maxProductOf4ElementsIn" maxProductOf4ElementsIn [1,2,3,9,9,9,8,7,6,5,2,3] 5832,
TestCase $ easyAssertEqual "maxProductOf4ElementsIn" maxProductOf4ElementsIn [1,2,1,1,0,0,0,0,0,9,9,9] 2
]
tests = runTestTT testCases
testSpace1 =
[
[0,0,0,0,1],
[1,1,2,2,3],
[1,1,4,4,5],
[2,5,6,7,9],
[3,9,9,9,8]
]
testSpace2 =
[
[8,9,9,9,9],
[1,1,2,2,9],
[1,1,4,4,9],
[2,5,6,7,9],
[3,9,9,9,8]
]
questionSpace =
[
[08, 02, 22, 97, 38, 15, 0, 40, 0, 75, 04, 05, 07, 78, 52, 12, 50, 77, 91, 8],
[49, 49, 99, 40, 17, 81, 18, 57, 60, 87, 17, 40, 98, 43, 69, 48, 04, 56, 62, 0],
[81, 49, 31, 73, 55, 79, 14, 29, 93, 71, 40, 67, 53, 88, 30, 03, 49, 13, 36, 65],
[52, 70, 95, 23, 04, 60, 11, 42, 69, 24, 68, 56, 01, 32, 56, 71, 37, 02, 36, 91],
[22, 31, 16, 71, 51, 67, 63, 89, 41, 92, 36, 54, 22, 40, 40, 28, 66, 33, 13, 80],
[24, 47, 32, 60, 99, 03, 45, 02, 44, 75, 33, 53, 78, 36, 84, 20, 35, 17, 12, 50],
[32, 98, 81, 28, 64, 23, 67, 10, 26, 38, 40, 67, 59, 54, 70, 66, 18, 38, 64, 70],
[67, 26, 20, 68, 02, 62, 12, 20, 95, 63, 94, 39, 63, 08, 40, 91, 66, 49, 94, 21],
[24, 55, 58, 05, 66, 73, 99, 26, 97, 17, 78, 78, 96, 83, 14, 88, 34, 89, 63, 72],
[21, 36, 23, 09, 75, 0, 76, 44, 20, 45, 35, 14, 0, 61, 33, 97, 34, 31, 33, 95],
[78, 17, 53, 28, 22, 75, 31, 67, 15, 94, 03, 80, 04, 62, 16, 14, 09, 53, 56, 92],
[16, 39, 05, 42, 96, 35, 31, 47, 55, 58, 88, 24, 0, 17, 54, 24, 36, 29, 85, 57],
[86, 56, 0, 48, 35, 71, 89, 07, 05, 44, 44, 37, 44, 60, 21, 58, 51, 54, 17, 58],
[19, 80, 81, 68, 05, 94, 47, 69, 28, 73, 92, 13, 86, 52, 17, 77, 04, 89, 55, 40],
[04, 52, 08, 83, 97, 35, 99, 16, 07, 97, 57, 32, 16, 26, 26, 79, 33, 27, 98, 66],
[88, 36, 68, 87, 57, 62, 20, 72, 03, 46, 33, 67, 46, 55, 12, 32, 63, 93, 53, 69],
[04, 42, 16, 73, 38, 25, 39, 11, 24, 94, 72, 18, 08, 46, 29, 32, 40, 62, 76, 36],
[20, 69, 36, 41, 72, 30, 23, 88, 34, 62, 99, 69, 82, 67, 59, 85, 74, 04, 36, 16],
[20, 73, 35, 29, 78, 31, 90, 01, 74, 31, 49, 71, 48, 86, 81, 16, 23, 57, 05, 54],
[01, 70, 54, 71, 83, 51, 54, 69, 16, 92, 33, 48, 61, 43, 52, 01, 89, 19, 67, 48]]
|
Sobieck00/practice
|
pe/nonvisualstudio/haskell/OldWork/Spec/Problem0011.Spec.hs
|
mit
| 5,014
| 0
| 10
| 1,053
| 2,582
| 1,666
| 916
| 61
| 1
|
-- | Project Euler, problem 6: the difference between the square of the
-- sum and the sum of the squares of the first one hundred naturals.
res :: Integer
main :: IO()
res = squareOfSum - sumOfSquares
  where
    squareOfSum  = sum [1..100] ^ 2
    sumOfSquares = sum (map (^ 2) [1..100])
main = print ("The answer to Euler Problem 6 is " ++ show res)
|
andrew-christianson/Polyglot-Euler
|
Problem 6.hs
|
mit
| 149
| 0
| 11
| 36
| 84
| 44
| 40
| 4
| 1
|
module GiveYouAHead.New
(
new
) where
import System.Time (getClockTime)
import GiveYouAHead.Common (writeF)
import Macro.MacroParser(MacroNode(..))
import Macro.MacroIO(getMacroFromFile)
import Macro.MacroReplace(findMacro,splitMacroDef,toText)
new :: String -- Template
-> String -- id or num
-> [String] -- import list
-> IO ()
new tp num imp= do
mnode <- getMacroFromFile $ "new." ++ if null tp then "default" else tp
lsnode <- getMacroFromFile "new"
let (dls,_) = splitMacroDef lsnode
time <- getClockTime
let (as,bs) = splitMacroDef mnode
writeF (findMacro dls "numLeft"++num++findMacro dls"numRight") $ concatMap show $ toText (MacroDef "timenow" (show time):List "importList" (if null imp then [""] else imp):as,bs)
return ()
|
Qinka/GiveYouAHead
|
lib/GiveYouAHead/New.hs
|
mit
| 923
| 0
| 15
| 286
| 288
| 150
| 138
| 20
| 3
|
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module Betfair.StreamingAPI.Responses.ConnectionMessage
( ConnectionMessage(..)
) where
import Data.Aeson.TH (Options (omitNothingFields),
defaultOptions, deriveJSON)
import Protolude
import Text.PrettyPrint.GenericPretty
data ConnectionMessage = ConnectionMessage
{ op :: Text
, connectionId :: Text -- The connection id
} deriving (Eq, Read, Show, Generic, Pretty)
$(deriveJSON defaultOptions {omitNothingFields = True} ''ConnectionMessage)
|
joe9/streaming-betfair-api
|
src/Betfair/StreamingAPI/Responses/ConnectionMessage.hs
|
mit
| 698
| 0
| 9
| 154
| 119
| 74
| 45
| 16
| 0
|
{-# LANGUAGE RecordWildCards #-}
module Classify0Spec
( main
, spec
) where
import Ch02KNN.Classify0
import DataFiles
import qualified Data.Vector.Unboxed as VU
import qualified Data.Map as M
import MLUtil
import Test.Hspec
-- cf kNN.createDataSet
values :: Matrix
values = matrix 2
[ 1.0, 1.1
, 1.0, 1.0
, 0.0, 0.0
, 0.0, 0.1
]
-- cf kNN.createDataSet
labelIds :: VU.Vector LabelId
labelIds = VU.fromList
[ 1
, 1
, 2
, 2
]
k :: Int
k = 3
intFraction :: R -> Int -> Int
intFraction r x = round $ r * fromIntegral x
-- | Hold-out evaluation of kNN (cf kNN.datingClassTest): normalize all
-- columns, take the first @testRatio@ fraction of rows as the test set,
-- train on the remaining rows, and return the fraction of misclassified
-- test rows.
errorRate :: LabelledMatrix -> R -> R
errorRate m testRatio =
    let MatrixNormalization{..} = normalizeMatrixColumns (lmValues m)
        rowCount = rows mnValues
        columnCount = cols mnValues
        testRowCount = intFraction testRatio rowCount
        -- Split: rows [0, testRowCount) are test, the rest are training.
        testMatrix = subMatrix (0, 0) (testRowCount, columnCount) mnValues
        trainingMatrix = subMatrix (testRowCount, 0) (rowCount - testRowCount, columnCount) mnValues
        trainingLabelIds = VU.slice testRowCount (rowCount - testRowCount) (lmLabelIds m)
        -- Classify each test row and tally passes vs. failures.
        (passCount, errorCount) = forFold (0, 0) [0..testRowCount - 1] $ \r (passCount', errorCount') ->
            let testVector = subMatrix (r, 0) (1, columnCount) testMatrix
                actualLabelId = classify0 testVector trainingMatrix trainingLabelIds k
                expectedLabelId = (VU.!) (lmLabelIds m) r
            in if actualLabelId == expectedLabelId
                 then (passCount' + 1, errorCount')
                 else (passCount', errorCount' + 1)
    in fromIntegral errorCount / fromIntegral (passCount + errorCount)
-- | Hspec suite covering both the toy data set above and the dating
-- data set shipped with the book's examples.
spec :: Spec
spec = do
  describe "classify0" $ do
    it "should classify small matrix correctly" $ do
      classify0 (row [0.0, 0.0]) values labelIds k `shouldBe` 2
      classify0 (row [1.0, 1.2]) values labelIds k `shouldBe` 1
    -- cf kNN.datingClassTest
    it "should classify large matrix correctly" $ do
      path <- getDataFileName "datingTestSet2.txt"
      -- NOTE(review): partial pattern; the test fails with a
      -- pattern-match error if the data file cannot be parsed.
      Just m <- readLabelledMatrix path
      -- Error percentages pinned against the reference implementation.
      100.0 * errorRate m 0.2 `shouldBe` 8.0
      100.0 * errorRate m 0.1 `shouldBe` 5.0
      100.0 * errorRate m 0.05 `shouldBe` 2.0
      100.0 * errorRate m 0.02 `shouldBe` 0.0
      100.0 * errorRate m 0.01 `shouldBe` 0.0
-- | Standalone runner.
main :: IO ()
main = hspec spec
|
rcook/mlutil
|
ch02-knn/spec/Classify0Spec.hs
|
mit
| 2,420
| 0
| 17
| 700
| 712
| 383
| 329
| 59
| 2
|
-- |
-- | Module : Main.hs
-- | Description : Entry point for the steg program
-- | Copyright : (c) Jim Burton
-- | License : MIT
-- |
-- | Maintainer : j.burton@brighton.ac.uk
-- | Stability : provisional
-- | Portability : portable
-- |
module Main
where
import Data.Maybe (fromJust)
import Steg.Parse (dig, bury)
import System.Environment (getArgs)
import System.Exit (exitWith, ExitCode (ExitFailure))
-- | Lookup table mapping command-line options to functions.
dispatch :: [(String, [String] -> IO ())]
dispatch = [ ("bury", buryAct)
, ("dig", digAct)
]
-- | Bury some text.
-- | Hide the message file's text inside the input image, writing the
-- result to the output path; wrong arity falls back to usage.
buryAct :: [String] -> IO ()
buryAct args = case args of
  (imgIn : msgFile : imgOut : _) -> bury imgIn msgFile imgOut
  _ -> usageAndExit
-- | Dig some text.
-- | Extract hidden text from an image and print it.
-- NOTE(review): 'fromJust' aborts if no message is recovered.
digAct :: [String] -> IO ()
digAct args = case args of
  (imgIn : _) -> dig imgIn >>= putStrLn . fromJust
  _ -> usageAndExit
usage :: String
usage = "steg v.0.1 \n\
\------------- \n\
\usage: steg bury imageIn txtFile imageOut \n\
\ steg dig image"
usageAndExit :: IO ()
usageAndExit = putStrLn usage >> exitWith (ExitFailure 1)
-- | The entry point for the program.
-- | Dispatch on the first command-line argument; no arguments or an
-- unrecognized command prints usage and exits with a non-zero code.
main :: IO ()
main = getArgs >>= run
  where
    run [] = usageAndExit
    run (cmd : rest) = maybe usageAndExit ($ rest) (lookup cmd dispatch)
|
jimburton/steg
|
src/Main.hs
|
mit
| 1,359
| 0
| 13
| 351
| 342
| 191
| 151
| 26
| 3
|
module D20.Dice.Parser where
import Text.Regex.Posix
import D20.Dice
pattern :: String
pattern = "([0-9]*)d([0-9]+)([+-][0-9]+)?"
-- | Read an optionally signed decimal string; the empty string yields
-- 'Nothing', a leading @+@ or @-@ is honoured.
-- NOTE(review): relies on 'read', so malformed digits raise an error
-- when the result is forced (unchanged from the original behavior).
parseNumber :: String -> Maybe Int
parseNumber str =
  case str of
    "" -> Nothing
    '+' : digits -> Just (read digits)
    '-' : digits -> Just (negate (read digits))
    digits -> Just (read digits)
-- | Parse a dice expression such as @\"2d6+1\"@ into a 'Roll'.
-- Multiplier and additive parts are optional; the die size is mandatory
-- (all per 'pattern').
parse :: String -> Maybe Roll
parse input =
  case input =~ pattern :: [[String]] of
    -- Exactly one regex match: index 0 is the whole match, followed by
    -- the three capture groups (multiplier, die size, signed additive).
    [[_,multiplierStr,dieStr,additiveStr]] ->
      do let multiplier = parseNumber multiplierStr
         let additive = parseNumber additiveStr
         -- Maybe monad: a missing die size or invalid side count
         -- short-circuits to Nothing.
         dieNum <- parseNumber $ dieStr
         die <- dieWithSides dieNum
         return Roll {rollDie = die
                     ,rollMultiplier = multiplier
                     ,rollAdditive = additive}
    _ -> Nothing
|
elkorn/d20
|
src/D20/Dice/Parser.hs
|
mit
| 773
| 1
| 13
| 199
| 250
| 130
| 120
| 24
| 2
|
import Data.Generics.Uniplate.Direct
data Expr a
= Fls
| Tru
| Var a
| Not (Expr a)
| And (Expr a) (Expr a)
| Or (Expr a) (Expr a)
deriving (Show, Eq)
instance Uniplate (Expr a) where
uniplate (Not f) = plate Not |* f
uniplate (And f1 f2) = plate And |* f1 |* f2
uniplate (Or f1 f2) = plate Or |* f1 |* f2
uniplate x = plate x
-- | Bottom-up pass removing double negation and negations of the
-- boolean constants; every other node is left untouched.
simplify :: Expr a -> Expr a
simplify expr = transform step expr
  where
    step e =
      case e of
        Not (Not f) -> f
        Not Fls -> Tru
        Not Tru -> Fls
        _ -> e
-- | Rewrite a formula towards conjunctive normal form, applying the
-- rules below to a fixpoint ('rewrite' repeats until none applies).
reduce :: Show a => Expr a -> Expr a
reduce = rewrite cnf
  where
    -- double negation
    cnf (Not (Not p)) = Just p
    -- de Morgan
    cnf (Not (p `Or` q)) = Just $ (Not p) `And` (Not q)
    cnf (Not (p `And` q)) = Just $ (Not p) `Or` (Not q)
    -- distribute disjunction over conjunction (both orientations)
    cnf (p `Or` (q `And` r)) = Just $ (p `Or` q) `And` (p `Or` r)
    -- BUG FIX: this case previously produced (p∨q)∧(p∨r), which is not
    -- equivalent (take p=F, q=F, r=T); (p∧q)∨r must become (p∨r)∧(q∨r).
    cnf ((p `And` q) `Or` r) = Just $ (p `Or` r) `And` (q `Or` r)
    cnf _ = Nothing
example1 :: Expr String
example1 = simplify (Not (Not (Not (Not (Var "a")))))
-- Var "a"
example2 :: [String]
example2 = [a | Var a <- universe ex]
where
ex = Or (And (Var "a") (Var "b")) (Not (And (Var "c") (Var "d")))
-- ["a","b","c","d"]
-- | CNF reduction example over five variables.
example3 :: Expr String
example3 =
  reduce (Or (Or (And (Var "a") (Var "b")) (And (Var "c") (Var "d"))) (Var "e"))
|
riwsky/wiwinwlh
|
src/uniplate.hs
|
mit
| 1,375
| 0
| 15
| 407
| 757
| 408
| 349
| 40
| 6
|
-- | Implements Tarjan's algorithm for computing the strongly connected
-- components of a graph. For more details see:
-- <http://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm>
{-# LANGUAGE Rank2Types, Trustworthy #-}
module Data.Graph.ArraySCC(scc) where
import Data.Graph(Graph,Vertex)
import Data.Array.ST(STUArray, newArray, readArray, writeArray)
import Data.Array as A
import Data.Array.Unsafe(unsafeFreeze)
import Control.Monad.ST
import Control.Monad(ap)
-- | Computes the strongly connected components (SCCs) of the graph in
-- O(#edges + #vertices) time. The resulting tuple contains:
--
-- * A (reversed) topologically sorted list of SCCs.
-- Each SCCs is assigned a unique identifier of type 'Int'.
--
-- * An O(1) mapping from vertices in the original graph to the identifier
-- of their SCC. This mapping will raise an \"out of bounds\"
-- exception if it is applied to integers that do not correspond to
-- vertices in the input graph.
--
-- This function assumes that the adjacency lists in the original graph
-- mention only nodes that are in the graph. Violating this assumption
-- will result in \"out of bounds\" array exception.
scc :: Graph -> ([(Int,[Vertex])], Vertex -> Int)
scc g = runST (
  -- Both mutable arrays are indexed by vertex and start at 0
  -- ("never visited"); see the legend on the 'Func' type below.
  do ixes <- newArray (bounds g) 0
     lows <- newArray (bounds g) 0
     -- Run the DFS from every vertex. S [] 1 [] 1 is the initial state:
     -- empty stack, first DFS number 1, no finished SCCs, first SCC id 1.
     s <- roots g ixes lows (S [] 1 [] 1) (indices g)
     -- Freeze the index array: it now maps each vertex to its SCC id.
     sccm <- unsafeFreeze ixes
     return (sccs s, \i -> sccm ! i)
  )
type Func s a =
Graph -- The original graph
-> STUArray s Vertex Int -- Index in DFS traversal, or SCC for vertex.
-- Legend for the index array:
-- 0: Node not visited
-- -ve: Node is on the stack with the given number
-- +ve: Node belongs to the SCC with the given number
-> STUArray s Vertex Int -- Least reachable node
-> S -- State
-> a
data S = S { stack :: ![Vertex] -- ^ Traversal stack
, num :: !Int -- ^ Next node number
, sccs :: ![(Int,[Vertex])] -- ^ Finished SCCs
, next_scc :: !Int -- ^ Next SCC number
}
-- Start a DFS from every still-unvisited vertex (index 0 means "never
-- seen"), threading the traversal state through the whole sweep.
roots :: Func s ([Vertex] -> ST s S)
roots g ixes lows st (v:vs) =
  do i <- readArray ixes v
     if i == 0 then do s1 <- from_root g ixes lows st v
                       roots g ixes lows s1 vs
               else roots g ixes lows st vs
roots _ _ _ s [] = return s
-- Visit v for the first time: number it, push it on the stack, explore
-- its successors, then pop a completed SCC if v turns out to be a root.
from_root :: Func s (Vertex -> ST s S)
from_root g ixes lows s v =
  do let me = num s
     -- Negative index marks "on the stack with DFS number me".
     writeArray ixes v (negate me)
     writeArray lows v me
     newS <- check_adj g ixes lows
                  s { stack = v : stack s, num = me + 1 } v (g ! v)
     x <- readArray lows v
     -- v is an SCC root iff nothing reachable from it gets to an
     -- earlier (lower-numbered) vertex still on the stack.
     if x < me then return newS else
       case span (/= v) (stack newS) of
         (as,b:bs) ->
            do let this = b : as
                   n = next_scc newS
               -- A positive index now records the final SCC id.
               mapM_ (\i -> writeArray ixes i n) this
               return S { stack = bs
                        , num = num newS
                        , sccs = (n,this) : sccs newS
                        , next_scc = n + 1
                        }
         _ -> error ("bug in scc---vertex not on the stack: " ++ show v)
-- Walk v's adjacency list, folding the lowest reachable DFS number
-- seen through each successor v' into lows[v].
check_adj :: Func s (Vertex -> [Vertex] -> ST s S)
check_adj g ixes lows st v (v':vs) =
  do i <- readArray ixes v'
     case () of
       -- Unvisited successor: recurse, then merge its low into ours.
       _ | i == 0 ->
         do newS <- from_root g ixes lows st v'
            new_low <- min `fmap` readArray lows v `ap` readArray lows v'
            writeArray lows v new_low
            check_adj g ixes lows newS v vs
         -- Successor is on the stack (negative marker): its DFS number
         -- (negate i) bounds our low.
         | i < 0 ->
         do j <- readArray lows v
            writeArray lows v (min j (negate i))
            check_adj g ixes lows st v vs
         -- Successor already assigned to a finished SCC: ignore it.
         | otherwise -> check_adj g ixes lows st v vs
check_adj _ _ _ st _ [] = return st
|
yav/GraphSCC
|
Data/Graph/ArraySCC.hs
|
mit
| 3,842
| 0
| 17
| 1,286
| 1,075
| 558
| 517
| 74
| 3
|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedStrings #-}
module Javran.MaxFlow.Parser
( NetworkRep (..)
, parseFromRaw
)
where
import Control.Applicative
import Control.Monad.Except
import Control.Monad.State
import qualified Data.Attoparsec.Text as P
import Data.Char
import qualified Data.Text as T
import Javran.MaxFlow.Types
data RawLine
= RComment T.Text
| RProblem Int Int
| RNodeDesc Int Bool {- true for source -}
| RArc (Int, Int) Int
deriving (Show)
{-
The format is specified in: http://lpsolve.sourceforge.net/5.5/DIMACS_maxf.htm
in addition, lines with just spaces are treated as if it's an empty comment line,
this behavior is not specified by the format, but we do have those lines from generated examples.
-}
{-
parsing a line without consuming the '\n' in the end
which might or might not (if it is the last line) present.
-}
rawLine :: P.Parser RawLine
rawLine =
emptyLine
<|> commentLine
<|> problemLine
<|> nodeDescLine
<|> arcLine
where
emptyLine = do
_ <- P.takeWhile (\ch -> isSpace ch && ch /= '\n')
next <- P.peekChar
case next of
Just '\n' -> pure $ RComment ""
_ -> fail "invalid line"
commentLine = do
_ <- "c"
next <- P.peekChar
case next of
Just '\n' -> pure $ RComment ""
Just ' ' ->
" "
*> (RComment <$> P.takeWhile (/= '\n'))
_ -> fail "invalid lookahead char."
problemLine = do
_ <- "p max "
RProblem <$> (P.decimal <* " ") <*> P.decimal
nodeDescLine = do
_ <- "n "
nId <- P.decimal <* " "
isSource <- (True <$ "s") <|> (False <$ "t")
pure $ RNodeDesc nId isSource
arcLine = do
_ <- "a "
src <- P.decimal <* " "
dst <- P.decimal <* " "
cap <- P.decimal
pure $ RArc (src, dst) cap
parseContent :: T.Text -> Either String [RawLine]
parseContent =
P.parseOnly (rawLine `P.sepBy1` "\n" <* P.takeWhile isSpace)
parseNetwork :: StateT [RawLine] (Except String) NetworkRep
parseNetwork = do
-- drop comments
modify
(filter
(\case
RComment {} -> False
_ -> True))
{-
according to spec, first 3 lines (ignoring comments), must be:
- one "p" line
- two "n" lines marking a source and a sink.
-}
xs <- state $ splitAt 3
case xs of
[RProblem nrNodeCount nrArcCount, RNodeDesc v0 isSrc0, RNodeDesc v1 isSrc1]
| isSrc0 /= isSrc1 -> do
let (nrSource, nrSink) = if isSrc0 then (v0, v1) else (v1, v0)
arcDescs <- state (\s -> (s, []))
-- those must be arc lines
let convert (RArc p cap) = pure (p, cap)
convert t = throwError $ "not an arc: " <> show t
nrArcs <- mapM convert arcDescs
pure NetworkRep {nrNodeCount, nrArcCount, nrSource, nrSink, nrArcs}
_ -> throwError $ "invalid initial input lines (comments ignored): " <> show xs
-- | End-to-end parse of DIMACS max-flow input: tokenize into raw lines,
-- then assemble them into a 'NetworkRep'. Fails with a descriptive
-- message if any raw line is left over after the network is built.
parseFromRaw :: T.Text -> Either String NetworkRep
parseFromRaw raw = runExcept $ do
  xs <- liftEither $ parseContent raw
  r <- runStateT parseNetwork xs
  case r of
    (v, []) -> pure v
    _ -> throwError "lines not fully consumed"
|
Javran/misc
|
max-flow/src/Javran/MaxFlow/Parser.hs
|
mit
| 3,266
| 0
| 18
| 880
| 891
| 460
| 431
| 85
| 5
|
{-# LANGUAGE BangPatterns #-}
{-# OPTIONS -fno-warn-orphans #-}
-- ----------------------------------------------------------------------------
{- |
Module : Holumbus.Index.Common.Occurences
Copyright : Copyright (C) 2011 Sebastian M. Schlatt, Timo B. Huebel, Uwe Schmidt
License : MIT
Maintainer : Timo B. Huebel (tbh@holumbus.org)
Stability : experimental
Portability: none portable
The Occurences data type
-}
-- ----------------------------------------------------------------------------
module Holumbus.Index.Common.Occurences
where
import Control.Applicative ((<$>))
import qualified Data.Binary as B
import qualified Data.IntSet as IS
import qualified Data.IntSet.Cache as IS
import qualified Debug.Trace as DT
import Holumbus.Index.Common.BasicTypes
import Holumbus.Index.Common.DocId
import Holumbus.Index.Common.DocIdMap
import Text.XML.HXT.Core
-- ------------------------------------------------------------
-- | The occurrences in a number of documents.
-- A mapping from document ids to the positions in the document.
type Occurrences = DocIdMap Positions
-- | Create an empty set of positions.
emptyOccurrences :: Occurrences
emptyOccurrences = emptyDocIdMap
-- | Create an empty set of positions.
singletonOccurrence :: DocId -> Position -> Occurrences
singletonOccurrence d p = insertOccurrence d p emptyDocIdMap
-- | Test on empty set of positions.
nullOccurrences :: Occurrences -> Bool
nullOccurrences = nullDocIdMap
-- | Determine the number of positions in a set of occurrences.
sizeOccurrences :: Occurrences -> Int
sizeOccurrences = foldDocIdMap ((+) . IS.size) 0
insertOccurrence :: DocId -> Position -> Occurrences -> Occurrences
insertOccurrence d p = insertWithDocIdMap IS.union d (singletonPos p)
deleteOccurrence :: DocId -> Position -> Occurrences -> Occurrences
deleteOccurrence d p = substractOccurrences (singletonDocIdMap d (singletonPos p))
updateOccurrences :: (DocId -> DocId) -> Occurrences -> Occurrences
updateOccurrences f = foldWithKeyDocIdMap
(\ d ps res -> insertWithDocIdMap IS.union (f d) ps res) emptyOccurrences
-- | Merge two occurrences.
mergeOccurrences :: Occurrences -> Occurrences -> Occurrences
mergeOccurrences = unionWithDocIdMap IS.union
diffOccurrences :: Occurrences -> Occurrences -> Occurrences
diffOccurrences = differenceDocIdMap
-- | Substract occurrences from some other occurrences.
substractOccurrences :: Occurrences -> Occurrences -> Occurrences
substractOccurrences = differenceWithDocIdMap substractPositions
    where
    -- Per-document set difference of positions; a document whose
    -- position set becomes empty is dropped from the map entirely
    -- (Nothing from the combining function).
    substractPositions p1 p2
        = if IS.null diffPos
          then Nothing
          else Just diffPos
        where
          diffPos = IS.difference p1 p2
-- | The XML pickler for the occurrences of a word.
xpOccurrences :: PU Occurrences
xpOccurrences = xpWrap (fromListDocIdMap, toListDocIdMap)
(xpList xpOccurrence)
where
xpOccurrence = xpElem "doc" $
xpPair (xpAttr "idref" xpDocId)
xpPositions
-- ------------------------------------------------------------
-- | The positions of the word in the document.
type Positions = IS.IntSet
emptyPos :: Positions
emptyPos = IS.empty
singletonPos :: Position -> Positions
singletonPos = IS.cacheAt
memberPos :: Position -> Positions -> Bool
memberPos = IS.member
toAscListPos :: Positions -> [Position]
toAscListPos = IS.toAscList
fromListPos :: [Position] -> Positions
fromListPos = IS.fromList
sizePos :: Positions -> Int
sizePos = IS.size
unionPos :: Positions -> Positions -> Positions
unionPos = IS.union
foldPos :: (Position -> r -> r) -> r -> Positions -> r
foldPos = IS.fold
-- | The XML pickler for a set of positions.
xpPositions :: PU Positions
xpPositions = xpWrap ( IS.fromList . (map read) . words
, unwords . (map show) . IS.toList
) xpText
{-# INLINE emptyPos #-}
{-# INLINE singletonPos #-}
{-# INLINE memberPos #-}
{-# INLINE toAscListPos #-}
{-# INLINE fromListPos #-}
{-# INLINE sizePos #-}
{-# INLINE unionPos #-}
{-# INLINE foldPos #-}
-- ------------------------------------------------------------
newtype WrappedPositions = WPos {unWPos :: Positions}
instance B.Binary WrappedPositions where
put = B.put . IS.toList . unWPos
get = (WPos . IS.unions . map IS.cacheAt) <$> B.get
newtype WrappedOccs = WOccs {unWOccs :: Occurrences}
instance B.Binary WrappedOccs where
put = B.put . mapDocIdMap WPos . unWOccs
get = (WOccs . mapDocIdMap unWPos) <$> B.get
-- ------------------------------------------------------------
-- Just for space performance stats
sizeOccPos :: Occurrences -> (Int, Int)
sizeOccPos os
= foldDocIdMap (\ ps (!dc, !pc) -> (dc + 1, pc + IS.size ps)) (0, 0) os
traceOccPos :: Occurrences -> Occurrences
traceOccPos os
= DT.trace msg os
where
_sc@(!dc, !pc) = sizeOccPos os
v0 | dc == pc = show $ foldDocIdMap (\ ps res -> IS.elems ps ++ res) [] os
| otherwise = show $ foldDocIdMap (\ ps res -> IS.elems ps : res) [] os
msg = "traceOccPos: " ++ v0
-- ------------------------------------------------------------
|
ichistmeinname/holumbus
|
src/Holumbus/Index/Common/Occurences.hs
|
mit
| 5,851
| 0
| 14
| 1,656
| 1,081
| 603
| 478
| 92
| 2
|
read_stdin = do
input <- getContents
evaluate $ force input
int32_to_char :: Int32 -> Prelude.Char
int32_to_char !i = toEnum (fromIntegral i)
str_cons !x !xs =
concat [xs, [int32_to_char x]]
char_to_int32 :: Prelude.Char -> Int32
char_to_int32 !c = fromIntegral (fromEnum c)
string_to_reuse_string !s =
foldl (\ !s !c -> string_append (char_to_int32 c) s) string_empty s
stdin_get :: (Int32 -> a) -> a -> Prelude.String -> Int32 -> a
stdin_get !succ !fail !s !index =
let !i = fromIntegral index in
if i < (length s) && i >= 0 then
succ (char_to_int32 (s !! i))
else
fail
list_to_string = string_foldl str_cons ""
stdin_list = do
stdin_string <- read_stdin
return stdin_string
hs_string_next !iterable =
case iterable of
(Pair2 !s !index) -> (Pair2
(stdin_get (\ !x -> Some x) None s index)
(Pair2 s (index + 1)))
hs_string_to_indexed_iterator s = indexed_iterator_from_iterable (IterableClass hs_string_next) (Pair2 s 0)
|
redien/reuse-lang
|
compiler-backend/haskell/StdinWrapper.hs
|
cc0-1.0
| 1,070
| 0
| 14
| 291
| 384
| 183
| 201
| -1
| -1
|
--
-- Copyright (c) 2013 Bonelli Nicola <bonelli@antifork.org>
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, write to the Free Software
-- Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
--
module Util where
import qualified Data.ByteString.Char8 as C
import qualified Data.ByteString.Lazy.Char8 as LC
import Data.Maybe
-- | Collapse a lazy ByteString into one strict chunk.
toStrict :: LC.ByteString -> C.ByteString
toStrict lbs = C.concat (LC.toChunks lbs)
-- | Wrap a value in 'Just' when the flag holds, otherwise 'Nothing'.
toMaybe :: a -> Bool -> Maybe a
toMaybe val flag = if flag then Just val else Nothing
-- | True when the list has at least one element.
notNull :: [a] -> Bool
notNull xs = not (null xs)
-- | Exclusive or: true exactly when the two operands differ.
xor :: Bool -> Bool -> Bool
a `xor` b = a /= b
-- | 'read' with a descriptive failure: parse the string or abort with
-- the given error prefix plus a snippet of the offending input.
prettyRead :: Read a => String -> String -> a
prettyRead xs err =
    case listToMaybe (reads xs) of
        Just (v, _) -> v
        Nothing -> error $ err ++ ": parse error near " ++ show(take 40 xs)
-- | Total version of 'read': the first successful parse, if any.
readMaybe :: Read a => String -> Maybe a
readMaybe str =
    case reads str of
        ((v, _) : _) -> Just v
        [] -> Nothing
|
YelaSeamless/cgrep
|
src/Util.hs
|
gpl-2.0
| 1,497
| 0
| 11
| 322
| 292
| 162
| 130
| 21
| 2
|
{-# LANGUAGE TemplateHaskell #-}
module Lambda.Derive.Config where
import Lambda.Type hiding ( free_variables )
import Autolib.Set
import Autolib.ToDoc
import Autolib.Reader
import Data.Typeable
-- | Exercise configuration for lambda-term derivation tasks.
data Type = Make
    { start_size_range :: (Int, Int) -- ^ size bounds for the start term
    , free_variables :: Set Identifier -- ^ variables allowed to occur free
    , overall_size_range :: (Int, Int) -- ^ size bounds during the derivation
    , derivation_length :: Int -- ^ requested number of derivation steps
    , require_exact_length :: Bool -- ^ if True, exactly that many steps
    }
    deriving ( Typeable, Eq, Ord )
-- Template Haskell: generate Reader and ToDoc instances for 'Type'.
$(derives [makeReader, makeToDoc] [''Type])
-- | Default configuration shown to task authors.
example :: Type
example = Make
    { start_size_range = (15, 25)
    , free_variables = mkSet $ read "[ x , y ]"
    , overall_size_range = (5, 50)
    , derivation_length = 5
    , require_exact_length = True
    }
-- local variables:
-- mode: haskell
-- end
|
florianpilz/autotool
|
src/Lambda/Derive/Config.hs
|
gpl-2.0
| 804
| 0
| 9
| 221
| 195
| 120
| 75
| 22
| 1
|
{-# LANGUAGE TemplateHaskell #-}
import Geometry
import Control.Applicative
import Control.Category
import Control.FilterCategory
import Control.Monad
import Data.ADT.Getters
import Data.Map (Map, findWithDefault, insert)
import Data.Monoid
import FRP.Peakachu
import FRP.Peakachu.Program
import FRP.Peakachu.Backend.File
import FRP.Peakachu.Backend.GLUT
import FRP.Peakachu.Backend.GLUT.Getters
import Graphics.UI.GLUT hiding (Name, Program, get)
import Prelude hiding ((.), id)
gridRadius :: Int
gridRadius = 4
type DrawPos = (GLfloat, GLfloat)
-- | Look up a key, falling back to 'mempty' when it is absent.
findWithDef :: (Monoid a, Ord k) => k -> Map k a -> a
findWithDef key table = findWithDefault mempty key table
-- | Apply a function to the value at a key, treating a missing key as
-- holding 'mempty' (so an absent key is inserted with @func mempty@).
adjustWithDef :: (Monoid a, Ord k) => (a -> a) -> k -> Map k a -> Map k a
adjustWithDef func key table =
  insert key (func (findWithDefault mempty key table)) table
-- | Snap a point to the nearest grid intersection: scale up to grid
-- units, round each coordinate, and scale back down.
toGrid :: DrawPos -> DrawPos
toGrid (x, y) = (snap x, snap y)
  where
    cells = fromIntegral gridRadius
    snap c = fromIntegral ((round :: GLfloat -> Int) (c * cells)) / cells
-- | Append a single element to the end of a list.
snoc :: a -> [a] -> [a]
snoc y ys = ys ++ [y]
-- | Add a point to the head polygon of a polygon list.
-- An empty list starts a fresh polygon; adding a point that is already
-- in the head polygon closes it and opens a new (empty) one; otherwise
-- the point is appended to the head polygon.
addPoint :: Eq a => [[a]] -> a -> [[a]]
addPoint polys p =
  case polys of
    [] -> [[p]]
    ([] : rest) -> [p] : rest
    (poly : rest)
      | p `elem` poly -> [] : poly : rest
      | otherwise -> (poly ++ [p]) : rest
data MyIn = Glut (GlutToProgram ()) | FileI (FileToProgram ())
data MyOut
= GlutO (ProgramToGlut ())
| FileO (ProgramToFile ())
data MidLayer
= AText String
| APos DrawPos
| AClick MouseButton
| AFont (Map String [[DrawPos]])
| ADoLoad | ADoSave
$(mkADTGetters ''MyIn)
$(mkADTGetters ''MyOut)
$(mkADTGetters ''MidLayer)
draw :: Map String [[DrawPos]] -> String -> DrawPos -> Image
draw font text cpos@(cx, cy) =
Image $ do
color $ Color4 0.1 0.3 0.1 (1 :: GLfloat)
renderPrimitive Triangles .
forM_ (addPoint polygons cpos) $ \poly ->
forM_ ((
filter ((== 3) . length) .
map (expandPolygon (-0.01)) .
triangulatePolygon
) poly) .
mapM_ $ \(x, y) ->
vertex $ Vertex2 x y
forM_ gridLines $ \x ->
forM_ gridLines $ \y ->
drawPoint x y 0.03 (Color4 0.5 0.5 0.5 1)
forM_ (concat polygons) $ \(x, y) ->
drawPoint x y 0.07 (Color4 0.3 1 0.2 1)
drawPoint cx cy 0.05 (Color4 1 0.2 0.2 1)
currentRasterPosition $= Vertex4 (-1) (-1) 0 (1 :: GLfloat)
renderString Helvetica18 text
return ()
where
polygons = findWithDef text font
drawPoint :: GLfloat -> GLfloat -> GLfloat -> Color4 GLfloat -> IO ()
drawPoint x y s col =
renderPrimitive Quads $ do
color col
vertex $ Vertex2 (x-s) y
vertex $ Vertex2 x (y-s)
vertex $ Vertex2 (x+s) y
vertex $ Vertex2 x (y+s)
gridLines = map ((/ fromIntegral gridRadius) . fromIntegral) [-gridRadius..gridRadius]
gameProc :: Program MyIn MyOut
gameProc =
mconcat
[ GlutO . DrawImage <$> (draw <$> lstP gAFont <*> lstP gAText <*> lstP gAPos)
, FileO <$> mconcat
[ doLoad <$ mapMaybeC gADoLoad <*> lstP gAText
, doSave <$ mapMaybeC gADoSave <*> lstP gAText <*> lstP gAFont
]
]
. mconcat
[ id
, AFont <$>
scanlP fontStep mempty .
( (,,)
<$> mconcat
[ Left <$> mapMaybeC gAClick
, Right <$> mapMaybeC gAFont
]
<*> lstP gAText <*> lstP gAPos
)
]
. mconcat
[ mconcat
[ AText <$> scanlP textStep [] . mapMaybeC typedText
, ADoLoad <$ mapMaybeC (clicka (Char 'l') (Modifiers Up Up Down))
, ADoSave <$ mapMaybeC (clicka (Char 's') (Modifiers Up Up Down))
, AClick <$> mapMaybeC clicksFunc
] . mapMaybeC (gGlut >=> gKeyboardMouseEvent)
, APos <$> toGrid <$> mapMaybeC (gGlut >=> gMouseMotionEvent)
, AFont <$> read . fst <$> mapMaybeC (gFileI >=> gFileData)
]
where
typedText (c, s, m, _) = do
guard $ m == Modifiers Up Up Up
gDown s
gChar c
doLoad x = ReadFile (x ++ ".font") ()
doSave fn fnt = WriteFile (fn ++ ".font") (show fnt) ()
textStep "" '\DEL' = ""
textStep xs '\DEL' = init xs
textStep "" '\b' = ""
textStep xs '\b' = init xs
textStep xs x = snoc x xs
clicka key mods (k, s, m, _) = do
guard $ k == key && m == mods
gDown s
clicksFunc (key, state, _, _) = do
gDown state
gMouseButton key
fontStep prev (Left LeftButton, text, pos) =
adjustWithDef (`addPoint` pos) text prev
fontStep prev (Left _, text, pos) =
adjustWithDef
(([] :) . filter (not . null) .
filter (notElem pos))
text prev
fontStep _ (Right x, _, _) = x
main :: IO ()
main = do
initialWindowSize $= Size 600 600
initialDisplayCapabilities $=
[With DisplayRGB
,Where DisplaySamples IsAtLeast 2
]
let
backend =
mconcat
[ Glut <$> glut . mapMaybeC gGlutO
, FileI <$> fileB . mapMaybeC gFileO
]
runProgram backend gameProc
|
yairchu/defend
|
src/defendFontEdit.hs
|
gpl-3.0
| 4,797
| 0
| 22
| 1,279
| 1,989
| 1,026
| 963
| 147
| 7
|
{-# LANGUAGE NoImplicitPrelude, DeriveFunctor, DeriveFoldable, DeriveTraversable, OverloadedStrings #-}
module InferCombinators where
import Prelude.Compat
import Control.Lens (Lens')
import qualified Control.Lens as Lens
import Control.Lens.Operators
import Control.Lens.Tuple
import qualified Data.Map as Map
import Data.Maybe (fromMaybe)
import Data.Maybe.Utils (unsafeUnjust)
import qualified Data.Set as Set
import DefinitionTypes
import qualified Lamdu.Expr.Lens as ExprLens
import Lamdu.Expr.Scheme (Scheme(..))
import qualified Lamdu.Expr.Scheme as Scheme
import Lamdu.Expr.Type (Type)
import qualified Lamdu.Expr.Type as T
import Lamdu.Expr.TypeVars (TypeVars(..))
import qualified Lamdu.Expr.TypeVars as TV
import Lamdu.Expr.Val.Annotated (Val(..))
import qualified Lamdu.Expr.Val as V
import Lamdu.Expr.Val.Arbitrary ()
import qualified Lamdu.Infer as Infer
import Text.PrettyPrint ((<+>))
import qualified Text.PrettyPrint as PP
import Text.PrettyPrint.HughesPJClass (Pretty(..))
-- | How an annotation evolves across successive inference resumptions.
data ResumptionStep
  -- Any resumptions will have no effect:
  = Final
  -- Only one ResumeWith allowed for given depth (rest must be Final/NewInferred)
  | ResumeWith ExprWithResumptions
  | ResumeOnSide
    {-New expr to load/infer-}ExprWithResumptions
    {-Our new inferred-}Resumptions
  -- Some ResumeWith must exist at our level, and it will cause us to
  -- become this for the next level:
  | NewInferred Resumptions
data Resumptions = Resumptions
{ _rTyp :: Type
, _rStep :: ResumptionStep
}
rTyp :: Lens' Resumptions Type
rTyp f ipl = mk <$> f (_rTyp ipl)
where
mk x = ipl { _rTyp = x }
rStep :: Lens' Resumptions ResumptionStep
rStep f ipl = mk <$> f (_rStep ipl)
where
mk x = ipl { _rStep = x }
-- Like a ZipList but repeats the last elements of all lists infinitely
-- The digits of 1/3 would be represented as: RepeatList "0.3"
-- | An infinite list whose tail eventually repeats a single element
-- forever ('RRepeat' is the repeating tail).
data RepeatList a = RRepeat a | RCons a (RepeatList a)
  deriving (Functor, Eq, Ord, Read, Show, Foldable, Traversable)
-- Zip-like applicative: elements are combined pointwise, and once one
-- side reaches its RRepeat tail that element keeps being reused.
instance Applicative RepeatList where
  pure = RRepeat
  RRepeat f <*> RRepeat x = RRepeat (f x)
  RRepeat f <*> RCons x xs = RCons (f x) (RRepeat f <*> xs)
  RCons f fs <*> RRepeat x = RCons (f x) (fs <*> RRepeat x)
  RCons f fs <*> RCons x xs = RCons (f x) (fs <*> xs)
type TypeStream = RepeatList Type
typeStream :: Resumptions -> TypeStream
typeStream (Resumptions typ step) =
case step of
Final -> RRepeat typ
ResumeWith expr -> RCons typ $ exprTypeStream expr
ResumeOnSide _ rs -> RCons typ $ typeStream rs
NewInferred rs -> RCons typ $ typeStream rs
exprTypeStream :: ExprWithResumptions -> TypeStream
exprTypeStream = typeStream . (^. V.payload)
mkExprWithResumptions ::
V.Body ExprWithResumptions -> TypeStream -> ExprWithResumptions
mkExprWithResumptions body types =
Val (go types) body
where
go (RRepeat t) = Resumptions t Final
go (RCons t ts) = Resumptions t $ NewInferred $ go ts
type ExprWithResumptions = Val Resumptions
iType :: Lens' ExprWithResumptions Type
iType = V.payload . rTyp
resumeHere :: ExprWithResumptions -> ExprWithResumptions -> ExprWithResumptions
resumeHere (Val (Resumptions typ Final) body) newExpr =
Val (Resumptions typ (ResumeWith newExpr)) body
resumeHere (Val (Resumptions _ _) _) _ = error "Contradicting resumptions"
resumedType :: TypeStream -> TypeStream -> TypeStream
resumedType (RRepeat t) newTyp = RCons t newTyp
resumedType _ _ = error "Contradicting type resumptions"
compositeTypeVar :: T.Var (T.Composite p) -> RepeatList (T.Composite p)
compositeTypeVar ctv = pure $ T.CVar ctv
emptyCompositeType :: RepeatList (T.Composite p)
emptyCompositeType = pure T.CEmpty
compositeTypeExtend ::
T.Tag -> TypeStream ->
RepeatList T.Product ->
RepeatList T.Product
compositeTypeExtend tag typ base =
T.CExtend tag <$> typ <*> base
-- TODO: Re-use Subst and re-expose??
instantiate :: Scheme -> [(T.Var Type, Type)] -> Type
instantiate scheme typeVarAssignments =
onTVars subst (scheme ^. Scheme.schemeType)
where
subst =
unsafeUnjust "Missing type var assignment" .
(`lookup` typeVarAssignments)
onTVars :: (T.Var Type -> Type) -> Type -> Type
onTVars f (T.TVar v) = f v
onTVars f t = t & ExprLens.nextLayer %~ onTVars f
glob :: [TypeStream] -> V.GlobalId -> ExprWithResumptions
glob typeVarAssignments globalId
| Set.null rtvs && Set.null stvs =
mkExprWithResumptions (V.BLeaf (V.LGlobal globalId)) $
instantiate scheme <$>
Lens.sequenceAOf (Lens.traversed . _2) typeVarAssignments'
| otherwise = error "TODO: Handle record/sum type vars in globals"
where
scheme =
unsafeUnjust ("global " ++ show globalId ++ " does not exist") $
Map.lookup globalId $
Infer.loadedGlobalTypes definitionTypes
TypeVars tvs rtvs stvs = scheme ^. Scheme.schemeForAll
typeVarAssignments' = zip (Set.toList tvs) typeVarAssignments
intType :: TypeStream
intType = pure T.TInt
literalInteger :: Integer -> ExprWithResumptions
literalInteger x =
mkExprWithResumptions (V.BLeaf (V.LLiteralInteger x)) intType
-- TODO: Make this take a (TypeStream) (WHICH SHOULD BE NAMED TypeStream)
-- and then make combinators to build type streams?
holeWithInferredType :: TypeStream -> ExprWithResumptions
holeWithInferredType = mkExprWithResumptions (V.BLeaf V.LHole)
typeVar :: TV.VarKind b => T.Var b -> RepeatList b
typeVar x = pure . TV.lift $ x
infixr 1 ~>
(~>) :: TypeStream -> TypeStream -> TypeStream
a ~> r = T.TFun <$> a <*> r
lambda ::
V.Var -> TypeStream ->
(ExprWithResumptions -> ExprWithResumptions) -> ExprWithResumptions
lambda name paramType mkResult =
mkExprWithResumptions (V.BLam (V.Lam name result))
(T.TFun <$> paramType <*> exprTypeStream result)
where
result = mkResult $ mkExprWithResumptions (V.BLeaf (V.LVar name)) paramType
getField :: ExprWithResumptions -> T.Tag -> ExprWithResumptions
getField recordVal tag =
mkExprWithResumptions
(V.BGetField (V.GetField recordVal tag))
(findTypeOfField tag <$> exprTypeStream recordVal)
findTypeOfField :: T.Tag -> Type -> Type
findTypeOfField tag (T.TRecord p) = findTypeOfTagInComposite tag p
findTypeOfField _ _ = error "Test combinators type checking failed in findTypeOfField"
findTypeOfTagInComposite :: T.Tag -> T.Composite t -> Type
findTypeOfTagInComposite expectedTag (T.CExtend tag typ rest)
| expectedTag == tag = typ
| otherwise = findTypeOfTagInComposite expectedTag rest
findTypeOfTagInComposite _ _ = error "Test combinators type checking failed in findTypeOfTagInComposite"
-- TODO: Reuse FlatComposite if it gets exposed:
-- | Build a composite by extending 'base' with the given fields, first
-- field outermost. NOTE(review): could reuse FlatComposite if exposed.
compositeOfList :: T.Composite t -> [(T.Tag, Type)] -> T.Composite t
compositeOfList base fields =
  foldr (\(tag, typ) acc -> T.CExtend tag typ acc) base fields
lambdaRecord ::
RepeatList T.Product -> V.Var -> [(T.Tag, TypeStream)] ->
([ExprWithResumptions] -> ExprWithResumptions) -> ExprWithResumptions
lambdaRecord baseRecord paramsName fields mkResult =
lambda paramsName recordType $ \params ->
mkResult $ map (getField params . fst) fields
where
recordType =
T.TRecord <$>
(compositeOfList <$> baseRecord <*> Lens.sequenceAOf (Lens.traversed . _2) fields)
letItem ::
V.Var -> ExprWithResumptions -> (ExprWithResumptions -> ExprWithResumptions) -> ExprWithResumptions
letItem name val mkBody = lambda name (exprTypeStream val) mkBody $$ val
-- Uses inferred holes for cons type
nonEmptyList :: [ExprWithResumptions] -> ExprWithResumptions
nonEmptyList [] = error "Given empty list in nonEmptyList"
nonEmptyList items@(x:_) =
foldr cons nil items
where
typ = exprTypeStream x
cons h t = glob [typ] ":" $$: [h, t]
nil = glob [typ] "[]"
tInst :: T.NominalId -> [(T.ParamId, TypeStream)] -> TypeStream
tInst name =
fmap (T.TInst name . Map.fromList) . Lens.sequenceAOf (Lens.traversed . _2)
boolType :: TypeStream
boolType = tInst "Bool" []
listOf :: TypeStream -> TypeStream
listOf t = tInst "List" [("val", t)]
maybeOf :: TypeStream -> TypeStream
maybeOf t = tInst "Maybe" [("val", t)]
eRecEmpty :: ExprWithResumptions
eRecEmpty = mkExprWithResumptions (V.BLeaf V.LRecEmpty) $ pure $ T.TRecord T.CEmpty
eRecExtend :: T.Tag -> ExprWithResumptions -> ExprWithResumptions -> ExprWithResumptions
eRecExtend tag v rest =
mkExprWithResumptions (V.BRecExtend (V.RecExtend tag v rest)) $
f <$> exprTypeStream v <*> exprTypeStream rest
where
f tv (T.TRecord txs) = T.TRecord $ T.CExtend tag tv txs
f _ _ = error "eRecExtend with non record type"
record :: [(T.Tag, ExprWithResumptions)] -> ExprWithResumptions
record = foldr (uncurry eRecExtend) eRecEmpty
infixl 4 $$
infixl 4 $$:
infixl 4 $.
($.) :: ExprWithResumptions -> T.Tag -> ExprWithResumptions
($.) = getField
($$) :: ExprWithResumptions -> ExprWithResumptions -> ExprWithResumptions
($$) func arg =
mkExprWithResumptions (V.BApp (V.Apply func arg)) $
mkType <$> exprTypeStream func <*> exprTypeStream arg
where
mkType (T.TFun p r) a
| p == a = r
| otherwise =
error $
"Incompatible types in '" ++
show (V.pPrintUnannotated func <+> PP.text "$$" <+> V.pPrintUnannotated arg) ++
"' param is " ++
show (pPrint p) ++ " and arg is " ++ show (pPrint a)
mkType _ _ = error "Apply of non-func type!"
($$:) :: ExprWithResumptions -> [ExprWithResumptions] -> ExprWithResumptions
($$:) f args =
f $$ record (zip tags args)
where
tags =
case f ^. iType of
T.TFun (T.TRecord p) _ -> compositeFieldTags p
_ -> error "not a record func in ($$:)"
compositeFieldTags :: T.Composite p -> [T.Tag]
compositeFieldTags T.CEmpty = []
compositeFieldTags T.CVar {} = error "unknown tags in compositeFieldTags"
compositeFieldTags (T.CExtend t _ r) = t : compositeFieldTags r
|
da-x/lamdu
|
test/InferCombinators.hs
|
gpl-3.0
| 10,228
| 0
| 18
| 2,201
| 3,028
| 1,573
| 1,455
| 210
| 4
|
{-# LANGUAGE TemplateHaskell, TypeApplications, RecordWildCards, ScopedTypeVariables, KindSignatures #-}
{-# LANGUAGE MultiParamTypeClasses, FlexibleInstances, DefaultSignatures #-}
module Lamdu.Sugar.Eval
( addEvaluationResults
) where
import qualified Control.Lens as Lens
import Data.CurAndPrev (CurAndPrev)
import Data.Kind (Type)
import qualified Data.Map as Map
import Hyper
import Hyper.Class.Morph
import Hyper.Syntax.Nominal (NominalDecl)
import Lamdu.Calc.Lens (tIds)
import qualified Lamdu.Calc.Type as T
import qualified Lamdu.Data.Anchors as Anchors
import qualified Lamdu.Eval.Results as R
import Lamdu.Eval.Results (EvalResults)
import Lamdu.Eval.Results.Process (addTypes)
import qualified Lamdu.Sugar.Convert.Eval as ConvertEval
import Lamdu.Sugar.Convert.Load (makeNominalsMap)
import Lamdu.Sugar.Internal
import Lamdu.Sugar.Internal.EntityId (EntityId(..))
import qualified Lamdu.Sugar.Internal.EntityId as EntityId
import qualified Lamdu.Sugar.Lens as SugarLens
import qualified Lamdu.Sugar.Lens.Annotations as SugarLens
import Lamdu.Sugar.Types hiding (Type)
import Revision.Deltum.Transaction (Transaction)
import Lamdu.Prelude
type T = Transaction
-- | Context threaded through the pass: the (current & previous)
-- evaluation results to splice into annotations, plus the nominal
-- declarations needed by 'addTypes' to re-type raw result values.
data AddEvalCtx = AddEvalCtx
    { _evalResults :: CurAndPrev EvalResults
    , _nominalsMap :: Map NominalId (Pure # NominalDecl T.Type)
    }
Lens.makeLenses ''AddEvalCtx
-- | Rewrite a single annotated tree node, replacing 'EvalPrep'
-- annotations in its payload (and, via 'AddEval', throughout its
-- body) with concrete 'EvaluationScopes'.
class AddEvalToNode i n t0 t1 where
    addToNode ::
        (Monad m, Applicative i) =>
        AddEvalCtx ->
        Annotated (Annotation EvalPrep n, a, ConvertPayload m) # t0 ->
        Annotated (Annotation (EvaluationScopes InternalName i) n, a, ConvertPayload m) # t1
-- Leaf nodes: only the payload's annotation changes.
instance AddEvalToNode i n (Const x) (Const x) where
    addToNode r (Ann (Const pl) (Const x)) = Ann (Const (addToPayload r pl)) (Const x)
instance
    (AddEval i n e, Applicative i) =>
    AddEvalToNode i n
    (e (Annotation EvalPrep n) n i o)
    (e (Annotation (EvaluationScopes InternalName i) n) n i o) where
    -- Interior nodes: rewrite the payload and recurse into the body,
    -- passing down this node's EntityId from the payload.
    addToNode results (Ann a b) =
        Ann
        { _hAnn = a & Lens._Wrapped %~ addToPayload results
        , _hVal = addToBody results (a ^. Lens._Wrapped . _3 . pEntityId) b
        }
-- | Shape of 'addToBody': rewrite a body given the context and the
-- 'EntityId' of the node that owns it.
type AddToBodyType e n (i :: Type -> Type) (o :: Type -> Type) m a =
    AddEvalCtx -> EntityId ->
    e (Annotation EvalPrep n) n i o #
        Annotated (Annotation EvalPrep n, a, ConvertPayload m) ->
    e (Annotation (EvaluationScopes InternalName i) n) n i o #
        Annotated (Annotation (EvaluationScopes InternalName i) n, a, ConvertPayload m)
class AddEval i n e where
    addToBody :: (Applicative i, Monad m) => AddToBodyType e n i o m a
    -- The default covers body types with no eval-specific behaviour:
    -- recurse into every child node via the hypertypes morphism.
    default addToBody ::
        ( HMorphWithConstraint
            (e (Annotation EvalPrep n) n i o)
            (e (Annotation (EvaluationScopes InternalName i) n) n i o)
            (AddEvalToNode i n)
        , Applicative i, Monad m
        ) => AddToBodyType e n i o m a
    addToBody r _ =
        morphMap (Proxy @(AddEvalToNode i n) #?> addToNode r)
-- Sum-type bodies dispatch to the wrapped constructor's rewrite.
instance AddEval i n Assignment where
    addToBody r i (BodyFunction x) = addToBody r i x & BodyFunction
    addToBody r i (BodyPlain x) = x & apBody %~ addToBody r i & BodyPlain
instance AddEval i n Binder where
    addToBody r i = bBody %~ addToBody r i
instance AddEval i n BinderBody where
    addToBody r i (BinderLet x) = addToBody r i x & BinderLet
    addToBody r i (BinderTerm x) = addToBody r i x & BinderTerm
instance AddEval i n Composite
instance AddEval i n Else where
    addToBody r i (SimpleElse x) = addToBody r i x & SimpleElse
    addToBody r i (ElseIf x) = x & eIfElse %~ addToBody r i & ElseIf
-- Lambdas are the interesting case: parameter annotations come from
-- the recorded applies of this lambda (looked up by its EntityId).
instance AddEval i n Function where
    addToBody ctx i x@Function{..} =
        x
        { _fParams = addToParams False nomsMap lamApplies _fParams
        , _fBody = addToNode ctx _fBody
        , _fBodyScopes =
            ctx ^. evalResults
            <&> (^. R.erAppliesOfLam . Lens.ix u)
            <&> Lens.mapped . Lens.mapped %~ BinderParamScopeId . (^. _1)
        }
        where
            EntityId u = i
            nomsMap = ctx ^. nominalsMap
            lamApplies =
                ctx ^. evalResults
                <&> (^. R.erAppliesOfLam . Lens.ix u)
                <&> Map.fromList . (^.. traverse . traverse)
-- These use the generic default (recurse into all children).
instance AddEval i n IfElse
instance AddEval i n LabeledApply
instance AddEval i n PostfixApply
instance AddEval i n PostfixFunc
-- Let bindings take their annotation values from the recorded value
-- of the bound expression rather than from lambda applies.
instance AddEval i n Let where
    addToBody r _ l =
        l
        { _lValue = l ^. lValue & addToNode r
        , _lNames = l ^. lNames & addToParams True (r ^. nominalsMap) vals
        , _lBody = l ^. lBody & addToNode r
        }
        where
            EntityId u = l ^. lValue . annotation . _3 . pEntityId
            vals = r ^. evalResults <&> (^. R.erExprValues . Lens.ix u)
instance AddEval i n Term where
    addToBody r i =
        \case
        BodyLeaf x -> BodyLeaf x
        BodySimpleApply (App x y) -> App (addToNode r x) (addToNode r y) & BodySimpleApply
        BodyRecord c -> addToBody r i c & BodyRecord
        BodyIfElse x -> addToBody r i x & BodyIfElse
        BodyLam lam -> lam & lamFunc %~ addToBody r i & BodyLam
        BodyToNom nom -> nom & nVal %~ addToNode r & BodyToNom
        BodyLabeledApply x -> addToBody r i x & BodyLabeledApply
        BodyFragment f -> f & fExpr %~ addToNode r & BodyFragment
        BodyPostfixApply x -> addToBody r i x & BodyPostfixApply
        BodyPostfixFunc x -> addToBody r i x & BodyPostfixFunc
        BodyNullaryInject (NullaryInject j e) ->
            NullaryInject (addToNode r j) (addToNode r e) & BodyNullaryInject
-- | Annotate binder parameters with evaluation results.  The Bool
-- selects let-style ('ConvertEval.results') versus lambda-parameter
-- style ('ConvertEval.param') presentation of the scoped values.
addToParams ::
    Applicative i =>
    Bool ->
    Map NominalId (Pure # NominalDecl T.Type) ->
    CurAndPrev (Map ScopeId (R.Val ())) ->
    LhsNames n i o (Annotation EvalPrep n) ->
    LhsNames n i o (Annotation (EvaluationScopes InternalName i) n)
addToParams isLet nomsMap lamApplies =
    \case
    LhsVar v ->
        v & vParam . fpAnnotation . _AnnotationVal %~
            (if isLet then ConvertEval.results else ConvertEval.param)
            (EntityId.ofEvalOf (v ^. vTag . oTag . tagRefTag . tagInstance)) .
            appliesOfLam
        & LhsVar
    LhsRecord ps ->
        ps
        & SugarLens.taggedListItems %~ fixItem isLet nomsMap lamApplies
        & LhsRecord
    where
        -- Re-type the raw scope values using the annotation's type.
        appliesOfLam v = lamApplies <&> traverse %~ addTypes nomsMap (v ^. eType)
-- | Annotate one tagged item of a record-style parameter list.
fixItem ::
    Applicative i =>
    Bool ->
    Map NominalId (Pure # NominalDecl T.Type) ->
    CurAndPrev (Map ScopeId (R.Val ())) ->
    TaggedItem n i o (LhsField n (Annotation EvalPrep n)) ->
    TaggedItem n i o (LhsField n (Annotation (EvaluationScopes InternalName i) n))
fixItem isLet nomsMap lamApplies item =
    item & tiValue %~ fixLhsField isLet nomsMap lamApplies tag
    where
        tag = item ^. tiTag . tagRefTag
-- | Annotate a (possibly nested) record field pattern: the scoped
-- values for this field are extracted from the parent's values by
-- tag, then the same is applied recursively to sub-fields.
fixLhsField ::
    Applicative i =>
    Bool ->
    Map NominalId (Pure # NominalDecl T.Type) ->
    CurAndPrev (Map ScopeId (R.Val ())) ->
    Tag n ->
    LhsField n (Annotation EvalPrep n) ->
    LhsField n (Annotation (EvaluationScopes InternalName i) n)
fixLhsField isLet nomsMap lamApplies tag (LhsField p s) =
    LhsField
    (p <&> _AnnotationVal %~
        \v ->
        apps <&> traverse %~ addTypes nomsMap (v ^. eType)
        & (if isLet then ConvertEval.results else ConvertEval.param)
            (EntityId.ofEvalOf (tag ^. tagInstance))
    )
    (s <&> traverse %~
        \(t, f) ->
        (t, fixLhsField isLet nomsMap apps t f)
    )
    where
        apps = lamApplies <&> traverse %~ R.extractField () (tag ^. tagVal)
-- | Rewrite one payload: replace its 'EvalPrep' annotation with the
-- recorded expression values for this EntityId (empty when the
-- expression was never evaluated), re-typed via the nominals map.
addToPayload ::
    Applicative i =>
    AddEvalCtx ->
    (Annotation EvalPrep n, a, ConvertPayload m) ->
    (Annotation (EvaluationScopes InternalName i) n, a, ConvertPayload m)
addToPayload ctx a =
    a
    & _1 . _AnnotationVal %~
    \v ->
    ctx ^. evalResults
    <&> (^. R.erExprValues . Lens.at u)
    <&> fromMaybe mempty
    <&> Lens.mapped %~ addTypes (ctx ^. nominalsMap) (v ^. eType)
    & ConvertEval.results (EntityId.ofEvalOf i)
    where
        EntityId u = i
        i = a ^. _3 . pEntityId
-- | Entry point: load the nominal declarations mentioned by any
-- annotation type in the work area, then rewrite all panes and the
-- repl (including its completion status) with evaluation results.
addEvaluationResults ::
    forall n m i a.
    (Monad m, Applicative i) =>
    Anchors.CodeAnchors m ->
    CurAndPrev EvalResults ->
    WorkArea (Annotation EvalPrep n) n i (T m) (Annotation EvalPrep n, a, ConvertPayload m) ->
    T m (
        WorkArea (Annotation (EvaluationScopes InternalName i) n) n i (T m)
        (Annotation (EvaluationScopes InternalName i) n, a, ConvertPayload m))
addEvaluationResults cp r wa@(WorkArea panes repl globals) =
    makeNominalsMap
    ( wa ^..
        SugarLens.annotations @(Annotation EvalPrep n)
        . _AnnotationVal . eType . tIds
    )
    <&> AddEvalCtx r
    <&>
    \ctx ->
    WorkArea
    ( panes <&> SugarLens.paneBinder %~ addToNode ctx)
    ( repl
        & replExpr %~ addToNode ctx
        & replResult .~ (r <&> (^. R.erCompleted) & ConvertEval.completion cp)
    )
    globals
|
lamdu/lamdu
|
src/Lamdu/Sugar/Eval.hs
|
gpl-3.0
| 9,042
| 0
| 18
| 2,493
| 3,068
| 1,582
| 1,486
| -1
| -1
|
{-# LANGUAGE OverloadedStrings #-}
module Network.Refraction.Discover.Advertiser
( runAdvertiser
) where
import Control.Concurrent (threadDelay)
import Control.Concurrent.Chan (Chan, readChan)
import Control.Monad.CryptoRandom (crandomRs)
import Crypto.Random.DRBG (CtrDRBG, newGenIO)
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as B8
import Data.List (find)
import Data.Serialize as S
import Data.Text.Encoding (decodeUtf8, encodeUtf8)
import Network.Haskoin.Crypto (derivePubKey, PrvKey, pubKeyAddr)
import Network.Haskoin.Script (decodeOutputBS, isDataCarrier)
import Network.Haskoin.Transaction (scriptOutput, Tx)
import Network.Refraction.BitcoinUtils
import Network.Refraction.Blockchain (broadcastTx, fetchUTXOs)
import Network.Refraction.Discover.Types
import Network.Refraction.Generator (makeAdTransaction, makePairRequest)
import Network.Refraction.PeerToPeer (Msg)
-- TODO(hudon): remove uses of this partial helper; callers should
-- handle the 'Left' case explicitly instead of crashing.
-- | Extract the 'Right' value of an 'Either', crashing with a
-- descriptive message (rather than an opaque 'undefined') on 'Left'.
fromEither :: Either a b -> b
fromEither = either (error "Advertiser.fromEither: unexpected Left") id
-- TODO(hudon): avoid partial functions https://wiki.haskell.org/Avoiding_partial_functions
-- | Advertiser side of discovery: publish an ad transaction, wait
-- for a respondent on the P2P channel, then publish a pair response
-- spending the ad's change output.  Returns the chosen respondent's
-- location together with the pair-response transaction.
runAdvertiser :: Chan Msg -> PrvKey -> UTXO -> Location -> IO (Location, Tx)
runAdvertiser chan prvkey utxo loc = do
    putStrLn "Running advertiser..."
    g <- newGenIO :: IO CtrDRBG
    -- Random nonce identifying this advertiser in the pairing protocol.
    let aNonce = head $ crandomRs (minBound, maxBound) g :: Nonce
    adTx <- publishAd prvkey utxo loc aNonce
    (rNonce, rLoc) <- selectRespondent chan
    tx <- publishPairResponse prvkey (getChangeUTXO adTx) (aNonce, rNonce)
    return (rLoc, tx)
  where
    -- The change output is any output that is not the OP_RETURN data
    -- carrier.  NOTE(review): crashes ('undefined') if none exists.
    getChangeUTXO tx = case find isNotOPRETURN (getUTXOs tx) of
      -- TODO(hudon): handle error
      Nothing -> undefined
      Just utxo -> utxo
    isNotOPRETURN = not . isDataCarrier . fromEither . decodeOutputBS . scriptOutput . _txOut
-- | Build and broadcast the advertisement transaction embedding a
-- truncated onion location and the advertiser nonce.
publishAd :: PrvKey -> UTXO -> Location -> Nonce -> IO Tx
publishAd prvkey utxo loc nonce = do
    putStrLn "Publish ad..."
    -- Drop the ".onion" TLD so the location fits the data payload.
    let uniqueLoc = B8.take onionLengthWithoutTLD loc
        -- NOTE(review): partial — 'undefined' on generator failure.
        tx = either undefined id $ makeAdTransaction [utxo] prvkey uniqueLoc nonce tao (encodeUtf8 adFinder)
    broadcastTx tx
    putStrLn "Ad published!"
    return tx
-- | Block until a respondent announces itself on the channel and
-- return its nonce and location.  Currently always takes the first
-- message: 'waitForRespondents' is entered with @n == 0@, so the
-- recursive branch is never reached.
selectRespondent :: Chan Msg -> IO (Nonce, Location)
selectRespondent chan = do
    putStrLn "Selecting respondent..."
    respondent <- waitForRespondents 0
    putStrLn "Selected respondent!"
    return respondent
  where
    waitForRespondents n = do
      msg <- readChan chan
      -- TODO have an actual picking mechanism. We pick the first respondent for now
      if n == 0 then pickRespondent msg else waitForRespondents (n + 1)
    pickRespondent msg = do
      putStrLn "picked a respondent"
      -- NOTE(review): assumes the wire format is an 8-byte nonce
      -- followed by the location; 'undefined' on decode failure.
      let (rNonce, rLoc) = B.splitAt 8 msg -- TODO: don't assume Word64 Nonce, use better schema
      return (either undefined id $ S.decode rNonce, either undefined id $ S.decode rLoc)
-- | Build and broadcast the pair-response transaction committing to
-- both the advertiser and respondent nonces.
publishPairResponse :: PrvKey -> UTXO -> (Nonce, Nonce) -> IO Tx
publishPairResponse prvkey utxo nonces = do
    putStrLn "Publishing pair response"
    -- TODO use a separate generator that hashes the respondent nonce
    let tx = either undefined id $ makePairRequest [utxo] prvkey nonces tao (encodeUtf8 adFinder)
    broadcastTx tx
    putStrLn "Pair response published!"
    return tx
|
hudon/refraction-hs
|
src/Network/Refraction/Discover/Advertiser.hs
|
gpl-3.0
| 3,209
| 0
| 13
| 577
| 848
| 441
| 407
| 62
| 2
|
module Strings where
import Test.QuickCheck
import Data.Char (chr)
import qualified Data.Text as TS
import qualified Data.Text.Lazy as TL
-- Text
-- | 'Text' values are generated from a small fixed pool of names
-- (see 'mgenName') rather than arbitrary unicode, deliberately
-- producing collisions; shrinking goes through the underlying String.
instance Arbitrary TS.Text where
    arbitrary = TS.pack <$> mgenName -- arbitrary
    shrink xs = TS.pack <$> shrink (TS.unpack xs)
instance Arbitrary TL.Text where
    arbitrary = TL.pack <$> mgenName --arbitrary
    shrink xs = TL.pack <$> shrink (TL.unpack xs)
-- CoArbitrary instances delegate to the String instance so that
-- generated functions can depend on Text arguments.
instance CoArbitrary TS.Text where
    coarbitrary = coarbitrary . TS.unpack
instance CoArbitrary TL.Text where
    coarbitrary = coarbitrary . TL.unpack
-- | Generate a non-empty name made only of lowercase ASCII letters.
genName :: Gen String
genName = listOf1 lowercaseAscii
  where
    lowercaseAscii = fmap chr (choose (97, 122))
-- | Generate a name whose length is roughly logarithmic in the size
-- parameter: one character per recursion step, with the size halved
-- (bottoming out at 1) each time.  Non-positive sizes behave like 2.
sgenName :: Int -> Gen String
sgenName 1 = do
    c <- chr <$> choose (97, 122)
    return [c]
sgenName n = do
    c <- chr <$> choose (97, 122)
    -- The original bound this to a name shadowing the size parameter
    -- @n@; renamed to avoid the shadowing.
    rest <- sgenName (max (n `div` 2) 1)
    return (c : rest)
mgenName = oneof $ map return ["a", "b", "c"]
|
fcostantini/QuickFuzz
|
src/Strings.hs
|
gpl-3.0
| 954
| 0
| 12
| 221
| 354
| 189
| 165
| 27
| 1
|
module Main (main) where
import XMonad
import XMonad.Hooks.DynamicLog (statusBar, xmobarPP)
import qualified Data.Map as M
import Data.Maybe (fromMaybe)
import System.Environment (lookupEnv)
import Data.List (intercalate)
import System.FilePath (dropFileName, (</>))
----------
-- | Extra key-bindings layered over the xmonad defaults ('M.union'
-- prefers these on conflict).  External program paths are taken from
-- NIX_MONAD_* environment variables, falling back to plain command
-- names on the PATH.
keysOverrides :: XConfig Layout -> M.Map (KeyMask, KeySym) (X ())
keysOverrides conf@(XConfig {XMonad.modMask = modMask_}) =
  flip M.union (keys def conf) $ M.fromList
  [ ((modMask_ .|. shiftMask, xK_Return), spawn =<< fromMaybe (XMonad.terminal conf) <$> io (lookupEnv "NIX_MONAD_XFCE4_TERMINAL"))
  , ((modMask_, xK_p), (spawn =<<) $ fmap withPathToSelf $ fromMaybe "dmenu_run" <$> io (lookupEnv "NIX_MONAD_DMENU_RUN"))
  , ((modMask_ .|. shiftMask, xK_p), spawn =<< fromMaybe "gmrun" <$> io (lookupEnv "NIX_MONAD_GMRUN"))
  , ((modMask_ .|. shiftMask, xK_slash), message help)
  , ((modMask_, xK_question), message help)
  , ((modMask_, xK_equal), keyboard "us")
  , ((modMask_, xK_0), keyboard "ru")
  , ((modMask_, xK_comma), volume "decrease")
  , ((modMask_, xK_period), volume "increase")
  , ((modMask_ .|. shiftMask, xK_comma), volume "mute")
  , ((modMask_ .|. shiftMask, xK_period), volume "unmute")
  ]
  where
    -- Show a text message via xmessage (or $XMONAD_XMESSAGE).
    message xs = do
      m <- fromMaybe "xmessage" <$> io (lookupEnv "XMONAD_XMESSAGE")
      spawn $ intercalate " " [ m, "-fn \"monospace\"", "\"" ++ xs ++ "\"" ]
    -- Switch the X keyboard layout via setxkbmap.
    keyboard xs = do
      k <- fromMaybe "setxkbmap" <$> io (lookupEnv "NIX_MONAD_SETXKBMAP")
      spawn $ intercalate " " [ k, xs ]
    -- Adjust PulseAudio volume via the volume_pulse helper script.
    volume xs = do
      v <- fromMaybe "volume_pulse" <$> io (lookupEnv "NIX_MONAD_VOLUME_PULSE")
      spawn $ intercalate " " [ v, xs ]
-- | Help text listing the extra key-bindings; displayed via the
-- @message@ helper in 'keysOverrides'.
help :: String
help = unlines helpLines
  where
    helpLines =
      [ "Additional key-bindings:"
      , ""
      , "mod-= Switch to US keyboard layout"
      , "mod-0 Switch to RU keyboard layout"
      , "mod-, Decrease volume"
      , "mod-. Increase volume"
      , "mod-Shift-, Mute volume"
      , "mod-Shift-. Unmute volume"
      ]
-- | Wrap a command so that its own directory is prepended to PATH
-- before it runs, letting the spawned program find sibling
-- executables installed next to it.
withPathToSelf :: FilePath -> String
withPathToSelf fp =
  unwords
    [ "export"
    , "PATH=" ++ dropFileName fp ++ ":$PATH"
    , "&&"
    , fp
    ]
----------
main :: IO ()
main = do
  -- xmobar binary location comes from the environment when running
  -- under Nix; fall back to PATH lookup otherwise.
  xmobar_ <- fromMaybe "xmobar" <$> lookupEnv "NIX_MONAD_XMOBAR"
  -- mod-b toggles the status-bar struts (statusBar's standard hook).
  let toggleStrutsKey XConfig {XMonad.modMask = modMask_} = (modMask_, xK_b)
      config_ = def { modMask = mod4Mask
                    , keys = keysOverrides
                    }
  xmonad =<< statusBar (withPathToSelf xmobar_) xmobarPP toggleStrutsKey config_
----------
|
artuuge/NixOS-files
|
xmobar-volume/xmonad.hs
|
gpl-3.0
| 2,565
| 0
| 14
| 590
| 803
| 440
| 363
| 55
| 1
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.YouTubeAnalytics.Types.Sum
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.YouTubeAnalytics.Types.Sum where
import Network.Google.Prelude hiding (Bytes)
-- NOTE(review): auto-generated module — behavioural changes belong in
-- the gogol generator, not here.  Each sum type below pairs an
-- HttpApiData codec (wire strings) with a JSON codec built on it.
-- | V1 error format.
data Xgafv
    = X1
      -- ^ @1@
      -- v1 error format
    | X2
      -- ^ @2@
      -- v2 error format
      deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable Xgafv
instance FromHttpApiData Xgafv where
    parseQueryParam = \case
        "1" -> Right X1
        "2" -> Right X2
        x -> Left ("Unable to parse Xgafv from: " <> x)
instance ToHttpApiData Xgafv where
    toQueryParam = \case
        X1 -> "1"
        X2 -> "2"
instance FromJSON Xgafv where
    parseJSON = parseJSONText "Xgafv"
instance ToJSON Xgafv where
    toJSON = toJSONText
data ErrorProtoLocationType
    = Path
      -- ^ @PATH@
      -- location is an xpath-like path pointing to the request field that caused
      -- the error.
    | Other
      -- ^ @OTHER@
      -- other location type which can safely be shared externally.
    | Parameter
      -- ^ @PARAMETER@
      -- Location is request parameter. This maps to the {\@link PARAMETERS} in
      -- {\@link MessageLocation}.
      deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable ErrorProtoLocationType
instance FromHttpApiData ErrorProtoLocationType where
    parseQueryParam = \case
        "PATH" -> Right Path
        "OTHER" -> Right Other
        "PARAMETER" -> Right Parameter
        x -> Left ("Unable to parse ErrorProtoLocationType from: " <> x)
instance ToHttpApiData ErrorProtoLocationType where
    toQueryParam = \case
        Path -> "PATH"
        Other -> "OTHER"
        Parameter -> "PARAMETER"
instance FromJSON ErrorProtoLocationType where
    parseJSON = parseJSONText "ErrorProtoLocationType"
instance ToJSON ErrorProtoLocationType where
    toJSON = toJSONText
-- | Global error code. Deprecated and ignored. Set custom error codes in
-- ErrorProto.domain and ErrorProto.code instead.
data ErrorsCode
    = BadRequest
      -- ^ @BAD_REQUEST@
    | ForBidden
      -- ^ @FORBIDDEN@
    | NotFound
      -- ^ @NOT_FOUND@
    | Conflict
      -- ^ @CONFLICT@
    | Gone
      -- ^ @GONE@
    | PreconditionFailed
      -- ^ @PRECONDITION_FAILED@
    | InternalError
      -- ^ @INTERNAL_ERROR@
    | ServiceUnavailable
      -- ^ @SERVICE_UNAVAILABLE@
      deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable ErrorsCode
instance FromHttpApiData ErrorsCode where
    parseQueryParam = \case
        "BAD_REQUEST" -> Right BadRequest
        "FORBIDDEN" -> Right ForBidden
        "NOT_FOUND" -> Right NotFound
        "CONFLICT" -> Right Conflict
        "GONE" -> Right Gone
        "PRECONDITION_FAILED" -> Right PreconditionFailed
        "INTERNAL_ERROR" -> Right InternalError
        "SERVICE_UNAVAILABLE" -> Right ServiceUnavailable
        x -> Left ("Unable to parse ErrorsCode from: " <> x)
instance ToHttpApiData ErrorsCode where
    toQueryParam = \case
        BadRequest -> "BAD_REQUEST"
        ForBidden -> "FORBIDDEN"
        NotFound -> "NOT_FOUND"
        Conflict -> "CONFLICT"
        Gone -> "GONE"
        PreconditionFailed -> "PRECONDITION_FAILED"
        InternalError -> "INTERNAL_ERROR"
        ServiceUnavailable -> "SERVICE_UNAVAILABLE"
instance FromJSON ErrorsCode where
    parseJSON = parseJSONText "ErrorsCode"
instance ToJSON ErrorsCode where
    toJSON = toJSONText
|
brendanhay/gogol
|
gogol-youtube-analytics/gen/Network/Google/YouTubeAnalytics/Types/Sum.hs
|
mpl-2.0
| 3,918
| 0
| 11
| 979
| 655
| 355
| 300
| 83
| 0
|
-- This file is part of purebred
-- Copyright (C) 2017-2021 Róman Joost and Fraser Tweedale
--
-- purebred is free software: you can redistribute it and/or modify
-- it under the terms of the GNU Affero General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU Affero General Public License for more details.
--
-- You should have received a copy of the GNU Affero General Public License
-- along with this program. If not, see <http://www.gnu.org/licenses/>.
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE RankNTypes #-}
{- |
Core types and optics for Purebred.
-}
module Purebred.Types
( -- * Application state
AppState(..)
, asConfig
, bChan
, logSink
, asThreadsView
, asMailView
, asCompose
, asUserMessage
, asViews
, asFileBrowser
, asLocalTime
, Async(..)
, asAsync
-- ** Threads and Mails Lists
, ThreadsView(..)
, miListOfMails
, miListOfThreads
, miListOfThreadsGeneration
, miSearchThreadsEditor
, miMailTagsEditor
, miThreadTagsEditor
, miThreads
, miNewMail
, miMails
, NotmuchMail(..)
, mailSubject
, mailFrom
, mailDate
, mailTags
, mailId
, NotmuchThread(..)
, thSubject
, thAuthors
, thDate
, thTags
, thReplies
, thId
-- ** Mail Viewer
, MailView(..)
, mvMail
, mvBody
, mvAttachments
, mvSaveToDiskPath
, mvOpenCommand
, mvPipeCommand
, mvFindWordEditor
, mvScrollSteps
, MailBody(..)
, mbParagraph
, mbSource
, matchCount
, Source
, Paragraph(..)
, pLine
, Line(..)
, hasMatches
, lMatches
, lText
, lNumber
, ScrollStep
, stNumber
, stMatch
, Match(..)
, mLinenumber
-- ** Mail Composer
, Compose(..)
, cFrom
, cTo
, cCc
, cBcc
, cSubject
, cAttachments
, cKeepDraft
, ConfirmDraft(..)
-- ** File Browser
, FileBrowser(..)
, fbEntries
, fbSearchPath
, FileSystemEntry(..)
, fsEntryName
-- ** Concurrent actions
, aValidation
-- ** Widgets
, HeadersState(..)
-- ** Keybindings
, Keybinding(..)
, kbEvent
, kbAction
, Action(..)
, aDescription
, aAction
-- * Configuration
, UserConfiguration
, Configuration(..)
, confTheme
, confNotmuch
, confEditor
, confMailView
, confIndexView
, confComposeView
, confHelpView
, confDefaultView
, confFileBrowserView
, confCharsets
, confPlugins
-- ** Notmuch Configuration
, NotmuchSettings(..)
, nmSearch
, nmDatabase
, nmNewTag
, nmDraftTag
, nmSentTag
, nmHasNewMailSearch
, nmHasNewMailCheckDelay
, Delay(..)
, Tag
-- ** Mail Viewer
, MailViewSettings(..)
, mvIndexRows
, mvTextWidth
, mvHeadersToShow
, mvPreferredContentType
, mvHeadersState
, mvMailcap
-- *** Mail Viewer Keybindings
, mvKeybindings
, mvManageMailTagsKeybindings
, mvMailListOfAttachmentsKeybindings
, mvOpenWithKeybindings
, mvPipeToKeybindings
, mvFindWordEditorKeybindings
, mvSaveToDiskKeybindings
, mvToKeybindings
-- ** Threads Viewer
, IndexViewSettings(..)
-- *** Threads Viewer Keybindings
, ivBrowseThreadsKeybindings
, ivSearchThreadsKeybindings
, ivManageThreadTagsKeybindings
, ivFromKeybindings
, ivToKeybindings
, ivSubjectKeybindings
-- ** Mail Composer
, ComposeViewSettings(..)
, cvSendMailCmd
, cvIdentities
-- *** Mail Composer Keybindings
, cvFromKeybindings
, cvToKeybindings
, cvCcKeybindings
, cvBccKeybindings
, cvSubjectKeybindings
, cvListOfAttachmentsKeybindings
, cvConfirmKeybindings
-- ** Help Viewer
, HelpViewSettings(..)
, hvKeybindings
-- ** File Browser
, FileBrowserSettings(..)
, fbHomePath
-- *** Keybindings
, fbKeybindings
, fbSearchPathKeybindings
-- * Internals
, ListWithLength(..)
, listList
, listLength
, decodeLenient
, module Purebred.Types.Event
, module Purebred.Types.Mailcap
, module Purebred.Types.UI
) where
import Prelude hiding (Word)
import GHC.Generics (Generic)
import Brick.AttrMap (AttrMap)
import Brick.BChan (BChan)
import qualified Brick.Focus as Brick
import Brick.Types (EventM, Next)
import qualified Brick.Widgets.Edit as E
import qualified Brick.Widgets.List as L
import qualified Brick.Widgets.FileBrowser as FB
import Brick.Widgets.Dialog (Dialog)
import Control.Lens
( Getter, Lens', Traversal', _1, _3
, foldrOf, lens, notNullOf, to, view )
import Control.DeepSeq (NFData(rnf), force)
import Control.Monad.State
import Control.Concurrent (ThreadId)
import qualified Data.ByteString as B
import qualified Data.ByteString.Builder as B
import qualified Data.Text as T
import qualified Data.Text.Lazy as LT
import qualified Data.Text.Encoding as T
import qualified Data.Text.Encoding.Error as T
import qualified Graphics.Vty.Input.Events as Vty
import Data.Time (UTCTime)
import qualified Data.CaseInsensitive as CI
import qualified Data.Vector as V
import Notmuch (Tag)
import Data.MIME
import Purebred.UI.Widgets (StatefulEditor)
import {-# SOURCE #-} Purebred.Plugin.Internal
import Purebred.Types.Error
import Purebred.Types.Event
import Purebred.Types.Items
import Purebred.Types.Mailcap
import Purebred.Types.UI
{-# ANN module ("HLint: ignore Avoid lambda" :: String) #-}
-- | A brick list, with a field that optionally contains its length.
--
-- Rather than reading the length from the underlying list, to support
-- lazy loading we have a separate field that optionally contains the
-- length. Widgets should read the length from this field and must
-- handle the @Nothing@ case.
--
-- For strict lists (e.g. Vector-based) the length can be recorded when
-- constructed. For lazy lists, it could be left empty, or a thread
-- could be spawned to compute the length in the background and update
-- the value when the length is known.
--
data ListWithLength t a = ListWithLength (L.GenericList Name t a) (Maybe Int)
-- | Lens onto the wrapped brick list.
listList :: Lens' (ListWithLength t a) (L.GenericList Name t a)
listList f (ListWithLength a b) = (\a' -> ListWithLength a' b) <$> f a
{-# ANN listList ("HLint: ignore Avoid lambda using `infix`" :: String) #-}
-- | Lens onto the (possibly unknown) cached length.
listLength :: Lens' (ListWithLength t a) (Maybe Int)
listLength f (ListWithLength a b) = (\b' -> ListWithLength a b') <$> f b
-- | A view showing a list of threads.
-- This is the default view of the Purebred on startup.
--
data ThreadsView = ThreadsView
  { _miListOfMails :: ListWithLength V.Vector (Toggleable NotmuchMail)
  , _miListOfThreads :: ListWithLength Items (Toggleable NotmuchThread)
  , _miListOfThreadsGeneration :: Generation
  , _miSearchThreadsEditor :: StatefulEditor T.Text Name
  , _miMailTagsEditor :: E.Editor T.Text Name
  , _miThreadTagsEditor :: E.Editor T.Text Name
  , _miNewMail :: Int
  }
-- Hand-written lenses for the fields above; 'miListOfMails' and
-- 'miListOfThreads' compose the whole-'ListWithLength' lenses with
-- 'listList' to reach the inner brick list directly.
miMails :: Lens' ThreadsView (ListWithLength V.Vector (Toggleable NotmuchMail))
miMails = lens _miListOfMails (\m v -> m { _miListOfMails = v })
miThreads :: Lens' ThreadsView (ListWithLength Items (Toggleable NotmuchThread))
miThreads = lens _miListOfThreads (\m v -> m { _miListOfThreads = v})
miListOfMails :: Lens' ThreadsView (L.GenericList Name V.Vector (Toggleable NotmuchMail))
miListOfMails = miMails . listList
miListOfThreads :: Lens' ThreadsView (L.GenericList Name Items (Toggleable NotmuchThread))
miListOfThreads = miThreads . listList
miListOfThreadsGeneration :: Lens' ThreadsView Generation
miListOfThreadsGeneration =
  lens _miListOfThreadsGeneration (\s b -> s { _miListOfThreadsGeneration = b })
miSearchThreadsEditor :: Lens' ThreadsView (StatefulEditor T.Text Name)
miSearchThreadsEditor = lens _miSearchThreadsEditor (\m v -> m { _miSearchThreadsEditor = v})
miMailTagsEditor :: Lens' ThreadsView (E.Editor T.Text Name)
miMailTagsEditor = lens _miMailTagsEditor (\m v -> m { _miMailTagsEditor = v})
miThreadTagsEditor :: Lens' ThreadsView (E.Editor T.Text Name)
miThreadTagsEditor = lens _miThreadTagsEditor (\m v -> m { _miThreadTagsEditor = v})
miNewMail :: Lens' ThreadsView Int
miNewMail = lens _miNewMail (\m v -> m { _miNewMail = v})
-- | A loose annotation what produced the rendered output of the
-- entity
--
type Source = T.Text
-- | Type representing a specific entity from an e-mail for display.
--
data MailBody =
  MailBody Source [Paragraph]
  deriving (Show, Eq)
-- | Traversal over every paragraph of the body.
mbParagraph :: Traversal' MailBody Paragraph
mbParagraph f (MailBody s xs) = fmap (\xs' -> MailBody s xs') (traverse f xs)
-- | Lens onto the renderer annotation.
mbSource :: Lens' MailBody Source
mbSource f (MailBody d xs) = fmap (\d' -> MailBody d' xs) (f d)
{-# ANN mbSource ("HLint: ignore Avoid lambda using `infix`" :: String) #-}
-- | Total number of search matches across all lines of the body.
matchCount :: MailBody -> Int
matchCount =
  foldrOf
    (mbParagraph . pLine)
    (\l amount -> view (lMatches . to length) l + amount)
    0
-- | A paragraph in the mail body
--
newtype Paragraph =
  Paragraph [Line]
  deriving (Show, Eq)
-- | Traversal over every line of the paragraph.
pLine :: Traversal' Paragraph Line
pLine f (Paragraph xs) = fmap (\xs' -> Paragraph xs') (traverse f xs)
-- | A match of a substring in the current line of text
--
data Match =
  Match Int -- ^ offset
        Int -- ^ length
        Int -- ^ line number
  deriving (Show, Eq)
mLinenumber :: Lens' Match Int
mLinenumber f (Match a b c) = fmap (\c' -> Match a b c') (f c)
-- | A scroll step indicated by the sequential number, line number and
-- the match.
-- The sequential number is used for visual purposes to render a
-- status like: 2 of 30 matches
--
type ScrollStep = (Int, Int, Match)
stNumber :: Lens' ScrollStep Int
stNumber = _1
stMatch :: Lens' ScrollStep Match
stMatch = _3
-- | A line of text with arbitrary length and possible matching sub
-- strings
--
data Line =
  Line [Match]
       Int -- ^ line number
       T.Text
  deriving (Show, Eq)
-- | Does this line contain at least one search match?
hasMatches :: Line -> Bool
hasMatches = notNullOf (lMatches . traverse)
lMatches :: Lens' Line [Match]
lMatches f (Line xs n t) = fmap (\xs' -> Line xs' n t) (f xs)
lText :: Lens' Line T.Text
lText f (Line xs n t) = fmap (\t' -> Line xs n t') (f t)
lNumber :: Lens' Line Int
lNumber f (Line xs n t) = fmap (\n' -> Line xs n' t) (f n)
-- | Whether the mail viewer shows all headers or only the configured
-- subset.
data HeadersState = ShowAll | Filtered
-- | State of the single-mail viewer widget group.
data MailView = MailView
  { _mvMail :: Maybe MIMEMessage
  , _mvBody:: MailBody
  , _mvHeadersState :: HeadersState
  , _mvAttachments :: L.List Name WireEntity
  , _mvSaveToDiskPath :: E.Editor T.Text Name
  , _mvOpenCommand:: E.Editor T.Text Name
  , _mvPipeCommand :: E.Editor T.Text Name
  , _mvFindWordEditor :: E.Editor T.Text Name
  , _mvScrollSteps :: Brick.FocusRing ScrollStep
  }
-- Hand-written lenses for the MailView fields.
mvMail :: Lens' MailView (Maybe MIMEMessage)
mvMail = lens _mvMail (\mv pm -> mv { _mvMail = pm })
mvHeadersState :: Lens' MailView HeadersState
mvHeadersState = lens _mvHeadersState (\mv hs -> mv { _mvHeadersState = hs })
mvAttachments :: Lens' MailView (L.List Name WireEntity)
mvAttachments = lens _mvAttachments (\mv hs -> mv { _mvAttachments = hs })
mvSaveToDiskPath :: Lens' MailView (E.Editor T.Text Name)
mvSaveToDiskPath = lens _mvSaveToDiskPath (\mv hs -> mv { _mvSaveToDiskPath = hs })
mvOpenCommand :: Lens' MailView (E.Editor T.Text Name)
mvOpenCommand = lens _mvOpenCommand (\mv hs -> mv { _mvOpenCommand = hs })
mvPipeCommand :: Lens' MailView (E.Editor T.Text Name)
mvPipeCommand = lens _mvPipeCommand (\mv hs -> mv { _mvPipeCommand = hs })
mvFindWordEditor :: Lens' MailView (E.Editor T.Text Name)
mvFindWordEditor = lens _mvFindWordEditor (\mv hs -> mv { _mvFindWordEditor = hs })
mvScrollSteps :: Lens' MailView (Brick.FocusRing ScrollStep)
mvScrollSteps = lens _mvScrollSteps (\mv hs -> mv { _mvScrollSteps = hs })
mvBody :: Lens' MailView MailBody
mvBody = lens _mvBody (\mv hs -> mv { _mvBody = hs })
-- | Answer to the "keep this draft?" dialog.
data ConfirmDraft
  = Keep
  | Discard
  deriving (Show)
-- | State of the mail composer: one editor per header plus the
-- attachment list and the keep-draft confirmation dialog.
data Compose = Compose
  { _cFrom :: StatefulEditor T.Text Name
  , _cTo :: StatefulEditor T.Text Name
  , _cCc :: StatefulEditor T.Text Name
  , _cBcc :: StatefulEditor T.Text Name
  , _cSubject :: StatefulEditor T.Text Name
  , _cAttachments :: L.List Name MIMEMessage
  , _cKeepDraft :: Dialog ConfirmDraft
  }
-- Hand-written lenses for the Compose fields.
cFrom :: Lens' Compose (StatefulEditor T.Text Name)
cFrom = lens _cFrom (\c x -> c { _cFrom = x })
cTo :: Lens' Compose (StatefulEditor T.Text Name)
cTo = lens _cTo (\c x -> c { _cTo = x })
cCc :: Lens' Compose (StatefulEditor T.Text Name)
cCc = lens _cCc (\c x -> c { _cCc = x })
cBcc :: Lens' Compose (StatefulEditor T.Text Name)
cBcc = lens _cBcc (\c x -> c { _cBcc = x })
cSubject :: Lens' Compose (StatefulEditor T.Text Name)
cSubject = lens _cSubject (\c x -> c { _cSubject = x })
cAttachments :: Lens' Compose (L.List Name MIMEMessage)
cAttachments = lens _cAttachments (\c x -> c { _cAttachments = x })
cKeepDraft :: Lens' Compose (Dialog ConfirmDraft)
cKeepDraft = lens _cKeepDraft (\c x -> c { _cKeepDraft = x })
-- | User-configurable notmuch integration settings.
data NotmuchSettings =
  NotmuchSettings
  { _nmSearch :: T.Text -- ^ The default query used on startup.
  , _nmDatabase :: FilePath -- ^ The 'FilePath' to the database.
  , _nmNewTag :: Tag -- ^ The 'Tag' indicating a new mail or thread.
  , _nmDraftTag :: Tag -- ^ The 'Tag' to attach mails during composition when saved as drafts.
  , _nmSentTag :: Tag -- ^ The 'Tag' to attach to mails once successfully sent.
  , _nmHasNewMailSearch :: T.Text -- ^ Search carried out by Purebred to determine the number of new mail.
  , _nmHasNewMailCheckDelay :: Maybe Delay
  -- ^ The interval in which Purebred queries for new mail. Set to 'Nothing' to disable
  -- the check.
  }
  deriving (Generic, NFData)
-- Hand-written lenses for the NotmuchSettings fields.
nmSearch :: Lens' NotmuchSettings T.Text
nmSearch = lens _nmSearch (\nm x -> nm { _nmSearch = x })
nmDatabase :: Lens' NotmuchSettings FilePath
nmDatabase = lens _nmDatabase (\nm x -> nm { _nmDatabase = x })
nmNewTag :: Lens' NotmuchSettings Tag
nmNewTag = lens _nmNewTag (\nm x -> nm { _nmNewTag = x })
nmDraftTag :: Lens' NotmuchSettings Tag
nmDraftTag = lens _nmDraftTag (\nm x -> nm { _nmDraftTag = x })
nmSentTag :: Lens' NotmuchSettings Tag
nmSentTag = lens _nmSentTag (\nm x -> nm { _nmSentTag = x })
nmHasNewMailSearch :: Lens' NotmuchSettings T.Text
nmHasNewMailSearch = lens _nmHasNewMailSearch (\nm x -> nm { _nmHasNewMailSearch = x })
nmHasNewMailCheckDelay :: Lens' NotmuchSettings (Maybe Delay)
nmHasNewMailCheckDelay = lens _nmHasNewMailCheckDelay (\nm x -> nm { _nmHasNewMailCheckDelay = x })
-- | Settings and keybindings for the file-browser (attachment picker).
data FileBrowserSettings = FileBrowserSettings
  { _fbKeybindings :: [Keybinding 'FileBrowser 'ListOfFiles]
  , _fbSearchPathKeybindings :: [Keybinding 'FileBrowser 'ManageFileBrowserSearchPath]
  , _fbHomePath :: FilePath
  }
  deriving (Generic, NFData)
fbKeybindings :: Lens' FileBrowserSettings [Keybinding 'FileBrowser 'ListOfFiles]
fbKeybindings = lens _fbKeybindings (\cv x -> cv { _fbKeybindings = x })
fbSearchPathKeybindings :: Lens' FileBrowserSettings [Keybinding 'FileBrowser 'ManageFileBrowserSearchPath]
fbSearchPathKeybindings = lens _fbSearchPathKeybindings (\cv x -> cv { _fbSearchPathKeybindings = x})
fbHomePath :: Lens' FileBrowserSettings FilePath
fbHomePath = lens _fbHomePath (\s a -> s { _fbHomePath = a })
-- | Polling interval for the new-mail check.
data Delay
  = Seconds Int
  | Minutes Int
  deriving (Generic, NFData)
-- | The user-supplied configuration is currently represented by the
-- same type as the internal one; this alias only documents intent.
type UserConfiguration = Configuration
-- | Top-level application configuration: theme, notmuch settings,
-- per-view settings, charset lookup and loaded plugins.
data Configuration = Configuration
  { _confTheme :: AttrMap -- ^ Brick attribute map used for rendering.
  , _confNotmuch :: NotmuchSettings
  , _confEditor :: FilePath -- ^ Editor invoked for composing (presumably an executable path -- confirm).
  , _confMailView :: MailViewSettings
  , _confIndexView :: IndexViewSettings
  , _confComposeView :: ComposeViewSettings
  , _confHelpView :: HelpViewSettings
  , _confDefaultView :: ViewName -- ^ View shown on startup.
  , _confFileBrowserView :: FileBrowserSettings
  , _confCharsets :: CharsetLookup
  , _confPlugins :: [PluginDict]
  }
  deriving (Generic, NFData)
-- Record lenses for 'Configuration'.
-- | Lens for the '_confTheme' field.
confTheme :: Lens' Configuration AttrMap
confTheme = lens _confTheme (\s v -> s { _confTheme = v })
-- | Lens for the '_confEditor' field.
confEditor :: Lens' Configuration FilePath
confEditor = lens _confEditor (\s v -> s { _confEditor = v })
-- | Lens for the '_confNotmuch' field.
confNotmuch :: Lens' Configuration NotmuchSettings
confNotmuch = lens _confNotmuch (\s v -> s { _confNotmuch = v })
-- | Lens for the '_confMailView' field.
confMailView :: Lens' Configuration MailViewSettings
confMailView = lens _confMailView (\s v -> s { _confMailView = v })
-- | Lens for the '_confIndexView' field.
confIndexView :: Lens' Configuration IndexViewSettings
confIndexView = lens _confIndexView (\s v -> s { _confIndexView = v })
-- | Lens for the '_confComposeView' field.
confComposeView :: Lens' Configuration ComposeViewSettings
confComposeView = lens _confComposeView (\s v -> s { _confComposeView = v })
-- | Lens for the '_confHelpView' field.
confHelpView :: Lens' Configuration HelpViewSettings
confHelpView = lens _confHelpView (\s v -> s { _confHelpView = v })
-- | Lens for the '_confDefaultView' field.
confDefaultView :: Lens' Configuration ViewName
confDefaultView = lens _confDefaultView (\s v -> s { _confDefaultView = v })
-- | Lens for the '_confFileBrowserView' field.
confFileBrowserView :: Lens' Configuration FileBrowserSettings
confFileBrowserView = lens _confFileBrowserView (\s v -> s { _confFileBrowserView = v })
-- | Lens for the '_confCharsets' field.
confCharsets :: Lens' Configuration CharsetLookup
confCharsets = lens _confCharsets (\s v -> s { _confCharsets = v })
-- | Lens for the '_confPlugins' field.
confPlugins :: Lens' Configuration [PluginDict]
confPlugins = lens _confPlugins (\s v -> s { _confPlugins = v })
-- | Settings for the compose view: keybindings for each editor focus,
-- the action used to hand a rendered mail off for sending, and the
-- identities offered in the From field.
data ComposeViewSettings = ComposeViewSettings
  { _cvFromKeybindings :: [Keybinding 'ComposeView 'ComposeFrom]
  , _cvToKeybindings :: [Keybinding 'ComposeView 'ComposeTo]
  , _cvCcKeybindings :: [Keybinding 'ComposeView 'ComposeCc]
  , _cvBccKeybindings :: [Keybinding 'ComposeView 'ComposeBcc]
  , _cvSubjectKeybindings :: [Keybinding 'ComposeView 'ComposeSubject]
  , _cvSendMailCmd :: B.Builder -> IO (Either Error ()) -- ^ IO action given the rendered mail; 'Left' reports failure.
  , _cvListOfAttachmentsKeybindings :: [Keybinding 'ComposeView 'ComposeListOfAttachments]
  , _cvIdentities :: [Mailbox] -- ^ Sender identities available to the user.
  , _cvConfirmKeybindings :: [Keybinding 'ComposeView 'ConfirmDialog]
  }
  deriving (Generic, NFData)
-- Record lenses for 'ComposeViewSettings'.
-- | Lens for the '_cvFromKeybindings' field.
cvFromKeybindings :: Lens' ComposeViewSettings [Keybinding 'ComposeView 'ComposeFrom]
cvFromKeybindings = lens _cvFromKeybindings (\s v -> s { _cvFromKeybindings = v })
-- | Lens for the '_cvToKeybindings' field.
cvToKeybindings :: Lens' ComposeViewSettings [Keybinding 'ComposeView 'ComposeTo]
cvToKeybindings = lens _cvToKeybindings (\s v -> s { _cvToKeybindings = v })
-- | Lens for the '_cvCcKeybindings' field.
cvCcKeybindings :: Lens' ComposeViewSettings [Keybinding 'ComposeView 'ComposeCc]
cvCcKeybindings = lens _cvCcKeybindings (\s v -> s { _cvCcKeybindings = v })
-- | Lens for the '_cvBccKeybindings' field.
cvBccKeybindings :: Lens' ComposeViewSettings [Keybinding 'ComposeView 'ComposeBcc]
cvBccKeybindings = lens _cvBccKeybindings (\s v -> s { _cvBccKeybindings = v })
-- | Lens for the '_cvSubjectKeybindings' field.
cvSubjectKeybindings :: Lens' ComposeViewSettings [Keybinding 'ComposeView 'ComposeSubject]
cvSubjectKeybindings = lens _cvSubjectKeybindings (\s v -> s { _cvSubjectKeybindings = v })
-- | Lens for the '_cvSendMailCmd' field.
cvSendMailCmd :: Lens' ComposeViewSettings (B.Builder -> IO (Either Error ()))
cvSendMailCmd = lens _cvSendMailCmd (\s v -> s { _cvSendMailCmd = v })
-- | Lens for the '_cvListOfAttachmentsKeybindings' field.
cvListOfAttachmentsKeybindings :: Lens' ComposeViewSettings [Keybinding 'ComposeView 'ComposeListOfAttachments]
cvListOfAttachmentsKeybindings = lens _cvListOfAttachmentsKeybindings (\s v -> s { _cvListOfAttachmentsKeybindings = v })
-- | Lens for the '_cvIdentities' field.
cvIdentities :: Lens' ComposeViewSettings [Mailbox]
cvIdentities = lens _cvIdentities (\s v -> s { _cvIdentities = v })
-- | Lens for the '_cvConfirmKeybindings' field.
cvConfirmKeybindings :: Lens' ComposeViewSettings [Keybinding 'ComposeView 'ConfirmDialog]
cvConfirmKeybindings = lens _cvConfirmKeybindings (\s v -> s { _cvConfirmKeybindings = v })
-- | Settings for the help view; currently only the keybindings for
-- the scrolling help widget.
newtype HelpViewSettings = HelpViewSettings
  { _hvKeybindings :: [Keybinding 'Help 'ScrollingHelpView]
  }
  deriving (Generic, NFData)
-- | Lens for the '_hvKeybindings' field.
--
-- Rewritten with 'lens' for consistency with every other record lens
-- in this module; the previous hand-rolled version also contained a
-- redundant lambda (@\a' -> HelpViewSettings a'@ is just the
-- constructor).  Behaviour is unchanged.
hvKeybindings :: Lens' HelpViewSettings [Keybinding 'Help 'ScrollingHelpView]
hvKeybindings = lens _hvKeybindings (\s v -> s { _hvKeybindings = v })
-- | Settings for the threads (index) view: keybindings for browsing,
-- searching and tagging threads, plus the compose-related editors
-- reachable from this view.
data IndexViewSettings = IndexViewSettings
  { _ivBrowseThreadsKeybindings :: [Keybinding 'Threads 'ListOfThreads]
  , _ivSearchThreadsKeybindings :: [Keybinding 'Threads 'SearchThreadsEditor]
  , _ivManageThreadTagsKeybindings :: [Keybinding 'Threads 'ManageThreadTagsEditor]
  , _ivFromKeybindings :: [Keybinding 'Threads 'ComposeFrom]
  , _ivToKeybindings :: [Keybinding 'Threads 'ComposeTo]
  , _ivSubjectKeybindings :: [Keybinding 'Threads 'ComposeSubject]
  }
  deriving (Generic, NFData)
-- Record lenses for 'IndexViewSettings'.
-- | Lens for the '_ivBrowseThreadsKeybindings' field.
ivBrowseThreadsKeybindings :: Lens' IndexViewSettings [Keybinding 'Threads 'ListOfThreads]
ivBrowseThreadsKeybindings = lens _ivBrowseThreadsKeybindings (\s v -> s { _ivBrowseThreadsKeybindings = v })
-- | Lens for the '_ivSearchThreadsKeybindings' field.
ivSearchThreadsKeybindings :: Lens' IndexViewSettings [Keybinding 'Threads 'SearchThreadsEditor]
ivSearchThreadsKeybindings = lens _ivSearchThreadsKeybindings (\s v -> s { _ivSearchThreadsKeybindings = v })
-- | Lens for the '_ivManageThreadTagsKeybindings' field.
ivManageThreadTagsKeybindings :: Lens' IndexViewSettings [Keybinding 'Threads 'ManageThreadTagsEditor]
ivManageThreadTagsKeybindings = lens _ivManageThreadTagsKeybindings (\s v -> s { _ivManageThreadTagsKeybindings = v })
-- | Lens for the '_ivFromKeybindings' field.
ivFromKeybindings :: Lens' IndexViewSettings [Keybinding 'Threads 'ComposeFrom]
ivFromKeybindings = lens _ivFromKeybindings (\s v -> s { _ivFromKeybindings = v })
-- | Lens for the '_ivToKeybindings' field.
ivToKeybindings :: Lens' IndexViewSettings [Keybinding 'Threads 'ComposeTo]
ivToKeybindings = lens _ivToKeybindings (\s v -> s { _ivToKeybindings = v })
-- | Lens for the '_ivSubjectKeybindings' field.
ivSubjectKeybindings :: Lens' IndexViewSettings [Keybinding 'Threads 'ComposeSubject]
ivSubjectKeybindings = lens _ivSubjectKeybindings (\s v -> s { _ivSubjectKeybindings = v })
-- | Settings for the single-mail view: rendering parameters, the
-- header filter, the mailcap handlers and the keybindings for every
-- widget reachable from this view.
data MailViewSettings = MailViewSettings
  { _mvIndexRows :: Int -- ^ Number of index rows shown above the mail.
  , _mvTextWidth :: Int -- ^ Width used when rendering the mail body.
  , _mvPreferredContentType :: ContentType
  , _mvHeadersToShow :: CI.CI B.ByteString -> Bool -- ^ Predicate selecting which headers are displayed.
  , _mvKeybindings :: [Keybinding 'ViewMail 'ScrollingMailView]
  , _mvManageMailTagsKeybindings :: [Keybinding 'ViewMail 'ManageMailTagsEditor]
  , _mvMailListOfAttachmentsKeybindings :: [Keybinding 'ViewMail 'MailListOfAttachments]
  , _mvOpenWithKeybindings :: [Keybinding 'ViewMail 'MailAttachmentOpenWithEditor]
  , _mvPipeToKeybindings :: [Keybinding 'ViewMail 'MailAttachmentPipeToEditor]
  , _mvFindWordEditorKeybindings :: [Keybinding 'ViewMail 'ScrollingMailViewFindWordEditor]
  , _mvMailcap :: [(ContentType -> Bool, MailcapHandler)] -- ^ First matching predicate selects the handler.
  , _mvSaveToDiskKeybindings :: [Keybinding 'ViewMail 'SaveToDiskPathEditor]
  -- used for forwarding mails
  , _mvToKeybindings :: [Keybinding 'ViewMail 'ComposeTo]
  }
  deriving (Generic, NFData)
-- Record lenses for 'MailViewSettings'.
-- | Lens for the '_mvIndexRows' field.
mvIndexRows :: Lens' MailViewSettings Int
mvIndexRows = lens _mvIndexRows (\s v -> s { _mvIndexRows = v })
-- | Lens for the '_mvTextWidth' field.
mvTextWidth :: Lens' MailViewSettings Int
mvTextWidth = lens _mvTextWidth (\s v -> s { _mvTextWidth = v })
-- | Lens for the '_mvPreferredContentType' field.
mvPreferredContentType :: Lens' MailViewSettings ContentType
mvPreferredContentType = lens _mvPreferredContentType (\s v -> s { _mvPreferredContentType = v })
-- | Lens for the '_mvHeadersToShow' field, the predicate selecting
-- which mail headers are rendered.
--
-- The implementation always was a full lens (it has a working setter),
-- but the signature exposed it only as a 'Getter'.  Declaring it as a
-- 'Lens'' is backward compatible -- every lens can be used wherever a
-- 'Getter' is expected -- and matches the sibling field accessors.
mvHeadersToShow :: Lens' MailViewSettings (CI.CI B.ByteString -> Bool)
mvHeadersToShow = lens _mvHeadersToShow (\s v -> s { _mvHeadersToShow = v })
-- | Lens for the '_mvKeybindings' field.
mvKeybindings :: Lens' MailViewSettings [Keybinding 'ViewMail 'ScrollingMailView]
mvKeybindings = lens _mvKeybindings (\s v -> s { _mvKeybindings = v })
-- | Lens for the '_mvManageMailTagsKeybindings' field.
mvManageMailTagsKeybindings :: Lens' MailViewSettings [Keybinding 'ViewMail 'ManageMailTagsEditor]
mvManageMailTagsKeybindings = lens _mvManageMailTagsKeybindings (\s v -> s { _mvManageMailTagsKeybindings = v })
-- | Lens for the '_mvMailListOfAttachmentsKeybindings' field.
mvMailListOfAttachmentsKeybindings :: Lens' MailViewSettings [Keybinding 'ViewMail 'MailListOfAttachments]
mvMailListOfAttachmentsKeybindings = lens _mvMailListOfAttachmentsKeybindings (\s v -> s { _mvMailListOfAttachmentsKeybindings = v })
-- | Lens for the '_mvOpenWithKeybindings' field.
mvOpenWithKeybindings :: Lens' MailViewSettings [Keybinding 'ViewMail 'MailAttachmentOpenWithEditor]
mvOpenWithKeybindings = lens _mvOpenWithKeybindings (\s v -> s { _mvOpenWithKeybindings = v })
-- | Lens for the '_mvPipeToKeybindings' field.
mvPipeToKeybindings :: Lens' MailViewSettings [Keybinding 'ViewMail 'MailAttachmentPipeToEditor]
mvPipeToKeybindings = lens _mvPipeToKeybindings (\s v -> s { _mvPipeToKeybindings = v })
-- | Lens for the '_mvFindWordEditorKeybindings' field.
mvFindWordEditorKeybindings :: Lens' MailViewSettings [Keybinding 'ViewMail 'ScrollingMailViewFindWordEditor]
mvFindWordEditorKeybindings = lens _mvFindWordEditorKeybindings (\s v -> s { _mvFindWordEditorKeybindings = v })
-- | Lens for the '_mvMailcap' field.
mvMailcap :: Lens' MailViewSettings [(ContentType -> Bool, MailcapHandler)]
mvMailcap = lens _mvMailcap (\s v -> s { _mvMailcap = v })
-- | Lens for the '_mvSaveToDiskKeybindings' field.
mvSaveToDiskKeybindings :: Lens' MailViewSettings [Keybinding 'ViewMail 'SaveToDiskPathEditor]
mvSaveToDiskKeybindings = lens _mvSaveToDiskKeybindings (\s v -> s { _mvSaveToDiskKeybindings = v })
-- | Lens for the '_mvToKeybindings' field (used when forwarding).
mvToKeybindings :: Lens' MailViewSettings [Keybinding 'ViewMail 'ComposeTo]
mvToKeybindings = lens _mvToKeybindings (\s v -> s { _mvToKeybindings = v })
-- | An entry shown in the file browser: either a directory or a
-- regular file, each carrying its name.
data FileSystemEntry
  = Directory String -- ^ Directory name.
  | File String -- ^ File name.
  deriving (Show,Ord,Eq)
-- | Extract the name carried by either 'FileSystemEntry' constructor.
fsEntryName :: Getter FileSystemEntry String
fsEntryName = to extract
  where
    extract (Directory name) = name
    extract (File name) = name
-- | Runtime state of the file browser: the brick file browser widget
-- itself plus the editor holding the current search path.
data FileBrowser = CreateFileBrowser
  { _fbEntries :: FB.FileBrowser Name -- ^ The brick file browser widget.
  , _fbSearchPath :: StatefulEditor FilePath Name -- ^ Editor for the directory being browsed.
  }
-- | Lens for the '_fbEntries' field.
fbEntries :: Lens' FileBrowser (FB.FileBrowser Name)
fbEntries = lens _fbEntries (\s v -> s { _fbEntries = v })
-- | Lens for the '_fbSearchPath' field.
fbSearchPath :: Lens' FileBrowser (StatefulEditor FilePath Name)
fbSearchPath = lens _fbSearchPath (\s v -> s { _fbSearchPath = v })
-- | State needed to be kept for keeping track of
-- concurrent/asynchronous actions
newtype Async = Async
  { _aValidation :: Maybe ThreadId -- ^ ThreadId of a running background task (presumably input validation, per the name -- confirm at use site); 'Nothing' when none is running.
  }
-- | Lens for the '_aValidation' field.
aValidation :: Lens' Async (Maybe ThreadId)
aValidation = lens _aValidation (\as x -> as { _aValidation = x })
-- | The application state holding state to render widgets, error
-- management, as well as views and more.
--
data AppState = AppState
  { _asConfig :: Configuration
  , _bChan :: BChan PurebredEvent -- ^ channel for feeding 'PurebredEvent's to the event loop
  , _logSink :: LT.Text -> IO () -- ^ sink invoked with log text
  , _asThreadsView :: ThreadsView
  , _asMailView :: MailView
  , _asCompose :: Compose -- ^ state to keep when user creates a new mail
  , _asUserMessage :: Maybe UserMessage
  , _asViews :: ViewSettings -- ^ stores widget and focus information
  , _asFileBrowser :: FileBrowser
  , _asLocalTime :: UTCTime -- ^ NOTE(review): despite the name, this stores a 'UTCTime'
  , _asAsync :: Async
  }
-- Record lenses for 'AppState'.
-- | Lens for the '_asConfig' field.
asConfig :: Lens' AppState Configuration
asConfig = lens _asConfig (\s v -> s { _asConfig = v })
-- | Lens for the '_bChan' field.
bChan :: Lens' AppState (BChan PurebredEvent)
bChan = lens _bChan (\s v -> s { _bChan = v })
-- | Lens for the '_logSink' field.
logSink :: Lens' AppState (LT.Text -> IO ())
logSink = lens _logSink (\s v -> s { _logSink = v })
-- | Lens for the '_asThreadsView' field.
asThreadsView :: Lens' AppState ThreadsView
asThreadsView = lens _asThreadsView (\s v -> s { _asThreadsView = v })
-- | Lens for the '_asMailView' field.
asMailView :: Lens' AppState MailView
asMailView = lens _asMailView (\s v -> s { _asMailView = v })
-- | Lens for the '_asCompose' field.
asCompose :: Lens' AppState Compose
asCompose = lens _asCompose (\s v -> s { _asCompose = v })
-- | Lens for the '_asUserMessage' field.
asUserMessage :: Lens' AppState (Maybe UserMessage)
asUserMessage = lens _asUserMessage (\s v -> s { _asUserMessage = v })
-- | Lens for the '_asViews' field.
asViews :: Lens' AppState ViewSettings
asViews = lens _asViews (\s v -> s { _asViews = v })
-- | Lens for the '_asFileBrowser' field.
asFileBrowser :: Lens' AppState FileBrowser
asFileBrowser = lens _asFileBrowser (\s v -> s { _asFileBrowser = v })
-- | Lens for the '_asLocalTime' field.
asLocalTime :: Lens' AppState UTCTime
asLocalTime = lens _asLocalTime (\s v -> s { _asLocalTime = v })
-- | Lens for the '_asAsync' field.
asAsync :: Lens' AppState Async
asAsync = lens _asAsync (\s v -> s { _asAsync = v })
-- | A UI action bound to a view @v@ and a widget context @ctx@:
-- a state transformer over 'AppState' paired with a human-readable
-- description of what it does.
data Action (v :: ViewName) (ctx :: Name) a = Action
  { _aDescription :: [T.Text]
  -- ^ sequential list of things that the action does
  , _aAction :: StateT AppState (EventM Name) a
  -- ^ the actual state transformation run by the event handler
  }
-- | Mirrors the __HACK__ used by the 'Keybinding' instance below.
-- NOTE(review): the outer 'seq' only evaluates the freshly-built
-- 'Action' constructor to WHNF, so the 'force' applications on the
-- (lazy) fields are never demanded -- this looks like it forces
-- nothing; confirm the intended strictness.
instance NFData (Action v ctx a) where
  rnf (Action desc (StateT f)) = Action (force desc) (StateT (force f)) `seq` ()
-- | Map over the action's result; the description is untouched.
instance Functor (Action v ctx) where
  fmap f (Action d m) = Action d (f <$> m)
-- | Sequencing concatenates the two description lists; 'pure' yields
-- an action with an empty description.
instance Applicative (Action v ctx) where
  pure = Action [] . pure
  Action d1 f <*> Action d2 x = Action (d1 <> d2) (f <*> x)
-- | Getter for the wrapped state transformer.
aAction :: Getter (Action v ctx a) (StateT AppState (EventM Name) a)
aAction = to (\(Action _ act) -> act)
-- | Getter for the human-readable description lines.
aDescription :: Getter (Action v ctx a) [T.Text]
aDescription = to (\(Action desc _) -> desc)
-- | Binds a terminal input event to the 'Action' it triggers, for a
-- given view @v@ and widget context @ctx@.
data Keybinding (v :: ViewName) (ctx :: Name) = Keybinding
  { _kbEvent :: Vty.Event -- ^ the triggering terminal event
  , _kbAction :: Action v ctx (Next AppState) -- ^ the action run when the event fires
  }
-- | __HACK__: the 'Vty.Event' is only evaluated to WHNF.
-- There is no 'NFData' instance for 'Vty.Event' and I don't want
-- to make an orphan instance for it.
instance NFData (Keybinding v ctx) where
  rnf (Keybinding ev act) = Keybinding ev (force act) `seq` ()
-- | Keybindings are identified by their triggering 'Vty.Event' alone;
-- the bound action is deliberately ignored in comparisons.
instance Eq (Keybinding v ctx) where
  -- Only '(==)' is given; '(/=)' falls back to the class default
  -- (the negation of '(==)'), which the previous hand-written
  -- definition merely duplicated.
  (==) (Keybinding a _) (Keybinding b _) = a == b
-- | Getter for the triggering terminal event.
kbEvent :: Getter (Keybinding v ctx) Vty.Event
kbEvent = to (\(Keybinding ev _) -> ev)
-- | Getter for the bound action.
kbAction :: Getter (Keybinding v ctx) (Action v ctx (Next AppState))
kbAction = to (\(Keybinding _ act) -> act)
-- | An email from the notmuch database represented in Purebred.
data NotmuchMail = NotmuchMail
  { _mailSubject :: T.Text
  , _mailFrom :: T.Text
  , _mailDate :: UTCTime
  , _mailTags :: [Tag]
  , _mailId :: B.ByteString -- ^ raw identifier (presumably the notmuch message id -- confirm)
  } deriving (Show, Eq)
-- Record lenses for 'NotmuchMail'.
-- | Lens for the '_mailSubject' field.
mailSubject :: Lens' NotmuchMail T.Text
mailSubject = lens _mailSubject (\s v -> s { _mailSubject = v })
-- | Lens for the '_mailFrom' field.
mailFrom :: Lens' NotmuchMail T.Text
mailFrom = lens _mailFrom (\s v -> s { _mailFrom = v })
-- | Lens for the '_mailDate' field.
mailDate :: Lens' NotmuchMail UTCTime
mailDate = lens _mailDate (\s v -> s { _mailDate = v })
-- | Lens for the '_mailTags' field.
mailTags :: Lens' NotmuchMail [Tag]
mailTags = lens _mailTags (\s v -> s { _mailTags = v })
-- | Lens for the '_mailId' field.
mailId :: Lens' NotmuchMail B.ByteString
mailId = lens _mailId (\s v -> s { _mailId = v })
-- | A thread of mails from the notmuch database represented in Purebred.
data NotmuchThread = NotmuchThread
  { _thSubject :: T.Text
  , _thAuthors :: [T.Text]
  , _thDate :: UTCTime
  , _thTags :: [Tag]
  , _thReplies :: Int -- ^ number of replies in the thread
  , _thId :: B.ByteString -- ^ raw identifier (presumably the notmuch thread id -- confirm)
  } deriving (Show, Eq)
-- Record lenses for 'NotmuchThread'.
-- | Lens for the '_thSubject' field.
thSubject :: Lens' NotmuchThread T.Text
thSubject = lens _thSubject (\s v -> s { _thSubject = v })
-- | Lens for the '_thAuthors' field.
thAuthors :: Lens' NotmuchThread [T.Text]
thAuthors = lens _thAuthors (\s v -> s { _thAuthors = v })
-- | Lens for the '_thDate' field.
thDate :: Lens' NotmuchThread UTCTime
thDate = lens _thDate (\s v -> s { _thDate = v })
-- | Lens for the '_thTags' field.
thTags :: Lens' NotmuchThread [Tag]
thTags = lens _thTags (\s v -> s { _thTags = v })
-- | Lens for the '_thReplies' field.
thReplies :: Lens' NotmuchThread Int
thReplies = lens _thReplies (\s v -> s { _thReplies = v })
-- | Lens for the '_thId' field.
thId :: Lens' NotmuchThread B.ByteString
thId = lens _thId (\s v -> s { _thId = v })
-- | Safely decode a UTF-8 'B.ByteString' to 'T.Text': invalid byte
-- sequences are replaced instead of raising an error.
decodeLenient :: B.ByteString -> T.Text
decodeLenient bytes = T.decodeUtf8With T.lenientDecode bytes
|
purebred-mua/purebred
|
src/Purebred/Types.hs
|
agpl-3.0
| 30,636
| 0
| 13
| 5,475
| 8,378
| 4,744
| 3,634
| 616
| 2
|
-- -*-haskell-*-
-- GIMP Toolkit (GTK) OpenGL Extension
--
-- Author : Duncan Coutts
--
-- Created: 9 June 2005
--
-- Copyright (C) 2005 Duncan Coutts
--
-- This library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU Lesser General Public
-- License as published by the Free Software Foundation; either
-- version 2.1 of the License, or (at your option) any later version.
--
-- This library is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- Lesser General Public License for more details.
--
-- |
-- Maintainer : gtk2hs-users@lists.sourceforge.net
-- Stability : provisional
-- Portability : portable (depends on GHC)
--
-- OpenGL extension for Gtk+
--
module Graphics.UI.Gtk.OpenGL (
-- * Detail
-- * Simple OpenGL drawing area widget
module Graphics.UI.Gtk.OpenGL.DrawingArea,
-- * Initialisation and query functions
module Graphics.UI.Gtk.OpenGL.General,
-- * Lower level modules
module Graphics.UI.Gtk.OpenGL.Config,
module Graphics.UI.Gtk.OpenGL.Context,
module Graphics.UI.Gtk.OpenGL.Drawable,
module Graphics.UI.Gtk.OpenGL.Pixmap,
module Graphics.UI.Gtk.OpenGL.Window,
) where
import Graphics.UI.Gtk.OpenGL.Config
import Graphics.UI.Gtk.OpenGL.Context
import Graphics.UI.Gtk.OpenGL.Drawable
import Graphics.UI.Gtk.OpenGL.Pixmap
import Graphics.UI.Gtk.OpenGL.Window
import Graphics.UI.Gtk.OpenGL.General
import Graphics.UI.Gtk.OpenGL.DrawingArea
|
gtk2hs/gtkglext
|
Graphics/UI/Gtk/OpenGL.hs
|
lgpl-2.1
| 1,577
| 0
| 5
| 241
| 160
| 129
| 31
| 15
| 0
|
{-# language DeriveDataTypeable, DeriveFunctor, DeriveFoldable, DeriveTraversable #-}
module Base.GameGrounds (
GameGrounds(GameGrounds, gameMainLayerUpdatingRange),
gameBackgrounds,
gameMainLayer,
gameForegrounds,
GameLayer(GameLayer, gameXDistance, gameYDistance),
gameContent,
mkGameGrounds,
) where
import Data.Accessor
import Data.Data
import Data.Indexable
import Data.Indexable.Range
import Base.Grounds
-- | Game-time variant of 'Grounds' (see 'mkGameGrounds'): background
-- and foreground layers converted to 'GameLayer's, and the main layer
-- paired with the 'Range' of indices that gets updated.
data GameGrounds a = GameGrounds {
    gameBackgrounds_ :: [GameLayer a],
    gameMainLayer_ :: Indexable a,
    gameMainLayerUpdatingRange :: Range,
    gameForegrounds_ :: [GameLayer a]
  }
    deriving (Show, Foldable, Data, Typeable)
-- | Accessor for the background layers.
gameBackgrounds :: Accessor (GameGrounds a) [GameLayer a]
gameBackgrounds = accessor gameBackgrounds_ (\ v g -> g{gameBackgrounds_ = v})
-- | Accessor for the main layer's contents.
gameMainLayer :: Accessor (GameGrounds a) (Indexable a)
gameMainLayer = accessor gameMainLayer_ (\ v g -> g{gameMainLayer_ = v})
-- | Accessor for the foreground layers.
gameForegrounds :: Accessor (GameGrounds a) [GameLayer a]
gameForegrounds = accessor gameForegrounds_ (\ v g -> g{gameForegrounds_ = v})
-- | Game-time variant of 'Layer': plain list content plus x/y
-- distance factors (presumably for layered scrolling -- confirm
-- against the renderer).
data GameLayer a = GameLayer {
    gameContent_ :: [a],
    gameXDistance :: Double,
    gameYDistance :: Double
  }
    deriving (Show, Read, Data, Typeable, Foldable)
-- | Accessor for a layer's contents.
gameContent :: Accessor (GameLayer a) [a]
gameContent = accessor gameContent_ (\ v g -> g{gameContent_ = v})
-- * creation

-- | Convert editor-time 'Grounds' into game-time 'GameGrounds',
-- attaching the updating 'Range' to the main layer.
mkGameGrounds :: Grounds o -> Range -> GameGrounds o
mkGameGrounds (Grounds bgs mainLayer fgs) updatingRange = GameGrounds
    (convertLayers bgs)
    (mainLayer ^. content)
    updatingRange
    (convertLayers fgs)
  where
    -- Convert every layer in an 'Indexable' to a 'GameLayer'.
    convertLayers :: Indexable (Layer a) -> [GameLayer a]
    convertLayers = fmap toGameLayer . toList
    -- A single layer keeps its distances; content becomes a list.
    toGameLayer :: Layer a -> GameLayer a
    toGameLayer (Layer cs xd yd) = GameLayer (toList cs) xd yd
|
nikki-and-the-robots/nikki
|
src/Base/GameGrounds.hs
|
lgpl-3.0
| 1,897
| 0
| 10
| 397
| 542
| 302
| 240
| 50
| 1
|
module ReplaceExperiment where
-- | Ignore the argument entirely and produce the character @\'p\'@.
replaceWithP :: b -> Char
replaceWithP _ = 'p'
-- | Sample data: a list of 'Maybe' strings used to demonstrate
-- functor lifting at different depths.
lms :: [Maybe [Char]]
lms = [Just "Ave", Nothing, Just "woohoo"]
-- | No lifting: the whole list is passed as one argument.
replaceWithP' :: [Maybe [Char]] -> Char
replaceWithP' = replaceWithP
-- | Lifted once: applied to each 'Maybe' element of the list.
liftedReplace :: [Maybe [Char]] -> [Char]
liftedReplace = fmap replaceWithP
-- | Lifted twice: applied inside each 'Maybe'.
twiceLifted :: [Maybe [Char]] -> [Maybe Char]
twiceLifted = (fmap . fmap) replaceWithP
-- | Lifted three times: applied to each 'Char' of each string.
thriceLifted :: [Maybe [Char]] -> [Maybe [Char]]
thriceLifted = (fmap . fmap . fmap) replaceWithP
-- Exercise values; their expected results are checked in 'main'.
a = fmap (+1) $ read "[1]" :: [Int]
b = (fmap . fmap) (++ "lol") (Just ["Hi,", "Hello"])
c = (*2) . (\x -> x - 2) $ 1 -- but, but... no fmap?
d = (return '1' ++) . show . (\x -> [x, 1..3]) $ 0
-- | Print each lifted variant applied to 'lms', then check the
-- exercise values @a@ .. @d@ against their expected results.
main :: IO ()
main = do
  report "replaceWithP' lms: " (replaceWithP' lms)
  report "liftedReplace lms: " (liftedReplace lms)
  report "twiceLifted lms: " (twiceLifted lms)
  report "thriceLifted lms: " (thriceLifted lms)
  report "a ? " (a == [2])
  report "b ? " (b == Just ["Hi,lol", "Hellolol"])
  report "c ? " (c == (-2))
  report "d ? " (d == "1[0,1,2,3]")
  where
    -- One label/value pair, mirroring the original putStr/print pairs.
    report :: Show r => String -> r -> IO ()
    report label value = putStr label *> print value
|
thewoolleyman/haskellbook
|
16/07/maor/ReplaceExperiment.hs
|
unlicense
| 1,187
| 0
| 11
| 309
| 500
| 258
| 242
| 35
| 1
|
module Parse.Monad
(
-- * Parse
MonadParse(..)
-- * Lex
, MonadLex(..)
-- * Combinator
, optional
-- * Error
, Error(..)
-- * Reexports
, M.mapM
, M.mapM_
)
where
import qualified Control.Monad as M
import qualified Parse.Location as L
{- |
An inhabitant of @m a@ is like an inhabitant of
@[t] -> Maybe (a, [t])@ where @t@ is the token type.
-}
class (Monad m) => MonadParse m where
    -- | The current position in the input.
    getLocation :: m L.Location
    -- | This matches the end of input.
    end :: m ()
    -- | The expression @many x@ matches zero or more occurrences of @x@.
    many :: m a -> m [a]
    -- | The expression @many1 x@ matches one or more occurrences of @x@.
    many1 :: m a -> m [a]
    -- | Match one of the given alternatives (presumably the first
    -- that succeeds -- confirm against the instances).
    choice :: [m a] -> m a
    {- |
The expression @'try' x \<|\> y@ matches @x@ or backtracks and matches @y@.
You must use 'try' if @x@ and @y@ share a prefix.
-}
    (<|>) :: m a -> m a -> m a
    infixr 3 <|>
    {- |
The expression @try x@ is the same as @x@,
but if @x@ fails, then @try x@ does not consume any input.
-}
    try :: m a -> m a
    -- | Fail with a message describing what was (unexpectedly) found.
    unexpected :: String -> m a
    -- | Fail with a message describing what was expected instead.
    expected :: String -> m a
    -- | Attach a name to a parser (presumably used in error
    -- messages -- confirm against the instances).
    named :: String -> m a -> m a
-- | A @MonadLex@ instance is a 'MonadParse' instance that works with 'Char's.
class (MonadParse m) => MonadLex m where
    -- | This matches a character satisfying 'Data.Char.isAlphaNum'.
    alphaNum :: m Char
    -- | This matches a character satisfying 'Data.Char.isSpace'.
    uniWhite :: m Char
    -- The following match single characters of the class suggested by
    -- their names; exact semantics are defined by the instances.
    lower :: m Char
    upper :: m Char
    digit :: m Char
    -- | Match any one of the given characters.
    oneOf :: [Char] -> m Char
    -- | Match any single character.
    anyChar :: m Char
    -- | Match a character satisfying the given predicate.
    charSatisfying :: (Char -> Bool) -> m Char
    -- | The expression @char x@ matches the character @x@.
    char :: Char -> m Char
    -- | The expression @string x@ matches the string @x@.
    string :: String -> m String
-- | A parse error: where it happened and a human-readable description.
data Error
    = MkError
      {
        location :: L.Location -- ^ Where the error occurred.
      , message :: String -- ^ Human-readable description.
      }
    deriving (Read, Show)
{- |
The expression @optional x@ matches zero or one occurrence of @x@:
if @x@ succeeds, its result is wrapped in 'Just' (consuming whatever
@x@ consumed); otherwise 'Nothing' is produced.  Note that per the
'(<|>)' contract, @x@ must not consume input when it fails -- wrap it
in 'try' if it might.
-}
optional :: (MonadParse m) => m a -> m (Maybe a)
optional x = fmap Just x <|> pure Nothing
|
edom/ptt
|
src/Parse/Monad.hs
|
apache-2.0
| 2,128
| 0
| 9
| 623
| 462
| 253
| 209
| 40
| 1
|
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QColorDialog.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:16
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Gui.QColorDialog (
qColorDialogCustomColor
,qColorDialogCustomCount
,QqColorDialogGetColor(..)
,QqColorDialogGetRgba(..)
,qColorDialogSetCustomColor
,qColorDialogSetStandardColor
)
where
import Qth.ClassTypes.Core
import Qtc.Enums.Base
import Qtc.Classes.Base
import Qtc.Classes.Qccs
import Qtc.Classes.Core
import Qtc.ClassTypes.Core
import Qth.ClassTypes.Core
import Qtc.Classes.Gui
import Qtc.ClassTypes.Gui
-- | Static QColorDialog::customColor wrapper: read custom-colour slot
-- @x1@, marshalled to/from C as CInt/CUInt.  (Machine-generated FFI
-- binding -- see the file header; do not hand-edit the marshalling.)
qColorDialogCustomColor :: ((Int)) -> IO (Int)
qColorDialogCustomColor (x1)
  = withUnsignedIntResult $
  qtc_QColorDialog_customColor (toCInt x1)
foreign import ccall "qtc_QColorDialog_customColor" qtc_QColorDialog_customColor :: CInt -> IO CUInt
-- | Static QColorDialog::customCount wrapper: number of custom-colour
-- slots.
qColorDialogCustomCount :: (()) -> IO (Int)
qColorDialogCustomCount ()
  = withIntResult $
  qtc_QColorDialog_customCount
foreign import ccall "qtc_QColorDialog_customCount" qtc_QColorDialog_customCount :: IO CInt
-- | Overload class for QColorDialog::getColor; instances cover the
-- zero-, one- (initial colour) and two-argument (initial colour plus
-- parent widget) C++ overloads.
class QqColorDialogGetColor x1 where
  qColorDialogGetColor :: x1 -> IO (QColor ())
instance QqColorDialogGetColor (()) where
  qColorDialogGetColor ()
    = withQColorResult $
    qtc_QColorDialog_getColor
foreign import ccall "qtc_QColorDialog_getColor" qtc_QColorDialog_getColor :: IO (Ptr (TQColor ()))
instance QqColorDialogGetColor ((QColor t1)) where
  qColorDialogGetColor (x1)
    = withQColorResult $
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QColorDialog_getColor1 cobj_x1
foreign import ccall "qtc_QColorDialog_getColor1" qtc_QColorDialog_getColor1 :: Ptr (TQColor t1) -> IO (Ptr (TQColor ()))
instance QqColorDialogGetColor ((QColor t1, QWidget t2)) where
  qColorDialogGetColor (x1, x2)
    = withQColorResult $
    withObjectPtr x1 $ \cobj_x1 ->
    withObjectPtr x2 $ \cobj_x2 ->
    qtc_QColorDialog_getColor2 cobj_x1 cobj_x2
foreign import ccall "qtc_QColorDialog_getColor2" qtc_QColorDialog_getColor2 :: Ptr (TQColor t1) -> Ptr (TQWidget t2) -> IO (Ptr (TQColor ()))
-- | Overload class for QColorDialog::getRgba; the one-argument
-- instance passes an initial value (marshalled as CUInt).
class QqColorDialogGetRgba x1 where
  qColorDialogGetRgba :: x1 -> IO (Int)
instance QqColorDialogGetRgba (()) where
  qColorDialogGetRgba ()
    = withUnsignedIntResult $
    qtc_QColorDialog_getRgba
foreign import ccall "qtc_QColorDialog_getRgba" qtc_QColorDialog_getRgba :: IO CUInt
instance QqColorDialogGetRgba ((Int)) where
  qColorDialogGetRgba (x1)
    = withUnsignedIntResult $
    qtc_QColorDialog_getRgba1 (toCUInt x1)
foreign import ccall "qtc_QColorDialog_getRgba1" qtc_QColorDialog_getRgba1 :: CUInt -> IO CUInt
-- | Static QColorDialog::setCustomColor wrapper: set custom-colour
-- slot @x1@ to value @x2@.
qColorDialogSetCustomColor :: ((Int, Int)) -> IO ()
qColorDialogSetCustomColor (x1, x2)
  = qtc_QColorDialog_setCustomColor (toCInt x1) (toCUInt x2)
foreign import ccall "qtc_QColorDialog_setCustomColor" qtc_QColorDialog_setCustomColor :: CInt -> CUInt -> IO ()
-- | Static QColorDialog::setStandardColor wrapper: set standard-colour
-- slot @x1@ to value @x2@.
qColorDialogSetStandardColor :: ((Int, Int)) -> IO ()
qColorDialogSetStandardColor (x1, x2)
  = qtc_QColorDialog_setStandardColor (toCInt x1) (toCUInt x2)
foreign import ccall "qtc_QColorDialog_setStandardColor" qtc_QColorDialog_setStandardColor :: CInt -> CUInt -> IO ()
-- QWidget-inherited 'addAction' for QColorDialog and its subclass
-- wrapper (QColorDialogSc); both delegate to the same C wrapper.
instance QaddAction (QColorDialog ()) ((QAction t1)) (IO ()) where
  addAction x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QColorDialog_addAction cobj_x0 cobj_x1
foreign import ccall "qtc_QColorDialog_addAction" qtc_QColorDialog_addAction :: Ptr (TQColorDialog a) -> Ptr (TQAction t1) -> IO ()
instance QaddAction (QColorDialogSc a) ((QAction t1)) (IO ()) where
  addAction x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QColorDialog_addAction cobj_x0 cobj_x1
-- QWidget-inherited 'move' overloads: (x, y) coordinates, a Haskell
-- 'Point', or a 'QPoint' object.  Each is instantiated for both
-- QColorDialog and QColorDialogSc.
instance Qmove (QColorDialog ()) ((Int, Int)) where
  move x0 (x1, x2)
    = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QColorDialog_move1 cobj_x0 (toCInt x1) (toCInt x2)
foreign import ccall "qtc_QColorDialog_move1" qtc_QColorDialog_move1 :: Ptr (TQColorDialog a) -> CInt -> CInt -> IO ()
instance Qmove (QColorDialogSc a) ((Int, Int)) where
  move x0 (x1, x2)
    = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QColorDialog_move1 cobj_x0 (toCInt x1) (toCInt x2)
instance Qmove (QColorDialog ()) ((Point)) where
  move x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
    withCPoint x1 $ \cpoint_x1_x cpoint_x1_y ->
    qtc_QColorDialog_move_qth cobj_x0 cpoint_x1_x cpoint_x1_y
foreign import ccall "qtc_QColorDialog_move_qth" qtc_QColorDialog_move_qth :: Ptr (TQColorDialog a) -> CInt -> CInt -> IO ()
instance Qmove (QColorDialogSc a) ((Point)) where
  move x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
    withCPoint x1 $ \cpoint_x1_x cpoint_x1_y ->
    qtc_QColorDialog_move_qth cobj_x0 cpoint_x1_x cpoint_x1_y
instance Qqmove (QColorDialog ()) ((QPoint t1)) where
  qmove x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QColorDialog_move cobj_x0 cobj_x1
foreign import ccall "qtc_QColorDialog_move" qtc_QColorDialog_move :: Ptr (TQColorDialog a) -> Ptr (TQPoint t1) -> IO ()
instance Qqmove (QColorDialogSc a) ((QPoint t1)) where
  qmove x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QColorDialog_move cobj_x0 cobj_x1
-- QWidget-inherited 'repaint' overloads: whole widget, an (x, y, w, h)
-- rectangle, or a 'QRegion'.
instance Qrepaint (QColorDialog ()) (()) where
  repaint x0 ()
    = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QColorDialog_repaint cobj_x0
foreign import ccall "qtc_QColorDialog_repaint" qtc_QColorDialog_repaint :: Ptr (TQColorDialog a) -> IO ()
instance Qrepaint (QColorDialogSc a) (()) where
  repaint x0 ()
    = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QColorDialog_repaint cobj_x0
instance Qrepaint (QColorDialog ()) ((Int, Int, Int, Int)) where
  repaint x0 (x1, x2, x3, x4)
    = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QColorDialog_repaint2 cobj_x0 (toCInt x1) (toCInt x2) (toCInt x3) (toCInt x4)
foreign import ccall "qtc_QColorDialog_repaint2" qtc_QColorDialog_repaint2 :: Ptr (TQColorDialog a) -> CInt -> CInt -> CInt -> CInt -> IO ()
instance Qrepaint (QColorDialogSc a) ((Int, Int, Int, Int)) where
  repaint x0 (x1, x2, x3, x4)
    = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QColorDialog_repaint2 cobj_x0 (toCInt x1) (toCInt x2) (toCInt x3) (toCInt x4)
instance Qrepaint (QColorDialog ()) ((QRegion t1)) where
  repaint x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QColorDialog_repaint1 cobj_x0 cobj_x1
foreign import ccall "qtc_QColorDialog_repaint1" qtc_QColorDialog_repaint1 :: Ptr (TQColorDialog a) -> Ptr (TQRegion t1) -> IO ()
instance Qrepaint (QColorDialogSc a) ((QRegion t1)) where
  repaint x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QColorDialog_repaint1 cobj_x0 cobj_x1
-- QWidget-inherited 'resize' overloads: (w, h) integers, a 'QSize'
-- object, or a Haskell 'Size'.
instance Qresize (QColorDialog ()) ((Int, Int)) (IO ()) where
  resize x0 (x1, x2)
    = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QColorDialog_resize1 cobj_x0 (toCInt x1) (toCInt x2)
foreign import ccall "qtc_QColorDialog_resize1" qtc_QColorDialog_resize1 :: Ptr (TQColorDialog a) -> CInt -> CInt -> IO ()
instance Qresize (QColorDialogSc a) ((Int, Int)) (IO ()) where
  resize x0 (x1, x2)
    = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QColorDialog_resize1 cobj_x0 (toCInt x1) (toCInt x2)
instance Qqresize (QColorDialog ()) ((QSize t1)) where
  qresize x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QColorDialog_resize cobj_x0 cobj_x1
foreign import ccall "qtc_QColorDialog_resize" qtc_QColorDialog_resize :: Ptr (TQColorDialog a) -> Ptr (TQSize t1) -> IO ()
instance Qqresize (QColorDialogSc a) ((QSize t1)) where
  qresize x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QColorDialog_resize cobj_x0 cobj_x1
instance Qresize (QColorDialog ()) ((Size)) (IO ()) where
  resize x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
    withCSize x1 $ \csize_x1_w csize_x1_h ->
    qtc_QColorDialog_resize_qth cobj_x0 csize_x1_w csize_x1_h
foreign import ccall "qtc_QColorDialog_resize_qth" qtc_QColorDialog_resize_qth :: Ptr (TQColorDialog a) -> CInt -> CInt -> IO ()
instance Qresize (QColorDialogSc a) ((Size)) (IO ()) where
  resize x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
    withCSize x1 $ \csize_x1_w csize_x1_h ->
    qtc_QColorDialog_resize_qth cobj_x0 csize_x1_w csize_x1_h
-- QWidget-inherited 'setGeometry' overloads: (x, y, w, h) integers,
-- a 'QRect' object, or a Haskell 'Rect'.
instance QsetGeometry (QColorDialog ()) ((Int, Int, Int, Int)) where
  setGeometry x0 (x1, x2, x3, x4)
    = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QColorDialog_setGeometry1 cobj_x0 (toCInt x1) (toCInt x2) (toCInt x3) (toCInt x4)
foreign import ccall "qtc_QColorDialog_setGeometry1" qtc_QColorDialog_setGeometry1 :: Ptr (TQColorDialog a) -> CInt -> CInt -> CInt -> CInt -> IO ()
instance QsetGeometry (QColorDialogSc a) ((Int, Int, Int, Int)) where
  setGeometry x0 (x1, x2, x3, x4)
    = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QColorDialog_setGeometry1 cobj_x0 (toCInt x1) (toCInt x2) (toCInt x3) (toCInt x4)
instance QqsetGeometry (QColorDialog ()) ((QRect t1)) where
  qsetGeometry x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QColorDialog_setGeometry cobj_x0 cobj_x1
foreign import ccall "qtc_QColorDialog_setGeometry" qtc_QColorDialog_setGeometry :: Ptr (TQColorDialog a) -> Ptr (TQRect t1) -> IO ()
instance QqsetGeometry (QColorDialogSc a) ((QRect t1)) where
  qsetGeometry x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QColorDialog_setGeometry cobj_x0 cobj_x1
instance QsetGeometry (QColorDialog ()) ((Rect)) where
  setGeometry x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
    withCRect x1 $ \crect_x1_x crect_x1_y crect_x1_w crect_x1_h ->
    qtc_QColorDialog_setGeometry_qth cobj_x0 crect_x1_x crect_x1_y crect_x1_w crect_x1_h
foreign import ccall "qtc_QColorDialog_setGeometry_qth" qtc_QColorDialog_setGeometry_qth :: Ptr (TQColorDialog a) -> CInt -> CInt -> CInt -> CInt -> IO ()
instance QsetGeometry (QColorDialogSc a) ((Rect)) where
  setGeometry x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
    withCRect x1 $ \crect_x1_x crect_x1_y crect_x1_w crect_x1_h ->
    qtc_QColorDialog_setGeometry_qth cobj_x0 crect_x1_x crect_x1_y crect_x1_w crect_x1_h
-- QWidget-inherited 'setMouseTracking' (Bool marshalled as CBool).
instance QsetMouseTracking (QColorDialog ()) ((Bool)) where
  setMouseTracking x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QColorDialog_setMouseTracking cobj_x0 (toCBool x1)
foreign import ccall "qtc_QColorDialog_setMouseTracking" qtc_QColorDialog_setMouseTracking :: Ptr (TQColorDialog a) -> CBool -> IO ()
instance QsetMouseTracking (QColorDialogSc a) ((Bool)) where
  setMouseTracking x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QColorDialog_setMouseTracking cobj_x0 (toCBool x1)
|
uduki/hsQt
|
Qtc/Gui/QColorDialog.hs
|
bsd-2-clause
| 10,765
| 0
| 13
| 1,707
| 3,437
| 1,783
| 1,654
| -1
| -1
|
{-# LANGUAGE DefaultSignatures, FlexibleContexts, TypeOperators #-}
module Data.Rule.Alignable
( Alignable(..)
, GAlignable
) where
import Control.Applicative
import Control.Applicative.Backwards
import Control.Comonad.Cofree
import Control.Comonad.Env
import Control.Monad.Free
import Control.Monad.Trans.Either
import Control.Monad.Trans.Identity
import Control.Monad.Trans.Writer
import Control.Parallel (pseq)
import Data.Functor.Compose
import Data.Functor.Identity
import Data.Functor.Product
import Data.Functor.Coproduct
import Data.Functor.Reverse
import Data.Tuple (swap)
import GHC.Generics
--------------------------------------------------------------------------------
-- * Alignable
--------------------------------------------------------------------------------
-- | Shapes we can try to match exactly and glue with applicative side-effects.
class Alignable t where
  -- | Zip two structures position-by-position: when their shapes match
  -- exactly, combine corresponding elements with the effectful @f@;
  -- on a shape mismatch, fail with 'empty' from the 'Alternative'.
  align :: Alternative m => (a -> b -> m c) -> t a -> t b -> m (t c)
  -- The default goes through GHC generics: convert both values to their
  -- 'Rep1' representation, align those with 'galign', and convert back.
  default align :: (Generic1 t, GAlignable (Rep1 t), Alternative m) => (a -> b -> m c) -> t a -> t b -> m (t c)
  align f xs ys = to1 <$> galign f (from1 xs) (from1 ys)
-- | Two 'Maybe's align when both are 'Just' (combining the payloads) or
-- both are 'Nothing' (identical empty shapes).
--
-- Bug fix: previously @Nothing@/@Nothing@ fell through to 'empty' even
-- though the shapes match exactly -- inconsistent with the @Left@/@Left@
-- case of the 'Either' instance and with the generic derivation, where a
-- nullary constructor aligns with itself ('U1' yields @pure U1@).
instance Alignable Maybe where
  align f (Just a) (Just b) = Just <$> f a b
  align _ Nothing Nothing = pure Nothing
  align _ _ _ = empty
-- | Two lists align exactly when they have the same length; corresponding
-- elements are combined left to right.
--
-- Bug fix: the previous definition had no @[]@/@[]@ base case, so the
-- catch-all @align _ _ _ = empty@ fired at the end of every recursion and
-- aligning ANY pair of lists (even two empty ones) always failed.
instance Alignable [] where
  align _ [] [] = pure []
  align f (x:xs) (y:ys) = (:) <$> f x y <*> align f xs ys
  align _ _ _ = empty
-- | 'Right's align by combining payloads; 'Left's align only when the
-- error values are equal (they carry no alignable slot, so equality is
-- the shape check).
instance Eq e => Alignable (Either e) where
  align f (Right a) (Right b) = Right <$> f a b
  align _ (Left a) (Left b) | a == b = pure (Left a)
  align _ _ _ = empty

-- | Pairs align when their first (annotation) components are equal.
instance Eq e => Alignable ((,) e) where
  align f (e, a) (e', b)
    | e == e' = (,) e <$> f a b
    | otherwise = empty
-- | Nested shapes: align the outer layer, aligning inner layers pointwise.
instance (Alignable f, Alignable g) => Alignable (Compose f g) where
  align f (Compose xs) (Compose ys) = Compose <$> align (align f) xs ys

instance Alignable f => Alignable (IdentityT f) where
  align f (IdentityT xs) (IdentityT ys) = IdentityT <$> align f xs ys

instance Alignable Identity where
  align f (Identity a) (Identity b) = Identity <$> f a b

-- | Writer output pairs are (a, e); 'swap' puts the @Eq e@ annotation
-- first so the @(,) e@ instance can check it.
instance (Alignable m, Eq e) => Alignable (WriterT e m) where
  align f (WriterT xs) (WriterT ys) = WriterT <$> align (\as bs -> swap <$> align f (swap as) (swap bs)) xs ys

instance (Alignable m, Eq e) => Alignable (EitherT e m) where
  align f (EitherT xs) (EitherT ys) = EitherT <$> align (align f) xs ys

-- | Products align componentwise; both halves must align.
instance (Alignable f, Alignable g) => Alignable (Product f g) where
  align f (Pair as as') (Pair bs bs') = Pair <$> align f as bs <*> align f as' bs'

-- | Coproducts align only when both values live in the same summand.
instance (Alignable f, Alignable g) => Alignable (Coproduct f g) where
  align f (Coproduct (Left xs)) (Coproduct (Left ys)) = Coproduct . Left <$> align f xs ys
  align f (Coproduct (Right xs)) (Coproduct (Right ys)) = Coproduct . Right <$> align f xs ys
  align _ _ _ = empty

-- | Free monads align layer by layer; Pure/Free mismatches fail.
instance Alignable f => Alignable (Free f) where
  align f (Pure a) (Pure b) = Pure <$> f a b
  align f (Free as) (Free bs) = Free <$> align (align f) as bs
  align _ _ _ = empty

instance Alignable f => Alignable (Cofree f) where
  align f (a :< as) (b :< bs) = (:<) <$> f a b <*> align (align f) as bs

-- | Environments must be equal for the wrapped comonads to align.
instance (Alignable w, Eq e) => Alignable (EnvT e w) where
  align f (EnvT e wa) (EnvT e' wb)
    | e == e' = EnvT e <$> align f wa wb
    | otherwise = empty

-- | 'Backwards' runs the applicative effects in reverse element order,
-- matching the reversed traversal order of 'Reverse'.
instance Alignable f => Alignable (Reverse f) where
  align f (Reverse as) (Reverse bs) = fmap Reverse . forwards $ align (\a b -> Backwards $ f a b) as bs
--------------------------------------------------------------------------------
-- * Generic Alignable
--------------------------------------------------------------------------------
-- | Alignment on GHC.Generics representation types; drives the default
-- method of 'Alignable'.
class GAlignable t where
  galign :: Alternative m => (a -> b -> m c) -> t a -> t b -> m (t c)

-- Nullary constructors trivially align with themselves.
instance GAlignable U1 where
  galign _ U1 U1 = pure U1

-- Empty types: no values can exist, so this is never reached at runtime;
-- 'pseq' forces the (impossible) argument before the 'undefined'.
instance GAlignable V1 where
  galign _ as _ = as `pseq` undefined

-- Product representations align field by field.
instance (GAlignable f, GAlignable g) => GAlignable (f :*: g) where
  galign f (as :*: as') (bs :*: bs') = (:*:) <$> galign f as bs <*> galign f as' bs'

-- Sum representations align only when built by the same constructor.
instance (GAlignable f, GAlignable g) => GAlignable (f :+: g) where
  galign f (L1 as) (L1 bs) = L1 <$> galign f as bs
  galign f (R1 as) (R1 bs) = R1 <$> galign f as bs
  galign _ _ _ = empty

-- Metadata wrappers are transparent.
instance GAlignable f => GAlignable (M1 i c f) where
  galign f (M1 as) (M1 bs) = M1 <$> galign f as bs

-- Constant fields align when equal (equality is the shape check).
instance Eq c => GAlignable (K1 i c) where
  galign _ (K1 x) (K1 y)
    | x == y = pure (K1 x)
    | otherwise = empty

-- Recursive occurrences defer to the wrapped type's 'Alignable'.
instance Alignable f => GAlignable (Rec1 f) where
  galign f (Rec1 as) (Rec1 bs) = Rec1 <$> align f as bs

-- Parameter occurrences are where 'f' finally combines the elements.
instance GAlignable Par1 where
  galign f (Par1 a) (Par1 b) = Par1 <$> f a b
|
ekmett/rules
|
src/Data/Rule/Alignable.hs
|
bsd-2-clause
| 4,579
| 0
| 14
| 995
| 2,075
| 1,046
| 1,029
| 88
| 0
|
module Road
where
import Control.Monad
import qualified Data.BitSet.Dynamic as Bits
import Data.BitSet.Dynamic (BitSet)
-- | A set of road indices backed by a dynamic bitset.
data RoadBits = RoadBits { _road_bits :: BitSet Int }

-- | Is index @i@ marked as road?
isRoad :: RoadBits -> Int -> Bool
isRoad rbits i = Bits.member i (_road_bits rbits)

instance Show RoadBits where
  show = showRoadBits
-- | Render road bits as 'X' (road) / '.' (no road) per index.
-- NOTE(review): only index 0 is rendered; presumably the list should span
-- the full bit range of the set -- confirm intended width.
showRoadBits :: RoadBits -> String
showRoadBits rbits = map go [0]
  where
    -- Bug fix: the original ignored @i@ and always tested bit 0.
    go i = if isRoad rbits i then 'X' else '.'
-- Reproduction case: this segfaults in ghci (62 elements works, 63 does not).
test = RoadBits $ Bits.fromList [0..63] -- 62 works

-- Second reproduction: build a 64-element bitset and probe the first few
-- indices, printing 'X' for members and '.' otherwise.
test2 = do
  let bset = Bits.fromList [0..63]
  forM_ [0..10] $ \i -> do
    putChar $ if Bits.member i bset then 'X' else '.'
  putStrLn ""
|
erantapaa/test-bitset
|
src/Road.hs
|
bsd-3-clause
| 658
| 0
| 14
| 133
| 237
| 127
| 110
| 18
| 2
|
module Zero.Exchange.Internal
(
-- * Type synonyms
CurrencyPair
, Currency
, Amount
, Price
, BidEntry
, AskEntry
, MarketBidEntry
, MarketAskEntry
, UserId
, OrderId
, OrderBookId
, BidId
, AskId
, EntryId
, TradeId
-- * Data types
, LimitOrder(..)
, MarketOrder(..)
, Bid(..)
, Ask(..)
, OrderBook(..)
, SingleEntry(..)
, DoubleEntry(..)
, Trade(..)
-- * Functions
, newDoubleEntry
, newTrade
, newBid
, newAsk
) where
import GHC.Generics (Generic)
import Data.Text (Text)
import Data.Function (on)
import ProjectM36.Tupleable
import Zero.Crypto (generateUUID)
------------------------------------------------------------------------------
-- Identifier aliases; the new* constructors below fill them with UUIDs.
type UserId = Text
type Currency = Text
-- | (from, to) currencies naming a market.
type CurrencyPair = (Currency, Currency)
-- FIXME: Should be Integer
type Amount = Int
type Price = Double
type OrderId = Text
type OrderBookId = Text
type BidId = Text
type AskId = Text
type EntryId = Text
type TradeId = Text
-- | An order to exchange a fixed 'lo_fromAmount' for a fixed 'lo_toAmount'
-- on behalf of 'lo_user'.
data LimitOrder = LimitOrder
  { lo_id :: OrderId
  , lo_user :: UserId
  , lo_fromAmount :: Amount
  , lo_toAmount :: Amount
  } deriving (Eq, Show, Generic)

instance Tupleable LimitOrder
-- | Build a 'LimitOrder' for @uid@ with a freshly generated UUID.
newLimitOrder :: UserId -> Amount -> Amount -> IO LimitOrder
newLimitOrder uid fromAmount toAmount = do
  orderId <- generateUUID
  pure (LimitOrder orderId uid fromAmount toAmount)
-- | An order for a given amount at whatever the market price is.
data MarketOrder = MarketOrder
  { mo_id :: OrderId
  , mo_user :: UserId
  , mo_amount :: Amount
  } deriving (Eq, Show, Generic)

instance Tupleable MarketOrder
-- | Build a 'MarketOrder' for @uid@ with a freshly generated UUID.
newMarketOrder :: UserId -> Amount -> IO MarketOrder
newMarketOrder uid amount = do
  orderId <- generateUUID
  pure (MarketOrder orderId uid amount)
-- | A priced bid referencing the order it came from.
data Bid = Bid
  { b_id :: BidId
  , b_price :: Price
  , b_order :: OrderId
  } deriving (Eq, Show, Generic)

-- | A priced ask referencing the order it came from.
data Ask = Ask
  { a_id :: AskId
  , a_price :: Price
  , a_order :: OrderId
  } deriving (Eq, Show, Generic)

instance Tupleable Bid
instance Tupleable Ask

-- Pair each book entry with the order that produced it.
type BidEntry = (Bid, LimitOrder)
type MarketBidEntry = (Bid, MarketOrder)
type AskEntry = (Ask, LimitOrder)
type MarketAskEntry = (Ask, MarketOrder)
-- | Create a bid entry: a fresh UUID, a price computed as the ratio
-- fromAmount / toAmount, and the underlying limit order.
newBid :: UserId -> Amount -> Amount -> IO BidEntry
newBid user fromAmount toAmount = do
  bidId <- generateUUID
  order <- newLimitOrder user fromAmount toAmount
  let price = fromIntegral fromAmount / fromIntegral toAmount
  pure (Bid bidId price (lo_id order), order)
-- | Create an ask entry: a fresh UUID, a price computed as the ratio
-- fromAmount / toAmount, and the underlying limit order.
newAsk :: UserId -> Amount -> Amount -> IO AskEntry
newAsk user fromAmount toAmount = do
  askId <- generateUUID
  order <- newLimitOrder user fromAmount toAmount
  let price = fromIntegral fromAmount / fromIntegral toAmount
  pure (Ask askId price (lo_id order), order)
-- | One market, identified by its (from, to) currency pair.
data OrderBook = OrderBook
  { ob_id :: OrderBookId
  , ob_fromCurrency :: Currency
  , ob_toCurrency :: Currency
  } deriving (Eq, Show, Generic)

instance Tupleable OrderBook

-- | A one-sided ledger entry for a single account.
data SingleEntry = SingleEntry
  { se_id :: EntryId
  , se_account :: UserId
  , se_currency :: Currency
  , se_amount :: Amount
  } deriving (Eq, Show, Generic)

instance Tupleable SingleEntry

-- | A transfer of 'de_amount' 'de_currency' from one account to another.
data DoubleEntry = DoubleEntry
  { de_id :: EntryId
  , de_fromAccount :: UserId
  , de_toAccount :: UserId
  , de_currency :: Currency
  , de_amount :: Amount
  } deriving (Eq, Show, Generic)

instance Tupleable DoubleEntry
-- | Build a 'DoubleEntry' transfer record with a freshly generated UUID.
newDoubleEntry :: UserId -> UserId -> Currency -> Amount -> IO DoubleEntry
newDoubleEntry from to currency amount = do
  entryId <- generateUUID
  pure (DoubleEntry entryId from to currency amount)
-- | A completed trade linking the two ledger entries it settled.
data Trade = Trade
  { t_id :: TradeId
  , t_from :: EntryId
  , t_to :: EntryId
  } deriving (Eq, Show, Generic)

instance Tupleable Trade
-- | Build a 'Trade' joining two entries, with a freshly generated UUID.
newTrade :: EntryId -> EntryId -> IO Trade
newTrade fromEntry toEntry = do
  tradeId <- generateUUID
  pure (Trade tradeId fromEntry toEntry)
|
et4te/zero
|
server/src/Zero/Exchange/Internal.hs
|
bsd-3-clause
| 3,781
| 0
| 12
| 869
| 1,126
| 638
| 488
| 129
| 1
|
-- | Helper functions for making openGL Drawings
module Degu.DrawingUtils where
import Degu.Font
import Graphics.Rendering.OpenGL
import Foreign
import Control.Monad
import Graphics.Rendering.FreeType.Internal.Bitmap
-- | Upload an alpha-only bitmap to a new GL 2D texture and return its id.
-- The texture is left bound to Texture2D with nearest filtering and clamped
-- wrap modes. NOTE(review): the @[tid] <- genObjectNames 1@ pattern is
-- partial -- it crashes if the GL driver returns other than one name.
makeTexture :: TextureSize2D -> ForeignPtr GLbyte -> IO TextureObject
makeTexture sz@(TextureSize2D _w _h) fbm = do
    [tid] <- genObjectNames 1
    textureBinding Texture2D $= Just tid
    withForeignPtr fbm $ \bm -> do
        texImage2D Nothing NoProxy 0 Alpha' sz 0 (PixelData Alpha UnsignedByte bm)
--        dump (fromIntegral w) (fromIntegral h) bm
    textureFilter Texture2D $= ((Nearest, Nothing), Nearest)
    textureWrapMode Texture2D S $= (Repeated, Clamp)
    textureWrapMode Texture2D T $= (Repeated, Clamp)
    return tid
    -- Debug helpers (unused; kept for ad-hoc texture dumps in ghci).
    where _dump w_ h_ p =
            forM_ [0 .. w_ - 1] $ \x -> do
              forM_ [0 .. h_ - 1] $ \y ->
                putChar . _nice =<< peek (p `plusPtr` (x + y * w_))
              putStr "$\n"
          -- Map an alpha byte to a rough ASCII intensity character.
          _nice :: Word8 -> Char
          _nice 0 = ' '
          _nice x | x < 64 = '.'
                  | x < 128 = ','
                  | x < 196 = ':'
                  | x < 255 = '='
                  | otherwise = '#'
-- | Rasterise @text@ at size @sz@ into a freshly allocated bitmap buffer.
-- Returns (alignment, texture size, buffer). The buffer is laid out
-- column-major (the write below indexes @y + yy + (x + xx) * bh@), which is
-- why the returned 'TextureSize2D' puts @bh@ first.
-- NOTE(review): assumes 'write' yields @Left usedWidth@ on overflow and
-- @Right fullWidth@ otherwise (hence @either id id@) -- confirm in Degu.Font.
renderText :: FontFace -> String -> Int -> IO (Int, TextureSize2D, ForeignPtr GLbyte)
renderText face text sz = do
    (bbw, bbh) <- faceMaxSize face sz
    align <- faceAlign face sz
    -- Worst-case width: every glyph as wide as the widest glyph.
    let bw = bbw * length text; bh = bbh
    fbm <- mallocForeignPtrBytes (bw * bh)
    bw' <- fmap (either id id) $ withForeignPtr fbm $ \bm -> do
        write face sz text $ \_ (x,y) (w,h) bitmap_ ->
            -- Stop (reporting the width used so far) if the next glyph
            -- would overrun the allocated buffer.
            if x + w > bw then return $ Just x else do
                forM_ [0 .. w - 1] $ \xx -> do
                    forM_ [0 .. h - 1] $ \yy -> do
                        px <- peek ((buffer bitmap_) `plusPtr` (fromIntegral $ yy * w + xx))
                        poke (bm `plusPtr` (y + yy + ((x + xx) * bh))) (px :: Word8)
                return Nothing
    return (align, TextureSize2D (fromIntegral bh) (fromIntegral bw'), fbm)
|
AtnNn/degu
|
Degu/DrawingUtils.hs
|
bsd-3-clause
| 1,900
| 0
| 33
| 543
| 772
| 393
| 379
| 43
| 2
|
import Control.Monad
import Control.Concurrent
import Network
import TestSimple
-- | Accept TCP connections on port 54492 forever; each accepted handle
-- (first component of 'accept's triple) is served by 'testSimple' on its
-- own thread.
main :: IO ()
main = forever . (void . forkIO . testSimple . fst3 =<<) . accept
    =<< listenOn (PortNumber 54492)
-- | Project the first component of a triple.
fst3 :: (a, b, c) -> a
fst3 t = case t of (x, _, _) -> x
|
YoshikuniJujo/xml-push
|
examples/simpleServer.hs
|
bsd-3-clause
| 238
| 0
| 11
| 47
| 104
| 58
| 46
| 9
| 1
|
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.Raw.APPLE.ClientStorage
-- Copyright : (c) Sven Panne 2015
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
-- The <https://www.opengl.org/registry/specs/APPLE/client_storage.txt APPLE_client_storage> extension.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.Raw.APPLE.ClientStorage (
-- * Enums
gl_UNPACK_CLIENT_STORAGE_APPLE
) where
import Graphics.Rendering.OpenGL.Raw.Tokens
|
phaazon/OpenGLRaw
|
src/Graphics/Rendering/OpenGL/Raw/APPLE/ClientStorage.hs
|
bsd-3-clause
| 671
| 0
| 4
| 78
| 37
| 31
| 6
| 3
| 0
|
-- Copyright : Daan Leijen (c) 1999, daan@cs.uu.nl
-- HWT Group (c) 2003, haskelldb-users@lists.sourceforge.net
-- License : BSD-style
module Opaleye.Internal.HaskellDB.Sql.Default where
import Opaleye.Internal.HaskellDB.PrimQuery
import qualified Opaleye.Internal.HaskellDB.PrimQuery as PQ
import Opaleye.Internal.HaskellDB.Sql
import Opaleye.Internal.HaskellDB.Sql.Generate
import qualified Opaleye.Internal.HaskellDB.Sql as Sql
import Opaleye.Internal.Tag (tagWith)
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as BS8
import qualified Data.ByteString.Base16 as Base16
import qualified Data.List.NonEmpty as NEL
-- | Build a 'SqlGenerator' whose six operations are the default
-- implementations, each parameterised by @gen@ so a caller can override
-- some fields while the defaults recurse through the supplied generator.
mkSqlGenerator :: SqlGenerator -> SqlGenerator
mkSqlGenerator gen = SqlGenerator
    {
      sqlUpdate = defaultSqlUpdate gen,
      sqlDelete = defaultSqlDelete gen,
      sqlInsert = defaultSqlInsert gen,
      sqlExpr = defaultSqlExpr gen,
      sqlLiteral = defaultSqlLiteral gen,
      sqlQuote = defaultSqlQuote gen
    }
-- | The all-defaults generator. Defined via a lazy knot: it is built from
-- itself, so every recursive call inside the defaults uses this same
-- generator. Safe because the record fields are only forced on demand.
defaultSqlGenerator :: SqlGenerator
defaultSqlGenerator = mkSqlGenerator defaultSqlGenerator
-- | Compile an ORDER BY item: render the expression and translate the
-- direction and NULLS FIRST/LAST flags into their Sql equivalents.
toSqlOrder :: SqlGenerator -> OrderExpr -> (SqlExpr,SqlOrder)
toSqlOrder gen (OrderExpr o e) =
  (sqlExpr gen e, Sql.SqlOrder { sqlOrderDirection = o'
                               , sqlOrderNulls = orderNulls' })
  where o' = case PQ.orderDirection o of
          PQ.OpAsc  -> Sql.SqlAsc
          PQ.OpDesc -> Sql.SqlDesc
        orderNulls' = case PQ.orderNulls o of
          PQ.NullsFirst -> Sql.SqlNullsFirst
          PQ.NullsLast  -> Sql.SqlNullsLast
-- | Wrap an attribute name as a SQL column reference.
toSqlColumn :: Attribute -> SqlColumn
toSqlColumn = SqlColumn

-- | Render an update association list: each attribute becomes a column
-- and each primitive expression is compiled with the given generator.
toSqlAssoc :: SqlGenerator -> Assoc -> [(SqlColumn,SqlExpr)]
toSqlAssoc gen assocs =
  [ (toSqlColumn attr, sqlExpr gen expr) | (attr, expr) <- assocs ]
-- | Build an UPDATE statement: SET assignments from @assigns@, WHERE
-- conjuncts from @criteria@.
defaultSqlUpdate :: SqlGenerator
                 -> SqlTable -- ^ Table to update
                 -> [PrimExpr] -- ^ Conditions which must all be true for a row
                               -- to be updated.
                 -> Assoc -- ^ Update the data with this.
                 -> SqlUpdate
defaultSqlUpdate gen tbl criteria assigns
  = SqlUpdate tbl (toSqlAssoc gen assigns) (map (sqlExpr gen) criteria)
-- | Build a (possibly multi-row) INSERT: one non-empty list of value rows,
-- each row rendered expression by expression.
defaultSqlInsert :: SqlGenerator
                 -> SqlTable
                 -> [Attribute]
                 -> NEL.NonEmpty [PrimExpr]
                 -> SqlInsert
defaultSqlInsert gen tbl attrs exprs =
  SqlInsert tbl (map toSqlColumn attrs) ((fmap . map) (sqlExpr gen) exprs)
-- | Build a DELETE statement with the given WHERE conjuncts.
defaultSqlDelete :: SqlGenerator
                 -> SqlTable
                 -> [PrimExpr] -- ^ Criteria which must all be true for a row
                               -- to be deleted.
                 -> SqlDelete
defaultSqlDelete gen tbl criteria = SqlDelete tbl (map (sqlExpr gen) criteria)
-- | Compile a primitive expression tree into an abstract SQL expression.
-- Recursive calls go through @sqlExpr gen@ so a customised generator's
-- override applies at every level.
defaultSqlExpr :: SqlGenerator -> PrimExpr -> SqlExpr
defaultSqlExpr gen expr =
    case expr of
      AttrExpr (Symbol a t) -> ColumnSqlExpr (SqlColumn (tagWith t a))
      BaseTableAttrExpr a -> ColumnSqlExpr (SqlColumn a)
      CompositeExpr e x -> CompositeSqlExpr (defaultSqlExpr gen e) x
      -- Binary operators: parenthesise operands just enough to preserve
      -- the tree's AND/OR precedence; bare constants never need parens.
      BinExpr op e1 e2 ->
        let leftE = sqlExpr gen e1
            rightE = sqlExpr gen e2
            paren = ParensSqlExpr
            (expL, expR) = case (op, e1, e2) of
              (OpAnd, BinExpr OpOr _ _, BinExpr OpOr _ _) ->
                (paren leftE, paren rightE)
              (OpOr, BinExpr OpAnd _ _, BinExpr OpAnd _ _) ->
                (paren leftE, paren rightE)
              (OpAnd, BinExpr OpOr _ _, _) ->
                (paren leftE, rightE)
              (OpAnd, _, BinExpr OpOr _ _) ->
                (leftE, paren rightE)
              (OpOr, BinExpr OpAnd _ _, _) ->
                (paren leftE, rightE)
              (OpOr, _, BinExpr OpAnd _ _) ->
                (leftE, paren rightE)
              (_, ConstExpr _, ConstExpr _) ->
                (leftE, rightE)
              (_, _, ConstExpr _) ->
                (paren leftE, rightE)
              (_, ConstExpr _, _) ->
                (leftE, paren rightE)
              _ -> (paren leftE, paren rightE)
        in BinSqlExpr (showBinOp op) expL expR
      -- Unary operators render as a function call, a prefix operator
      -- (with a parenthesised operand), or a postfix operator.
      UnExpr op e -> let (op',t) = sqlUnOp op
                         e' = sqlExpr gen e
                     in case t of
                          UnOpFun -> FunSqlExpr op' [e']
                          UnOpPrefix -> PrefixSqlExpr op' (ParensSqlExpr e')
                          UnOpPostfix -> PostfixSqlExpr op' e'
      -- TODO: The current arrangement whereby the delimeter parameter
      -- of string_agg is in the AggrStringAggr constructor, but the
      -- parameter being aggregated is not, seems unsatisfactory
      -- because it leads to a non-uniformity of treatment, as seen
      -- below.  Perhaps we should have just `AggrExpr AggrOp` and
      -- always put the `PrimExpr` in the `AggrOp`.
      AggrExpr op e -> let op' = showAggrOp op
                           e' = sqlExpr gen e
                           moreAggrFunParams = case op of
                             AggrStringAggr primE -> [sqlExpr gen primE]
                             _ -> []
                       in AggrFunSqlExpr op' (e' : moreAggrFunParams)
      ConstExpr l -> ConstSqlExpr (sqlLiteral gen l)
      CaseExpr cs e -> let cs' = [(sqlExpr gen c, sqlExpr gen x)| (c,x) <- cs]
                           e'  = sqlExpr gen e
                       in CaseSqlExpr cs' e'
      ListExpr es -> ListSqlExpr (map (sqlExpr gen) es)
      ParamExpr n _ -> ParamSqlExpr n PlaceHolderSqlExpr
      FunExpr n exprs -> FunSqlExpr n (map (sqlExpr gen) exprs)
      CastExpr typ e1 -> CastSqlExpr typ (sqlExpr gen e1)
      DefaultInsertExpr -> DefaultSqlExpr
-- | SQL spelling of each binary operator; 'OpOther' passes through verbatim.
showBinOp :: BinOp -> String
showBinOp  OpEq         = "="
showBinOp  OpLt         = "<"
showBinOp  OpLtEq       = "<="
showBinOp  OpGt         = ">"
showBinOp  OpGtEq       = ">="
showBinOp  OpNotEq      = "<>"
showBinOp  OpAnd        = "AND"
showBinOp  OpOr         = "OR"
showBinOp  OpLike       = "LIKE"
showBinOp  OpIn         = "IN"
showBinOp  (OpOther s)  = s
showBinOp  OpCat        = "||"
showBinOp  OpPlus       = "+"
showBinOp  OpMinus      = "-"
showBinOp  OpMul        = "*"
showBinOp  OpDiv        = "/"
showBinOp  OpMod        = "MOD"
showBinOp  OpBitNot     = "~"
showBinOp  OpBitAnd     = "&"
showBinOp  OpBitOr      = "|"
showBinOp  OpBitXor     = "^"
showBinOp  OpAsg        = "="
showBinOp  OpAtTimeZone = "AT TIME ZONE"

-- | How a unary operator is rendered: function call, prefix, or postfix.
data UnOpType = UnOpFun | UnOpPrefix | UnOpPostfix

-- | SQL spelling and rendering style of each unary operator.
sqlUnOp :: UnOp -> (String,UnOpType)
sqlUnOp  OpNot         = ("NOT", UnOpPrefix)
sqlUnOp  OpIsNull      = ("IS NULL", UnOpPostfix)
sqlUnOp  OpIsNotNull   = ("IS NOT NULL", UnOpPostfix)
sqlUnOp  OpLength      = ("LENGTH", UnOpFun)
sqlUnOp  OpAbs         = ("@", UnOpFun)
sqlUnOp  OpNegate      = ("-", UnOpFun)
sqlUnOp  OpLower       = ("LOWER", UnOpFun)
sqlUnOp  OpUpper       = ("UPPER", UnOpFun)
sqlUnOp  (UnOpOther s) = (s, UnOpFun)

-- | SQL function name for each aggregate operator.
showAggrOp :: AggrOp -> String
showAggrOp AggrCount          = "COUNT"
showAggrOp AggrSum            = "SUM"
showAggrOp AggrAvg            = "AVG"
showAggrOp AggrMin            = "MIN"
showAggrOp AggrMax            = "MAX"
showAggrOp AggrStdDev         = "StdDev"
showAggrOp AggrStdDevP        = "StdDevP"
showAggrOp AggrVar            = "Var"
showAggrOp AggrVarP           = "VarP"
showAggrOp AggrBoolAnd        = "BOOL_AND"
showAggrOp AggrBoolOr         = "BOOL_OR"
showAggrOp AggrArr            = "ARRAY_AGG"
showAggrOp (AggrStringAggr _) = "STRING_AGG"
showAggrOp (AggrOther s)      = s
-- | Render a literal as SQL source text. The generator argument is not
-- used by the default implementation.
defaultSqlLiteral :: SqlGenerator -> Literal -> String
defaultSqlLiteral _ lit = case lit of
    NullLit         -> "NULL"
    DefaultLit      -> "DEFAULT"
    BoolLit True    -> "TRUE"
    BoolLit False   -> "FALSE"
    ByteStringLit s -> binQuote s
    StringLit s     -> quote s
    IntegerLit i    -> show i
    -- The IEEE special values get their quoted Postgres spellings.
    DoubleLit d
      | isNaN d                 -> "'NaN'"
      | isInfinite d && d < 0   -> "'-Infinity'"
      | isInfinite d && d > 0   -> "'Infinity'"
      | otherwise               -> show d
    OtherLit o      -> o
-- | Default string quoting: delegate straight to 'quote'.
defaultSqlQuote :: SqlGenerator -> String -> String
defaultSqlQuote _ = quote
-- | Quote a string for SQL, escaping characters that need it. The
-- Postgres "escape string" form (prefixed with @E@) is used so the
-- backslash escapes produced by 'escape' are always interpreted.
quote :: String -> String
quote s = concat ["E'", concatMap escape s, "'"]
-- | Escape a single character for inclusion in a Postgres escape string:
-- NUL, quotes, backslash and the common control characters are rewritten;
-- everything else passes through unchanged.
escape :: Char -> String
escape c = case c of
    '\NUL' -> "\\0"
    '\''   -> "''"
    '"'    -> "\\\""
    '\b'   -> "\\b"
    '\n'   -> "\\n"
    '\r'   -> "\\r"
    '\t'   -> "\\t"
    '\\'   -> "\\\\"
    _      -> [c]
-- | Quote binary literals using Postgresql's hex format: the bytes are
-- base16-encoded and wrapped in an @E'\\x...'@ escape string.
binQuote :: ByteString -> String
binQuote s = "E'\\\\x" ++ BS8.unpack (Base16.encode s) ++ "'"
|
hesselink/haskell-opaleye
|
src/Opaleye/Internal/HaskellDB/Sql/Default.hs
|
bsd-3-clause
| 8,975
| 0
| 16
| 2,962
| 2,304
| 1,211
| 1,093
| 188
| 25
|
module Mire.Test.Telnet (telnetTestGroup) where
import Dawn
import qualified Codec.Compression.Zlib as Zlib
import qualified Data.ByteString.Char8 as CS
import qualified Data.ByteString.Lazy.Char8 as CSL
import Data.ByteString.Internal (c2w)
import Mire.Telnet
import Mire.Data.Flow
import Test.Tasty
import Test.Tasty.HUnit
import Test.Tasty.QuickCheck
import Mire.Test.Base
-- | All telnet-pipeline tests, bundled for the top-level tasty runner.
telnetTestGroup :: TestTree
telnetTestGroup = testGroup "Telnet tests" [
    testCase "Simple text" simpleTest,
    testCase "Zlib substreams" zlibStreamTest,
    testProperty "Random escaped input" randomInputTest,
    testCase "Term type cycle (TTYPE)" termTypeTest,
    testCase "Charset request" charSetTest,
    testCase "SE without SB" wrongSETest,
    testCase "Simple GMCP" gmcpTest
  ]
-- a simple test, what goes in also comes out
-- A simple round trip: plain text goes through the pipe unchanged.
simpleTest :: Assertion
simpleTest = assertEqual "" (toNet "some input") (simplePipeTest telnet (toW8 "some input"))

-- Property over random input; escaping IAC bytes first is necessary
-- because 0xFF is the telnet command prefix.
randomInputTest :: String -> Bool
randomInputTest s = simplePipeTest telnet (toW8 $ escapeIAC s) == (toNet s)

-- Decoding embedded zlib streams: each IAC SB COMPRESS2 IAC SE marker
-- (\255\250\86\255\240) starts a compressed substream.
zlibStreamTest :: Assertion
zlibStreamTest = assertEqual ""
    ((TelnetText <$> toW8 "a ")
    <> [telnetDebug "Enabled COMPRESSv2"]
    <> (toNet "ztest ccc ")
    <> [telnetDebug "Enabled COMPRESSv2"]
    <> (toNet "ztest done"))
    (simplePipeTest telnet (toW8 ("a \255\250\86\255\240" ++ zstring ++ " ccc \255\250\86\255\240" ++ zstring ++ " done")))

-- "ztest" compressed with zlib, used as the embedded substream payload.
zstring :: [Char]
zstring = CSL.unpack $ Zlib.compress $ CSL.pack "ztest"
-- test whether the term type is sent and that it cycles correctly when requested multiple times
-- Five TTYPE requests (IAC SB TERM SEND IAC SE) should advance the
-- advertised terminal type XTERM -> MIRE -> UNKNOWN, repeat UNKNOWN once,
-- then wrap back to XTERM.
termTypeTest :: Assertion
termTypeTest = assertEqual ""
    ( -- [TelnetMeta (MetaDebug "TELNET" "snd IAC SB TERM \"\\NULMXP\" IAC SE")]
      -- <> sent "MXP"
      [telnetDebug "snd IAC SB TERM \"\\NULXTERM\" IAC SE"]
      <> sent "XTERM"
      <> [telnetDebug "snd IAC SB TERM \"\\NULMIRE\" IAC SE"]
      <> sent "MIRE"
      <> [telnetDebug "snd IAC SB TERM \"\\NULUNKNOWN\" IAC SE"]
      <> sent "UNKNOWN"
      <> [telnetDebug "snd IAC SB TERM \"\\NULUNKNOWN\" IAC SE"]
      <> sent "UNKNOWN"
      <> [telnetDebug "snd IAC SB TERM \"\\NULXTERM\" IAC SE"]
      <> sent "XTERM")
    (simplePipeTest telnet (toW8 $ req <> req <> req <> req <> req))
  -- req is one full TTYPE SEND suboption negotiation.
  where req = "\255\250\24\1\255\240" :: String
        sent a = [subSent $ '\24' : '\0' : a]
-- test requesting the supported charset
-- IAC WILL CHARSET (\255\251\42) should trigger a CHARSET suboption reply
-- advertising "utf8"; surrounding text passes through untouched.
charSetTest :: Assertion
charSetTest = assertEqual ""
    (fs "test "
     <> [telnetDebug "rcv IAC WILL CHARSET",
         telnetDebug "snd IAC SB CHARSET ECHO \"utf8\" IAC SE"]
     <> [subSent "\42\1utf8"] <> fs " test")
    (simplePipeTest telnet (toW8 "test \255\251\42 test"))

-- An IAC SE without a preceding IAC SB is a protocol error.
wrongSETest :: Assertion
wrongSETest = assertEqual ""
    ([telnetError "Found IAC SE without IAC SB"])
    (simplePipeTest telnet (toW8 $ "\255\240"))

-- A GMCP suboption (option 201) should surface its payload as MetaGMCP.
gmcpTest :: Assertion
gmcpTest = assertEqual ""
    ([TelnetMeta (MetaGMCP "{x: a, y: b}")])
    (simplePipeTest telnet (toW8 $ "\255\250\201{x: a, y: b}\255\240"))
-- convert a string to a list of TelnetText elements
fs :: String -> [Telnet]
fs w = TelnetText <$> toW8 w
-- build a MetaSend for a suboption
subSent :: String -> Telnet
subSent a = TelnetMeta $ MetaSend $ CS.pack ("\255\250" <> a <> "\255\240")
-- simple String to [Word8]
toW8 :: String -> [Word8]
toW8 = fmap c2w
toNet :: String -> [Telnet]
toNet = fmap (TelnetText . c2w)
-- | Double every IAC byte (0xFF) so the telnet layer treats it as a
-- literal data byte rather than a command prefix.
escapeIAC :: String -> String
escapeIAC = concatMap esc
  where
    esc '\255' = "\255\255"
    esc c      = [c]
|
ellej/mire
|
test/Mire/Test/Telnet.hs
|
bsd-3-clause
| 3,641
| 0
| 17
| 719
| 889
| 478
| 411
| 78
| 1
|
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE ScopedTypeVariables #-}
module TPar.ProcessPipe ( ProcessOutput(..)
, runProcess
-- * Killing the process
, ProcessKilled(..)
-- * Deinterleaving output
, processOutputToHandles
, selectStream
, OutputStreams(..)
) where
import Control.Applicative
import Data.Monoid
import Data.Traversable
import qualified Pipes.Prelude as PP
import qualified Pipes.ByteString as PBS
import Data.ByteString (ByteString)
import qualified Data.ByteString as BS
import Control.Monad (msum)
import Control.Exception (Exception)
import System.IO (Handle)
import System.Exit
import Pipes
import Pipes.Safe () -- for MonadCatch instance
import qualified Pipes.Concurrent as PC
import System.Process (runInteractiveProcess, ProcessHandle, waitForProcess, terminateProcess)
import Control.Concurrent.STM
import Control.Distributed.Process
import Control.Monad.Catch (handle, throwM)
import Data.Binary
import Data.Binary.Put
import Data.Binary.Get
import GHC.Generics
import TPar.Utils
-- | Spawn an interactive process and expose its standard streams as pipes:
-- a Consumer feeding the child's stdin and Producers reading its stdout
-- and stderr. Note the tuple bindings shadow 'System.IO.stdin' etc.
-- locally, which is intentional here.
processPipes :: MonadIO m
             => FilePath -- ^ Executable name
             -> [String] -- ^ Arguments
             -> Maybe FilePath -- ^ Working directory
             -> Maybe [(String,String)] -- ^ Optional environment
             -> IO ( Consumer ByteString m ()
                   , Producer ByteString m ()
                   , Producer ByteString m ()
                   , ProcessHandle)
processPipes cmd args cwd env = do
    (stdin, stdout, stderr, phandle) <- runInteractiveProcess cmd args cwd env
    return (PBS.toHandle stdin, PBS.fromHandle stdout, PBS.fromHandle stderr, phandle)
-- Control message type for the interleaver.
-- NOTE(review): not referenced anywhere in this module's visible code --
-- possibly dead, or used by a remote peer; confirm before removing.
data InterleaverCanTerminate = InterleaverCanTerminate deriving (Generic)
instance Binary InterleaverCanTerminate

-- | Thrown when a monitored producer process dies abnormally; carries the
-- rendered 'DiedReason'.
data InterleaveException = InterleaveException String
    deriving (Show)
instance Exception InterleaveException
-- | Merge several producers into one, running each on its own local
-- process feeding a bounded (10-slot) mailbox, and yielding whichever
-- item arrives first via STM. Each worker is monitored: an abnormal
-- death rethrows as 'InterleaveException'.
-- NOTE(review): the merged stream ends on the FIRST DiedNormal
-- notification, so items still buffered from other producers may be
-- dropped -- confirm this is the intended semantics.
interleave :: forall a. [Producer a Process ()] -> Producer a Process ()
interleave producers = do
    inputs <- lift $ forM producers $ \prod -> do
        (output, input, seal) <- liftIO $ PC.spawn' (PC.bounded 10)
        pid <- spawnLocal $ runEffect $ do
            prod >-> PC.toOutput output
            -- Seal the mailbox when the producer is exhausted so readers
            -- see end-of-stream rather than blocking forever.
            liftIO $ atomically seal
        _ <- monitor pid
        return input
    let matchTermination = match $ \(ProcessMonitorNotification _ _pid reason) ->
            case reason of
              DiedNormal -> return Nothing
              _ -> throwM $ InterleaveException $ show reason
        -- 'msum' over the mailboxes: take from the first one with data.
        matchData = matchSTM (PC.recv $ msum inputs) pure
        go :: Producer a Process ()
        go = do
            mx <- lift $ receiveWait [ matchTermination, matchData ]
            case mx of
              Nothing -> return ()
              Just x -> yield x >> go
    go
-- | One chunk of child-process output, tagged by originating stream.
data ProcessOutput
    = PutStdout !ByteString
    | PutStderr !ByteString
    deriving (Show, Generic)
instance Binary ProcessOutput

-- | A pair of per-stream values (e.g. a handler for stdout and stderr).
data OutputStreams a = OutputStreams { stdOut, stdErr :: a }
    deriving (Show, Functor, Generic)
instance Binary a => Binary (OutputStreams a)

instance Foldable OutputStreams where
    foldMap f (OutputStreams x y) = f x <> f y

-- Zippy applicative: 'pure' duplicates, '<*>' applies componentwise.
instance Applicative OutputStreams where
    pure x = OutputStreams x x
    OutputStreams f g <*> OutputStreams x y = OutputStreams (f x) (g y)

-- Unfortunate orphan. Encodes the exit code as a little-endian Int32;
-- note a serialised @ExitFailure 0@ decodes back as 'ExitSuccess'.
instance Binary ExitCode where
    get = do
        code <- getInt32le
        return $ case code of
                   0 -> ExitSuccess
                   _ -> ExitFailure (fromIntegral code)
    put ExitSuccess = putInt32le 0
    put (ExitFailure code) = putInt32le (fromIntegral code)

-- | Exception used to ask 'runProcess' to terminate its child.
data ProcessKilled = ProcessKilled
    deriving (Show, Generic)
instance Binary ProcessKilled
instance Exception ProcessKilled
-- | Eliminate a 'ProcessOutput' by handing its bytes to the matching
-- handler from the 'OutputStreams' record.
selectStream :: OutputStreams (ByteString -> a) -> ProcessOutput -> a
selectStream handlers (PutStdout bs) = stdOut handlers bs
selectStream handlers (PutStderr bs) = stdErr handlers bs
-- | Run an external command, yielding its interleaved stdout/stderr as
-- 'ProcessOutput' items and returning its exit code. If a 'ProcessKilled'
-- exception arrives, the child is terminated and the exception rethrown.
-- The child's stdin pipe is discarded (no input is fed).
runProcess :: FilePath -> [String] -> Maybe FilePath
           -> Producer ProcessOutput Process ExitCode
runProcess cmd args cwd = do
    lift $ tparDebug "starting process"
    (_stdin, stdout, stderr, phandle) <- liftIO $ processPipes cmd args cwd Nothing
    let processKilled ProcessKilled = liftIO $ do
            terminateProcess phandle
            throwM ProcessKilled
    handle processKilled $ do
        interleave [ stderr >-> PP.map PutStderr
                   , stdout >-> PP.map PutStdout
                   ]
        liftIO $ waitForProcess phandle
-- | Write a 'ProcessOutput' chunk to the corresponding 'Handle' (stdout
-- chunks to the first handle, stderr chunks to the second).
processOutputToHandles :: MonadIO m
                       => OutputStreams Handle -> ProcessOutput -> m ()
processOutputToHandles handles =
    selectStream $ fmap (\hdl bs -> liftIO $ BS.hPut hdl bs) handles
|
bgamari/tpar
|
TPar/ProcessPipe.hs
|
bsd-3-clause
| 5,045
| 0
| 18
| 1,519
| 1,336
| 690
| 646
| 120
| 3
|
--------------------------------------------------------------------------------
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module LoremMarkdownum.Gen
( MonadGen (..)
, GenIO
, runGenIO
, randomBool
, oneOf
, oneOfFrequencies
, sampleFromList
, sampleFromFrequencies
, sampleFromFrequencyTree
, shuffle
, partitionNicely
, depth0
, deeper
) where
--------------------------------------------------------------------------------
import Control.Monad (join)
import Control.Monad.Reader (ReaderT, ask, local, mapReaderT,
runReaderT)
import Control.Monad.State (StateT, get, put, runStateT)
import Control.Monad.Trans (lift)
import Data.Tuple (swap)
import System.Random (randomRIO)
--------------------------------------------------------------------------------
import LoremMarkdownum.FrequencyTree (FrequencyTree)
import qualified LoremMarkdownum.FrequencyTree as FT
--------------------------------------------------------------------------------
-- | Monads that can generate random numbers and track a nesting depth
-- (used to bound recursive generation).
class (Applicative m, Functor m, Monad m) => MonadGen m where
    -- | Uniform random 'Int' in the inclusive range.
    randomInt :: (Int, Int) -> m Int
    -- | Current nesting depth.
    depth     :: m Int
    -- | Run an action at a fixed depth.
    withDepth :: Int -> m a -> m a
--------------------------------------------------------------------------------
-- Lift 'MonadGen' through ReaderT: primitives delegate to the base monad,
-- 'withDepth' maps over the underlying computation.
instance MonadGen m => MonadGen (ReaderT r m) where
    randomInt   = lift . randomInt
    depth       = lift depth
    withDepth d = mapReaderT (withDepth d)

--------------------------------------------------------------------------------
-- Lift through StateT: run the stateful computation at depth @d@ in the
-- base monad, then restore the resulting state.
instance MonadGen m => MonadGen (StateT r m) where
    randomInt   = lift . randomInt
    depth       = lift depth
    withDepth d = \ms -> do
        s       <- get
        (x, s') <- lift $ withDepth d $ runStateT ms s
        put s'
        return x
--------------------------------------------------------------------------------
-- | IO-backed generator: the ReaderT environment holds the current depth.
newtype GenIO a = GenIO {unGenIO :: ReaderT Int IO a}
    deriving (Applicative, Functor, Monad)

--------------------------------------------------------------------------------
instance MonadGen GenIO where
    randomInt   = GenIO . lift . randomRIO
    depth       = GenIO ask
    withDepth d = GenIO . local (const d) . unGenIO

--------------------------------------------------------------------------------
-- | Run a 'GenIO' action starting at depth 0.
runGenIO :: GenIO a -> IO a
runGenIO gio = runReaderT (unGenIO gio) 0
--------------------------------------------------------------------------------
-- | Weighted coin flip: 'True' with probability @t / (t + f)@.
randomBool :: MonadGen m => Int -> Int -> m Bool
randomBool t f = do
    roll <- randomInt (1, t + f)
    pure (roll <= t)
--------------------------------------------------------------------------------
-- | Pick one of the given generators uniformly at random and run it.
-- Calling this on an empty list is a programmer error.
oneOf :: MonadGen m => [m a] -> m a
oneOf [] = error "Text.LoremMarkdownum.Gen.oneOf: empty list"
oneOf xs = randomInt (0, length xs - 1) >>= (xs !!)
--------------------------------------------------------------------------------
-- | Pick a generator with probability proportional to its weight, then run it.
oneOfFrequencies :: MonadGen m => [(Int, m a)] -> m a
oneOfFrequencies = join . sampleFromFrequencies . map swap

--------------------------------------------------------------------------------
-- | Uniform sample from a list (errors on an empty list, via 'oneOf').
sampleFromList :: MonadGen m => [a] -> m a
sampleFromList = oneOf . map return

--------------------------------------------------------------------------------
-- | Weighted sample: pick an index into the cumulative weights, then walk
-- the list subtracting weights until the index lands in a bucket.
sampleFromFrequencies :: MonadGen m => [(a, Int)] -> m a
sampleFromFrequencies freqs = do
    -- We could also use 'sampleFromFrequencyTree' but this way we don't have
    -- the 'Eq/Ord' constraint, which is nice (we can sample functions etc.).
    idx <- randomInt (0, sum (map snd freqs) - 1)
    return $ go idx freqs
  where
    go i ((x, f) : xs)
        | i < f     = x
        | otherwise = go (i - f) xs
    go _ [] = error
        "Text.LoremMarkdownum.Gen.sampleFromFrequencies: empty list"

--------------------------------------------------------------------------------
-- | Weighted sample backed by a 'FrequencyTree' (faster lookup, needs 'Ord').
sampleFromFrequencyTree :: (Ord a, MonadGen m)
                        => FrequencyTree a -> m a
sampleFromFrequencyTree ft = do
    idx <- randomInt (0, FT.sum ft - 1)
    return $ FT.sample idx ft
--------------------------------------------------------------------------------
-- | Fisher–Yates-style shuffle by repeated random extraction; quadratic in
-- the list length ("super-slow"), fine for the small lists used here.
shuffle :: MonadGen m => [a] -> m [a]
shuffle list
    | len < 2   = return list
    | otherwise = do
        i <- randomInt (0, len - 1)
        let (xs, ys) = splitAt i list
        (take 1 ys ++) <$> shuffle (xs ++ drop 1 ys)
  where
    len = length list

--------------------------------------------------------------------------------
-- | Split @total@ into @nGroups@ near-equal positive parts (remainder
-- spread one-per-group), returned in random order; zero parts are dropped.
partitionNicely :: MonadGen m => Int -> Int -> m [Int]
partitionNicely nGroups total = shuffle $ filter (> 0) $
    let (x, remainder) = total `divMod` nGroups
    in  replicate remainder (x + 1) ++ replicate (nGroups - remainder) x

--------------------------------------------------------------------------------
-- | Run an action at depth 0.
depth0 :: MonadGen m => m a -> m a
depth0 = withDepth 0

--------------------------------------------------------------------------------
-- | Run an action one level deeper than the current depth.
deeper :: MonadGen m => m a -> m a
deeper ma = depth >>= \d -> withDepth (d + 1) ma
|
jaspervdj/lorem-markdownum
|
lib/LoremMarkdownum/Gen.hs
|
bsd-3-clause
| 5,103
| 0
| 13
| 1,075
| 1,292
| 674
| 618
| 89
| 2
|
module Day4 where
import Crypto.Hash.MD5
import Data.ByteString (ByteString)
import qualified Data.ByteString as B
import Data.ByteString.Char8 (pack)
import Data.Hex
-- Advent of Code 2015 day 4: find the smallest positive integer whose
-- MD5 hash of (input ++ n) starts with the required number of zeros.

-- | Part two: six leading hex zeros.
partTwo :: Int
partTwo = solve (pack "000000")

-- | Part one: five leading hex zeros.
partOne :: Int
partOne = solve (pack "00000")

-- | Count the candidates whose hash lacks prefix @c@, then add 1 to get
-- the first matching index (candidates are 1-based).
solve :: ByteString -> Int
solve c = (+) 1 $ length $
          takeWhile (not . B.isPrefixOf c) $
          map (getHash . (input ++) . show) [1..]

-- | MD5 of the string, rendered as a hex ByteString.
getHash :: String -> ByteString
getHash = hex . hash . pack

-- | Puzzle input (secret key).
input :: String
input = "ckczppom"
|
z0isch/advent-of-code
|
src/Day4.hs
|
bsd-3-clause
| 546
| 0
| 11
| 141
| 186
| 105
| 81
| 18
| 1
|
module Data where
import Data.Derive.UniplateDirect
import Language.Lua.Parser
import Language.Lua.Syntax
{-!
deriving instance UniplateDirect (Block NodeInfo)
!-}
|
mitchellwrosen/llint
|
src/Data.hs
|
bsd-3-clause
| 166
| 0
| 4
| 18
| 23
| 16
| 7
| 4
| 0
|
{-# LANGUAGE DeriveDataTypeable #-}
module Language.TransactSql.Types
( SqlType(..)
, Len(..)
, Precision
, Scale
, Collation
, precision
, scale
, len
) where
import Data.Generics (Data, Typeable)
-- | Total number of significant decimal digits of a numeric type.
type Precision = Int

-- | Storage length of a SQL type, in bytes.
data Len = Varying Int Int -- ^ variable length: minimum and maximum bytes
         | Fixed Int       -- ^ fixed number of bytes
         | Unlimited       -- ^ no fixed upper bound (large-object types)
  deriving (Eq, Ord, Show, Read, Data, Typeable)

-- | Number of digits to the right of the decimal point.
type Scale = Int

-- | Name of a collation.
type Collation = String
-- | A Transact-SQL data type, including type parameters such as
-- precision, scale and length where the type takes them.
data SqlType
  = Bit
  | Numeric Precision Scale | Decimal Precision Scale
  | Money | SmallMoney
  | BigInt | Int | SmallInt | TinyInt
  | Float Int -- ^ float(n): mantissa bits (see 'precision')
  | Date | Time Scale | DateTime | DateTime2 Scale | SmallDateTime | DateTimeOffset Scale
  | Char Int | VarChar Int | Text
  | NChar Int | NVarChar Int | NText
  | Binary Int | VarBinary Int | Image
  | Cursor
  | TimeStamp
  | HierarchyId
  | UniqueIdentifier
  | SqlVariant
  | Xml
  | Table
  | Geometry
  | Geography
  deriving (Eq, Ord, Show, Read, Data, Typeable)
-- | Number of digits a fractional-seconds scale adds to a temporal
-- type's precision: no digits for scale 0, otherwise the digits plus
-- one for the decimal separator.
--
-- (Fix: the top-level type signature was missing, leaving the type to
-- GHC defaulting; the argument is a 'Scale', i.e. an 'Int'.)
precisionFromTimeScale :: Int -> Int
precisionFromTimeScale 0 = 0
precisionFromTimeScale p = p + 1
-- | Maximum number of decimal digits (precision) a value of the type
-- can hold, or 'Nothing' where precision is not meaningful.
precision :: SqlType -> Maybe Precision
precision Bit = Just 1
precision (Numeric p _) = Just p
precision (Decimal p _) = Just p
precision (Float bits)
    -- float with a mantissa of up to 24 bits is single precision
    -- (~7 digits); above that it is double precision (~15 digits)
    | bits <= 24 = Just 7
    | otherwise = Just 15
precision Money = Just 19
precision SmallMoney = Just 10
precision Date = Just 10
precision DateTime = Just 23
precision SmallDateTime = Just 16
precision (DateTimeOffset s) = Just (26 + precisionFromTimeScale s)
precision (DateTime2 s) = Just (19 + precisionFromTimeScale s)
precision (Time s) = Just (8 + precisionFromTimeScale s)
precision _ = Nothing
-- | Number of digits to the right of the decimal point, or 'Nothing'
-- where scale is not meaningful.
--
-- (Fix: @Numeric p s@ and @Decimal p s@ carry an explicit scale but
-- previously fell through to 'Nothing', inconsistent with 'precision'
-- which handles both constructors.)
scale :: SqlType -> Maybe Scale
scale (Numeric _ s) = Just s
scale (Decimal _ s) = Just s
scale BigInt = Just 0
scale Int = Just 0
scale SmallInt = Just 0
scale TinyInt = Just 0
scale Money = Just 4
scale SmallMoney = Just 4
scale Date = Just 0
scale DateTime = Just 3
scale SmallDateTime = Just 0
scale (DateTimeOffset s) = Just s
scale (DateTime2 s) = Just s
scale (Time s) = Just s
scale _ = Nothing
-- Storage bytes for numeric(p,s) and decimal(p,s), bracketed by
-- precision.  Valid for numeric(p,s) and decimal(p,s).
lengthFromPrecision :: Precision -> Int
lengthFromPrecision p
    | p <= 9 = 5
    | p <= 19 = 9
    | p <= 28 = 13
    | otherwise = 17
-- | Extra storage bytes contributed by a fractional-seconds scale.
lengthFromTimeScale :: Scale -> Int
lengthFromTimeScale s
    | s <= 2 = 0
    | s <= 4 = 1
    | otherwise = 2
-- | Storage length of a value of the given type, in bytes.
--
-- Large-object and streaming types ('Text', 'NText', 'Image', 'Xml',
-- 'Table', 'Geometry', 'Geography') have no fixed upper bound and
-- report 'Unlimited'.
len :: SqlType -> Len
len Bit = Fixed 1
len (Numeric p _) = Fixed (lengthFromPrecision p)
len (Decimal p _) = Fixed (lengthFromPrecision p)
-- Fix: money and smallmoney are fixed-size types (8 and 4 bytes);
-- previously they fell through to the Unlimited catch-all, which is
-- inconsistent with how 'precision' and 'scale' treat them.
len Money = Fixed 8
len SmallMoney = Fixed 4
len (Char n) = Fixed n
len (VarChar (-1)) = Unlimited      -- varchar(max)
len (VarChar n) = Varying 0 n
len (NChar n) = Fixed (n * 2)       -- two bytes per character
len (NVarChar (-1)) = Unlimited     -- nvarchar(max)
len (NVarChar n) = Varying 0 (n * 2)
len (Binary n) = Fixed n
len (VarBinary (-1)) = Unlimited    -- varbinary(max)
len (VarBinary n) = Varying 0 n
len (Float bits)
    | bits <= 24 = Fixed 4
    | otherwise = Fixed 8
len BigInt = Fixed 8
len Int = Fixed 4
len SmallInt = Fixed 2
len TinyInt = Fixed 1
len Date = Fixed 3
len DateTime = Fixed 8
len SmallDateTime = Fixed 4
len (DateTimeOffset s) = Fixed (8 + lengthFromTimeScale s)
len (DateTime2 s) = Fixed (6 + lengthFromTimeScale s)
len (Time s) = Fixed (3 + lengthFromTimeScale s)
len Cursor = Fixed 0
len TimeStamp = Fixed 8
len HierarchyId = Varying 1 892
len UniqueIdentifier = Fixed 16
len SqlVariant = Varying 16 8016
len Xml = Unlimited
len Table = Unlimited
len Geometry = Unlimited
len Geography = Unlimited
len _ = Unlimited                   -- Text, NText, Image
|
asztal/transact-sql
|
Language/TransactSql/Types.hs
|
bsd-3-clause
| 3,267
| 0
| 9
| 754
| 1,371
| 689
| 682
| 117
| 1
|
-- Copyright © 2012 Frank S. Thomas <frank@timepit.eu>
-- All rights reserved.
--
-- Use of this source code is governed by a BSD-style license that
-- can be found in the LICENSE file.
-- | Ohloh API Reference: <http://meta.ohloh.net/referencekudo_score/>
module Web.Ohloh.KudoScore (
KudoScore(..),
xpKudoScore
) where
import Text.XML.HXT.Arrow.Pickle
import Web.Ohloh.Common
-- | A KudoRank score entry as returned by the Ohloh API.
data KudoScore = KudoScore {
    ksCreatedAt :: Maybe String,  -- ^ optional @\<created_at\>@ text
    ksKudoRank :: Int,            -- ^ @\<kudo_rank\>@
    ksPosition :: Int,            -- ^ @\<position\>@
    ksMaxPosition :: Maybe Int,   -- ^ optional @\<max_position\>@
    ksPositionDelta :: Maybe Int  -- ^ optional @\<position_delta\>@
} deriving (Eq, Read, Show)
-- | XML (un)pickling goes through 'xpKudoScore'.
instance XmlPickler KudoScore where
    xpickle = xpKudoScore

instance ReadXmlString KudoScore
instance ShowXmlString KudoScore
-- | Pickler for the @\<kudo_score\>@ element: wraps its five child
-- elements into a 'KudoScore' and back.
xpKudoScore :: PU KudoScore
xpKudoScore =
    xpElem "kudo_score" $
    xpWrap (uncurry5 KudoScore,
            \(KudoScore ca kr p mp pd) ->
            (ca, kr, p, mp, pd)) $
    xp5Tuple (xpOption (xpElem "created_at" xpText0))
             (xpElem "kudo_rank" xpInt)
             (xpElem "position" xpInt)
             (xpOption (xpElem "max_position" xpInt))
             (xpOption (xpElem "position_delta" xpInt))
|
fthomas/ohloh-hs
|
Web/Ohloh/KudoScore.hs
|
bsd-3-clause
| 1,140
| 0
| 11
| 260
| 268
| 150
| 118
| 27
| 1
|
{-# language MultiParamTypeClasses#-}
{-# language ScopedTypeVariables#-}
{-# language ViewPatterns#-}
{-# language GeneralizedNewtypeDeriving#-}
{-# language TypeSynonymInstances#-}
{-# language FlexibleInstances#-}
{-# language DataKinds#-}
{-# language GADTs#-}
{-# language FlexibleContexts#-}
{-# language UndecidableInstances #-}
{-|
Module : HDynTs.EulerTours.Core
Description : Core functionality of Euler Tours
Copyright : (c) Paolo Veronelli, 2016
License : BSD
Maintainer : paolo.veronelli@gmail.com
Stability : experimental
= Core functionalities for using Euler Tours.
== Intro
Euler tours algorithm is developed in two modules.
This and "HDynTs.EulerTours.Forest".
The separation is possible as each tour is expressing a full tree. This module
is about one tour or one tree.
The 'Tour' values support two core functionalities, 'splice' and 'extract', the
primitive operations for link and cut at a higher level.
To support sublinear operations we are using a couple of 'FingerTree'
to hold the tour and its reverse and the 'Monoid' is a composition of
@'Set' a@ and @'Sum' Int@.
The Set monoid let us split by name, while the Sum let us split by position.
== Use
You can create a Tour by conversion from a 'Tree' with 'fromTree' which is safe
as they are isomorphic, or you can use 'fromList' ('unsafeFromList') but
obviously you have to feed a correct tour.
>>> Just tl = fromList "abacdcaefgfhfea"
>>> tt = fromTree $ Node 'y' [Node 'x' [],Node 'z' []]
You can go back with 'toList' from 'Data.Foldable' and 'toTree'.
'splice' and 'extract' are kind of inverse each other so
>>> let (t1,t2) = extract 'y' (splice tt 'd' tl) in t1 == tt && t2 == tl
is evaluating to 'True'. This is possible as the Eq instance for Tour is taking
care of reordering children.
== Unsafeness
This module is mostly unsafe: it crashes via partial functions or, worse,
functions have undefined behavior when fed an element that is not
present in the tour.
If you need to check the presence of an element, check membership with
'measure' on the 'Tour'.
Use 'HDynTs.Interface' with the instances in 'HDynTs.EulerTours.Forest'
for a safe access to a forest of tours.
>>> x `member` measure t
-}
module HDynTs.EulerTours.Core (
-- * types
Tour ,
-- * operation
splice,
extract,
reroot,
-- * query
path,
father,
-- * conversion
fromTree,
toTree,
unsafeFromList,
fromList,
toList,
-- * debug
valid,
-- * re-exports
(<>),
Tree(..)
)
where
import Data.Set (Set, member,singleton)
import qualified Data.Set as S
import Data.Monoid (Sum (Sum), (<>))
import Data.Foldable (toList, Foldable)
import Data.FingerTree (FingerTree, split, measure, Measured, viewl,viewr,
(<|) , (|>), ViewL ((:<),EmptyL), ViewR ((:>), EmptyR), empty )
import Data.Tree (Tree(Node))
import Data.Maybe (fromJust)
import Control.Monad (guard)
import HDynTs.Lib.Tree (SortedTree (..))
import HDynTs.Lib.Tree (insertC,focus,up, tree,mkZ)
-- | A single occurrence of a vertex inside the tour.
newtype TourElem a = TourElem a deriving (Show,Ord,Eq)

-- | Monoid annotating the fingertree: the set of vertices in a
-- subtree paired with the subtree's element count.
newtype TourMonoid a = TourMonoid (Set a,Sum Int) deriving (Monoid,Show)
-- Predicate: does the measured subtree contain the element?
tmMember :: Ord a => a -> TourMonoid a -> Bool
tmMember x (TourMonoid (v,_)) = x `member` v

-- Number of elements measured; used as a positional index when
-- splitting the fingertree.
tmPosition :: TourMonoid a -> Int
tmPosition (TourMonoid (_,Sum s)) = s

-- The set of elements measured by the monoid.
tmSet :: TourMonoid a -> Set a
tmSet (TourMonoid (x,_)) = x

-- | Each element measures as itself (a singleton set) with count 1.
instance Ord a => Measured (TourMonoid a) (TourElem a) where
    measure (TourElem x) = TourMonoid (singleton x, 1)
-- Fingertree of tour elements annotated with the tour monoid.
type STour a = FingerTree (TourMonoid a) (TourElem a)

-- | Euler tour representation: the forward tour paired with its
-- reverse, so both directions can be split efficiently.
data Tour a = Tour (STour a) (STour a) deriving (Show)

-- | A tour measures as the set of vertices it contains.
instance Ord a => Measured (Set a) (Tour a) where
    measure (Tour o _) = tmSet . measure $ o

-- | Folds over the forward tour, unwrapping the elements.
instance Foldable Tour where
    foldr f x (Tour o _) = foldr f x $ map (\(TourElem x) -> x) $ toList o
-- | Two tours are equal when they represent the same tree up to
-- reordering of children: the right tour is rerooted at the left
-- tour's root, then both are compared as 'SortedTree's.
instance Ord a => Eq (Tour a) where
    x@(Tour (viewl -> TourElem h :< _) _) == y =
        SortedTree (toTree x) == SortedTree (toTree $ reroot h y)
    x@(Tour (viewl -> EmptyL) _) == y@(Tour (viewl -> EmptyL) _) = True
    _ == _ = False
-- | Extract a valid monoid from a tour (the measure of the forward
-- sequence).
tourMonoid :: Ord a => Tour a -> TourMonoid a
tourMonoid (Tour x _) = measure x

-- | Tours append pointwise: the forward parts in order, the reverse
-- parts flipped.
instance Ord a => Monoid (Tour a) where
    Tour o r `mappend` Tour o' r' = Tour (o `mappend` o') (r' `mappend` r)
    mempty = Tour mempty mempty
-- | Insert a tour into another at the specified vertex.  The inserted
-- tour is bracketed with an extra occurrence of the insertion vertex;
-- the reverse tour is split at the mirrored position to stay in sync.
splice :: Ord a
    => Tour a -- ^ tour to insert
    -> a -- ^ insertion element
    -> Tour a -- ^ accepting tour
    -> Tour a -- ^ resulting tour
splice (Tour ot rt) c (Tour o r) = let
    -- split the forward tour just before the first occurrence of c
    (o1,o2@(viewl -> wc :< _)) = split (tmMember c) o
    -- split the reverse tour at the mirrored position
    (r1,r2) = split (flip (>) (tmPosition (measure o2)) . tmPosition) r
    in Tour (o1 <> (wc <| ot) <> o2) (r1 <> (rt |> wc) <> r2)
{-| Find the father of a vertex in a tour: the element immediately
before the vertex's first occurrence in the forward tour, or 'Nothing'
when the vertex is the root.
-}
father :: Ord a
    => a -- ^ child
    -> Tour a -- ^ tour containing the child
    -> Maybe a -- ^ possibly the father
father x (Tour o _) = case viewr . fst $ split (tmMember x) o of
    _ :> TourElem y -> Just y
    EmptyR -> Nothing
-- | Check validity of internal data: the reverse tour must be exactly
-- the forward tour read backwards.
valid :: Ord a => Tour a -> Bool
valid (Tour (viewl -> x :< xs) (viewr -> ys :> y))
    | x == y = valid (Tour xs ys)
    | otherwise = False
valid (Tour (viewl -> EmptyL) (viewr -> EmptyR)) = True
valid (Tour _ _) = False
-- | Extract the subtour delimited by a vertex, returning it together
-- with the orphaned remainder.  Unsafe: the vertex must occur in the
-- tour and must not be its root.
extract :: Ord a
    => a -- ^ delimiting vertex
    -> Tour a -- ^ tour containing the vertex
    -> (Tour a, Tour a) -- ^ subtour and orphaned tour
extract c (Tour o r) = let
    -- split both directions just before the first occurrence of c
    (o1@(viewr -> o1' :> _),o2) = split (tmMember c) o
    (r1@(viewr -> r1' :> _),r2) = split (tmMember c) r
    -- length of the subtour rooted at c
    l = (tmPosition (measure r2) - tmPosition (measure o1))
    (o21,o22) = split ((> l) . tmPosition) o2
    (r21,r22) = split ((> l) . tmPosition) r2
    in (Tour o21 r21, Tour (o1' <> o22) (r1' <> r22))
-- | Rotate a tour so the given vertex becomes the root.  Unsafe: the
-- vertex must occur in the tour.
reroot :: Ord a
    => a -- ^ new root
    -> Tour a -- ^ old rooted tour
    -> Tour a -- ^ new rooted tour
reroot x e@(Tour o@(viewl -> TourElem x' :< _) r)
    | x == x' = e
    | otherwise = let
        -- rotate the forward tour at x and mirror the cut in reverse
        (o1,viewr -> o2 :> _) = split (tmMember x) o
        (viewl -> _ :< r1, r2) = split
            (flip (>) (tmPosition (measure o2) + 1) . tmPosition) r
        in Tour ((o2 <> o1) |> TourElem x) (TourElem x <| (r2 <> r1))
-- | Create a tour representing a given tree.  Safe: trees and tours
-- are isomorphic.
fromTree :: Ord a
    => Tree a -- ^ given tree
    -> Tour a -- ^ corresponding tour
fromTree (Node x ts) = g . mconcat $ map f ts where
    -- each child's tour is bracketed by a visit to the root x
    f t = let Tour o r = fromTree t in
        Tour (TourElem x <| o) (r |> TourElem x)
    -- close the whole tour with the root itself
    g (Tour o r) = Tour (o |> TourElem x) (TourElem x <| r)
-- | Reify a tour into the corresponding tree.  Unsafe: the tour must
-- be well formed.
toTree :: Ord a
    => Tour a -- ^ abstract tour
    -> Tree a -- ^ corresponding tree
toTree (Tour (viewl -> TourElem x :< xs) _) = tree $ fromSTour (mkZ x) xs where
    fromSTour z (viewl -> EmptyL) = z
    -- revisiting the father closes the current child; any other
    -- element opens a new child
    fromSTour z (viewl -> TourElem x :< xs) = case focus <$> up z of
        Just ((==) x -> True) -> fromSTour (fromJust $ up z) xs
        _ -> fromSTour (insertC x z) xs
-- Check the Euler-tour invariants on a list of vertices: every child
-- interval that is opened is closed exactly once, and nothing is left
-- open at the end.
isEuler :: Ord a => [a] -> Bool
isEuler = isEuler' mempty mempty . (zip <*> tail) where
    isEuler' fs _ []
        | S.null fs = True -- children closed at the end
        | otherwise = False
    isEuler' fs gs ((x,x'):xs)
        | x == x' = False -- must change
        | x' `member` gs = False -- reopening a children
        | (x',x) `member` fs = isEuler' (S.delete (x',x) fs) (S.insert x gs) xs
            -- closing a children
        | otherwise = isEuler' (S.insert (x,x') fs) gs xs --opening a children
-- | Safely create a Tour from a list, checking that the list is a
-- correct Euler tour.
fromList :: Ord a => [a] -> Maybe (Tour a)
fromList xs = guard (isEuler xs) >> return (unsafeFromList xs)
-- | Build a tour from a list with no checks on the tour being valid.
unsafeFromList :: Ord a => [a] -> Tour a
unsafeFromList = foldr push (Tour empty empty)
  where
    -- prepend to the forward tour, append to the reverse tour
    push e (Tour o r) = Tour (TourElem e <| o) (r |> TourElem e)
-- | Compute the path between two elements of the tour, rerooting the
-- tour at the first element and walking up from the second.
path :: Ord a => a -> a -> Tour a -> [a]
path x y (reroot x -> Tour o r) = let
    -- collect the chain of fathers from y back to the new root
    collect f (viewr -> EmptyR) = f y
    collect f (viewr -> rs:> TourElem h) = collect ((h:) . f) z where
        (z,_) = split (tmMember h) rs
    (o1,_) = split (tmMember y) o
    in collect return o1
|
paolino/HDynTs
|
src/HDynTs/EulerTours/Core.hs
|
bsd-3-clause
| 8,919
| 0
| 18
| 2,289
| 2,702
| 1,427
| 1,275
| 145
| 3
|
{-# LANGUAGE DeriveFunctor, GeneralizedNewtypeDeriving #-}
module General.ListBuilder(
ListBuilder, runListBuilder, newListBuilder,
Tree(..), flattenTree, unflattenTree
) where
import Data.Semigroup
import Prelude
-- | A builder for lists with cheap append: elements are stored in a
-- 'Tree' and flattened once at the end.  ListBuilder is opaque
-- outside this module.
newtype ListBuilder a = ListBuilder (Tree a)
    deriving (Semigroup, Monoid, Functor)
-- | An append tree: 'Empty' is the identity, 'Leaf' a single element,
-- 'Branch' the concatenation of two subtrees.
data Tree a
    = Empty
    | Leaf a
    | Branch (Tree a) (Tree a)
    deriving (Functor,Eq,Ord,Show)
-- | Associative append; 'Empty' is eliminated on either side so that
-- it is a genuine identity for '<>'.
instance Semigroup (Tree a) where
    Empty <> x = x
    x <> Empty = x
    x <> y = Branch x y

-- | 'Empty' is the unit of '<>'.
instance Monoid (Tree a) where
    mempty = Empty
    mappend = (<>)
-- | Left-to-right list of the leaves, built with an accumulator so
-- the traversal is linear (no left-nested '++').
flattenTree :: Tree a -> [a]
flattenTree t = go t []
  where
    go Empty        rest = rest
    go (Leaf v)     rest = v : rest
    go (Branch l r) rest = go l (go r rest)
-- | Rebuild a tree with the shape of the first argument but the leaf
-- values taken, in order, from the list.
--
-- NOTE(review): partial — if the list has fewer elements than the
-- tree has leaves, the 'Leaf' case fails to match.  Callers are
-- expected to pass a list derived from the same shape, e.g. via
-- 'flattenTree'.
unflattenTree :: Tree a -> [b] -> Tree b
unflattenTree t xs = fst $ f t xs
  where
    f Empty xs = (Empty, xs)
    f Leaf{} (x:xs) = (Leaf x, xs)
    f (Branch a b) xs = (Branch a2 b2, xs3)
        where (a2, xs2) = f a xs
              (b2, xs3) = f b xs2
-- | A builder holding exactly one element.
newListBuilder :: a -> ListBuilder a
newListBuilder = ListBuilder . Leaf

-- | Flatten a builder to the list of its elements in append order.
runListBuilder :: ListBuilder a -> [a]
runListBuilder (ListBuilder x) = flattenTree x
|
ndmitchell/shake
|
src/General/ListBuilder.hs
|
bsd-3-clause
| 1,243
| 0
| 9
| 353
| 499
| 264
| 235
| 36
| 3
|
{-# LANGUAGE DeriveDataTypeable #-}
-- |
-- Module : Crypto.PubKey.ECC.Types
-- License : BSD-style
-- Maintainer : Vincent Hanquez <vincent@snarc.org>
-- Stability : Experimental
-- Portability : Excellent
--
-- References:
-- <https://tools.ietf.org/html/rfc5915>
--
module Crypto.PubKey.ECC.Types
( Curve(..)
, Point(..)
, PublicPoint
, PrivateNumber
, CurveBinary(..)
, CurvePrime(..)
, common_curve
, curveSizeBits
, ecc_fx
, ecc_p
, CurveCommon(..)
-- * Recommended curves definition
, CurveName(..)
, getCurveByName
) where
import Data.Data
import Crypto.Internal.Imports
import Crypto.Number.Basic (numBits)
-- | An elliptic curve: either a binary-field curve or a prime-field
-- curve.
data Curve = CurveF2m CurveBinary -- ^ 𝔽(2^m)
           | CurveFP CurvePrime -- ^ 𝔽p
    deriving (Show,Read,Eq,Data,Typeable)
-- | ECC Public Point
type PublicPoint = Point

-- | ECC Private Number
type PrivateNumber = Integer

-- | A point on a curve, in affine coordinates.
data Point = Point Integer Integer
           | PointO -- ^ Point at Infinity
    deriving (Show,Read,Eq,Data,Typeable)
-- | Force both coordinates; 'PointO' carries nothing to force.
instance NFData Point where
    rnf (Point x y) = x `seq` y `seq` ()
    rnf PointO = ()
-- | An elliptic curve in 𝔽(2^m).  The first parameter is the integer
-- representation of the irreducible polynomial f(x).
data CurveBinary = CurveBinary Integer CurveCommon
    deriving (Show,Read,Eq,Data,Typeable)

-- | Force the polynomial and the common parameters.
instance NFData CurveBinary where
    rnf (CurveBinary i cc) = i `seq` cc `seq` ()
-- | An elliptic curve in 𝔽p.  The first parameter is the prime
-- number p.
data CurvePrime = CurvePrime Integer CurveCommon
    deriving (Show,Read,Eq,Data,Typeable)
-- | Parameters in common between binary and prime curves.
common_curve :: Curve -> CurveCommon
common_curve (CurveF2m (CurveBinary _ cc)) = cc
common_curve (CurveFP (CurvePrime _ cc)) = cc
-- | Irreducible polynomial representing the characteristic of a
-- 'CurveBinary'.
ecc_fx :: CurveBinary -> Integer
ecc_fx (CurveBinary fx _) = fx

-- | Prime number representing the characteristic of a 'CurvePrime'.
ecc_p :: CurvePrime -> Integer
ecc_p (CurvePrime p _) = p
-- | Common parameters in a curve definition of the form:
-- y^2 = x^3 + ax + b.
data CurveCommon = CurveCommon
    { ecc_a :: Integer -- ^ curve parameter a
    , ecc_b :: Integer -- ^ curve parameter b
    , ecc_g :: Point   -- ^ base point
    , ecc_n :: Integer -- ^ order of G
    , ecc_h :: Integer -- ^ cofactor
    } deriving (Show,Read,Eq,Data,Typeable)
-- | Names for known recommended curves (SEC naming scheme: @p@/@t@
-- for prime/binary field, then the field size in bits).
data CurveName =
      SEC_p112r1
    | SEC_p112r2
    | SEC_p128r1
    | SEC_p128r2
    | SEC_p160k1
    | SEC_p160r1
    | SEC_p160r2
    | SEC_p192k1
    | SEC_p192r1 -- aka prime192v1
    | SEC_p224k1
    | SEC_p224r1
    | SEC_p256k1
    | SEC_p256r1 -- aka prime256v1
    | SEC_p384r1
    | SEC_p521r1
    | SEC_t113r1
    | SEC_t113r2
    | SEC_t131r1
    | SEC_t131r2
    | SEC_t163k1
    | SEC_t163r1
    | SEC_t163r2
    | SEC_t193r1
    | SEC_t193r2
    | SEC_t233k1 -- aka NIST K-233
    | SEC_t233r1
    | SEC_t239k1
    | SEC_t283k1
    | SEC_t283r1
    | SEC_t409k1
    | SEC_t409r1
    | SEC_t571k1
    | SEC_t571r1
    deriving (Show,Read,Eq,Ord,Enum,Bounded,Data,Typeable)
{-
curvesOIDs :: [ (CurveName, [Integer]) ]
curvesOIDs =
[ (SEC_p112r1, [1,3,132,0,6])
, (SEC_p112r2, [1,3,132,0,7])
, (SEC_p128r1, [1,3,132,0,28])
, (SEC_p128r2, [1,3,132,0,29])
, (SEC_p160k1, [1,3,132,0,9])
, (SEC_p160r1, [1,3,132,0,8])
, (SEC_p160r2, [1,3,132,0,30])
, (SEC_p192k1, [1,3,132,0,31])
, (SEC_p192r1, [1,2,840,10045,3,1,1])
, (SEC_p224k1, [1,3,132,0,32])
, (SEC_p224r1, [1,3,132,0,33])
, (SEC_p256k1, [1,3,132,0,10])
, (SEC_p256r1, [1,2,840,10045,3,1,7])
, (SEC_p384r1, [1,3,132,0,34])
, (SEC_p521r1, [1,3,132,0,35])
, (SEC_t113r1, [1,3,132,0,4])
, (SEC_t113r2, [1,3,132,0,5])
, (SEC_t131r1, [1,3,132,0,22])
, (SEC_t131r2, [1,3,132,0,23])
, (SEC_t163k1, [1,3,132,0,1])
, (SEC_t163r1, [1,3,132,0,2])
, (SEC_t163r2, [1,3,132,0,15])
, (SEC_t193r1, [1,3,132,0,24])
, (SEC_t193r2, [1,3,132,0,25])
, (SEC_t233k1, [1,3,132,0,26])
, (SEC_t233r1, [1,3,132,0,27])
, (SEC_t239k1, [1,3,132,0,3])
, (SEC_t283k1, [1,3,132,0,16])
, (SEC_t283r1, [1,3,132,0,17])
, (SEC_t409k1, [1,3,132,0,36])
, (SEC_t409r1, [1,3,132,0,37])
, (SEC_t571k1, [1,3,132,0,38])
, (SEC_t571r1, [1,3,132,0,39])
]
-}
-- | Size of the curve in bits: the bit length of p for prime curves,
-- or the degree of f(x) for binary curves.
curveSizeBits :: Curve -> Int
curveSizeBits (CurveFP c) = numBits (ecc_p c)
curveSizeBits (CurveF2m c) = numBits (ecc_fx c) - 1
-- | Get the curve definition associated with a recommended known
-- curve name.  Each equation returns the published domain parameters
-- of the named curve (names follow the SEC 2 recommendations).
getCurveByName :: CurveName -> Curve
getCurveByName SEC_p112r1 = CurveFP $ CurvePrime
    0xdb7c2abf62e35e668076bead208b
    (CurveCommon
        { ecc_a = 0xdb7c2abf62e35e668076bead2088
        , ecc_b = 0x659ef8ba043916eede8911702b22
        , ecc_g = Point 0x09487239995a5ee76b55f9c2f098
                        0xa89ce5af8724c0a23e0e0ff77500
        , ecc_n = 0xdb7c2abf62e35e7628dfac6561c5
        , ecc_h = 1
        })
getCurveByName SEC_p112r2 = CurveFP $ CurvePrime
    0xdb7c2abf62e35e668076bead208b
    (CurveCommon
        { ecc_a = 0x6127c24c05f38a0aaaf65c0ef02c
        , ecc_b = 0x51def1815db5ed74fcc34c85d709
        , ecc_g = Point 0x4ba30ab5e892b4e1649dd0928643
                        0xadcd46f5882e3747def36e956e97
        , ecc_n = 0x36df0aafd8b8d7597ca10520d04b
        , ecc_h = 4
        })
getCurveByName SEC_p128r1 = CurveFP $ CurvePrime
    0xfffffffdffffffffffffffffffffffff
    (CurveCommon
        { ecc_a = 0xfffffffdfffffffffffffffffffffffc
        , ecc_b = 0xe87579c11079f43dd824993c2cee5ed3
        , ecc_g = Point 0x161ff7528b899b2d0c28607ca52c5b86
                        0xcf5ac8395bafeb13c02da292dded7a83
        , ecc_n = 0xfffffffe0000000075a30d1b9038a115
        , ecc_h = 1
        })
getCurveByName SEC_p128r2 = CurveFP $ CurvePrime
    0xfffffffdffffffffffffffffffffffff
    (CurveCommon
        { ecc_a = 0xd6031998d1b3bbfebf59cc9bbff9aee1
        , ecc_b = 0x5eeefca380d02919dc2c6558bb6d8a5d
        , ecc_g = Point 0x7b6aa5d85e572983e6fb32a7cdebc140
                        0x27b6916a894d3aee7106fe805fc34b44
        , ecc_n = 0x3fffffff7fffffffbe0024720613b5a3
        , ecc_h = 4
        })
getCurveByName SEC_p160k1 = CurveFP $ CurvePrime
    0x00fffffffffffffffffffffffffffffffeffffac73
    (CurveCommon
        { ecc_a = 0x000000000000000000000000000000000000000000
        , ecc_b = 0x000000000000000000000000000000000000000007
        , ecc_g = Point 0x003b4c382ce37aa192a4019e763036f4f5dd4d7ebb
                        0x00938cf935318fdced6bc28286531733c3f03c4fee
        , ecc_n = 0x0100000000000000000001b8fa16dfab9aca16b6b3
        , ecc_h = 1
        })
getCurveByName SEC_p160r1 = CurveFP $ CurvePrime
    0x00ffffffffffffffffffffffffffffffff7fffffff
    (CurveCommon
        { ecc_a = 0x00ffffffffffffffffffffffffffffffff7ffffffc
        , ecc_b = 0x001c97befc54bd7a8b65acf89f81d4d4adc565fa45
        , ecc_g = Point 0x004a96b5688ef573284664698968c38bb913cbfc82
                        0x0023a628553168947d59dcc912042351377ac5fb32
        , ecc_n = 0x0100000000000000000001f4c8f927aed3ca752257
        , ecc_h = 1
        })
getCurveByName SEC_p160r2 = CurveFP $ CurvePrime
    0x00fffffffffffffffffffffffffffffffeffffac73
    (CurveCommon
        { ecc_a = 0x00fffffffffffffffffffffffffffffffeffffac70
        , ecc_b = 0x00b4e134d3fb59eb8bab57274904664d5af50388ba
        , ecc_g = Point 0x0052dcb034293a117e1f4ff11b30f7199d3144ce6d
                        0x00feaffef2e331f296e071fa0df9982cfea7d43f2e
        , ecc_n = 0x0100000000000000000000351ee786a818f3a1a16b
        , ecc_h = 1
        })
getCurveByName SEC_p192k1 = CurveFP $ CurvePrime
    0xfffffffffffffffffffffffffffffffffffffffeffffee37
    (CurveCommon
        { ecc_a = 0x000000000000000000000000000000000000000000000000
        , ecc_b = 0x000000000000000000000000000000000000000000000003
        , ecc_g = Point 0xdb4ff10ec057e9ae26b07d0280b7f4341da5d1b1eae06c7d
                        0x9b2f2f6d9c5628a7844163d015be86344082aa88d95e2f9d
        , ecc_n = 0xfffffffffffffffffffffffe26f2fc170f69466a74defd8d
        , ecc_h = 1
        })
getCurveByName SEC_p192r1 = CurveFP $ CurvePrime
    0xfffffffffffffffffffffffffffffffeffffffffffffffff
    (CurveCommon
        { ecc_a = 0xfffffffffffffffffffffffffffffffefffffffffffffffc
        , ecc_b = 0x64210519e59c80e70fa7e9ab72243049feb8deecc146b9b1
        , ecc_g = Point 0x188da80eb03090f67cbf20eb43a18800f4ff0afd82ff1012
                        0x07192b95ffc8da78631011ed6b24cdd573f977a11e794811
        , ecc_n = 0xffffffffffffffffffffffff99def836146bc9b1b4d22831
        , ecc_h = 1
        })
getCurveByName SEC_p224k1 = CurveFP $ CurvePrime
    0x00fffffffffffffffffffffffffffffffffffffffffffffffeffffe56d
    (CurveCommon
        { ecc_a = 0x0000000000000000000000000000000000000000000000000000000000
        , ecc_b = 0x0000000000000000000000000000000000000000000000000000000005
        , ecc_g = Point 0x00a1455b334df099df30fc28a169a467e9e47075a90f7e650eb6b7a45c
                        0x007e089fed7fba344282cafbd6f7e319f7c0b0bd59e2ca4bdb556d61a5
        , ecc_n = 0x010000000000000000000000000001dce8d2ec6184caf0a971769fb1f7
        , ecc_h = 1
        })
getCurveByName SEC_p224r1 = CurveFP $ CurvePrime
    0xffffffffffffffffffffffffffffffff000000000000000000000001
    (CurveCommon
        { ecc_a = 0xfffffffffffffffffffffffffffffffefffffffffffffffffffffffe
        , ecc_b = 0xb4050a850c04b3abf54132565044b0b7d7bfd8ba270b39432355ffb4
        , ecc_g = Point 0xb70e0cbd6bb4bf7f321390b94a03c1d356c21122343280d6115c1d21
                        0xbd376388b5f723fb4c22dfe6cd4375a05a07476444d5819985007e34
        , ecc_n = 0xffffffffffffffffffffffffffff16a2e0b8f03e13dd29455c5c2a3d
        , ecc_h = 1
        })
getCurveByName SEC_p256k1 = CurveFP $ CurvePrime
    0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f
    (CurveCommon
        { ecc_a = 0x0000000000000000000000000000000000000000000000000000000000000000
        , ecc_b = 0x0000000000000000000000000000000000000000000000000000000000000007
        , ecc_g = Point 0x79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798
                        0x483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8
        , ecc_n = 0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141
        , ecc_h = 1
        })
getCurveByName SEC_p256r1 = CurveFP $ CurvePrime
    0xffffffff00000001000000000000000000000000ffffffffffffffffffffffff
    (CurveCommon
        { ecc_a = 0xffffffff00000001000000000000000000000000fffffffffffffffffffffffc
        , ecc_b = 0x5ac635d8aa3a93e7b3ebbd55769886bc651d06b0cc53b0f63bce3c3e27d2604b
        , ecc_g = Point 0x6b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c296
                        0x4fe342e2fe1a7f9b8ee7eb4a7c0f9e162bce33576b315ececbb6406837bf51f5
        , ecc_n = 0xffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632551
        , ecc_h = 1
        })
getCurveByName SEC_p384r1 = CurveFP $ CurvePrime
    0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffeffffffff0000000000000000ffffffff
    (CurveCommon
        { ecc_a = 0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffeffffffff0000000000000000fffffffc
        , ecc_b = 0xb3312fa7e23ee7e4988e056be3f82d19181d9c6efe8141120314088f5013875ac656398d8a2ed19d2a85c8edd3ec2aef
        , ecc_g = Point 0xaa87ca22be8b05378eb1c71ef320ad746e1d3b628ba79b9859f741e082542a385502f25dbf55296c3a545e3872760ab7
                        0x3617de4a96262c6f5d9e98bf9292dc29f8f41dbd289a147ce9da3113b5f0b8c00a60b1ce1d7e819d7a431d7c90ea0e5f
        , ecc_n = 0xffffffffffffffffffffffffffffffffffffffffffffffffc7634d81f4372ddf581a0db248b0a77aecec196accc52973
        , ecc_h = 1
        })
getCurveByName SEC_p521r1 = CurveFP $ CurvePrime
    0x01ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
    (CurveCommon
        { ecc_a = 0x01fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc
        , ecc_b = 0x0051953eb9618e1c9a1f929a21a0b68540eea2da725b99b315f3b8b489918ef109e156193951ec7e937b1652c0bd3bb1bf073573df883d2c34f1ef451fd46b503f00
        , ecc_g = Point 0x00c6858e06b70404e9cd9e3ecb662395b4429c648139053fb521f828af606b4d3dbaa14b5e77efe75928fe1dc127a2ffa8de3348b3c1856a429bf97e7e31c2e5bd66
                        0x011839296a789a3bc0045c8a5fb42c7d1bd998f54449579b446817afbd17273e662c97ee72995ef42640c550b9013fad0761353c7086a272c24088be94769fd16650
        , ecc_n = 0x01fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffa51868783bf2f966b7fcc0148f709a5d03bb5c9b8899c47aebb6fb71e91386409
        , ecc_h = 1
        })
getCurveByName SEC_t113r1 = CurveF2m $ CurveBinary
    0x020000000000000000000000000201
    (CurveCommon
        { ecc_a = 0x003088250ca6e7c7fe649ce85820f7
        , ecc_b = 0x00e8bee4d3e2260744188be0e9c723
        , ecc_g = Point 0x009d73616f35f4ab1407d73562c10f
                        0x00a52830277958ee84d1315ed31886
        , ecc_n = 0x0100000000000000d9ccec8a39e56f
        , ecc_h = 2
        })
getCurveByName SEC_t113r2 = CurveF2m $ CurveBinary
    0x020000000000000000000000000201
    (CurveCommon
        { ecc_a = 0x00689918dbec7e5a0dd6dfc0aa55c7
        , ecc_b = 0x0095e9a9ec9b297bd4bf36e059184f
        , ecc_g = Point 0x01a57a6a7b26ca5ef52fcdb8164797
                        0x00b3adc94ed1fe674c06e695baba1d
        , ecc_n = 0x010000000000000108789b2496af93
        , ecc_h = 2
        })
getCurveByName SEC_t131r1 = CurveF2m $ CurveBinary
    0x080000000000000000000000000000010d
    (CurveCommon
        { ecc_a = 0x07a11b09a76b562144418ff3ff8c2570b8
        , ecc_b = 0x0217c05610884b63b9c6c7291678f9d341
        , ecc_g = Point 0x0081baf91fdf9833c40f9c181343638399
                        0x078c6e7ea38c001f73c8134b1b4ef9e150
        , ecc_n = 0x0400000000000000023123953a9464b54d
        , ecc_h = 2
        })
getCurveByName SEC_t131r2 = CurveF2m $ CurveBinary
    0x080000000000000000000000000000010d
    (CurveCommon
        { ecc_a = 0x03e5a88919d7cafcbf415f07c2176573b2
        , ecc_b = 0x04b8266a46c55657ac734ce38f018f2192
        , ecc_g = Point 0x0356dcd8f2f95031ad652d23951bb366a8
                        0x0648f06d867940a5366d9e265de9eb240f
        , ecc_n = 0x0400000000000000016954a233049ba98f
        , ecc_h = 2
        })
getCurveByName SEC_t163k1 = CurveF2m $ CurveBinary
    0x0800000000000000000000000000000000000000c9
    (CurveCommon
        { ecc_a = 0x000000000000000000000000000000000000000001
        , ecc_b = 0x000000000000000000000000000000000000000001
        , ecc_g = Point 0x02fe13c0537bbc11acaa07d793de4e6d5e5c94eee8
                        0x0289070fb05d38ff58321f2e800536d538ccdaa3d9
        , ecc_n = 0x04000000000000000000020108a2e0cc0d99f8a5ef
        , ecc_h = 2
        })
getCurveByName SEC_t163r1 = CurveF2m $ CurveBinary
    0x0800000000000000000000000000000000000000c9
    (CurveCommon
        { ecc_a = 0x07b6882caaefa84f9554ff8428bd88e246d2782ae2
        , ecc_b = 0x0713612dcddcb40aab946bda29ca91f73af958afd9
        , ecc_g = Point 0x0369979697ab43897789566789567f787a7876a654
                        0x00435edb42efafb2989d51fefce3c80988f41ff883
        , ecc_n = 0x03ffffffffffffffffffff48aab689c29ca710279b
        , ecc_h = 2
        })
getCurveByName SEC_t163r2 = CurveF2m $ CurveBinary
    0x0800000000000000000000000000000000000000c9
    (CurveCommon
        { ecc_a = 0x000000000000000000000000000000000000000001
        , ecc_b = 0x020a601907b8c953ca1481eb10512f78744a3205fd
        , ecc_g = Point 0x03f0eba16286a2d57ea0991168d4994637e8343e36
                        0x00d51fbc6c71a0094fa2cdd545b11c5c0c797324f1
        , ecc_n = 0x040000000000000000000292fe77e70c12a4234c33
        , ecc_h = 2
        })
getCurveByName SEC_t193r1 = CurveF2m $ CurveBinary
    0x02000000000000000000000000000000000000000000008001
    (CurveCommon
        { ecc_a = 0x0017858feb7a98975169e171f77b4087de098ac8a911df7b01
        , ecc_b = 0x00fdfb49bfe6c3a89facadaa7a1e5bbc7cc1c2e5d831478814
        , ecc_g = Point 0x01f481bc5f0ff84a74ad6cdf6fdef4bf6179625372d8c0c5e1
                        0x0025e399f2903712ccf3ea9e3a1ad17fb0b3201b6af7ce1b05
        , ecc_n = 0x01000000000000000000000000c7f34a778f443acc920eba49
        , ecc_h = 2
        })
getCurveByName SEC_t193r2 = CurveF2m $ CurveBinary
    0x02000000000000000000000000000000000000000000008001
    (CurveCommon
        { ecc_a = 0x0163f35a5137c2ce3ea6ed8667190b0bc43ecd69977702709b
        , ecc_b = 0x00c9bb9e8927d4d64c377e2ab2856a5b16e3efb7f61d4316ae
        , ecc_g = Point 0x00d9b67d192e0367c803f39e1a7e82ca14a651350aae617e8f
                        0x01ce94335607c304ac29e7defbd9ca01f596f927224cdecf6c
        , ecc_n = 0x010000000000000000000000015aab561b005413ccd4ee99d5
        , ecc_h = 2
        })
getCurveByName SEC_t233k1 = CurveF2m $ CurveBinary
    0x020000000000000000000000000000000000000004000000000000000001
    (CurveCommon
        { ecc_a = 0x000000000000000000000000000000000000000000000000000000000000
        , ecc_b = 0x000000000000000000000000000000000000000000000000000000000001
        , ecc_g = Point 0x017232ba853a7e731af129f22ff4149563a419c26bf50a4c9d6eefad6126
                        0x01db537dece819b7f70f555a67c427a8cd9bf18aeb9b56e0c11056fae6a3
        , ecc_n = 0x008000000000000000000000000000069d5bb915bcd46efb1ad5f173abdf
        , ecc_h = 4
        })
getCurveByName SEC_t233r1 = CurveF2m $ CurveBinary
    0x020000000000000000000000000000000000000004000000000000000001
    (CurveCommon
        { ecc_a = 0x000000000000000000000000000000000000000000000000000000000001
        , ecc_b = 0x0066647ede6c332c7f8c0923bb58213b333b20e9ce4281fe115f7d8f90ad
        , ecc_g = Point 0x00fac9dfcbac8313bb2139f1bb755fef65bc391f8b36f8f8eb7371fd558b
                        0x01006a08a41903350678e58528bebf8a0beff867a7ca36716f7e01f81052
        , ecc_n = 0x01000000000000000000000000000013e974e72f8a6922031d2603cfe0d7
        , ecc_h = 2
        })
getCurveByName SEC_t239k1 = CurveF2m $ CurveBinary
    0x800000000000000000004000000000000000000000000000000000000001
    (CurveCommon
        { ecc_a = 0x000000000000000000000000000000000000000000000000000000000000
        , ecc_b = 0x000000000000000000000000000000000000000000000000000000000001
        , ecc_g = Point 0x29a0b6a887a983e9730988a68727a8b2d126c44cc2cc7b2a6555193035dc
                        0x76310804f12e549bdb011c103089e73510acb275fc312a5dc6b76553f0ca
        , ecc_n = 0x2000000000000000000000000000005a79fec67cb6e91f1c1da800e478a5
        , ecc_h = 4
        })
getCurveByName SEC_t283k1 = CurveF2m $ CurveBinary
    0x0800000000000000000000000000000000000000000000000000000000000000000010a1
    (CurveCommon
        { ecc_a = 0x000000000000000000000000000000000000000000000000000000000000000000000000
        , ecc_b = 0x000000000000000000000000000000000000000000000000000000000000000000000001
        , ecc_g = Point 0x0503213f78ca44883f1a3b8162f188e553cd265f23c1567a16876913b0c2ac2458492836
                        0x01ccda380f1c9e318d90f95d07e5426fe87e45c0e8184698e45962364e34116177dd2259
        , ecc_n = 0x01ffffffffffffffffffffffffffffffffffe9ae2ed07577265dff7f94451e061e163c61
        , ecc_h = 4
        })
getCurveByName SEC_t283r1 = CurveF2m $ CurveBinary
    0x0800000000000000000000000000000000000000000000000000000000000000000010a1
    (CurveCommon
        { ecc_a = 0x000000000000000000000000000000000000000000000000000000000000000000000001
        , ecc_b = 0x027b680ac8b8596da5a4af8a19a0303fca97fd7645309fa2a581485af6263e313b79a2f5
        , ecc_g = Point 0x05f939258db7dd90e1934f8c70b0dfec2eed25b8557eac9c80e2e198f8cdbecd86b12053
                        0x03676854fe24141cb98fe6d4b20d02b4516ff702350eddb0826779c813f0df45be8112f4
        , ecc_n = 0x03ffffffffffffffffffffffffffffffffffef90399660fc938a90165b042a7cefadb307
        , ecc_h = 2
        })
getCurveByName SEC_t409k1 = CurveF2m $ CurveBinary
    0x02000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000001
    (CurveCommon
        { ecc_a = 0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
        , ecc_b = 0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001
        , ecc_g = Point 0x0060f05f658f49c1ad3ab1890f7184210efd0987e307c84c27accfb8f9f67cc2c460189eb5aaaa62ee222eb1b35540cfe9023746
                        0x01e369050b7c4e42acba1dacbf04299c3460782f918ea427e6325165e9ea10e3da5f6c42e9c55215aa9ca27a5863ec48d8e0286b
        , ecc_n = 0x007ffffffffffffffffffffffffffffffffffffffffffffffffffe5f83b2d4ea20400ec4557d5ed3e3e7ca5b4b5c83b8e01e5fcf
        , ecc_h = 4
        })
getCurveByName SEC_t409r1 = CurveF2m $ CurveBinary
    0x02000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000001
    (CurveCommon
        { ecc_a = 0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001
        , ecc_b = 0x0021a5c2c8ee9feb5c4b9a753b7b476b7fd6422ef1f3dd674761fa99d6ac27c8a9a197b272822f6cd57a55aa4f50ae317b13545f
        , ecc_g = Point 0x015d4860d088ddb3496b0c6064756260441cde4af1771d4db01ffe5b34e59703dc255a868a1180515603aeab60794e54bb7996a7
                        0x0061b1cfab6be5f32bbfa78324ed106a7636b9c5a7bd198d0158aa4f5488d08f38514f1fdf4b4f40d2181b3681c364ba0273c706
        , ecc_n = 0x010000000000000000000000000000000000000000000000000001e2aad6a612f33307be5fa47c3c9e052f838164cd37d9a21173
        , ecc_h = 2
        })
getCurveByName SEC_t571k1 = CurveF2m $ CurveBinary
    0x080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000425
    (CurveCommon
        { ecc_a = 0x000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
        , ecc_b = 0x000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001
        , ecc_g = Point 0x026eb7a859923fbc82189631f8103fe4ac9ca2970012d5d46024804801841ca44370958493b205e647da304db4ceb08cbbd1ba39494776fb988b47174dca88c7e2945283a01c8972
                        0x0349dc807f4fbf374f4aeade3bca95314dd58cec9f307a54ffc61efc006d8a2c9d4979c0ac44aea74fbebbb9f772aedcb620b01a7ba7af1b320430c8591984f601cd4c143ef1c7a3
        , ecc_n = 0x020000000000000000000000000000000000000000000000000000000000000000000000131850e1f19a63e4b391a8db917f4138b630d84be5d639381e91deb45cfe778f637c1001
        , ecc_h = 4
        })
getCurveByName SEC_t571r1 = CurveF2m $ CurveBinary
    0x080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000425
    (CurveCommon
        { ecc_a = 0x000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001
        , ecc_b = 0x02f40e7e2221f295de297117b7f3d62f5c6a97ffcb8ceff1cd6ba8ce4a9a18ad84ffabbd8efa59332be7ad6756a66e294afd185a78ff12aa520e4de739baca0c7ffeff7f2955727a
        , ecc_g = Point 0x0303001d34b856296c16c0d40d3cd7750a93d1d2955fa80aa5f40fc8db7b2abdbde53950f4c0d293cdd711a35b67fb1499ae60038614f1394abfa3b4c850d927e1e7769c8eec2d19
                        0x037bf27342da639b6dccfffeb73d69d78c6c27a6009cbbca1980f8533921e8a684423e43bab08a576291af8f461bb2a8b3531d2f0485c19b16e2f1516e23dd3c1a4827af1b8ac15b
        , ecc_n = 0x03ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe661ce18ff55987308059b186823851ec7dd9ca1161de93d5174d66e8382e9bb2fe84e47
        , ecc_h = 2
        })
|
tekul/cryptonite
|
Crypto/PubKey/ECC/Types.hs
|
bsd-3-clause
| 24,102
| 0
| 10
| 4,785
| 2,627
| 1,535
| 1,092
| 385
| 1
|
{-|
Module : HeapSort
Description : heapsort implemented in Haskell language
Copyright : (c) Julia Sypień, Patrycja Tarasińska, 2017
License : GPL-3
Maintainer : sasuke96@tlen.pl
Stability : experimental
Portability : POSIX
-}
module Heapsort(
-- *functions
parent,
leftChild,
rightChild,
heapify,
buildHeap,
heapSortLoop,
heapsort
)
where
-- | Index of the parent of the heap node stored at the given index.
-- The root (index 0) and any negative index map to -1.
parent :: (Integral a, Num a, Ord a) => a -> a
parent i
  | i <= 0    = -1
  | otherwise = (i - 1) `div` 2
-- | Index of the left child of the heap node stored at the given index.
leftChild :: (Num a) => a -> a
leftChild idx = 2 * idx + 1
-- | Index of the right child of the heap node stored at the given index.
rightChild :: (Num a) => a -> a
rightChild idx = 2 * idx + 2
-- | Restore the max-heap property at the given node: if a child is
-- larger than the node, exchange them and continue sifting down.
-- Mirrors the original's behaviour, including returning the empty
-- list for a negative index.
heapify :: (Ord a) => [a] -> Int -> [a]
heapify [] _ = []
heapify xs i
  | i < 0 = []
  | l < n && xs !! l > xs !! i && (r >= n || xs !! r <= xs !! l)
      = heapify (swapDown l) l
  | r < n && xs !! r > xs !! i
      = heapify (swapDown r) r
  | otherwise = xs
  where
    n = length xs
    l = 2 * i + 1    -- left child index
    r = 2 * i + 2    -- right child index
    -- Exchange the element at i with the larger child at c (i < c holds).
    swapDown c =
      take i xs ++ xs !! c : drop (i + 1) (take c xs)
                ++ xs !! i : drop (c + 1) xs
-- | Turn the list into a max-heap by sifting down every node from
-- index i back to the root; leaves (no left child) are skipped.
buildHeap :: (Ord a) => [a] -> Int -> [a]
buildHeap [] _ = []
buildHeap xs i
  | i < 0                  = xs
  | 2 * i + 1 >= length xs = buildHeap xs (i - 1)            -- leaf: nothing to sift
  | otherwise              = buildHeap (heapify xs i) (i - 1)
-- | Sorting loop: repeatedly move the last unsorted element to the
-- root, re-heapify the unsorted prefix, and place the old root (the
-- maximum) at position k, shrinking the unsorted region by one.
heapSortLoop :: (Ord a) => [a] -> Int -> [a]
heapSortLoop [] _ = []
heapSortLoop xs k
  | k == 0    = xs
  | otherwise = heapSortLoop (front ++ xs !! 0 : back) (k - 1)
  where
    -- last unsorted element promoted to the root, then sifted down
    front = heapify (xs !! k : tail (take k xs)) 0
    -- the already-sorted suffix, untouched
    back  = drop (k + 1) xs
-- | Sort a list in ascending order with the heapsort algorithm:
-- build a max-heap, then repeatedly extract the maximum.
heapsort :: (Ord a) => [a] -> [a]
heapsort [] = []
heapsort xs = heapSortLoop (buildHeap xs lastIdx) lastIdx
  where lastIdx = length xs - 1
|
Regulareveryday/our_sorting_algos
|
src/Heapsort.hs
|
bsd-3-clause
| 2,264
| 0
| 19
| 521
| 924
| 495
| 429
| 44
| 4
|
module Main where
import Criterion.Main
import GR4J
-- | Run the GR4J model over @n@ identical observations and keep only
-- the simulated flow of each (flow, state) result pair.
runSeries :: Int -> [Double]
runSeries n = map fst results
  where
    results = run
      (replicate n (Observation "" 20 10 0))
      (ParameterSet 400 1 150 5)
      (State 200 75 (replicate 5 0) (replicate 10 0))
-- | Run a single GR4J model step and return the simulated flow,
-- discarding the updated state. The argument is ignored (it only
-- exists so the benchmark driver has something to force).
runDay :: Int -> Double
runDay _ = flow
  where
    (flow, _) = model
      (Observation "" 20 10 0)
      (ParameterSet 400 1 150 5)
      (State 200 75 (replicate 5 0) (replicate 10 0))
-- | Re-derive the unit hydrograph @n@ times (pure benchmark workload).
updateUH :: Int -> [[Double]]
updateUH n =
  map (\_ -> updateUnitHydrograph sCurve2 5 1 (replicate 10 0)) [1 .. n]
-- | Criterion benchmark suite for GR4J: a single model step, 30 years
-- of daily and of hourly simulated flows, and the unit-hydrograph
-- update in isolation.
main :: IO ()
main = defaultMain [
  bgroup "GR4J" [
    bench "1 day" $ nf runDay 1,
    bench "30 years of daily (10,950)" $ nf runSeries 10950,
    bench "30 years of hourly (262,800)" $ nf runSeries 262800,
    bench "update UH" $ nf updateUH 10950
  ]
  ]
|
tinyrock/gr4j
|
src/Bench.hs
|
bsd-3-clause
| 948
| 0
| 12
| 356
| 369
| 196
| 173
| 26
| 1
|
module Main where
import Lib
-- | Print the value from 'Lib' followed by a trailing semicolon.
main :: IO ()
main = putStrLn (show helloWorld ++ ";")
|
cosmo0920/hrr-stack-template
|
app/Main.hs
|
bsd-3-clause
| 86
| 0
| 7
| 19
| 33
| 18
| 15
| 4
| 1
|
module Checked.X2 where
-- | A constant that LiquidHaskell checks against the refinement
-- annotation below: the value is required to be exactly 2.
{-@ x :: {v:Int | v = 2} @-}
x :: Int
x = 2
|
spinda/liquidhaskell-cabal-demo
|
app/Checked/X2.hs
|
bsd-3-clause
| 69
| 0
| 4
| 19
| 17
| 11
| 6
| 3
| 1
|
module Iface.IfaceDriver where
-- this module works as a facade for
-- the other modules in this package.
-- it exports only the function to
-- write a module info and to read
-- a set of module infos.
-- it uses the import / export definitions
-- in a module to write / read and to
-- report error messages
import Data.List
import qualified Data.Map as Map
import Language.Haskell.Exts
import System.FilePath
import Iface.Iface
import Iface.IfaceReader
import Iface.IfaceWriter
import Tc.Assumption
import Tc.TySyn
import Tc.Kc.KcEnv hiding (unionl)
import Utils.Env (unionl)
import Utils.FileNameUtils
import Utils.Id
-- | Read one interface ('Iface') for every import declaration of the
-- given module, looking the files up under the given directory.
readIfaces :: FilePath -> Module -> IO [Iface]
readIfaces dir (Module _ _ _ _ _ imports _) =
  mapM (readIface dir) imports
-- | Read and parse the interface file that belongs to a single
-- import declaration.
readIface :: FilePath -> ImportDecl -> IO Iface
readIface dir imp =
  parseInterface (gen dir (importModule imp))
-- | Write the interface file for a module.
-- NOTE(review): both clauses emit the full interface — the export list
-- (@es@) and the already-read interfaces (@ms@) are pattern-bound but
-- never used, so exports do not currently trim what gets written.
-- Confirm whether this is intended.
writeIface :: FilePath -> Module -> [Iface] -> Iface -> IO ()
writeIface dir (Module _ n _ _ Nothing _ _) _
           (Iface i syn lbls kc cls ins ass)
    = writeInterface dir n i syn lbls kc cls ins ass
writeIface dir (Module _ n _ _ (Just es) _ _) ms
           (Iface i syn lbls kc cls ins ass)
    = writeInterface dir n i syn lbls kc cls ins ass
-- Small auxiliary functions.

-- | The name of a module (matches the 'Module' constructor only).
moduleName (Module _ n _ _ _ _ _) = n

-- | Path of the interface file for a module name under the given
-- directory.
gen dir (ModuleName s) = interfaceFileName dir s
|
rodrigogribeiro/mptc
|
src/Iface/IfaceDriver.hs
|
bsd-3-clause
| 1,569
| 0
| 12
| 434
| 428
| 228
| 200
| 32
| 1
|
--
--
--
-----------------
-- Exercise 4.29.
-----------------
--
--
--
module E'4'29 where
import E'4'26 ( column )
import E'4'27 ( row , rowBlackAt )
import PicturesSVG
(
Picture
, black
, beside
, white
, above
)
-- | A square picture of side @n@ with both diagonals painted black
-- on a white background. Errors on sides smaller than one.
diagonalBoth :: Integer -> Picture
diagonalBoth n
  | n < 1     = error "Length is lesser than one."
  | n == 1    = black
  | n == 2    = column black 2
                `beside` column black 2
  | otherwise = border
                `above` core
                `above` border
  where
    border, core :: Picture
    -- Top/bottom row: black corners around a white middle.
    border = black
             `beside` row white (n - 2)
             `beside` black
    -- Rows in between: white margin beside a smaller double diagonal.
    core   = column white (n - 2)
             `beside` diagonalBoth (n - 2)
{- GHCi>
render (diagonalBoth 10)
-}
-- Other solution ...
-- | A row of the given length with black squares at the two given
-- positions; coincident positions degenerate to 'rowBlackAt'.
rowTwoBlacksAt :: Integer -> Integer -> Integer -> Picture
rowTwoBlacksAt p1 p2 len
  | p1 == p2  = rowBlackAt p1 len
  | len < 1   = error "Length is lesser than one."
  | len == 1  = black
  | len == 2  = row black 2
  -- Split the row at the first black square, then recurse on the rest.
  | p1 < p2   = rowBlackAt p1 p1
                `beside` rowBlackAt (p2 - p1) (len - p1)
  | otherwise = rowBlackAt p2 p2
                `beside` rowBlackAt (p1 - p2) (len - p2)
-- | Alternative construction of the double diagonal: build the square
-- row by row from the outside in, using 'rowTwoBlacksAt' for each row.
diagonalBoth2 :: Integer -> Picture
diagonalBoth2 n
  = diagonalBoth2' 1 n
  where
    diagonalBoth2' :: Integer -> Integer -> Picture
    diagonalBoth2' left right
      -- Odd side length: the two diagonals meet in a single middle row.
      | left == right = rowBlackAt left ( left + right )
      -- Even side length: two identical middle rows.
      | left == (right - 1) = column ( rowTwoBlacksAt left (left + 1) (left + right) ) 2
      -- Otherwise: the same row at top and bottom (vertical symmetry),
      -- with the inner rows built recursively.
      | otherwise = ( rowTwoBlacksAt left right (left + right) )
        `above` ( diagonalBoth2' (left + 1) (right - 1) )
        `above` ( rowTwoBlacksAt left right (left + right) )
|
pascal-knodel/haskell-craft
|
_/links/E'4'29.hs
|
mit
| 2,502
| 0
| 13
| 930
| 611
| 329
| 282
| 45
| 1
|
{- |
Module : $Header$
Description : Interface to the CspCASLProver (Isabelle based) theorem prover
Copyright : (c) Liam O'Reilly and Markus Roggenbach, Swansea University 2009
License : GPLv2 or higher, see LICENSE.txt
Maintainer : csliam@swansea.ac.uk
Stability : provisional
Portability : portable
Interface for CspCASLProver theorem prover.
-}
{-
Interface between CspCASLProver and Hets:
Hets writes CspCASLProver's Isabelle .thy files and
starts Isabelle with CspProver
User extends .thy file with proofs
User finishes Isabelle
Hets reads in created *.deps files
-}
module CspCASLProver.CspCASLProver
( cspCASLProver
) where
import CASL.AS_Basic_CASL
import CASL.Fold
import CASL.Sign (CASLSign, Sign (..), sortSet)
import Common.AS_Annotation (Named, mapNamedM)
import Common.ProverTools
import Common.Result
import qualified Comorphisms.CASL2PCFOL as CASL2PCFOL
import qualified Comorphisms.CASL2SubCFOL as CASL2SubCFOL
import qualified Comorphisms.CFOL2IsabelleHOL as CFOL2IsabelleHOL
import CspCASL.SignCSP
import CspCASL.Morphism (CspCASLMorphism)
import CspCASLProver.Consts
import CspCASLProver.IsabelleUtils
import CspCASLProver.Utils
import qualified Data.Maybe as Maybe
import qualified Data.Set as Set
import Isabelle.IsaProve
import qualified Isabelle.IsaSign as Isa
import Logic.Prover
import Logic.Comorphism (wrapMapTheory)
-- | The string that Hets uses as the name of CspCASLProver.
cspCASLProverS :: String
cspCASLProverS = "CspCASLProver"
-- | The wrapper function that is CspCASL Prover.
-- Only usable when an @isabelle@ binary can be found.
cspCASLProver :: Prover CspCASLSign CspCASLSen CspCASLMorphism () ()
cspCASLProver = (mkProverTemplate cspCASLProverS () cspCASLProverProve)
    { proverUsable = checkBinary "isabelle" }
-- | The main cspCASLProver function.
--
-- Writes the chain of Isabelle theory files (data encoding,
-- pre-alphabet, alphabet, integration theorems) and finally starts
-- Isabelle/jEdit on the process theory so the user can prove the
-- process refinements interactively. Always returns an empty list of
-- proof statuses; the @.deps@ files are read back by Hets elsewhere.
cspCASLProverProve :: String -> Theory CspCASLSign CspCASLSen () -> a ->
                      IO [ProofStatus ()]
cspCASLProverProve thName (Theory ccSign ccSensThSens) _freedefs =
    let -- get the CASL signature of the data part of the CspcASL theory
        caslSign = ccSig2CASLSign ccSign
        -- Get a list of CspCASL named sentences
        ccNamedSens = toNamedList ccSensThSens
        -- A filter to change a CspCASLSen to a CASLSen (if possible);
        -- process equations have no CASL counterpart and are dropped
        caslSenFilter ccSen = case ccSen of
          ExtFORMULA (ProcessEq {}) -> Nothing
          sen -> Just $ foldFormula (mapRecord $ const ()) sen
        -- All named CASL sentences from the datapart
        caslNamedSens = Maybe.mapMaybe (mapNamedM caslSenFilter) ccNamedSens
        -- Generate data encoding. This may fail.
        Result diag dataTh = produceDataEncoding caslSign caslNamedSens
    in case dataTh of
      Nothing -> do
        -- Data translation failed
        putStrLn $ "Sorry, could not encode the data part:" ++ show diag
        return []
      Just (dataThSig, dataThSens, pcfolSign, cfolSign) -> do
        {- Data translation succeeded
           Write out the data encoding -}
        writeIsaTheory (mkThyNameDataEnc thName)
          (Theory dataThSig (toThSens dataThSens))
        {- Generate and write out the preAlphabet, justification theorems
           and the instances code. -}
        writeIsaTheory (mkThyNamePreAlphabet thName)
          (producePreAlphabet thName caslSign pcfolSign)
        {- Generate and write out the Alphabet construction, bar types
           and choose functions. -}
        writeIsaTheory (mkThyNameAlphabet thName)
          (produceAlphabet thName caslSign)
        -- Generate and write out the integration theorems
        writeIsaTheory (mkThyNameIntThms thName)
          (produceIntegrationTheorems thName caslSign)
        {- Generate and run Isabelle to prove the process refinements
           (also produces the processes) -}
        isaProve JEdit thName
          (produceProcesses thName ccSign ccNamedSens pcfolSign cfolSign) ()
{- |Produce the Isabelle theory of the data part of a CspCASL
specification. The data translation can fail. If it does fail there will
be an error message. Its arguments are the CASL signature from the data
part and a list of the named CASL sentences from the data part. Returned
are the Isabelle signature, Isabelle named sentences and also the CASL
signature of the data part after translation to pcfol (i.e. with out
subsorting) and cfol (i.e. with out subsorting and partiality). -}
produceDataEncoding :: CASLSign -> [Named CASLFORMULA] ->
                       Result (Isa.Sign, [Named Isa.Sentence], CASLSign,
                               CASLSign)
produceDataEncoding caslSign caslNamedSens =
    let -- Comorphisms: remove subsorting, then partiality, then
        -- translate to Isabelle/HOL
        casl2pcfol = wrapMapTheory CASL2PCFOL.CASL2PCFOL
        pcfol2cfol = wrapMapTheory $ CASL2SubCFOL.CASL2SubCFOL True
                     CASL2SubCFOL.AllSortBottoms
        cfol2isabelleHol = wrapMapTheory CFOL2IsabelleHOL.CFOL2IsabelleHOL
    in do
      {- Remove Subsorting from the CASL part of the CspCASL
         specification -}
      th_pcfol <- casl2pcfol (caslSign, caslNamedSens)
      -- Next Remove partial functions
      th_cfol <- pcfol2cfol th_pcfol
      -- Next Translate to IsabelleHOL code
      (th_isa_Sig, th_isa_Sens) <- cfol2isabelleHol th_cfol
      return (th_isa_Sig, th_isa_Sens, fst th_pcfol, fst th_cfol)
{- | Produce the Isabelle theory which contains the PreAlphabet,
Justification Theorems and also the instances code. We need the
PFOL signature which is the data part CASL signature after
translation to PCFOL (i.e. without subsorting) to pass on as an
argument. -}
producePreAlphabet :: String -> CASLSign -> CASLSign ->
                      Theory Isa.Sign Isa.Sentence ()
producePreAlphabet thName caslSign pfolSign =
    let sortList = Set.toList (sortSet caslSign)
        {- empty Isabelle signature which imports the data encoding
           and quotient.thy (which is needed for the instances code) -}
        isaSignEmpty = Isa.emptySign {Isa.imports = [mkThyNameDataEnc thName
                                                    , quotientThyS] }
        -- Start with our empty Isabelle theory, add the constructs
        -- (applied bottom-up: the PreAlphabet type first, the
        -- equivalence instance last)
        (isaSign, isaSens) = addInstanceOfEquiv
                             $ addJustificationTheorems caslSign pfolSign
                             $ addAllGaAxiomsCollections caslSign pfolSign
                             $ addEqFun sortList
                             $ addAllCompareWithFun caslSign
                             $ addPreAlphabet sortList
                             (isaSignEmpty, [])
    in Theory isaSign (toThSens isaSens)
{- |Produce the Isabelle theory which contains the Alphabet
   construction, plus the bar types and choose functions for
   CspCASLProver. -}
produceAlphabet :: String -> CASLSign -> Theory Isa.Sign Isa.Sentence ()
produceAlphabet thName caslSign = Theory finalSign (toThSens finalSens)
  where
    sorts = Set.toList (sortSet caslSign)
    -- empty Isabelle signature which imports the preAlphabet encoding
    baseSign = Isa.emptySign { Isa.imports = [mkThyNamePreAlphabet thName] }
    -- Starting from the empty theory: add the Alphabet type, then the
    -- bar types, and finally the choose functions.
    (finalSign, finalSens) =
      addAllChooseFunctions sorts
        (addAllBarTypes sorts
          (addAlphabetType (baseSign, [])))
{- |Produce the Isabelle theory which contains the Integration
   Theorems on data. -}
produceIntegrationTheorems :: String -> CASLSign ->
                              Theory Isa.Sign Isa.Sentence ()
produceIntegrationTheorems thName caslSign = Theory sign' (toThSens sens')
  where
    sorts = Set.toList (sortSet caslSign)
    -- empty Isabelle signature which imports the alphabet encoding
    base = Isa.emptySign { Isa.imports = [mkThyNameAlphabet thName] }
    -- Starting from the empty theory, add the integration theorems.
    (sign', sens') = addAllIntegrationTheorems sorts caslSign (base, [])
{- |Produce the Isabelle theory which contains the Process Translations
and process refinement theorems. We need the PCFOL and CFOL signatures
of the data part (after translation to PCFOL and CFOL) to pass along to
the process translation. -}
produceProcesses :: String -> CspCASLSign -> [Named CspCASLSen] ->
                    CASLSign -> CASLSign -> Theory Isa.Sign Isa.Sentence ()
produceProcesses thName ccSign ccNamedSens pcfolSign cfolSign =
    let caslSign = ccSig2CASLSign ccSign
        cspSign = ccSig2CspSign ccSign
        sortList = Set.toList (sortSet caslSign)
        sortRel' = sortRel caslSign
        chanNameMap = chans cspSign
        {- Isabelle signature which imports the integration theorems
           encoding and CSP_F -}
        isaSignEmpty = Isa.emptySign {Isa.imports = [mkThyNameIntThms thName
                                                    , cspFThyS] }
        {- Start with our empty isabelle theory and add the processes,
           then the process refinement theorems. -}
        (isaSign, isaSens) =
            addProcTheorems ccNamedSens ccSign pcfolSign cfolSign
            $ addProcMap ccNamedSens ccSign pcfolSign cfolSign
            $ addProcNameDatatype cspSign
            $ addFlatTypes sortList
            $ addProjFlatFun
            $ addEventDataType sortRel' chanNameMap
            (isaSignEmpty, [])
    in Theory isaSign (toThSens isaSens)
|
keithodulaigh/Hets
|
CspCASLProver/CspCASLProver.hs
|
gpl-2.0
| 9,449
| 0
| 17
| 2,399
| 1,394
| 734
| 660
| 118
| 3
|
-- Naive Sieve
module NaiveSieve (primes, sieve) where
-- | The infinite list of prime numbers, generated by trial division.
primes :: [Integer]
primes = sieve [2..]

-- | Keep the head of the list and sieve all of its multiples out of
-- the tail. Now total: the original had no case for @[]@ and crashed
-- on any finite input; an empty input yields an empty output, which
-- also makes 'sieve' usable on finite lists.
sieve :: [Integer] -> [Integer]
sieve []       = []
sieve (p : xs) = p : sieve [x | x <- xs, x `mod` p > 0]
|
dkensinger/haskell
|
haskell-primes/NaiveSieve.hs
|
gpl-3.0
| 187
| 0
| 10
| 41
| 94
| 54
| 40
| 5
| 1
|
{-
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section{Tidying up Core}
-}
module ETA.Main.TidyPgm (
mkBootModDetailsTc, tidyProgram, globaliseAndTidyId
) where
import ETA.TypeCheck.TcRnTypes
import ETA.Main.DynFlags
import ETA.Core.CoreSyn
import ETA.Core.CoreUnfold
import ETA.Core.CoreFVs
import ETA.Core.CoreTidy
import ETA.SimplCore.CoreMonad
import ETA.Core.CorePrep
import ETA.Core.CoreUtils
import ETA.Core.CoreLint
import ETA.BasicTypes.Literal
import ETA.Specialise.Rules
import ETA.BasicTypes.PatSyn
import ETA.BasicTypes.ConLike
import ETA.Core.CoreArity ( exprArity, exprBotStrictness_maybe )
import ETA.BasicTypes.VarEnv
import ETA.BasicTypes.VarSet
import ETA.BasicTypes.Var
import ETA.BasicTypes.Id
import qualified ETA.BasicTypes.Id as Id
import ETA.BasicTypes.MkId ( mkDictSelRhs )
import ETA.BasicTypes.IdInfo
import ETA.Types.InstEnv
import ETA.Types.FamInstEnv
import ETA.Types.Type ( tidyTopType )
import ETA.BasicTypes.Demand ( appIsBottom, isNopSig, isBottomingSig )
import ETA.BasicTypes.BasicTypes
import ETA.BasicTypes.Name hiding (varName)
import ETA.BasicTypes.NameSet
import ETA.BasicTypes.NameEnv
import ETA.BasicTypes.Avail
import ETA.Iface.IfaceEnv
import ETA.TypeCheck.TcEnv
import ETA.TypeCheck.TcRnMonad
import ETA.BasicTypes.DataCon
import ETA.Types.TyCon
import ETA.Types.Class
import ETA.BasicTypes.Module
import ETA.Main.Packages( isDllName )
import ETA.Main.HscTypes
import ETA.Utils.Maybes
import ETA.BasicTypes.UniqSupply
import ETA.Main.ErrUtils (Severity(..))
import ETA.Utils.Outputable
import ETA.Utils.FastBool hiding ( fastOr )
import ETA.BasicTypes.SrcLoc
import ETA.Utils.FastString
import qualified ETA.Main.ErrUtils as Err
import Control.Monad
import Data.Function
import Data.List ( sortBy )
import Data.IORef ( atomicModifyIORef )
{-
Constructing the TypeEnv, Instances, Rules, VectInfo from which the
ModIface is constructed, and which goes on to subsequent modules in
--make mode.
Most of the interface file is obtained simply by serialising the
TypeEnv. One important consequence is that if the *interface file*
has pragma info if and only if the final TypeEnv does. This is not so
important for *this* module, but it's essential for ghc --make:
subsequent compilations must not see (e.g.) the arity if the interface
file does not contain arity. If they do, they'll exploit the arity;
then the arity might change, but the iface file doesn't change =>
recompilation does not happen => disaster.
For data types, the final TypeEnv will have a TyThing for the TyCon,
plus one for each DataCon; the interface file will contain just one
data type declaration, but it is de-serialised back into a collection
of TyThings.
************************************************************************
* *
Plan A: simpleTidyPgm
* *
************************************************************************
Plan A: mkBootModDetails: omit pragmas, make interfaces small
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* Ignore the bindings
* Drop all WiredIn things from the TypeEnv
(we never want them in interface files)
* Retain all TyCons and Classes in the TypeEnv, to avoid
having to find which ones are mentioned in the
types of exported Ids
* Trim off the constructors of non-exported TyCons, both
from the TyCon and from the TypeEnv
* Drop non-exported Ids from the TypeEnv
* Tidy the types of the DFunIds of Instances,
make them into GlobalIds, (they already have External Names)
and add them to the TypeEnv
* Tidy the types of the (exported) Ids in the TypeEnv,
make them into GlobalIds (they already have External Names)
* Drop rules altogether
* Tidy the bindings, to ensure that the Caf and Arity
information is correct for each top-level binder; the
code generator needs it. And to ensure that local names have
distinct OccNames in case of object-file splitting
* If this an hsig file, drop the instances altogether too (they'll
get pulled in by the implicit module import.
-}
-- This is Plan A: make a small type env when typechecking only,
-- or when compiling a hs-boot file, or simply when not using -O
--
-- We don't look at the bindings at all -- there aren't any
-- for hs-boot files
--
-- | Build the 'ModDetails' for a boot module straight from the
-- typechecker's 'TcGblEnv': globalise/tidy the DFunIds and PatSyn Ids,
-- trim the type env to exported things, and return no rules,
-- annotations or vectorisation info.
mkBootModDetailsTc :: HscEnv -> TcGblEnv -> IO ModDetails
mkBootModDetailsTc hsc_env
        TcGblEnv{ tcg_exports = exports,
                  tcg_type_env = type_env, -- just for the Ids
                  tcg_tcs = tcs,
                  tcg_patsyns = pat_syns,
                  tcg_insts = insts,
                  tcg_fam_insts = fam_insts
                }
  = do  { let dflags = hsc_dflags hsc_env
        ; showPassIO dflags CoreTidy
        ; let { insts' = map (tidyClsInstDFun globaliseAndTidyId) insts
              ; type_env1 = mkBootTypeEnv (availsToNameSet exports)
                               (typeEnvIds type_env) tcs fam_insts
              ; pat_syns' = map (tidyPatSynIds globaliseAndTidyId) pat_syns
              ; type_env2 = extendTypeEnvWithPatSyns pat_syns' type_env1
              ; dfun_ids = map instanceDFunId insts'
              ; type_env' = extendTypeEnvWithIds type_env2 dfun_ids
              }
        ; return (ModDetails { md_types = type_env'
                             , md_insts = insts'
                             , md_fam_insts = fam_insts
                             , md_rules = []
                             , md_anns = []
                             , md_exports = exports
                             , md_vect_info = noVectInfo
                             })
        }
  where
-- | Build the boot-file type env from the exported (globalised,
-- tidied) Ids plus all TyCons and family instances.
mkBootTypeEnv :: NameSet -> [Id] -> [TyCon] -> [FamInst] -> TypeEnv
mkBootTypeEnv exports ids tcs fam_insts
  = tidyTypeEnv True $
       typeEnvFromEntities final_ids tcs fam_insts
  where
        -- Find the LocalIds in the type env that are exported
        -- Make them into GlobalIds, and tidy their types
        --
        -- It's very important to remove the non-exported ones
        -- because we don't tidy the OccNames, and if we don't remove
        -- the non-exported ones we'll get many things with the
        -- same name in the interface file, giving chaos.
        --
        -- Do make sure that we keep Ids that are already Global.
        -- When typechecking an .hs-boot file, the Ids come through as
        -- GlobalIds.
        final_ids = [ if isLocalId id then globaliseAndTidyId id
                      else id
                    | id <- ids
                    , keep_it id ]

        -- default methods have their export flag set, but everything
        -- else doesn't (yet), because this is pre-desugaring, so we
        -- must test both.
        keep_it id = isExportedId id || idName id `elemNameSet` exports
-- | Turn a LocalId that carries an External Name into a GlobalId:
--   * the Name is left unchanged (might be Internal or External)
--   * the IdDetails are left unchanged
--   * the IdInfo becomes vanilla (a conservative assumption about
--     Caf-hood), and the type is tidied at the top level
globaliseAndTidyId :: Id -> Id
globaliseAndTidyId v = Id.setIdType (globaliseId v) (tidyTopType (idType v))
{-
************************************************************************
* *
Plan B: tidy bindings, make TypeEnv full of IdInfo
* *
************************************************************************
Plan B: include pragmas, make interfaces
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* Figure out which Ids are externally visible
* Tidy the bindings, externalising appropriate Ids
* Drop all Ids from the TypeEnv, and add all the External Ids from
the bindings. (This adds their IdInfo to the TypeEnv; and adds
floated-out Ids that weren't even in the TypeEnv before.)
Step 1: Figure out external Ids
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Note [choosing external names]
See also the section "Interface stability" in the
RecompilationAvoidance commentary:
http://ghc.haskell.org/trac/ghc/wiki/Commentary/Compiler/RecompilationAvoidance
First we figure out which Ids are "external" Ids. An
"external" Id is one that is visible from outside the compilation
unit. These are
a) the user exported ones
b) ones mentioned in the unfoldings, workers,
rules of externally-visible ones ,
or vectorised versions of externally-visible ones
While figuring out which Ids are external, we pick a "tidy" OccName
for each one. That is, we make its OccName distinct from the other
external OccNames in this module, so that in interface files and
object code we can refer to it unambiguously by its OccName. The
OccName for each binder is prefixed by the name of the exported Id
that references it; e.g. if "f" references "x" in its unfolding, then
"x" is renamed to "f_x". This helps distinguish the different "x"s
from each other, and means that if "f" is later removed, things that
depend on the other "x"s will not need to be recompiled. Of course,
if there are multiple "f_x"s, then we have to disambiguate somehow; we
use "f_x0", "f_x1" etc.
As far as possible we should assign names in a deterministic fashion.
Each time this module is compiled with the same options, we should end
up with the same set of external names with the same types. That is,
the ABI hash in the interface should not change. This turns out to be
quite tricky, since the order of the bindings going into the tidy
phase is already non-deterministic, as it is based on the ordering of
Uniques, which are assigned unpredictably.
To name things in a stable way, we do a depth-first-search of the
bindings, starting from the exports sorted by name. This way, as long
as the bindings themselves are deterministic (they sometimes aren't!),
the order in which they are presented to the tidying phase does not
affect the names we assign.
Step 2: Tidy the program
~~~~~~~~~~~~~~~~~~~~~~~~
Next we traverse the bindings top to bottom. For each *top-level*
binder
1. Make it into a GlobalId; its IdDetails becomes VanillaGlobal,
reflecting the fact that from now on we regard it as a global,
not local, Id
2. Give it a system-wide Unique.
[Even non-exported things need system-wide Uniques because the
byte-code generator builds a single Name->BCO symbol table.]
We use the NameCache kept in the HscEnv as the
source of such system-wide uniques.
For external Ids, use the original-name cache in the NameCache
to ensure that the unique assigned is the same as the Id had
in any previous compilation run.
3. Rename top-level Ids according to the names we chose in step 1.
If it's an external Id, make it have a External Name, otherwise
make it have an Internal Name. This is used by the code generator
to decide whether to make the label externally visible
4. Give it its UTTERLY FINAL IdInfo; in ptic,
* its unfolding, if it should have one
* its arity, computed from the number of visible lambdas
* its CAF info, computed from what is free in its RHS
Finally, substitute these new top-level binders consistently
throughout, including in unfoldings. We also tidy binders in
RHSs, so that they print nicely in interfaces.
-}
-- | Plan B (normal compilation): choose external names, tidy the
-- top-level bindings, and split the result into 'CgGuts' (what the
-- code generator needs) and 'ModDetails' (what goes into the
-- interface). See "Step 1/Step 2" notes above for the overall plan.
tidyProgram :: HscEnv -> ModGuts -> IO (CgGuts, ModDetails)
tidyProgram hsc_env (ModGuts { mg_module = mod
                             , mg_exports = exports
                             , mg_rdr_env = rdr_env
                             , mg_tcs = tcs
                             , mg_insts = cls_insts
                             , mg_fam_insts = fam_insts
                             , mg_binds = binds
                             , mg_patsyns = patsyns
                             , mg_rules = imp_rules
                             , mg_vect_info = vect_info
                             , mg_anns = anns
                             , mg_deps = deps
                             , mg_foreign = foreign_stubs
                             , mg_hpc_info = hpc_info
                             , mg_modBreaks = modBreaks
                             })
  = do { let { dflags = hsc_dflags hsc_env
             ; omit_prags = gopt Opt_OmitInterfacePragmas dflags
             ; expose_all = gopt Opt_ExposeAllUnfoldings dflags
             ; print_unqual = mkPrintUnqualified dflags rdr_env
             }
       ; showPassIO dflags CoreTidy
       ; let { type_env = typeEnvFromEntities [] tcs fam_insts
             ; implicit_binds
                 = concatMap getClassImplicitBinds (typeEnvClasses type_env) ++
                   concatMap getTyConImplicitBinds (typeEnvTyCons type_env)
             }
         -- Step 1: pick stable external names (see Note [choosing
         -- external names])
       ; (unfold_env, tidy_occ_env)
              <- chooseExternalIds hsc_env mod omit_prags expose_all
                                   binds implicit_binds imp_rules (vectInfoVar vect_info)
       ; let { (trimmed_binds, trimmed_rules)
                  = findExternalRules omit_prags binds imp_rules unfold_env }
         -- Step 2: tidy the bindings themselves
       ; (tidy_env, tidy_binds)
                 <- tidyTopBinds hsc_env mod unfold_env tidy_occ_env trimmed_binds
       ; let { final_ids = [ id | id <- bindersOfBinds tidy_binds,
                                  isExternalName (idName id)]
             ; type_env1 = extendTypeEnvWithIds type_env final_ids

             ; tidy_cls_insts = map (tidyClsInstDFun (lookup_aux_id tidy_type_env)) cls_insts
               -- A DFunId will have a binding in tidy_binds, and so will now be in
               -- tidy_type_env, replete with IdInfo. Its name will be unchanged since
               -- it was born, but we want Global, IdInfo-rich (or not) DFunId in the
               -- tidy_cls_insts. Similarly the Ids inside a PatSyn.

             ; tidy_rules = tidyRules tidy_env trimmed_rules
               -- You might worry that the tidy_env contains IdInfo-rich stuff
               -- and indeed it does, but if omit_prags is on, ext_rules is
               -- empty

             ; tidy_vect_info = tidyVectInfo tidy_env vect_info

               -- Tidy the Ids inside each PatSyn, very similarly to DFunIds
               -- and then override the PatSyns in the type_env with the new tidy ones
               -- This is really the only reason we keep mg_patsyns at all; otherwise
               -- they could just stay in type_env
             ; tidy_patsyns = map (tidyPatSynIds (lookup_aux_id tidy_type_env)) patsyns
             ; type_env2 = extendTypeEnvWithPatSyns tidy_patsyns type_env1

             ; tidy_type_env = tidyTypeEnv omit_prags type_env2

               -- See Note [Injecting implicit bindings]
             ; all_tidy_binds = implicit_binds ++ tidy_binds

               -- get the TyCons to generate code for. Careful! We must use
               -- the untidied TypeEnv here, because we need
               -- (a) implicit TyCons arising from types and classes defined
               -- in this module
               -- (b) wired-in TyCons, which are normally removed from the
               -- TypeEnv we put in the ModDetails
               -- (c) Constructors even if they are not exported (the
               -- tidied TypeEnv has trimmed these away)
             ; alg_tycons = filter isAlgTyCon (typeEnvTyCons type_env)
             }
       ; endPassIO hsc_env print_unqual CoreTidy all_tidy_binds tidy_rules
         -- If the endPass didn't print the rules, but ddump-rules is
         -- on, print now
       ; unless (dopt Opt_D_dump_simpl dflags) $
             Err.dumpIfSet_dyn dflags Opt_D_dump_rules
               (showSDoc dflags (ppr CoreTidy <+> ptext (sLit "rules")))
               (pprRulesForUser tidy_rules)
         -- Print one-line size info
       ; let cs = coreBindsStats tidy_binds
       ; when (dopt Opt_D_dump_core_stats dflags)
              (log_action dflags dflags SevDump noSrcSpan defaultDumpStyle
                          (ptext (sLit "Tidy size (terms,types,coercions)")
                           <+> ppr (moduleName mod) <> colon
                           <+> int (cs_tm cs)
                           <+> int (cs_ty cs)
                           <+> int (cs_co cs) ))
       ; return (CgGuts { cg_module = mod,
                          cg_tycons = alg_tycons,
                          cg_binds = all_tidy_binds,
                          cg_foreign = foreign_stubs,
                          cg_dep_pkgs = map fst $ dep_pkgs deps,
                          cg_hpc_info = hpc_info,
                          cg_modBreaks = modBreaks },

                 ModDetails { md_types = tidy_type_env,
                              md_rules = tidy_rules,
                              md_insts = tidy_cls_insts,
                              md_vect_info = tidy_vect_info,
                              md_fam_insts = fam_insts,
                              md_exports = exports,
                              md_anns = anns -- are already tidy
                            })
       }
-- | Look up an Id that is expected to be present in the type env;
-- panics (compiler bug) if it is missing or is not an Id.
lookup_aux_id :: TypeEnv -> Var -> Id
lookup_aux_id env var =
  case lookupTypeEnv env (idName var) of
    Just (AnId var') -> var'
    _                -> pprPanic "lookup_aux_id" (ppr var)
-- | Trim the completed type environment. The env is gotten from
--   (a) the types and classes defined here (plus implicit things), and
--   (b) Ids with correct IdInfo, including unfoldings, from the
--       bindings — of which only those with External names survive
--       CoreTidy, i.e. exactly the externally-accessible ones.
-- This truncation saves space in the interface; see
-- Note [Don't attempt to trim data types] for why constructors stay.
tidyTypeEnv :: Bool -- ^ compiling without -O, so omit pragmas
            -> TypeEnv -> TypeEnv
tidyTypeEnv omit_prags type_env = trimmed_env
  where
    -- (1) remove wired-in things (never wanted in interface files)
    no_wired_env = filterNameEnv (not . isWiredInName . getName) type_env
    -- (2) without -O, additionally strip the IdInfo
    trimmed_env
      | omit_prags = mapNameEnv trimThing no_wired_env
      | otherwise  = no_wired_env
--------------------------
-- | Trim off inessential IdInfo, for boot files and compilation
-- without -O. Only non-implicit Ids are affected; everything else is
-- passed through unchanged.
trimThing :: TyThing -> TyThing
trimThing thing = case thing of
  AnId id | not (isImplicitId id) -> AnId (id `setIdInfo` vanillaIdInfo)
  _ -> thing
-- | Add an 'AConLike' entry to the type env for every (tidied)
-- pattern synonym.
extendTypeEnvWithPatSyns :: [PatSyn] -> TypeEnv -> TypeEnv
extendTypeEnvWithPatSyns tidy_patsyns type_env
  = extendTypeEnvList type_env (map (AConLike . PatSynCon) tidy_patsyns)
-- | Tidy the vectorisation info so that only mappings between
-- external, exported Ids survive into the interface.
tidyVectInfo :: TidyEnv -> VectInfo -> VectInfo
tidyVectInfo (_, var_env) info@(VectInfo { vectInfoVar = vars
                                         , vectInfoParallelVars = parallelVars
                                         })
  = info { vectInfoVar = tidy_vars
         , vectInfoParallelVars = tidy_parallelVars
         }
  where
    -- we only export mappings whose domain and co-domain is exported
    -- (otherwise, the iface is inconsistent)
    tidy_vars = mkVarEnv [ (tidy_var, (tidy_var, tidy_var_v))
                         | (var, var_v) <- varEnvElts vars
                         , let tidy_var = lookup_var var
                               tidy_var_v = lookup_var var_v
                         , isExternalId tidy_var && isExportedId tidy_var
                         , isExternalId tidy_var_v && isExportedId tidy_var_v
                         , isDataConWorkId var || not (isImplicitId var)
                         ]

    tidy_parallelVars = mkVarSet [ tidy_var
                                 | var <- varSetElems parallelVars
                                 , let tidy_var = lookup_var var
                                 , isExternalId tidy_var && isExportedId tidy_var
                                 ]

    lookup_var var = lookupWithDefaultVarEnv var_env var var

    -- We need to make sure that all names getting into the iface version
    -- of 'VectInfo' are external; otherwise, 'MkIface' will bomb out.
    isExternalId = isExternalName . idName
{-
Note [Don't attempt to trim data types]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
For some time GHC tried to avoid exporting the data constructors
of a data type if it wasn't strictly necessary to do so; see Trac #835.
But "strictly necessary" accumulated a longer and longer list
of exceptions, and finally I gave up the battle:
commit 9a20e540754fc2af74c2e7392f2786a81d8d5f11
Author: Simon Peyton Jones <simonpj@microsoft.com>
Date: Thu Dec 6 16:03:16 2012 +0000
Stop attempting to "trim" data types in interface files
Without -O, we previously tried to make interface files smaller
by not including the data constructors of data types. But
there are a lot of exceptions, notably when Template Haskell is
involved or, more recently, DataKinds.
However Trac #7445 shows that even without TemplateHaskell, using
the Data class and invoking Language.Haskell.TH.Quote.dataToExpQ
is enough to require us to expose the data constructors.
So I've given up on this "optimisation" -- it's probably not
important anyway. Now I'm simply not attempting to trim off
the data constructors. The gain in simplicity is worth the
modest cost in interface file growth, which is limited to the
bits reqd to describe those data constructors.
************************************************************************
* *
Implicit bindings
* *
************************************************************************
Note [Injecting implicit bindings]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We inject the implicit bindings right at the end, in CoreTidy.
Some of these bindings, notably record selectors, are not
constructed in an optimised form. E.g. record selector for
data T = MkT { x :: {-# UNPACK #-} !Int }
Then the unfolding looks like
x = \t. case t of MkT x1 -> let x = I# x1 in x
This generates bad code unless it's first simplified a bit. That is
why CoreUnfold.mkImplicitUnfolding uses simpleOptExpr to do a bit of
optimisation first. (Only matters when the selector is used curried;
eg map x ys.) See Trac #2070.
[Oct 09: in fact, record selectors are no longer implicit Ids at all,
because we really do want to optimise them properly. They are treated
much like any other Id. But doing "light" optimisation on an implicit
Id still makes sense.]
At one time I tried injecting the implicit bindings *early*, at the
beginning of SimplCore. But that gave rise to real difficulty,
because GlobalIds are supposed to have *fixed* IdInfo, but the
simplifier and other core-to-core passes mess with IdInfo all the
time. The straw that broke the camels back was when a class selector
got the wrong arity -- ie the simplifier gave it arity 2, whereas
importing modules were expecting it to have arity 1 (Trac #2844).
It's much safer just to inject them right at the end, after tidying.
Oh: two other reasons for injecting them late:
- If implicit Ids are already in the bindings when we start TidyPgm,
we'd have to be careful not to treat them as external Ids (in
the sense of chooseExternalIds); else the Ids mentioned in *their*
RHSs will be treated as external and you get an interface file
saying a18 = <blah>
but nothing referring to a18 (because the implicit Id is the
one that does, and implicit Ids don't appear in interface files).
- More seriously, the tidied type-envt will include the implicit
Id replete with a18 in its unfolding; but we won't take account
of a18 when computing a fingerprint for the class; result chaos.
There is one sort of implicit binding that is injected still later,
namely those for data constructor workers. Reason (I think): it's
really just a code generation trick.... binding itself makes no sense.
See Note [Data constructor workers] in CorePrep.
-}
getTyConImplicitBinds :: TyCon -> [CoreBind]
-- | One binding per data-constructor wrapper of the TyCon (constructors
-- without a wrapper contribute nothing).
getTyConImplicitBinds tc = map get_defn wrap_ids
  where
    wrap_ids = mapMaybe dataConWrapId_maybe (tyConDataCons tc)
getClassImplicitBinds :: Class -> [CoreBind]
-- | One binding per class selector; the selector at position i extracts
-- the i'th superclass/method from the dictionary.
getClassImplicitBinds cls
  = zipWith mk_bind (classAllSelIds cls) [0..]
  where
    mk_bind op val_index = NonRec op (mkDictSelRhs cls val_index)
get_defn :: Id -> CoreBind
-- An implicit Id carries its defining RHS inside its unfolding;
-- turn that template back into a top-level NonRec binding.
get_defn id = NonRec id (unfoldingTemplate (realIdUnfolding id))
{-
************************************************************************
* *
\subsection{Step 1: finding externals}
* *
************************************************************************
See Note [Choosing external names].
-}
type UnfoldEnv  = IdEnv (Name{-new name-}, Bool {-show unfolding-})
  -- Maps each top-level Id to its new Name (the Id is tidied in step 2)
  -- The Unique is unchanged.  If the new Name is external, it will be
  -- visible in the interface file.
  --
  -- Bool => expose unfolding or not.
  --
  -- Built by chooseExternalIds; consulted later by findExternalRules
  -- and tidyTopBind.
chooseExternalIds :: HscEnv
                  -> Module
                  -> Bool -> Bool
                  -> [CoreBind]
                  -> [CoreBind]
                  -> [CoreRule]
                  -> VarEnv (Var, Var)
                  -> IO (UnfoldEnv, TidyOccEnv)
-- Step 1 from the notes above: decide which Ids are externally visible,
-- give every top-level Id a tidy Name, and record whether its unfolding
-- should be exposed.  In IO because it allocates Uniques from the
-- NameCache.
chooseExternalIds hsc_env mod omit_prags expose_all binds implicit_binds imp_id_rules vect_vars
  = do { (unfold_env1,occ_env1) <- search init_work_list emptyVarEnv init_occ_env
       ; let internal_ids = filter (not . (`elemVarEnv` unfold_env1)) binders
       ; tidy_internal internal_ids unfold_env1 occ_env1 }
 where
  nc_var = hsc_NC hsc_env

  -- init_ext_ids is the initial list of Ids that should be
  -- externalised.  It serves as the starting point for finding a
  -- deterministic, tidy, renaming for all external Ids in this
  -- module.
  --
  -- It is sorted, so that it has a deterministic order (i.e. it's the
  -- same list every time this module is compiled), in contrast to the
  -- bindings, which are ordered non-deterministically.
  init_work_list = zip init_ext_ids init_ext_ids
  init_ext_ids   = sortBy (compare `on` getOccName) $ filter is_external binders

  -- An Id should be external if either (a) it is exported,
  -- (b) it appears in the RHS of a local rule for an imported Id, or
  -- (c) it is the vectorised version of an imported Id
  -- See Note [Which rules to expose]
  is_external id = isExportedId id || id `elemVarSet` rule_rhs_vars || id `elemVarSet` vect_var_vs
  rule_rhs_vars  = mapUnionVarSet ruleRhsFreeVars imp_id_rules
  vect_var_vs    = mkVarSet [var_v | (var, var_v) <- nameEnvElts vect_vars, isGlobalId var]

  binders          = bindersOfBinds binds
  implicit_binders = bindersOfBinds implicit_binds
  binder_set       = mkVarSet binders

  avoids   = [getOccName name | bndr <- binders ++ implicit_binders,
                                let name = idName bndr,
                                isExternalName name ]
        -- In computing our "avoids" list, we must include
        --      all implicit Ids
        --      all things with global names (assigned once and for
        --          all by the renamer)
        -- since their names are "taken".
        -- The type environment is a convenient source of such things.
        -- In particular, the set of binders doesn't include
        -- implicit Ids at this stage.

        -- We also make sure to avoid any exported binders.  Consider
        --      f{-u1-} = 1     -- Local decl
        --      ...
        --      f{-u2-} = 2     -- Exported decl
        --
        -- The second exported decl must 'get' the name 'f', so we
        -- have to put 'f' in the avoids list before we get to the first
        -- decl.  tidyTopId then does a no-op on exported binders.
  init_occ_env = initTidyOccEnv avoids

  search :: [(Id,Id)]    -- The work-list: (external id, referring id)
                         -- Make a tidy, external Name for the external id,
                         --   add it to the UnfoldEnv, and do the same for the
                         --   transitive closure of Ids it refers to
                         -- The referring id is used to generate a tidy
                         --- name for the external id
         -> UnfoldEnv    -- id -> (new Name, show_unfold)
         -> TidyOccEnv   -- occ env for choosing new Names
         -> IO (UnfoldEnv, TidyOccEnv)

  search [] unfold_env occ_env = return (unfold_env, occ_env)

  search ((idocc,referrer) : rest) unfold_env occ_env
    -- Already processed: skip (an Id can be reached via several paths)
    | idocc `elemVarEnv` unfold_env = search rest unfold_env occ_env
    | otherwise = do
      (occ_env', name') <- tidyTopName mod nc_var (Just referrer) occ_env idocc
      let
          (new_ids, show_unfold)
                | omit_prags = ([], False)
                | otherwise  = addExternal expose_all refined_id

                -- add vectorised version if any exists
          new_ids' = new_ids ++ maybeToList (fmap snd $ lookupVarEnv vect_vars idocc)

                -- 'idocc' is an *occurrence*, but we need to see the
                -- unfolding in the *definition*; so look up in binder_set
          refined_id = case lookupVarSet binder_set idocc of
                         Just id -> id
                         Nothing -> {-WARN( True, ppr idocc )-} idocc

          unfold_env' = extendVarEnv unfold_env idocc (name',show_unfold)
          -- Prefer an exported Id as the referrer for names further
          -- down the chain; it gives more recognisable tidy names
          referrer' | isExportedId refined_id = refined_id
                    | otherwise               = referrer
      --
      search (zip new_ids' (repeat referrer') ++ rest) unfold_env' occ_env'

  -- All remaining (purely internal) binders still need tidy local names
  tidy_internal :: [Id] -> UnfoldEnv -> TidyOccEnv
                -> IO (UnfoldEnv, TidyOccEnv)
  tidy_internal []       unfold_env occ_env = return (unfold_env,occ_env)
  tidy_internal (id:ids) unfold_env occ_env = do
      (occ_env', name') <- tidyTopName mod nc_var Nothing occ_env id
      let unfold_env' = extendVarEnv unfold_env id (name',False)
      tidy_internal ids unfold_env' occ_env'
addExternal :: Bool -> Id -> ([Id], Bool)
-- Decide whether to expose this (external) Id's unfolding, and if so
-- return the Ids its unfolding/rules mention (in deterministic order)
-- so they can be externalised too.
addExternal expose_all id = (new_needed_ids, show_unfold)
  where
    new_needed_ids = bndrFvsInOrder show_unfold id
    idinfo         = idInfo id
    show_unfold    = show_unfolding (unfoldingInfo idinfo)
    never_active   = isNeverActive (inlinePragmaActivation (inlinePragInfo idinfo))
    loop_breaker   = isStrongLoopBreaker (occInfo idinfo)
    bottoming_fn   = isBottomingSig (strictnessInfo idinfo)

        -- Stuff to do with the Id's unfolding
        -- We leave the unfolding there even if there is a worker
        -- In GHCi the unfolding is used by importers

    show_unfolding (CoreUnfolding { uf_src = src, uf_guidance = guidance })
       = expose_all         -- 'expose_all' says to expose all
                            -- unfoldings willy-nilly

       || isStableSource src     -- Always expose things whose
                                 -- source is an inline rule

       || not (bottoming_fn      -- No need to inline bottom functions
           || never_active       -- Or ones that say not to
           || loop_breaker       -- Or that are loop breakers
           || neverUnfoldGuidance guidance)
    show_unfolding (DFunUnfolding {}) = True
    show_unfolding _                  = False
{-
************************************************************************
* *
Deterministic free variables
* *
************************************************************************
We want a deterministic free-variable list. exprFreeVars gives us
a VarSet, which is in a non-deterministic order when converted to a
list. Hence, here we define a free-variable finder that returns
the free variables in the order that they are encountered.
See Note [Choosing external names]
-}
bndrFvsInOrder :: Bool -> Id -> [Id]
-- Free variables of the Id's rules and unfolding, collected in a
-- deterministic (encounter-based) order; the Bool says whether to look
-- inside a vanilla unfolding too (see dffvLetBndr).
bndrFvsInOrder show_unfold id
  = run (dffvLetBndr show_unfold id)
run :: DFFV () -> [Id]
-- Run a free-variable collector starting with an empty in-scope set
-- and an empty accumulator, returning the accumulated Id list.
run (DFFV m) = case m emptyVarSet (emptyVarSet, []) of
                 ((_,ids),_) -> ids
-- Deterministic free-variable finder: a state monad threading the set
-- (for O(1) membership) and list (for order) of free vars found so far,
-- in an environment of locally-bound variables to ignore.
newtype DFFV a
  = DFFV (VarSet              -- Envt: non-top-level things that are in scope
                              -- we don't want to record these as free vars
      -> (VarSet, [Var])      -- Input State: (set, list) of free vars so far
      -> ((VarSet,[Var]),a))  -- Output state
instance Functor DFFV where
    fmap = liftM

instance Applicative DFFV where
    -- Define 'pure' directly (rather than the old "pure = return")
    -- so that 'return' can default to 'pure', as the AMP intends.
    pure a = DFFV $ \_ st -> (st, a)
    (<*>) = ap

instance Monad DFFV where
    return = pure
    -- Thread the environment unchanged and the state left-to-right.
    (DFFV m) >>= k = DFFV $ \env st ->
      case m env st of
        (st',a) -> case k a of
                     DFFV f -> f env st'
extendScope :: Var -> DFFV a -> DFFV a
-- | Run the action with one extra variable in scope (so occurrences of
-- it are not recorded as free).
extendScope v (DFFV f) = DFFV g
  where
    g env st = f (extendVarSet env v) st
extendScopeList :: [Var] -> DFFV a -> DFFV a
-- | Run the action with several extra variables in scope.
extendScopeList vs (DFFV f) = DFFV g
  where
    g env st = f (extendVarSetList env vs) st
insert :: Var -> DFFV ()
-- | Record @v@ as a free variable, unless it is a global, is locally
-- bound (in the env), or has already been seen (in the set).
insert v = DFFV go
  where
    go env st@(set, ids)
      | isLocalId v
      , not (v `elemVarSet` env)
      , not (v `elemVarSet` set)
      = ((extendVarSet set v, v : ids), ())
      | otherwise
      = (st, ())
dffvExpr :: CoreExpr -> DFFV ()
-- Walk an expression, recording free Ids in encounter order.
-- Binders (Lam, Let, Case) are pushed into scope before their bodies;
-- breakpoint ticks mention Ids, so those are recorded too.
-- Types and coercions are ignored (we only want Ids).
dffvExpr (Var v)              = insert v
dffvExpr (App e1 e2)          = dffvExpr e1 >> dffvExpr e2
dffvExpr (Lam v e)            = extendScope v (dffvExpr e)
dffvExpr (Tick (Breakpoint _ ids) e) = mapM_ insert ids >> dffvExpr e
dffvExpr (Tick _other e)      = dffvExpr e
dffvExpr (Cast e _)           = dffvExpr e
dffvExpr (Let (NonRec x r) e) = dffvBind (x,r) >> extendScope x (dffvExpr e)
dffvExpr (Let (Rec prs) e)    = extendScopeList (map fst prs) $
                                (mapM_ dffvBind prs >> dffvExpr e)
dffvExpr (Case e b _ as)      = dffvExpr e >> extendScope b (mapM_ dffvAlt as)
dffvExpr _other               = return ()
dffvAlt :: (t, [Var], CoreExpr) -> DFFV ()
-- | Case alternative: its binders scope over its right-hand side.
dffvAlt (_con, bndrs, rhs) = extendScopeList bndrs (dffvExpr rhs)
dffvBind :: (Id, CoreExpr) -> DFFV ()
-- | Free vars of one binding pair: the RHS, plus (for an Id binder)
-- the binder's rules and stable unfolding.
dffvBind (x, rhs)
  | isId x    = dffvLetBndr False x >> dffvExpr rhs
                -- Pass False because we are doing the RHS right here
                -- If you say True you'll get *exponential* behaviour!
  | otherwise = dffvExpr rhs
dffvLetBndr :: Bool -> Id -> DFFV ()
-- Gather the free vars of the RULES and unfolding of a binder
-- We always get the free vars of a *stable* unfolding, but
-- for a *vanilla* one (InlineRhs), the flag controls what happens:
--   True <=> get fvs of even a *vanilla* unfolding
--   False <=> ignore an InlineRhs
-- For nested bindings (call from dffvBind) we always say "False" because
--       we are taking the fvs of the RHS anyway
-- For top-level bindings (call from addExternal, via bndrFvsInOrder)
--       we say "True" if we are exposing that unfolding
dffvLetBndr vanilla_unfold id
  = do { go_unf (unfoldingInfo idinfo)
       ; mapM_ go_rule (specInfoRules (specInfo idinfo)) }
  where
    idinfo = idInfo id

    -- Stable unfoldings are always traversed; a vanilla InlineRhs only
    -- when the caller asked for it (see flag discussion above)
    go_unf (CoreUnfolding { uf_tmpl = rhs, uf_src = src })
       = case src of
           InlineRhs | vanilla_unfold -> dffvExpr rhs
                     | otherwise      -> return ()
           _                          -> dffvExpr rhs

    go_unf (DFunUnfolding { df_bndrs = bndrs, df_args = args })
             = extendScopeList bndrs $ mapM_ dffvExpr args
    go_unf _ = return ()

    -- Built-in rules have no Core RHS to traverse
    go_rule (BuiltinRule {}) = return ()
    go_rule (Rule { ru_bndrs = bndrs, ru_rhs = rhs })
      = extendScopeList bndrs (dffvExpr rhs)
{-
************************************************************************
* *
findExternalRules
* *
************************************************************************
Note [Finding external rules]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The complete rules are gotten by combining
a) local rules for imported Ids
b) rules embedded in the top-level Ids
There are two complications:
* Note [Which rules to expose]
* Note [Trimming auto-rules]
Note [Which rules to expose]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The function 'expose_rule' filters out rules that mention, on the LHS,
Ids that aren't externally visible; these rules can't fire in a client
module.
The externally-visible binders are computed (by chooseExternalIds)
assuming that all orphan rules are externalised (see init_ext_ids in
function 'search'). So in fact it's a bit conservative and we may
export more than we need. (It's a sort of mutual recursion.)
Note [Trimming auto-rules]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Second, with auto-specialisation we may specialise local or imported
dfuns or INLINE functions, and then later inline them. That may leave
behind something like
RULE "foo" forall d. f @ Int d = f_spec
where f is either local or imported, and there is no remaining
reference to f_spec except from the RULE.
Now that RULE *might* be useful to an importing module, but that is
purely speculative, and meanwhile the code is taking up space and
codegen time. So it seems better to drop the binding for f_spec if
the auto-generated rule is the *only* reason that it is being kept
alive.
(The RULE still might have been useful in the past; that is, it was
the right thing to have generated it in the first place. See Note
[Inline specialisations] in Specialise. But now it has served its
purpose, and can be discarded.)
So findExternalRules does this:
* Remove all bindings that are kept alive *only* by isAutoRule rules
(this is done in trim_binds)
* Remove all auto rules that mention bindings that have been removed
(this is done by filtering by keep_rule)
So if a binding is kept alive for some *other* reason (e.g. f_spec is
called in the final code), we keep the rule too.
I found that binary sizes jumped by 6-10% when I started to specialise
INLINE functions (again, Note [Inline specialisations] in Specialise).
Adding trimAutoRules removed all this bloat.
-}
findExternalRules :: Bool       -- Omit pragmas
                  -> [CoreBind]
                  -> [CoreRule] -- Local rules for imported fns
                  -> UnfoldEnv  -- Ids that are exported, so we need their rules
                  -> ([CoreBind], [CoreRule])
-- Trim bindings that are kept alive only by auto-generated rules, and
-- collect the rules worth putting in the interface file.
-- See Note [Finding external rules]
findExternalRules omit_prags binds imp_id_rules unfold_env
  = (trimmed_binds, filter keep_rule all_rules)
  where
    imp_rules         = filter expose_rule imp_id_rules
    imp_user_rule_fvs = mapUnionVarSet user_rule_rhs_fvs imp_rules

    -- Auto rules don't keep their RHS free vars alive on their own;
    -- user-written rules do.  See Note [Trimming auto-rules]
    user_rule_rhs_fvs rule | isAutoRule rule = emptyVarSet
                           | otherwise       = ruleRhsFreeVars rule

    (trimmed_binds, local_bndrs, _, all_rules) = trim_binds binds

    keep_rule rule = ruleFreeVars rule `subVarSet` local_bndrs
        -- Remove rules that make no sense, because they mention a
        -- local binder (on LHS or RHS) that we have now discarded.
        -- (NB: ruleFreeVars only includes LocalIds)
        --
        -- LHS: we have already filtered out rules that mention internal Ids
        --     on LHS but that isn't enough because we might have by now
        --     discarded a binding with an external Id. (How?
        --     chooseExternalIds is a bit conservative.)
        --
        -- RHS: the auto rules that might mention a binder that has
        --      been discarded; see Note [Trimming auto-rules]

    expose_rule rule
        | omit_prags = False
        | otherwise  = all is_external_id (varSetElems (ruleLhsFreeIds rule))
                -- Don't expose a rule whose LHS mentions a locally-defined
                -- Id that is completely internal (i.e. not visible to an
                -- importing module).  NB: ruleLhsFreeIds only returns LocalIds.
                -- See Note [Which rules to expose]

    -- An Id is externally visible iff chooseExternalIds gave it an
    -- external Name
    is_external_id id = case lookupVarEnv unfold_env id of
                          Just (name, _) -> isExternalName name
                          Nothing        -> False

    trim_binds :: [CoreBind]
               -> ( [CoreBind]   -- Trimmed bindings
                  , VarSet       -- Binders of those bindings
                  , VarSet       -- Free vars of those bindings + rhs of user rules
                                 -- (we don't bother to delete the binders)
                  , [CoreRule])  -- All rules, imported + from the bindings
    -- This function removes unnecessary bindings, and gathers up rules from
    -- the bindings we keep.  See Note [Trimming auto-rules]
    -- NB: processed back-to-front, so 'needed_fvs' already reflects all
    -- later bindings when we decide whether to keep this one.
    trim_binds []  -- Base case, start with imp_user_rule_fvs
       = ([], emptyVarSet, imp_user_rule_fvs, imp_rules)

    trim_binds (bind:binds)
       | any needed bndrs    -- Keep binding
       = ( bind : binds', bndr_set', needed_fvs', local_rules ++ rules )
       | otherwise           -- Discard binding altogether
       = stuff
       where
         stuff@(binds', bndr_set, needed_fvs, rules)
                       = trim_binds binds
         needed bndr   = isExportedId bndr || bndr `elemVarSet` needed_fvs

         bndrs         = bindersOf  bind
         rhss          = rhssOfBind bind
         bndr_set'     = bndr_set `extendVarSetList` bndrs

         needed_fvs'   = needed_fvs                                   `unionVarSet`
                         mapUnionVarSet idUnfoldingVars   bndrs       `unionVarSet`
                              -- Ignore type variables in the type of bndrs
                         mapUnionVarSet exprFreeVars      rhss        `unionVarSet`
                         mapUnionVarSet user_rule_rhs_fvs local_rules
            -- In needed_fvs', we don't bother to delete binders from the fv set

         local_rules  = [ rule
                        | id <- bndrs
                        , is_external_id id   -- Only collect rules for external Ids
                        , rule <- idCoreRules id
                        , expose_rule rule ]  -- and ones that can fire in a client
{-
************************************************************************
* *
tidyTopName
* *
************************************************************************
This is where we set names to local/global based on whether they really are
externally visible (see comment at the top of this module). If the name
was previously local, we have to give it a unique occurrence name if
we intend to externalise it.
-}
tidyTopName :: Module -> IORef NameCache -> Maybe Id -> TidyOccEnv
            -> Id -> IO (TidyOccEnv, Name)
-- Give a top-level Id its final Name.  The four cases below cover the
-- product of {global,local} name x {internal,external} visibility; the
-- Maybe Id is the "referrer" used to build a readable occurrence name
-- when externalising a local Id (Nothing <=> keep it internal).
tidyTopName mod nc_var maybe_ref occ_env id
  | global && internal = return (occ_env, localiseName name)

  | global && external = return (occ_env, name)
        -- Global names are assumed to have been allocated by the renamer,
        -- so they already have the "right" unique
        -- And it's a system-wide unique too

  -- Now we get to the real reason that all this is in the IO Monad:
  -- we have to update the name cache in a nice atomic fashion

  | local  && internal = do { new_local_name <- atomicModifyIORef nc_var mk_new_local
                            ; return (occ_env', new_local_name) }
        -- Even local, internal names must get a unique occurrence, because
        -- if we do -split-objs we externalise the name later, in the code generator
        --
        -- Similarly, we must make sure it has a system-wide Unique, because
        -- the byte-code generator builds a system-wide Name->BCO symbol table

  | local  && external = do { new_external_name <- atomicModifyIORef nc_var mk_new_external
                            ; return (occ_env', new_external_name) }

  | otherwise = panic "tidyTopName"
  where
    name        = idName id
    external    = isJust maybe_ref
    global      = isExternalName name
    local       = not global
    internal    = not external
    loc         = nameSrcSpan name

    old_occ     = nameOccName name
    new_occ
      | Just ref <- maybe_ref, ref /= id =
          -- Derive the new occurrence name from the referrer's
          mkOccName (occNameSpace old_occ) $
             let
                 ref_str = occNameString (getOccName ref)
                 occ_str = occNameString old_occ
             in
             case occ_str of
               '$':'w':_ -> occ_str
                  -- workers: the worker for a function already
                  -- includes the occname for its parent, so there's
                  -- no need to prepend the referrer.
               _other | isSystemName name -> ref_str
                      | otherwise         -> ref_str ++ '_' : occ_str
                  -- If this name was system-generated, then don't bother
                  -- to retain its OccName, just use the referrer.  These
                  -- system-generated names will become "f1", "f2", etc. for
                  -- a referrer "f".
      | otherwise = old_occ

    (occ_env', occ') = tidyOccName occ_env new_occ

    mk_new_local nc = (nc { nsUniqs = us }, mkInternalName uniq occ' loc)
                    where
                      (uniq, us) = takeUniqFromSupply (nsUniqs nc)

    mk_new_external nc = allocateGlobalBinder nc mod occ' loc
        -- If we want to externalise a currently-local name, check
        -- whether we have already assigned a unique for it.
        -- If so, use it; if not, extend the table.
        -- All this is done by allocateGlobalBinder.
        -- This is needed when *re*-compiling a module in GHCi; we must
        -- use the same name for externally-visible things as we did before.
{-
************************************************************************
* *
\subsection{Step 2: top-level tidying}
* *
************************************************************************
-}
-- TopTidyEnv: when tidying we need to know
-- * nc_var: The NameCache, containing a unique supply and any pre-ordained Names.
-- These may have arisen because the
-- renamer read in an interface file mentioning M.$wf, say,
-- and assigned it unique r77. If, on this compilation, we've
-- invented an Id whose name is $wf (but with a different unique)
-- we want to rename it to have unique r77, so that we can do easy
-- comparisons with stuff from the interface file
--
-- * occ_env: The TidyOccEnv, which tells us which local occurrences
-- are 'used'
--
-- * subst_env: A Var->Var mapping that substitutes the new Var for the old
tidyTopBinds :: HscEnv
             -> Module
             -> UnfoldEnv
             -> TidyOccEnv
             -> CoreProgram
             -> IO (TidyEnv, CoreProgram)
-- Step 2: tidy each top-level binding left-to-right, threading the
-- TidyEnv so later bindings see the renamings of earlier ones.
-- IO only to look up the Integer-literal desugaring names (needed by
-- the CAF analysis; see Note [Disgusting computation of CafRefs]).
tidyTopBinds hsc_env this_mod unfold_env init_occ_env binds
  = do mkIntegerId <- lookupMkIntegerName dflags hsc_env
       integerSDataCon <- lookupIntegerSDataConName dflags hsc_env
       let cvt_integer = cvtLitInteger dflags mkIntegerId integerSDataCon
       return $ tidy cvt_integer init_env binds
  where
    dflags = hsc_dflags hsc_env

    init_env = (init_occ_env, emptyVarEnv)

    -- Left-to-right accumulating pass over the bindings
    tidy _           env []     = (env, [])
    tidy cvt_integer env (b:bs)
        = let (env1, b')  = tidyTopBind dflags this_mod
                                        cvt_integer unfold_env env b
              (env2, bs') = tidy cvt_integer env1 bs
          in (env2, b':bs')
------------------------
tidyTopBind  :: DynFlags
             -> Module
             -> (Integer -> CoreExpr)
             -> UnfoldEnv
             -> TidyEnv
             -> CoreBind
             -> (TidyEnv, CoreBind)
-- Tidy one top-level binding.  NB: 'tidy_env2' is knot-tied — it is
-- passed to tidyTopPair before it is fully defined; tidyTopPair must
-- not force it (see its comments).

tidyTopBind dflags this_mod cvt_integer unfold_env
            (occ_env,subst1) (NonRec bndr rhs)
  = (tidy_env2, NonRec bndr' rhs')
  where
    Just (name',show_unfold) = lookupVarEnv unfold_env bndr
    caf_info      = hasCafRefs dflags this_mod (subst1, cvt_integer) (idArity bndr) rhs
    (bndr', rhs') = tidyTopPair dflags show_unfold tidy_env2 caf_info name' (bndr, rhs)
    subst2        = extendVarEnv subst1 bndr bndr'
    tidy_env2     = (occ_env, subst2)

tidyTopBind dflags this_mod cvt_integer unfold_env
            (occ_env, subst1) (Rec prs)
  = (tidy_env2, Rec prs')
  where
    prs' = [ tidyTopPair dflags show_unfold tidy_env2 caf_info name' (id,rhs)
           | (id,rhs) <- prs,
             let (name',show_unfold) =
                    expectJust "tidyTopBind" $ lookupVarEnv unfold_env id
           ]

    subst2    = extendVarEnvList subst1 (bndrs `zip` map fst prs')
    tidy_env2 = (occ_env, subst2)

    bndrs = map fst prs

    -- the CafInfo for a recursive group says whether *any* rhs in
    -- the group may refer indirectly to a CAF (because then, they all do).
    caf_info
        | or [ mayHaveCafRefs (hasCafRefs dflags this_mod
                                          (subst1, cvt_integer)
                                          (idArity bndr) rhs)
             | (bndr,rhs) <- prs ] = MayHaveCafRefs
        | otherwise                = NoCafRefs
-----------------------------------------------------------
tidyTopPair :: DynFlags
            -> Bool  -- show unfolding
            -> TidyEnv      -- The TidyEnv is used to tidy the IdInfo
                            -- It is knot-tied: don't look at it!
            -> CafInfo
            -> Name         -- New name
            -> (Id, CoreExpr)       -- Binder and RHS before tidying
            -> (Id, CoreExpr)
        -- This function is the heart of Step 2
        -- The rec_tidy_env is the one to use for the IdInfo
        -- It's necessary because when we are dealing with a recursive
        -- group, a variable late in the group might be mentioned
        -- in the IdInfo of one early in the group

tidyTopPair dflags show_unfold rhs_tidy_env caf_info name' (bndr, rhs)
  = (bndr1, rhs1)
  where
    bndr1    = mkGlobalId details name' ty' idinfo'
    details  = idDetails bndr   -- Preserve the IdDetails
    ty'      = tidyTopType (idType bndr)
    rhs1     = tidyExpr rhs_tidy_env rhs
    idinfo'  = tidyTopIdInfo dflags rhs_tidy_env name' rhs rhs1 (idInfo bndr)
                             show_unfold caf_info
-- tidyTopIdInfo creates the final IdInfo for top-level
-- binders.  There are two delicate pieces:
--
--  * Arity.  After CoreTidy, this arity must not change any more.
--      Indeed, CorePrep must eta expand where necessary to make
--      the manifest arity equal to the claimed arity.
--
--  * CAF info.  This must also remain valid through to code generation.
--      We add the info here so that it propagates to all
--      occurrences of the binders in RHSs, and hence to occurrences in
--      unfoldings, which are inside Ids imported by GHCi. Ditto RULES.
--      CoreToStg makes use of this when constructing SRTs.
tidyTopIdInfo :: DynFlags -> TidyEnv -> Name -> CoreExpr -> CoreExpr
              -> IdInfo -> Bool -> CafInfo -> IdInfo
tidyTopIdInfo dflags rhs_tidy_env name orig_rhs tidy_rhs idinfo show_unfold caf_info
  | not is_external     -- For internal Ids (not externally visible)
  = vanillaIdInfo       -- we only need enough info for code generation
                        -- Arity and strictness info are enough;
                        --      c.f. CoreTidy.tidyLetBndr
        `setCafInfo`        caf_info
        `setArityInfo`      arity
        `setStrictnessInfo` final_sig

  | otherwise           -- Externally-visible Ids get the whole lot
  = vanillaIdInfo
        `setCafInfo`        caf_info
        `setArityInfo`      arity
        `setStrictnessInfo` final_sig
        `setOccInfo`        robust_occ_info
        `setInlinePragInfo` (inlinePragInfo idinfo)
        `setUnfoldingInfo`  unfold_info
                -- NB: we throw away the Rules
                -- They have already been extracted by findExternalRules
  where
    is_external = isExternalName name

    --------- OccInfo ------------
    robust_occ_info = zapFragileOcc (occInfo idinfo)
    -- It's important to keep loop-breaker information
    -- when we are doing -fexpose-all-unfoldings

    --------- Strictness ------------
    mb_bot_str = exprBotStrictness_maybe orig_rhs

    sig = strictnessInfo idinfo
    final_sig | not $ isNopSig sig
                 = {-WARN( _bottom_hidden sig , ppr name )-} sig
                 -- try a cheap-and-cheerful bottom analyser

              | Just (_, nsig) <- mb_bot_str = nsig
              | otherwise                    = sig

    -- Sanity check: the recorded signature should not hide bottom-ness
    -- that the cheap analyser can see
    _bottom_hidden id_sig = case mb_bot_str of
                                Nothing         -> False
                                Just (arity, _) -> not (appIsBottom id_sig arity)

    --------- Unfolding ------------
    unf_info = unfoldingInfo idinfo
    unfold_info | show_unfold = tidyUnfolding rhs_tidy_env unf_info unf_from_rhs
                | otherwise   = noUnfolding
    unf_from_rhs = mkTopUnfolding dflags is_bot tidy_rhs
    is_bot = isBottomingSig final_sig
    -- NB: do *not* expose the worker if show_unfold is off,
    --     because that means this thing is a loop breaker or
    --     marked NOINLINE or something like that
    -- This is important: if you expose the worker for a loop-breaker
    -- then you can make the simplifier go into an infinite loop, because
    -- in effect the unfolding is exposed.  See Trac #1709
    --
    -- You might think that if show_unfold is False, then the thing should
    -- not be w/w'd in the first place.  But a legitimate reason is this:
    --    the function returns bottom
    -- In this case, show_unfold will be false (we don't expose unfoldings
    -- for bottoming functions), but we might still have a worker/wrapper
    -- split (see Note [Worker-wrapper for bottoming functions] in WorkWrap.lhs

    --------- Arity ------------
    -- Usually the Id will have an accurate arity on it, because
    -- the simplifier has just run, but not always.
    -- One case I found was when the last thing the simplifier
    -- did was to let-bind a non-atomic argument and then float
    -- it to the top level. So it seems more robust just to
    -- fix it here.
    arity = exprArity orig_rhs
{-
************************************************************************
* *
\subsection{Figuring out CafInfo for an expression}
* *
************************************************************************
hasCafRefs decides whether a top-level closure can point into the dynamic heap.
We mark such things as `MayHaveCafRefs' because this information is
used to decide whether a particular closure needs to be referenced
in an SRT or not.
There are two reasons for setting MayHaveCafRefs:
a) The RHS is a CAF: a top-level updatable thunk.
b) The RHS refers to something that MayHaveCafRefs
Possible improvement: In an effort to keep the number of CAFs (and
hence the size of the SRTs) down, we could also look at the expression and
decide whether it requires a small bounded amount of heap, so we can ignore
it as a CAF. In these cases however, we would need to use an additional
CAF list to keep track of non-collectable CAFs.
Note [Disgusting computation of CafRefs]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We compute hasCafRefs here, because IdInfo is supposed to be finalised
after TidyPgm. But CorePrep does some transformations that affect CAF-hood.
So we have to *predict* the result here, which is revolting.
In particular CorePrep expands Integer literals. So in the prediction code
here we resort to applying the same expansion (cvt_integer). Ugh!
-}
type CafRefEnv = (VarEnv Id, Integer -> CoreExpr)
  -- The env finds the Caf-ness of the Id (maps pre-tidy Id to its
  -- tidied version, whose IdInfo carries the CafInfo)
  -- The Integer -> CoreExpr is the desugaring function for Integer literals
  -- See Note [Disgusting computation of CafRefs]
hasCafRefs :: DynFlags -> Module
           -> CafRefEnv -> Arity -> CoreExpr
           -> CafInfo
-- | An RHS is CAFfy if it either is itself a CAF (an updatable
-- top-level thunk) or mentions something that may have CAF refs.
hasCafRefs dflags this_mod env@(_, cvt_integer) arity expr
  | is_caf || mentions_cafs = MayHaveCafRefs
  | otherwise               = NoCafRefs
  where
    -- Does the RHS refer to anything that may itself be CAFfy?
    mentions_cafs   = isFastTrue (cafRefsE env expr)
    is_dynamic_name = isDllName dflags this_mod
    -- A CAF: zero arity and a right-hand side that is not static
    is_caf = arity <= 0
          && not (rhsIsStatic (targetPlatform dflags) is_dynamic_name cvt_integer expr)

      -- NB. we pass in the arity of the expression, which is expected
      -- to be calculated by exprArity.  This is because exprArity
      -- knows how much eta expansion is going to be done by
      -- CorePrep later on, and we don't want to duplicate that
      -- knowledge in rhsIsStatic below.
cafRefsE :: CafRefEnv -> Expr a -> FastBool
-- | Does this expression mention (transitively, via the env) anything
-- that may have CAF refs?  Purely structural walk; types and coercions
-- contribute nothing.
cafRefsE env expr =
  case expr of
    Var id                  -> cafRefsV env id
    Lit lit                 -> cafRefsL env lit
    App fun arg             -> fastOr (cafRefsE env fun) (cafRefsE env) arg
    Lam _ body              -> cafRefsE env body
    Let bind body           -> fastOr (cafRefsEs env (rhssOfBind bind)) (cafRefsE env) body
    Case scrut _bndr _ alts -> fastOr (cafRefsE env scrut) (cafRefsEs env) (rhssOfAlts alts)
    Tick _n body            -> cafRefsE env body
    Cast body _co           -> cafRefsE env body
    Type _                  -> fastBool False
    Coercion _              -> fastBool False
cafRefsEs :: CafRefEnv -> [Expr a] -> FastBool
-- | Lazy "or" of 'cafRefsE' over a list of expressions.
cafRefsEs env exprs =
  case exprs of
    []       -> fastBool False
    e : rest -> fastOr (cafRefsE env e) (cafRefsEs env) rest
cafRefsL :: CafRefEnv -> Literal -> FastBool
-- Don't forget that mk_integer id might have Caf refs!
-- We first need to convert the Integer into its final form, to
-- see whether mkInteger is used.  Other literals never have CAF refs.
cafRefsL env lit =
  case lit of
    LitInteger i _ -> cafRefsE env (snd env i)
    _other         -> fastBool False
cafRefsV :: CafRefEnv -> Id -> FastBool
-- | CAF-ness of a variable occurrence: global Ids carry their own
-- CafInfo; local Ids are looked up in the substitution (their tidied
-- version has the info); anything else is assumed CAF-free.
cafRefsV (subst, _) id
  | isLocalId id = case lookupVarEnv subst id of
                     Just id' -> fastBool (mayHaveCafRefs (idCafInfo id'))
                     Nothing  -> fastBool False
  | otherwise    = fastBool (mayHaveCafRefs (idCafInfo id))
fastOr :: FastBool -> (a -> FastBool) -> a -> FastBool
-- hack for lazy-or over FastBool: @f x@ is only evaluated when the
-- first argument is false.
fastOr a f x
  | isFastTrue a = fastBool True
  | otherwise    = fastBool (isFastTrue (f x))
{-
------------------------------------------------------------------------------
-- Old, dead, type-trimming code
-------------------------------------------------------------------------------
We used to try to "trim off" the constructors of data types that are
not exported, to reduce the size of interface files, at least without
-O. But that is not always possible: see the old Note [When we can't
trim types] below for exceptions.
Then (Trac #7445) I realised that the TH problem arises for any data type
that we have deriving( Data ), because we can invoke
Language.Haskell.TH.Quote.dataToExpQ
to get a TH Exp representation of a value built from that data type.
You don't even need {-# LANGUAGE TemplateHaskell #-}.
At this point I give up. The pain of trimming constructors just
doesn't seem worth the gain. So I've dumped all the code, and am just
leaving it here at the end of the module in case something like this
is ever resurrected.
Note [When we can't trim types]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The basic idea of type trimming is to export algebraic data types
abstractly (without their data constructors) when compiling without
-O, unless of course they are explicitly exported by the user.
We always export synonyms, because they can be mentioned in the type
of an exported Id. We could do a full dependency analysis starting
from the explicit exports, but that's quite painful, and not done for
now.
But there are some times we can't do that, indicated by the 'no_trim_types' flag.
First, Template Haskell. Consider (Trac #2386) this
module M(T, makeOne) where
data T = Yay String
makeOne = [| Yay "Yep" |]
Notice that T is exported abstractly, but makeOne effectively exports it too!
A module that splices in $(makeOne) will then look for a declaration of Yay,
so it'd better be there. Hence, brutally but simply, we switch off type
constructor trimming if TH is enabled in this module.
Second, data kinds. Consider (Trac #5912)
{-# LANGUAGE DataKinds #-}
module M() where
data UnaryTypeC a = UnaryDataC a
type Bug = 'UnaryDataC
We always export synonyms, so Bug is exposed, and that means that
UnaryTypeC must be too, even though it's not explicitly exported. In
effect, DataKinds means that we'd need to do a full dependency analysis
to see what data constructors are mentioned. But we don't do that yet.
In these two cases we just switch off type trimming altogether.
mustExposeTyCon :: Bool -- Type-trimming flag
-> NameSet -- Exports
-> TyCon -- The tycon
-> Bool -- Can its rep be hidden?
-- We are compiling without -O, and thus trying to write as little as
-- possible into the interface file. But we must expose the details of
-- any data types whose constructors or fields are exported
mustExposeTyCon no_trim_types exports tc
| no_trim_types -- See Note [When we can't trim types]
= True
| not (isAlgTyCon tc) -- Always expose synonyms (otherwise we'd have to
-- figure out whether it was mentioned in the type
-- of any other exported thing)
= True
| isEnumerationTyCon tc -- For an enumeration, exposing the constructors
= True -- won't lead to the need for further exposure
| isFamilyTyCon tc -- Open type family
= True
-- Below here we just have data/newtype decls or family instances
| null data_cons -- Ditto if there are no data constructors
= True -- (NB: empty data types do not count as enumerations
-- see Note [Enumeration types] in TyCon
| any exported_con data_cons -- Expose rep if any datacon or field is exported
= True
| isNewTyCon tc && isFFITy (snd (newTyConRhs tc))
= True -- Expose the rep for newtypes if the rep is an FFI type.
-- For a very annoying reason. 'Foreign import' is meant to
-- be able to look through newtypes transparently, but it
-- can only do that if it can "see" the newtype representation
| otherwise
= False
where
data_cons = tyConDataCons tc
exported_con con = any (`elemNameSet` exports)
(dataConName con : dataConFieldLabels con)
-}
|
pparkkin/eta
|
compiler/ETA/Main/TidyPgm.hs
|
bsd-3-clause
| 63,405
| 0
| 18
| 19,043
| 7,592
| 4,122
| 3,470
| 558
| 5
|
-- Copyright (c) 2015 Eric McCorkle. All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions
-- are met:
--
-- 1. Redistributions of source code must retain the above copyright
-- notice, this list of conditions and the following disclaimer.
--
-- 2. Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
--
-- 3. Neither the name of the author nor the names of any contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS''
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
-- TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
-- PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS
-- OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
-- USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-- ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
-- OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
-- SUCH DAMAGE.
{-# OPTIONS_GHC -funbox-strict-fields -Wall -Werror #-}
-- | This module contains code to convert the statement form of FlatIR
-- to the SSA form.
module IR.FlatIR.SSAConvert(
ssaConvert
) where
import Data.BitArray.ST
import Data.Graph.Inductive.Graph
import IR.FlatIR.Syntax
import Prelude hiding (mapM_, mapM, foldr, foldl, sequence)
type TransState = (Word, [(Word, Type)])
-- | Convert a global to SSA form.  Only changes functions.
--
-- NOTE(review): this equation set is unfinished work-in-progress.  Several
-- local bindings are only sketched (XXX markers), 'traverseCFG' has no body
-- after its @in@, and the final record update ends in a comment hole, so
-- this module does not compile as written.  Also note that the big clause
-- binds the CFG as @body@ but refers throughout to an unbound name @graph@
-- (and to @genStm@, which is not defined in this file) — presumably
-- placeholders for the same thing; confirm before finishing.
ssaConvertGlobal :: Graph gr => Global Stm gr -> Global Bind gr
-- Plain global variables carry no statements: rebuilt field-for-field.
ssaConvertGlobal GlobalVar { gvarName = name, gvarTy = ty, gvarInit = initval,
                             gvarMutability = mut, gvarPos = pos } =
  GlobalVar { gvarName = name, gvarTy = ty, gvarInit = initval,
              gvarMutability = mut, gvarPos = pos }
-- A function declaration with no body has nothing to convert.
ssaConvertGlobal f @ Function { funcBody = Nothing } = f { funcBody = Nothing }
ssaConvertGlobal f @ Function { funcValTys = valtys, funcParams = params,
                                funcBody = Just Body { bodyEntry = entry,
                                                       bodyCFG = body } } =
  let
    -- Get a DFS tree from a DFS forest (we shouldn't have
    -- disconnected nodes).
    [dfstree] = dff [entry] graph
    -- Get the first node in the DFS tree
    entrynode : _ = foldr (:) [] dfstree
    noderange @ (startnode, endnode) = nodeRange graph
    nodelist = nodes graph
    (Id startid, Id endid) = bounds valtys
    valids = indices valtys
    -- How this works: ultimately, we'll do a walk over all the
    -- instructions, mapping Id's in the statement form to Id's in the
    -- SSA form.  This is mostly straightforward; we'll have a map
    -- from old Id's to new Id's that we update whenever there's an
    -- assignment.  The hardest part is figuring out where to place
    -- the Phi-nodes.
    --
    -- To do that, we use the standard dominance frontier technique
    -- and build a bitmap representing which Id's get replaced by
    -- Phi-nodes at the beginnings of which blocks.  Ultimately, we
    -- get a mapping from Node's to lists of (old) Id's, representing which
    -- ones to replace.
    -- First thing we need is a way to figure out which Id's get phi
    -- nodes in a given block.  To this end, we build the phi-sets.
    -- We do the crunching with bitarrays to make it fast.
    phiSet :: Node -> [Id]
    phiSet =
      let
        -- Convert a node and an (old) Id into an index to be used in
        -- the bit array representing the phi-sets.
        getIndex :: Node -> Id -> Int
        getIndex nodeid (Id varname) =
          let
            nodeid' = nodeid - startnode
            varname' = (fromIntegral varname) - (fromIntegral startid)
            nodespan = (fromIntegral endid) - (fromIntegral startid) + 1
          in
            -- Row-major layout: one row of Id bits per node.
            (nodeid' * nodespan) + varname'
        -- The sets of phi-values for each basic block.  This is a bit
        -- array, which we expect to access using getIndex.  For a
        -- given Node and (old) Id, this array contains a bit
        -- indicating whether or not a phi-node needs to be generated
        -- for that Id at the start of that Node.
        phiBits :: BitArray
        phiBits =
          let
            domfronts' = domFrontiers graph entry
            -- Dominance frontiers as an array
            domfronts :: Array Node [Node]
            domfronts = array noderange domfronts'
            -- Add id to the phi-set for node.  Phi-sets are the sets of
            -- phi-nodes to be generated for a given basic block.
            addPhi :: STBitArray -> Node -> Id -> ST Int ()
            addPhi arr nodeid var =
              let
                domset = domfronts ! nodeid
                appfun nodeid' = writeBit arr (getIndex nodeid' var) True
              in
                mapM_ appfun domset
            -- Run through a node, add anything it modifies to the
            -- the phi-set of each node in its dominance frontier.
            buildPhiSet :: STBitArray -> Node -> ST Int ()
            buildPhiSet modset node =
              let
                Just (Block { blockStms = stms }) = lab graph node
                appfun (Move { moveDst = Var { varName = name' } }) =
                  addPhi modset node name'
                appfun _ = return ()
              in do
                mapM_ appfun stms
            -- Run through all nodes, build the phi-sets for them all.
            buildPhiSets :: ST Int BitArray
            buildPhiSets =
              do
                sets <- newBitArray ((getIndex startnode (Id startid)),
                                     (getIndex endnode (Id endid))) False
                mapM_ (buildPhiSet sets) nodelist
                -- unsafeFreeze is fine, this is the only reference to it
                unsafeFreezeBitArray sets
          in
            runST buildPhiSets
        -- Extract a list of (old) Id's representing the phi set for a
        -- given Node.  This indicates which (old) Id's need to have a
        -- phi-node at the start of the Node.
        getPhiSet :: Node -> [Id]
        getPhiSet nodeid =
          let
            foldfun :: [Id] -> Id -> [Id]
            foldfun phiset var =
              if lookupBit phiBits (getIndex nodeid var)
                then return (var : phiset)
                else return phiset
          in
            foldr foldfun [] valids
        -- Save all the phi sets
        phiSets :: Array Node [Id]
        phiSets = array noderange (map (\n -> (n, getPhiSet n)) nodelist)
      in
        (phiSets !)
    -- Update the value map to replace all values with phi-values.
    -- This is meant to be called when entering a block.
    addPhiVals :: Node -> ValMap -> ValMap
    addPhiVals nodeid valmap' =
      let
        -- Get the phi set
        phis = phiSet nodeid
        -- XXX need to get a fresh Id for the phi nodes
        addPhi :: (Id, LLVM.ValueRef) -> ValMap -> ValMap
        addPhi (Id ind, phival) = Map.insert ind (BindLoc phival)
      in do
        foldr addPhi valmap' phis
    -- Now, we go ahead and rename all the (old) Id's.  We do this
    -- by keeping a map from old to new values, updating it for
    -- each statement, and using it to rewrite all the expressions
    -- and transfers.
    --
    -- In order to rewrite a block, we need to know the map at the
    -- end of rewriting all of its predecessors, except for places
    -- where we insert a phi node for a given value.  Because
    -- phi-nodes happen wherever we have a join or a loop, a
    -- depth-first traversal works fine for our rewriting.
    -- Traverse the CFG.  This takes a DFS tree as an argument.
    -- XXX this function needs to return two things: data to build a
    -- new graph, and data to build the phi-nodes.  We will
    -- subsequently map over the node data and add all the phi nodes.
    -- The edge data for the new CFG is taken directly from the old
    -- graph.
    -- XXX This needs to be in a state monad.  We need to build up a
    -- new type array and also generate fresh variable names.
    traverseCFG :: ValMap -> Tree Node
                -> ([(Node, ([Bind], Transfer, Pos))], Map Id Bind)
                -- ^ The first component is a list of node data.  The
                -- second is a map from (old) Id's to Phi-nodes.
    traverseCFG valmapin (Node { rootLabel = curr, subForest = nexts }) =
      let
        -- First, update the valmap with any phis
        valmapphis = addPhiVals curr valmapin
        -- Get the current block data
        Just (Block { blockStms = stms, blockXfer = trans }) = lab graph curr
        -- Walk over the statements, translating them, and updating the valmap
        (valmapstms, ssastms) = mapAccumL genStm valmapphis stms
        -- Translate the end-of-block transfer
        ssatrans = ssaConvertTransfer valmapstms trans
        -- XXX Now we need to recurse down the tree, so that the
        -- phi-builder map will actually contain the phis that we want
        -- it to.
        -- XXX now we need to look at all our successors' phi-sets,
        -- and add the values that we contribute to a map from
        -- phi-values to incoming value lists.
        -- All successors
        successors = suc graph curr
      in
    -- NOTE(review): the body of traverseCFG after the @in@ above is missing.
    -- The last thing we need to do is insert all the phi-nodes
    -- for each block, and connect up their incoming values.
    completeCFG :: Map Id Bind
                -- ^ Map from (old) Ids to (new) phis
                -> [(Node, ([Bind], Transfer, Pos))]
                -- ^ Partial node data
                -> (Node, Block Bind)
                -- ^ Completed node data
    completeCFG phimap nodedata =
      let
        -- Take the partial node data, look up the phi-set, then use
        -- that to get the new phi instruction from the phi map.
        completeBlock :: (Node, ([Bind], Transfer, Pos))
                      -- ^ Partial node data
                      -> (Node, Block Bind)
                      -- ^ Completed node data
        completeBlock phimap (nodeid, (body, trans, p)) =
          let
            -- (old) Id's from the phi-set
            oldids = phiSet nodeid
            -- (new) Phi instructions
            phis = map (phimap !) oldids
          in
            Block { blockBody = phis ++ body, blockXfer = trans, blockPos = p }
        newnodes = map completeBlock nodedata
      in
        mkGraph newnodes (labEdges body)
  in
    f { funcBody = Just Body { bodyEntry = entry, {- XXX the new CFG -} } }
-- | Rename the Id's inside an expression using the value map.
-- NOTE(review): unimplemented — the right-hand side is only an XXX
-- comment, so this equation has no body and does not parse.
ssaConvertExp :: ValMap -> Exp -> Exp
ssaConvertExp valmap ex = -- XXX rename the exp using the valmap
-- | Convert a statement to the SSA form of the language
-- Move's need to be converted into Bind's, and the value map needs to
-- be updated.
-- NOTE(review): the Move clause is incomplete — it is missing the ValMap
-- argument that the signature (and the Do clause) requires, and has no
-- right-hand side, so this definition does not parse as written.
ssaConvertStm :: ValMap -> Stm -> (ValMap, Stm)
ssaConvertStm Move { moveDst = dst, moveSrc = src, movePos = pos } =
-- XXX generate a fresh variable name, update the value map
-- Do's and Effect's are the same thing in both languages
ssaConvertStm valmap (Do ex) = (valmap, Effect (ssaConvertExp valmap ex))
-- | Convert a transfer to the SSA form language.  This process
-- consists solely of mapping identifiers; transfers don't do any
-- binding, and are the same in both languages.
--
-- Fix: the declared type was @ValMap -> Transfer@, but every equation
-- below takes a ValMap AND a Transfer and yields a Transfer, so the
-- signature was missing its result arrow.
ssaConvertTransfer :: ValMap -> Transfer -> Transfer
-- A Case transfer scrutinises a value: rewrite just that value.
ssaConvertTransfer valmap trans @ Case { caseVal = val } =
  trans { caseVal = ssaConvertExp valmap val }
-- A Ret transfer carries the returned value: rewrite it.
ssaConvertTransfer valmap trans @ Ret { retVal = val } =
  trans { retVal = ssaConvertExp valmap val }
-- Every other transfer mentions no values and passes through unchanged.
ssaConvertTransfer _ trans = trans
-- | Convert a whole module to SSA form by converting each of its
-- globals; nothing else in the module changes.
ssaConvert :: Graph gr => Module Stm gr -> Module Bind gr
ssaConvert mod0@Module { modGlobals = globals } =
  mod0 { modGlobals = fmap ssaConvertGlobal globals }
|
emc2/chill
|
src/IR/FlatIR/SSAConvert.hs
|
bsd-3-clause
| 12,283
| 8
| 29
| 3,712
| 1,817
| 1,024
| 793
| -1
| -1
|
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances, DeriveDataTypeable, CPP, OverloadedStrings #-}
{-# OPTIONS -Wall #-}
module Language.Hakaru.Simplify
( closeLoop
, simplify
, toMaple
, openLoop
, main
, Simplifiable(mapleType)
, MapleException(MapleException)
, InterpreterException(InterpreterException) ) where
-- Take strings from Maple and interpret them in Haskell (Hakaru)
import Control.Exception
import Language.Hakaru.Simplifiable (Simplifiable(mapleType))
-- import Language.Hakaru.Expect (Expect, unExpect)
import Language.Hakaru.Maple (Maple, runMaple)
import Language.Hakaru.Any (Any(Any), AnySimplifiable(AnySimplifiable))
import Language.Hakaru.PrettyPrint (runPrettyPrintNamesPrec)
import System.IO (stderr, hPrint, hPutStrLn)
import Data.Typeable (Typeable, typeOf)
import Data.List (tails, stripPrefix)
import Data.Text (replace, pack, unpack)
import Data.Char (isSpace)
import System.MapleSSH (maple)
import Language.Haskell.Interpreter.Unsafe (unsafeRunInterpreterWithArgs)
import Language.Haskell.Interpreter (
#ifdef PATCHED_HINT
unsafeInterpret,
#else
interpret,
#endif
InterpreterError(WontCompile), GhcError(GhcError),
MonadInterpreter, set, get, OptionVal((:=)),
searchPath, languageExtensions, Extension(UnknownExtension),
loadModules, setImports)
import Language.Hakaru.Util.Lex (readMapleString)
import Language.Hakaru.Paths
-- | Raised when Maple's reply cannot be parsed back: carries the text we
-- sent to Maple and the raw reply (see 'simplify'' and the Show instance).
data MapleException       = MapleException String String
  deriving Typeable
-- | Raised when interpreting generated Haskell fails: carries the hint
-- interpreter error and the source text being interpreted.
data InterpreterException = InterpreterException InterpreterError String
  deriving Typeable
-- Maple prints errors with "cursors" (^) which point to the specific position
-- of the error on the line above.  The derived show instance doesn't preserve
-- positioning of the cursor, so we render the pieces ourselves.
instance Show MapleException where
  show (MapleException sentToMaple gotFromMaple) =
    concat [ "MapleException:\n"
           , gotFromMaple
           , "\nafter sending to Maple:\n"
           , sentToMaple ]
instance Show InterpreterException where
  -- Compile errors are flattened to one GHC message per line; any other
  -- interpreter error falls back to its derived rendering.
  show (InterpreterException err cause) =
      "InterpreterException:\n" ++ detail err ++
      "\nwhile interpreting:\n" ++ cause
    where
      detail (WontCompile es) = unlines [ msg | GhcError msg <- es ]
      detail other            = show other
-- Allow both error types to be thrown/caught via Control.Exception.
instance Exception MapleException
instance Exception InterpreterException
ourGHCOptions, ourSearchPath :: [String]
-- When a cabal sandbox package DB is configured, tell GHC to ignore the
-- user package DB and use the sandbox DBs instead; otherwise pass nothing.
ourGHCOptions =
  maybe []
        (\dbs -> "-no-user-package-db" : map ("-package-db " ++) dbs)
        sandboxPackageDB
-- Module search path for the embedded interpreter: the Hakaru source root.
ourSearchPath = [ hakaruRoot ]
-- | Set up the hint interpreter session: search path, loaded modules,
-- language extensions, and imports, in that order (loadModules must run
-- before setImports).
ourContext :: MonadInterpreter m => m ()
ourContext = do
  let modules = [ "Tests.Imports", "Tests.EmbedDatatypes" ]
  set [ searchPath := ourSearchPath ]
  loadModules modules
  -- "Tag" requires DataKinds to use type list syntax
  exts <- get languageExtensions
  set [ languageExtensions := (UnknownExtension "DataKinds" : exts) ]
  setImports modules
-- Type checking is fragile for this function.  It compiles fine
-- from the commandline, but using `cabal repl` causes it to break
-- due to OverloadedStrings and (supposed) ambiguity about @a@ in
-- the Typeable constraint.
-- | Interpret a string of Haskell (Hakaru) source as a value of type @a@.
-- The target type is recovered via 'typeOf' on the result action itself,
-- and its rendered form has ":" and "[]" rewritten to "Cons"/"Nil" —
-- NOTE(review): presumably to match type names the generated code uses;
-- confirm against Tests.Imports.  Throws 'InterpreterException' on failure.
closeLoop :: (Typeable a) => String -> IO a
closeLoop s = action where
  action = do
    result <- unsafeRunInterpreterWithArgs ourGHCOptions $ ourContext >>
#ifdef PATCHED_HINT
      unsafeInterpret s' typeStr
#else
      interpret s' undefined
#endif
    case result of Left err -> throw (InterpreterException err s')
                   Right a  -> return a
  -- Annotate the expression with its expected type before interpreting.
  s' = s ++ " :: " ++ typeStr
  typeStr = unpack $ replace ":" "Cons"
                   $ replace "[]" "Nil"
                   $ pack (show (typeOf ((undefined :: f a -> a) action)))
-- | Wrap a rendered Maple expression with its Maple type annotation,
-- producing @Typed(expr, type)@.
mkTypeString :: (Simplifiable a) => String -> proxy a -> String
mkTypeString rendered ty = concat ["Typed(", rendered, ", ", mapleType ty, ")"]
-- | Round-trip an expression through Maple and interpret the simplified
-- result back into Haskell, wrapped in 'Any'.
simplify :: (Simplifiable a) => Maple a -> IO (Any a)
simplify e =
  simplify' e >>= \simplified -> closeLoop ("Any (" ++ simplified ++ ")")
-- | Send an expression to Maple for simplification and return the
-- resulting Hakaru source text; throws 'MapleException' when Maple's
-- reply cannot be read back.
simplify' :: (Simplifiable a) => Maple a -> IO String
simplify' e = do
  let rendered = toMaple e
  reply <- maple ("timelimit(150,Haskell(SLO:-AST(SLO(" ++ rendered ++ "))));")
  maybe (throw (MapleException rendered reply))
        return
        (readMapleString reply)
-- | Render an expression as Maple source, annotated with its Maple type.
toMaple :: (Simplifiable a) => Maple a -> String
toMaple e =
  let rendered = runMaple e 0
  in mkTypeString rendered e
-- | Read a program, simplify the non-whitespace core through Maple, and
-- print it back with the original surrounding whitespace preserved.
-- Compile errors are reported per-message to stderr; any other exception
-- is printed to stderr.  Note the handler order: the more specific
-- 'InterpreterError' handler is attached first.
main :: IO ()
main = action `catch` handler1 `catch` handler0 where
  action :: IO ()
  -- NOTE(review): input is hard-wired to /tmp/t (the commented-out
  -- getContents suggests stdin was the intended source) — confirm.
  action = do s <- readFile "/tmp/t" -- getContents
              let (before, middle, after) = trim s
              middle' <- simplifyAny middle
              putStr (before ++ middle' ++ after)
  handler1 :: InterpreterError -> IO ()
  handler1 (WontCompile es) = sequence_ [ hPutStrLn stderr msg
                                        | GhcError msg <- es ]
  handler1 exception = throw exception
  handler0 :: SomeException -> IO ()
  handler0 = hPrint stderr
-- | Split a string into its leading whitespace, its whitespace-stripped
-- core, and its trailing whitespace, such that the three parts
-- concatenate back to the original string.
trim :: String -> (String, String, String)
trim input = (leading, reverse coreRev, reverse trailingRev)
  where
    (leading, rest)        = span isSpace input
    (trailingRev, coreRev) = span isSpace (reverse rest)
-- | Parse a Hakaru program (abstracting over any free variables found by
-- 'openLoop'), simplify it through Maple, and pretty-print the result
-- with the collected variable names.
simplifyAny :: String -> IO String
simplifyAny s = do
  (names, AnySimplifiable e) <- openLoop [] s
  Any e' <- simplify e
  return (show (runPrettyPrintNamesPrec e' names 0))
-- | Interpret @s@ as an 'AnySimplifiable'; on "Not in scope" compile
-- errors, wrap the program in @lam@ binders for each unbound name and
-- retry, accumulating the discovered names.  Terminates because each
-- retry requires at least one NEW unbound name not already in @names@.
openLoop :: [String] -> String -> IO ([String], AnySimplifiable)
openLoop names s =
  fmap ((,) names) (closeLoop ("AnySimplifiable (" ++ s ++ ")")) `catch` h
  where
    h :: InterpreterException -> IO ([String], AnySimplifiable)
    h (InterpreterException (WontCompile es) _)
      | not (null unbound) && not (any (`elem` names) unbound)
      = openLoop (unbound ++ names) (unlines header ++ s)
      -- NOTE(review): the unbound names are scraped from GHC's textual
      -- error messages ("Not in scope: `x'"), so this is tied to the
      -- exact message format of the GHC version in use.
      where unbound = [ init msg''
                      | GhcError msg <- es
                      , msg' <- tails msg
                      , Just msg'' <- [stripPrefix ": Not in scope: `" msg']
                      , last msg'' == '\'' ]
            header = [ "lam $ \\" ++ name ++ " ->" | name <- unbound ]
    h (InterpreterException exception _) = throw exception
|
bitemyapp/hakaru
|
Language/Hakaru/Simplify.hs
|
bsd-3-clause
| 6,077
| 0
| 17
| 1,323
| 1,695
| 896
| 799
| 130
| 2
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.RDS.DescribeDBParameters
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Returns the detailed parameter list for a particular DB parameter group.
--
-- <http://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_DescribeDBParameters.html>
module Network.AWS.RDS.DescribeDBParameters
(
-- * Request
DescribeDBParameters
-- ** Request constructor
, describeDBParameters
-- ** Request lenses
, ddbpDBParameterGroupName
, ddbpFilters
, ddbpMarker
, ddbpMaxRecords
, ddbpSource
-- * Response
, DescribeDBParametersResponse
-- ** Response constructor
, describeDBParametersResponse
-- ** Response lenses
, ddbprMarker
, ddbprParameters
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.RDS.Types
import qualified GHC.Exts
-- | Request type for the DescribeDBParameters API call; field semantics
-- are documented on the corresponding lenses below.
data DescribeDBParameters = DescribeDBParameters
    { _ddbpDBParameterGroupName :: Text
    , _ddbpFilters              :: List "member" Filter
    , _ddbpMarker               :: Maybe Text
    , _ddbpMaxRecords           :: Maybe Int
    , _ddbpSource               :: Maybe Text
    } deriving (Eq, Read, Show)
-- | 'DescribeDBParameters' constructor.  The parameter group name is the
-- only required field; everything else starts out empty/unset.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'ddbpDBParameterGroupName' @::@ 'Text'
--
-- * 'ddbpFilters' @::@ ['Filter']
--
-- * 'ddbpMarker' @::@ 'Maybe' 'Text'
--
-- * 'ddbpMaxRecords' @::@ 'Maybe' 'Int'
--
-- * 'ddbpSource' @::@ 'Maybe' 'Text'
--
describeDBParameters :: Text -- ^ 'ddbpDBParameterGroupName'
                     -> DescribeDBParameters
describeDBParameters groupName = DescribeDBParameters
    { _ddbpDBParameterGroupName = groupName
    , _ddbpFilters              = mempty
    , _ddbpMarker               = Nothing
    , _ddbpMaxRecords           = Nothing
    , _ddbpSource               = Nothing
    }
-- Request lenses: each is a plain getter/setter pair over the matching
-- record field of 'DescribeDBParameters'.
-- | The name of a specific DB parameter group to return details for.
--
-- Constraints:
--
-- Must be 1 to 255 alphanumeric characters  First character must be a letter  Cannot end with a hyphen or contain two consecutive hyphens
--
ddbpDBParameterGroupName :: Lens' DescribeDBParameters Text
ddbpDBParameterGroupName =
    lens _ddbpDBParameterGroupName
        (\s a -> s { _ddbpDBParameterGroupName = a })

-- | This parameter is not currently supported.
ddbpFilters :: Lens' DescribeDBParameters [Filter]
ddbpFilters = lens _ddbpFilters (\s a -> s { _ddbpFilters = a }) . _List

-- | An optional pagination token provided by a previous 'DescribeDBParameters'
-- request. If this parameter is specified, the response includes only records
-- beyond the marker, up to the value specified by 'MaxRecords'.
ddbpMarker :: Lens' DescribeDBParameters (Maybe Text)
ddbpMarker = lens _ddbpMarker (\s a -> s { _ddbpMarker = a })

-- | The maximum number of records to include in the response. If more records
-- exist than the specified 'MaxRecords' value, a pagination token called a marker
-- is included in the response so that the remaining results may be retrieved.
--
-- Default: 100
--
-- Constraints: minimum 20, maximum 100
ddbpMaxRecords :: Lens' DescribeDBParameters (Maybe Int)
ddbpMaxRecords = lens _ddbpMaxRecords (\s a -> s { _ddbpMaxRecords = a })

-- | The parameter types to return.
--
-- Default: All parameter types returned
--
-- Valid Values: 'user | system | engine-default'
ddbpSource :: Lens' DescribeDBParameters (Maybe Text)
ddbpSource = lens _ddbpSource (\s a -> s { _ddbpSource = a })
-- | Response type: an optional continuation marker plus the returned
-- parameters (see the response lenses below).
data DescribeDBParametersResponse = DescribeDBParametersResponse
    { _ddbprMarker     :: Maybe Text
    , _ddbprParameters :: List "member" Parameter
    } deriving (Eq, Read, Show)

-- | 'DescribeDBParametersResponse' constructor.  Both fields start empty.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'ddbprMarker' @::@ 'Maybe' 'Text'
--
-- * 'ddbprParameters' @::@ ['Parameter']
--
describeDBParametersResponse :: DescribeDBParametersResponse
describeDBParametersResponse = DescribeDBParametersResponse
    { _ddbprParameters = mempty
    , _ddbprMarker     = Nothing
    }
-- Response lenses, one per record field of 'DescribeDBParametersResponse'.
-- | An optional pagination token provided by a previous request. If this
-- parameter is specified, the response includes only records beyond the marker,
-- up to the value specified by 'MaxRecords'.
ddbprMarker :: Lens' DescribeDBParametersResponse (Maybe Text)
ddbprMarker = lens _ddbprMarker (\s a -> s { _ddbprMarker = a })

-- | A list of 'Parameter' values.
ddbprParameters :: Lens' DescribeDBParametersResponse [Parameter]
ddbprParameters = lens _ddbprParameters (\s a -> s { _ddbprParameters = a }) . _List
-- Wire encoding: every field is sent as a query parameter against "/",
-- with default headers, as a POST to the RDS service.
instance ToPath DescribeDBParameters where
    toPath = const "/"

instance ToQuery DescribeDBParameters where
    toQuery DescribeDBParameters{..} = mconcat
        [ "DBParameterGroupName" =? _ddbpDBParameterGroupName
        , "Filters"              =? _ddbpFilters
        , "Marker"               =? _ddbpMarker
        , "MaxRecords"           =? _ddbpMaxRecords
        , "Source"               =? _ddbpSource
        ]

instance ToHeaders DescribeDBParameters

instance AWSRequest DescribeDBParameters where
    type Sv DescribeDBParameters = RDS
    type Rs DescribeDBParameters = DescribeDBParametersResponse

    request  = post "DescribeDBParameters"
    response = xmlResponse
-- Decode the response from the "DescribeDBParametersResult" XML element;
-- a missing "Parameters" element yields an empty list.
instance FromXML DescribeDBParametersResponse where
    parseXML = withElement "DescribeDBParametersResult" $ \x -> DescribeDBParametersResponse
        <$> x .@? "Marker"
        <*> x .@? "Parameters" .!@ mempty

-- Pagination: stop when the response carries no marker; otherwise feed
-- the response marker back into the next request's 'ddbpMarker'.
instance AWSPager DescribeDBParameters where
    page rq rs
        | stop (rs ^. ddbprMarker) = Nothing
        | otherwise = (\x -> rq & ddbpMarker ?~ x)
            <$> (rs ^. ddbprMarker)
|
romanb/amazonka
|
amazonka-rds/gen/Network/AWS/RDS/DescribeDBParameters.hs
|
mpl-2.0
| 6,603
| 0
| 12
| 1,470
| 874
| 522
| 352
| 90
| 1
|
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="tr-TR">
  <!-- JavaHelp helpset for the Turkish (tr-TR) locale: declares the map
       plus the standard TOC / Index / Search / Favorites views. -->
  <title>Python Scriptleme</title>
  <maps>
    <homeID>top</homeID>
    <mapref location="map.jhm"/>
  </maps>
  <view>
    <name>TOC</name>
    <label>İçindekiler</label>
    <type>org.zaproxy.zap.extension.help.ZapTocView</type>
    <data>toc.xml</data>
  </view>
  <view>
    <name>Index</name>
    <label>Dizin</label>
    <type>javax.help.IndexView</type>
    <data>index.xml</data>
  </view>
  <view>
    <name>Search</name>
    <label>Arama</label>
    <type>javax.help.SearchView</type>
    <data engine="com.sun.java.help.search.DefaultSearchEngine">
      JavaHelpSearch
    </data>
  </view>
  <view>
    <name>Favorites</name>
    <label>Favoriler</label>
    <type>javax.help.FavoritesView</type>
  </view>
</helpset>
|
kingthorin/zap-extensions
|
addOns/jython/src/main/javahelp/org/zaproxy/zap/extension/jython/resources/help_tr_TR/helpset_tr_TR.hs
|
apache-2.0
| 966
| 77
| 66
| 156
| 410
| 207
| 203
| -1
| -1
|
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="az-AZ">
  <!-- JavaHelp helpset for the Azerbaijani (az-AZ) locale.
       NOTE(review): the title and the TOC/Favorites labels are still in
       English while Index/Search are translated — confirm whether the
       translation is simply incomplete upstream. -->
  <title>Python Scripting</title>
  <maps>
    <homeID>top</homeID>
    <mapref location="map.jhm"/>
  </maps>
  <view>
    <name>TOC</name>
    <label>Contents</label>
    <type>org.zaproxy.zap.extension.help.ZapTocView</type>
    <data>toc.xml</data>
  </view>
  <view>
    <name>Index</name>
    <label>İndeks</label>
    <type>javax.help.IndexView</type>
    <data>index.xml</data>
  </view>
  <view>
    <name>Search</name>
    <label>Axtar</label>
    <type>javax.help.SearchView</type>
    <data engine="com.sun.java.help.search.DefaultSearchEngine">
      JavaHelpSearch
    </data>
  </view>
  <view>
    <name>Favorites</name>
    <label>Favorites</label>
    <type>javax.help.FavoritesView</type>
  </view>
</helpset>
|
kingthorin/zap-extensions
|
addOns/jython/src/main/javahelp/org/zaproxy/zap/extension/jython/resources/help_az_AZ/helpset_az_AZ.hs
|
apache-2.0
| 962
| 77
| 66
| 156
| 407
| 206
| 201
| -1
| -1
|
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="pl-PL">
  <!-- JavaHelp helpset for the Polish (pl-PL) locale: declares the map
       plus the standard TOC / Index / Search / Favorites views. -->
  <title>Python Scripting</title>
  <maps>
    <homeID>top</homeID>
    <mapref location="map.jhm"/>
  </maps>
  <view>
    <name>TOC</name>
    <label>Zawartość</label>
    <type>org.zaproxy.zap.extension.help.ZapTocView</type>
    <data>toc.xml</data>
  </view>
  <view>
    <name>Index</name>
    <label>Indeks</label>
    <type>javax.help.IndexView</type>
    <data>index.xml</data>
  </view>
  <view>
    <name>Search</name>
    <label>Szukaj</label>
    <type>javax.help.SearchView</type>
    <data engine="com.sun.java.help.search.DefaultSearchEngine">
      JavaHelpSearch
    </data>
  </view>
  <view>
    <name>Favorites</name>
    <label>Ulubione</label>
    <type>javax.help.FavoritesView</type>
  </view>
</helpset>
|
kingthorin/zap-extensions
|
addOns/jython/src/main/javahelp/org/zaproxy/zap/extension/jython/resources/help_pl_PL/helpset_pl_PL.hs
|
apache-2.0
| 964
| 77
| 66
| 156
| 412
| 208
| 204
| -1
| -1
|
{-# LANGUAGE NoImplicitPrelude #-}
-- | Simple interface to complicated program arguments.
--
-- This is a "fork" of the @optparse-simple@ package that has some workarounds for
-- optparse-applicative issues that become problematic with programs that have many options and
-- subcommands. Because it makes the interface more complex, these workarounds are not suitable for
-- pushing upstream to optparse-applicative.
module Options.Applicative.Complicated
( addCommand
, addSubCommands
, complicatedOptions
, complicatedParser
) where
import Control.Monad.Trans.Except
import Control.Monad.Trans.Writer
import Data.Version
import Options.Applicative
import Options.Applicative.Types
import Options.Applicative.Builder.Internal
import Stack.Prelude
import System.Environment
-- | Generate and execute a complicated options parser.
--
-- Parse order matters below: a bare invocation (no args at all) re-runs
-- the parser with @--help@; a parse failure goes to the caller-supplied
-- handler when one was given; everything else goes through the standard
-- 'handleParseResult'.  The common settings parsed before and after the
-- sub-command are merged with 'mappend' (post-command values first).
complicatedOptions
  :: Monoid a
  => Version
  -- ^ numeric version
  -> Maybe String
  -- ^ version string
  -> String
  -- ^ hpack numeric version, as string
  -> String
  -- ^ header
  -> String
  -- ^ program description (displayed between usage and options listing in the help output)
  -> String
  -- ^ footer
  -> Parser a
  -- ^ common settings
  -> Maybe (ParserFailure ParserHelp -> [String] -> IO (a,(b,a)))
  -- ^ optional handler for parser failure; 'handleParseResult' is called by
  -- default
  -> ExceptT b (Writer (Mod CommandFields (b,a))) ()
  -- ^ commands (use 'addCommand')
  -> IO (a,b)
complicatedOptions numericVersion versionString numericHpackVersion h pd footerStr commonParser mOnFailure commandParser =
  do args <- getArgs
     (a,(b,c)) <- case execParserPure (prefs noBacktrack) parser args of
       Failure _ | null args -> withArgs ["--help"] (execParser parser)
       -- call onFailure handler if it's present and parsing options failed
       Failure f | Just onFailure <- mOnFailure -> onFailure f args
       parseResult -> handleParseResult parseResult
     return (mappend c a,b)
  where parser = info (helpOption <*> versionOptions <*> complicatedParser "COMMAND|FILE" commonParser commandParser) desc
        desc = fullDesc <> header h <> progDesc pd <> footer footerStr
        -- Only offer the numeric/hpack version flags when an explicit
        -- version string was supplied.
        versionOptions =
          case versionString of
            Nothing -> versionOption (showVersion numericVersion)
            Just s -> versionOption s <*> numericVersionOption <*> numericHpackVersionOption
        versionOption s =
          infoOption
            s
            (long "version" <>
             help "Show version")
        numericVersionOption =
          infoOption
            (showVersion numericVersion)
            (long "numeric-version" <>
             help "Show only version number")
        numericHpackVersionOption =
          infoOption
            numericHpackVersion
            (long "hpack-numeric-version" <>
             help "Show only hpack's version number")
-- | Add a command to the options dispatcher.  This is 'addCommand''
-- specialised to commands whose constructor ignores the common settings.
addCommand :: String   -- ^ command string
           -> String   -- ^ title of command
           -> String   -- ^ footer of command help
           -> (a -> b) -- ^ constructor to wrap up command in common data type
           -> Parser c -- ^ common parser
           -> Parser a -- ^ command parser
           -> ExceptT b (Writer (Mod CommandFields (b,c))) ()
addCommand cmd title footerStr wrap =
  addCommand' cmd title footerStr (\parsed common -> (wrap parsed, common))
-- | Add a command that takes sub-commands to the options dispatcher.
-- The common settings collected at the three levels (outer, this level,
-- sub-command) are merged innermost-last; the mconcat order below is
-- deliberate.
addSubCommands
  :: Monoid c
  => String
  -- ^ command string
  -> String
  -- ^ title of command
  -> String
  -- ^ footer of command help
  -> Parser c
  -- ^ common parser
  -> ExceptT b (Writer (Mod CommandFields (b,c))) ()
  -- ^ sub-commands (use 'addCommand')
  -> ExceptT b (Writer (Mod CommandFields (b,c))) ()
addSubCommands cmd title footerStr commonParser commandParser =
  addCommand' cmd
              title
              footerStr
              (\(c1,(a,c2)) c3 -> (a,mconcat [c3, c2, c1]))
              commonParser
              (complicatedParser "COMMAND" commonParser commandParser)
-- | Add a command to the options dispatcher.  Registers the command with
-- the surrounding 'Writer'; the command's parser runs the sub-parser
-- first, then the common parser, and combines them with @constr@.
addCommand' :: String             -- ^ command string
            -> String             -- ^ title of command
            -> String             -- ^ footer of command help
            -> (a -> c -> (b,c))  -- ^ constructor to wrap up command in common data type
            -> Parser c           -- ^ common parser
            -> Parser a           -- ^ command parser
            -> ExceptT b (Writer (Mod CommandFields (b,c))) ()
addCommand' cmd title footerStr constr commonParser inner =
  lift $ tell $ command cmd $
    info (constr <$> inner <*> commonParser)
         (progDesc title <> footer footerStr)
-- | Generate a complicated options parser.  Runs the command writer: on
-- success the accumulated command fields become a sub-parser; if the
-- writer short-circuited with a 'Left', that value is returned directly
-- with empty common settings.
complicatedParser
  :: Monoid a
  => String
  -- ^ metavar for the sub-command
  -> Parser a
  -- ^ common settings
  -> ExceptT b (Writer (Mod CommandFields (b,a))) ()
  -- ^ commands (use 'addCommand')
  -> Parser (a,(b,a))
complicatedParser commandMetavar commonParser commandParser =
   (,) <$>
   commonParser <*>
   case runWriter (runExceptT commandParser) of
     (Right (),d) -> hsubparser' commandMetavar d
     (Left b,_) -> pure (b,mempty)
-- | Subparser with @--help@ argument. Borrowed with slight modification
-- from Options.Applicative.Extra: each sub-command's parser gets the
-- non-hidden 'helpOption' appended.
hsubparser' :: String -> Mod CommandFields a -> Parser a
hsubparser' commandMetavar m = mkParser d g rdr
  where
    Mod _ d g = metavar commandMetavar `mappend` m
    (groupName, cmds, subs) = mkCommand m
    rdr = CmdReader groupName cmds (fmap add_helper . subs)
    add_helper pinfo = pinfo
      { infoParser = infoParser pinfo <**> helpOption }
-- | Non-hidden @--help@ option that aborts parsing and shows help text.
helpOption :: Parser (a -> a)
helpOption =
  abortOption ShowHelpText
              (long "help" <> help "Show this help text")
|
MichielDerhaeg/stack
|
src/Options/Applicative/Complicated.hs
|
bsd-3-clause
| 5,893
| 0
| 20
| 1,531
| 1,319
| 696
| 623
| 113
| 4
|
-----------------------------------------------------------------------------
-- |
-- Module : RefacRemoveField
-- Copyright : (c) Christopher Brown 2007
--
-- Maintainer : cmb21@kent.ac.uk
-- Stability : provisional
-- Portability : portable
--
-- This module contains a transformation for HaRe.
-- Remove a field from a constructor and resolve
-- pattern matching by placing references to said
-- field by calls to error.
-----------------------------------------------------------------------------
module RefacRemoveField where
import PrettyPrint
import PrettyPrint
import PosSyntax
import AbstractIO
import Data.Maybe
import TypedIds
import UniqueNames hiding (srcLoc)
import PNT
import TiPNT
import Data.List
import RefacUtils hiding (getParams)
import PFE0 (findFile)
import MUtils (( # ))
import RefacLocUtils
-- import System
import System.IO
import Data.Char
-- | Entry point for the remove-field refactoring.  @args@ is presumably
-- @[fileName, fieldPosition, row, col]@ — TODO confirm against the
-- HaRe driver that invokes this.  The cursor (row, col) must sit on the
-- constructor whose field is being removed.
refacRemoveField args
  = do
      let
          fileName = args!!0
          pos = args!!1
          row = read (args!!2)::Int
          col = read (args!!3)::Int
      AbstractIO.putStrLn "refacRemoveField"
      modInfo@(inscps, exps, mod, tokList) <- parseSourceFile (fileName)
      case checkCursor fileName row col mod of
        Left errMsg -> do error errMsg
        Right dat ->
          do
            -- Resolve the entity under the cursor and the data
            -- declaration that defines it.
            let res = locToPNT fileName (row, col) mod
            let res2 = locToPN fileName (row, col) mod
            let decs = hsDecls mod
            let datDec = definingDecls [res2] decs False True
            let datName = (declToName (head datDec))
            let datPNT = (declToPNT (head datDec))
            -- if the field in question is a record we must get
            -- the name of the record...
            ((_,m), (newToks, newMod)) <- applyRefac (removeField datName datPNT res pos tokList)
                                                     (Just (inscps, exps, mod, tokList)) fileName
            writeRefactoredFiles False [((fileName, m), (newToks, newMod))]
            AbstractIO.putStrLn "Completed.\n"

-- | Check that the cursor denotes a data declaration; return that
-- declaration, or an error message for any other position.  The 'Just'
-- pattern below is safe: 'definesTypeCon' only ever selects
-- 'HsDataDecl' declarations.
checkCursor :: String -> Int -> Int -> HsModuleP -> Either String HsDeclP
checkCursor fileName row col mod
  = case locToTypeDecl of
      Nothing -> Left ("Invalid cursor position. Please place cursor at the beginning of the constructor name!")
      Just decl@(Dec (HsDataDecl loc c tp xs _)) -> Right decl
    where
      locToTypeDecl = find (definesTypeCon (locToPNT fileName (row, col) mod)) (hsModDecls mod)
      -- definesTypeCon pnt (Dec (HsDataDecl loc c tp xs _))
      --  = isDataCon pnt && (findPNT pnt tp)
      definesTypeCon pnt (Dec (HsDataDecl _ _ _ i _))
        = isDataCon pnt && (findPNT pnt i)
      definesTypeCon pnt _ = False
-- | Perform the removal on module @t@.  @pos@ is the 1-based field
-- index (still a String here), @pnt@ the constructor being edited, and
-- @datName@/@datPNT@ identify the owning data type.  If any pattern
-- binding had to be rewritten to reference the generated error stub,
-- that stub declaration is added to the module before types are fixed.
removeField datName datPNT pnt pos tok (_, _, t)
  = do
      -- Type and name of the field being removed.
      let fType = getField pnt ((read pos)::Int) t
      let fName = typToPNT (convertBang fType)
      -- Drop the field from the constructor declaration itself.
      newMod <- removingField pnt pos t
      -- Rewrite uses bound through pattern matches to stub calls.
      d <- checkPat2 datName pnt pos newMod
      newMod' <- removingFieldInPat datName pnt pos tok d
      if d /= newMod
        then do
          -- Something was rewritten: insert the error stub declaration.
          newD' <- addDecl newMod' Nothing ([errorDecl], Nothing) True
          dataType <- getDataType datPNT newD'
          newTypedMod <- cleanTypeSigs fType fName datPNT datName pnt dataType newD'
          return newTypedMod
        else do
          dataType <- getDataType datPNT newMod'
          newTypedMod <- cleanTypeSigs fType fName datPNT datName pnt dataType newMod'
          return newTypedMod
  where
    -- The inserted stub: @error<DatName> field dat function@ builds a
    -- descriptive runtime error for removed bindings.
    errorDecl = (Dec (HsFunBind loc0
                  [HsMatch loc0 (nameToPNT "errorData")
                    listOfPats
                     (HsBody (createFunc (nameToPNT ("error" ++ datName)) listOfExps)) []]))
    listOfPats = nameToPat "field" : (nameToPat "dat" : [nameToPat "function"])
    listOfExps = [Exp (HsId (HsVar (nameToPNT "(\"the binding for \" ++ field ++ \" in a pattern binding involving \" ++ dat ++ \" has been removed in function \" ++ function)"
                  )))]

-- | Strip the (un)bang wrapper from a constructor field type.
convertBang (HsUnBangedType t) = t
convertBang (HsBangedType t) = t

-- | Fetch the @pos@-th (1-based) field of constructor @pnt@.
-- NOTE(review): @ts !! (pos-1)@ is partial — an out-of-range index
-- raises the generic index error rather than the message below.
getField pnt pos
  = (fromMaybe (error "not a valid field to remove!")).(applyTU (once_tdTU (failTU `adhocTU` inDatDeclaration)))
    where
      inDatDeclaration (dat@(HsConDecl _ _ _ i ts)::HsConDeclP)
        | pnt == i
          = Just (ts !! (pos-1))
      inDatDeclaration x = Nothing
-- | Flatten nested type applications/arrows into a left-to-right list
-- of their atomic components.
flatternTApp :: HsTypeP -> [HsTypeP]
flatternTApp ty =
  case ty of
    Typ (HsTyFun a b) -> flatternTApp a ++ flatternTApp b
    Typ (HsTyApp a b) -> flatternTApp a ++ flatternTApp b
    atom              -> [atom]
-- | Find the data declaration that defines the type named by @datPNT@
-- anywhere inside @t@ (fails in the monad if absent).
getDataType datPNT t
  = applyTU (once_tdTU (failTU `adhocTU` inDatDeclaration)) t
    where
      inDatDeclaration (dat@(Dec (HsDataDecl a b tp c d))::HsDeclP)
        | (defineLoc datPNT == (defineLoc.typToPNT.(ghead "inDatDeclaration").flatternTApp) tp)
          = return dat
      inDatDeclaration d = fail ""
      -- Local copy shadowing the top-level helper of the same name.
      flatternTApp :: HsTypeP -> [HsTypeP]
      flatternTApp (Typ (HsTyFun t1 t2)) = flatternTApp t1 ++ flatternTApp t2
      flatternTApp (Typ (HsTyApp t1 t2)) = flatternTApp t1 ++ flatternTApp t2
      flatternTApp x = [x]

-- | After the field removal, rewrite the data declaration (dropping a
-- now-redundant type parameter when 'redunParam' says only the removed
-- field used it) and patch every type signature that mentions the data
-- type.  NOTE(review): the guards below are order-dependent and rely on
-- 'defineLoc' equality for identity — tread carefully.
cleanTypeSigs fType fName datPNT datName pnt dataType t
  = applyTP (full_buTP (idTP `adhocTP` inDatDeclaration)) t
    where
      inDatDeclaration (dat@(Dec (HsDataDecl a b tp c d))::HsDeclP)
        | (defineLoc datPNT == (defineLoc.typToPNT.(ghead "inDatDeclaration").flatternTApp) tp) &&
          (redunParam (tail (flatternTApp tp)) c)
          =
            -- update the data declaration with the type removed!
            update dat (Dec (HsDataDecl a b (createDataFunc ((typToPNT.(ghead "inDatDeclaration").flatternTApp) tp)
                              ( (tail (flatternTApp tp)) <-> (convertBang fType))) c d)) dat
      inDatDeclaration (dat@(Dec (HsTypeSig s is c t))::HsDeclP)
        | datName `elem` (map (pNTtoName.typToPNT) (flatternTApp t) )
          = do
              let res = changeType t dataType
              if res == t
                then return dat
                else update dat (Dec (HsTypeSig s is c res)) dat
      inDatDeclaration t =return t
      -- Rewrite one type: recurse through arrows, and rebuild an
      -- application of the refactored data type without the removed
      -- parameter (only when that parameter is truly redundant).
      changeType :: HsTypeP -> HsDeclP -> HsTypeP
      changeType t@(Typ (HsTyFun t1 t2)) x
        = (Typ (HsTyFun (changeType t1 x) (changeType t2 x)))
      changeType t@(Typ (HsTyApp t1 t2)) (Dec (HsDataDecl a b tp c d))
        | (defineLoc datPNT) == (defineLoc (convertToCon t1)) &&
          ((pNTtoName fName) `elem` (map (pNTtoName.typToPNT) (flatternTApp t))) &&
          isLower (head (pNTtoName fName)) &&
          (redunParam (tail (flatternTApp tp)) c)
          = (createDataFunc ((typToPNT.(ghead "inDatDeclaration").flatternTApp) t)
              ( ( (tail (flatternTApp t)) <-> (convertBang fType))))
      changeType t _ = t
      -- Head type constructor of a (possibly applied) type.
      convertToCon :: HsTypeP -> PNT
      convertToCon (Typ (HsTyFun t1 t2)) = convertToCon t1
      convertToCon (Typ (HsTyApp t1 t2)) = convertToCon t1
      convertToCon (Typ (HsTyCon t1)) = t1
      convertToCon t = defaultPNT
      -- Remove every occurrence (by printed name) of @y@ from the list.
      -- (<->) :: Term t => [a] -> a -> [a]
      (<->) [] _ = []
      (<->) (x:xs) y
        | (pNTtoName (typToPNT x)) == (pNTtoName (typToPNT y)) = (xs <-> y)
        | otherwise = x : (xs <-> y)
      -- convertBangs :: [HsBangType t] -> [HsTypeP]
      -- A type parameter is redundant iff exactly one of the given
      -- parameters is unused by every remaining constructor field.
      redunParam [] _ = False
      redunParam xs (cs)
        = length (filter id (map (inT cs) (map (pNTtoName.typToPNT) xs))) == 1
      inT2 [] _ = [False]
      inT2 ((HsConDecl s i c p types) :cs) x
        = ((x `elem` (map (pNTtoName.typToPNT) (map convertBang types))) : (cs `inT2` x))
      inT x y = (not.or) (x `inT2` y)
      -- Local copy shadowing the top-level helper of the same name.
      flatternTApp :: HsTypeP -> [HsTypeP]
      flatternTApp (Typ (HsTyFun t1 t2)) = flatternTApp t1 ++ flatternTApp t2
      flatternTApp (Typ (HsTyApp t1 t2)) = flatternTApp t1 ++ flatternTApp t2
      flatternTApp x = [x]
-- | Delete the @pos@-th field (1-based, @pos@ is a String) from the
-- declaration of constructor @pnt@, handling both plain and record
-- constructors.  Stops at the first matching declaration.
removingField pnt pos t
  = applyTP (stop_tdTP (failTP `adhocTP` inDat)) t
    where
      inDat (dat@(HsConDecl s i c p types)::HsConDeclP)
        | p == pnt = do
            r <- update dat (HsConDecl s i c p (newTypes types (read pos::Int))) dat
            return r
      inDat (dat@(HsRecDecl s i c p types)::HsConDeclP)
        | p == pnt = do
            r <- update dat (HsRecDecl s i c p (newRecTypes types (read pos::Int))) dat
            return r
      inDat _ = fail ""
      -- Record fields are grouped as (names, type); drop the name at
      -- index @i@, and drop the whole group when it becomes empty.
      newRecTypes :: (Eq a) => [([a], b)] -> Int -> [([a], b)]
      newRecTypes xs i = newRecTypes' xs i 1
        where
          newRecTypes' :: (Eq a) => [([a], b)] -> Int -> Int -> [([a], b)]
          newRecTypes' [] i n = error "Please select a valid field position for this constructor!"
          newRecTypes' ((x,y):xs) i n
            | i >= n && i < (n+(length x)) = case newTypes' x i n of
                                               [] -> xs
                                               x -> (x,y) : xs
            | otherwise = (x,y) : (newRecTypes' xs i (n+(length x)))
      -- Drop the element at 1-based index @i@.
      newTypes :: [a] -> Int -> [a]  -- -> [HsBangType HsTypeP]
      newTypes xs i = newTypes' xs i 1
      newTypes' :: [a] -> Int -> Int -> [a]
      newTypes' [] _ _ = error "Please select a valid field position for this constructor!"
      newTypes' (x:xs) i n
        | n == i = xs
        | otherwise = x : (newTypes' xs i (n+1))
-- | For every declaration: delete the @pos@-th argument from pattern
-- matches on constructor @pnt@, and from saturated applications of
-- @pnt@ on the expression side.
removingFieldInPat datName pnt pos tok t
  = applyTP (stop_tdTP (failTP `adhocTP` inPat)) t
    where
      inPat d@(Dec _::HsDeclP)
        = do
            d' <- checkPat1 pnt pos d
            d'' <- checkCall pnt pos d'
            return d''
        where
          -- checkPat :: (Term t) => PNT -> String -> t -> HsPatP
          checkPat1 pnt pos t
            = applyTP (full_tdTP (idTP `adhocTP` inP)) t
          inP pat@(Pat (HsPApp i p))
            | (defineLoc i) == (defineLoc pnt) = RefacUtils.delete (p !! ((read pos::Int)-1)) pat
          inP x = return x
          checkCall pnt pos t
            = applyTP (stop_tdTP (failTP `adhocTP` conApp)) t
          -- a constructor application...
          conApp exp@(Exp (HsApp e1 e2))
            | defineLoc pnt == (defineLoc.expToPNT.(ghead "inE").flatternApp) exp
              = RefacUtils.delete ((tail (flatternApp exp)) !! ((read pos::Int)-1)) exp
          conApp _ = mzero
          -- Flatten nested applications into head : arguments.
          flatternApp :: HsExpP -> [HsExpP]
          flatternApp (Exp (HsApp e1 e2)) = flatternApp e1 ++ flatternApp e2
          flatternApp x = [x]
          -- NOTE(review): (!-) and newPats below appear unused here.
          (!-) :: Int -> Int -> [a] -> [a]
          (!-) _ _ [] = []
          (!-) n pos (x:xs)
            | n == pos = xs
            | otherwise = x : (!-) n (pos + 1) xs
          newPats :: [HsPatP] -> Int -> Int -> [HsPatP]
          newPats (p:ps) pos n
            | n == pos = ps
            | otherwise = p : (newPats ps pos (n+1))
          -- inPat x = fail ""
-- | Replace every variable that is bound by the removed field in a
-- pattern match on @pnt@ with a call to the generated stub
-- @error<DatName> "var" "Constructor" "function"@, so the removal is
-- reported at runtime instead of silently miscompiling.
checkPat2 datName pnt pos t
  = applyTP (stop_tdTP (failTP `adhocTP` inPat)) t
    where
      inPat d@(Dec _::HsDeclP)
        = do
            d'' <- addCall (declToName d) pnt pos d
            return d''
      addCall funcName pnt pos t
        = applyTP (stop_tdTP (failTP `adhocTP` (inE funcName))) t
      -- a name associated to a pattern binding somewhere...
      inE funcName exp@(Exp (HsId (HsVar x)))
        | (findPatBind pnt x (read pos::Int) t)
          = update exp (Exp (HsApp (Exp (HsApp (Exp (HsApp (nameToExp ("error" ++ datName))
                                                    (nameToExp ("\"" ++ (pNTtoName x) ++ "\""))))
                                         (nameToExp ("\"" ++ (pNTtoName pnt) ++ "\""))))
                             (nameToExp ("\"" ++ funcName ++ "\"")))) exp
      inE _ x = mzero
-- | Is variable @x@ bound at position @pos@ (1-based) of a pattern
-- match on constructor @pnt@ anywhere inside @t@?
findPatBind :: (Term t) => PNT -> PNT -> Int -> t -> Bool
findPatBind pnt x pos
  = (fromMaybe False).(applyTU (once_tdTU (failTU `adhocTU` inBind)))
    where
      inBind (dat@(Pat (HsPApp i types))::HsPatP)
        | pnt == i && checkInTypes x (types!!(pos-1)) = Just True
      inBind _ = Nothing
      -- Only simple variable patterns count; nested patterns are not
      -- chased here.
      checkInTypes x (Pat (HsPId (HsVar typePnt)))
        | defineLoc x == defineLoc typePnt = True
      checkInTypes _ x = False
-- thePNT id = (PNT (PN (UnQual id) (G (PlainModule "unknown") id (N (Just loc0)))) (Type (TypeInfo (Just Primitive) [] [])) (N (Just loc0)))
|
kmate/HaRe
|
old/refactorer/RefacRemoveField.hs
|
bsd-3-clause
| 12,828
| 7
| 27
| 4,427
| 4,320
| 2,217
| 2,103
| -1
| -1
|
{-@ LIQUID "--no-termination" @-}
module Foo (add, remove, deleteMin, deleteMin') where
import Language.Haskell.Liquid.Prelude
-- | A red-black tree with strict subtrees; the binary-search ordering
-- is expressed by the LiquidHaskell refinements at the end of the file.
data RBTree a = Leaf
              | Node Color a !(RBTree a) !(RBTree a)
              deriving (Show)

data Color = B -- ^ Black
           | R -- ^ Red
           deriving (Eq,Show)
---------------------------------------------------------------------------
-- | Add an element -------------------------------------------------------
---------------------------------------------------------------------------
{-@ add :: (Ord a) => a -> ORBT a -> ORBT a @-}
-- | Insert a key and re-blacken the root.
add k t = makeBlack (ins k t)

{-@ ins :: (Ord a) => a -> ORBT a -> ORBT a @-}
-- | Insert below the root; rebalancing (lbal/rbal) happens only under
-- black nodes, duplicates are left untouched.
ins k Leaf = Node R k Leaf Leaf
ins k node@(Node B y l r) =
  case compare k y of
    LT -> lbal y (ins k l) r
    GT -> rbal y l (ins k r)
    EQ -> node
ins k node@(Node R y l r) =
  case compare k y of
    LT -> Node R y (ins k l) r
    GT -> Node R y l (ins k r)
    EQ -> node
---------------------------------------------------------------------------
-- | Delete an element ----------------------------------------------------
---------------------------------------------------------------------------
{-@ remove :: (Ord a) => a -> ORBT a -> ORBT a @-}
-- | Delete @x@ (if present) and re-blacken the root.
remove x t = makeBlack (del x t)

{-@ del :: (Ord a) => a -> ORBT a -> ORBT a @-}
-- | Delete below the root.  When descending into a black child, the
-- corresponding lbalS/rbalS rotation repairs the black-height deficit.
del x Leaf           = Leaf
del x (Node _ y a b) = case compare x y of
   EQ -> append y a b
   LT -> case a of
           Leaf         -> Node R y Leaf b
           Node B _ _ _ -> lbalS y (del x a) b
           _            -> let t = Node R y (del x a) b in t
   GT -> case b of
           Leaf         -> Node R y a Leaf
           Node B _ _ _ -> rbalS y a (del x b)
           _            -> Node R y a (del x b)
{-@ append :: y:a -> ORBT {v:a | v < y} -> ORBT {v:a | y < v} -> ORBT a @-}
-- | Join the two subtrees of a deleted pivot @piv@ (the pivot itself is
-- discarded).  Clause order matters: the mixed-colour cases at the
-- bottom only apply once the colour-matching cases above have failed.
append :: a -> RBTree a -> RBTree a -> RBTree a
append _ Leaf r
  = r
append _ l Leaf
  = l
append piv (Node R lx ll lr) (Node R rx rl rr)
  = case append piv lr rl of
      Node R x lr' rl' -> Node R x (Node R lx ll lr') (Node R rx rl' rr)
      lrl              -> Node R lx ll (Node R rx lrl rr)
append piv (Node B lx ll lr) (Node B rx rl rr)
  = case append piv lr rl of
      Node R x lr' rl' -> Node R x (Node B lx ll lr') (Node B rx rl' rr)
      -- merging two black nodes loses a black: repair with lbalS
      lrl              -> lbalS lx ll (Node B rx lrl rr)
append piv l@(Node B _ _ _) (Node R rx rl rr)
  = Node R rx (append piv l rl) rr
append piv l@(Node R lx ll lr) r@(Node B _ _ _)
  = Node R lx ll (append piv lr r)
---------------------------------------------------------------------------
-- | Delete Minimum Element -----------------------------------------------
---------------------------------------------------------------------------
{-@ deleteMin            :: ORBT a -> ORBT a @-}
-- | Remove the smallest element (identity on the empty tree).
deleteMin (Leaf)         = Leaf
deleteMin (Node _ x l r) = makeBlack t
  where
    (_, t)               = deleteMin' x l r

{-@ deleteMin' :: k:a -> ORBT {v:a | v < k} -> ORBT {v:a | k < v} -> (a, ORBT a) @-}
-- | Walk down the left spine; returns the removed minimum together
-- with the rebuilt tree, repairing black-height with lbalS when a
-- black child was descended into.
deleteMin' k Leaf r              = (k, r)
deleteMin' x (Node R lx ll lr) r = (k, Node R x l' r)   where (k, l') = deleteMin' lx ll lr
deleteMin' x (Node B lx ll lr) r = (k, lbalS x l' r )   where (k, l') = deleteMin' lx ll lr
---------------------------------------------------------------------------
-- | Rotations ------------------------------------------------------------
---------------------------------------------------------------------------
-- Rebalancing after deletion (lbalS/rbalS) and after insertion
-- (lbal/rbal).  NOTE(review): the clauses of lbalS and rbalS overlap,
-- so their order is significant — do not reorder.
-- | Repair a black-height deficit in the LEFT subtree.
lbalS k (Node R x a b) r              = Node R k (Node B x a b) r
lbalS k l (Node B y a b)              = let t = rbal k l (Node R y a b) in t
lbalS k l (Node R z (Node B y a b) c) = Node R y (Node B k l a) (rbal z b (makeRed c))
lbalS k l r                           = error "nein"

-- | Repair a black-height deficit in the RIGHT subtree (mirror image).
rbalS k l (Node R y b c)              = Node R k l (Node B y b c)
rbalS k (Node B x a b) r              = let t = lbal k (Node R x a b) r in t
rbalS k (Node R x a (Node B y b c)) r = Node R y (lbal x (makeRed a) b) (Node B k c r)
rbalS k l r                           = error "nein"

-- | Insertion rebalance: collapse a red-red violation on the left.
lbal k (Node R y (Node R x a b) c) r  = Node R y (Node B x a b) (Node B k c r)
lbal k (Node R x a (Node R y b c)) r  = Node R y (Node B x a b) (Node B k c r)
lbal k l r                            = Node B k l r

-- | Insertion rebalance: collapse a red-red violation on the right.
rbal x a (Node R y b (Node R z c d))  = Node R y (Node B x a b) (Node B z c d)
rbal x a (Node R z (Node R y b c) d)  = Node R y (Node B x a b) (Node B z c d)
rbal x l r                            = Node B x l r
---------------------------------------------------------------------------
---------------------------------------------------------------------------
---------------------------------------------------------------------------
-- | Repaint a node red; applying this to 'Leaf' is a hard error (the
-- rotations above only ever call it on non-empty subtrees).
makeRed t = case t of
  Node _ x l r -> Node R x l r
  Leaf         -> error "nein"

-- | Repaint the root black; identity on the empty tree.
makeBlack t = case t of
  Leaf         -> Leaf
  Node _ x l r -> Node B x l r
---------------------------------------------------------------------------
-- | Specifications -------------------------------------------------------
---------------------------------------------------------------------------
-- | Ordered Red-Black Trees
{-@ type ORBT a = RBTree <{\root v -> v < root }, {\root v -> v > root}> a @-}
-- | Binary Search Ordering
{-@ data RBTree a <l :: a -> a -> Prop, r :: a -> a -> Prop>
= Leaf
| Node (c :: Color)
(key :: a)
(left :: RBTree <l, r> (a <l key>))
             (right :: RBTree <l, r> (a <r key>))
@-}
|
mightymoose/liquidhaskell
|
docs/slides/BOS14/hs/long/RBTree-ord.hs
|
bsd-3-clause
| 5,703
| 0
| 17
| 1,868
| 1,961
| 983
| 978
| 75
| 7
|
module LetIn1 where
data Tree a = Leaf a | Branch (Tree a) (Tree a)

-- | Collect the leaf values of a tree in left-to-right order.
fringe_global x = walk x
  where
    walk :: Tree a -> [a]
    walk (Leaf v)            = [v]
    walk (Branch left right) = walk left ++ walk right
|
kmate/HaRe
|
old/testing/subIntroPattern/LetIn1.hs
|
bsd-3-clause
| 295
| 0
| 11
| 133
| 108
| 55
| 53
| 7
| 2
|
import System.Exit
import Control.Monad.Trans.State.Strict
eval :: Int -> State Int a -> a
eval p = fst . flip runState p
advance :: Int -> State Int ()
advance = modify' . (+)
loc :: State Int Int
loc = get
emit1 :: State Int ()
emit1 = advance 1
emitN :: Int -> State Int ()
-- adding in the 0 case, breaks with HEAD. 8.2.1 is fine with it.
-- emitN 0 = advance 0
emitN 0 = pure ()
emitN n = advance n
align8 :: State Int ()
align8 = do
bits <- (`mod` 8) <$> loc
emitN (8 - bits)
main :: IO ()
main = do
let p = eval 0 (emit1 >> align8 >> loc)
putStrLn $ show p
if p == 8
then putStrLn "OK" >> exitSuccess
else putStrLn "FAIL" >> exitFailure
|
ezyang/ghc
|
testsuite/tests/simplCore/should_run/T14178.hs
|
bsd-3-clause
| 669
| 0
| 13
| 165
| 279
| 143
| 136
| 24
| 2
|
{-# LANGUAGE TypeFamilies #-}
module T14033 where
-- | Type-level zero in a Peano encoding of the naturals.
newtype Zero = Zero
-- | Type-level successor in a Peano encoding of the naturals.
newtype Succ a = Succ a
-- | Closed type family computing Peano addition by recursion on the
-- first argument.
type family Add n m :: * where
  Add Zero m = m
  Add (Succ n) m = Succ (Add n m)
|
shlevy/ghc
|
testsuite/tests/indexed-types/should_fail/T14033.hs
|
bsd-3-clause
| 185
| 1
| 8
| 52
| 70
| 39
| 31
| -1
| -1
|
module ShouldSucceed where
-- import TheUtils
import qualified Data.Set as Set
import Data.Set (Set)
import Data.List (partition )
-- | A directed graph represented only by its vertex list; edges are
-- carried separately as @[Edge vertex]@ throughout this module.
data Digraph vertex = MkDigraph [vertex]
-- | A directed edge @(from, to)@ (see 'mkEdges').
type Edge  vertex = (vertex, vertex)
-- | A cycle, given as the vertices on it.
type Cycle vertex = [vertex]
mkDigraph = MkDigraph
-- | Strongly-connected components, via a forward DFS followed by a
-- spanning pass over the reversed edges.  NOTE(review): the threading
-- of the (visited, components) accumulators is order-sensitive — keep
-- the statement order as-is.
stronglyConnComp :: Eq vertex => [Edge vertex] -> [vertex] -> [[vertex]]
stronglyConnComp es vs
  = snd (span_tree (new_range reversed_edges)
                   ([],[])
                   ( snd (dfs (new_range es) ([],[]) vs) )
        )
 where
   reversed_edges = map swap es
   swap :: Edge v -> Edge v
   swap (x,y) = (y, x)
   -- successors of w under the given edge list
   new_range [] w = []
   new_range ((x,y):xys) w
     = if x==w
       then (y : (new_range xys w))
       else (new_range xys w)
   -- second pass: each fresh DFS tree is one component
   span_tree r (vs,ns) [] = (vs,ns)
   span_tree r (vs,ns) (x:xs)
     | x `elem` vs = span_tree r (vs,ns) xs
     | otherwise = span_tree r (vs',(x:ns'):ns) xs
       where
         (vs',ns') = dfs r (x:vs,[]) (r x)
   -- depth-first search accumulating (visited, finish order)
   dfs r (vs,ns) [] = (vs,ns)
   dfs r (vs,ns) (x:xs) | x `elem` vs = dfs r (vs,ns) xs
                        | otherwise = dfs r (vs',(x:ns')++ns) xs
                          where
                            (vs',ns') = dfs r (x:vs,[]) (r x)
-- | A strongly-connected component (given as its vertex list) is a
-- genuine cycle unless it is a single vertex without a self-edge.
isCyclic :: Eq vertex => [Edge vertex] -> [vertex] -> Bool
isCyclic edges [v] = elem (v, v) edges
isCyclic _     _   = True
-- | Topologically sort the vertices: 'Succeeded' with the ordered
-- vertex list when the graph is acyclic, otherwise 'Failed' carrying
-- the offending strongly-connected components.
topSort :: (Eq vertex) => [Edge vertex] -> [vertex]
                       -> MaybeErr [vertex] [[vertex]]
topSort edges vertices
 = case cycles of
      [] -> Succeeded [v | [v] <- singletons]
      _ -> Failed cycles
   where
     sccs = stronglyConnComp edges vertices
     -- singleton SCCs without self-edges are the acyclic vertices
     (cycles, singletons) = partition (isCyclic edges) sccs
-- | Dependency information for a set of code fragments: each entry is
-- @(vertex, names defined, names used, code)@.
type FlattenedDependencyInfo vertex name code
   = [(vertex, Set name, Set name, code)]

-- | The vertices of the dependency graph, in input order.
mkVertices :: FlattenedDependencyInfo vertex name code -> [vertex]
mkVertices = map vertexOf
  where
    vertexOf (vertex, _, _, _) = vertex
-- | One edge per (use site, definition site): the source vertex uses a
-- name that the target vertex defines.
-- NOTE(review): the @vertices@ argument is never used in the body —
-- targets are found by scanning @flat_info@ directly.
mkEdges :: (Ord name) =>
            [vertex]
         -> FlattenedDependencyInfo vertex name code
         -> [Edge vertex]
mkEdges vertices flat_info
 = [ (source_vertex, target_vertex)
   | (source_vertex, _, used_names, _) <- flat_info,
     target_name   <- Set.toList used_names,
     target_vertex <- vertices_defining target_name flat_info
   ]
 where
   vertices_defining name flat_info
    = [ vertex |  (vertex, names_defined, _, _) <- flat_info,
                name `Set.member` names_defined
      ]
-- | The code attached to a vertex.  The vertex is expected to be
-- present; if it is not, we now fail with a descriptive error rather
-- than the bare @Prelude.head: empty list@ the old 'head' call gave.
lookupVertex :: (Eq vertex) =>
                 FlattenedDependencyInfo vertex name code
              -> vertex
              -> code
lookupVertex flat_info vertex
 = case code_list of
     (code:_) -> code
     []       -> error "lookupVertex: vertex not found in dependency info"
 where
   code_list = [ code | (vertex',_,_,code) <- flat_info, vertex == vertex']
-- | A component is genuinely recursive unless it is a lone vertex with
-- no self-edge.
isRecursiveCycle :: (Eq vertex) => Cycle vertex -> [Edge vertex] -> Bool
isRecursiveCycle [vertex] edges = elem (vertex, vertex) edges
isRecursiveCycle _        _     = True

-- may go to TheUtils
-- | A simple success-or-failure result.
data MaybeErr a b = Succeeded a | Failed b
|
urbanslug/ghc
|
testsuite/tests/typecheck/should_compile/tc065.hs
|
bsd-3-clause
| 2,795
| 31
| 14
| 711
| 1,117
| 627
| 490
| 68
| 4
|
{-# LANGUAGE TypeFamilies, GADTs #-}
module GADT8 where
-- | A pair whose index is forced to be a tuple type through an
-- equality constraint (equivalent to @Pair :: a -> b -> Pair (a,b)@).
data Pair p where
    Pair :: p~(a,b) => a -> b -> Pair p

-- this works:
--    Pair :: a -> b -> Pair (a,b)

-- | Rebuild a unit pair; matching on 'Pair' brings @p ~ ((), ())@
-- into scope.
foo :: Pair ((), ()) -> Pair ((), ())
foo p = case p of
  Pair x y -> Pair x y
|
urbanslug/ghc
|
testsuite/tests/indexed-types/should_compile/GADT8.hs
|
bsd-3-clause
| 232
| 0
| 9
| 62
| 99
| 55
| 44
| 6
| 1
|
{-# htermination sqrt :: Float -> Float #-}
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/full_haskell/Prelude_sqrt_1.hs
|
mit
| 44
| 0
| 2
| 8
| 3
| 2
| 1
| 1
| 0
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.