code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
-- Intermediate Haskell/Other data structures
-- https://en.wikibooks.org/wiki/Haskell/Other_data_structures
-- | A binary tree carrying data only at the leaves.
data Tree a = Leaf a | Branch (Tree a) (Tree a) deriving (Show)
-- | Example tree: ((1 2) 3).
tree = Branch (Branch (Leaf 1) (Leaf 2)) (Leaf 3)
-- | Apply a function to the value at every leaf of a 'Tree'.
mapTree :: (a -> b) -> Tree a -> Tree b
mapTree f = go
  where
    go (Leaf x)     = Leaf (f x)
    go (Branch l r) = Branch (go l) (go r)
-- | Left-fold over the leaves of a 'Tree' in left-to-right order.
foldTree :: (b -> a -> b) -> b -> Tree a -> b
foldTree f = go
  where
    go acc (Leaf x)     = f acc x
    go acc (Branch l r) = go (go acc l) r
-- A typical binary tree, holding data and left/right branches
data BinTree a = BinEmpty
               | BinBranch a (BinTree a) (BinTree a)
               deriving (Show)
-- | Example: a three-node search tree (2 with children 1 and 3).
binTree1 = BinBranch 2
             (BinBranch 1 BinEmpty BinEmpty)
             (BinBranch 3 BinEmpty BinEmpty)
-- | Example: a larger search tree rooted at 10.
binTree2 = BinBranch 10
             (BinBranch 5
               (BinBranch 4 BinEmpty BinEmpty)
               (BinBranch 6 BinEmpty BinEmpty))
             (BinBranch 15
               (BinBranch 14 BinEmpty BinEmpty)
               BinEmpty)
-- | Apply a function to every element of a binary tree.
mapBinTree :: (a -> b) -> BinTree a -> BinTree b
mapBinTree f = go
  where
    go BinEmpty          = BinEmpty
    go (BinBranch x l r) = BinBranch (f x) (go l) (go r)
-- | 'fmap' for binary trees is exactly 'mapBinTree'.
instance Functor BinTree where
  fmap = mapBinTree
-- | Test whether every element in a binary tree satisfies the predicate.
-- The empty tree vacuously satisfies any predicate.
everyB :: (a -> Bool) -> BinTree a -> Bool
everyB p = go
  where
    go BinEmpty          = True
    go (BinBranch x l r) = p x && go l && go r
-- Test if a binary tree is a binary search tree, that is, EVERY element in
-- the left subtree is less than the data in the current node, and EVERY
-- element in the right subtree is greater.  (The previous version compared a
-- node only against its immediate children, which wrongly accepted trees
-- such as BinBranch 10 (BinBranch 5 BinEmpty (BinBranch 20 BinEmpty BinEmpty)) BinEmpty.)
isBST :: Ord a => BinTree a -> Bool
isBST = go Nothing Nothing
  where
    -- go lo hi t: every element of t lies strictly between the optional
    -- bounds lo and hi (Nothing = unbounded on that side).  Strict
    -- comparisons match the strict (<)/(>) used before, so duplicate
    -- keys are still rejected.
    go _ _ BinEmpty = True
    go lo hi (BinBranch x left right) =
         maybe True (< x) lo
      && maybe True (x <) hi
      && go lo (Just x) left
      && go (Just x) hi right
-- Ugly, contrived datatype for learning to write generalized map/fold
data Weird a b = First a
               | Second b
               | Third [(a,b)]
               | Fourth (Weird a b)
               deriving (Show)
-- | Example value exercising the 'Fourth' and 'Third' constructors.
weirdthing = Fourth $ Third [('a', 5), ('b', 7)]
-- | Map @f@ over the @a@ positions and @g@ over the @b@ positions of a
-- 'Weird', recursing through 'Fourth' and over the pair list of 'Third'.
mapWeird1 :: (a -> c) -> (b -> d) -> Weird a b -> Weird c d
mapWeird1 f _ (First x)     = First (f x)
mapWeird1 _ g (Second y)    = Second (g y)
mapWeird1 f g (Third pairs) = Third [ (f x, g y) | (x, y) <- pairs ]
mapWeird1 f g (Fourth w)    = Fourth (mapWeird1 f g w)
-- | Same contract as 'mapWeird1'; the 'Third' case is handled by unzipping
-- the pair list, mapping each half, and zipping the halves back together.
mapWeird2 :: (a -> c) -> (b -> d) -> Weird a b -> Weird c d
mapWeird2 f g = go
  where
    go (First x)     = First (f x)
    go (Second y)    = Second (g y)
    go (Third list)  = let (xs, ys) = unzip list
                       in Third (zip (map f xs) (map g ys))
    go (Fourth w)    = Fourth (go w)
-- | Same contract as 'mapWeird1'; the 'Third' case recurses one pair at a
-- time, rebuilding the tail via a recursive call on @Third etc@.
mapWeird3 :: (a -> c) -> (b -> d) -> Weird a b -> Weird c d
mapWeird3 f g weird = case weird of
  First item -> First (f item)
  Second item -> Second (g item)
  -- The inner lambda pattern is "safe-partial": mapWeird3 applied to a
  -- Third always yields a Third, so the match cannot fail at runtime.
  Third ((itemA, itemB):etc) -> Third ((f itemA, g itemB) : ((\(Third list) -> list) (mapWeird3 f g (Third etc))))
  Third [] -> Third []
  Fourth weird -> Fourth (mapWeird3 f g weird)
-- instance Functor Weird where
-- fmap = mapWeird1
-- foldWeird :: (a -> c -> c) -> (b -> c -> c) -> ((a, b) -> c) -> (c -> c) -> Weird a b -> c
-- foldWeird f g h i accum (Fourth item) = i (foldWeird f g h i accum item)
-- foldWeird f g h i accum (Third items) = foldl h accum items
-- foldWeird f g h i accum weird = case weird of
-- First item -> f item accum
-- Second item -> g item accum
| strburst/haskell-stuff | wikibooks/otherdatastructs.hs | mit | 3,902 | 0 | 16 | 1,106 | 1,371 | 696 | 675 | 66 | 5 |
module System.AtomicWrite.Writer.StringSpec (spec) where
import Test.Hspec (it, describe, shouldBe, Spec)
import System.AtomicWrite.Writer.String (atomicWriteFile)
import System.IO.Temp (withSystemTempDirectory)
import System.FilePath (joinPath)
import System.PosixCompat.Files
(setFileMode, setFileCreationMask, getFileStatus, fileMode)
{-# ANN module "HLint: ignore Reduce duplication" #-}
-- | Specs for 'atomicWriteFile' on 'String' contents.  The permission tests
-- mutate the process-wide file-creation mask ('setFileCreationMask'), so each
-- one resets it to 0o100022 before asserting, to avoid breaking later specs.
spec :: Spec
spec = describe "atomicWriteFile" $ do
  it "writes contents to a file" $
    withSystemTempDirectory "atomicFileTest" $ \tmpDir -> do
      let path = joinPath [ tmpDir, "writeTest.tmp" ]
      atomicWriteFile path "just testing"
      contents <- readFile path
      contents `shouldBe` "just testing"
  it "preserves the permissions of original file, regardless of umask" $
    withSystemTempDirectory "atomicFileTest" $ \tmpDir -> do
      let filePath = joinPath [tmpDir, "testFile"]
      writeFile filePath "initial contents"
      -- Give the pre-existing file a distinctive mode to check against later.
      setFileMode filePath 0o100644
      newStat <- getFileStatus filePath
      fileMode newStat `shouldBe` 0o100644
      -- New files are created with 100600 perms.
      _ <- setFileCreationMask 0o100066
      -- Create a new file once different mask is set and make sure that mask
      -- is applied.
      writeFile (joinPath [tmpDir, "sanityCheck"]) "with sanity check mask"
      sanityCheckStat <- getFileStatus $ joinPath [tmpDir, "sanityCheck"]
      fileMode sanityCheckStat `shouldBe` 0o100600
      -- Since we move, this makes the new file assume the filemask of 0600
      atomicWriteFile filePath "new contents"
      resultStat <- getFileStatus filePath
      -- reset mask to not break subsequent specs
      _ <- setFileCreationMask 0o100022
      -- Fails when using atomic mv command unless apply perms on initial file
      fileMode resultStat `shouldBe` 0o100644
  it "creates a new file with permissions based on active umask" $
    withSystemTempDirectory "atomicFileTest" $ \tmpDir -> do
      let
        filePath = joinPath [tmpDir, "testFile"]
        sampleFilePath = joinPath [tmpDir, "sampleFile"]
      -- Set somewhat distinctive defaults for test
      _ <- setFileCreationMask 0o100171
      -- We don't know what the default file permissions are, so create a
      -- file to sample them.
      writeFile sampleFilePath "I'm being written to sample permissions"
      newStat <- getFileStatus sampleFilePath
      fileMode newStat `shouldBe` 0o100606
      atomicWriteFile filePath "new contents"
      resultStat <- getFileStatus filePath
      -- reset mask to not break subsequent specs
      _ <- setFileCreationMask 0o100022
      -- The default tempfile permissions are 0600, so this fails unless we
      -- make sure that the default umask is relied on for creation of the
      -- tempfile.
      fileMode resultStat `shouldBe` 0o100606
| bitemyapp/atomic-write | spec/System/AtomicWrite/Writer/StringSpec.hs | mit | 2,955 | 0 | 16 | 738 | 497 | 252 | 245 | 44 | 1 |
{-# LANGUAGE JavaScriptFFI #-}
-- | A wrapper over the Electron crash reporter API, as documented
-- <https://electron.atom.io/docs/api/crash-reporter here>.
--
-- After 'start' is called a crash reporter process will be spawned such that
-- if a crash occurs, a POST request with mimetype @multipart/form-data@ will
-- be sent to the @submitURL@. It has the following fields:
--
-- * @ver@: a string representing the version of Electron running
-- * @platform@: a string representing the current platform, e.g.: @"win32"@.
-- * @process_type@: either @"main"@ or @"renderer"@.
-- * @guid@: a string that is a globally unique identifier for this system.
-- * @_version@: the version specified in @package.json@ as a string.
-- * @_productName@: the product name specified in the 'CrashReporterOptions'.
-- * @prod@: the name of the underlying product (should be @"Electron"@).
-- * @_companyName@: the company name specified in the 'CrashReporterOptions'.
-- * @upload_file_minidump@: a Windows minidump file.
--
-- If the @_extraData@ field of the 'CrashReporterOptions' object given to
-- the 'start' function had any fields, each key-value pair @key: value@
-- will be added to the POST request as a field named @key@ with contents
-- equal to the serialization of @value@.
module GHCJS.Electron.CrashReporter where
import Data.Text (Text)
import qualified Data.Text as Text
import GHCJS.Electron.Types
import JavaScript.Object
-- | Opaque handle to the Electron @crashReporter@ object (a raw 'JSVal'
-- obtained via 'getCrashReporter').
newtype CrashReporter
  = MkCrashReporter JSVal
-- | A single crash report, as returned by the crash reporter:
-- its date (as text) and a numeric identifier.
data CrashReport
  = MkCrashReport
    { _date :: Text -- ^ Date of the report.
    , _ID   :: Int  -- ^ Numeric identifier of the report.
    }
-- | Options accepted by 'start'.  See the module header for how these
-- fields appear in the crash-report POST request.
data CrashReporterOptions
  = MkCrashReporterOptions
    { _companyName :: Maybe Text
      -- ^ The company name to provide with the crash report.
    , _submitURL :: Text
      -- ^ Crash reports will be sent as POST requests to this URL.
    , _productName :: Maybe Text
      -- ^ The product name. If not provided, defaults to @app.getName()@.
    , _uploadToServer :: Bool
      -- ^ Whether crash reports should be sent to the server.
    , _ignoreSystemHandler :: Bool
      -- ^ Should the system crash handler be ignored?
    , _extraData :: Object
      -- ^ An arbitrary JSON object to send with the crash report.
    }
-- | Start the crash reporter with the given options.
--
-- NOTE(review): not yet implemented -- calling this diverges via 'undefined'.
-- The intended implementation presumably marshals the options to a 'JSVal'
-- and invokes 'unsafeStart' on 'getCrashReporter'; confirm before use.
start :: CrashReporterOptions -> IO CrashReporter
start = undefined -- FIXME: implement
-- | Get the canonical 'CrashReporter' object, i.e.: the value of
-- @require('electron').crashReporter@.
foreign import javascript safe
  "$r = require('electron').crashReporter;"
  getCrashReporter :: IO CrashReporter
                      -- ^ A crash reporter object.
-- | Start the crash reporter with the given options.
foreign import javascript safe
  "$1.start($2);"
  unsafeStart :: CrashReporter
                 -- ^ The crash reporter object to use.
              -> JSVal
                 -- ^ An options object.
              -> IO ()
-- | Returns the latest crash report. If no crash reports have been sent or the
-- crash reporter has not been started, @null@ is returned instead.
-- (Hence the raw 'JSVal' result rather than @Maybe CrashReport@ -- callers
-- must check for null themselves.)
foreign import javascript safe
  "$r = $1.getLastCrashReport();"
  unsafeGetLastCrashReport :: CrashReporter
                              -- ^ The crash reporter object to use.
                           -> IO JSVal -- Maybe CrashReport
                              -- ^ The last crash report if there is one.
-- | Returns a list of all uploaded crash reports to date.
foreign import javascript safe
  "$r = $1.getUploadedReports();"
  unsafeGetUploadedReports :: CrashReporter
                              -- ^ The crash reporter object to use.
                           -> IO (Array CrashReport)
                              -- ^ All uploaded crash reports.
-- not implemented: crashReporter.getUploadToServer
--   reason: Linux/Mac OS only and unnecessary
-- not implemented: crashReporter.setUploadToServer
--   reason: Linux/Mac OS only and unnecessary
-- not implemented: crashReporter.setExtraParameter
--   reason: Mac OS only and probably unnecessary
{-# LANGUAGE LambdaCase #-}
module Main where
import Language.Haskell.Exts
import System.Environment
import System.Exit
-- | Parse a Haskell source file.  On success the module AST is returned;
-- on failure the error location and message are printed and the program
-- exits with a non-zero status.
runParser :: FilePath -> IO Module
runParser path = do
  result <- parseFile path
  case result of
    ParseOk ast -> pure ast
    ParseFailed loc err -> do
      putStrLn ("Parsing failure: " ++ prettyPrint loc)
      putStrLn err
      exitFailure
-- | Entry point: parse the file named by the single command-line argument
-- and report success.  Previously a bare irrefutable bind (@[f] <- getArgs@)
-- died with an opaque pattern-match failure on a wrong argument count; now
-- we print a usage message and exit cleanly instead.
main :: IO ()
main = do
  args <- getArgs
  case args of
    [f] -> do
      _ <- runParser f
      putStrLn "Parsed OK"
    _ -> do
      putStrLn "Usage: hi FILE"
      exitFailure
-- | Return the first element of a list; errors on the empty list.
head' :: [a] -> a
head' []      = error "No head for empty lists!"
head' (x : _) = x
-- | Describe a list as empty, a singleton, or longer, using a local binding
-- for the varying suffix.
describeList :: [a] -> String
describeList ls = "The list is " ++ suffix
  where
    suffix = case ls of
      []  -> "empty."
      [_] -> "a singleton list."
      _   -> "a longer list."
-- | Same behavior as 'describeList', written with an inline case expression
-- instead of a where-bound helper.
describeList2 :: [a] -> String
describeList2 ls =
  "The list is " ++ case ls of
    []  -> "empty."
    [_] -> "a singleton list."
    _   -> "a longer list."
| v0lkan/learning-haskell | case.hs | mit | 537 | 0 | 9 | 229 | 158 | 82 | 76 | 12 | 3 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE TupleSections, LambdaCase, RecordWildCards,
OverloadedLists, OverloadedStrings,
FlexibleContexts, ScopedTypeVariables,
MultiParamTypeClasses,
DeriveGeneric
#-}
module HsToCoq.ConvertHaskell.Declarations.TyCl (
convertModuleTyClDecls,
-- * Convert single declarations
ConvertedDeclaration(..), convDeclName,
convertTyClDecl,
-- * Mutually-recursive declaration groups
DeclarationGroup(..), singletonDeclarationGroup,
-- * Converting 'DeclarationGroup's
convertDeclarationGroup, groupTyClDecls,
-- * Argument specifiers
generateArgumentSpecifiers, generateGroupArgumentSpecifiers,
-- * Record accessors
generateRecordAccessors, generateGroupRecordAccessors
) where
import Control.Lens
import HsToCoq.Util.Generics
import Data.Semigroup (Semigroup(..))
import Data.Bifunctor
import Data.Foldable
import Data.Traversable
import HsToCoq.Util.Traversable
import Data.Maybe
import Data.List.NonEmpty (NonEmpty(..), nonEmpty)
import HsToCoq.Util.List
import Control.Arrow ((&&&))
import Control.Monad
import qualified Data.Set as S
import qualified Data.Map.Strict as M
import GHC hiding (Name, HsString)
import HsToCoq.Coq.Gallina as Coq
import HsToCoq.Coq.Gallina.Util as Coq
import HsToCoq.Coq.FreeVars
import HsToCoq.Coq.Pretty
import HsToCoq.Coq.Subst
import HsToCoq.Util.FVs
#if __GLASGOW_HASKELL__ >= 806
import HsToCoq.Util.GHC.HsTypes (noExtCon)
#endif
import Data.Generics hiding (Generic, Fixity(..))
import HsToCoq.ConvertHaskell.Parameters.Edits
import HsToCoq.ConvertHaskell.TypeInfo
import HsToCoq.ConvertHaskell.Monad
import HsToCoq.ConvertHaskell.Variables
import HsToCoq.ConvertHaskell.Axiomatize
import HsToCoq.ConvertHaskell.Declarations.TypeSynonym
import HsToCoq.ConvertHaskell.Declarations.DataType
import HsToCoq.ConvertHaskell.Declarations.Class
import HsToCoq.ConvertHaskell.Declarations.Notations (buildInfixNotations)
--------------------------------------------------------------------------------
-- | The result of converting one Haskell type-level declaration: a
-- (co)inductive data type (the 'Bool' is 'True' for coinductive), a type
-- synonym, a type class, an axiomatized/redefined axiom (name and type),
-- or a recorded translation failure (name and comment sentence).
data ConvertedDeclaration = ConvData Bool IndBody
                          | ConvSyn SynBody
                          | ConvClass ClassBody
                          | ConvAxiom (Qualid,Term)
                          | ConvFailure Qualid Sentence
                          deriving (Eq, Ord, Show, Read)
-- | Bound-variable analysis delegates to the wrapped body; an axiom is
-- analyzed via the 'typedAxiom' sentence it will eventually produce.
instance HasBV Qualid ConvertedDeclaration where
  bvOf (ConvData _ ind)    = bvOf ind
  bvOf (ConvSyn syn)       = bvOf syn
  bvOf (ConvClass cls)     = bvOf cls
  bvOf (ConvAxiom axm)     = bvOf $ uncurry typedAxiom axm
  bvOf (ConvFailure _ sen) = bvOf sen
-- | The Coq-level name introduced by a converted declaration.
convDeclName :: ConvertedDeclaration -> Qualid
convDeclName d = case d of
  ConvData _ (IndBody tyName _ _ _)                       -> tyName
  ConvSyn (SynBody synName _ _ _)                         -> synName
  ConvClass (ClassBody (ClassDefinition clsName _ _ _) _) -> clsName
  ConvAxiom (axName, _)                                   -> axName
  ConvFailure n _                                         -> n
-- | Record a translation failure for @name@ as a comment sentence instead
-- of aborting the whole conversion (used with 'handleIfPermissive').
failTyClDecl :: ConversionMonad r m => Qualid -> GhcException -> m (Maybe ConvertedDeclaration)
failTyClDecl name e = pure $ Just $
  ConvFailure name $ translationFailedComment (qualidBase name) e
-- | Convert a single Haskell type/class declaration to Coq, consulting the
-- edits environment first: @skip class@, @skip@, @redefine@, and
-- @axiomatize@ directives are all honored before falling back to a direct
-- translation ('translateIt').  Returns 'Nothing' when the declaration is
-- skipped; failures may be downgraded to 'ConvFailure' via
-- 'handleIfPermissive'.
convertTyClDecl :: ConversionMonad r m => TyClDecl GhcRn -> m (Maybe ConvertedDeclaration)
convertTyClDecl decl = do
  coqName <- var TypeNS . unLoc $ tyClDeclLName decl
  withCurrentDefinition coqName $ handleIfPermissive (failTyClDecl coqName) $
    view (edits.skippedClasses.contains coqName) >>= \case
      True | isClassDecl decl -> pure Nothing
           | otherwise -> convUnsupported "skipping non-type classes with `skip class`"
      False ->
        definitionTask coqName >>= \case
          SkipIt
            | isClassDecl decl -> convUnsupported "skipping type class declarations (without `skip class')"
            | otherwise -> pure Nothing
          -- A `redefine' edit only applies when the replacement's shape is
          -- compatible with the Haskell declaration being replaced.
          RedefineIt redef ->
            Just <$> case (decl, redef) of
              (_, CoqAxiomDef axm) ->
                pure $ ConvAxiom axm
              (SynDecl{}, CoqDefinitionDef def) ->
                pure . ConvSyn $ case def of
                  DefinitionDef _ name args oty body _ -> SynBody name args oty body
                  LetDef name args oty body -> SynBody name args oty body
              (DataDecl{}, CoqInductiveDef ind) ->
                case ind of
                  Inductive (body :| []) [] -> pure $ ConvData False body
                  CoInductive (body :| []) [] -> pure $ ConvData True body
                  Inductive (_ :| _:_) _ -> editFailure $ "cannot redefine data type to mutually-recursive types"
                  Inductive _ (_:_) -> editFailure $ "cannot redefine data type to include notations"
                  CoInductive _ _ -> editFailure $ "cannot redefine data type to be coinductive"
              (FamDecl{}, _) ->
                editFailure "cannot redefine type/data families"
              (ClassDecl{}, _) ->
                editFailure "cannot redefine type class declarations"
              _ ->
                -- Incompatible (declaration, redefinition) pair: build a
                -- descriptive error from both sides.
                let from = case decl of
                      FamDecl{} -> "a type/data family"
                      SynDecl{} -> "a type synonym"
                      DataDecl{} -> "a data type"
                      ClassDecl{} -> "a type class"
#if __GLASGOW_HASKELL__ >= 806
                      XTyClDecl v -> noExtCon v
#endif
                    to = case redef of
                      CoqDefinitionDef _ -> "a Definition"
                      CoqFixpointDef _ -> "a Fixpoint"
                      CoqInductiveDef _ -> "an Inductive"
                      CoqInstanceDef _ -> "an Instance"
                      CoqAxiomDef _ -> "an Axiom"
                      CoqAssertionDef apf -> anAssertionVariety apf
                in editFailure $ "cannot redefine " ++ from ++ " to be " ++ to
          AxiomatizeIt SpecificAxiomatize ->
            let (what, whats) = case decl of
                  FamDecl{} -> ("type/data family", "type/data families")
                  SynDecl{} -> ("type synonym", "type synonyms")
                  DataDecl{} -> ("data type", "data types")
                  ClassDecl{} -> ("type class", "type classes")
#if __GLASGOW_HASKELL__ >= 806
                  XTyClDecl v -> noExtCon v
#endif
            in convUnsupportedIn ("axiomatizing " ++ whats ++ " (without `redefine Axiom')") what (showP coqName)
          TranslateIt ->
            translateIt coqName
          AxiomatizeIt GeneralAxiomatize ->
            -- If we're axiomatizing the MODULE, then we still want to translate
            -- type-level definitions.
            translateIt coqName
  where
    -- Direct translation of the declaration, dispatching on its flavor;
    -- the coinductive-types edit decides inductive vs. coinductive output.
    translateIt :: LocalConvMonad r m => Qualid -> m (Maybe ConvertedDeclaration)
    translateIt coqName =
      let isCoind = view (edits.coinductiveTypes.contains coqName)
      in Just <$> case decl of
           FamDecl{} -> convUnsupported "type/data families"
           SynDecl{..} -> ConvSyn <$> convertSynDecl tcdLName (hsq_explicit tcdTyVars) tcdRhs
           DataDecl{..} -> ConvData <$> isCoind <*> convertDataDecl tcdLName (hsq_explicit tcdTyVars) tcdDataDefn
           ClassDecl{..} -> ConvClass <$> convertClassDecl tcdCtxt tcdLName (hsq_explicit tcdTyVars) tcdFDs tcdSigs tcdMeths tcdATs tcdATDefs
#if __GLASGOW_HASKELL__ >= 806
           XTyClDecl v -> noExtCon v
#endif
--------------------------------------------------------------------------------
-- | A (possibly mutually-recursive) group of converted type-level
-- declarations, bucketed by the kind of Coq vernacular each will generate.
data DeclarationGroup = DeclarationGroup { dgInductives   :: [IndBody]
                                         , dgCoInductives :: [IndBody]
                                         , dgSynonyms     :: [SynBody]
                                         , dgClasses      :: [ClassBody]
                                         , dgAxioms       :: [(Qualid,Term)]
                                         , dgFailures     :: [Sentence] }
                      deriving (Eq, Ord, Show, Read, Generic)
-- | Groups combine field-wise, via the generically-derived operations.
instance Semigroup DeclarationGroup where (<>) = (%<>)
instance Monoid DeclarationGroup where mempty = gmempty
-- | Lift one converted declaration into a group containing only it,
-- placed in the bucket matching its constructor.
singletonDeclarationGroup :: ConvertedDeclaration -> DeclarationGroup
singletonDeclarationGroup (ConvData False ind) = DeclarationGroup [ind] []    []    []    []    []
singletonDeclarationGroup (ConvData True coi)  = DeclarationGroup []    [coi] []    []    []    []
singletonDeclarationGroup (ConvSyn syn)        = DeclarationGroup []    []    [syn] []    []    []
singletonDeclarationGroup (ConvClass cls)      = DeclarationGroup []    []    []    [cls] []    []
singletonDeclarationGroup (ConvAxiom axm)      = DeclarationGroup []    []    []    []    [axm] []
singletonDeclarationGroup (ConvFailure _ sen)  = DeclarationGroup []    []    []    []    []    [sen]
--------------------------------------------------------------------------------
-- | Render one 'DeclarationGroup' as Coq sentences.  Only certain mixes of
-- declarations are supported (a lone axiom, purely (co)inductive groups, a
-- single synonym, inductives mixed with synonyms, a single class, or
-- nothing); any other combination is reported as "too much mutual
-- recursion".  Failure comments always come first in the output.
convertDeclarationGroup :: ConversionMonad r m => DeclarationGroup -> m [Sentence]
convertDeclarationGroup DeclarationGroup{..} =
  (dgFailures ++) <$> case (nonEmpty dgInductives, nonEmpty dgCoInductives, nonEmpty dgSynonyms, nonEmpty dgClasses, nonEmpty dgAxioms) of
    (Nothing, Nothing, Nothing, Nothing, Just [axm]) ->
      pure [uncurry typedAxiom axm]
    (Just inds, Nothing, Nothing, Nothing, Nothing) ->
      pure [InductiveSentence $ Inductive inds []]
    (Nothing, Just coinds, Nothing, Nothing, Nothing) ->
      pure [InductiveSentence $ CoInductive coinds []]
    (Nothing, Nothing, Just (SynBody name args oty def :| []), Nothing, Nothing) ->
      let sigs = M.empty -- TODO: fixity information
      in pure $ [DefinitionSentence $ DefinitionDef Global name args oty def NotExistingClass]
             ++ (NotationSentence <$> buildInfixNotations sigs name)
    {- (Just inds, Nothing, Just syns, Nothing, Nothing) ->
      pure $ foldMap recSynType syns
          ++ [InductiveSentence $ Inductive inds (orderRecSynDefs $ recSynDefs inds syns)] -}
    -- Synonyms entangled with inductives: fully expand the synonym bodies
    -- into the inductives, then emit the synonyms as ordinary Definitions
    -- afterwards, in dependency order.
    (Just inds, Nothing, Just syns, Nothing, Nothing) ->
      let synDefs  = recSynDefs inds syns
          synDefs' = expandAllDefs synDefs
      in pure $ [InductiveSentence $ Inductive (subst synDefs' inds) []]
             ++ (orderRecSynDefs $ synDefs)
    (Nothing, Nothing, Nothing, Just (classDef :| []), Nothing) ->
      classSentences classDef
    (Nothing, Nothing, Nothing, Nothing, Nothing) ->
      pure []
    (_, _, _, _, _) ->
      let indName (IndBody name _ _ _) = name
          clsName (ClassBody (ClassDefinition name _ _ _) _) = name
          axmName (name, _) = name
          explain :: String -> String -> (a -> Qualid) -> [a] -> Maybe (String, String)
          explain _what _whats _name []  = Nothing
          explain what  _whats name  [x] = Just (what, showP $ name x)
          explain _what whats  name  xs  = Just (whats, explainStrItems (showP . name) "" "," "and" "" "" xs)
      in convUnsupportedIns "too much mutual recursion" $
           catMaybes [ explain "inductive type"   "inductive types"   indName dgInductives
                     , explain "coinductive type" "coinductive types" indName dgCoInductives
                     , explain "type synonym"     "type synonyms"     synName dgSynonyms
                     , explain "type class"       "type classes"      clsName dgClasses
                     , explain "type axiom"       "type axioms"       axmName dgAxioms ]
  where
    synName :: SynBody -> Qualid
    synName (SynBody name _ _ _) = name
    -- Iterate substitution to a fixed point so synonyms that refer to other
    -- synonyms are fully expanded.
    expandAllDefs :: M.Map Qualid Term -> M.Map Qualid Term
    expandAllDefs map =
      let map' = M.map (subst map) map
      in if map == map' then map' else expandAllDefs map'
    indParams (IndBody _ params _ _) = S.fromList $ foldMap (toListOf binderIdents) params
    -- FIXME use real substitution
    avoidParams params = until (`S.notMember` params) (qualidExtendBase "_")
    -- Turn a synonym body into a (name, term) pair, renaming any identifiers
    -- that would capture an inductive parameter.
    recSynMapping :: S.Set Qualid -> SynBody -> (Qualid, Term)
    recSynMapping params (SynBody name args oty def) =
      let mkFun = maybe id Fun . nonEmpty
          withType = maybe id (flip HasType)
      in (name, everywhere (mkT $ avoidParams params) .
                  mkFun args $ withType oty def)
    recSynDefs :: NonEmpty IndBody -> NonEmpty SynBody -> M.Map Qualid Term
    recSynDefs inds = M.fromList . toList . fmap (recSynMapping $ foldMap indParams inds)
    -- Emit the synonym Definitions in topologically-sorted dependency order.
    orderRecSynDefs synDefs =
      [ DefinitionSentence $ DefinitionDef Global syn [] Nothing (synDefs M.! syn) NotExistingClass
      | syn <- foldMap toList $ topoSortEnvironment synDefs ]
{-
synName = qualidExtendBase "__raw"
recSynType :: SynBody -> [Sentence] -- Otherwise GHC infers a type containing @~@.
recSynType (SynBody name _ _ _) =
[ InductiveSentence $ Inductive [IndBody (synName name) [] (Sort Type) []] []
, NotationSentence $ ReservedNotationIdent (qualidBase name) ]
indParams (IndBody _ params _ _) = S.fromList $ foldMap (toListOf binderIdents) params
recSynMapping params (SynBody name args oty def) =
let mkFun = maybe id Fun . nonEmpty
withType = maybe id (flip HasType)
in (name, App "GHC.Base.Synonym"
$ fmap PosArg [ Qualid (synName name)
, everywhere (mkT $ avoidParams params) .
mkFun args $ withType oty def ])
recSynDefs inds = M.fromList . toList . fmap (recSynMapping $ foldMap indParams inds)
orderRecSynDefs synDefs =
[ NotationIdentBinding (qualidBase syn) $ synDefs M.! syn
| syn <- foldMap toList $ topoSortEnvironment synDefs ] -}
--------------------------------------------------------------------------------
-- We expect to be in the presence of
--
-- @
-- Set Implicit Arguments.
-- Unset Strict Implicit.
-- Unset Printing Implicit Defensive.
-- @
--
-- which creates implicit arguments correctly for most constructors. The
-- exception are constructors which don't mention some parameters in their
-- arguments; any missing parameters are not made implicit. Thus, for those
-- cases, we add the argument specifiers manually.
-- TODO: May be buggy with mixed parameters/indices (which can only arise via
-- edits).
-- TODO: GADTs.
-- TODO: Keep the argument specifiers with the data types.
-- | Generate explicit @Arguments@ specifiers for each constructor of a
-- parameterized inductive body, so that parameters a constructor does not
-- mention still become (maximally-inserted) implicits.  Types without
-- parameters need nothing.
generateArgumentSpecifiers :: ConversionMonad r m => IndBody -> m [Arguments]
generateArgumentSpecifiers (IndBody _ params _resTy cons)
  | null params = pure []
  | otherwise = catMaybes <$> traverse setImplicits cons
  where
    setImplicits (con,binders,tm) = lookupConstructorFields con >>= \case
      -- Ignore cons we do not know anything about
      -- (e.g. because they are skipped or redefined)
      Nothing -> pure Nothing
      Just fields -> do
        let bindersInTm = concatMap collectBinders tm
        let fieldCount = case fields of NonRecordFields count -> count
                                        _ -> 0
        -- One specifier per argument, in order: type parameters (implicit),
        -- the constructor's own binders, binders from its result type, and
        -- finally its (explicit) fields.
        pure . Just . Arguments Nothing con
          $ replicate paramCount (underscoreArg ArgMaximal)
         ++ map (underscoreArg . binderArgumentSpecifiers) binders
         ++ map (underscoreArg . binderArgumentSpecifiers) bindersInTm
         ++ replicate fieldCount (underscoreArg ArgExplicit)
    paramCount = length params
    underscoreArg eim = ArgumentSpec eim UnderscoreName Nothing
    binderArgumentSpecifiers binder = case binderExplicitness binder of
      Explicit -> ArgExplicit
      Implicit -> ArgMaximal
-- | Argument specifiers for every inductive and coinductive body in the
-- group, wrapped as sentences.
generateGroupArgumentSpecifiers :: ConversionMonad r m => DeclarationGroup -> m [Sentence]
generateGroupArgumentSpecifiers dg =
  map ArgumentsSentence <$>
    foldTraverse generateArgumentSpecifiers (dgInductives dg ++ dgCoInductives dg)
--------------------------------------------------------------------------------
-- | Generate a @GHC.Err.Default@ instance for a data type, when some
-- constructor returns the bare type (each of that constructor's arguments
-- is filled with @GHC.Err.default@).  Honors a `skip' edit on the instance
-- name; types with no suitable constructor get nothing.
generateDefaultInstance :: ConversionMonad r m => IndBody -> m [Sentence]
generateDefaultInstance (IndBody tyName _ _ cons)
  | Just (con, bndrs, _) <- find suitableCon cons
  -- Instance Default_TupleSort : GHC.Err.Default TupleSort :=
  --   GHC.Err.Build_Default _ BoxedTuple.
  = view (edits.skipped.contains inst_name) >>= \case
      True -> pure []
      False -> pure $ pure $ InstanceSentence $
        InstanceTerm inst_name []
          (App1 "GHC.Err.Default" (Qualid tyName))
          (App2 "GHC.Err.Build_Default" Underscore (foldl (\acc _ -> (App1 acc "GHC.Err.default")) (Qualid con) bndrs))
          Nothing
  where
    inst_name = qualidMapBase ("Default__" <>) tyName
    suitableCon (_, _bndrs, Just ty) = ty == Qualid tyName
    suitableCon _ = False
generateDefaultInstance _ = pure []
-- | Default instances for every inductive body in the group.
-- NOTE(review): coinductive bodies are skipped here, unlike the
-- argument-specifier pass -- confirm this is intentional.
generateGroupDefaultInstances :: ConversionMonad r m => DeclarationGroup -> m [Sentence]
generateGroupDefaultInstances = foldTraverse generateDefaultInstance . dgInductives
-- | Generate one accessor Definition per (non-skipped) record field of an
-- inductive body.  Each accessor pattern-matches on every constructor:
-- constructors carrying the field return it, and all others raise
-- @GHC.Err.error@ with a partial-selector message (mirroring Haskell's
-- partial record selectors).
generateRecordAccessors :: ConversionMonad r m => IndBody -> m [Definition]
generateRecordAccessors (IndBody tyName params resTy cons) = do
  let conNames = view _1 <$> cons
  allFields <- catMaybes <$> mapM lookupConstructorFields conNames
  let recordFields = concat [ fields | RecordFields fields <- allFields ]
  -- Deduplicate: a field may appear in several constructors.
  let nubedFields = S.toAscList $ S.fromList recordFields
  filteredFields <- filterM (\field -> not <$> view (edits.skipped.contains field)) nubedFields
  for filteredFields $ \(field :: Qualid) -> withCurrentDefinition field $ do
    equations <- for conNames $ \con -> do
      -- args: the pattern for this constructor's arguments; hasField: does
      -- this constructor actually carry the field?
      (args, hasField) <- lookupConstructorFields con >>= \case
        Just (NonRecordFields count) ->
          pure (replicate count UnderscorePat, False)
        Just (RecordFields conFields0) ->
          pure $ go conFields0 where
            go [] = ([], False)
            go (conField : conFields)
              | field == conField = (Coq.VarPat (qualidBase field) : map (const UnderscorePat) conFields, True)
              | otherwise = first (UnderscorePat :) $ go conFields
        Nothing -> throwProgramError $ "internal error: unknown constructor `"
                                    <> show con <> "' for type `"
                                    <> show tyName <> "'"
      pure . Equation [MultPattern [ArgsPat con args]] $
        if hasField
        then Qualid field
        else App1 "GHC.Err.error"
               (HsString $ "Partial record selector: field `"
                        <> qualidBase field <> "' has no match in constructor `"
                        <> qualidBase con <> "' of type `"
                        <> qualidBase tyName <> "'")
    arg <- genqid "arg"
    -- Recover the type's indices from its result type, and replace any
    -- underscore binder names with fresh identifiers so they can be
    -- referenced in the accessor's type.
    let indices (Forall bs t) = toList bs ++ indices t
        indices (Arrow t1 t2) = mkBinders Coq.Explicit [UnderscoreName] t1 : indices t2
        indices _ = []
        deunderscore UnderscoreName = Ident <$> genqid "ty"
        deunderscore name = pure name
    typeArgs <- for (params ++ indices resTy) $ \case
      ExplicitBinder name -> ExplicitBinder <$> deunderscore name
      ImplicitBinders names -> ImplicitBinders <$> traverse deunderscore names
      Typed gen ex names kind -> (Typed gen ex ?? kind) <$> traverse deunderscore names
      binder -> pure binder
    let implicitArgs = toImplicitBinder <$> typeArgs
        argBinder = mkBinders Coq.Explicit
          [Ident arg] (appList (Qualid tyName) $ binderArgs typeArgs)
    pure . (\ m -> DefinitionDef Global field (implicitArgs ++ [argBinder]) Nothing m NotExistingClass) $
      (Coq.Match [MatchItem (Qualid arg) Nothing Nothing] Nothing equations)
-- | Record accessors for every inductive body in the group, wrapped as
-- sentences.
generateGroupRecordAccessors :: ConversionMonad r m => DeclarationGroup -> m [Sentence]
generateGroupRecordAccessors dg =
  map DefinitionSentence <$> foldTraverse generateRecordAccessors (dgInductives dg)
--------------------------------------------------------------------------------
-- | The type name followed by every constructor name of an inductive body.
indNames :: IndBody -> [Qualid]
indNames (IndBody tyName _params _resTy cons) =
  tyName : [ conName | (conName, _, _) <- cons ]
-- | Infix-notation sentences for every type and constructor name in the
-- group's inductive and coinductive bodies.
generateGroupDataInfixNotations :: ConversionMonad r m => DeclarationGroup -> m [Sentence]
generateGroupDataInfixNotations dg =
  let sigs = M.empty -- TODO: fixity information
      names = concatMap indNames (dgInductives dg ++ dgCoInductives dg)
  in pure $ map NotationSentence (concatMap (buildInfixNotations sigs) names)
--------------------------------------------------------------------------------
-- | Convert all type-level declarations and partition them into
-- strongly-connected 'DeclarationGroup's, topologically sorted by their
-- free-variable dependencies.
groupTyClDecls :: ConversionMonad r m
               => [TyClDecl GhcRn] -> m [DeclarationGroup]
groupTyClDecls decls = do
  bodies <- traverse convertTyClDecl decls <&>
              M.fromList . map (convDeclName &&& id) . catMaybes
  -- We need to do this here, because topoSortEnvironment expects
  -- a pure function as the first argument
  bodies_fvars <- for bodies $ \decl -> do
    let vars = getFreeVars' decl
    -- This is very crude; querying all free variables as if
    -- they are constructor names:
    -- ctypes <- setMapMaybeM lookupConstructorType vars
    -- With interface loading, this is too crude.
    return $ vars -- <> ctypes
  pure $ map (foldMap $ singletonDeclarationGroup . (bodies M.!))
       $ topoSortEnvironmentWith id bodies_fvars
-- | Top-level entry point: group the module's type-level declarations, then
-- run each sentence-generating pass over every group and concatenate the
-- results (group conversion, argument specifiers, default instances,
-- record accessors, and infix notations, in that order).
convertModuleTyClDecls :: ConversionMonad r m
                       => [TyClDecl GhcRn] -> m [Sentence]
convertModuleTyClDecls = fork [ foldTraverse convertDeclarationGroup
                              , foldTraverse generateGroupArgumentSpecifiers
                              , foldTraverse generateGroupDefaultInstances
                              , foldTraverse generateGroupRecordAccessors
                              , foldTraverse generateGroupDataInfixNotations
                              ]
                     <=< groupTyClDecls
  -- Apply every pass to the same input and merge the outputs.
  where fork fns x = mconcat <$> sequence (map ($x) fns)
| antalsz/hs-to-coq | src/lib/HsToCoq/ConvertHaskell/Declarations/TyCl.hs | mit | 22,825 | 7 | 29 | 7,034 | 5,137 | 2,637 | 2,500 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE MultiWayIf #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Notification(ws, notify) where
import Network.Wai
import Network.Wai.Handler.WebSockets
import Network.HTTP.Types
import Network.WebSockets
import Database.Persist hiding (get)
import Data.Aeson
import Data.Text hiding (filter)
import Data.Either
import Data.Maybe
import Data.Typeable
import Data.List as L
import qualified Data.Map.Strict as MS
import Control.Monad
import Control.Exception
import Control.Concurrent.STM
import Data.Text.Encoding
import qualified Data.ByteString.Lazy as BL
import qualified Data.Vault.Lazy as V
import Ledger
import Model.Account as M
import Model.Notification
import DB.Schema as S
import Http
-- | WAI handler for the per-account notification websocket.  An account may
-- subscribe to its own id; only admins may subscribe to @"*"@ (all
-- accounts).  Throws 'NotAutorized' when the check fails; non-websocket
-- requests get a 400 response via @backupApp@.
ws ledger uriParams req respond = do
  when (not authorized) $ throw NotAutorized
  websocketsOr defaultConnectionOptions (wsApp ledger accountName) backupApp req respond
  where
    -- 'fromJust' assumes the ":id" URI parameter is always present --
    -- presumably guaranteed by routing; TODO confirm.
    accountName = decodeUtf8 $ fromJust $ lookup "id" uriParams
    authorized =
      case join $ V.lookup (keyAuth ledger) (vault req) of
        Nothing -> False
        Just auth ->
          -- NOTE(review): there is no 'Nothing' alternative in this inner
          -- case; it is unreachable only if the "id" parameter is always
          -- present (see accountName above) -- confirm.
          case lookup "id" uriParams of
            Just "*" -> Ledger.isAdmin auth
            Just name -> decodeUtf8 name == Ledger.user auth
    backupApp :: Application
    backupApp _ respond =
      respond $ responseLBS status400 [] "not a websocket request"
-- | Websocket server app for one account: accept the connection, atomically
-- register a fresh broadcast channel in the ledger's listener map, then pump
-- messages via 'wsWorker' until the connection drops, at which point the
-- channel is atomically unregistered.
wsApp :: Ledger -> Text -> ServerApp
wsApp ledger accountName pending_conn = do
  conn <- acceptRequest pending_conn
  -- we are connected: register (chan, conn) under this account name
  chan <- atomically $ do
    chan <- newTChan
    m <- readTVar $ listeners ledger
    let conns' = case MS.lookup accountName m of
          Nothing -> [(chan, conn)]
          Just conns -> (chan, conn) : conns
    writeTVar (listeners ledger) $ MS.insert accountName conns' m
    return chan
  putStrLn $ ">>> websocket connection for account: " ++ unpack accountName
  catch (wsWorker ledger conn chan) $ caught chan
  where
    -- Connection closed (or errored): remove this channel from the
    -- listener map so 'notify' stops writing to it.
    caught chan (e :: ConnectionException) = do
      -- we have disconnected
      putStrLn $ ">>> websocket disconnected for account: " ++ unpack accountName
      atomically $ do
        m <- readTVar $ listeners ledger
        let conns' = case MS.lookup accountName m of
              Nothing -> [] -- this should really never happen!
              Just conns -> filter (\(c,_) -> c /= chan) conns
        writeTVar (listeners ledger) $ MS.insert accountName conns' m
-- | Pump loop: forward every message arriving on the account's channel to
-- the websocket connection, forever (a ConnectionException ends the loop
-- and is handled by the caller, 'wsApp').
wsWorker ledger conn chan =
    forever $ do
        msg <- atomically (readTChan chan)
        sendTextData conn msg
-- | Deliver @message@ to every websocket channel registered for
-- @accountName@, plus every admin channel registered under "*".
-- 'L.nub' prevents double delivery when a channel appears in both lists.
notify ledger message accountName = do
  m <- atomically $ readTVar (listeners ledger)
  let chans = L.map fst $ fromMaybe [] (MS.lookup accountName m)
  let chansStar = L.map fst $ fromMaybe [] (MS.lookup "*" m)
  -- NOTE(review): mapM's [()] result is returned to the caller; mapM_
  -- would be tidier but would change the inferred return type -- confirm
  -- no caller binds the result before changing it.
  mapM (\chan -> atomically $ writeTChan chan message) $ L.nub (chans ++ chansStar)
| gip/cinq-cloches-ledger | src/Notification.hs | mit | 2,951 | 0 | 21 | 670 | 873 | 444 | 429 | 73 | 3 |
module Main where
import Criterion.Main
import HERMIT.Optimization.StreamFusion.List
-- | Criterion harness: time 'test' (to WHNF) at two input sizes.
main = defaultMain [ bgroup "test" [ bench "100" $ whnf test 100
                                   , bench "1000" $ whnf test 1000
                                   -- , bench "10000" $ whnf test 10000
                                   ] ]
-- | Benchmark kernel: sum of all odd numbers drawn from the ranges
-- @[lo..hi]@ for each pair @(lo, hi)@ in @zip [1..n] [n..2*n]@.
-- Uses 'sum' instead of the original lazy @foldl (+) 0@, which built an
-- O(n) chain of thunks, and renames the lambda's bindings so they no
-- longer shadow the outer @n@.
test n = sum . filter odd . concatMap (\(lo, hi) -> [lo .. hi]) . zip [1 .. n] $ [n .. 2 * n]
| ku-fpg/hermit-streamfusion | Main.hs | mit | 419 | 0 | 11 | 163 | 136 | 73 | 63 | 6 | 1 |
-- This prime number generator uses a lazy wheel sieve as described in:
--
-- Colun Runciman, "Lazy wheel sieves and sprials of primes",
-- <http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.55.7096>
module PrimeGen (primes) where
-- A wheel of circumference s with spoke offsets ns.
data Wheel = Wheel Int [Int]
-- | The infinite list of primes from the lazy wheel sieve.  'primes',
-- 'wheels' and 'primeSquares' are mutually corecursive: each wheel only
-- consumes primes that have already been produced, so the knot ties.
primes :: [Int]
primes = sieve wheels primes primeSquares
-- | Successively larger wheels; wheel k encodes non-divisibility by the
-- first k primes.
wheels :: [Wheel]
wheels = Wheel 1 [1] : zipWith nextSize wheels primes
-- | Roll each wheel across its stretch of candidates, keeping those with
-- no factor among the primes whose squares do not exceed them.  (For the
-- first two tiny wheels the factor test is skipped: s <= 2.)
sieve :: [Wheel] -> [Int] -> [Int] -> [Int]
sieve (Wheel s ns : ws) ps qs =
  [ n' | o <- s : [2 * s, 3 * s .. (head ps - 1) * s]
       , n <- ns
       , n' <- [ n + o ]
       , s <= 2 || noFactorIn ps qs n' ]
  ++ sieve ws (tail ps) (tail qs)
sieve _ _ _ = error "sieve: will never reach this"
-- | Extend a wheel by the next prime p: the circumference grows by a
-- factor of p and spokes divisible by p are removed.
nextSize :: Wheel -> Int -> Wheel
nextSize (Wheel s ns) p = Wheel (s * p) [ n' | o <- [0, s .. (p - 1) * s]
                                             , n <- ns
                                             , n' <- [n + o]
                                             , n' `mod` p > 0 ]
-- | True when x has no factor among ps; qs carries the matching prime
-- squares, so the scan stops as soon as q > x.
noFactorIn :: [Int] -> [Int] -> Int -> Bool
noFactorIn (p:ps) (q:qs) x = q > x || x `mod` p > 0 && noFactorIn ps qs x
noFactorIn _ _ _ = error "noFactorIn: will never reach this"
-- | Squares of the primes, the cut-off stream for 'noFactorIn'.
primeSquares :: [Int]
primeSquares = map (\x -> x * x) primes
| cpettitt/euler | PrimeGen.hs | mit | 1,259 | 0 | 14 | 430 | 492 | 265 | 227 | 24 | 1 |
{-# LANGUAGE QuasiQuotes, OverloadedStrings #-}
module Server.Model.Histogram
( createHistogram
, doesHistogramExist
, listHistogramWords
, listUrls
) where
import Data.Text (Text, pack)
import Hasql.Postgres (Postgres)
import Histogram (Histogram)
import Network.URI (URI)
import qualified Data.Map as Map
import qualified Hasql as Db
-- | Persist every (word, frequency) entry of a histogram for the given URL.
createHistogram :: URI -> Histogram -> Db.Tx Postgres s ()
createHistogram url histogram =
    mapM_ (\(word, freq) -> createWordSql urlKey word freq) (Map.toList histogram)
  where
    urlKey = urlText url
-- | True when at least one histogram row has been stored for the URL.
doesHistogramExist :: URI -> Db.Tx Postgres s Bool
doesHistogramExist url = do
  row <- testForUrlSql (urlText url)
  return (maybe False (const True) row)
-- | Top-10 words (by descending frequency) stored for the given URL.
listHistogramWords :: URI -> Db.Tx Postgres s [(Text, Int)]
listHistogramWords = listWordsSql . urlText
-- | Every stored URL paired with its single most frequent word.
listUrls :: Db.Tx Postgres s [(Text, Text, Int)]
listUrls = listUrlsSql
-- queries
-- | Insert one (url, word, frequency) row.
createWordSql :: Text -> Text -> Int -> Db.Tx Postgres s ()
createWordSql url word frequency =
  Db.unitEx [Db.stmt|
    INSERT INTO histogram (url, word, frequency)
    VALUES ($url, $word, $frequency)
  |]
-- | For each distinct URL, LATERAL-join its highest-frequency word.
listUrlsSql :: Db.Tx Postgres s [(Text, Text, Int)]
listUrlsSql =
  Db.listEx [Db.stmt|
    WITH distinct_urls AS (
      SELECT DISTINCT url
      FROM histogram
    )
    SELECT url, result.word, result.frequency
    FROM distinct_urls dist, LATERAL (
      SELECT word, frequency
      FROM histogram
      WHERE url = dist.url
      ORDER BY frequency DESC
      LIMIT 1
    ) result
    ORDER BY url
  |]
-- | Ten most frequent words for one URL.
listWordsSql :: Text -> Db.Tx Postgres s [(Text, Int)]
listWordsSql url =
  Db.listEx [Db.stmt|
    SELECT word, frequency
    FROM histogram
    WHERE url = $url
    ORDER BY frequency DESC
    LIMIT 10
  |]
-- | Fetch at most one row for the URL; used as an existence probe.
testForUrlSql :: Text -> Db.Tx Postgres s (Maybe (Text, Int))
testForUrlSql url =
  Db.maybeEx [Db.stmt|
    SELECT word, frequency
    FROM histogram
    WHERE url = $url
    LIMIT 1
  |]
-- helpers
-- | Render a URI to the Text key used in the histogram table.
urlText :: URI -> Text
urlText = pack . show
| Jonplussed/url-text-histogram | src/Server/Model/Histogram.hs | mit | 2,068 | 0 | 10 | 511 | 495 | 274 | 221 | 40 | 2 |
{-# LANGUAGE DeriveDataTypeable, GeneralizedNewtypeDeriving, TypeOperators #-}
-- Copyright (c) 2008 Jean-Philippe Bernardy
-- | Various high-level functions to further classify.
module Yi.Misc
where
{- Standard Library Module Imports -}
import Data.List
( isPrefixOf
, (\\)
, filter
)
import System.FriendlyPath
( expandTilda
, isAbsolute'
)
import System.FilePath
( takeDirectory
, (</>)
, addTrailingPathSeparator
, hasTrailingPathSeparator
, takeFileName
)
import System.Directory
( doesDirectoryExist
, getDirectoryContents
, getCurrentDirectory
, canonicalizePath
)
import Control.Monad.Trans (MonadIO (..))
{- External Library Module Imports -}
{- Local (yi) module imports -}
import Prelude ()
import Yi.Core
import Yi.MiniBuffer
( withMinibuffer
, simpleComplete
, withMinibufferGen
)
-- | Given a possible starting path (which if not given defaults to
-- the current directory) and a fragment of a path we find all
-- files within the given (or current) directory which can complete
-- the given path fragment.
-- We return a pair of both directory plus the filenames on their own
-- that is without their directories. The reason for this is that if
-- we return all of the filenames then we get a 'hint' which is way too
-- long to be particularly useful.
getAppropriateFiles :: Maybe String -> String -> YiM (String, [ String ])
getAppropriateFiles start s = do
  -- Starting directory: explicit argument, or the folder of the current
  -- buffer's file (falling back to the process's current directory).
  curDir <- case start of
            Nothing -> do bufferPath <- withBuffer $ gets file
                          liftIO $ getFolder bufferPath
            (Just path) -> return path
  -- Split the fragment into a directory part and a filename prefix; an
  -- absolute directory overrides curDir, a relative one is appended.
  let sDir = if hasTrailingPathSeparator s then s else takeDirectory s
      searchDir = if null sDir then curDir
                  else if isAbsolute' sDir then sDir
                  else curDir </> sDir
  searchDir' <- liftIO $ expandTilda searchDir
  -- Mark directories with a trailing separator so completions show them.
  let fixTrailingPathSeparator f = do
                       isDir <- doesDirectoryExist (searchDir' </> f)
                       return $ if isDir then addTrailingPathSeparator f else f
  files <- liftIO $ getDirectoryContents searchDir'
  -- Remove the two standard current-dir and parent-dir as we do not
  -- need to complete or hint about these as they are known by users.
  let files' = files \\ [ ".", ".." ]
  fs <- liftIO $ mapM fixTrailingPathSeparator files'
  let matching = filter (isPrefixOf $ takeFileName s) fs
  return (sDir, matching)
-- | Directory part of a possibly-given path.  With no path (or a path
-- that reduces to an empty directory), the current working directory is
-- returned; an existing directory is returned as-is, otherwise the file
-- name component is trimmed off.
getFolder :: Maybe String -> IO String
getFolder mPath =
    case mPath of
        Nothing -> getCurrentDirectory
        Just path -> do
            exists <- doesDirectoryExist path
            let candidate = if exists then path else takeDirectory path
            if null candidate
                then getCurrentDirectory
                else return candidate
-- | Given a possible path and a prefix, return matching file names,
-- each re-joined with the directory part of the fragment.
matchingFileNames :: Maybe String -> String -> YiM [String]
matchingFileNames start s = do
  (sDir, files) <- getAppropriateFiles start s
  return $ fmap (sDir </>) files
-- | Adjust the current block using the mode-specific handler.
adjBlock :: Int -> BufferM ()
adjBlock x = withSyntaxB' (\m s -> modeAdjustBlock m s x)
-- | A simple wrapper to adjust the current indentation using
-- the mode specific indentation function but according to the
-- given indent behaviour.
adjIndent :: IndentBehaviour -> BufferM ()
adjIndent ib = withSyntaxB' (\m s -> modeIndent m s ib)
-- | Generic emacs style prompt file action. Takes a @prompt and a continuation @act
-- and prompts the user with file hints
promptFile :: String -> (String -> YiM ()) -> YiM ()
promptFile prompt act = do maybePath <- withBuffer $ gets file
                           startPath <- addTrailingPathSeparator <$> (liftIO $ canonicalizePath =<< getFolder maybePath)
                           -- TODO: Just call withMinibuffer
                           withMinibufferGen startPath (findFileHint startPath) prompt (simpleComplete $ matchingFileNames (Just startPath)) act
-- | For use as the hint when opening a file using the minibuffer.
-- We essentially return all the files in the given directory which
-- have the given prefix.
findFileHint :: String -> String -> YiM [String]
findFileHint startPath s = snd <$> getAppropriateFiles (Just startPath) s
| codemac/yi-editor | src/Yi/Misc.hs | gpl-2.0 | 4,241 | 0 | 15 | 963 | 837 | 440 | 397 | 66 | 6 |
-- |
-- Module: Utils.RootFinding
-- Copyright: (c) Andreas Bock
-- License: BSD-3
-- Maintainer: Andreas Bock <bock@andreasbock.dk>
-- Stability: experimental
-- Portability: portable
--
module Utils.RootFinding where
-- | Locate a root of @f@ inside @[a, b]@ by repeated interval halving:
-- stop when the interval is narrower than @eps@ or the midpoint is an
-- exact root, otherwise recurse into the half whose endpoints' signs
-- differ.  Assumes @f a@ and @f b@ bracket a root (opposite signs).
bisection :: Double -> (Double -> Double) -> Double -> Double -> Double
bisection eps f a b
    | b - a < eps || fMid == 0    = mid
    | signum (f a) /= signum fMid = bisection eps f a mid
    | otherwise                   = bisection eps f mid b
  where
    mid  = (a + b) / 2
    fMid = f mid
| andreasbock/hql | src/Utils/RootFinding.hs | gpl-2.0 | 683 | 0 | 12 | 212 | 162 | 89 | 73 | 12 | 3 |
module Main where
import Lib
-- | Read two integers from stdin and print their sum.
main :: IO ()
main = do
  putStrLn "Enter an integer: "
  input1 <- readLn :: IO Int
  putStrLn "Enter another integer: "
  input2 <- readLn :: IO Int
  -- 'print' is the idiomatic spelling of 'putStrLn . show'
  print (addTwo input1 input2)
-- | Sum two 'Int's; just '(+)' specialised to Int.
addTwo :: Int -> Int -> Int
addTwo = (+)
| shimanekb/Learn_Haskell | pp2/ppTwoOne/app/Main.hs | gpl-2.0 | 301 | 0 | 11 | 96 | 107 | 52 | 55 | 11 | 1 |
{-
teafree, a Haskell utility for tea addicts
Copyright (C) 2013 Fabien Dubosson <fabien.dubosson@gmail.com>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
-}
{-# LANGUAGE OverloadedStrings #-}
module Teafree.Command.Info
( info
) where
import Prelude as P
import Teafree.Core.Monad
import Teafree.Interaction.PPrint
import Teafree.Interaction.Notify as N
import Teafree.Interaction.Choice
import qualified Teafree.Entity.Tea as Tea
import Data.Text as T
default (T.Text)
{- Information about a tea -}
-- | Interactively pick a tea, then show a desktop notification with its
-- name as title, its details as body and its icon; duration 0 and
-- "normal" urgency.  Errors from the chooser are reported via
-- 'sendTeafreeError'.
info :: Teafree ()
info = do
    choice <- chooseTea `catchAny` sendTeafreeError
    send . def title (T.pack . show . ppName False $ choice)
         . def body (T.pack . show . ppDetails False $ choice)
         . def icon (Tea.icon choice)
         . def duration 0
         . def urgency "normal"
         $ notification
| StreakyCobra/teafree | src/Teafree/Command/Info.hs | gpl-3.0 | 1,463 | 0 | 18 | 333 | 198 | 112 | 86 | 20 | 1 |
-- -*-haskell-*-
-- Vision (for the Voice): an XMMS2 client.
--
-- Author: Oleg Belozeorov
-- Created: 20 Jun. 2010
--
-- Copyright (C) 2010, 2011 Oleg Belozeorov
--
-- This program is free software; you can redistribute it and/or
-- modify it under the terms of the GNU General Public License as
-- published by the Free Software Foundation; either version 3 of
-- the License, or (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
--
module Playlist
( initPlaylist
, showPlaylist
) where
import Graphics.UI.Gtk
import UI
import Builder
import Environment
import XMMS
import Playback
import Medialib
import Playlist.Model
import Playlist.Index
import Playlist.Format
import Playlist.View
import Playlist.Config
import Playlist.Search
import Playlist.Update
import Playlist.DnD
import Playlist.UI
-- | One-time initialisation: set up the playlist formatting machinery.
initPlaylist =
  initFormat
-- | Build and display the playlist window: wire up XMMS, the media
-- library, playback and the Glade-defined UI, then the model, format,
-- index, view, config, updates, search and drag-and-drop, and show it.
showPlaylist =
  withXMMS $ withMedialib $ withPlayback $ withBuilder $ do
    addFromFile $ gladeFilePath "playlist"
    withUI "Vision Playlist" $ withModel $ withFormat $ withIndex $
      withView $ withPlaylistConfig $ do
        initUpdate
        setupSearch
        setupDnD
        setupUI
        widgetShowAll window
| upwawet/vision | src/Playlist.hs | gpl-3.0 | 1,413 | 0 | 14 | 282 | 183 | 107 | 76 | 31 | 1 |
import Graphics.Rendering.Chart.Easy
import Graphics.Rendering.Chart.Backend.Cairo
-- | Amplitude-modulated test signal: pair each x with a slow sine
-- envelope (scaled into [0,1]) applied to a faster sine carrier.
signal :: [Double] -> [(Double,Double)]
signal = map sample
  where
    sample x   = (x, envelope x * carrier x)
    envelope x = (sin (x * 3.14159 / 45) + 1) / 2
    carrier x  = sin (x * 3.14159 / 5)
-- | Render the AM signal to example1_big.png: a blue line through the
-- dense samples and red points at the sparse ones.
main = toFile def "example1_big.png" $ do
    layout_title .= "Amplitude Modulation"
    setColors [opaque blue, opaque red]
    plot (line "am" [signal [0,(0.5)..400]])
    plot (points "am points" (signal [0,7..400]))
| dkensinger/haskell | chart_ex1.hs | gpl-3.0 | 438 | 0 | 14 | 84 | 206 | 110 | 96 | 9 | 1 |
module Graphics.UI.Bottle.Animation.Id
( AnimId
, joinId, subId, augmentId
, mappingFromPrefixMap
) where
import Control.Lens.Operators
import qualified Data.ByteString as SBS
import qualified Data.ByteString.Char8 as SBS8
import Data.List.Lens (prefixed)
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe (fromMaybe)
import Data.Monoid ((<>))
-- | Hierarchical animation identifier: a path of ByteString components.
type AnimId = [SBS.ByteString]
-- | Extend an id with one component made from 'show'ing a value.
augmentId :: Show a => AnimId -> a -> AnimId
augmentId animId = joinId animId . (:[]) . SBS8.pack . show
-- | Concatenate two id paths.
joinId :: AnimId -> AnimId -> AnimId
joinId = (++)
-- | @subId folder path@ strips the @folder@ prefix from @path@, or is
-- 'Nothing' when @path@ does not live under @folder@.
subId :: AnimId -> AnimId -> Maybe AnimId
subId folder path = path ^? prefixed folder
-- | Remap an id through a prefix-substitution map: the greatest key not
-- exceeding the id is tried as a prefix; on a match the prefix is swapped
-- for the mapped value, otherwise the id is returned unchanged.
-- NOTE(review): lookupLE yields only one candidate -- confirm this finds
-- the intended prefix when several map keys share a common prefix.
mappingFromPrefixMap :: Map AnimId AnimId -> AnimId -> AnimId
mappingFromPrefixMap m animId =
    do
        (animIdPrefixCandidate, newAnimId) <- Map.lookupLE animId m
        suffix <- animId ^? prefixed animIdPrefixCandidate
        newAnimId <> suffix & Just
    & fromMaybe animId
| da-x/lamdu | bottlelib/Graphics/UI/Bottle/Animation/Id.hs | gpl-3.0 | 998 | 0 | 10 | 230 | 295 | 167 | 128 | 26 | 1 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
-- |
-- This module facilitates turning parsed surface expressions into the data
-- needed by the compiler to generate code. These tasks include:
-- [x] Generating unique identifiers for every distinct variable
-- [x] Converting surface-level definitions into TopLevel
-- [x] Converting surface-level signatures into TopLevel
-- [x] Ensuring all symbols are either bound or top level
-- [x] Forming a default type environment for type checking
module Lib.Preprocessor where
import Lib.Compiler
import Lib.Syntax
import Lib.Syntax.Annotated
import Lib.Syntax.Core
import Lib.Syntax.Surface
import Lib.Syntax.Symbol
import Lib.Syntax.TopLevel
import Lib.Types.Class
import Lib.Types.Frame
import Lib.Types.Kind (Kind (..))
import Lib.Types.Scheme
import Lib.Types.Type
import Lib.Types.TypeCheck
import Lib.Errors
import Control.Comonad.Cofree
import Control.Monad (forM, mapM, mapM_, when)
import Control.Monad.Free
import Control.Monad.Except
import Control.Monad.Reader
import Control.Monad.State
import Control.Monad.Trans
import Data.Map (Map)
import qualified Data.Map as M
import Data.Set (Set)
import qualified Data.Set as S
import Data.Monoid ((<>))
-- | Renaming environment: original symbol -> unique symbol.
type SymbolMap = Map Symbol Symbol
-- | Mutable preprocessor state.
data PState = PState
    { uniqueInt :: Int -- ^ For generating unique IDs
    , toplevel  :: TopLevel -- ^ Accumulated top-level declarations
    }
-- | Initial state: counter at zero, empty 'TopLevel'.
pState :: PState
pState = PState 0 mempty
-- | A monad for transforming parsed surface syntax into what the compiler needs
newtype Preprocess a = Preprocess {
    runPreprocess
        :: StateT PState (ReaderT SymbolMap Compiler) a
    } deriving ( Functor
               , Applicative
               , Monad
               , MonadState PState
               , MonadReader (Map Symbol Symbol)
               , MonadError PsiloError
               )
-- | Run a preprocessing action with an empty bound-variable map and the
-- initial state, yielding the accumulated 'TopLevel'.
preprocess
    :: Preprocess a
    -> Compiler TopLevel
preprocess (Preprocess p) = do
    (PState _ toplevel ) <- runReaderT (execStateT p pState ) M.empty
    return toplevel
-- | Generate unique symbols
-- Fresh name of the form @"_<n>"@.  Delegates the counter bump to
-- 'genint' so the increment logic lives in exactly one place.
gensym :: Preprocess String
gensym = do
    n <- genint
    return $ "_" ++ show n
-- | Fresh integer from the shared counter (post-incremented).
genint :: Preprocess Int
genint = do
    n <- gets uniqueInt
    modify $ \s -> s { uniqueInt = n + 1 }
    return n
-- | The current bound-variable renaming map.
readBoundVars :: Preprocess SymbolMap
readBoundVars = ask
-- | Perform a preprocessing computation with a temporarily extended bound
-- variable map
withBoundVars
    :: SymbolMap
    -> Preprocess a
    -> Preprocess a
withBoundVars bvs m = local (M.union bvs) m
-- | Give each symbol in a 'SurfaceExpr' a globally unique identifier
uniqueIds
    :: SurfaceExpr ()
    -> Preprocess (SurfaceExpr ())
-- Identifiers: rename when bound, otherwise keep (assumed top level).
uniqueIds (Free (IdS s)) = do
    boundVars <- readBoundVars
    case M.lookup s boundVars of
        Nothing -> return $ aId s
        Just s' -> return $ aId s'
-- | add new scope of bound variables to the environment
-- Each argument gets a unique suffix; body and local signatures are
-- renamed under the extended map.
-- NOTE(review): boundVars is bound but unused in this clause -- confirm
-- it is leftover.
uniqueIds (Free (FunS args body sigs)) = do
    boundVars <- readBoundVars
    uniqueVars <- forM args $ \arg -> do
        suffix <- gensym
        return $ arg ++ suffix
    let bvs = M.fromList $ zip args uniqueVars
    body' <- withBoundVars bvs $ uniqueIds body
    sigs' <- withBoundVars bvs $ forM sigs $ \sig -> do
        sig' <- sequence $ fmap uniqueIds sig
        return sig'
    return $ aFun uniqueVars body' sigs'
uniqueIds (Free (AppS op erands)) = do
    op' <- uniqueIds op
    erands' <- forM erands uniqueIds
    return $ aApp op' erands'
uniqueIds (Free (IfS c t e)) = do
    c' <- uniqueIds c
    t' <- uniqueIds t
    e' <- uniqueIds e
    return $ aIf c' t' e'
uniqueIds (Free (DefS sym val)) = do
    val' <- uniqueIds val
    return $ aDef sym val'
-- Signatures: retarget to the renamed symbol when it is bound.
uniqueIds (Free (SigS sym pt)) = do
    boundVars <- readBoundVars
    let sym' = maybe sym id $ M.lookup sym boundVars
    return $ aSig sym' pt
-- Literals and anything else carry no names to rename.
uniqueIds whatever = return whatever
-- | Record a desugared definition in the accumulating 'TopLevel'.
addDefinition :: Symbol -> AnnotatedExpr (Maybe Type) -> Preprocess ()
addDefinition sym dfn = do
    tl <- gets toplevel
    let tl' = tl {
            definitions = M.insert sym dfn (definitions tl)
            }
    modify $ \st -> st { toplevel = tl' }
lookupDefinition :: Symbol -> Preprocess (Maybe (AnnotatedExpr (Maybe Type)))
lookupDefinition sym = do
    tl <- gets toplevel
    return $ M.lookup sym (definitions tl)
-- | Record a (normalized, quantified) type signature.
addSignature :: Symbol -> Sigma -> Preprocess ()
addSignature sym sig = do
    tl <- gets toplevel
    let sig' = normalize $ quantify sig
    let tl' = tl {
            signatures = M.insert sym sig' (signatures tl)
            }
    modify $ \st -> st { toplevel = tl' }
lookupSignature :: Symbol -> Preprocess (Maybe Sigma)
lookupSignature sym = do
    tl <- gets toplevel
    return $ M.lookup sym (signatures tl)
-- | Record a type definition (type vars, body scheme, alias flag).
addTypedef :: Symbol -> ([TyVar], Sigma, Bool) -> Preprocess ()
addTypedef sym td = do
    tl <- gets toplevel
    let tl' = tl {
            typedefs = M.insert sym td (typedefs tl)
            }
    modify $ \st -> st { toplevel = tl' }
lookupTypedef :: Symbol -> Preprocess (Maybe ([TyVar], Sigma, Bool))
lookupTypedef sym = do
    tl <- gets toplevel
    return $ M.lookup sym (typedefs tl)
-- | Record a typeclass declaration in the class environment.
addClassdef :: Symbol -> [Type] -> [Pred] -> Preprocess ()
addClassdef sym vars preds = do
    tl <- gets toplevel
    let tl' = tl {
            classes = (classes tl) <:> (addClass sym vars preds)
            }
    modify $ \st -> st { toplevel = tl' }
-- | Merge method implementations for a class-method symbol (unions with
-- any implementations already recorded).
addMethod
    :: Symbol
    -> (Set (AnnotatedExpr (Maybe Type)))
    -> Preprocess ()
addMethod sym mthds = do
    tl <- gets toplevel
    let ms = case M.lookup sym (methods tl) of
            Nothing -> M.insert sym mthds (methods tl)
            Just mthds' ->
                M.insert sym (mthds `S.union` mthds') (methods tl)
    let tl' = tl {
            methods = ms
            }
    modify $ \st -> st { toplevel = tl' }
-- | Rename, desugar and annotate the value of a definition form.
makeDefinition :: SurfaceExpr () -> Preprocess (AnnotatedExpr (Maybe Type))
makeDefinition (Free (DefS sym val)) = do
    uval <- uniqueIds val >>= surfaceToCore
    case compile (annotated uval) of
        Left _ -> throwError $ PreprocessError $ "Error desugaring " ++ sym
        Right ann -> return $ fmap (const Nothing) ann
makeDefinition _ = throwError $ PreprocessError "Not a valid definition."
-- | Transforms a 'SurfaceExpr' into a 'TopLevel' expression
-- In general, a surface level syntax object might generate multiple types of
-- "top level" declarations; eg, a class definition generates signatures and
-- potential method implementations as well as the actual class info.
surfaceToTopLevel
    :: SurfaceExpr ()
    -> Preprocess ()
-- Definitions: desugar and store.
surfaceToTopLevel d@(Free (DefS sym val)) = do
    dfn <- makeDefinition d
    addDefinition sym dfn
surfaceToTopLevel (Free (SigS sym sig)) = addSignature sym sig
-- Generates signatures for constructor and destructor, as well as a proper
-- typedef object.
surfaceToTopLevel (Free (TypeDefS name vars body isAlias)) = do
    let body' = quantify body
    addTypedef name (vars, body', isAlias)
    when (not isAlias) $ do
        let ret_type = TList $ (TSym (TyLit name Star)) : (fmap TVar vars)
        -- constructor: body -> T vars
        let ctor = TForall vars $ TList $
                tyFun : (body : [ret_type])
        -- destructor (skolemized below): T vars -> body; named "~name"
        let dtor' = TForall vars $ TList $
                tyFun : (ret_type : [body])
        let mDtor = runSolve (skolemize dtor') initTypeCheckState
        dtor <- case mDtor of
            Left err -> throwError err
            Right (sk_vars, dtor) -> return $ TForall sk_vars dtor
        addSignature name ctor
        addSignature ('~':name) dtor
-- Class declarations: record the class, qualify each method signature
-- with the class predicate, and store any default implementations.
surfaceToTopLevel (Free (ClassDefS name vars preds mthods)) = do
    addClassdef name vars preds
    forM_ mthods $ \(Free (SigS sym scheme), mDfn) -> do
        let sig = quantify $ qualify [TPred name vars] scheme
        addSignature sym sig
        case mDfn of
            Nothing -> addMethod sym S.empty
            Just d@(Free (DefS sym val)) -> do
                dfn' <- makeDefinition d
                let s = S.fromList [dfn']
                addMethod sym s
-- Instances: each method must already have a (class) signature.
surfaceToTopLevel (Free (ClassInstS name vars preds mthods)) = do
    forM_ mthods $ \d@(Free (DefS sym val)) -> do
        dfn <- makeDefinition d
        mSig <- lookupSignature sym
        case mSig of
            Nothing -> throwError $ PreprocessError "Not a class method."
            Just sig -> addMethod sym $ S.fromList [dfn]
surfaceToTopLevel _ = throwError $
    PreprocessError $
    "Expression is not a top level expression"
-- | Called by 'surfaceToTopLevel' on a subset of 'SurfaceExpr's
surfaceToCore
    :: SurfaceExpr ()
    -> Preprocess (CoreExpr ())
-- Literals and identifiers map directly to their core constructors.
surfaceToCore (Free (IntS n)) = return $ cInt n
surfaceToCore (Free (FloatS n)) = return $ cFloat n
surfaceToCore (Free (BoolS b)) = return $ cBool b
surfaceToCore (Free (IdS s)) = return $ cId s
surfaceToCore (Free (AppS op erands)) = do
    op' <- surfaceToCore op
    erands' <- mapM surfaceToCore erands
    return $ cApp op' erands'
-- Functions: local signatures are dropped at this stage.
surfaceToCore (Free (FunS a b _)) = do
    b' <- surfaceToCore b
    return $ cFun a b'
surfaceToCore (Free (IfS c t e)) = do
    c' <- surfaceToCore c
    t' <- surfaceToCore t
    e' <- surfaceToCore e
    return $ cIf c' t' e'
-- Top-level-only forms (defs, sigs, typedefs, classes) are rejected.
surfaceToCore s = throwError $ PreprocessError $
    "Expression " ++ show s ++ " cannot be converted into a core expression."
-- | Ensures that all symbols are either bound or global
-- TODO why am I not using a Set here?
-- | Walk every definition and fail with 'UnboundVariable' if an
-- identifier is neither lambda-bound, a known top-level name (including
-- typedef constructors/destructors and builtins), nor has a signature.
boundVarCheck :: TopLevel -> Preprocess ()
boundVarCheck (TopLevel defns sigs tds cls mthds) =
    withBoundVars bvs $ mapM_ go $ M.toList defns
    where
        dup x = (x, x)
        -- Globals: definitions, typedef ctors, "~"-prefixed dtors.
        syms = defn_syms ++
               (fmap dup ty_defn_syms) ++
               (fmap dup (fmap ("~"++) ty_defn_syms))
        defn_syms = fmap dup $ M.keys defns
        ty_defn_syms = M.keys tds
        builtins = fmap dup $ S.toList builtin_syms
        bvs = M.fromList $ builtins ++ syms
        go :: (Symbol, AnnotatedExpr (Maybe Type)) -> Preprocess ()
        go (s, core) = check core
        check :: AnnotatedExpr (Maybe Type) -> Preprocess ()
        -- Lambdas extend the bound set with their own arguments.
        check (ty :< (FunC args body)) = do
            let argSyms = fmap (\x -> (x, x)) args
            b' <- withBoundVars (M.fromList argSyms) $ check body
            return ()
        -- Identifiers must be bound or have a signature.
        check (_ :< (IdC s)) = do
            boundVars <- readBoundVars
            when (M.notMember s boundVars) $ do
                case M.lookup s sigs of
                    Nothing -> throwError $ UnboundVariable s
                    Just _ -> return ()
            return ()
        check (_ :< (AppC op erands)) = do
            op' <- check op
            erands' <- mapM check erands
            return ()
        check (_ :< (IfC c t e)) = do
            c' <- check c
            t' <- check t
            e' <- check e
            return ()
        -- Literals bind nothing.
        check whatever = return ()
| gatlin/psilo | src/Lib/Preprocessor.hs | gpl-3.0 | 11,128 | 0 | 20 | 3,325 | 3,565 | 1,756 | 1,809 | 258 | 6 |
{-# LANGUAGE BangPatterns #-}
--- |
--- | This module contains stuff to handle and convert Filelists alias TreeNodes
--- |
--- Copyright : (c) Florian Richter 2011
--- License : GPL
---
module Filelist where
import System.Directory
import System.Posix.Directory
import System.FilePath
import System.Posix.Files
import System.IO
import System.IO.Unsafe
import Data.Maybe
import Control.Monad
import Control.DeepSeq
import qualified Data.ByteString.Lazy as L
import qualified Data.ByteString.Lazy.Char8 as C
import qualified Data.ByteString.Char8 as SC
import qualified Codec.Compression.BZip as BZip
import qualified Data.Text as T
import Blaze.ByteString.Builder
import Blaze.ByteString.Builder.Char.Utf8
import Data.Monoid (mappend, mconcat)
import Data.Text.Encoding (decodeUtf8With)
import Data.Text.Lazy.Encoding (encodeUtf8)
import Data.Text.Encoding.Error (lenientDecode)
import Text.XML.Expat.SAX
import Data.List
import Data.Maybe
import FilelistTypes
import TTH
import Config
import FilelistXml
-- | memory efficient getDirectoryContents
-- Streams the entries of @dir@ with the POSIX dirstream API.
--
-- Bug fix: the end-of-stream test was inverted.  'readDirStream' signals
-- exhaustion by returning the empty string, so that is when the stream
-- must be closed and the list ended; the old code closed the stream on
-- the first real entry and looped forever consing "" on an empty stream.
fsDirList :: FilePath -> IO [FilePath]
fsDirList dir = do
    ds <- openDirStream dir
    fsList ds
    where
        fsList ds = do
            path <- readDirStream ds
            if null path
              then do
                closeDirStream ds
                return []
              else do
                rest <- fsList ds
                return (path : rest)
-- | create TreeNode tree from filesystem directory
-- Directory contents and file metadata are read lazily
-- (unsafeInterleaveIO), so the tree materialises on demand; entries whose
-- IO fails are silently dropped via maybeCatch.
getFileList :: AppState -> FilePath -> IO TreeNode
getFileList appState dir = do
    names <- unsafeInterleaveIO $ getUsefulContents dir
    let paths = map (dir </>) names
    nodes <- (forM paths $ \path -> do
        isDirectory <- doesDirectoryExist path
        if isDirectory
            then maybeCatch (getFileList appState path)
            else maybeCatch (getFile appState path)
        )
    return (DirNode (T.pack $ last (splitDirectories dir)) (T.pack dir) (catMaybes nodes))
    where
        -- Turn any exception into Nothing: best-effort listing.
        maybeCatch :: IO a -> IO (Maybe a)
        maybeCatch func = catch (Just `liftM` func) (\e-> return Nothing)
-- | create TreeNode object for file in filesystem (hash is retrieved from cache if available)
-- Size and mtime are forced strictly; the hash stays a lazy thunk so
-- hashing only happens when the node's hash is demanded.
getFile :: AppState -> FilePath -> IO TreeNode
getFile appState path = do
    fileStatus <- unsafeInterleaveIO $ getFileStatus path
    let !size = fromIntegral $ fileSize fileStatus
    let !modTime = modificationTime fileStatus
    hash <- unsafeInterleaveIO $ getCachedHash appState (T.pack path) modTime
    let !node = FileNode (T.pack $ takeFileName path) (T.pack path) size modTime hash
    return (node `deepseq` node)
-- | Directory entries of @path@, decoded leniently as UTF-8 and with the
-- "." and ".." pseudo-entries removed.
getUsefulContents :: String -> IO [String]
getUsefulContents path = do
    entries <- fsDirList path
    let visible = filter (`notElem` [".", ".."]) entries
    return (map (T.unpack . decodeUtf8With lenientDecode . SC.pack) visible)
-- | accumulate filesizes of all files in the tree (bytes)
treeSize :: TreeNode -> Integer
treeSize (DirNode _ _ children) = sum $ map treeSize children
treeSize (FileNode _ _ size _ _) = size
-- | Return the first 'Just' value in the list, or 'Nothing' when there is
-- none.  Equivalent to the hand-rolled recursion it replaces, including
-- laziness (the list is only consumed up to the first hit).
firstNotNothing :: [Maybe a] -> Maybe a
firstNotNothing = listToMaybe . catMaybes
-- | search FileNode in TreeNode by path
searchFile :: T.Text -> TreeNode -> Maybe TreeNode
searchFile path tree =
    -- delegate to searchNode, but accept only file hits (directories
    -- matching the path yield Nothing)
    case searchNode path tree of
        Just (file@(FileNode _ _ _ _ _)) -> Just file
        _ -> Nothing
-- | search Node in TreeNode by slash-separated path
-- A file matches when its name equals the whole remaining path; a
-- directory matches directly, or its children are searched with the
-- first path component stripped.
--
-- Cleanup: the where-bound helpers used to take parameters they ignored
-- (closing over @path@ instead), and @restPath@ used the partial
-- 'T.tail', which crashes on a component with no '/'.  'T.drop 1' is
-- identical on non-empty input and total on empty input.
searchNode :: T.Text -> TreeNode -> Maybe TreeNode
searchNode path file@(FileNode name _ _ _ _)
    | path == name = Just file
    | otherwise = Nothing
searchNode path dir@(DirNode name _ children)
    | path == name = Just dir
    | firstPath == name = firstNotNothing $ map (searchNode restPath) children
    | otherwise = Nothing
    where
        firstPath = T.takeWhile (/= '/') path
        restPath = T.drop 1 $ T.dropWhile (/= '/') path
-- | search Node in a forest of TreeNodes by path
-- An empty or single-character ("/") path matches an anonymous root
-- directory wrapping all nodes; otherwise the leading separator is
-- stripped and each tree is searched in turn.
-- (The unused @restPath@ binding from the original has been removed.)
searchNodeL :: T.Text -> [TreeNode] -> Maybe TreeNode
searchNodeL path nodes
    | T.length path <= 1 = Just $ DirNode T.empty T.empty nodes
    | otherwise = firstNotNothing $ map (searchNode $ T.tail path) nodes
-- | search hash in TreeNode
searchHash :: T.Text -> TreeNode -> Maybe TreeNode
-- file with a matching TTH hash
searchHash hash file@(FileNode _ _ _ _ (Just fhash))
    | fhash == hash = Just file
    | otherwise = Nothing
-- files without a computed hash can never match
searchHash hash (FileNode _ _ _ _ _) = Nothing
-- directories: first match anywhere below
searchHash hash (DirNode _ _ children) = firstNotNothing $ map (searchHash hash) children
-- | convert TreeNode to xml
-- Serialises the tree to a DC filelist XML document as a blaze Builder;
-- names go through xmlQuote, hashes are emitted verbatim.
treeNodeToXml :: TreeNode -> Builder
treeNodeToXml node = (fromString "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n") `mappend`
                     (fromString "<FileListing Version=\"1\" Generator=\"hdc V:0.1\">") `mappend`
                     (toXml node) `mappend` (fromString "</FileListing>")
    where
        toXml (DirNode name _ children) = mconcat
            [fromString "<Directory Name=\"", xmlQuote name, fromString "\">",
             mconcat $ map toXml children, fromString "</Directory>" ]
        toXml (FileNode name _ size _ (Just hash)) = mconcat [fromString "<File Name=\"", xmlQuote name,
                                                              fromString "\" Size=\"", fromString $ show size,
                                                              fromString "\" TTH=\"", fromText hash, fromString "\"/>" ]
        toXml (FileNode name _ size _ _) = mconcat [fromString "<File Name=\"", xmlQuote name,
                                                    fromString "\" Size=\"", fromString $ show size, fromString "\"/>" ]
-- | Escape the XML-critical characters for use inside attribute values.
-- As the file stood, every replacement mapped a character to itself
-- (producing no escaping at all); and '&' was replaced after '<'/'>',
-- which would double-escape the entities those replacements introduce.
-- Composition applies right-to-left, so the '&' replacement is rightmost
-- and therefore runs first.
xmlQuote :: T.Text -> Builder
xmlQuote = fromText
         . T.replace (T.singleton '"') (T.pack "&quot;")
         . T.replace (T.singleton '<') (T.pack "&lt;")
         . T.replace (T.singleton '>') (T.pack "&gt;")
         . T.replace (T.singleton '&') (T.pack "&amp;")
-- | convert TreeNode to compressed xml
treeNodeToXmlBz :: TreeNode -> L.ByteString
treeNodeToXmlBz node = (BZip.compress . toLazyByteString . treeNodeToXml) node
-- | convert compressed xml to TreeNode object (this is, what you normally need)
-- Decompression and parsing are both lazy.
xmlBzToTreeNode :: L.ByteString -> [TreeNode]
xmlBzToTreeNode xmlbz = (xmlToTreeNode . BZip.decompress) xmlbz
-- | helper function, to extract attribute value from attributelist
-- Fails with a descriptive error naming the missing attribute instead of
-- fromJust's anonymous "Maybe.fromJust: Nothing" crash.
getAttr :: [(T.Text, T.Text)] -> String -> T.Text
getAttr attrs name =
    fromMaybe (error ("getAttr: missing attribute " ++ name)) $
        lookup (T.pack name) attrs
-- | helper function to add a node to a directory
addToDir :: TreeNode -> TreeNode -> TreeNode
-- Partial: the second argument must be a DirNode (upheld by the SAX fold
-- in processXmlTag); strict in both to avoid thunk build-up while parsing.
addToDir !node !(DirNode name path children) = DirNode name path (node:children)
-- | parse tagsoup tag on TreeNode stack
-- Folds one SAX event into a stack of partially-built directories; the
-- head of the stack is the directory currently being filled.
processXmlTag :: [TreeNode] -> SAXEvent T.Text T.Text -> [TreeNode]
processXmlTag stack (XMLDeclaration _ _ _) = stack
processXmlTag stack (StartElement tag attrs)
    -- document root: start with a fresh "base" directory
    | tag == (T.pack "FileListing") = [DirNode (T.pack "base") T.empty []]
    -- open a directory: push it so children land inside it
    | tag == (T.pack "Directory") = (DirNode (getAttr attrs "Name") T.empty []) : stack
    -- file: build the node strictly and attach it to the open directory
    | tag == (T.pack "File") = let file = FileNode (getAttr attrs "Name") T.empty
                                               (read $ T.unpack $ getAttr attrs "Size") 0
                                               (Just $ getAttr attrs "TTH")
                               in file `deepseq` (addToDir file (head stack)) : (tail stack)
    | otherwise = error ("unknown tag: " ++ (show tag))
-- close a directory: pop it and attach it to its parent
processXmlTag stack@(x:y:rest) (EndElement tag)
    | tag == (T.pack "Directory") = (addToDir x y) : rest
processXmlTag stack (EndElement tag)
    | tag == (T.pack "File") = stack
    | tag == (T.pack "FileListing") = stack
    | otherwise = error ("unknown close tag: " ++ (show tag))
processXmlTag stack (CharacterData _) = stack
processXmlTag stack (StartCData) = stack
processXmlTag stack (EndCData) = stack
processXmlTag stack (ProcessingInstruction _ _) = stack
processXmlTag stack (Comment _) = stack
processXmlTag stack (FailDocument msg) = error ("parsing error: " ++ (show msg))
-- | convert xml to TreeNode object
--xmlToTreeNode :: L.ByteString -> TreeNode
--xmlToTreeNode xml = head $ foldl' processXmlTag [] (parseHere xml)
-- where
-- strictProcessXmlTag s e = let newstack = processXmlTag s e in newstack `deepseq` newstack
-- Parsing is delegated to FilelistXml's parseXml.
xmlToTreeNode :: L.ByteString -> [TreeNode]
xmlToTreeNode xml = parseXml xml
-- NOTE(review): 'parseHere' appears unused now that parsing goes through
-- 'parseXml' -- confirm and remove.
parseHere xml = (parse defaultParseOptions xml)
-- | get name of TreeNode object (directory name or filename)
nodeToName :: TreeNode -> T.Text
nodeToName (DirNode name _ _) = name -- directory name
nodeToName (FileNode name _ _ _ _) = name -- file name
-- vim: sw=4 expandtab
| f1ori/hadcc | Filelist.hs | gpl-3.0 | 9,388 | 6 | 16 | 2,728 | 2,629 | 1,344 | 1,285 | 152 | 3 |
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE QuasiQuotes #-}
import Control.Monad (unless)
import Data.String.Interpolate (i)
import Data.Tuple.Operator (pattern (:-))
import Development.Shake (Action, liftIO, shakeArgs, shakeOptions)
import Development.Shake.Classes (Binary, Hashable, NFData)
import Development.Shake.Simple (need, simpleRule, want)
import GHC.Generics (Generic)
import System.Directory (getCurrentDirectory)
import System.Environment (lookupEnv)
import System.IO (hPutStrLn, stderr)
import Stack.Offline (Snapshot)
import Docker (DockerImage(..), DockerRun(..), dockerBuild, dockerRunUser)
-- | CPU architecture a test image targets.
data Arch = X86_64
    deriving (Binary, Eq, Generic, Hashable, NFData)
-- Rendered exactly as spelled inside Docker image names.
instance Show Arch where
    show X86_64 = "x86_64"
-- | Operating system a test image targets.
data Os = Linux
    deriving (Binary, Eq, Generic, Hashable, NFData)
-- Rendered exactly as spelled inside Docker image names.
instance Show Os where
    show Linux = "linux"
-- | Full cycle test configuration
data Conf = Conf{source :: (Arch, Os), snapshot :: Snapshot}
    deriving (Binary, Eq, Generic, Show, Hashable, NFData)
-- | TODO(cblp, 2016-06-02) remove or replace with final target(s)
newtype FullCycle = FullCycle Conf
    deriving (Binary, Eq, Generic, Hashable, NFData, Show)
-- | Shake key: the offline package tarball built for a 'Conf'.
newtype StackOfflinePack = StackOfflinePack Conf
    deriving (Binary, Eq, Generic, Hashable, NFData, Show)
-- | Shake key: the stack-offline executable built for an arch/os pair.
data Tool = Tool Arch Os
    deriving (Binary, Eq, Generic, Hashable, NFData, Show)
main :: IO ()
main = do
    -- Full-cycle tests run only when STACK_OFFLINE_TEST=FULL is set.
    runFullTest <- (Just "FULL" ==) <$> lookupEnv "STACK_OFFLINE_TEST"
    unless runFullTest $
        hPutStrLn stderr "Full cycle tests are skipped"
    -- Shake runs either way; when runFullTest is False the 'want' list
    -- below is empty, so no targets get built.
    shakeArgs shakeOptions $ do
        want [ FullCycle Conf{source, snapshot}
             | runFullTest
             , source <- [(X86_64, Linux)]
             , snapshot <- [ "lts-2.0" -- ghc-7.8.4
                           , "lts-3.0" -- ghc-7.10.2
                           , "lts-5.0" -- ghc-7.10.3
                           ]
             ]
        -- A full cycle just demands the corresponding offline pack.
        simpleRule $ \(FullCycle conf) ->
            need [StackOfflinePack conf]
        simpleRule $ -- DockerImage
            liftIO . dockerBuild
        simpleRule $ \(StackOfflinePack conf) ->
            buildPack conf
        simpleRule -- Tool
            buildTool
-- | Image name of the form @stack-offline.\<prefix\>.\<arch\>-\<os\>@.
dockerImageName :: String -> (Arch, Os) -> String
dockerImageName prefix (arch, os) = [i|stack-offline.#{prefix}.#{arch}-#{os}|]
-- | Build the stack-offline executable inside the source Docker image,
-- leaving the binary in @tmp/bin@ of the mounted working copy.
buildTool :: Tool -> Action ()
buildTool (Tool arch os) = do
    let sourceDockerfile = [i|docker/source.#{arch}-#{os}|]
        sourceImage = dockerImageName "source" (arch, os)
    need [DockerImage sourceImage sourceDockerfile]
    cwd <- liftIO getCurrentDirectory
    -- Run as the invoking user so build artifacts are not root-owned.
    liftIO $ dockerRunUser (mkDROptions sourceImage cwd) [i|
        cwd=`pwd`
        mkdir -p tmp/bin
        set -x
        stack --install-ghc --local-bin-path="tmp/bin" --stack-root="$cwd/tmp/stack" install
    |]
-- | Produce the offline package tarball for a 'Conf' by running the
-- previously built tool inside the source Docker image.
buildPack :: Conf -> Action ()
buildPack Conf{source = source@(sourceArch, sourceOs), snapshot} = do
    let sourceDockerfile = [i|docker/source.#{sourceArch}-#{sourceOs}|]
        sourceImage = dockerImageName "source" source
    need [DockerImage sourceImage sourceDockerfile]
    need [Tool sourceArch sourceOs]
    cwd <- liftIO getCurrentDirectory
    let packFile = [i|tmp/stack-offline-pack_#{snapshot}_#{sourceArch}-#{sourceOs}.tgz|]
    -- The script removes any stale tarball first and verifies the new
    -- one actually appeared before succeeding.
    liftIO $ dockerRunUser (mkDROptions sourceImage cwd) [i|
        set -x
        rm -f "#{packFile}"
        tmp/bin/stack-offline --resolver="#{snapshot}" --tgz="#{packFile}"
        test -f "#{packFile}"
    |]
-- | Docker run options: mount @cwd@ at the project root inside the
-- container and start there.
mkDROptions :: String -> FilePath -> DockerRun
mkDROptions image cwd =
    DockerRun
        { dr_image   = image
        , dr_volumes = [cwd :- root]
        , dr_workdir = root
        }
  where
    root = "/opt/stack-offline"
| cblp/stack-offline | test/Spec.hs | gpl-3.0 | 3,837 | 0 | 15 | 926 | 955 | 527 | 428 | 76 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.RegionAutoscalers.Patch
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Updates an autoscaler in the specified project using the data included
-- in the request. This method supports PATCH semantics and uses the JSON
-- merge patch format and processing rules.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.regionAutoscalers.patch@.
module Network.Google.Resource.Compute.RegionAutoscalers.Patch
(
-- * REST Resource
RegionAutoscalersPatchResource
-- * Creating a Request
, regionAutoscalersPatch
, RegionAutoscalersPatch
-- * Request Lenses
, rapRequestId
, rapProject
, rapPayload
, rapAutoscaler
, rapRegion
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.regionAutoscalers.patch@ method which the
-- 'RegionAutoscalersPatch' request conforms to.
--
-- PATCH \/compute\/v1\/projects\/{project}\/regions\/{region}\/autoscalers
type RegionAutoscalersPatchResource =
     "compute" :>
       "v1" :>
         "projects" :>
           Capture "project" Text :>
             "regions" :>
               Capture "region" Text :>
                 "autoscalers" :>
                   QueryParam "requestId" Text :>
                     QueryParam "autoscaler" Text :>
                       QueryParam "alt" AltJSON :>
                         ReqBody '[JSON] Autoscaler :> Patch '[JSON] Operation
-- | Updates an autoscaler in the specified project using the data included
-- in the request. This method supports PATCH semantics and uses the JSON
-- merge patch format and processing rules.
--
-- /See:/ 'regionAutoscalersPatch' smart constructor.
data RegionAutoscalersPatch =
  RegionAutoscalersPatch'
    { _rapRequestId :: !(Maybe Text) -- ^ Optional idempotency\/retry token
    , _rapProject :: !Text -- ^ Project ID
    , _rapPayload :: !Autoscaler -- ^ Request body
    , _rapAutoscaler :: !(Maybe Text) -- ^ Name of the autoscaler to patch
    , _rapRegion :: !Text -- ^ Region scoping the request
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'RegionAutoscalersPatch' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rapRequestId'
--
-- * 'rapProject'
--
-- * 'rapPayload'
--
-- * 'rapAutoscaler'
--
-- * 'rapRegion'
regionAutoscalersPatch
    :: Text -- ^ 'rapProject'
    -> Autoscaler -- ^ 'rapPayload'
    -> Text -- ^ 'rapRegion'
    -> RegionAutoscalersPatch
regionAutoscalersPatch project payload region =
  RegionAutoscalersPatch'
    { _rapRequestId = Nothing
    , _rapProject = project
    , _rapPayload = payload
    , _rapAutoscaler = Nothing
    , _rapRegion = region
    }
-- | An optional request ID so that retried requests are recognised by the
-- server and ignored once the original has completed, preventing
-- accidentally duplicated commitments. Must be a valid UUID; the zero
-- UUID (00000000-0000-0000-0000-000000000000) is not supported.
rapRequestId :: Lens' RegionAutoscalersPatch (Maybe Text)
rapRequestId =
    lens _rapRequestId (\record new -> record {_rapRequestId = new})
-- | Project ID for this request.
rapProject :: Lens' RegionAutoscalersPatch Text
rapProject =
    lens _rapProject (\record new -> record {_rapProject = new})
-- | Multipart request metadata.
rapPayload :: Lens' RegionAutoscalersPatch Autoscaler
rapPayload =
    lens _rapPayload (\record new -> record {_rapPayload = new})
-- | Name of the autoscaler to patch.
rapAutoscaler :: Lens' RegionAutoscalersPatch (Maybe Text)
rapAutoscaler =
    lens _rapAutoscaler (\record new -> record {_rapAutoscaler = new})
-- | Name of the region scoping this request.
rapRegion :: Lens' RegionAutoscalersPatch Text
rapRegion =
    lens _rapRegion (\record new -> record {_rapRegion = new})
instance GoogleRequest RegionAutoscalersPatch where
        type Rs RegionAutoscalersPatch = Operation
        type Scopes RegionAutoscalersPatch =
             '["https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/compute"]
        -- NB: the positional arguments must line up exactly with the
        -- captures and query parameters of 'RegionAutoscalersPatchResource'.
        requestClient RegionAutoscalersPatch'{..}
          = go _rapProject _rapRegion _rapRequestId
              _rapAutoscaler
              (Just AltJSON)
              _rapPayload
              computeService
          where go
                  = buildClient
                      (Proxy :: Proxy RegionAutoscalersPatchResource)
                      mempty
| brendanhay/gogol | gogol-compute/gen/Network/Google/Resource/Compute/RegionAutoscalers/Patch.hs | mpl-2.0 | 5,406 | 0 | 18 | 1,218 | 641 | 382 | 259 | 97 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.SQL.SSLCerts.CreateEphemeral
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Generates a short-lived X509 certificate containing the provided public
-- key and signed by a private key specific to the target instance. Users
-- may use the certificate to authenticate as themselves when connecting to
-- the database.
--
-- /See:/ <https://developers.google.com/cloud-sql/ Cloud SQL Admin API Reference> for @sql.sslCerts.createEphemeral@.
module Network.Google.Resource.SQL.SSLCerts.CreateEphemeral
(
-- * REST Resource
SSLCertsCreateEphemeralResource
-- * Creating a Request
, sslCertsCreateEphemeral
, SSLCertsCreateEphemeral
-- * Request Lenses
, scceXgafv
, scceUploadProtocol
, scceProject
, scceAccessToken
, scceUploadType
, sccePayload
, scceCallback
, scceInstance
) where
import Network.Google.Prelude
import Network.Google.SQLAdmin.Types
-- | A resource alias for @sql.sslCerts.createEphemeral@ method which the
-- 'SSLCertsCreateEphemeral' request conforms to.
--
-- POST \/v1\/projects\/{project}\/instances\/{instance}\/createEphemeral
type SSLCertsCreateEphemeralResource =
     "v1" :>
       "projects" :>
         Capture "project" Text :>
           "instances" :>
             Capture "instance" Text :>
               "createEphemeral" :>
                 QueryParam "$.xgafv" Xgafv :>
                   QueryParam "upload_protocol" Text :>
                     QueryParam "access_token" Text :>
                       QueryParam "uploadType" Text :>
                         QueryParam "callback" Text :>
                           QueryParam "alt" AltJSON :>
                             ReqBody '[JSON] SSLCertsCreateEphemeralRequest :>
                               Post '[JSON] SSLCert
-- | Generates a short-lived X509 certificate containing the provided public
-- key and signed by a private key specific to the target instance. Users
-- may use the certificate to authenticate as themselves when connecting to
-- the database.
--
-- /See:/ 'sslCertsCreateEphemeral' smart constructor.
data SSLCertsCreateEphemeral =
  SSLCertsCreateEphemeral'
    { _scceXgafv :: !(Maybe Xgafv) -- ^ V1 error format selector
    , _scceUploadProtocol :: !(Maybe Text) -- ^ e.g. \"raw\", \"multipart\"
    , _scceProject :: !Text -- ^ Cloud SQL project ID
    , _scceAccessToken :: !(Maybe Text) -- ^ OAuth access token
    , _scceUploadType :: !(Maybe Text) -- ^ Legacy upload protocol
    , _sccePayload :: !SSLCertsCreateEphemeralRequest -- ^ Request body
    , _scceCallback :: !(Maybe Text) -- ^ JSONP callback
    , _scceInstance :: !Text -- ^ Instance ID (without project ID)
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'SSLCertsCreateEphemeral' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'scceXgafv'
--
-- * 'scceUploadProtocol'
--
-- * 'scceProject'
--
-- * 'scceAccessToken'
--
-- * 'scceUploadType'
--
-- * 'sccePayload'
--
-- * 'scceCallback'
--
-- * 'scceInstance'
sslCertsCreateEphemeral
    :: Text -- ^ 'scceProject'
    -> SSLCertsCreateEphemeralRequest -- ^ 'sccePayload'
    -> Text -- ^ 'scceInstance'
    -> SSLCertsCreateEphemeral
sslCertsCreateEphemeral project payload inst =
  SSLCertsCreateEphemeral'
    { _scceXgafv = Nothing
    , _scceUploadProtocol = Nothing
    , _scceProject = project
    , _scceAccessToken = Nothing
    , _scceUploadType = Nothing
    , _sccePayload = payload
    , _scceCallback = Nothing
    , _scceInstance = inst
    }
-- | V1 error format.
scceXgafv :: Lens' SSLCertsCreateEphemeral (Maybe Xgafv)
scceXgafv =
    lens _scceXgafv (\record new -> record {_scceXgafv = new})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
scceUploadProtocol :: Lens' SSLCertsCreateEphemeral (Maybe Text)
scceUploadProtocol =
    lens _scceUploadProtocol (\record new -> record {_scceUploadProtocol = new})
-- | Project ID of the Cloud SQL project.
scceProject :: Lens' SSLCertsCreateEphemeral Text
scceProject =
    lens _scceProject (\record new -> record {_scceProject = new})
-- | OAuth access token.
scceAccessToken :: Lens' SSLCertsCreateEphemeral (Maybe Text)
scceAccessToken =
    lens _scceAccessToken (\record new -> record {_scceAccessToken = new})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
scceUploadType :: Lens' SSLCertsCreateEphemeral (Maybe Text)
scceUploadType =
    lens _scceUploadType (\record new -> record {_scceUploadType = new})
-- | Multipart request metadata.
sccePayload :: Lens' SSLCertsCreateEphemeral SSLCertsCreateEphemeralRequest
sccePayload =
    lens _sccePayload (\record new -> record {_sccePayload = new})
-- | JSONP
scceCallback :: Lens' SSLCertsCreateEphemeral (Maybe Text)
scceCallback =
    lens _scceCallback (\record new -> record {_scceCallback = new})
-- | Cloud SQL instance ID. This does not include the project ID.
scceInstance :: Lens' SSLCertsCreateEphemeral Text
scceInstance =
    lens _scceInstance (\record new -> record {_scceInstance = new})
instance GoogleRequest SSLCertsCreateEphemeral where
        type Rs SSLCertsCreateEphemeral = SSLCert
        type Scopes SSLCertsCreateEphemeral =
             '["https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/sqlservice.admin"]
        -- NB: the positional arguments must line up exactly with the
        -- captures and query parameters of 'SSLCertsCreateEphemeralResource'.
        requestClient SSLCertsCreateEphemeral'{..}
          = go _scceProject _scceInstance _scceXgafv
              _scceUploadProtocol
              _scceAccessToken
              _scceUploadType
              _scceCallback
              (Just AltJSON)
              _sccePayload
              sQLAdminService
          where go
                  = buildClient
                      (Proxy :: Proxy SSLCertsCreateEphemeralResource)
                      mempty
| brendanhay/gogol | gogol-sqladmin/gen/Network/Google/Resource/SQL/SSLCerts/CreateEphemeral.hs | mpl-2.0 | 6,182 | 0 | 20 | 1,438 | 870 | 508 | 362 | 129 | 1 |
-- |
-- Module: SBP2JSON
-- Copyright: Copyright (C) 2015 Swift Navigation, Inc.
-- License: LGPL-3
-- Maintainer: Mark Fine <dev@swiftnav.com>
-- Stability: experimental
-- Portability: portable
--
-- SBP to JSON tool - reads SBP binary from stdin and sends SBP JSON
-- to stdout.
import BasicPrelude
import Control.Monad.Trans.Resource
import Data.Aeson
import Data.Aeson.Encode
import Data.ByteString.Builder
import qualified Data.ByteString.Lazy as BL
import Data.Conduit
import Data.Conduit.Binary
import qualified Data.Conduit.List as CL
import Data.Conduit.Serialization.Binary
import SwiftNav.SBP
import System.IO
-- | Render one SBP message as a single newline-terminated line of JSON.
encodeLine :: SBPMsg -> ByteString
encodeLine msg = BL.toStrict (toLazyByteString jsonLine)
  where
    jsonLine = encodeToBuilder (toJSON msg) <> "\n"
main :: IO ()
main =
  -- Stream stdin through the binary SBP decoder and emit each decoded
  -- message on stdout as one line of JSON.
  runResourceT $
    sourceHandle stdin =$=
    conduitDecode =$=
    CL.map encodeLine $$
    sinkHandle stdout
| swift-nav/libsbp | haskell/main/SBP2JSON.hs | lgpl-3.0 | 1,042 | 0 | 9 | 267 | 167 | 101 | 66 | 21 | 1 |
import Data.List
import Control.Monad.Memo
-- | One lattice step: D(own) or R(ight).  Currently unused: the
-- functions below encode moves as the characters 'D' and 'R' instead.
data Moves = D | R
-- | All 2^x move sequences of length @x@ over the alphabet \"DR\".
-- Note: these are unconstrained sequences, not paths of a fixed grid;
-- compare 'space'.  (Added the inferred type signature, which the
-- original omitted.)
space' :: (Eq a, Num a) => a -> [String]
space' 0 = [[]]
space' x = [q : qs | qs <- space' (x - 1), q <- ['D', 'R']]
-- All orderings (duplicates included) of n 'R' moves and n 'D' moves.
space n = permutations (replicate n 'R' ++ replicate n 'D')
-- | Number of monotone lattice paths across an n x n grid, i.e. the
-- central binomial coefficient C(2n, n).  The original enumerated all
-- (2n)! permutations and deduplicated with 'nub', which is unusable
-- beyond n ~ 4; this equivalent closed form is O(n).  The product is
-- built incrementally (acc * (n+k) / k stays integral at every step)
-- so intermediate values never overflow while the result fits in Int.
latticePaths :: Int -> Int
latticePaths n = go 1 1
  where
    go acc k
      | k > n     = acc
      | otherwise = go (acc * (n + k) `div` k) (k + 1)
-- Monotone lattice-path count by naive double recursion (exponential
-- time; 'pathsm' below is the memoised equivalent).
-- NOTE(review): by the original author's convention paths 0 0 is 0,
-- not the conventional 1; that case is unreachable from any call in
-- which both arguments are positive.
paths r d
  | r == 0 && d == 0 = 0
  | r == 0 || d == 0 = 1
  | otherwise        = paths (r - 1) d + paths r (d - 1)
--pathsm :: (Num n, Ord n, MonadMemo (n, n) n m) => n -> n -> m n
-- | Memoised variant of 'paths' built on monad-memo: 'for2'/'memo'
-- cache every (r, d) pair, turning the exponential double recursion
-- into O(r * d) table lookups.  Same (0, 0) -> 0 convention as 'paths'.
pathsm 0 0 = return 0
pathsm 0 _ = return 1
pathsm _ 0 = return 1
pathsm r d = do
    r1 <- for2 memo pathsm (r - 1) d
    d1 <- for2 memo pathsm r (d -1)
    return (r1 + d1)
-- | Run 'pathsm' with a fresh memo table and return the pure result.
evalPathsm :: (Num n, Ord n) => n -> n -> n
evalPathsm r d = startEvalMemo $ pathsm r d
-- | Memoised path count for a square n x n grid.
runPathsm n = evalPathsm n n
--main = putStrLn $ show $ paths
| LambdaMx/haskelldojo | session_7/LatticePaths.hs | unlicense | 730 | 0 | 10 | 192 | 387 | 195 | 192 | 21 | 1 |
module Hecate.Backend.SQLite.AppContext
( AppContext (..)
) where
import qualified Database.SQLite.Simple as SQLite
import Hecate.Data (Config)
-- | 'AppContext' represents the shared environment for computations which occur
-- within our application. Values of this type are created by 'createContext'.
data AppContext = AppContext
  { appContextConfig     :: Config -- ^ Immutable application configuration
  , appContextConnection :: SQLite.Connection -- ^ Open SQLite database handle
  }
| henrytill/hecate | src/Hecate/Backend/SQLite/AppContext.hs | apache-2.0 | 448 | 0 | 9 | 91 | 61 | 41 | 20 | 7 | 0 |
-- layout can be stopped by parse error
test = let x = 1; y = x in y | metaborg/jsglr | org.spoofax.jsglr/tests-offside/terms/doaitse/layout1.hs | apache-2.0 | 68 | 0 | 8 | 18 | 24 | 13 | 11 | 1 | 1 |
-- Thinking Functionally with Haskell
-- Chapter 4 Homework C
-- | Are two strictly decreasing lists disjoint, i.e. do they share no
-- element?
--
-- Bug fix: the original returned True on finding a shared element and
-- False for empty lists -- the exact negation of "disjoint".  An empty
-- list is disjoint from anything, and a match means NOT disjoint.
-- Because both lists are decreasing, whichever head is larger can be
-- dropped: it cannot occur in the other list.
disjoint :: (Ord a) => [a] -> [a] -> Bool
disjoint [] _ = True
disjoint _ [] = True
disjoint (x:xs) (y:ys)
    | x == y    = False
    | x > y     = disjoint xs (y:ys)
    | otherwise = disjoint (x:xs) ys
| Oscarzhao/haskell | functional_program_design/ch04/disjoint.hs | apache-2.0 | 355 | 0 | 9 | 112 | 128 | 70 | 58 | 6 | 3 |
module GridPolytopes.A338323Spec (main, spec) where
import Test.Hspec
import GridPolytopes.A338323 (a338323)
-- | Run this spec standalone.
main :: IO ()
main = hspec spec
-- | The first few terms of A338323 must match the published sequence.
spec :: Spec
spec = describe "A338323" $
  it "correctly computes the first 5 elements" $
    take 5 (map a338323 [0..]) `shouldBe` [0, 0, 14, 138, 640]
| peterokagey/haskellOEIS | test/GridPolytopes/A338323Spec.hs | apache-2.0 | 338 | 0 | 10 | 63 | 115 | 65 | 50 | 10 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE ExtendedDefaultRules #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ViewPatterns #-}
import qualified Blaze.ByteString.Builder as Blaze
import qualified Blaze.ByteString.Builder.Char.Utf8 as Blaze
import Codec.Archive.Tar as Tar
import Codec.Archive.Tar.Entry as Tar
import Codec.Compression.GZip as GZip
import Control.Applicative
import Control.Concurrent.MVar
import Control.Monad
import Control.Monad.Trans
import qualified Data.ByteString.Lazy.Char8 as L
import Data.Conduit
import Data.Configurator as C
import Data.Configurator.Types
import Data.List
import Data.Monoid
import qualified Data.Text as T
import qualified Data.Text.Lazy as LT
import Data.Time
import Database.Persist
import Database.Persist.Sqlite
import qualified Database.Persist.Store
import qualified Filesystem as F
import qualified Filesystem.Path.CurrentOS as FP
import Network.HTTP.Conduit
import Shelly
import System.Environment
import System.IO
import System.IO.Unsafe
import System.Locale
import Text.Shakespeare.Text
import Yesod.Default.Config
import Model
import Settings
default (Integer, LT.Text)
-- | Cached tarball path for a package version:
-- @\<appDir\>\/package\/\<name\>-\<ver\>.tar.gz@.
archiveName :: FP.FilePath -> String -> String -> FP.FilePath
archiveName appDir name ver =
    appDir </> "package" </> [st|#{name}-#{ver}|] <.> "tar.gz"
-- | Location of the generated Hackage 00-index tarball.
indexFile :: FP.FilePath -> FP.FilePath
indexFile appDir =
    appDir </> "00-index.tar.gz"
main :: IO ()
main = do
    args <- getArgs
    case args of
        -- Exactly one argument: path to the (required) configurator file.
        [conf] -> shelly . main' =<< C.load [Required conf]
        _ -> putStrLn "Usage: hackage-mirror <conf-file>"
-- | One full mirror pass: sync the upload log into the database,
-- download any missing tarballs, then rebuild the 00-index tarball and
-- the local log under @~/.hackage@.
main' :: Config -> Sh ()
main' conf = do
    home <- get_env_text "HOME"
    let appDir = home </> ".hackage"
    mkdir_p appDir
    initDB
    repo <- liftIO $ require conf "repo"
    updateMeta $ repo ++ "/log"
    newpacks <- whatsnew
    echo [lt|download #{show (length newpacks)} new packages|]
    mng <- liftIO $ newManager def
    -- A failure on one package is logged and does not abort the rest.
    forM_ (zip [1..] newpacks) $ \(ix, Entity key Package {..}) -> do
        let url = [st|#{repo}/#{packageName}/#{packageVersion}/#{packageName}-#{packageVersion}.tar.gz|]
            savedir = appDir </> "package"
            filename = savedir </> FP.fromText [st|#{packageName}-#{packageVersion}.tar.gz|]
        mkdir_p savedir
        echo [lt|[#{show ix}/#{show $ length newpacks}] downloading #{url}...|]
        download mng (T.unpack url) filename
        -- Mark as downloaded only after the file is on disk.
        runDB $ update key [ PackageDownloaded =. True ]
      `catchany_sh` (\e -> echo_err $ LT.pack $ show e)
    liftIO $ closeManager mng
    makeZZIndex appDir
    makeLog appDir
-- | Rebuild @00-index.tar.gz@: for every known package, pull the .cabal
-- file out of its cached tarball and pack all of them into one index
-- tarball.  The index is written to a @-part@ temp file first and then
-- renamed, so readers never see a half-written index.
makeZZIndex :: FP.FilePath -> Sh ()
makeZZIndex appDir = do
    echo "building 00-index.tar.gz..."
    pkgs <- runDB $ selectList [] [ Asc PackageName ]
    entries <- forM pkgs $ \(Entity _ Package{..}) -> do
        let arcname =
                archiveName appDir (T.unpack packageName) (T.unpack packageVersion)
            cabalname =
                T.unpack [st|/#{packageName}.cabal|]
            Right tarpath =
                toTarPath False $ T.unpack [st|#{packageName}/#{packageVersion}/#{cabalname}|]
        withFileSh arcname ReadMode $ \h -> do
            bs <- liftIO $ L.hGetContents h
            -- Walk the tar entries lazily until the .cabal file turns up.
            let loop e = case e of
                    Tar.Done -> do
                        echo_err [lt|#{toTextIgnore arcname}: cabal file not found|]
                        return Nothing
                    Tar.Fail err -> do
                        echo_err [lt|#{show err}|]
                        return Nothing
                    Tar.Next (entry @ Entry { entryContent = NormalFile con _ }) _
                        | cabalname `isSuffixOf` entryPath entry -> do
                            -- Copy and force the content so it no longer
                            -- depends on the (soon-closed) file handle.
                            let ccon = L.copy con
                            L.length ccon `seq` (return $ Just $ fileEntry tarpath ccon)
                    Next _ next ->
                        loop next
            loop $ Tar.read (GZip.decompress bs)
        `catchany_sh` (\e -> inspect e >> return Nothing)
    -- Unreadable packages were logged above and are simply skipped here.
    let tarball = GZip.compress $ Tar.write [ e | Just e <- entries ]
        tmpPath = indexFile appDir <> "-part"
    withFileSh tmpPath WriteMode $ \h -> liftIO $ L.hPut h tarball
    mv tmpPath (indexFile appDir)
-- | Write a Hackage-style upload log: one line per package, oldest
-- first, in the format \"\<time\> \<uploader\> \<name\> \<version\>\".
makeLog :: FP.FilePath -> Sh ()
makeLog appDir = do
    echo "building log..."
    pkgs <- runDB $ selectList [] [ Asc PackageDate ]
    withFileSh (appDir </> "log") WriteMode$ \h -> liftIO $
        -- Assemble the whole file with a Builder before writing it out.
        L.hPut h $ Blaze.toLazyByteString $ mconcat $ map (Blaze.fromText . printPkg) pkgs
  where
    printPkg (Entity _ Package{..}) =
        let tm = T.pack $ formatTime defaultTimeLocale "%c" packageDate
        in T.unwords [tm, packageUploader, packageName, packageVersion] <> "\n"
-- Global holder for the database connection pool.  The
-- unsafePerformIO + NOINLINE idiom guarantees a single shared MVar; it
-- starts empty and is filled exactly once by 'initDB'.
mvPool = unsafePerformIO $ newEmptyMVar
{-# NOINLINE mvPool #-}
-- | Load the sqlite config, create the pool, run migrations, and
-- publish the pool in 'mvPool'.  Must run before any 'runDB' call.
initDB :: Sh ()
initDB = liftIO $ do
    -- initialize DB connection
    dbconf <- withYamlEnvironment "config/sqlite.yml" Development
              Database.Persist.Store.loadConfig >>=
              Database.Persist.Store.applyEnv
    p <- Database.Persist.Store.createPoolConfig (dbconf :: Settings.PersistConfig)
    Database.Persist.Store.runPool dbconf (runMigration migrateAll) p
    putMVar mvPool p
-- | Run a persistent action on the shared pool.  'withMVar' also
-- serialises access: at most one database action runs at a time.
runDB :: MonadIO m => SqlPersist IO a -> m a
runDB sql =
    liftIO $ withMVar mvPool $ runSqlPool sql
-- | Fetch the remote upload log and insert any new rows into the local
-- packages table, oldest line first so 'PackageDate' ordering holds.
updateMeta :: String -> Sh ()
updateMeta url = do
    echo [lt|downloading #{url}...|]
    logs <- L.unpack <$> simpleHttp url
    echo "updating database ..."
    forM_ (reverse $ lines logs) $ \line -> do
        -- Each line is "<time words...> <uploader> <name> <version>";
        -- parsed right-to-left, the leftover words are the timestamp.
        -- NOTE(review): this pattern is irrefutable and will throw on a
        -- line with fewer than four words -- caught by catchany_sh below.
        let (version : name : uploader : (unwords . reverse -> tm)) = reverse $ words line
        case parseTime defaultTimeLocale "%c" tm of
            Just t -> do
                -- A duplicate insert fails and is deliberately ignored.
                runDB $ void $ Database.Persist.Sqlite.insert $ Package (T.pack name) (T.pack version) (T.pack uploader) t False
                `catchany_sh` (const $ return ())
            Nothing ->
                echo_err $ LT.pack $ "cannot parse: " ++ line
        return ()
    `catchany_sh` (\e -> echo_err $ LT.pack $ show e)
-- | Packages recorded in the database but not downloaded yet.
whatsnew :: Sh [Entity Package]
whatsnew =
    runDB $ selectList [PackageDownloaded ==. False] []
-- | HTTP GET @url@ and write the body to the given file.
-- NOTE(review): httpLbs buffers the whole body in memory before the
-- write; fine for package tarballs, but not a streaming download.
download :: Manager -> String -> FP.FilePath -> Sh ()
download mng url pa = do
    liftIO $ runResourceT $ do
        req <- parseUrl url
        Response {..} <- httpLbs req mng
        liftIO $ F.withFile pa WriteMode $ \h ->
            L.hPut h responseBody
-- | bracket-style file access lifted into 'Sh': the handle is closed
-- even if the body throws.
withFileSh :: FP.FilePath -> IOMode -> (Handle -> Sh a) -> Sh a
withFileSh pa mode m = do
    h <- liftIO $ F.openFile pa mode
    m h `finally_sh` liftIO (hClose h)
| tanakh/hackage-mirror | crawler.hs | bsd-2-clause | 6,619 | 1 | 30 | 1,767 | 1,995 | 1,024 | 971 | 158 | 4 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
Arity and eta expansion
-}
{-# LANGUAGE CPP #-}
{-# OPTIONS_GHC -Wno-incomplete-record-updates #-}
-- | Arity and eta expansion
module CoreArity (
manifestArity, joinRhsArity, exprArity, typeArity,
exprEtaExpandArity, findRhsArity, etaExpand,
etaExpandToJoinPoint, etaExpandToJoinPointRule,
exprBotStrictness_maybe
) where
#include "HsVersions.h"
import GhcPrelude
import CoreSyn
import CoreFVs
import CoreUtils
import CoreSubst
import Demand
import Var
import VarEnv
import Id
import Type
import TyCon ( initRecTc, checkRecTc )
import Predicate ( isDictTy )
import Coercion
import BasicTypes
import Unique
import DynFlags ( DynFlags, GeneralFlag(..), gopt )
import Outputable
import FastString
import Util ( debugIsOn )
{-
************************************************************************
* *
manifestArity and exprArity
* *
************************************************************************
exprArity is a cheap-and-cheerful version of exprEtaExpandArity.
It tells how many things the expression can be applied to before doing
any work. It doesn't look inside cases, lets, etc. The idea is that
exprEtaExpandArity will do the hard work, leaving something that's easy
for exprArity to grapple with. In particular, Simplify uses exprArity to
compute the ArityInfo for the Id.
Originally I thought that it was enough just to look for top-level lambdas, but
it isn't. I've seen this
foo = PrelBase.timesInt
We want foo to get arity 2 even though the eta-expander will leave it
unchanged, in the expectation that it'll be inlined. But occasionally it
isn't, because foo is blacklisted (used in a rule).
Similarly, see the ok_note check in exprEtaExpandArity. So
f = __inline_me (\x -> e)
won't be eta-expanded.
And in any case it seems more robust to have exprArity be a bit more intelligent.
But note that (\x y z -> f x y z)
should have arity 3, regardless of f's arity.
-}
manifestArity :: CoreExpr -> Arity
-- ^ Count the leading value lambdas that are manifestly present,
-- looking through casts and non-code ticks.
manifestArity expr = case expr of
  Lam b body
    | isId b    -> 1 + manifestArity body
    | otherwise -> manifestArity body
  Tick t body
    | not (tickishIsCode t) -> manifestArity body
  Cast body _ -> manifestArity body
  _           -> 0
joinRhsArity :: CoreExpr -> JoinArity
-- Join points are supposed to have manifestly-visible lambdas at the
-- top: no ticks, no casts, nothing.  Unlike 'manifestArity', type
-- lambdas count towards JoinArity.
joinRhsArity = go 0
  where
    go n (Lam _ e) = go (n + 1) e
    go n _         = n
---------------
exprArity :: CoreExpr -> Arity
-- ^ An approximate, fast, version of 'exprEtaExpandArity'
exprArity e = go e
  where
    go (Var v) = idArity v
    go (Lam x e) | isId x = go e + 1
                 | otherwise = go e
    go (Tick t e) | not (tickishIsCode t) = go e
    -- A cast cannot add arity beyond what its result type allows.
    go (Cast e co) = trim_arity (go e) (coercionRKind co)
                        -- Note [exprArity invariant]
    go (App e (Type _)) = go e
    go (App f a) | exprIsTrivial a = (go f - 1) `max` 0
        -- See Note [exprArity for applications]
        -- NB: coercions count as a value argument
    -- Anything else (case, let, non-trivial app, ...) contributes 0.
    go _ = 0
    trim_arity :: Arity -> Type -> Arity
    trim_arity arity ty = arity `min` length (typeArity ty)
---------------
typeArity :: Type -> [OneShotInfo]
-- How many value arrows are visible in the type?
-- We look through foralls, and newtypes
-- See Note [exprArity invariant]
-- The result length bounds any arity claimed for an expression of this
-- type; each element carries the one-shot info of that argument.
typeArity ty
  = go initRecTc ty
  where
    go rec_nts ty
      | Just (_, ty') <- splitForAllTy_maybe ty
      = go rec_nts ty'
      | Just (arg,res) <- splitFunTy_maybe ty
      = typeOneShot arg : go rec_nts res
      | Just (tc,tys) <- splitTyConApp_maybe ty
      , Just (ty', _) <- instNewTyCon_maybe tc tys
      , Just rec_nts' <- checkRecTc rec_nts tc  -- See Note [Expanding newtypes]
                                                -- in TyCon
--   , not (isClassTyCon tc)    -- Do not eta-expand through newtype classes
--                              -- See Note [Newtype classes and eta expansion]
--                              (no longer required)
      = go rec_nts' ty'
        -- Important to look through non-recursive newtypes, so that, eg
        --      (f x)   where f has arity 2, f :: Int -> IO ()
        -- Here we want to get arity 1 for the result!
        --
        -- AND through a layer of recursive newtypes
        -- e.g. newtype Stream m a b = Stream (m (Either b (a, Stream m a b)))
      | otherwise
      = []
---------------
exprBotStrictness_maybe :: CoreExpr -> Maybe (Arity, StrictSig)
-- A cheap and cheerful function that identifies bottoming functions
-- and gives them a suitable strictness signature. It's used during
-- float-out
exprBotStrictness_maybe e
  = case getBotArity (arityType env e) of
        Nothing -> Nothing
        Just ar -> Just (ar, sig ar)
  where
    -- Pedantic-bottoms on, and treat nothing as cheap: the conservative
    -- setting for recognising definite bottoms.
    env = AE { ae_ped_bot = True, ae_cheap_fn = \ _ _ -> False }
    -- Arity-many lazy/top demands, bottoming result.
    sig ar = mkClosedStrictSig (replicate ar topDmd) botRes
{-
Note [exprArity invariant]
~~~~~~~~~~~~~~~~~~~~~~~~~~
exprArity has the following invariants:
(1) If typeArity (exprType e) = n,
then manifestArity (etaExpand e n) = n
That is, etaExpand can always expand as much as typeArity says
So the case analysis in etaExpand and in typeArity must match
(2) exprArity e <= typeArity (exprType e)
(3) Hence if (exprArity e) = n, then manifestArity (etaExpand e n) = n
That is, if exprArity says "the arity is n" then etaExpand really
can get "n" manifest lambdas to the top.
Why is this important? Because
- In GHC.Iface.Tidy we use exprArity to fix the *final arity* of
each top-level Id, and in
- In CorePrep we use etaExpand on each rhs, so that the visible lambdas
actually match that arity, which in turn means
that the StgRhs has the right number of lambdas
An alternative would be to do the eta-expansion in GHC.Iface.Tidy, at least
for top-level bindings, in which case we would not need the trim_arity
in exprArity. That is a less local change, so I'm going to leave it for today!
Note [Newtype classes and eta expansion]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
NB: this nasty special case is no longer required, because
for newtype classes we don't use the class-op rule mechanism
at all. See Note [Single-method classes] in TcInstDcls. SLPJ May 2013
-------- Old out of date comments, just for interest -----------
We have to be careful when eta-expanding through newtypes. In general
it's a good idea, but annoyingly it interacts badly with the class-op
rule mechanism. Consider
class C a where { op :: a -> a }
instance C b => C [b] where
op x = ...
These translate to
co :: forall a. (a->a) ~ C a
$copList :: C b -> [b] -> [b]
$copList d x = ...
$dfList :: C b -> C [b]
{-# DFunUnfolding = [$copList] #-}
$dfList d = $copList d |> co@[b]
Now suppose we have:
dCInt :: C Int
blah :: [Int] -> [Int]
blah = op ($dfList dCInt)
Now we want the built-in op/$dfList rule will fire to give
blah = $copList dCInt
But with eta-expansion 'blah' might (and in #3772, which is
slightly more complicated, does) turn into
blah = op (\eta. ($dfList dCInt |> sym co) eta)
and now it is *much* harder for the op/$dfList rule to fire, because
exprIsConApp_maybe won't hold of the argument to op. I considered
trying to *make* it hold, but it's tricky and I gave up.
The test simplCore/should_compile/T3722 is an excellent example.
-------- End of old out of date comments, just for interest -----------
Note [exprArity for applications]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we come to an application we check that the arg is trivial.
eg f (fac x) does not have arity 2,
even if f has arity 3!
* We require that the argument is trivial rather than merely cheap.  Suppose f has arity 2.
Then f (Just y)
has arity 0, because if we gave it arity 1 and then inlined f we'd get
let v = Just y in \w. <f-body>
which has arity 0. And we try to maintain the invariant that we don't
have arity decreases.
* The `max 0` is important! (\x y -> f x) has arity 2, even if f is
unknown, hence arity 0
************************************************************************
* *
Computing the "arity" of an expression
* *
************************************************************************
Note [Definition of arity]
~~~~~~~~~~~~~~~~~~~~~~~~~~
The "arity" of an expression 'e' is n if
applying 'e' to *fewer* than n *value* arguments
converges rapidly
Or, to put it another way
there is no work lost in duplicating the partial
application (e x1 .. x(n-1))
In the divergent case, no work is lost by duplicating because if the thing
is evaluated once, that's the end of the program.
Or, to put it another way, in any context C
C[ (\x1 .. xn. e x1 .. xn) ]
is as efficient as
C[ e ]
It's all a bit more subtle than it looks:
Note [One-shot lambdas]
~~~~~~~~~~~~~~~~~~~~~~~
Consider one-shot lambdas
let x = expensive in \y z -> E
We want this to have arity 1 if the \y-abstraction is a 1-shot lambda.
Note [Dealing with bottom]
~~~~~~~~~~~~~~~~~~~~~~~~~~
A Big Deal with computing arities is expressions like
f = \x -> case x of
True -> \s -> e1
False -> \s -> e2
This happens all the time when f :: Bool -> IO ()
In this case we do eta-expand, in order to get that \s to the
top, and give f arity 2.
This isn't really right in the presence of seq. Consider
(f bot) `seq` 1
This should diverge! But if we eta-expand, it won't. We ignore this
"problem" (unless -fpedantic-bottoms is on), because being scrupulous
would lose an important transformation for many programs. (See
#5587 for an example.)
Consider also
f = \x -> error "foo"
Here, arity 1 is fine. But if it is
f = \x -> case x of
True -> error "foo"
False -> \y -> x+y
then we want to get arity 2. Technically, this isn't quite right, because
(f True) `seq` 1
should diverge, but it'll converge if we eta-expand f. Nevertheless, we
do so; it improves some programs significantly, and increasing convergence
isn't a bad thing. Hence the ABot/ATop in ArityType.
So these two transformations aren't always the Right Thing, and we
have several tickets reporting unexpected behaviour resulting from
this transformation. So we try to limit it as much as possible:
(1) Do NOT move a lambda outside a known-bottom case expression
case undefined of { (a,b) -> \y -> e }
This showed up in #5557
(2) Do NOT move a lambda outside a case if all the branches of
the case are known to return bottom.
case x of { (a,b) -> \y -> error "urk" }
This case is less important, but the idea is that if the fn is
going to diverge eventually anyway then getting the best arity
isn't an issue, so we might as well play safe
(3) Do NOT move a lambda outside a case unless
(a) The scrutinee is ok-for-speculation, or
(b) more liberally: the scrutinee is cheap (e.g. a variable), and
-fpedantic-bottoms is not enforced (see #2915 for an example)
Of course both (1) and (2) are readily defeated by disguising the bottoms.
4. Note [Newtype arity]
~~~~~~~~~~~~~~~~~~~~~~~~
Non-recursive newtypes are transparent, and should not get in the way.
We do (currently) eta-expand recursive newtypes too. So if we have, say
newtype T = MkT ([T] -> Int)
Suppose we have
e = coerce T f
where f has arity 1. Then: etaExpandArity e = 1;
that is, etaExpandArity looks through the coerce.
When we eta-expand e to arity 1: eta_expand 1 e T
we want to get: coerce T (\x::[T] -> (coerce ([T]->Int) e) x)
HOWEVER, note that if you use coerce bogusly you can get
coerce Int negate
And since negate has arity 2, you might try to eta expand. But you can't
decompose Int to a function type. Hence the final case in eta_expand.
Note [The state-transformer hack]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we have
f = e
where e has arity n. Then, if we know from the context that f has
a usage type like
t1 -> ... -> tn -1-> t(n+1) -1-> ... -1-> tm -> ...
then we can expand the arity to m. This usage type says that
any application (x e1 .. en) will be applied uniquely to (m-n) more args
Consider f = \x. let y = <expensive>
in case x of
True -> foo
False -> \(s:RealWorld) -> e
where foo has arity 1. Then we want the state hack to
apply to foo too, so we can eta expand the case.
Then we expect that if f is applied to one arg, it'll be applied to two
(that's the hack -- we don't really know, and sometimes it's false)
See also Id.isOneShotBndr.
Note [State hack and bottoming functions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's a terrible idea to use the state hack on a bottoming function.
Here's what happens (#2861):
f :: String -> IO T
f = \p. error "..."
Eta-expand, using the state hack:
f = \p. (\s. ((error "...") |> g1) s) |> g2
g1 :: IO T ~ (S -> (S,T))
g2 :: (S -> (S,T)) ~ IO T
Extrude the g2
f' = \p. \s. ((error "...") |> g1) s
f = f' |> (String -> g2)
Discard args for bottoming function
f' = \p. \s. ((error "...") |> g1 |> g3
g3 :: (S -> (S,T)) ~ (S,T)
Extrude g1.g3
f'' = \p. \s. (error "...")
f' = f'' |> (String -> S -> g1.g3)
And now we can repeat the whole loop. Aargh! The bug is in applying the
state hack to a function which then swallows the argument.
This arose in another guise in #3959. Here we had
catch# (throw exn >> return ())
Note that (throw :: forall a e. Exn e => e -> a) is called with [a = IO ()].
After inlining (>>) we get
catch# (\_. throw {IO ()} exn)
We must *not* eta-expand to
catch# (\_ _. throw {...} exn)
because 'catch#' expects to get a (# _,_ #) after applying its argument to
a State#, not another function!
In short, we use the state hack to allow us to push let inside a lambda,
but not to introduce a new lambda.
Note [ArityType]
~~~~~~~~~~~~~~~~
ArityType is the result of a compositional analysis on expressions,
from which we can decide the real arity of the expression (extracted
with function exprEtaExpandArity).
Here is what the fields mean. If an arbitrary expression 'f' has
ArityType 'at', then
* If at = ABot n, then (f x1..xn) definitely diverges. Partial
applications to fewer than n args may *or may not* diverge.
We allow ourselves to eta-expand bottoming functions, even
if doing so may lose some `seq` sharing,
let x = <expensive> in \y. error (g x y)
==> \y. let x = <expensive> in error (g x y)
* If at = ATop as, and n=length as,
then expanding 'f' to (\x1..xn. f x1 .. xn) loses no sharing,
assuming the calls of f respect the one-shot-ness of
its definition.
NB 'f' is an arbitrary expression, eg (f = g e1 e2). This 'f'
can have ArityType as ATop, with length as > 0, only if e1 e2 are
themselves.
* In both cases, f, (f x1), ... (f x1 ... x(n-1)) are definitely
really functions, or bottom, but *not* casts from a data type, in
at least one case branch. (If it's a function in one case branch but
an unsafe cast from a data type in another, the program is bogus.)
So eta expansion is dynamically ok; see Note [State hack and
bottoming functions], the part about catch#
Example:
f = \x\y. let v = <expensive> in
\s(one-shot) \t(one-shot). blah
'f' has ArityType [ManyShot,ManyShot,OneShot,OneShot]
The one-shot-ness means we can, in effect, push that
'let' inside the \st.
Suppose f = \xy. x+y
Then f :: AT [False,False] ATop
f v :: AT [False] ATop
f <expensive> :: AT [] ATop
-------------------- Main arity code ----------------------------
-}
-- See Note [ArityType]
-- | Result of the compositional arity analysis: either the expression
-- definitely diverges once applied to 'Arity' value args ('ABot'), or
-- its leading value lambdas carry the recorded one-shot info ('ATop').
data ArityType = ATop [OneShotInfo] | ABot Arity
     -- There is always an explicit lambda
     -- to justify the [OneShot], or the Arity

instance Outputable ArityType where
  -- Print only the length of the one-shot list, not its contents.
  ppr (ATop os) = text "ATop" <> parens (ppr (length os))
  ppr (ABot n)  = text "ABot" <> parens (ppr n)

vanillaArityType :: ArityType
vanillaArityType = ATop []      -- Totally uninformative
-- | The 'Arity' returned is the number of value args the
-- expression can be applied to without doing much work.
exprEtaExpandArity :: DynFlags -> CoreExpr -> Arity
-- exprEtaExpandArity is used when eta expanding
--      e  ==>  \xy -> e x y
exprEtaExpandArity dflags e
  = arity_of (arityType env e)
  where
    -- Collapse the analysis result to a plain arity
    arity_of (ATop oss) = length oss
    arity_of (ABot n)   = n
    env = AE { ae_cheap_fn = mk_cheap_fn dflags isCheapApp
             , ae_ped_bot  = gopt Opt_PedanticBottoms dflags }
getBotArity :: ArityType -> Maybe Arity
-- | Arity of a definitely-divergent function, if the analysis found one.
getBotArity at = case at of
  ABot n -> Just n
  _      -> Nothing
mk_cheap_fn :: DynFlags -> CheapAppFun -> CheapFun
-- Build the cheapness predicate used by the arity analysis.
-- An expression is cheap if 'exprIsCheapX' says so; with -fdicts-cheap
-- a dictionary-typed expression also counts as cheap.
mk_cheap_fn dflags cheap_app e mb_ty
  = exprIsCheapX cheap_app e
    || (gopt Opt_DictsCheap dflags && maybe False isDictTy mb_ty)
----------------------
findRhsArity :: DynFlags -> Id -> CoreExpr -> Arity -> (Arity, Bool)
-- This implements the fixpoint loop for arity analysis
-- See Note [Arity analysis]
-- If findRhsArity e = (n, is_bot) then
--  (a) any application of e to <n arguments will not do much work,
--      so it is safe to expand e  ==>  (\x1..xn. e x1 .. xn)
--  (b) if is_bot=True, then e applied to n args is guaranteed bottom
findRhsArity dflags bndr rhs old_arity
  = go (get_arity init_cheap_app)
      -- We always call exprEtaExpandArity once, but usually
      -- that produces a result equal to old_arity, and then
      -- we stop right away (since arities should not decrease)
      -- Result: the common case is that there is just one iteration
  where
    -- True if the RHS has a value lambda somewhere under ticks/type lambdas
    is_lam = has_lam rhs

    has_lam (Tick _ e) = has_lam e
    has_lam (Lam b e)  = isId b || has_lam e
    has_lam _          = False

    init_cheap_app :: CheapAppFun
    init_cheap_app fn n_val_args
      | fn == bndr = True   -- On the first pass, this binder gets infinite arity
      | otherwise  = isCheapApp fn n_val_args

    -- Iterate until the arity stabilises; it can only decrease,
    -- and never below old_arity, so the loop terminates.
    go :: (Arity, Bool) -> (Arity, Bool)
    go cur_info@(cur_arity, _)
      | cur_arity <= old_arity = cur_info
      | new_arity == cur_arity = cur_info
      | otherwise = ASSERT( new_arity < cur_arity )
#if defined(DEBUG)
                    pprTrace "Exciting arity"
                       (vcat [ ppr bndr <+> ppr cur_arity <+> ppr new_arity
                             , ppr rhs])
#endif
                    go new_info
      where
        new_info@(new_arity, _) = get_arity cheap_app

        -- Self-calls are cheap only when under-saturated wrt the
        -- current arity estimate
        cheap_app :: CheapAppFun
        cheap_app fn n_val_args
          | fn == bndr = n_val_args < cur_arity
          | otherwise  = isCheapApp fn n_val_args

    -- One analysis pass under the given cheapness assumption
    get_arity :: CheapAppFun -> (Arity, Bool)
    get_arity cheap_app
      = case (arityType env rhs) of
          ABot n -> (n, True)

          ATop (os:oss) | isOneShotInfo os || is_lam
                    -> (1 + length oss, False)    -- Don't expand PAPs/thunks
          ATop _    -> (0, False)                 -- Note [Eta expanding thunks]
      where
        env = AE { ae_cheap_fn = mk_cheap_fn dflags cheap_app
                 , ae_ped_bot  = gopt Opt_PedanticBottoms dflags }
{-
Note [Arity analysis]
~~~~~~~~~~~~~~~~~~~~~
The motivating example for arity analysis is this:
f = \x. let g = f (x+1)
in \y. ...g...
What arity does f have? Really it should have arity 2, but a naive
look at the RHS won't see that. You need a fixpoint analysis which
says it has arity "infinity" the first time round.
This example happens a lot; it first showed up in Andy Gill's thesis,
fifteen years ago! It also shows up in the code for 'rnf' on lists
in #4138.
The analysis is easy to achieve because exprEtaExpandArity takes an
argument
type CheapFun = CoreExpr -> Maybe Type -> Bool
used to decide if an expression is cheap enough to push inside a
lambda. And exprIsCheapX in turn takes an argument
type CheapAppFun = Id -> Int -> Bool
which tells when an application is cheap. This makes it easy to
write the analysis loop.
The analysis is cheap-and-cheerful because it doesn't deal with
mutual recursion. But the self-recursive case is the important one.
Note [Eta expanding through dictionaries]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If the experimental -fdicts-cheap flag is on, we eta-expand through
dictionary bindings. This improves arities. Thereby, it also
means that full laziness is less prone to floating out the
application of a function to its dictionary arguments, which
can thereby lose opportunities for fusion. Example:
foo :: Ord a => a -> ...
foo = /\a \(d:Ord a). let d' = ...d... in \(x:a). ....
-- So foo has arity 1
f = \x. foo dInt $ bar x
The (foo DInt) is floated out, and makes ineffective a RULE
foo (bar x) = ...
One could go further and make exprIsCheap reply True to any
dictionary-typed expression, but that's more work.
Note [Eta expanding thunks]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
We don't eta-expand
* Trivial RHSs x = y
* PAPs x = map g
* Thunks f = case y of p -> \x -> blah
When we see
f = case y of p -> \x -> blah
should we eta-expand it? Well, if 'x' is a one-shot state token
then 'yes' because 'f' will only be applied once. But otherwise
we (conservatively) say no. My main reason is to avoid expanding
PAPs
f = g d ==> f = \x. g d x
because that might in turn make g inline (if it has an inline pragma),
which we might not want. After all, INLINE pragmas say "inline only
when saturated" so we don't want to be too gung-ho about saturating!
-}
arityLam :: Id -> ArityType -> ArityType
-- Account for one value lambda: it contributes the binder's
-- (state-hack-adjusted) one-shot info, or one more guaranteed
-- argument before divergence.
arityLam id at = case at of
  ATop as -> ATop (idStateHackOneShotInfo id : as)
  ABot n  -> ABot (n + 1)
floatIn :: Bool -> ArityType -> ArityType
-- We have something like (let x = E in b), where b has the given
-- arity type.  The Bool says whether E is cheap.
floatIn cheap at = case at of
  ABot n -> ABot n
  ATop as
    | cheap     -> ATop as
    | otherwise -> ATop (takeWhile isOneShotInfo as)
                   -- If E is not cheap, keep arity only for one-shots
arityApp :: ArityType -> Bool -> ArityType
-- Processing (fun arg) where the first argument is the ArityType
-- of fun; knock off one argument and behave like 'let'.
arityApp at cheap = case at of
  ABot 0        -> ABot 0
  ABot n        -> ABot (n - 1)
  ATop []       -> ATop []
  ATop (_ : as) -> floatIn cheap (ATop as)
andArityType :: ArityType -> ArityType -> ArityType  -- Used for branches of a 'case'
-- Combine the analyses of two case branches.
andArityType (ABot n1) (ABot n2) = ABot (n1 `max` n2)  -- Note [ABot branches: use max]
andArityType (ATop as) (ABot _)  = ATop as
andArityType (ABot _)  (ATop bs) = ATop bs
andArityType (ATop as) (ATop bs) = ATop (zip_shots as bs)
  where -- See Note [Combining case branches]
    zip_shots (a : as') (b : bs') = (a `bestOneShot` b) : zip_shots as' bs'
    zip_shots []        bs'       = takeWhile isOneShotInfo bs'
    zip_shots as'       []        = takeWhile isOneShotInfo as'
{- Note [ABot branches: use max]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider case x of
True -> \x. error "urk"
False -> \xy. error "urk2"
Remember: ABot n means "if you apply to n args, it'll definitely diverge".
So we need (ABot 2) for the whole thing, the /max/ of the ABot arities.
Note [Combining case branches]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
go = \x. let z = go e0
go2 = \x. case x of
True -> z
False -> \s(one-shot). e1
in go2 x
We *really* want to eta-expand go and go2.
When combining the branches of the case we have
ATop [] `andAT` ATop [OneShotLam]
and we want to get ATop [OneShotLam]. But if the inner
lambda wasn't one-shot we don't want to do this.
(We need a proper arity analysis to justify that.)
So we combine the best of the two branches, on the (slightly dodgy)
basis that if we know one branch is one-shot, then they all must be.
Note [Arity trimming]
~~~~~~~~~~~~~~~~~~~~~
Consider ((\x y. blah) |> co), where co :: (Int->Int->Int) ~ (Int -> F a) , and
F is some type family.
Because of Note [exprArity invariant], item (2), we must return with arity at
most 1, because typeArity (Int -> F a) = 1. So we have to trim the result of
calling arityType on (\x y. blah). Failing to do so, and hence breaking the
exprArity invariant, led to #5441.
How to trim? For ATop, it's easy. But we must take great care with ABot.
Suppose the expression was (\x y. error "urk"), we'll get (ABot 2). We
absolutely must not trim that to (ABot 1), because that claims that
((\x y. error "urk") |> co) diverges when given one argument, which it
absolutely does not. And Bad Things happen if we think something returns bottom
when it doesn't (#16066).
So, do not reduce the 'n' in (ABot n); rather, switch (conservatively) to ATop.
Historical note: long ago, we unconditionally switched to ATop when we
encountered a cast, but that is far too conservative: see #5475
-}
---------------------------
type CheapFun = CoreExpr -> Maybe Type -> Bool
        -- How to decide if an expression is cheap
        -- If the Maybe is Just, the type is the type
        -- of the expression; Nothing means "don't know"

-- | Read-only environment threaded through 'arityType'.
data ArityEnv
  = AE { ae_cheap_fn :: CheapFun
       , ae_ped_bot  :: Bool       -- True <=> be pedantic about bottoms
       }
-- | The compositional arity analysis proper.  Equation order matters:
-- the final catch-all handles literals, coercions, and Type arguments.
arityType :: ArityEnv -> CoreExpr -> ArityType

arityType env (Cast e co)
  = case arityType env e of
      ATop os -> ATop (take co_arity os)
      -- See Note [Arity trimming]
      ABot n | co_arity < n -> ATop (replicate co_arity noOneShotInfo)
             | otherwise    -> ABot n
  where
    co_arity = length (typeArity (coercionRKind co))
    -- See Note [exprArity invariant] (2); must be true of
    -- arityType too, since that is how we compute the arity
    -- of variables, and they in turn affect result of exprArity
    -- #5441 is a nice demo
    -- However, do make sure that ATop -> ATop and ABot -> ABot!
    -- Casts don't affect that part. Getting this wrong provoked #5475

arityType _ (Var v)
  | strict_sig <- idStrictness v
  , not $ isTopSig strict_sig
  , (ds, res) <- splitStrictSig strict_sig
  , let arity = length ds
  = if isBotRes res then ABot arity
                    else ATop (take arity one_shots)
  | otherwise
  = ATop (take (idArity v) one_shots)
  where
    one_shots :: [OneShotInfo]  -- One-shot-ness derived from the type
    one_shots = typeArity (idType v)

        -- Lambdas; increase arity
arityType env (Lam x e)
  | isId x    = arityLam x (arityType env e)
  | otherwise = arityType env e

        -- Applications; decrease arity, except for types
arityType env (App fun (Type _))
  = arityType env fun
arityType env (App fun arg )
  = arityApp (arityType env fun) (ae_cheap_fn env arg Nothing)

        -- Case/Let; keep arity if either the expression is cheap
        -- or it's a 1-shot lambda
        -- The former is not really right for Haskell
        --      f x = case x of { (a,b) -> \y. e }
        --  ===>
        --      f x y = case x of { (a,b) -> e }
        -- The difference is observable using 'seq'
        --
arityType env (Case scrut _ _ alts)
  | exprIsBottom scrut || null alts
  = ABot 0      -- Do not eta expand
                -- See Note [Dealing with bottom (1)]
  | otherwise
  = case alts_type of
      ABot n | n>0        -> ATop []   -- Don't eta expand
             | otherwise  -> ABot 0    -- if RHS is bottoming
                                       -- See Note [Dealing with bottom (2)]

      ATop as | not (ae_ped_bot env)   -- See Note [Dealing with bottom (3)]
              , ae_cheap_fn env scrut Nothing -> ATop as
              | exprOkForSpeculation scrut    -> ATop as
              | otherwise                     -> ATop (takeWhile isOneShotInfo as)
  where
    alts_type = foldr1 andArityType [arityType env rhs | (_,_,rhs) <- alts]

arityType env (Let b e)
  = floatIn (cheap_bind b) (arityType env e)
  where
    cheap_bind (NonRec b e) = is_cheap (b,e)
    cheap_bind (Rec prs)    = all is_cheap prs
    is_cheap (b,e) = ae_cheap_fn env e (Just (idType b))

arityType env (Tick t e)
  | not (tickishIsCode t)     = arityType env e

arityType _ _ = vanillaArityType
{-
%************************************************************************
%* *
The main eta-expander
%* *
%************************************************************************
We go for:
f = \x1..xn -> N ==> f = \x1..xn y1..ym -> N y1..ym
(n >= 0)
where (in both cases)
* The xi can include type variables
* The yi are all value variables
* N is a NORMAL FORM (i.e. no redexes anywhere)
wanting a suitable number of extra args.
The biggest reason for doing this is for cases like
f = \x -> case x of
True -> \y -> e1
False -> \y -> e2
Here we want to get the lambdas together. A good example is the nofib
program fibheaps, which gets 25% more allocation if you don't do this
eta-expansion.
We may have to sandwich some coerces between the lambdas
to make the types work. exprEtaExpandArity looks through coerces
when computing arity; and etaExpand adds the coerces as necessary when
actually computing the expansion.
Note [No crap in eta-expanded code]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The eta expander is careful not to introduce "crap". In particular,
given a CoreExpr satisfying the 'CpeRhs' invariant (in CorePrep), it
returns a CoreExpr satisfying the same invariant. See Note [Eta
expansion and the CorePrep invariants] in CorePrep.
This means the eta-expander has to do a bit of on-the-fly
simplification but it's not too hard. The alternative, of relying on
a subsequent clean-up phase of the Simplifier to de-crapify the result,
means you can't really use it in CorePrep, which is painful.
Note [Eta expansion for join points]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The no-crap rule is very tiresome to guarantee when
we have join points. Consider eta-expanding
let j :: Int -> Int -> Bool
j x = e
in b
The simple way is
\(y::Int). (let j x = e in b) y
The no-crap way is
\(y::Int). let j' :: Int -> Bool
j' x = e y
in b[j'/j] y
where I have written to stress that j's type has
changed. Note that (of course!) we have to push the application
inside the RHS of the join as well as into the body. AND if j
has an unfolding we have to push it into there too. AND j might
be recursive...
So for now I'm abandoning the no-crap rule in this case. I think
that for the use in CorePrep it really doesn't matter; and if
it does, then CoreToStg.myCollectArgs will fall over.
(Moreover, I think that casts can make the no-crap rule fail too.)
Note [Eta expansion and SCCs]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Note that SCCs are not treated specially by etaExpand. If we have
etaExpand 2 (\x -> scc "foo" e)
= (\xy -> (scc "foo" e) y)
So the costs of evaluating 'e' (not 'e y') are attributed to "foo"
Note [Eta expansion and source notes]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
CorePrep puts floatable ticks outside of value applications, but not
type applications. As a result we might be trying to eta-expand an
expression like
(src<...> v) @a
which we want to lead to code like
\x -> src<...> v @a x
This means that we need to look through type applications and be ready
to re-add floats on the top.
-}
-- | @etaExpand n e@ returns an expression with
-- the same meaning as @e@, but with arity @n@.
--
-- Given:
--
-- > e' = etaExpand n e
--
-- We should have that:
--
-- > ty = exprType e = exprType e'
etaExpand :: Arity            -- ^ Result should have this number of value args
          -> CoreExpr         -- ^ Expression to expand
          -> CoreExpr
-- etaExpand arity e = res
-- Then 'res' has at least 'arity' lambdas at the top
--
-- etaExpand deals with for-alls. For example:
--              etaExpand 1 E
-- where  E :: forall a. a -> a
-- would return
--      (/\b. \y::a -> E b y)
--
-- It deals with coerces too, though they are now rare
-- so perhaps the extra code isn't worth it
etaExpand n orig_expr
  = go n orig_expr
  where
      -- Strip off existing lambdas and casts before handing off to mkEtaWW
      -- Note [Eta expansion and SCCs]
    go 0 expr = expr
    go n (Lam v body) | isTyVar v = Lam v (go n     body)  -- type lambdas are free
                      | otherwise = Lam v (go (n-1) body)
    go n (Cast expr co) = Cast (go n expr) co
    go n expr
      = -- pprTrace "ee" (vcat [ppr orig_expr, ppr expr, ppr etas]) $
        retick $ etaInfoAbs etas (etaInfoApp subst' sexpr etas)
      where
        in_scope = mkInScopeSet (exprFreeVars expr)
        (in_scope', etas) = mkEtaWW n (ppr orig_expr) in_scope (exprType expr)
        subst' = mkEmptySubst in_scope'

        -- Find ticks behind type apps.
        -- See Note [Eta expansion and source notes]
        (expr', args) = collectArgs expr
        (ticks, expr'') = stripTicksTop tickishFloatable expr'
        sexpr = foldl' App expr'' args
        retick expr = foldr mkTick expr ticks
                                -- Abstraction    Application
--------------
-- | One step of eta-expansion info: either a fresh binder to abstract
-- over and apply to, or a coercion to sandwich in.
data EtaInfo = EtaVar Var       -- /\a. []        [] a
                                -- \x.  []        [] x
             | EtaCo Coercion   -- [] |> sym co   [] |> co

instance Outputable EtaInfo where
  ppr (EtaVar v) = text "EtaVar" <+> ppr v
  ppr (EtaCo co) = text "EtaCo"  <+> ppr co
pushCoercion :: Coercion -> [EtaInfo] -> [EtaInfo]
-- Prepend a coercion step, merging it with an existing leading
-- coercion and dropping the result if it is reflexive.
pushCoercion new_co (EtaCo old_co : eis)
  = let merged = new_co `mkTransCo` old_co
    in if isReflCo merged then eis else EtaCo merged : eis
pushCoercion new_co eis
  = EtaCo new_co : eis
--------------
etaInfoAbs :: [EtaInfo] -> CoreExpr -> CoreExpr
-- Wrap the expression in the abstractions described by the EtaInfos,
-- outermost first: EtaVar becomes a lambda, EtaCo a (sym co) cast.
etaInfoAbs eis expr = foldr wrap expr eis
  where
    wrap (EtaVar v) e = Lam v e
    wrap (EtaCo co) e = Cast e (mkSymCo co)
--------------
etaInfoApp :: Subst -> CoreExpr -> [EtaInfo] -> CoreExpr
-- (etaInfoApp s e eis) returns something equivalent to
--             ((substExpr s e) `appliedto` eis)
-- Equation order matters: the join-point guard must be tried before
-- the general application case.

-- Beta-reduce on the fly: a lambda meets its matching EtaVar
etaInfoApp subst (Lam v1 e) (EtaVar v2 : eis)
  = etaInfoApp (CoreSubst.extendSubstWithVar subst v1 v2) e eis

etaInfoApp subst (Cast e co1) eis
  = etaInfoApp subst e (pushCoercion co' eis)
  where
    co' = CoreSubst.substCo subst co1

-- Push the application inside every case alternative
etaInfoApp subst (Case e b ty alts) eis
  = Case (subst_expr subst e) b1 ty' alts'
  where
    (subst1, b1) = substBndr subst b
    alts' = map subst_alt alts
    ty'   = etaInfoAppTy (CoreSubst.substTy subst ty) eis
    subst_alt (con, bs, rhs) = (con, bs', etaInfoApp subst2 rhs eis)
      where
        (subst2,bs') = substBndrs subst1 bs

etaInfoApp subst (Let b e) eis
  | not (isJoinBind b)
    -- See Note [Eta expansion for join points]
  = Let b' (etaInfoApp subst' e eis)
  where
    (subst', b') = substBindSC subst b

etaInfoApp subst (Tick t e) eis
  = Tick (substTickish subst t) (etaInfoApp subst e eis)

-- Applications of a join point: do not push EtaInfos inside
etaInfoApp subst expr _
  | (Var fun, _) <- collectArgs expr
  , Var fun' <- lookupIdSubst (text "etaInfoApp" <+> ppr fun) subst fun
  , isJoinId fun'
  = subst_expr subst expr

-- General case: substitute, then apply the EtaInfos left to right
etaInfoApp subst e eis
  = go (subst_expr subst e) eis
  where
    go e []               = e
    go e (EtaVar v : eis) = go (App e (varToCoreExpr v)) eis
    go e (EtaCo co : eis) = go (Cast e co) eis
--------------
etaInfoAppTy :: Type -> [EtaInfo] -> Type
-- If                    e :: ty
-- then   etaInfoApp e eis :: etaInfoAppTy ty eis
etaInfoAppTy ty eis = case eis of
  []              -> ty
  EtaVar v : rest -> etaInfoAppTy (applyTypeToArg ty (varToCoreExpr v)) rest
  EtaCo co : rest -> etaInfoAppTy (coercionRKind co) rest
--------------
-- | @mkEtaWW n _ fvs ty@ will compute the 'EtaInfo' necessary for eta-expanding
-- an expression @e :: ty@ to take @n@ value arguments, where @fvs@ are the
-- free variables of @e@.
--
-- Note that this function is entirely unconcerned about cost centres and other
-- semantically-irrelevant source annotations, so call sites must take care to
-- preserve that info. See Note [Eta expansion and SCCs].
mkEtaWW
  :: Arity
  -- ^ How many value arguments to eta-expand
  -> SDoc
  -- ^ The pretty-printed original expression, for warnings.
  -> InScopeSet
  -- ^ A super-set of the free vars of the expression to eta-expand.
  -> Type
  -> (InScopeSet, [EtaInfo])
  -- ^ The variables in 'EtaInfo' are fresh wrt. to the incoming 'InScopeSet'.
  -- The outgoing 'InScopeSet' extends the incoming 'InScopeSet' with the
  -- fresh variables in 'EtaInfo'.
mkEtaWW orig_n ppr_orig_expr in_scope orig_ty
  = go orig_n empty_subst orig_ty []
  where
    empty_subst = mkEmptyTCvSubst in_scope

    go :: Arity              -- Number of value args to expand to
       -> TCvSubst -> Type   -- We are really looking at subst(ty)
       -> [EtaInfo]          -- Accumulating parameter
       -> (InScopeSet, [EtaInfo])
    go n subst ty eis       -- See Note [exprArity invariant]

       ----------- Done!  No more expansion needed
       | n == 0
       = (getTCvInScope subst, reverse eis)

       ----------- Forall types  (forall a. ty)
       | Just (tcv,ty') <- splitForAllTy_maybe ty
       , let (subst', tcv') = Type.substVarBndr subst tcv
       = let ((n_subst, n_tcv), n_n)
               -- We want to have at least 'n' lambdas at the top.
               -- If tcv is a tyvar, it corresponds to one Lambda (/\).
               --   And we won't reduce n.
               -- If tcv is a covar, we could eta-expand the expr with one
               --   lambda \co:ty. e co. In this case we generate a new variable
               --   of the coercion type, update the scope, and reduce n by 1.
               | isTyVar tcv = ((subst', tcv'), n)
               | otherwise   = (freshEtaId n subst' (varType tcv'), n-1)
           -- Avoid free vars of the original expression
         in go n_n n_subst ty' (EtaVar n_tcv : eis)

       ----------- Function types  (t1 -> t2)
       | Just (arg_ty, res_ty) <- splitFunTy_maybe ty
       , not (isTypeLevPoly arg_ty)
          -- See Note [Levity polymorphism invariants] in CoreSyn
          -- See also test case typecheck/should_run/EtaExpandLevPoly
       , let (subst', eta_id') = freshEtaId n subst arg_ty
           -- Avoid free vars of the original expression
       = go (n-1) subst' res_ty (EtaVar eta_id' : eis)

       ----------- Newtypes
       -- Given this:
       --      newtype T = MkT ([T] -> Int)
       -- Consider eta-expanding this
       --      eta_expand 1 e T
       -- We want to get
       --      coerce T (\x::[T] -> (coerce ([T]->Int) e) x)
       | Just (co, ty') <- topNormaliseNewType_maybe ty
       , let co' = Coercion.substCo subst co
             -- Remember to apply the substitution to co (#16979)
             -- (or we could have applied to ty, but then
             --  we'd have had to zap it for the recursive call)
       = go n subst ty' (pushCoercion co' eis)

       | otherwise       -- We have an expression of arity > 0,
                         -- but its type isn't a function, or a binder
                         -- is levity-polymorphic
       = WARN( True, (ppr orig_n <+> ppr orig_ty) $$ ppr_orig_expr )
         (getTCvInScope subst, reverse eis)
         -- This *can* legitimately happen:
         -- e.g.  coerce Int (\x. x)  Essentially the programmer is
         -- playing fast and loose with types (Happy does this a lot).
         -- So we simply decline to eta-expand.  Otherwise we'd end up
         -- with an explicit lambda having a non-function type
--------------
-- Don't use short-cutting substitution - we may be changing the types of join
-- points, so applying the in-scope set is necessary
-- TODO Check if we actually *are* changing any join points' types

-- | Full (non-short-cutting) substitution used by the eta expander.
subst_expr :: Subst -> CoreExpr -> CoreExpr
subst_expr = substExpr (text "CoreArity:substExpr")
--------------
-- | Split an expression into the given number of binders and a body,
-- eta-expanding if necessary. Counts value *and* type binders.
etaExpandToJoinPoint :: JoinArity -> CoreExpr -> ([CoreBndr], CoreExpr)
etaExpandToJoinPoint join_arity expr
  = peel join_arity [] expr
  where
    -- Strip existing lambdas; once we run out, manufacture the rest.
    peel 0 acc e         = (reverse acc, e)
    peel n acc (Lam b e) = peel (n-1) (b : acc) e
    peel n acc e         = case etaBodyForJoinPoint n e of
                             (bs, e') -> (reverse acc ++ bs, e')
-- | Eta-expand a rewrite rule so its LHS has exactly the join arity's
-- number of arguments; built-in rules cannot be expanded.
etaExpandToJoinPointRule :: JoinArity -> CoreRule -> CoreRule
etaExpandToJoinPointRule _ rule@(BuiltinRule {})
  = WARN(True, (sep [text "Can't eta-expand built-in rule:", ppr rule]))
      -- How did a local binding get a built-in rule anyway? Probably a plugin.
    rule
etaExpandToJoinPointRule join_arity rule@(Rule { ru_bndrs = bndrs, ru_rhs = rhs
                                               , ru_args  = args })
  | need_args == 0
  = rule
  | need_args < 0
  = pprPanic "etaExpandToJoinPointRule" (ppr join_arity $$ ppr rule)
  | otherwise
  = rule { ru_bndrs = bndrs ++ new_bndrs, ru_args = args ++ new_args
         , ru_rhs = new_rhs }
  where
    need_args = join_arity - length args
    -- Fresh binders are appended to both the LHS args and the RHS
    (new_bndrs, new_rhs) = etaBodyForJoinPoint need_args rhs
    new_args = varsToCoreExprs new_bndrs
-- Adds as many binders as asked for; assumes expr is not a lambda
etaBodyForJoinPoint :: Int -> CoreExpr -> ([CoreBndr], CoreExpr)
etaBodyForJoinPoint need_args body
  = go need_args (exprType body) (init_subst body) [] body
  where
    go 0 _  _     rev_bs e
      = (reverse rev_bs, e)
    go n ty subst rev_bs e
      -- Forall: add a type binder and apply the body to it
      | Just (tv, res_ty) <- splitForAllTy_maybe ty
      , let (subst', tv') = Type.substVarBndr subst tv
      = go (n-1) res_ty subst' (tv' : rev_bs) (e `App` varToCoreExpr tv')
      -- Function: add a fresh value binder and apply the body to it
      | Just (arg_ty, res_ty) <- splitFunTy_maybe ty
      , let (subst', b) = freshEtaId n subst arg_ty
      = go (n-1) res_ty subst' (b : rev_bs) (e `App` Var b)
      | otherwise
      = pprPanic "etaBodyForJoinPoint" $ int need_args $$
                                         ppr body $$ ppr (exprType body)

    init_subst e = mkEmptyTCvSubst (mkInScopeSet (exprFreeVars e))
--------------
freshEtaId :: Int -> TCvSubst -> Type -> (TCvSubst, Id)
-- Make a fresh Id, with specified type (after applying substitution)
-- It should be "fresh" in the sense that it's not in the in-scope set
-- of the TvSubstEnv; and it should itself then be added to the in-scope
-- set of the TvSubstEnv
--
-- The Int is just a reasonable starting point for generating a unique;
-- it does not necessarily have to be unique itself.
freshEtaId n subst ty = (subst_plus_id, fresh_id)
  where
    inst_ty  = Type.substTyUnchecked subst ty
    fresh_id = uniqAway (getTCvInScope subst) $
               mkSysLocalOrCoVar (fsLit "eta") (mkBuiltinUnique n) inst_ty
               -- "OrCoVar" since this can be used to eta-expand
               -- coercion abstractions
    subst_plus_id = extendTCvInScope subst fresh_id
| sdiehl/ghc | compiler/coreSyn/CoreArity.hs | bsd-3-clause | 44,032 | 0 | 16 | 11,940 | 5,627 | 2,911 | 2,716 | 340 | 7 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TemplateHaskell #-}
module Autonix.Src (Src, mkSrc, url, sha256, name) where
import Control.Lens
import Data.Aeson
import Data.Aeson.Types
import Data.Text (Text)
import GHC.Generics
-- | A fetchable source: download URL, expected hash, and package name.
-- Fields start with @_@ so 'makeLenses' can derive lenses and
-- 'srcOptions' can strip the prefix for JSON (de)serialization.
data Src =
  Src { _url :: FilePath     -- ^ download location
      , _sha256 :: Text      -- ^ expected SHA-256 of the fetched file
      , _name :: Text        -- ^ package name
      }
  deriving Generic

makeLenses ''Src
-- | Aeson options that strip the leading underscore from record field
-- names, so @_url@ serializes as @"url"@.  Uses the total @drop 1@
-- instead of the partial 'tail', which would crash on an empty name.
srcOptions :: Options
srcOptions = defaultOptions { fieldLabelModifier = drop 1 }
-- | Decode JSON objects whose keys lack the @_@ prefix (see 'srcOptions').
instance FromJSON Src where
  parseJSON = genericParseJSON srcOptions

-- | Encode to JSON objects whose keys lack the @_@ prefix (see 'srcOptions').
instance ToJSON Src where
  toJSON = genericToJSON srcOptions
-- | Build a 'Src' from url, sha256 hash, and package name.
mkSrc :: FilePath -> Text -> Text -> Src
mkSrc u h n = Src u h n
| ttuegel/autonix-deps | src/Autonix/Src.hs | bsd-3-clause | 625 | 0 | 8 | 120 | 166 | 97 | 69 | 22 | 1 |
import DSL.Structured.Api
-- Three example arguments: premises, exceptions, conclusion.
argument1, argument2, argument3 :: Argument
argument1 = argument ["kill", "intent"] [] "murder"
argument2 = argument ["witness"] ["unreliable"] "intent"
argument3 = argument ["witness2"] ["unreliable2"] "-intent"

-- All three arguments bundled into one set of threads.
threads1 :: Threads
threads1 = threads [argument1, argument2, argument3]
importance :: Importance
-- Weight the audience assigns to each known argument;
-- anything else is a programming error.
importance arg
  | arg == argument1 = 0.8
  | arg == argument2 = 0.3
  | arg == argument3 = 0.8
  | otherwise        = error "no importance assigned"
-- Propositions the audience takes for granted.
assumptions :: [Proposition]
assumptions = map proposition ["kill", "witness", "witness2", "unreliable2"]

-- An audience is its assumptions paired with its importance weighting.
audience :: Audience
audience = (assumptions, importance)
semantics :: Semantics
-- Proof standard per proposition: "intent" requires the strongest
-- standard; everything else needs only a scintilla of evidence.
semantics s = case s of
  (_, "intent") -> BeyondReasonableDoubt
  _             -> Scintilla
-- The fully assembled structured-argumentation model.
example :: Model
example = Strct (threads1, audience, semantics)
| shingoOKAWA/hsarg-haskell | src/DSL/Structured/Test.hs | bsd-3-clause | 903 | 0 | 8 | 184 | 259 | 143 | 116 | 21 | 1 |
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.Raw.SGIX.IglooInterface
-- Copyright : (c) Sven Panne 2015
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
-- The <https://www.opengl.org/registry/specs/SGIX/igloo_interface.txt SGIX_igloo_interface> extension.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.Raw.SGIX.IglooInterface (
-- * Functions
glIglooInterfaceSGIX
) where
import Graphics.Rendering.OpenGL.Raw.Functions
| phaazon/OpenGLRaw | src/Graphics/Rendering/OpenGL/Raw/SGIX/IglooInterface.hs | bsd-3-clause | 668 | 0 | 4 | 78 | 37 | 31 | 6 | 3 | 0 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module EFA2.Signal.TH where
import Control.Monad (liftM)
import Language.Haskell.TH
type Val = Double
-- | Conversion between a container of 'Val' and a sample wrapper @dim@
-- around that container.
class Sample cont dim where
      fromSample :: dim cont -> cont Val  -- ^ unwrap a sample
      toSample :: cont Val -> dim cont    -- ^ wrap a container into a sample
-- | Run a template-haskell quotation and print its pretty-printed form.
printQ :: Ppr a => Q a -> IO ()
printQ quotation = do
  result <- runQ quotation
  putStrLn (pprint result)
-- | Names of the sample newtypes generated by this module.
samples :: [String]
samples = ["PSample", "NSample"]
-- | The same names as template-haskell 'Name's.
sampleNames :: [Name]
sampleNames = [mkName s | s <- samples]
-- TH 'Name's for the classes and type variables used by the splices below.
showN, eqN, numN, contN, valN :: Name
showN = mkName "Show"
eqN = mkName "Eq"
numN = mkName "Num" -- not used by the splices visible in this module
contN = mkName "cont"
valN = mkName "Val"
-- | Build instance declarations of the form
-- @instance C (cont Val) => C (S cont)@ for C in {Show, Eq} and the given
-- newtype name @s@.
-- NOTE(review): uses the old TH AST ('ClassP' contexts) — tied to the
-- template-haskell version this package was written against.
standaloneDerive :: Name -> Q [Dec]
standaloneDerive s = return (concatMap f [showN, eqN])
    where f x = [InstanceD [ClassP x [AppT (VarT contN) (ConT valN)]] (AppT (ConT x) (AppT (ConT s) (VarT contN))) []]
-- | Derive Show and Eq instances for every given newtype name.
mkDeriving :: [Name] -> Q [Dec]
mkDeriving = fmap concat . mapM standaloneDerive
-- | Generate @newtype <Str> cont = <Str> { un<Str> :: cont Val }@, i.e. a
-- sample wrapper around a container of 'Val', for the given type name.
-- NOTE(review): uses the TH AST of its era ('NotStrict', 5-argument
-- 'NewtypeD') — not portable to newer template-haskell versions.
sampleNewtype :: String -> Q [Dec]
sampleNewtype str = do
  let ty = mkName str
      getter = mkName ("un" ++ str)
  var <- newName "cont"
  return $ [NewtypeD [] ty [PlainTV var] (RecC ty [(getter, NotStrict, AppT (VarT var) (ConT valN))]) []]
-- | Generate a sample newtype for every name in the list.
mkNewtypes :: [String] -> Q [Dec]
mkNewtypes = fmap concat . mapM sampleNewtype
-- | Generate @instance Sample cont <T>@ for a previously generated sample
-- newtype: 'fromSample' unwraps the constructor, 'toSample' applies it.
-- Works by reifying both the newtype and the Sample class, so the pattern
-- below depends on the class declaring exactly two method signatures in
-- order (fromSample, toSample).
sampleInstance :: Name -> Q [Dec]
sampleInstance iname = do
  TyConI (NewtypeD _ name _tyvars (RecC constr _) _) <- reify iname
  ClassI (ClassD _ cname _ctyvars _ cdecs) _ <- reify (mkName "Sample")
  var <- newName "x"
  let [SigD fname _, SigD tname _] = cdecs
      header = AppT (AppT (ConT cname) (VarT (mkName "cont"))) (ConT name)
      vare = VarE var
      varp = VarP var
      -- fromSample (Constr x) = x
      fromf = FunD fname [Clause [ConP constr [varp]] (NormalB vare) []]
      -- toSample x = Constr x
      tof = FunD tname [Clause [varp] (NormalB (AppE (ConE constr) vare)) []]
  return $ [InstanceD [] header [fromf, tof]]
-- | Generate a 'Sample' instance for every given newtype name.
mkInstances :: [Name] -> Q [Dec]
mkInstances = fmap concat . mapM sampleInstance
| energyflowanalysis/efa-2.1 | attic/src/EFA2/Signal/TH.hs | bsd-3-clause | 1,940 | 0 | 18 | 403 | 865 | 441 | 424 | 48 | 1 |
{-# LANGUAGE BangPatterns, TupleSections, Rank2Types #-}
module Evaluator.Deeds where
import StaticFlags
import Utilities
import Data.Ord (comparing)
-- | Number of unclaimed deeds. Invariant: always greater than or equal to 0
type Unclaimed = Int
-- | A deed supply shared amongst all expressions.  A plain 'Int' so that
-- claiming, releasing and apportioning are cheap arithmetic.
type Deeds = Int
-- | Try to claim @want@ deeds from the supply.  A negative @want@ is fine:
-- it simply releases deeds back.  Fails with 'Nothing' when the supply is
-- insufficient — unless deed checking is disabled via the 'dEEDS' flag.
claimDeeds :: Deeds -> Int -> Maybe Deeds
claimDeeds deeds want
  | not dEEDS || deeds >= want = Just (deeds - want)
  | otherwise = Nothing
-- | Splits up a number evenly across several partitions in proportions to weights given to those partitions.
--
-- > sum (apportion n weights) == n
--
-- Annoyingly, it is important that this works properly if n is negative as well -- these can occur
-- when we have turned off deed checking. I don't care about handling negative weights.
apportion :: Deeds -> [Deeds] -> [Deeds]
apportion _ [] = error "apportion: empty list"
apportion orig_n weighting
  | orig_n < 0 = map negate $ apportion (negate orig_n) weighting
  | otherwise = result
  where
    fracs :: [Rational]
    fracs = assertRender (text "apportion: must have at least one non-zero weight") (denominator /= 0) $
            map (\numerator -> fromIntegral numerator / denominator) weighting
      where denominator = fromIntegral (sum weighting)
    -- Here is the idea:
    --  1) Do one pass through the list of fractions
    --  2) Start by allocating the floor of the number of "n" that we should allocate to this weight of the fraction
    --  3) Accumulate the fractional pieces and the indexes that generated them
    --  4) Use circular programming to feed the list of fractional pieces that we actually allowed the allocation
    --     of back in to the one pass we are doing over the list
    -- NB: final_deserving_allowed is defined below from final_deserving,
    -- which this mapAccumL produces — laziness ties the knot.
    ((_, remaining, final_deserving), result) = mapAccumL go (0 :: Int, orig_n, []) fracs
    go (i, n, deserving) frac = ((i + 1, n - whole, (i, remainder) : deserving),
                                 whole + if i `elem` final_deserving_allowed then 1 else 0)
      where (whole, remainder) = properFraction (frac * fromIntegral orig_n)
    -- We should prefer to allocate pieces to those bits of the fraction where the error (i.e. the fractional part) is greatest.
    -- We cannot allocate more of these "fixup" pieces than we had "n" left at the end of the first pass.
    final_deserving_allowed = map fst (take remaining (sortBy (comparing (Down . snd)) final_deserving))
-- | 'noChange' holds when two deed counts are equal; 'noGain' when the
-- first is at least as large as the second.
noChange, noGain :: Deeds -> Deeds -> Bool
noChange before after = before == after
noGain before after = before >= after
| batterseapower/chsc | Evaluator/Deeds.hs | bsd-3-clause | 2,601 | 0 | 15 | 577 | 470 | 265 | 205 | 26 | 2 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE TypeFamilies #-}
-- | 'Patch'es on 'Map' that consist only of insertions (including overwrites)
-- and deletions
module Reflex.Patch.Map where
import Reflex.Patch.Class
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe
import Data.Semigroup
-- | A set of changes to a 'Map'. Any element may be inserted/updated or
-- deleted. Insertions are represented as values wrapped in 'Just', while
-- deletions are represented as 'Nothing's.  Keys absent from the patch are
-- left untouched by 'apply'.
newtype PatchMap k v = PatchMap { unPatchMap :: Map k (Maybe v) }
  deriving (Show, Read, Eq, Ord)
-- | Apply the insertions or deletions to a given 'Map'.
instance Ord k => Patch (PatchMap k v) where
  type PatchTarget (PatchMap k v) = Map k v
  {-# INLINABLE apply #-}
  -- Split the patch into the Just entries (insertions, left-biased over the
  -- old map) and the Nothing entries (deletions, removed from the old map).
  apply (PatchMap p) old = Just $! insertions `Map.union` (old `Map.difference` deletions) --TODO: return Nothing sometimes --Note: the strict application here is critical to ensuring that incremental merges don't hold onto all their prerequisite events forever; can we make this more robust?
    where insertions = Map.mapMaybeWithKey (const id) p
          deletions = Map.mapMaybeWithKey (const nothingToJust) p
          nothingToJust = \case
            Nothing -> Just ()
            Just _ -> Nothing
-- | @a <> b@ applies the changes of @b@ and then those of @a@: for a key
-- present in both patches the left operand wins, matching the left-biased
-- union on the underlying maps.
instance Ord k => Semigroup (PatchMap k v) where
  PatchMap a <> PatchMap b = PatchMap (Map.union a b)
  -- PatchMap is idempotent, so stimes n is the identity for every n.
  stimes = stimesIdempotentMonoid
-- | The empty 'PatchMap' contains no insertions or deletions.
instance Ord k => Monoid (PatchMap k v) where
  mempty = PatchMap Map.empty
  mappend = (<>)
-- | Mapping over a 'PatchMap' transforms every value it would insert;
-- deletions ('Nothing' entries) pass through unchanged.
instance Functor (PatchMap k) where
  fmap f (PatchMap m) = PatchMap (Map.map (fmap f) m)
-- | All values the patch will insert into the target 'Map'.
patchMapNewElements :: PatchMap k v -> [v]
patchMapNewElements (PatchMap p) = [v | Just v <- Map.elems p]
-- | All key/value pairs the patch will insert into the target 'Map'.
patchMapNewElementsMap :: PatchMap k v -> Map k v
patchMapNewElementsMap (PatchMap p) = Map.mapMaybeWithKey (const id) p
| Saulzar/reflex | src/Reflex/Patch/Map.hs | bsd-3-clause | 2,374 | 0 | 12 | 466 | 467 | 254 | 213 | 30 | 1 |
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-|
Module : Numeric.AERN.RmToRn.Plot.Simple
Description : simple utilities for plotting functions
Copyright : (c) Michal Konecny
License : BSD3
Maintainer : mikkonecny@gmail.com
Stability : experimental
Portability : portable
Simple utilities for plotting functions.
-}
module Numeric.AERN.RmToRn.Plot.Simple
(
plotFns,
simpleFnMetaData
)
where
import Numeric.AERN.RmToRn.Plot.FnView.FnData
import Numeric.AERN.RmToRn.Plot.FnView.New
import Numeric.AERN.RmToRn.Plot.Params
import Numeric.AERN.RmToRn.Plot.CairoDrawable
import Numeric.AERN.RmToRn
import qualified Numeric.AERN.RealArithmetic.RefinementOrderRounding as ArithInOut
--import Numeric.AERN.RealArithmetic.RefinementOrderRounding.Operators
import qualified Numeric.AERN.RefinementOrder as RefOrd
import Numeric.AERN.RealArithmetic.ExactOps
import qualified Graphics.UI.Gtk as Gtk
import Control.Concurrent.STM
{-|
To make the following plotting code work, the file FnView.glade
has to be available in the current directory. The master copy of this file
is in the root folder of the aern-realfn-plot-gtk package.
-}
-- | Open a GTK window plotting the given groups of functions and block in
-- the GTK main loop ('Gtk.mainGUI').  Requires FnView.glade in the current
-- directory (see the comment above).
-- NOTE(review): partial on empty input — the @((sampleFn :_) :_) = fns@
-- binding fails when there are no groups or the first group is empty.
plotFns ::
    (fnInfo ~ (String, FnPlotStyle, Bool),
     CairoDrawableFn f,
     ArithInOut.RoundedReal (Domain f),
     RefOrd.IntervalLike (Domain f),
     CanEvaluate f,
     RefOrd.PartialComparison (Domain f),
     Show f, Show (Domain f), Show (Var f))
    =>
    [(String, [(fnInfo, f)])] ->
    IO ()
plotFns fnGroups =
    do
    -- enable multithreaded GUI:
    _ <- Gtk.unsafeInitGUIForThreadedRTS
    -- shared mutable state read by the FnView widget
    fnDataTV <- atomically $ newTVar $ FnData $ addPlotVar fns
    fnMetaTV <- atomically $ newTVar $ fnmeta
    _ <- new sampleFn effDrawFn effCF effEval (fnDataTV, fnMetaTV) Nothing
    -- Concurrent.forkIO $ signalFn fnMetaTV
    Gtk.mainGUI
    where
    ((sampleFn :_) :_) = fns
    sampleCF = getSampleDomValue sampleFn
    -- default effort indicators derived from the first function
    effDrawFn = cairoDrawFnDefaultEffort sampleFn
    effEval = evaluationDefaultEffort sampleFn
    effCF = ArithInOut.roundedRealDefaultEffort sampleCF
    --effCF = (100, (100,())) -- MPFR
    fnmeta =
        simpleFnMetaData sampleFn rect Nothing 100 (show var) groupInfo
        where
        rect = Rectangle valHI valLO domL domR
        (domL, domR) = RefOrd.getEndpointsOut dom
    _varDoms@((var, dom) : _) = getVarDoms sampleFn
    groupInfo = zip groupNames fnInfos
    -- crop the value axis to the union of all function ranges
    (valLO, valHI) = RefOrd.getEndpointsOut rangeUnion
    rangeUnion = foldl1 (RefOrd.</\>) ranges
    ranges = concat $ map (map getRange) fns
    getRange fn = evalAtPointOut (getDomainBox fn) fn
    (fnInfos, fns) = unzip $ map unzip groupFns
    (groupNames, groupFns) = unzip fnGroups
-- | Pair each function with the variable to plot over — its first domain
-- variable.  NOTE(review): partial — the @(plotVar : _)@ pattern fails for
-- a function with no variables.
addPlotVar ::
    (HasDomainBox f)
    =>
    [[f]] -> [[(GraphOrParamPlotFn f, Var f)]]
addPlotVar fns =
    map (map addV) fns
    where
    addV fn = (GraphPlotFn [fn], plotVar)
        where
        (plotVar : _) = vars
        vars = map fst $ getVarDoms fn
-- | Build 'FnMetaData' for a simple plot: group and function names, styles
-- and initially-enabled flags taken from @groups@, with a linear coordinate
-- system cropped to @rect@.
simpleFnMetaData ::
    (fnInfo ~ (String, FnPlotStyle, Bool),
     HasZero (Domain t), HasOne (Domain t), HasDomainBox t)
    =>
    t
    -> Rectangle (Domain f) -- ^ initial crop
    -> Maybe ColourRGBA -- ^ background colour
    -> Int -- ^ number of samples to take of each function per viewable region
    -> String -- ^ the name of the variable that ranges over the plotted function domain
    -> [(String, [fnInfo])] -- ^ information on plotted function groups (names, plot colours, whether shown initially)
    -> FnMetaData f
simpleFnMetaData sampleFn rect bgrColour samplesPerUnit domVarName (groups :: [(String, [fnInfo])]) =
    (defaultFnMetaData sampleFn)
    {
        dataFnGroupNames = map getGroupName groups,
        dataFnNames = mapGroupsFns getFnName,
        dataFnStyles = mapGroupsFns getFnStyle,
        dataDefaultActiveFns = mapGroupsFns getFnEnabled,
        dataDomName = domVarName,
        dataDomL = domL,
        dataDomR = domR,
        dataValLO = valLO,
        dataValHI = valHI,
        dataDefaultEvalPoint = domR,
        dataDefaultCanvasParams =
            (defaultCanvasParams sampleDom)
            {
                cnvprmCoordSystem = CoordSystemLinear rect,
                cnvprmBackgroundColour = bgrColour,
                cnvprmSamplesPerUnit = samplesPerUnit
            }
    }
    where
    (Rectangle valHI valLO domL domR) = rect
    sampleDom = getSampleDomValue sampleFn
    getGroupName (name, _) = name
    getGroupContent (_, content) = content
    -- Apply an accessor to every fnInfo, preserving the group structure.
    mapGroupsFns :: (fnInfo -> t) -> [[t]]
    mapGroupsFns f = map (map f . getGroupContent) groups
    getFnName (name, _, _) = name
    getFnStyle (_, style, _) = style
    getFnEnabled (_, _, enabled) = enabled
| michalkonecny/aern | aern-realfn-plot-gtk/src/Numeric/AERN/RmToRn/Plot/Simple.hs | bsd-3-clause | 4,970 | 0 | 14 | 1,327 | 1,112 | 623 | 489 | 95 | 1 |
module Language.Interpreter.Operators where
import Control.Monad.Except ( throwError )
import Data.Fixed ( mod' )
import Language.Ast ( Value(Number, Symbol) )
import Language.Interpreter.Types ( InterpreterProcess )
import Language.Interpreter.Values ( getNumberValue
, getValueType
)
-- | Apply a binary operator to two interpreter values.  Numbers support
-- arithmetic, comparison and logical operators (truth is encoded as 1/0,
-- with any non-zero number counting as true for && and ||); symbols only
-- support equality.  Any other combination is a type error.
binaryOp :: String -> Value -> Value -> InterpreterProcess Value
binaryOp op (Number n1) (Number n2) = case op of
  "^"  -> return $ Number (n1 ** n2)
  "*"  -> return $ Number (n1 * n2)
  "/"  -> safeDiv n1 n2
  "+"  -> return $ Number (n1 + n2)
  "-"  -> return $ Number (n1 - n2)
  "%"  -> safeMod n1 n2
  "<"  -> boolNum (n1 < n2)
  ">"  -> boolNum (n1 > n2)
  "<=" -> boolNum (n1 <= n2)
  ">=" -> boolNum (n1 >= n2)
  "==" -> boolNum (n1 == n2)
  "!=" -> boolNum (n1 /= n2)
  "&&" -> boolNum (n1 /= 0 && n2 /= 0)
  "||" -> boolNum (n1 /= 0 || n2 /= 0)
  _    -> throwError $ "Unknown operator: " ++ op
 where
  -- Encode a Bool as the language's numeric truth value (1 or 0).
  boolNum b = return $ Number (if b then 1 else 0)
binaryOp op (Symbol s1) (Symbol s2) = case op of
  "==" -> return $ Number (if s1 == s2 then 1 else 0)
  _    -> throwError $ "Cannot " ++ op ++ " on two symbols"
binaryOp op v1 v2 =
  throwError
    $  "Cannot "
    ++ op
    ++ " on a "
    ++ getValueType v1
    ++ " and a "
    ++ getValueType v2
-- | Apply a unary operator to a value: "-" negates, "!" is logical not
-- (0 becomes 1, anything else becomes 0).
unaryOp :: String -> Value -> InterpreterProcess Value
unaryOp op v = do
  n <- getNumberValue v
  case op of
    "-" -> return (Number (negate n))
    "!" -> return (Number (if n == 0 then 1 else 0))
    _   -> throwError ("Unknown operator: " ++ op)
-- | Division, guarded against a zero divisor.
safeDiv :: Float -> Float -> InterpreterProcess Value
safeDiv numer denom
  | denom == 0 = throwError "Cannot divide by zero"
  | otherwise = return (Number (numer / denom))
-- | Real-valued modulo ('mod''), guarded against a zero divisor.
safeMod :: Float -> Float -> InterpreterProcess Value
safeMod numer denom
  | denom == 0 = throwError "Cannot modulo by zero"
  | otherwise = return (Number (mod' numer denom))
| rumblesan/improviz | src/Language/Interpreter/Operators.hs | bsd-3-clause | 2,165 | 0 | 14 | 694 | 801 | 418 | 383 | 50 | 25 |
-- Copyright : Isaac Jones 2003-2004
{- All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of Isaac Jones nor the names of other
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -}
-- | ComponentLocalBuildInfo for Cabal >= 1.21
module CabalVersions.Cabal21 (
ComponentLocalBuildInfo
, PackageIdentifier(..)
, PackageName(..)
, componentPackageDeps
, componentLibraries
) where
import Distribution.Package (InstalledPackageId)
import Data.Version (Version)
-- | Mirror of Cabal's @LibraryName@; 'Read'-able so it can be parsed back
-- from serialized build configuration.
data LibraryName = LibraryName String
                 deriving (Read, Show)
-- | Mirror of Cabal's @PackageName@ newtype.
newtype PackageName = PackageName { unPackageName :: String }
    deriving (Read, Show)
-- | Mirror of Cabal's @PackageIdentifier@: a package name plus its version.
data PackageIdentifier
    = PackageIdentifier {
        pkgName :: PackageName,
        pkgVersion :: Version
     }
     deriving (Read, Show)
-- | Shorthand matching Cabal's own terminology.
type PackageId = PackageIdentifier
-- | Mirror of Cabal >= 1.21's @ComponentLocalBuildInfo@, one constructor
-- per component kind; only libraries additionally record library names.
-- 'Read'-able so it can be parsed from serialized build configuration.
-- NOTE(review): presumably read from @dist/setup-config@ — confirm with
-- the callers of this module.
data ComponentLocalBuildInfo
  = LibComponentLocalBuildInfo {
    componentPackageDeps :: [(InstalledPackageId, PackageId)],
    componentLibraries :: [LibraryName]
  }
  | ExeComponentLocalBuildInfo {
    componentPackageDeps :: [(InstalledPackageId, PackageId)]
  }
  | TestComponentLocalBuildInfo {
    componentPackageDeps :: [(InstalledPackageId, PackageId)]
  }
  | BenchComponentLocalBuildInfo {
    componentPackageDeps :: [(InstalledPackageId, PackageId)]
  }
  deriving (Read, Show)
| imeckler/mote | CabalVersions/Cabal21.hs | bsd-3-clause | 2,636 | 0 | 10 | 467 | 240 | 151 | 89 | 29 | 0 |
module Test.Sloth.Color
(
Color(..), showColor
) where
-- | Terminal colours supported by the result printer.
data Color = Red
           | Green
           | Magenta
           | Blue

-- 'show' yields the ANSI SGR colour code for each colour.
instance Show Color where
  show c = case c of
    Red     -> "31"
    Green   -> "32"
    Magenta -> "35"
    Blue    -> "34"

-- | Wrap a 'ShowS' so its output is rendered in the given colour, resetting
-- the terminal attributes afterwards.
showColor :: Color -> ShowS -> ShowS
showColor color inner =
  showString ("\027[" ++ show color ++ "m") . inner . showString "\027[0m"
| plancalculus/sloth | Test/Sloth/Color.hs | bsd-3-clause | 425 | 0 | 11 | 133 | 127 | 69 | 58 | 15 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
-- | More IO functions. The functions include ones for reading files with specific encodings,
-- strictly reading files, and writing files with encodings. There are also some simple
-- temporary file functions, more advanced alternatives can be found in
-- the <http://hackage.haskell.org/package/exceptions exceptions> package.
module System.IO.Extra(
module System.IO,
captureOutput,
withBuffering,
-- * Read encoding
readFileEncoding, readFileUTF8, readFileBinary,
-- * Strict reading
readFile', readFileEncoding', readFileUTF8', readFileBinary',
-- * Write with encoding
writeFileEncoding, writeFileUTF8, writeFileBinary,
-- * Temporary files
withTempFile, withTempDir, newTempFile, newTempDir,
) where
import System.IO
import Control.Concurrent.Extra
import Control.Exception.Extra as E
import GHC.IO.Handle(hDuplicate,hDuplicateTo)
import System.Directory.Extra
import System.IO.Error
import System.IO.Unsafe
import System.FilePath
import Data.Char
import Data.Time.Clock
import Data.Tuple.Extra
import Data.IORef
-- File reading
-- | Like 'readFile', but reading with the given text encoding.
readFileEncoding :: TextEncoding -> FilePath -> IO String
readFileEncoding enc path =
    openFile path ReadMode >>= \h -> hSetEncoding h enc >> hGetContents h
-- | Like 'readFile', but with the encoding 'utf8'.
readFileUTF8 :: FilePath -> IO String
readFileUTF8 path = readFileEncoding utf8 path
-- | Like 'readFile', but for binary files (no encoding or newline
-- translation).
readFileBinary :: FilePath -> IO String
readFileBinary path = openBinaryFile path ReadMode >>= hGetContents
-- Strict file reading
-- | A strict version of 'readFile': the entire file is read into memory and
-- the handle closed before the result is returned, without relying on the
-- garbage collector.
--
-- > \(filter isHexDigit -> s) -> fmap (== s) $ withTempFile $ \file -> do writeFile file s; readFile' file
readFile' :: FilePath -> IO String
readFile' path = withFile path ReadMode $ \h -> do
    contents <- hGetContents h
    _ <- evaluate (length contents)
    return contents
-- | A strict version of 'readFileEncoding', see 'readFile'' for details.
readFileEncoding' :: TextEncoding -> FilePath -> IO String
readFileEncoding' enc path = withFile path ReadMode body
    where body h = do
              hSetEncoding h enc
              contents <- hGetContents h
              _ <- evaluate (length contents)
              return contents
-- | A strict version of 'readFileUTF8', see 'readFile'' for details.
readFileUTF8' :: FilePath -> IO String
readFileUTF8' path = readFileEncoding' utf8 path
-- | A strict version of 'readFileBinary', see 'readFile'' for details.
readFileBinary' :: FilePath -> IO String
readFileBinary' path = withBinaryFile path ReadMode $ \h -> do
    contents <- hGetContents h
    _ <- evaluate (length contents)
    return contents
-- File writing
-- | Write a file, encoding the contents with the given encoding.
writeFileEncoding :: TextEncoding -> FilePath -> String -> IO ()
writeFileEncoding enc path contents = withFile path WriteMode body
    where body h = hSetEncoding h enc >> hPutStr h contents
-- | Write a file with the 'utf8' encoding.
--
-- > \s -> withTempFile $ \file -> do writeFileUTF8 file s; fmap (== s) $ readFileUTF8' file
writeFileUTF8 :: FilePath -> String -> IO ()
writeFileUTF8 path contents = writeFileEncoding utf8 path contents
-- | Write a binary file (no encoding or newline translation).
--
-- > \s -> withTempFile $ \file -> do writeFileBinary file s; fmap (== s) $ readFileBinary' file
writeFileBinary :: FilePath -> String -> IO ()
writeFileBinary path contents =
    withBinaryFile path WriteMode (\h -> hPutStr h contents)
-- Console
-- | Capture the 'stdout' and 'stderr' of a computation.
--
-- > captureOutput (print 1) == return ("1\n",())
--
-- Works by redirecting both standard handles into one temporary file for
-- the duration of the action, then reading the file back strictly.
captureOutput :: IO a -> IO (String, a)
captureOutput act = withTempFile $ \file -> do
    withFile file ReadWriteMode $ \h -> do
        res <- clone stdout h $ clone stderr h $ do
            -- The duplicated handles keep the file open, so the original
            -- handle can be closed before running the action.
            hClose h
            act
        out <- readFile' file
        return (out, res)
    where
        -- Point @out@ at @h@ while running @act@, restoring the original
        -- handle and its buffering mode afterwards (even on exceptions).
        clone out h act = do
            buf <- hGetBuffering out
            out2 <- hDuplicate out
            hDuplicateTo h out
            act `finally` do
                hDuplicateTo out2 out
                hClose out2
                hSetBuffering out buf
-- | Run an action with the handle temporarily set to the given
-- 'BufferMode'; the previous mode is restored afterwards, even if the
-- action throws.
withBuffering :: Handle -> BufferMode -> IO a -> IO a
withBuffering h mode action =
    bracket (hGetBuffering h) (hSetBuffering h) run
    where run _ = hSetBuffering h mode >> action
---------------------------------------------------------------------
-- TEMPORARY FILE
{-# NOINLINE tempRef #-}
-- | Process-wide counter used to build temporary file/directory names,
-- seeded from the current time of day (its digits, reversed) so separate
-- runs rarely collide.  NOINLINE + unsafePerformIO is the standard
-- global-IORef idiom.
tempRef :: IORef Int
tempRef = unsafePerformIO $ do
    rand :: Integer <- fmap (read . reverse . filter isDigit . show . utctDayTime) getCurrentTime
    newIORef $ fromIntegral rand
-- | Atomically bump the temp-name counter and return the new value.
tempUnique :: IO Int
tempUnique = atomicModifyIORef tempRef (\n -> (succ n, succ n))
-- | Provide a function to create a temporary file, and a way to delete a
-- temporary file. Most users should use 'withTempFile' which
-- combines these operations.
newTempFile :: IO (FilePath, IO ())
newTempFile = do
        file <- create
        -- Deletion runs at most once ('once') and swallows errors ('ignore').
        del <- once $ ignore $ removeFile file
        return (file, del)
    where
        create = do
            tmpdir <- getTemporaryDirectory
            val <- tempUnique
            -- Retry a few times in case of transient IO errors / races.
            (file, h) <- retryBool (\(_ :: IOError) -> True) 5 $ openTempFile tmpdir $ "extra-file-" ++ show val ++ "-"
            hClose h
            return file
-- | Run an action with a fresh temporary file.  The file is deleted once
-- the action finishes (provided it is not still open); it exists, is zero
-- bytes long and has no extension.  If you need a specific file name, use
-- 'withTempDir' instead.
--
-- > withTempFile doesFileExist == return True
-- > (doesFileExist =<< withTempFile return) == return False
-- > withTempFile readFile' == return ""
withTempFile :: (FilePath -> IO a) -> IO a
withTempFile action = do
    (path, cleanup) <- newTempFile
    action path `finally` cleanup
-- | Provide a function to create a temporary directory, and a way to delete
-- a temporary directory. Most users should use 'withTempDir' which
-- combines these operations.
newTempDir :: IO (FilePath, IO ())
newTempDir = do
        tmpdir <- getTemporaryDirectory
        -- Retry on transient IO errors; name collisions are handled by
        -- 'create' recursing with a fresh counter value.
        dir <- retryBool (\(_ :: IOError) -> True) 5 $ create tmpdir
        -- Deletion runs at most once ('once') and swallows errors ('ignore').
        del <- once $ ignore $ removeDirectoryRecursive dir
        return (dir, del)
    where
        create tmpdir = do
            v <- tempUnique
            let dir = tmpdir </> "extra-dir-" ++ show v
            catchBool isAlreadyExistsError
                (createDirectoryPrivate dir >> return dir) $
                \_ -> create tmpdir
-- | Run an action with a fresh temporary directory inside the system
-- temporary directory; the directory (and its contents) is removed when
-- the action finishes.
--
-- > withTempDir doesDirectoryExist == return True
-- > (doesDirectoryExist =<< withTempDir return) == return False
-- > withTempDir listFiles == return []
withTempDir :: (FilePath -> IO a) -> IO a
withTempDir action = do
    (dir, cleanup) <- newTempDir
    action dir `finally` cleanup
| mgmeier/extra | src/System/IO/Extra.hs | bsd-3-clause | 7,115 | 0 | 18 | 1,620 | 1,430 | 732 | 698 | 116 | 1 |
module Data.Carbonara.Time where
import Data.Char (isDigit)
import Data.Time.Calendar (Day(toModifiedJulianDay), addDays, fromGregorian, toGregorian) --time
import Data.Time.Clock (getCurrentTime, UTCTime(utctDay))
import Data.Time.Calendar.Easter (gregorianEaster)
-- | Move forward (@n > 0@) or backward (@n < 0@) by @n@ trading days.
addTradingDays :: Integral i => i -> Day -> Day
addTradingDays n day
    | n > 0 = nextTradingDay (addTradingDays (n - 1) day)
    | n < 0 = previousTradingDay (addTradingDays (n + 1) day)
    | otherwise = day
-- | Digits-only @YYYYMMDD@ rendering of a 'Day':
-- > dayToStr (fromGregorian 2016 3 14) == "20160314"
dayToStr :: Day -> String
dayToStr d = [c | c <- show d, isDigit c]
-- | Seconds since the Unix epoch at the start of the given day.
-- Modified Julian Day 40587 is 1970-01-01 and a day has 86400 seconds.
-- Day-granularity inverse of 'unixtimeToDay'.
dayToUnixtime :: Day -> Integer
dayToUnixtime d = 86400 * (toModifiedJulianDay d - 40587)
-- | Short alias for 'fromGregorian' (year, month, day-of-month).
fg :: Integer -> Int -> Int -> Day
fg = fromGregorian
-- | Day of month (1-31) of a 'Day'.
getDay :: Day -> Int
getDay date = let (_, _, d) = toGregorian date in d
-- | Month (1-12) of a 'Day'.
getMonth :: Day -> Int
getMonth date = let (_, m, _) = toGregorian date in m
-- | Year of a 'Day'.
getYear :: Day -> Integer
getYear date = let (y, _, _) = toGregorian date in y
-- | The current date in UTC.
getToday :: IO Day
getToday = fmap utctDay getCurrentTime
-- | The most recent trading day strictly before today (UTC).
getTradingDay :: IO Day
getTradingDay = fmap previousTradingDay getToday
-- | The most recent trading day, together with the exact current time.
getTradingDayUTC :: IO (Day, UTCTime)
getTradingDayUTC = do
    now <- getCurrentTime
    return (previousTradingDay (utctDay now), now)
-- | CME exchange holidays for a year: the federal holidays plus Good
-- Friday (which is not a federal holiday).
exchangeHolidays :: Integer -> [Day]
exchangeHolidays y = federalHolidays y ++ [holidayGoodFriday y]
-- | US federal holidays for a year (Easter is not a federal holiday).
-- New Year's Day may be observed on Dec 31 of THIS year when Jan 1 of the
-- NEXT year falls on a Saturday — see year 2010.
-- https://www.opm.gov/policy-data-oversight/snow-dismissal-procedures/federal-holidays/
federalHolidays :: Integer -> [Day]
federalHolidays year
    | nextYearJan1isSat = thisYearFederalHolidays ++ [fg year 12 31]
    | otherwise = thisYearFederalHolidays
    where nextYearJan1isSat = isSaturday $ fg (year + 1) 1 1
          thisYearFederalHolidays = [holidayNewYears year, holidayMartinLuther year, holidayWashington year
                      , holidayMemorial year, holidayIndependence year, holidayLabor year
                      , holidayColumbus year, holidayVeterans year, holidayThanksgiving year, holidayChristmas year]
-- | New Year's Day: January 1st, observed on Dec 31 when Jan 1 falls on a
-- Saturday and on Jan 2 when it falls on a Sunday.
holidayNewYears :: Integer -> Day
holidayNewYears year
    | isSaturday jan1 = pred jan1
    | isSunday jan1 = succ jan1
    | otherwise = jan1
    where jan1 = fromGregorian year 1 1
-- | Martin Luther King Jr. Day: the third Monday in January, i.e. the
-- first Monday strictly after January 14.
holidayMartinLuther :: Integer -> Day
holidayMartinLuther y = nextMonday (fromGregorian y 1 14)
-- | Presidents' Day: the third Monday in February, i.e. the first Monday
-- strictly after February 14.
holidayWashington :: Integer -> Day
holidayWashington y = nextMonday (fromGregorian y 2 14)
-- | Good Friday (observed by CME although not a US federal holiday): the
-- Friday strictly before Easter Sunday.
holidayGoodFriday :: Integer -> Day
holidayGoodFriday y = lastFriday (gregorianEaster y)
-- | Memorial Day: the last Monday in May, i.e. the last Monday strictly
-- before June 1.
holidayMemorial :: Integer -> Day
holidayMemorial y = lastMonday (fromGregorian y 6 1)
-- | Independence Day: July 4th, observed on July 3 when the 4th is a
-- Saturday and on July 5 when it is a Sunday.
holidayIndependence :: Integer -> Day
holidayIndependence year
    | isSaturday july4 = pred july4
    | isSunday july4 = succ july4
    | otherwise = july4
    where july4 = fromGregorian year 7 4
-- | Labor Day: the first Monday in September (strictly after August 31).
holidayLabor :: Integer -> Day
holidayLabor y = nextMonday (fromGregorian y 8 31)
-- | Columbus Day: the second Monday in October (first Monday strictly
-- after October 7).
holidayColumbus :: Integer -> Day
holidayColumbus y = nextMonday (fromGregorian y 10 7)
-- | Veterans Day: November 11th, observed on Nov 10 when the 11th is a
-- Saturday and on Nov 12 when it is a Sunday.
holidayVeterans :: Integer -> Day
holidayVeterans year
    | isSaturday nov11 = pred nov11
    | isSunday nov11 = succ nov11
    | otherwise = nov11
    where nov11 = fromGregorian year 11 11
-- | Thanksgiving: the fourth Thursday in November (first Thursday strictly
-- after November 21).
holidayThanksgiving :: Integer -> Day
holidayThanksgiving y = nextThursday (fromGregorian y 11 21)
-- | Christmas: December 25th, observed on Dec 24 when the 25th is a
-- Saturday and on Dec 26 when it is a Sunday.
holidayChristmas :: Integer -> Day
holidayChristmas year
    | isSaturday dec25 = pred dec25
    | isSunday dec25 = succ dec25
    | otherwise = dec25
    where dec25 = fromGregorian year 12 25
isWednesday,isThursday,isFriday,isSaturday,isSunday,isMonday,isTuesday :: Day -> Bool
-- One predicate per weekday, derived from the Modified Julian Day number
-- modulo 7 (MJD 0, 1858-11-17, was a Wednesday, hence index 0 here).
[isWednesday,isThursday,isFriday,isSaturday,isSunday,isMonday,isTuesday] = [isDay i | i <- [0 .. 6]]
  where isDay :: Integer -> Day -> Bool
        isDay i day = toModifiedJulianDay day `mod` 7 == i
-- | Is the day one of that year's CME exchange holidays?
isExchangeHoliday :: Day -> Bool
isExchangeHoliday day = elem day (exchangeHolidays (getYear day))
-- | Is the day one of that year's US federal holidays?  (New Year's Day
-- may be observed on Dec 31 — see 'federalHolidays' / year 2010.)
isFederalHoliday :: Day -> Bool
isFederalHoliday day = elem day (federalHolidays (getYear day))
-- | A trading day is neither a weekend day nor an exchange holiday.
isTradingDay :: Day -> Bool
isTradingDay day =
    not (isSunday day) && not (isSaturday day) && not (isExchangeHoliday day)
-- | A weekday is any day that is not Saturday or Sunday.
isWeekday :: Day -> Bool
isWeekday day = not (isSaturday day) && not (isSunday day)
lastTuesday,lastMonday,lastSunday,lastSaturday,lastFriday,lastThursday,lastWednesday :: Day -> Day
-- Each lastX yields the latest X STRICTLY before the given day (so a day
-- that is itself an X maps to the X one week earlier).  Offsets are
-- Modified-Julian-Day arithmetic; cf. the weekday predicates above.
[lastTuesday,lastMonday,lastSunday,lastSaturday,lastFriday,lastThursday,lastWednesday] = [lastDay i | i <- [0 .. 6]]
  where lastDay :: Integer -> Day -> Day
        lastDay i day = addDays ((negate $ (toModifiedJulianDay day + i) `mod` 7) - 1) day
nextWednesday,nextTuesday,nextMonday,nextSunday,nextSaturday,nextFriday,nextThursday :: Day -> Day
-- Each nextX yields the earliest X STRICTLY after the given day (so a day
-- that is itself an X maps to the X one week later).
[nextWednesday,nextTuesday,nextMonday,nextSunday,nextSaturday,nextFriday,nextThursday] = [nextDay i | i <- [0 .. 6]]
  where nextDay :: Integer -> Day -> Day
        nextDay i day = addDays (7 - (toModifiedJulianDay day + i) `mod` 7) day
-- | The first trading day strictly after the given day.
nextTradingDay :: Day -> Day
nextTradingDay = search . succ
    where search d | isTradingDay d = d
                   | otherwise = search (succ d)
-- | The last trading day strictly before the given day.
previousTradingDay :: Day -> Day
previousTradingDay = search . pred
    where search d | isTradingDay d = d
                   | otherwise = search (pred d)
-- | Digits-only @YYYYMMDD@ rendering (same result as 'dayToStr'; compare
-- @showGregorian@):
-- > showGreg (fromGregorian 2016 3 14) == "20160314"
showGreg :: Day -> String
showGreg day = filter isDigit (show day)
-- | The day containing a given Unix timestamp (1 day = 86400 seconds).
-- Day-granularity inverse of 'dayToUnixtime'.
unixtimeToDay :: Integer -> Day
unixtimeToDay secs = addDays (secs `div` 86400) epoch
    where epoch = fromGregorian 1970 1 1
--dayToUnixtime2 :: Day -> Integer
--dayToUnixtime2 day = (diffDays day (fromGregorian 1970 1 1)) * 86400
| szehk/Haskell-Carbonara-Library | src/Data/Carbonara/Time.hs | bsd-3-clause | 6,965 | 0 | 15 | 1,454 | 1,798 | 956 | 842 | 113 | 1 |
module EFA2.Solver.Horn where
import qualified Data.Traversable as Trav
import qualified Data.Foldable as Fold
import qualified Data.List.Key as Key
import qualified Data.List as L
import qualified Data.Set as S
import qualified Data.Map as M
import qualified Data.NonEmpty.Mixed as NonEmptyM
import qualified Data.NonEmpty as NonEmpty
import Data.NonEmpty ((!:))
import Control.Monad (liftM2)
import Control.Functor.HT (void)
import Data.Maybe (mapMaybe, catMaybes)
import Data.Ord.HT (comparing)
import Data.Eq.HT (equating)
import Data.Bool.HT (if')
import Data.Graph.Inductive (Node, Gr, labNodes, delEdge, edges)
import EFA2.Utils.Graph (foldGraphNodes, mapGraphNodes)
import EFA2.Solver.DependencyGraph (dpgDiffByAtMostOne, dpgHasSameVariable)
import EFA2.Solver.Equation (EqTerm, mkVarSet)
import qualified Test.QuickCheck as QC
import Debug.Trace (trace)
-- | Propositional formulae as needed for Horn-clause marking: constants,
-- atoms, conjunction and implication.
data Formula = Zero                -- ^ falsity
             | One                 -- ^ truth
             | Atom Int            -- ^ propositional variable
             | And Formula Formula -- ^ conjunction
             | Formula :-> Formula deriving (Ord, Eq) -- ^ implication
infix 8 :->
-- Render with logical symbols (T/F for the constants, ∧ and → for the
-- connectives); only conjunctions are parenthesised.
instance Show Formula where
  show f = case f of
    Zero    -> "F"
    One     -> "T"
    Atom n  -> show n
    And g h -> "(" ++ show g ++ " ∧ " ++ show h ++ ")"
    g :-> h -> show g ++ " → " ++ show h
-- | QuickCheck generation: constants and atoms are leaves; 'And' and ':->'
-- nodes halve the size parameter for each subterm so generated formulae
-- stay finite.  Shrinking offers both subterms and shrunk recombinations.
instance QC.Arbitrary Formula where
  arbitrary =
    QC.oneof $
    return Zero :
    return One :
    fmap Atom QC.arbitrary :
    QC.sized (\n -> let arb = QC.resize (div n 2) QC.arbitrary in liftM2 And arb arb) :
    QC.sized (\n -> let arb = QC.resize (div n 2) QC.arbitrary in liftM2 (:->) arb arb) :
    []
  shrink x =
    case x of
      Zero -> []
      One -> []
      Atom n -> map Atom $ QC.shrink n
      And f g -> f : g : (map (uncurry And) $ QC.shrink (f,g))
      f :-> g -> f : g : (map (uncurry (:->)) $ QC.shrink (f,g))
-- | Iteration counter of the marking algorithm.
type Step = Int

-- | Render a clause list as one big conjunction, each clause
-- individually parenthesised.
hornsToStr :: [Formula] -> String
hornsToStr clauses = L.intercalate " ∧ " (map parenthesised clauses)
  where parenthesised clause = "(" ++ show clause ++ ")"
-- | Is the formula a bare 'Atom'?
isAtom :: Formula -> Bool
isAtom f = case f of
  Atom _ -> True
  _      -> False

-- | Identifier of an 'Atom'.  Partial by design: any other
-- constructor aborts via 'error', as in the original.
fromAtom :: Formula -> Int
fromAtom f = case f of
  Atom n -> n
  t      -> error ("Wrong term " ++ show t ++ " supplied to fromAtom.")
-- | Collect every 'Atom' occurring in a formula; the constants 'Zero'
-- and 'One' contribute nothing.
getAtoms :: Formula -> S.Set Formula
getAtoms f = case f of
  atom@(Atom _) -> S.singleton atom
  And p q       -> getAtoms p `S.union` getAtoms q
  p :-> q       -> getAtoms p `S.union` getAtoms q
  _             -> S.empty
-- | Is an implication enabled?  Implications from 'One' fire
-- unconditionally (this clause must precede the general one);
-- otherwise all atoms of the premise must be in the marked set.
-- Non-implications are never enabled.
leftMarked :: S.Set Formula -> Formula -> Bool
leftMarked _ (One :-> _) = True
leftMarked vs (lhs :-> _) = S.null $ S.difference (getAtoms lhs) vs
leftMarked _ _ = False

-- | Is the conclusion of an implication already marked?
rightMarked :: S.Set Formula -> Formula -> Bool
rightMarked vs (_ :-> v) = S.member v vs
rightMarked _ _ = False

-- | Left-associated conjunction of a non-empty list of formulae.
makeAnd :: NonEmpty.T [] Formula -> Formula
makeAnd = NonEmpty.foldl1 And
-- | One round of Horn marking: every enabled clause fires, its
-- conclusion (tagged with the current step number @i@) joins the
-- marked set, and clauses whose conclusion is already marked are
-- removed from the work list.
step :: Step -> S.Set (Step, Formula) -> [Formula] -> (S.Set (Step, Formula), [Formula])
step i vs fs = (unionVs, filter (not . rightMarked onlyVars') bs)
  where -- as: clauses firing this round; bs: remaining clauses
        (as, bs) = L.partition (leftMarked onlyVars) fs
        -- conclusions of the fired clauses, tagged with step i
        vs' = S.fromList $ zip (repeat i) (map (\(_ :-> v) -> v) as)
        unionVs = S.union vs' vs
        onlyVars = S.map snd vs
        onlyVars' = S.map snd unionVs

-- | Iterate 'step' to a fixpoint.  'Nothing' as soon as 'Zero' gets
-- marked (the clauses are unsatisfiable); otherwise the final marked
-- set once no new marks appear.
horn' :: Step -> S.Set (Step, Formula) -> [Formula] -> Maybe (S.Set (Step, Formula))
horn' i vs fs =
  if' (Fold.any ((Zero ==) . snd) vs) Nothing $
  if' (vs == vs') (Just vs) $
  horn' (i+1) vs' fs'
  where (vs', fs') = step i vs fs
-- | Returns the set of 'Atom's that have to be marked True in order to
-- fulfill the 'Formula'e, or 'Nothing' if they are unsatisfiable.
-- Each 'Atom' is paired with the 'Step' in which it was marked.
horn :: [Formula] -> Maybe (S.Set (Step, Formula))
horn clauses = fmap (S.filter (isAtom . snd)) (horn' 0 S.empty clauses)
-- | Takes a dependency graph and returns Horn clauses from it, that is, every directed edge
-- is taken for an implication.
graphToHorn :: Gr a () -> [Formula]
graphToHorn g = foldGraphNodes f [] g
        -- completely isolated nodes contribute no clauses
  where f acc ([], _, []) = acc
        -- one clause @pred :-> node@ per incoming edge
        f acc (ins, x, _) = (map (:-> Atom x) (map Atom ins)) ++ acc
{-
-- | Takes a dependency graph and returns Horn clauses from it. /Given/ 'Formula'e will
-- produce additional clauses of the form One :-> Atom x.
-- These are the starting clauses for the Horn marking algorithm.
makeHornFormulae :: (a -> Bool) -> Gr a () -> [Formula]
makeHornFormulae isVar g = given ++ graphToHorn g
where given = L.foldl' f [] (labNodes g)
f acc (n, t) | isGiven t = (One :-> Atom n):acc
f acc _ = acc
-}
-- | Takes a dependency graph and a list of 'Formula'e. With help of the horn marking algorithm
-- it produces a list of 'EqTerm' equations that is ordered such, that it can be computed
-- one by one.
makeHornOrder :: M.Map Node a -> [Formula] -> [a]
makeHornOrder m formulae = map ((m M.!) . fromAtom) orderedAtoms
  where
    -- Fail with a descriptive message instead of the opaque
    -- irrefutable-pattern crash the original produced when 'horn'
    -- returns 'Nothing' (unsatisfiable clause set).
    marked = maybe (errorstar "makeHornOrder: Horn marking failed (unsatisfiable clauses)")
                   id
                   (horn formulae)
    -- ascending (Step, Atom) order yields a valid evaluation order
    orderedAtoms = map snd (S.toAscList marked)
-- | Filter equations which contain the same variables.
-- Given terms are also filtered, as they contain no variables.
filterUnneeded ::
  (Ord a) =>
  (EqTerm -> Maybe a) -> [EqTerm] -> [EqTerm]
-- Pair each term with its variable set, group terms with equal sets,
-- and keep one representative per group.
-- NOTE(review): 'groupBy' only merges *adjacent* duplicates — confirm
-- that terms with equal variable sets always arrive adjacent.
filterUnneeded isVar =
  map (fst . NonEmpty.head) .
  NonEmptyM.groupBy (equating snd) .
  map (\t -> (t, mkVarSet isVar t))
-- | Build the node map and the complete Horn clause set for a system
-- of equations: start clauses (@One :-> x@) for the externally given
-- terms, edge clauses from the at-most-one-difference dependency
-- graph, and set-cover clauses from the shared-variable graph.
makeHornClauses ::
  (Ord a, Show a) =>
  (EqTerm -> Maybe a) -> [EqTerm] -> [EqTerm] ->
  (M.Map Node EqTerm, [Formula])
makeHornClauses isVar givenExt rest = (m, startfs ++ fsdpg ++ fsdpg2)
  where m = M.fromList (labNodes dpg)
        ts = givenExt ++ rest
        dpg = dpgDiffByAtMostOne isVar ts
        fsdpg = graphToHorn dpg
        -- nodes corresponding to externally given terms
        ext = filter (flip elem givenExt . snd) (labNodes dpg)
        startfs = map (h . fst) ext
        h x = One :-> Atom x
        dpg2 = dpgHasSameVariable isVar ts
        -- drop from dpg2 the edges already covered by dpg
        dpg3 = L.foldl' (flip delEdge) dpg2 (edges dpg)
        fsdpg2 = concat $ mapGraphNodes g dpg3
        mset = M.map (mkVarSet isVar) m
        g ([], _, _) = []
        -- one clause per set cover: the conjunction of a covering
        -- predecessor set implies the node
        g (ins, n, _) =
          map (\xs -> makeAnd (fmap Atom xs) :-> Atom n) $
          mapMaybe NonEmpty.fetch sc
          -- brute-force cover is used; the greedy variant is kept
          -- around (unused) for comparison
          where _sc = greedyCover mset n ins
                sc = setCoverBruteForce mset n ins
-- | Order equations so that they can be computed one after another,
-- by building Horn clauses and running the marking algorithm.
hornOrder ::
  (Ord a, Show a) =>
  (EqTerm -> Maybe a) -> [EqTerm] -> [EqTerm] -> [EqTerm]
hornOrder isVar givenExt ts =
  let (nodeMap, clauses) = makeHornClauses isVar givenExt ts
  in  makeHornOrder nodeMap clauses
-- using a NonEmptyList, the 'tail' could be total
-- | All non-empty sublists: independently keep or drop each element,
-- then strip the all-dropped combination via 'NonEmpty.tail'.
allNotEmptyCombinations :: (Ord a) => [a] -> [[a]]
allNotEmptyCombinations =
  NonEmpty.tail . fmap catMaybes . Trav.mapM (\x -> Nothing !: Just x : [])

-- | Exhaustive search for candidate-node subsets whose variable sets
-- cover the target node's variables down to at most one residual.
-- Refuses (with a 'trace' warning, returning no covers) instances with
-- more than 16 candidates, as the search is exponential.
setCoverBruteForce ::
   Ord a => M.Map Node (S.Set a) -> Node -> [Node] -> [[Node]]
setCoverBruteForce m n ns =
   let minL = 16
       l = length ns
   in if l > minL
        then
          trace
            ("Instance size " ++ show l ++
             "; setCoverBruteForce doesn't like instances > " ++ show minL) []
        else
          -- accept a subset when the target's variables minus the
          -- union of chosen sets contain fewer than two elements
          let p t = sizeLessThanTwo ((m M.! n) S.\\ t)
          in map fst $ filter (p . S.unions . snd) $
             map unzip $
             allNotEmptyCombinations $
             map (\k -> (k, m M.! k)) ns

-- | Greedy cover: repeatedly pick the candidate leaving the smallest
-- residue until at most one target variable remains; 'error's when no
-- cover exists.
greedyCover ::
   Ord a => M.Map Node (S.Set a) -> Node -> [Node] -> [[Node]]
greedyCover m n ns0 = [go (m M.! n) ns0]
  where go s _ | sizeLessThanTwo s = []
        go _ [] = error "no set cover"
        go s ns = x : go (s S.\\ s') (L.delete x ns)
          -- 'lazySize' lets the minimum comparison stop early instead
          -- of forcing full set sizes
          where (x, s') =
                  Key.minimum (lazySize . (s S.\\) . snd) $
                  map (\a -> (a, m M.! a)) ns
-- | The cardinality of a set as a lazily produced unit list, so that
-- length comparisons can stop as soon as the lists diverge.
lazySize :: S.Set a -> [()]
lazySize = map (const ()) . S.toList

-- | True iff the set holds at most one element; unlike 'S.size' this
-- inspects at most two elements.
sizeLessThanTwo :: S.Set a -> Bool
sizeLessThanTwo s = case S.toList s of
  (_ : _ : _) -> False
  _           -> True
-- | Older formulation of 'setCoverBruteForce'; kept as the reference
-- implementation for 'setCoverBruteForceProp'.
setCoverBruteForceOld ::
   Ord a => M.Map Node (S.Set a) -> Node -> [Node] -> [[Node]]
-- Guard clause: refuse over-sized instances (exponential search).
setCoverBruteForceOld _ _ ns | l > n = trace msg []
  where n = 16
        l = length ns
        msg = "Instance size " ++ show l ++ "; setCoverBruteForceOld doesn't like instances > " ++ show n
setCoverBruteForceOld m n ns = map fst $ filter p xs
  where s = m M.! n
        combs = allNotEmptyCombinations ns
        xs = zip combs (map f combs)
        f ys = S.unions $ map (m M.!) ys
        -- keep combinations leaving fewer than two uncovered variables
        p (_c, t) = S.size (s S.\\ t) < 2

-- | Older formulation of 'greedyCover'; reference implementation for
-- 'greedyCoverProp'.  Uses a full sort where the new version uses a
-- lazy minimum.
greedyCoverOld ::
   Ord a => M.Map Node (S.Set a) -> Node -> [Node] -> [[Node]]
greedyCoverOld m n ns0 = [go s0 ns0]
  where s0 = m M.! n
        go s _ | S.size s < 2 = []
        go _ [] = error "no set cover"
        go s ns = x:(go (s S.\\ s') ns')
          where sets = map (\a -> (a, m M.! a)) ns
                (x, s') = head $ L.sortBy (comparing (S.size . (s S.\\) . snd)) sets
                ns' = L.delete x ns
-- | QuickCheck property: the rewritten brute-force cover agrees with
-- the reference version.  Candidate nodes are restricted to keys
-- present in the map, and the target node is mapped to the empty set.
setCoverBruteForceProp :: [(Node, [Ordering])] -> Node -> [Node] -> Bool
setCoverBruteForceProp forms n ns0 =
  let m = fmap S.fromList $ M.insert n [] $ M.fromList forms
      ns = S.toList $ S.intersection (M.keysSet m) $ S.fromList ns0
  in setCoverBruteForce m n ns
     ==
     setCoverBruteForceOld m n ns

-- | QuickCheck property: new and old greedy covers agree, under the
-- same input restrictions as 'setCoverBruteForceProp'.
greedyCoverProp :: [(Node, [Ordering])] -> Node -> [Node] -> Bool
greedyCoverProp forms n ns0 =
  let m = fmap S.fromList $ M.insert n [] $ M.fromList forms
      ns = S.toList $ S.intersection (M.keysSet m) $ S.fromList ns0
  in greedyCover m n ns
     ==
     greedyCoverOld m n ns
| energyflowanalysis/efa-2.1 | attic/src/EFA2/Solver/Horn.hs | bsd-3-clause | 9,092 | 0 | 18 | 2,470 | 3,573 | 1,874 | 1,699 | 198 | 3 |
module Main where
import Control.Prototype
-- | Demo of the prototype-object library: build a \"dog\" package
-- exposing 'sit'/'setName' members, clone an instance, name it and
-- make it sit.
main = flip runProt initProtEnv $ do
  ( dog, setName, sit ) <- package "dog" $ do
    -- prototype chain: object <- honyuurui (mammal) <- dog
    honyuurui <- clone object
    dog <- clone honyuurui
    sit <- makeMember "sit"
    setName <- makeMember "setName"
    name <- makeMember "name"
    -- sit: print "<name> sitting." using the object's stored name
    setMethod dog sit $ \obj _ -> do
      n <- member obj name
      liftProt $ putStrLn $ fromPrimStr n ++ " sitting."
      return [ ]
    -- setName: store the argument in the name member
    -- NOTE(review): the pattern @[ n ]@ is partial — calling setName
    -- with an argument count other than one crashes; confirm intended.
    setMethod dog setName $ \obj [ n ] -> do
      setMember obj name n
      return [ ]
    return ( dog, setName, sit )
  myDog <- clone dog
  method myDog setName [ primStr "John" ]
  method myDog sit [ ]
| YoshikuniJujo/prototype | examples/test.hs | bsd-3-clause | 590 | 0 | 18 | 143 | 246 | 112 | 134 | 20 | 1 |
{-# language FlexibleContexts #-}
{-# language ScopedTypeVariables #-}
{-# language TypeFamilies #-}
module VCopy where
import Feldspar
import Feldspar.Array.Vector
import Feldspar.Array.Buffered
import Feldspar.Software
import Feldspar.Software.Verify
import Feldspar.Software as Soft (icompile)
--import Feldspar.Hardware
--import Feldspar.Hardware as Hard (icompile)
import Prelude hiding (take, drop, reverse, length, map, zip, zipWith, sum, div)
--------------------------------------------------------------------------------
--
--------------------------------------------------------------------------------
-- | Reversal as a push vector: thaw the manifest array without copying
-- and emit elements swapped pairwise from both ends toward the middle.
-- Both elements of a pair are read before either is written, so the
-- swap stays correct even when the writer targets the same storage
-- (the in-place store case below).
reverse' :: SType a => Manifest Software (SExp a) -> SPush (SExp a)
reverse' (M iarr) = Push len $ \write -> do
    arr <- unsafeThawArr iarr
    -- len `div` 2 iterations: each step fixes two positions.
    -- NOTE(review): for odd lengths the middle element is never
    -- written — fine for in-place targets; confirm for fresh targets.
    for 0 1 (len `div` 2) $ \ix -> do
      a <- getArr arr (len-ix-1)
      b <- getArr arr (ix)
      write (ix) a
      write (len-ix-1) b
  where
    len = length iarr

-- | Reverse a 20-element vector through an ordinary store and print
-- the doubled sum.
prog :: Software ()
prog = do
  buf :: Store Software (SExp Word32) <- newStore 20
  vec1 <- store buf $ (1...20)
  vec2 <- store buf $ reverse' vec1
  printf "%d" $ sum $ map (*2) vec2

-- | Same computation as 'prog' but through an in-place store, so the
-- reversal reads and writes the same backing array.
prog' :: Software ()
prog' = do
  buf :: Store Software (SExp Word32) <- newInPlaceStore 20
  vec1 <- store buf $ (1...20)
  vec2 <- store buf $ reverse' vec1
  printf "%d" $ sum $ map (*2) vec2

--------------------------------------------------------------------------------

-- | Minimal in-place reversal example used for verification; the
-- result is discarded, only the generated store traffic matters.
test :: Software ()
test = do
  buf :: Store Software (SExp Word32) <- newInPlaceStore 10
  arr <- store buf (1...10)
  brr <- store buf (rev arr)
  return ()
  where
    -- index permutation mirroring position ix to 10-ix-1
    rev :: Manifest Software (SExp Word32) -> Push Software (SExp Word32)
    rev = pairwise (\ix -> (ix, 10-ix-1))
--------------------------------------------------------------------------------
| markus-git/co-feldspar | examples/VCopy.hs | bsd-3-clause | 1,774 | 0 | 17 | 324 | 613 | 312 | 301 | 40 | 1 |
module Resolve.DNS.Transport.Helper.LiveTCP where
import Resolve.DNS.Transport.Types
import qualified Resolve.DNS.Transport.LiveTCP as TCP
import Network.Socket
-- | Connection parameters for the live-TCP DNS transport.
data Config = Config { host :: HostName       -- ^ peer host name or address literal
                     , port :: ServiceName    -- ^ peer port number or service name
                     , passive :: Bool        -- ^ forwarded to 'TCP.passive'
                     }
            deriving (Show)
-- | Diagnostic name of this module.  Fixed the stale label (it read
-- \"Resolve.DNS.Helper.LiveTCP\", disagreeing with the actual module
-- name) and added the missing type signature.
lname :: String
lname = "Resolve.DNS.Transport.Helper.LiveTCP"
-- | Resolve the configured host/port to a stream address and build a
-- live-TCP transport over the first result.
new :: Config -> IO Transport
new c = do
  let hints = defaultHints { addrSocketType = Stream}
  -- Partial pattern: per the network package docs, getAddrInfo with a
  -- host either returns a non-empty list or throws, so the match
  -- cannot fail silently.
  addr:_ <- getAddrInfo (Just hints) (Just $ host c) (Just $ port c)
  TCP.new $ TCP.Config { TCP.family = addrFamily addr
                       , TCP.protocol = addrProtocol addr
                       , TCP.server = addrAddress addr
                       , TCP.passive = passive c
                       }
| riaqn/resolve | src/Resolve/DNS/Transport/Helper/LiveTCP.hs | bsd-3-clause | 797 | 0 | 11 | 283 | 208 | 117 | 91 | 17 | 1 |
-- for TokParsing, MonadicParsing
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE OverloadedStrings #-}
-- {-# LANGUAGE GADTs #-}
-- {-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE DataKinds #-}
-- {-# LANGUAGE KindSignatures #-}
{-# LANGUAGE CPP #-}
-- {-# OPTIONS_GHC -Wall #-}
-- {-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- {-# OPTIONS_GHC -fno-warn-orphans #-}
-- {-# OPTIONS_GHC -fno-warn-missing-signatures #-}
-- {-# OPTIONS_GHC -fno-warn-unused-do-bind #-}
-- {-# OPTIONS_GHC -fno-warn-incomplete-patterns #-}
-- {-# OPTIONS_GHC -fno-warn-incomplete-uni-patterns #-}
-- |
-- Copyright : (c) Andreas Reuleaux 2015
-- License : BSD2
-- Maintainer: Andreas Reuleaux <rx@a-rx.info>
-- Stability : experimental
-- Portability: non-portable
--
-- This module is part of Pire's parser.
module Pire.Parser.Token
(
module Pire.Parser.Token
, module Text.Trifecta
)
where
import Pire.Syntax.Token
import Pire.Syntax.Ws
import Pire.Parser.Parser
import Pire.Parser.Basic
import Text.Parser.Token
import qualified Data.Text as T
import Control.Applicative
-- want :@ from ReSyntax
-- do I ever need :@ from Trifecta ?
import Text.Trifecta hiding ((:@))
#ifdef DocTest
-- for the doctests - can be simplified though: need no state here
-- import Control.Monad.State
import Control.Monad.State.Strict
#endif
-- maybe move to separate Pire.Parser.Ws
-- | Consume whitespace (including comments) and keep the consumed
-- text as a 'Ws' token.
ws_ :: (TokParsing m, DeltaParsing m)
    => m (Ws T.Text)
ws_ = Ws . txt <$> sliced whiteSpace
-- some helpers
-- I guess can be simplified, use reserved_ throughout ie.
-- be careful to use reserved_ only when ws is required after the symbol
-- eg. not for colon, as "x: A" is allowed, but not required, ie. "x:A" is fine as well
-- ie. can use reserved_ for if, then, else, in etc.
-- not for (, ), [, ], :, etc
-- |
--
-- >>> fromSuccess $ parseString (runInnerParser $ evalStateT (whiteSpace *> bracketOpen_) piPrelude) beginning " [{-foo-}y] x "
-- BracketOpen "[" (Ws "{-foo-}")
-- | Parse \"[\" and record the whitespace following it.
bracketOpen_ :: (TokParsing m, DeltaParsing m)
             => m (Token 'BracketOpenTy T.Text)
bracketOpen_ =
  BracketOpen <$> runUnspaced (textSymbol "[")
              <*> (Ws . txt <$> sliced whiteSpace)
-- | Parse \"]\" and record the whitespace following it.
bracketClose_ :: (TokParsing m, DeltaParsing m)
              => m (Token 'BracketCloseTy T.Text)
bracketClose_ =
  BracketClose <$> runUnspaced (textSymbol "]")
               <*> (Ws . txt <$> sliced whiteSpace)

-- | Parse \"(\" and record the whitespace following it.
parenOpen_ :: (TokParsing m, DeltaParsing m)
           => m (Token 'ParenOpenTy T.Text)
parenOpen_ =
  ParenOpen <$> runUnspaced (textSymbol "(")
            <*> (Ws . txt <$> sliced whiteSpace)

-- | Parse \")\" and record the whitespace following it.
parenClose_ :: (TokParsing m, DeltaParsing m)
            => m (Token 'ParenCloseTy T.Text)
parenClose_ =
  ParenClose <$> runUnspaced (textSymbol ")")
             <*> (Ws . txt <$> sliced whiteSpace)

-- | Parse \"{\" and record the whitespace following it.
braceOpen_ :: (TokParsing m, DeltaParsing m)
           => m (Token 'BraceOpenTy T.Text)
braceOpen_ =
  BraceOpen <$> runUnspaced (textSymbol "{")
            <*> (Ws . txt <$> sliced whiteSpace)

-- | Parse \"}\" and record the whitespace following it.
braceClose_ :: (TokParsing m, DeltaParsing m)
            => m (Token 'BraceCloseTy T.Text)
braceClose_ =
  BraceClose <$> runUnspaced (textSymbol "}")
             <*> (Ws . txt <$> sliced whiteSpace)
-- | Parse \"|\" and record the whitespace following it.
vbar_ :: (TokParsing m, DeltaParsing m)
      => m (Token 'VBarTy T.Text)
vbar_ =
  VBar <$> runUnspaced (textSymbol "|")
       <*> (Ws . txt <$> sliced whiteSpace)

-- | Parse the lambda backslash and record the whitespace following it.
lamTok_ :: (TokParsing m, DeltaParsing m)
        => m (Token 'LamTokTy T.Text)
lamTok_ =
  LamTok <$> runUnspaced (textSymbol "\\")
         <*> (Ws . txt <$> sliced whiteSpace)

-- | Parse \".\" and record the whitespace following it.
-- (Could probably use the reserved-word combinator as well.)
dot_ :: (TokParsing m, DeltaParsing m)
     => m (Token 'DotTy T.Text)
dot_ =
  Dot <$> runUnspaced (textSymbol ".")
      <*> (Ws . txt <$> sliced whiteSpace)
-- | Parse the reserved word \"let\" and the whitespace following it.
letTok_ :: (TokParsing m, DeltaParsing m)
        => m (Token 'LetTokTy T.Text)
letTok_ = let kw = "let" in LetTok (T.pack kw) <$> reserved_ kw

-- | Parse \"=\" and record the whitespace following it.
eq_ :: (TokParsing m, DeltaParsing m)
    => m (Token 'EqualTy T.Text)
eq_ =
  Equal <$> runUnspaced (textSymbol "=")
        <*> (Ws . txt <$> sliced whiteSpace)
-- | Parse the reserved word \"if\" and the whitespace following it.
if_ :: (TokParsing m, DeltaParsing m)
    => m (Token 'IfTokTy T.Text)
if_ = let kw = "if" in IfTok (T.pack kw) <$> reserved_ kw

-- | Parse the reserved word \"then\" and the whitespace following it.
then_ :: (TokParsing m, DeltaParsing m)
      => m (Token 'ThenTokTy T.Text)
then_ = let kw = "then" in ThenTok (T.pack kw) <$> reserved_ kw

-- | Parse the reserved word \"else\" and the whitespace following it.
else_ :: (TokParsing m, DeltaParsing m)
      => m (Token 'ElseTokTy T.Text)
else_ = let kw = "else" in ElseTok (T.pack kw) <$> reserved_ kw

-- | Parse the reserved word \"in\" and the whitespace following it.
in_ :: (TokParsing m, DeltaParsing m)
    => m (Token 'InTy T.Text)
in_ = let kw = "in" in In (T.pack kw) <$> reserved_ kw
-- | Parse \":\" and record the whitespace following it.
colon_ :: (TokParsing m, DeltaParsing m)
       => m (Token 'ColonTy T.Text)
colon_ =
  Colon <$> runUnspaced (textSymbol ":")
        <*> (Ws . txt <$> sliced whiteSpace)

-- | Parse \";\" and record the whitespace following it.
semiColon_ :: (TokParsing m, DeltaParsing m)
           => m (Token 'SemiColonTy T.Text)
semiColon_ =
  SemiColon <$> runUnspaced (textSymbol ";")
            <*> (Ws . txt <$> sliced whiteSpace)

-- | Parse \",\" and record the whitespace following it.
comma_ :: (TokParsing m, DeltaParsing m)
       => m (Token 'CommaTy T.Text)
comma_ =
  Comma <$> runUnspaced (textSymbol ",")
        <*> (Ws . txt <$> sliced whiteSpace)

-- | Parse \"->\" and record the whitespace following it.
arrow_ :: (TokParsing m, DeltaParsing m)
       => m (Token 'ArrowTy T.Text)
arrow_ =
  Arrow <$> runUnspaced (textSymbol "->")
        <*> (Ws . txt <$> sliced whiteSpace)
-- | Parse the reserved word \"case\" and the whitespace following it.
case_ :: (TokParsing m, DeltaParsing m)
      => m (Token 'CaseTokTy T.Text)
case_ = let kw = "case" in CaseTok (T.pack kw) <$> reserved_ kw

-- | Parse the reserved word \"pcase\" and the whitespace following it.
pcase_ :: (TokParsing m, DeltaParsing m)
       => m (Token 'PcaseTokTy T.Text)
pcase_ = let kw = "pcase" in PcaseTok (T.pack kw) <$> reserved_ kw

-- | Parse the reserved word \"subst\" and the whitespace following it.
subst_ :: (TokParsing m, DeltaParsing m)
       => m (Token 'SubstTokTy T.Text)
subst_ = let kw = "subst" in SubstTok (T.pack kw) <$> reserved_ kw

-- | Parse the reserved word \"by\" and the whitespace following it.
by_ :: (TokParsing m, DeltaParsing m)
    => m (Token 'ByTy T.Text)
by_ = let kw = "by" in By (T.pack kw) <$> reserved_ kw

-- | Parse the reserved word \"contra\" and the whitespace following it.
contraTok_ :: (TokParsing m, DeltaParsing m)
           => m (Token 'ContraTokTy T.Text)
contraTok_ = let kw = "contra" in ContraTok (T.pack kw) <$> reserved_ kw

-- | Parse the reserved word \"of\" and the whitespace following it.
of_ :: (TokParsing m, DeltaParsing m)
    => m (Token 'OfTy T.Text)
of_ = let kw = "of" in Of (T.pack kw) <$> reserved_ kw
-- | Parse the reserved word \"where\" and the whitespace following it.
where_ :: (TokParsing m, DeltaParsing m)
       => m (Token 'WhereTy T.Text)
where_ = let kw = "where" in Where (T.pack kw) <$> reserved_ kw

-- | Parse the reserved word \"module\" and the whitespace following it.
mod_ :: (TokParsing m, DeltaParsing m)
     => m (Token 'ModuleTokTy T.Text)
mod_ = let kw = "module" in ModuleTok (T.pack kw) <$> reserved_ kw

-- | Parse the reserved word \"data\" and the whitespace following it.
data_ :: (TokParsing m, DeltaParsing m)
      => m (Token 'DataTokTy T.Text)
data_ = let kw = "data" in DataTok (T.pack kw) <$> reserved_ kw
| reuleaux/pire | src/Pire/Parser/Token.hs | bsd-3-clause | 7,967 | 0 | 12 | 2,524 | 2,508 | 1,233 | 1,275 | 218 | 1 |
-- | An abstract pointer to a compiled PCRE Regex structure
-- The structure allocated by the PCRE library will be deallocated
-- automatically by the Haskell storage manager.
--
{-- snippet data --}
-- | A compiled PCRE pattern together with the pattern text it was
-- compiled from.  The 'ForeignPtr' finaliser frees the C-side
-- structure automatically when the 'Regex' is garbage collected.
data Regex = Regex !(ForeignPtr PCRE)
                   !ByteString
     deriving (Eq, Ord, Show)
{-- /snippet data --}

{-- snippet unit --}
-- | Opaque stand-in for the C @pcre@ struct; only ever used behind a
-- pointer.
type PCRE = ()
{-- /snippet unit --}

{-- snippet pcre_compile --}
-- | Raw binding to @pcre_compile@: returns NULL on failure, with the
-- error message and pattern offset written through the out-pointers.
foreign import ccall unsafe "pcre.h pcre_compile"
    c_pcre_compile :: CString       -- ^ pattern to compile
                   -> PCREOption    -- ^ OR-ed option bits
                   -> Ptr CString   -- ^ out: error message
                   -> Ptr CInt      -- ^ out: error offset in the pattern
                   -> Ptr Word8     -- ^ character tables (may be NULL)
                   -> IO (Ptr PCRE)
{-- /snippet pcre_compile --}
{-- snippet compiletype --}
compile :: ByteString -> [PCREOption] -> Either String Regex
{-- /snippet compiletype --}
{-- snippet compileReal --}
compile :: ByteString -> [PCREOption] -> Either String Regex
-- unsafePerformIO is justified: compilation is a pure function of the
-- pattern and flags, and no side effect escapes.
compile str flags = unsafePerformIO $
  useAsCString str $ \pattern -> do
    alloca $ \errptr -> do
      alloca $ \erroffset -> do
        pcre_ptr <- c_pcre_compile pattern (combineOptions flags) errptr erroffset nullPtr
        if pcre_ptr == nullPtr
            then do
                -- failure: copy the C error string out before the
                -- alloca frames disappear
                err <- peekCString =<< peek errptr
                return (Left err)
            -- NOTE(review): PCRE structures are documented to be
            -- released with pcre_free; finalizerFree calls free(),
            -- which matches only when the two coincide — confirm.
            else do
                reg <- newForeignPtr finalizerFree pcre_ptr -- release with free()
                return (Right (Regex reg str))
{-- /snippet compileReal --}
| binesiyu/ifl | examples/ch17/PCRE-compile.hs | mit | 1,453 | 0 | 24 | 451 | 299 | 153 | 146 | -1 | -1 |
-- | Parsing of constants in GIR files.
module Data.GI.GIR.Constant
( Constant(..)
, parseConstant
) where
import Data.Text (Text)
import Data.GI.GIR.BasicTypes (Type)
import Data.GI.GIR.Type (parseType, parseCType)
import Data.GI.GIR.Parser
-- | Info about a constant.
data Constant = Constant {
      constantType :: Type,                       -- ^ GIR type of the constant
      constantValue :: Text,                      -- ^ literal value, as text from the GIR file
      constantCType :: Text,                      -- ^ underlying C type name
      constantDocumentation :: Documentation,     -- ^ attached documentation
      constantDeprecated :: Maybe DeprecationInfo -- ^ deprecation info, if deprecated
    } deriving (Show)

-- | Parse a "constant" element from the GIR file.
parseConstant :: Parser (Name, Constant)
parseConstant = do
  name <- parseName
  deprecated <- parseDeprecation
  -- the literal value lives in the element's "value" attribute
  value <- getAttr "value"
  t <- parseType
  ctype <- parseCType
  doc <- parseDocumentation
  return (name, Constant { constantType = t
                         , constantValue = value
                         , constantCType = ctype
                         , constantDocumentation = doc
                         , constantDeprecated = deprecated
                         })
| ford-prefect/haskell-gi | lib/Data/GI/GIR/Constant.hs | lgpl-2.1 | 1,079 | 0 | 10 | 328 | 224 | 133 | 91 | 27 | 1 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
-------------------------------------------------------------------------------------
------------ Code to convert Core to Administrative Normal Form ---------------------
-------------------------------------------------------------------------------------
module Language.Haskell.Liquid.ANFTransform (anormalize) where
import CoreSyn
import CoreUtils (exprType)
import qualified DsMonad
import DsMonad (initDs)
import GHC hiding (exprType)
import HscTypes
import Id (mkSysLocalM)
import Literal
import MkCore (mkCoreLets)
import Outputable (trace)
import Var (varType, setVarType)
import TypeRep
import Type (mkForAllTys, substTy, mkForAllTys, mkTopTvSubst, isTyVar)
import TyCon (tyConDataCons_maybe)
import DataCon (dataConInstArgTys)
import FamInstEnv (emptyFamInstEnv)
import VarEnv (VarEnv, emptyVarEnv, extendVarEnv, lookupWithDefaultVarEnv)
import Control.Monad.State.Lazy
import UniqSupply (MonadUnique)
import Language.Fixpoint.Types (anfPrefix)
import Language.Haskell.Liquid.GhcMisc (MGIModGuts(..), showPpr, symbolFastString)
import Language.Haskell.Liquid.TransformRec
import Language.Fixpoint.Misc (fst3, errorstar)
import Data.Maybe (fromMaybe)
import Data.List (sortBy, (\\))
import Control.Applicative
-- | Convert the module's Core bindings to A-normal form, running
-- inside GHC's desugaring monad.  The flag controls DEFAULT-case
-- expansion (see 'expandDefaultCase').
anormalize :: Bool -> HscEnv -> MGIModGuts -> IO [CoreBind]
anormalize expandFlag hscEnv modGuts
  = do -- putStrLn "***************************** GHC CoreBinds ***************************"
       -- putStrLn $ showPpr orig_cbs
       liftM (fromMaybe err . snd) $ initDs hscEnv m grEnv tEnv emptyFamInstEnv act
  where m = mgi_module modGuts
        grEnv = mgi_rdr_env modGuts
        tEnv = modGutsTypeEnv modGuts
        -- normalise every top-level binding, concatenating the results
        act = liftM concat $ mapM (normalizeTopBind expandFlag emptyVarEnv) orig_cbs
        -- the recursive-binding transformation runs first
        orig_cbs = transformRecExpr $ mgi_binds modGuts
        err = errorstar "anormalize fails!"

-- | Build a GHC 'TypeEnv' from the module's binders, type
-- constructors and family instances.
modGutsTypeEnv mg = typeEnvFromEntities ids tcs fis
  where ids = bindersOfBinds (mgi_binds mg)
        tcs = mgi_tcs mg
        fis = mgi_fam_insts mg
------------------------------------------------------------------
----------------- Actual Normalizing Functions -------------------
------------------------------------------------------------------
-- Can't make the below default for normalizeBind as it
-- fails tests/pos/lets.hs due to GHCs odd let-bindings
-- | Normalise one top-level binding.  For non-recursive bindings,
-- floated lets stay inside the body ('stitch'); for recursive groups
-- all bindings accumulated in the state are re-emitted at top level.
normalizeTopBind :: Bool -> VarEnv Id -> Bind CoreBndr -> DsMonad.DsM [CoreBind]
normalizeTopBind expandFlag γ (NonRec x e)
  = do e' <- runDsM $ evalStateT (stitch γ e) (DsST expandFlag [])
       return [normalizeTyVars $ NonRec x e']
normalizeTopBind expandFlag γ (Rec xes)
  = do xes' <- runDsM $ execStateT (normalizeBind γ (Rec xes)) (DsST expandFlag [])
       return $ map normalizeTyVars (st_binds xes')

-- Rename the binder's quantified type variables so they agree with
-- the type variables of the (eta-expanded) right-hand side; recursive
-- groups are handled componentwise.
-- NOTE(review): no type signature in the original; kept as-is.
normalizeTyVars (NonRec x e) = NonRec (setVarType x t') $ normalizeForAllTys e
  where t' = subst msg as as' bt
        msg = "WARNING unable to renameVars on " ++ showPpr x
        as' = fst $ splitForAllTys $ exprType e
        (as, bt) = splitForAllTys (varType x)
normalizeTyVars (Rec xes) = Rec xes'
  where nrec = normalizeTyVars <$> ((\(x, e) -> NonRec x e) <$> xes)
        xes' = (\(NonRec x e) -> (x, e)) <$> nrec

-- Substitute the new type variables for the old ones; when the
-- variable counts disagree, fall back to the original quantifiers
-- with a trace warning.
subst msg as as' bt
  | length as == length as'
  = mkForAllTys as' $ substTy su bt
  | otherwise
  = trace msg $ mkForAllTys as bt
  where su = mkTopTvSubst $ zip as (mkTyVarTys as')
-- | eta-expand CoreBinds with quantified types
-- | eta-expand CoreBinds with quantified types
normalizeForAllTys :: CoreExpr -> CoreExpr
normalizeForAllTys e = case e of
  -- already a type lambda: leave untouched
  Lam b _ | isTyVar b
    -> e
  _ -> mkLams tvs (mkTyApps e (map mkTyVarTy tvs))
  where
    (tvs, _) = splitForAllTys (exprType e)

-- | Thin wrapper around GHC's desugaring monad, so we can derive the
-- required instances.
newtype DsM a = DsM {runDsM :: DsMonad.DsM a}
              deriving (Functor, Monad, MonadUnique, Applicative)

-- | Normalisation state: the DEFAULT-expansion flag and the bindings
-- floated out so far.
data DsST = DsST { st_expandflag :: Bool
                 , st_binds :: [CoreBind]
                 }

-- | The normalisation monad: 'DsST' state threaded over 'DsM'.
type DsMW = StateT DsST DsM
------------------------------------------------------------------
-- | Normalise a binding, floating the resulting bindings into the
-- state rather than returning them.
normalizeBind :: VarEnv Id -> CoreBind -> DsMW ()
------------------------------------------------------------------
normalizeBind γ (NonRec x e)
  = do e' <- normalize γ e
       add [NonRec x e']
normalizeBind γ (Rec xes)
  = do es' <- mapM (stitch γ) es
       add [Rec (zip xs es')]
  where (xs, es) = unzip xes
--------------------------------------------------------------------
-- | Normalise an expression to a \"name\": atoms (variables, small
-- literals, types) pass through, while big-integer literals,
-- coercions and compound expressions are let-bound to a fresh
-- variable that is returned in their place.
normalizeName :: VarEnv Id -> CoreExpr -> DsMW CoreExpr
--------------------------------------------------------------------
-- normalizeNameDebug γ e
--   = liftM (tracePpr ("normalizeName" ++ showPpr e)) $ normalizeName γ e
normalizeName _ e@(Lit (LitInteger _ _))
  = normalizeLiteral e
normalizeName _ e@(Tick _ (Lit (LitInteger _ _)))
  = normalizeLiteral e
-- variables are renamed through the environment
normalizeName γ (Var x)
  = return $ Var (lookupWithDefaultVarEnv γ x x)
normalizeName _ e@(Type _)
  = return e
normalizeName _ e@(Lit _)
  = return e
-- coercions always get a fresh binder
normalizeName _ e@(Coercion _)
  = do x <- lift $ freshNormalVar $ exprType e
       add [NonRec x e]
       return $ Var x
-- ticks are preserved around the normalised payload
normalizeName γ (Tick n e)
  = do e' <- normalizeName γ e
       return $ Tick n e'
-- fallback: fully normalise and let-bind
normalizeName γ e
  = do e' <- normalize γ e
       x <- lift $ freshNormalVar $ exprType e
       add [NonRec x e']
       return $ Var x
-- | Append floated bindings to the state.
add :: [CoreBind] -> DsMW ()
add w = modify $ \s -> s{st_binds = st_binds s++w}

---------------------------------------------------------------------
-- | Let-bind a literal to a fresh variable and return that variable.
normalizeLiteral :: CoreExpr -> DsMW CoreExpr
---------------------------------------------------------------------
normalizeLiteral e =
  do x <- lift $ freshNormalVar (exprType e)
     add [NonRec x e]
     return $ Var x

-- | A fresh system-local variable carrying the ANF name prefix.
freshNormalVar :: Type -> DsM Id
freshNormalVar = mkSysLocalM (symbolFastString anfPrefix)
---------------------------------------------------------------------
-- | The main A-normalisation traversal: subexpressions in argument
-- and scrutinee position are reduced to names via 'normalizeName',
-- let-bindings are floated into the state, and \"wild\" case binders
-- are renamed to avoid shadowing.
normalize :: VarEnv Id -> CoreExpr -> DsMW CoreExpr
---------------------------------------------------------------------
normalize γ (Lam x e)
  = do e' <- stitch γ e
       return $ Lam x e'
normalize γ (Let b e)
  = do normalizeBind γ b
       normalize γ e
 -- Need to float bindings all the way up to the top
 -- Due to GHCs odd let-bindings (see tests/pos/lets.hs)
normalize γ (Case e x t as)
  = do n <- normalizeName γ e
       x' <- lift $ freshNormalVar τx -- rename "wild" to avoid shadowing
       let γ' = extendVarEnv γ x x'
       as' <- forM as $ \(c, xs, e') -> liftM (c, xs,) (stitch γ' e')
       flag <- st_expandflag <$> get
       as'' <- lift $ expandDefaultCase flag τx as'
       return $ Case n x' t as''
  where τx = varType x
normalize γ (Var x)
  = return $ Var (lookupWithDefaultVarEnv γ x x)
normalize _ e@(Lit _)
  = return e
normalize _ e@(Type _)
  = return e
normalize γ (Cast e τ)
  = do e' <- normalizeName γ e
       return $ Cast e' τ
-- applications: the function is normalised, the argument becomes a name
normalize γ (App e1 e2)
  = do e1' <- normalize γ e1
       n2 <- normalizeName γ e2
       return $ App e1' n2
normalize γ (Tick n e)
  = do e' <- normalize γ e
       return $ Tick n e'
normalize _ (Coercion c)
  = return $ Coercion c
-- | Normalise an expression and wrap the bindings floated during its
-- normalisation as local lets, restoring the outer binding state
-- afterwards.
stitch :: VarEnv Id -> CoreExpr -> DsMW CoreExpr
stitch γ e
  = do bs' <- get
       -- start a fresh local binding list
       modify $ \s -> s {st_binds = []}
       e' <- normalize γ e
       bs <- st_binds <$> get
       -- restore the outer state; keep the local bindings as lets
       put bs'
       return $ mkCoreLets bs e'

----------------------------------------------------------------------------------
-- | Replace a DEFAULT case alternative with explicit constructor
-- alternatives: always when the flag is set, otherwise only when
-- exactly one constructor is missing from the explicit alternatives.
expandDefaultCase :: Bool -> Type -> [(AltCon, [Id], CoreExpr)] -> DsM [(AltCon, [Id], CoreExpr)]
----------------------------------------------------------------------------------
expandDefaultCase flag tyapp zs@((DEFAULT, _ ,_) : _) | flag
  = expandDefaultCase' tyapp zs
expandDefaultCase _ tyapp@(TyConApp tc _) z@((DEFAULT, _ ,_):dcs)
  = case tyConDataCons_maybe tc of
      -- ds': constructors not covered by the explicit alternatives
      Just ds -> do let ds' = ds \\ [ d | (DataAlt d, _ , _) <- dcs]
                    if (length ds') == 1
                      then expandDefaultCase' tyapp z
                      else return z
      Nothing -> return z --
expandDefaultCase _ _ z
  = return z

-- | Unconditionally replace the DEFAULT alternative by one
-- alternative per missing constructor, each sharing the DEFAULT body.
expandDefaultCase' (TyConApp tc argτs) z@((DEFAULT, _ ,e) : dcs)
  = case tyConDataCons_maybe tc of
      Just ds -> do let ds' = ds \\ [ d | (DataAlt d, _ , _) <- dcs]
                    dcs' <- forM ds' $ cloneCase argτs e
                    return $ sortCases $ dcs' ++ dcs
      Nothing -> return z --
expandDefaultCase' _ z
  = return z

-- | An alternative for constructor @d@ with fresh binders for its
-- instantiated argument types and the DEFAULT body @e@.
cloneCase argτs e d
  = do xs <- mapM freshNormalVar $ dataConInstArgTys d argτs
       return (DataAlt d, xs, e)

-- | Case alternatives must be kept in 'AltCon' order.
sortCases = sortBy (\x y -> cmpAltCon (fst3 x) (fst3 y))
| Kyly/liquidhaskell | src/Language/Haskell/Liquid/ANFTransform.hs | bsd-3-clause | 9,457 | 0 | 18 | 2,617 | 2,762 | 1,405 | 1,357 | 185 | 3 |
module Language.Java.Paragon.TypeCheck.TcTypeDecl where
import Language.Java.Paragon.Syntax
import Language.Java.Paragon.TypeCheck.Monad
import Language.Java.Paragon.TypeCheck.Types
import Language.Java.Paragon.TypeCheck.Locks
import Language.Java.Paragon.TypeCheck.TcDecl
import Language.Java.Paragon.TypeCheck.TcExp
-----------------------------------------
-- TODO: The module structure needs refactoring
-- | Type-check a top-level type declaration.
-- NOTE(review): only class declarations are handled; any other
-- 'TypeDecl' constructor hits a pattern-match failure — confirm
-- whether interface declarations are expected here.
typeCheckTd :: TypeDecl -> TcBase ()
typeCheckTd (ClassTypeDecl cd) = typeCheckCd cd
-- | Type-check a class declaration by checking every member
-- declaration of its body.
typeCheckCd :: ClassDecl -> TcBase ()
typeCheckCd (ClassDecl ms i tps _super _impls (ClassBody decls)) =
  -- Bug fix: the original read @mapM_ typeCheckDecl@ without applying
  -- it to 'decls', so the body was ill-typed and the member
  -- declarations were never checked.
  mapM_ typeCheckDecl decls
-- | Skolemise a type declaration into a 'TcType'.
-- NOTE(review): unimplemented stub; calling it crashes.
skolemTypeDecl :: TypeDecl -> TcType
skolemTypeDecl = undefined
------------------------------------------
-- | Skolemise the type parameters of class @i@: every parameter
-- becomes a rigid type argument, returned both inside the resulting
-- class type and paired with its originating parameter.
skolemType :: Ident -> [TypeParam] -> (TcType, [(TypeParam,TcTypeArg)])
skolemType i tps = (clsTypeWArg [(i, skolemArgs)], tps `zip` skolemArgs)
  where skolemArgs = map skolemParam tps
-- | Turn a single type parameter into a rigid (skolem) argument named
-- after the parameter itself.  Type, actor and policy parameters all
-- become class-type arguments; lock-state parameters become a
-- singleton lock-state with a lock variable.
skolemParam :: TypeParam -> TcTypeArg
skolemParam tp = case tp of
                   TypeParam i _ -> TcActualType (TcClsRefT (TcClassT [(i,[])]))
                   ActorParam i -> TcActualType (TcClsRefT (TcClassT [(i,[])]))
                   PolicyParam i -> TcActualType (TcClsRefT (TcClassT [(i,[])]))
                   LockStateParam i -> TcActualLockState [TcLockVar i]
| bvdelft/parac2 | src/Language/Java/Paragon/TypeCheck/TcTypeDecl.hs | bsd-3-clause | 1,339 | 0 | 15 | 270 | 380 | 210 | 170 | 24 | 4 |
-- | Print the canonical greeting to stdout.
main :: IO ()
main = putStrLn "Hello world!"
module Dotnet.System.MarshalByRefObject where
import Dotnet
import qualified Dotnet.System.Object
-- | Phantom tag for the .NET @System.MarshalByRefObject@ class.
data MarshalByRefObject_ a
-- | An object of (a subclass of) @System.MarshalByRefObject@.
type MarshalByRefObject a = Dotnet.System.Object.Object (MarshalByRefObject_ a)
| FranklinChen/Hugs | dotnet/lib/Dotnet/System/MarshalByRefObject.hs | bsd-3-clause | 207 | 0 | 7 | 20 | 41 | 27 | 14 | -1 | -1 |
--------------------------------------------------------------------------------
{-# LANGUAGE OverloadedStrings #-}
module Hakyll.Web.Template.Context.Tests
( tests
) where
--------------------------------------------------------------------------------
import Test.Framework (Test, testGroup)
import Test.Framework.Providers.HUnit (testCase)
import Test.HUnit (Assertion, (@=?))
--------------------------------------------------------------------------------
import Hakyll.Core.Compiler
import Hakyll.Core.Identifier
import Hakyll.Core.Provider
import Hakyll.Core.Store (Store)
import Hakyll.Web.Template.Context
import TestSuite.Util
--------------------------------------------------------------------------------
-- | Test group for "Hakyll.Web.Template.Context".
tests :: Test
-- Fixed stale group label: it previously read
-- "Hakyll.Core.Template.Context.Tests" although this module is
-- Hakyll.Web.Template.Context.Tests.
tests = testGroup "Hakyll.Web.Template.Context.Tests"
    [ testCase "testDateField" testDateField
    ]
--------------------------------------------------------------------------------
-- | 'dateField' must render the date taken from an item's metadata or
-- its filename prefix with the supplied format string.
testDateField :: Assertion
testDateField = do
    store <- newTestStore
    provider <- newTestProvider store
    -- date taken from the metadata of example.md
    date1 <- testContextDone store provider "example.md" "date" $
        dateField "date" "%B %e, %Y"
    date1 @=? "October 22, 2012"
    -- date parsed from the post's filename prefix
    date2 <- testContextDone store provider
        "posts/2010-08-26-birthday.md" "date" $
        dateField "date" "%B %e, %Y"
    date2 @=? "August 26, 2010"
    cleanTestEnv

--------------------------------------------------------------------------------
-- | Evaluate a context lookup for @key@ on the given item and return
-- the resulting string; list-valued fields are a test error.
testContextDone :: Store -> Provider -> Identifier -> String
                -> Context String -> IO String
testContextDone store provider identifier key context =
    testCompilerDone store provider identifier $ do
        item <- getResourceBody
        cf <- unContext context key [] item
        case cf of
            StringField str -> return str
            ListField _ _ -> error $
                "Hakyll.Web.Template.Context.Tests.testContextDone: " ++
                "Didn't expect ListField"
| Minoru/hakyll | tests/Hakyll/Web/Template/Context/Tests.hs | bsd-3-clause | 2,100 | 0 | 12 | 491 | 340 | 180 | 160 | 38 | 2 |
{-# LANGUAGE TypeFamilies, QuasiQuotes, TemplateHaskell, MultiParamTypeClasses, OverloadedStrings #-}
module YesodCoreTest.Ssl ( sslOnlySpec, unsecSpec, sameSiteSpec ) where
import qualified YesodCoreTest.StubSslOnly as Ssl
import qualified YesodCoreTest.StubLaxSameSite as LaxSameSite
import qualified YesodCoreTest.StubStrictSameSite as StrictSameSite
import qualified YesodCoreTest.StubUnsecured as Unsecured
import Yesod.Core
import Test.Hspec
import Network.Wai
import Network.Wai.Test
import qualified Data.ByteString.Char8 as C8
import qualified Web.Cookie as Cookie
import qualified Data.List as DL
-- | Predicate over a parsed @Set-Cookie@ value.
type CookieSpec = Cookie.SetCookie -> Bool
-- | Assertion run against the response obtained from a test request.
type ResponseExpectation = SResponse -> Session ()
-- | Request the home route of @app@ and run @assertion@ on the response.
homeFixtureFor :: YesodDispatch a => a -> ResponseExpectation -> IO ()
homeFixtureFor app assertion =
    toWaiApp app >>= runSession (request defaultRequest >>= assertion)
-- | Assert that the response sets exactly one cookie with the given name and
-- that this cookie satisfies the supplied predicate. Zero or multiple
-- matching cookies both count as a test failure.
cookieShouldSatisfy :: String -> CookieSpec -> ResponseExpectation
cookieShouldSatisfy name spec response = liftIO $
    case matching of
        []  -> expectationFailure $
                   "Expected a cookie named " ++ name ++ " but none is set"
        [c] -> c `shouldSatisfy` spec
        _   -> expectationFailure $
                   "Expected one cookie named " ++ name ++
                   " but found more than one"
  where
    wanted = C8.pack name

    -- All cookies whose name matches the one we are looking for.
    matching = [ c | c <- allCookies, Cookie.setCookieName c == wanted ]

    -- Every Set-Cookie header of the response, parsed.
    allCookies =
        [ Cookie.parseSetCookie v
        | (h, v) <- simpleHeaders response
        , h == "Set-Cookie"
        ]
-- | Behaviour with @sslOnly@ enabled: an HSTS header on every response and a
-- Secure flag on the session cookie.
sslOnlySpec :: Spec
sslOnlySpec = describe "A Yesod application with sslOnly on" $ do
it "serves a Strict-Transport-Security header in all responses" $
atHome $ assertHeader "Strict-Transport-Security"
"max-age=7200; includeSubDomains"
it "sets the Secure flag on its session cookie" $
atHome $ "_SESSION" `cookieShouldSatisfy` Cookie.setCookieSecure
where
atHome = homeFixtureFor Ssl.App
-- | Behaviour with @sslOnly@ disabled: no HSTS header and no Secure flag on
-- the session cookie.
unsecSpec :: Spec
unsecSpec = describe "A Yesod application with sslOnly off" $ do
    -- The single-statement `do` blocks after `it ... $` were redundant
    -- (hlint: "Redundant do"); removed.
    it "never serves a Strict-Transport-Security header" $
        atHome $ assertNoHeader "Strict-Transport-Security"
    it "does not set the Secure flag on its session cookie" $
        atHome $ "_SESSION" `cookieShouldSatisfy` isNotSecure
  where
    atHome      = homeFixtureFor Unsecured.App
    isNotSecure = not . Cookie.setCookieSecure
-- | SameSite configuration: the session cookie reflects the Lax or Strict
-- setting chosen by the application.
sameSiteSpec :: Spec
sameSiteSpec = describe "A Yesod application" $ do
    it "can set a Lax SameSite option" $
        homeFixtureFor LaxSameSite.App $
            "_SESSION" `cookieShouldSatisfy` hasSameSite Cookie.sameSiteLax
    it "can set a Strict SameSite option" $
        homeFixtureFor StrictSameSite.App $
            "_SESSION" `cookieShouldSatisfy` hasSameSite Cookie.sameSiteStrict
  where
    -- True iff the cookie carries exactly the given SameSite policy.
    hasSameSite policy c = Cookie.setCookieSameSite c == Just policy
| s9gf4ult/yesod | yesod-core/test/YesodCoreTest/Ssl.hs | mit | 3,064 | 0 | 13 | 682 | 658 | 351 | 307 | 64 | 3 |
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
--
-- Stg to C-- code generation: bindings
--
-- (c) The University of Glasgow 2004-2006
--
-----------------------------------------------------------------------------
module StgCmmBind (
cgTopRhsClosure,
cgBind,
emitBlackHoleCode,
pushUpdateFrame, emitUpdateFrame
) where
#include "HsVersions.h"
import StgCmmExpr
import StgCmmMonad
import StgCmmEnv
import StgCmmCon
import StgCmmHeap
import StgCmmProf (curCCS, ldvEnterClosure, enterCostCentreFun, enterCostCentreThunk,
initUpdFrameProf)
import StgCmmTicky
import StgCmmLayout
import StgCmmUtils
import StgCmmClosure
import StgCmmForeign (emitPrimCall)
import MkGraph
import CoreSyn ( AltCon(..), tickishIsCode )
import SMRep
import Cmm
import CmmInfo
import CmmUtils
import CLabel
import StgSyn
import CostCentre
import Id
import IdInfo
import Name
import Module
import ListSetOps
import Util
import BasicTypes
import Outputable
import FastString
import DynFlags
import Control.Monad
#if __GLASGOW_HASKELL__ >= 709
import Prelude hiding ((<*>))
#endif
------------------------------------------------------------------------
-- Top-level bindings
------------------------------------------------------------------------
-- For closures bound at top level, allocate in static space.
-- They should have no free variables.
-- | Generate code for a closure bound at the top level: returns the
-- CgIdInfo under which the binder goes into the environment, paired with
-- the FCode action that actually emits the static closure.
cgTopRhsClosure :: DynFlags
-> RecFlag -- member of a recursive group?
-> Id
-> CostCentreStack -- Optional cost centre annotation
-> StgBinderInfo
-> UpdateFlag
-> [Id] -- Args
-> StgExpr
-> (CgIdInfo, FCode ())
cgTopRhsClosure dflags rec id ccs _ upd_flag args body =
let closure_label = mkLocalClosureLabel (idName id) (idCafInfo id)
cg_id_info = litIdInfo dflags id lf_info (CmmLabel closure_label)
lf_info = mkClosureLFInfo dflags id TopLevel [] upd_flag args
in (cg_id_info, gen_code dflags lf_info closure_label)
where
-- special case for an indirection (f = g). We create an IND_STATIC
-- closure pointing directly to the indirectee. This is exactly
-- what the CAF will eventually evaluate to anyway, we're just
-- shortcutting the whole process, and generating a lot less code
-- (#7308)
--
-- Note: we omit the optimisation when this binding is part of a
-- recursive group, because the optimisation would inhibit the black
-- hole detection from working in that case. Test
-- concurrent/should_run/4030 fails, for instance.
--
gen_code dflags _ closure_label
| StgApp f [] <- body, null args, isNonRec rec
= do
cg_info <- getCgIdInfo f
let closure_rep = mkStaticClosureFields dflags
indStaticInfoTable ccs MayHaveCafRefs
[unLit (idInfoToAmode cg_info)]
emitDataLits closure_label closure_rep
return ()
gen_code dflags lf_info closure_label
= do { -- LAY OUT THE OBJECT
let name = idName id
; mod_name <- getModuleName
; let descr = closureDescription dflags mod_name name
closure_info = mkClosureInfo dflags True id lf_info 0 0 descr
caffy = idCafInfo id
info_tbl = mkCmmInfo closure_info -- XXX short-cut
closure_rep = mkStaticClosureFields dflags info_tbl ccs caffy []
-- BUILD THE OBJECT, AND GENERATE INFO TABLE (IF NECESSARY)
; emitDataLits closure_label closure_rep
; let fv_details :: [(NonVoid Id, VirtualHpOffset)]
(_, _, fv_details) = mkVirtHeapOffsets dflags (isLFThunk lf_info)
(addIdReps [])
-- Don't drop the non-void args until the closure info has been made
; forkClosureBody (closureCodeBody True id closure_info ccs
(nonVoidIds args) (length args) body fv_details)
; return () }
-- Project the literal out of a CmmLit amode; any other amode would be a
-- compiler bug here, since static closures have label amodes.
unLit (CmmLit l) = l
unLit _ = panic "unLit"
------------------------------------------------------------------------
-- Non-top-level bindings
------------------------------------------------------------------------
-- | Compile a (possibly recursive) STG let-binding.
-- See Note [cgBind rec] below for why the recursive case is staged as
-- "collect infos, then generate inits, then bodies".
cgBind :: StgBinding -> FCode ()
cgBind (StgNonRec name rhs)
= do { (info, fcode) <- cgRhs name rhs
; addBindC info
; init <- fcode
; emit init }
-- init cannot be used in body, so slightly better to sink it eagerly
cgBind (StgRec pairs)
= do { r <- sequence $ unzipWith cgRhs pairs
; let (id_infos, fcodes) = unzip r
; addBindsC id_infos
; (inits, body) <- getCodeR $ sequence fcodes
; emit (catAGraphs inits <*> body) }
{- Note [cgBind rec]
Recursive let-bindings are tricky.
Consider the following pseudocode:
let x = \_ -> ... y ...
y = \_ -> ... z ...
z = \_ -> ... x ...
in ...
For each binding, we need to allocate a closure, and each closure must
capture the address of the other closures.
We want to generate the following C-- code:
// Initialization Code
x = hp - 24; // heap address of x's closure
y = hp - 40; // heap address of y's closure
z = hp - 64; // heap address of z's closure
// allocate and initialize x
m[hp-8] = ...
m[hp-16] = y // the closure for x captures y
m[hp-24] = x_info;
// allocate and initialize y
m[hp-32] = z; // the closure for y captures z
m[hp-40] = y_info;
// allocate and initialize z
...
For each closure, we must generate not only the code to allocate and
initialize the closure itself, but also some initialization Code that
sets a variable holding the closure pointer.
We could generate a pair of the (init code, body code), but since
the bindings are recursive we also have to initialise the
environment with the CgIdInfo for all the bindings before compiling
anything. So we do this in 3 stages:
1. collect all the CgIdInfos and initialise the environment
2. compile each binding into (init, body) code
3. emit all the inits, and then all the bodies
We'd rather not have separate functions to do steps 1 and 2 for
each binding, since in practice they share a lot of code. So we
have just one function, cgRhs, that returns a pair of the CgIdInfo
for step 1, and a monadic computation to generate the code in step
2.
The alternative to separating things in this way is to use a
fixpoint. That's what we used to do, but it introduces a
maintenance nightmare because there is a subtle dependency on not
being too strict everywhere. Doing things this way means that the
FCode monad can be strict, for example.
-}
-- | Compile the right-hand side of a single binding; see Note [cgBind rec]
-- above for the two-stage (CgIdInfo, FCode) result.
cgRhs :: Id
-> StgRhs
-> FCode (
CgIdInfo -- The info for this binding
, FCode CmmAGraph -- A computation which will generate the
-- code for the binding, and return an
-- assignent of the form "x = Hp - n"
-- (see above)
)
-- NOTE(review): a constructor RHS is counted with the *thunk* ticky
-- counter here — confirm that this is intentional and not a copy/paste
-- from the closure case.
cgRhs id (StgRhsCon cc con args)
= withNewTickyCounterThunk False (idName id) $ -- False for "not static"
buildDynCon id True cc con args
{- See Note [GC recovery] in compiler/codeGen/StgCmmClosure.hs -}
cgRhs name (StgRhsClosure cc bi fvs upd_flag _srt args body)
= do dflags <- getDynFlags
mkRhsClosure dflags name cc bi (nonVoidIds fvs) upd_flag args body
------------------------------------------------------------------------
-- Non-constructor right hand sides
------------------------------------------------------------------------
mkRhsClosure :: DynFlags -> Id -> CostCentreStack -> StgBinderInfo
-> [NonVoid Id] -- Free vars
-> UpdateFlag
-> [Id] -- Args
-> StgExpr
-> FCode (CgIdInfo, FCode CmmAGraph)
{- mkRhsClosure looks for two special forms of the right-hand side:
a) selector thunks
b) AP thunks
If neither happens, it just calls mkClosureLFInfo. You might think
that mkClosureLFInfo should do all this, but it seems wrong for the
latter to look at the structure of an expression
Note [Selectors]
~~~~~~~~~~~~~~~~
We look at the body of the closure to see if it's a selector---turgid,
but nothing deep. We are looking for a closure of {\em exactly} the
form:
... = [the_fv] \ u [] ->
case the_fv of
con a_1 ... a_n -> a_i
Note [Ap thunks]
~~~~~~~~~~~~~~~~
A more generic AP thunk of the form
x = [ x_1...x_n ] \.. [] -> x_1 ... x_n
A set of these is compiled statically into the RTS, so we just use
those. We could extend the idea to thunks where some of the x_i are
global ids (and hence not free variables), but this would entail
generating a larger thunk. It might be an option for non-optimising
compilation, though.
We only generate an Ap thunk if all the free variables are pointers,
for semi-obvious reasons.
-}
---------- Note [Selectors] ------------------
-- Equation 1/3: selector thunks (see Note [Selectors] above).
-- Matches a single-free-var thunk of the form
--   case the_fv of con a_1 .. a_n -> a_i
-- and compiles it to a standard selector thunk.
mkRhsClosure dflags bndr _cc _bi
[NonVoid the_fv] -- Just one free var
upd_flag -- Updatable thunk
[] -- A thunk
expr
| let strip = snd . stripStgTicksTop (not . tickishIsCode)
, StgCase (StgApp scrutinee [{-no args-}])
_ _ _ _ -- ignore uniq, etc.
(AlgAlt _)
[(DataAlt _, params, _use_mask, sel_expr)] <- strip expr
, StgApp selectee [{-no args-}] <- strip sel_expr
, the_fv == scrutinee -- Scrutinee is the only free variable
, let (_, _, params_w_offsets) = mkVirtConstrOffsets dflags (addIdReps params)
-- Just want the layout
, Just the_offset <- assocMaybe params_w_offsets (NonVoid selectee)
, let offset_into_int = bytesToWordsRoundUp dflags the_offset
- fixedHdrSizeW dflags
, offset_into_int <= mAX_SPEC_SELECTEE_SIZE dflags -- Offset is small enough
= -- NOT TRUE: ASSERT(is_single_constructor)
-- The simplifier may have statically determined that the single alternative
-- is the only possible case and eliminated the others, even if there are
-- other constructors in the datatype. It's still ok to make a selector
-- thunk in this case, because we *know* which constructor the scrutinee
-- will evaluate to.
--
-- srt is discarded; it must be empty
let lf_info = mkSelectorLFInfo bndr offset_into_int (isUpdatable upd_flag)
in cgRhsStdThunk bndr lf_info [StgVarArg the_fv]
---------- Note [Ap thunks] ------------------
-- Equation 2/3: AP thunks (see Note [Ap thunks] above).
-- A thunk of the form x = [x_1..x_n] \.. [] -> f x_1 .. x_n compiled to a
-- pre-built RTS AP thunk, provided all free vars are pointers.
mkRhsClosure dflags bndr _cc _bi
fvs
upd_flag
[] -- No args; a thunk
(StgApp fun_id args)
| args `lengthIs` (arity-1)
&& all (isGcPtrRep . idPrimRep . unsafe_stripNV) fvs
&& isUpdatable upd_flag
&& arity <= mAX_SPEC_AP_SIZE dflags
&& not (gopt Opt_SccProfilingOn dflags)
-- not when profiling: we don't want to
-- lose information about this particular
-- thunk (e.g. its type) (#949)
-- Ha! an Ap thunk
= cgRhsStdThunk bndr lf_info payload
where
lf_info = mkApLFInfo bndr upd_flag arity
-- the payload has to be in the correct order, hence we can't
-- just use the fvs.
payload = StgVarArg fun_id : args
arity = length fvs
---------- Default case ------------------
-- Equation 3/3: the general case. Lay out a fresh dynamic closure for the
-- binding, compile its body in a forked context, and allocate it.
mkRhsClosure dflags bndr cc _ fvs upd_flag args body
= do { let lf_info = mkClosureLFInfo dflags bndr NotTopLevel fvs upd_flag args
; (id_info, reg) <- rhsIdInfo bndr lf_info
; return (id_info, gen_code lf_info reg) }
where
gen_code lf_info reg
= do { -- LAY OUT THE OBJECT
-- If the binder is itself a free variable, then don't store
-- it in the closure. Instead, just bind it to Node on entry.
-- NB we can be sure that Node will point to it, because we
-- haven't told mkClosureLFInfo about this; so if the binder
-- _was_ a free var of its RHS, mkClosureLFInfo thinks it *is*
-- stored in the closure itself, so it will make sure that
-- Node points to it...
; let
is_elem = isIn "cgRhsClosure"
bndr_is_a_fv = (NonVoid bndr) `is_elem` fvs
reduced_fvs | bndr_is_a_fv = fvs `minusList` [NonVoid bndr]
| otherwise = fvs
-- MAKE CLOSURE INFO FOR THIS CLOSURE
; mod_name <- getModuleName
; dflags <- getDynFlags
; let name = idName bndr
descr = closureDescription dflags mod_name name
fv_details :: [(NonVoid Id, ByteOff)]
(tot_wds, ptr_wds, fv_details)
= mkVirtHeapOffsets dflags (isLFThunk lf_info)
(addIdReps (map unsafe_stripNV reduced_fvs))
closure_info = mkClosureInfo dflags False -- Not static
bndr lf_info tot_wds ptr_wds
descr
-- BUILD ITS INFO TABLE AND CODE
; forkClosureBody $
-- forkClosureBody: (a) ensure that bindings in here are not seen elsewhere
-- (b) ignore Sequel from context; use empty Sequel
-- And compile the body
closureCodeBody False bndr closure_info cc (nonVoidIds args)
(length args) body fv_details
-- BUILD THE OBJECT
-- ; (use_cc, blame_cc) <- chooseDynCostCentres cc args body
; let use_cc = curCCS; blame_cc = curCCS
; emit (mkComment $ mkFastString "calling allocDynClosure")
; let toVarArg (NonVoid a, off) = (NonVoid (StgVarArg a), off)
; let info_tbl = mkCmmInfo closure_info
; hp_plus_n <- allocDynClosure (Just bndr) info_tbl lf_info use_cc blame_cc
(map toVarArg fv_details)
-- RETURN
; return (mkRhsInit dflags reg lf_info hp_plus_n) }
-------------------------
-- | Allocate a standard-form thunk (selector or AP thunk): no code is
-- generated for the body, only the object itself is laid out and allocated.
cgRhsStdThunk
:: Id
-> LambdaFormInfo
-> [StgArg] -- payload
-> FCode (CgIdInfo, FCode CmmAGraph)
cgRhsStdThunk bndr lf_info payload
= do { (id_info, reg) <- rhsIdInfo bndr lf_info
; return (id_info, gen_code reg)
}
where
gen_code reg -- AHA! A STANDARD-FORM THUNK
= withNewTickyCounterStdThunk False (idName bndr) $ -- False for "not static"
do
{ -- LAY OUT THE OBJECT
mod_name <- getModuleName
; dflags <- getDynFlags
; let (tot_wds, ptr_wds, payload_w_offsets)
= mkVirtHeapOffsets dflags (isLFThunk lf_info) (addArgReps payload)
descr = closureDescription dflags mod_name (idName bndr)
closure_info = mkClosureInfo dflags False -- Not static
bndr lf_info tot_wds ptr_wds
descr
-- ; (use_cc, blame_cc) <- chooseDynCostCentres cc [{- no args-}] body
; let use_cc = curCCS; blame_cc = curCCS
; tickyEnterStdThunk closure_info
-- BUILD THE OBJECT
; let info_tbl = mkCmmInfo closure_info
; hp_plus_n <- allocDynClosure (Just bndr) info_tbl lf_info
use_cc blame_cc payload_w_offsets
-- RETURN
; return (mkRhsInit dflags reg lf_info hp_plus_n) }
-- | Choose the lambda-form info for a closure: a thunk when there are no
-- arguments, a re-entrant function otherwise.
mkClosureLFInfo :: DynFlags
                -> Id           -- The binder
                -> TopLevelFlag -- True of top level
                -> [NonVoid Id] -- Free vars
                -> UpdateFlag   -- Update flag
                -> [Id]         -- Args
                -> LambdaFormInfo
mkClosureLFInfo dflags bndr top fvs upd_flag args = case args of
    [] -> mkLFThunk (idType bndr) top stripped_fvs upd_flag
    _  -> mkLFReEntrant top stripped_fvs args (mkArgDescr dflags args)
  where
    stripped_fvs = map unsafe_stripNV fvs
------------------------------------------------------------------------
-- The code for closures
------------------------------------------------------------------------
closureCodeBody :: Bool -- whether this is a top-level binding
-> Id -- the closure's name
-> ClosureInfo -- Lots of information about this closure
-> CostCentreStack -- Optional cost centre attached to closure
-> [NonVoid Id] -- incoming args to the closure
-> Int -- arity, including void args
-> StgExpr
-> [(NonVoid Id, ByteOff)] -- the closure's free vars
-> FCode ()
{- There are two main cases for the code for closures.
* If there are *no arguments*, then the closure is a thunk, and not in
normal form. So it should set up an update frame (if it is
shared). NB: Thunks cannot have a primitive type!
* If there is *at least one* argument, then this closure is in
normal form, so there is no need to set up an update frame.
-}
-- Thunk case: no arguments, so the closure is not in normal form; delegate
-- to 'thunkCode', which handles black-holing and the update frame.
closureCodeBody top_lvl bndr cl_info cc _args arity body fv_details
| arity == 0 -- No args i.e. thunk
= withNewTickyCounterThunk (isStaticClosure cl_info) (closureName cl_info) $
emitClosureProcAndInfoTable top_lvl bndr lf_info info_tbl [] $
\(_, node, _) -> thunkCode cl_info fv_details cc node arity body
where
lf_info = closureLFInfo cl_info
info_tbl = mkCmmInfo cl_info
-- Function case: at least one argument, so the closure is in normal form
-- and needs no update frame; emit entry code, slow-entry code, and the body.
closureCodeBody top_lvl bndr cl_info cc args arity body fv_details
= -- Note: args may be [], if all args are Void
withNewTickyCounterFun (closureName cl_info) args $ do {
; let
lf_info = closureLFInfo cl_info
info_tbl = mkCmmInfo cl_info
-- Emit the main entry code
; emitClosureProcAndInfoTable top_lvl bndr lf_info info_tbl args $
\(_offset, node, arg_regs) -> do
-- Emit slow-entry code (for entering a closure through a PAP)
{ mkSlowEntryCode bndr cl_info arg_regs
; dflags <- getDynFlags
; let node_points = nodeMustPointToIt dflags lf_info
node' = if node_points then Just node else Nothing
; loop_header_id <- newLabelC
-- Extend reader monad with information that
-- self-recursive tail calls can be optimized into local
-- jumps. See Note [Self-recursive tail calls] in StgCmmExpr.
; withSelfLoop (bndr, loop_header_id, arg_regs) $ do
{
-- Main payload
; entryHeapCheck cl_info node' arity arg_regs $ do
{ -- emit LDV code when profiling
when node_points (ldvEnterClosure cl_info (CmmLocal node))
-- ticky after heap check to avoid double counting
; tickyEnterFun cl_info
; enterCostCentreFun cc
(CmmMachOp (mo_wordSub dflags)
[ CmmReg (CmmLocal node) -- See [NodeReg clobbered with loopification]
, mkIntExpr dflags (funTag dflags cl_info) ])
; fv_bindings <- mapM bind_fv fv_details
-- Load free vars out of closure *after*
-- heap check, to reduce live vars over check
; when node_points $ load_fvs node lf_info fv_bindings
; void $ cgExpr body
}}}
}
-- Note [NodeReg clobbered with loopification]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- Previously we used to pass nodeReg (aka R1) here. With profiling, upon
-- entering a closure, enterFunCCS was called with R1 passed to it. But since R1
-- may get clobbered inside the body of a closure, and since a self-recursive
-- tail call does not restore R1, a subsequent call to enterFunCCS received a
-- possibly bogus value from R1. The solution is to not pass nodeReg (aka R1) to
-- enterFunCCS. Instead, we pass node, the callee-saved temporary that stores
-- the original value of R1. This way R1 may get modified but loopification will
-- not care.
-- A function closure pointer may be tagged, so we
-- must take it into account when accessing the free variables.
-- | Allocate a local register for one free variable, keeping its closure
-- offset so 'load_fvs' can fetch it later.
bind_fv :: (NonVoid Id, ByteOff) -> FCode (LocalReg, ByteOff)
bind_fv (id, off) = do { reg <- rebindToReg id; return (reg, off) }
-- | Load each free variable from the (possibly tagged) closure pointed to
-- by @node@ into its register.
load_fvs :: LocalReg -> LambdaFormInfo -> [(LocalReg, ByteOff)] -> FCode ()
load_fvs node lf_info = mapM_ (\ (reg, off) ->
do dflags <- getDynFlags
let tag = lfDynTag dflags lf_info
emit $ mkTaggedObjectLoad dflags reg node off tag)
-----------------------------------------
-- The "slow entry" code for a function. This entry point takes its
-- arguments on the stack. It loads the arguments into registers
-- according to the calling convention, and jumps to the function's
-- normal entry point. The function's closure is assumed to be in
-- R1/node.
--
-- The slow entry point is used for unknown calls: eg. stg_PAP_entry
mkSlowEntryCode :: Id -> ClosureInfo -> [LocalReg] -> FCode ()
-- If this function doesn't have a specialised ArgDescr, we need
-- to generate the function's arg bitmap and slow-entry code.
-- Here, we emit the slow-entry code.
-- The slow entry simply loads args into registers and tail-jumps to the
-- fast (local) entry point; functions with a specialised ArgDescr use a
-- shared RTS slow-entry stub instead and need nothing here.
mkSlowEntryCode bndr cl_info arg_regs -- function closure is already in `Node'
| Just (_, ArgGen _) <- closureFunInfo cl_info
= do dflags <- getDynFlags
let node = idToReg dflags (NonVoid bndr)
slow_lbl = closureSlowEntryLabel cl_info
fast_lbl = closureLocalEntryLabel dflags cl_info
-- mkDirectJump does not clobber `Node' containing function closure
jump = mkJump dflags NativeNodeCall
(mkLblExpr fast_lbl)
(map (CmmReg . CmmLocal) (node : arg_regs))
(initUpdFrameOff dflags)
tscope <- getTickScope
emitProcWithConvention Slow Nothing slow_lbl
(node : arg_regs) (jump, tscope)
| otherwise = return ()
-----------------------------------------
-- | Entry code for a thunk: heap check, optional black-holing, update-frame
-- setup, then the body with free variables loaded.
thunkCode :: ClosureInfo -> [(NonVoid Id, ByteOff)] -> CostCentreStack
-> LocalReg -> Int -> StgExpr -> FCode ()
thunkCode cl_info fv_details _cc node arity body
= do { dflags <- getDynFlags
; let node_points = nodeMustPointToIt dflags (closureLFInfo cl_info)
node' = if node_points then Just node else Nothing
; ldvEnterClosure cl_info (CmmLocal node) -- NB: Node always points when profiling
-- Heap overflow check
; entryHeapCheck cl_info node' arity [] $ do
{ -- Overwrite with black hole if necessary
-- but *after* the heap-overflow check
-- NOTE(review): tickyEnterThunk is emitted both here and again
-- below after setupUpdate — looks like double counting; confirm.
; tickyEnterThunk cl_info
; when (blackHoleOnEntry cl_info && node_points)
(blackHoleIt node)
-- Push update frame
; setupUpdate cl_info node $
-- We only enter cc after setting up update so
-- that cc of enclosing scope will be recorded
-- in update frame CAF/DICT functions will be
-- subsumed by this enclosing cc
do { tickyEnterThunk cl_info
; enterCostCentreThunk (CmmReg nodeReg)
; let lf_info = closureLFInfo cl_info
; fv_bindings <- mapM bind_fv fv_details
; load_fvs node lf_info fv_bindings
; void $ cgExpr body }}}
------------------------------------------------------------------------
-- Update and black-hole wrappers
------------------------------------------------------------------------
blackHoleIt :: LocalReg -> FCode ()
-- Only called for closures with no args
-- Node points to the closure
-- Thin wrapper: turns the local register into a CmmExpr and delegates.
blackHoleIt node_reg
= emitBlackHoleCode (CmmReg (CmmLocal node_reg))
-- | Overwrite the closure at @node@ with an eager black hole, when eager
-- black-holing is enabled (and profiling is off).
emitBlackHoleCode :: CmmExpr -> FCode ()
emitBlackHoleCode node = do
dflags <- getDynFlags
-- Eager blackholing is normally disabled, but can be turned on with
-- -feager-blackholing. When it is on, we replace the info pointer
-- of the thunk with stg_EAGER_BLACKHOLE_info on entry.
-- If we wanted to do eager blackholing with slop filling, we'd need
-- to do it at the *end* of a basic block, otherwise we overwrite
-- the free variables in the thunk that we still need. We have a
-- patch for this from Andy Cheadle, but not incorporated yet. --SDM
-- [6/2004]
--
-- Previously, eager blackholing was enabled when ticky-ticky was
-- on. But it didn't work, and it wasn't strictly necessary to bring
-- back minimal ticky-ticky, so now EAGER_BLACKHOLING is
-- unconditionally disabled. -- krc 1/2007
-- Note the eager-blackholing check is here rather than in blackHoleOnEntry,
-- because emitBlackHoleCode is called from CmmParse.
let eager_blackholing = not (gopt Opt_SccProfilingOn dflags)
&& gopt Opt_EagerBlackHoling dflags
-- Profiling needs slop filling (to support LDV
-- profiling), so currently eager blackholing doesn't
-- work with profiling.
when eager_blackholing $ do
-- Store the owning TSO first, then (after a write barrier) overwrite
-- the info pointer with the eager-blackhole info table.
emitStore (cmmOffsetW dflags node (fixedHdrSizeW dflags))
(CmmReg (CmmGlobal CurrentTSO))
emitPrimCall [] MO_WriteBarrier []
emitStore node (CmmReg (CmmGlobal EagerBlackholeInfo))
-- | Decide whether the thunk needs an update frame (or CAF black-holing)
-- before running @body@, and push one if so.
setupUpdate :: ClosureInfo -> LocalReg -> FCode () -> FCode ()
-- Nota Bene: this function does not change Node (even if it's a CAF),
-- so that the cost centre in the original closure can still be
-- extracted by a subsequent enterCostCentre
setupUpdate closure_info node body
| not (lfUpdatable (closureLFInfo closure_info))
= body
| not (isStaticClosure closure_info)
= if not (closureUpdReqd closure_info)
then do tickyUpdateFrameOmitted; body
else do
tickyPushUpdateFrame
dflags <- getDynFlags
let
bh = blackHoleOnEntry closure_info &&
not (gopt Opt_SccProfilingOn dflags) &&
gopt Opt_EagerBlackHoling dflags
-- Use the black-hole variant of the update frame iff the
-- closure was eagerly black-holed above.
lbl | bh = mkBHUpdInfoLabel
| otherwise = mkUpdInfoLabel
pushUpdateFrame lbl (CmmReg (CmmLocal node)) body
| otherwise -- A static closure
= do { tickyUpdateBhCaf closure_info
; if closureUpdReqd closure_info
then do -- Blackhole the (updatable) CAF:
{ upd_closure <- link_caf node True
; pushUpdateFrame mkBHUpdInfoLabel upd_closure body }
else do {tickyUpdateFrameOmitted; body}
}
-----------------------------------------------------------------------------
-- Setting up update frames
-- Push the update frame on the stack in the Entry area,
-- leaving room for the return address that is already
-- at the old end of the area.
--
-- | Push an update frame (info-table label plus updatee) just below the
-- current update-frame offset, then run @body@ with the offset extended.
pushUpdateFrame :: CLabel -> CmmExpr -> FCode () -> FCode ()
pushUpdateFrame lbl updatee body
= do
updfr <- getUpdFrameOff
dflags <- getDynFlags
let
hdr = fixedHdrSize dflags
frame = updfr + hdr + sIZEOF_StgUpdateFrame_NoHdr dflags
--
emitUpdateFrame dflags (CmmStackSlot Old frame) lbl updatee
withUpdFrameOff frame body
-- | Write the two fields of an update frame — the frame's info-table label
-- and the closure to be updated — and emit the profiling initialisation.
emitUpdateFrame :: DynFlags -> CmmExpr -> CLabel -> CmmExpr -> FCode ()
emitUpdateFrame dflags frame lbl updatee = do
let
hdr = fixedHdrSize dflags
off_updatee = hdr + oFFSET_StgUpdateFrame_updatee dflags
--
emitStore frame (mkLblExpr lbl)
emitStore (cmmOffset dflags frame off_updatee) updatee
initUpdFrameProf frame
-----------------------------------------------------------------------------
-- Entering a CAF
--
-- See Note [CAF management] in rts/sm/Storage.c
link_caf :: LocalReg -- pointer to the closure
-> Bool -- True <=> updatable, False <=> single-entry
-> FCode CmmExpr -- Returns amode for closure to be updated
-- This function returns the address of the black hole, so it can be
-- updated with the new value when available.
link_caf node _is_upd = do
{ dflags <- getDynFlags
-- Call the RTS function newCAF, returning the newly-allocated
-- blackhole indirection closure
; let newCAF_lbl = mkForeignLabel (fsLit "newCAF") Nothing
ForeignLabelInExternalPackage IsFunction
; bh <- newTemp (bWord dflags)
; emitRtsCallGen [(bh,AddrHint)] newCAF_lbl
[ (CmmReg (CmmGlobal BaseReg), AddrHint),
(CmmReg (CmmLocal node), AddrHint) ]
False
-- see Note [atomic CAF entry] in rts/sm/Storage.c
; updfr <- getUpdFrameOff
; let target = entryCode dflags (closureInfoPtr dflags (CmmReg (CmmLocal node)))
-- If newCAF returned zero, another thread won the race to enter the
-- CAF, so jump back into the (now updated) closure instead.
; emit =<< mkCmmIfThen
(cmmEqWord dflags (CmmReg (CmmLocal bh)) (zeroExpr dflags))
-- re-enter the CAF
(mkJump dflags NativeNodeCall target [] updfr)
; return (CmmReg (CmmLocal bh)) }
------------------------------------------------------------------------
-- Profiling
------------------------------------------------------------------------
-- For "global" data constructors the description is simply occurrence
-- name of the data constructor itself. Otherwise it is determined by
-- @closureDescription@ from the let binding information.
-- | Build a profiling description for a closure, of the form @\<M.name\>@.
-- Not called for StgRhsCon, which have global info tables built in
-- CgConTbls.hs with a description generated from the data constructor.
closureDescription :: DynFlags
                   -> Module -- Module
                   -> Name   -- Id of closure binding
                   -> String
closureDescription dflags mod_name name =
    showSDocDump dflags (char '<' <> qualified <> char '>')
    -- showSDocDump, because we want to see the unique on the Name.
  where
    qualified
      | isExternalName name = ppr name -- ppr includes the module name prefix
      | otherwise           = pprModule mod_name <> char '.' <> ppr name
| christiaanb/ghc | compiler/codeGen/StgCmmBind.hs | bsd-3-clause | 30,426 | 4 | 24 | 8,989 | 4,622 | 2,417 | 2,205 | 373 | 3 |
-- Test the flag `force-no-intermediates` (issue #4114)
module Main (main) where
import T4114dSub
-- Interface files expected to survive the build, and object files expected
-- to be absent (checked by assertKeep / assertNoKeep in main).
keep, nokeep :: [FilePath]
keep = ["T4114dSub.myhi", "T4114d.myhi"]
nokeep = ["T4114dSub.myo", "T4114d.myo"]
-- Check that the object files are gone, then that the interface files remain.
main :: IO ()
main =
    mapM_ assertNoKeep nokeep >>
    mapM_ assertKeep keep
| ezyang/ghc | testsuite/tests/driver/T4114d.hs | bsd-3-clause | 290 | 0 | 7 | 48 | 76 | 44 | 32 | 9 | 1 |
{-# LANGUAGE KindSignatures, PolyKinds, DataKinds, RankNTypes #-}
module T6081 where
-- | A kind proxy: the single constructor 'KP' carries only kind information.
data KProxy (a :: *) = KP
-- | Class indexed by a kind-polymorphic proxy value.
class KindClass (kp :: KProxy k)
-- Instance at the list kind; exercises kind unification in instance heads
-- (regression test for #6081, per the file name — confirm).
instance KindClass (KP :: KProxy [k])
| urbanslug/ghc | testsuite/tests/polykinds/T6081.hs | bsd-3-clause | 187 | 1 | 8 | 34 | 55 | 31 | 24 | -1 | -1 |
{-# LANGUAGE RankNTypes #-}
module ShouldCompile where
-- | Higher-rank type: a nested forall to the right of an arrow.
f :: forall a. a -> forall b. b -> Int
f = error "urk"
-- Both these should be ok, but an early GHC 6.6 failed
-- (f's nested forall must instantiate to Int -> Int -> Int in either
-- list position).
g1 = [ (+) :: Int -> Int -> Int, f ]
g2 = [ f, (+) :: Int -> Int -> Int ]
| siddhanathan/ghc | testsuite/tests/typecheck/should_compile/tc210.hs | bsd-3-clause | 245 | 0 | 8 | 63 | 85 | 52 | 33 | 6 | 1 |
{-# LANGUAGE MagicHash #-}
-- tests that expFloat# works (had linking problems on Windows)
import GHC.Exts
-- Prints exp(3.45) as a boxed Float; exercises linking of expFloat#.
main = do
print (F# (expFloat# 3.45#))
| wxwxwwxxx/ghc | testsuite/tests/numeric/should_run/expfloat.hs | bsd-3-clause | 150 | 0 | 11 | 27 | 31 | 16 | 15 | 4 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE GADTs #-}
module Types
( Tag(..)
, Key(..)
, ViewCreated(..)
, ViewDestroyed(..)
, OutputCreated(..)
, OutputDestroyed(..)
, OutputResolution(..)
, WindowManager
, Actions
, Dir(..)
, Action(..)
) where
import Control.Monad.Fix
import Data.Dependent.Map hiding (Key,split)
import Data.GADT.Compare.TH
import Data.Set hiding (split)
import Reflex
import Text.XkbCommon
import WLC
-- | GADT tag indexing each event payload type; used with 'DSum' in
-- 'WindowManager' to multiplex heterogeneous events over one stream.
data Tag a where
TKey :: Tag Key
TViewCreated :: Tag ViewCreated
TViewDestroyed :: Tag ViewDestroyed
TOutputCreated :: Tag OutputCreated
TOutputDestroyed :: Tag OutputDestroyed
TOutputResolution :: Tag OutputResolution
-- | A key event: press/release state, the symbol, and held modifiers.
data Key =
Key WLCKeyState
Keysym
(Set WLCModifier)
deriving (Show,Eq)
-- | A view (window) appeared on the given output.
data ViewCreated =
ViewCreated WLCViewPtr WLCOutputPtr
deriving (Show,Eq,Ord)
-- | A view was destroyed.
data ViewDestroyed = ViewDestroyed WLCViewPtr deriving (Show,Eq,Ord)
-- | An output appeared, with its size.
data OutputCreated = OutputCreated WLCOutputPtr WLCSize deriving (Show,Eq,Ord)
-- | An output was destroyed.
data OutputDestroyed = OutputDestroyed WLCOutputPtr deriving (Show,Eq,Ord)
-- | An output changed resolution (carries two sizes — presumably old and
-- new; confirm against the WLC callback).
data OutputResolution = OutputResolution WLCOutputPtr WLCSize WLCSize deriving (Show,Eq,Ord)
-- | A window manager is an FRP network: a stream of tagged input events in,
-- a stream of IO actions to perform out.
type WindowManager t m = (Reflex t,MonadHold t m,MonadFix m) => Event t (DSum Tag) -> m (Event t (IO ()))
-- TH: derive GEq/GCompare for 'Tag' (required for use with DSum/DMap).
deriveGEq ''Tag
deriveGCompare ''Tag
-- | Commands the window manager can issue in response to events: view and
-- output lifecycle, focus/layout manipulation, and spawning programs.
data Action
= InsertView WLCViewPtr
WLCOutputPtr
| FocusView WLCViewPtr
| DestroyView WLCViewPtr
| CreateOutput WLCOutputPtr WLCSize
| DestroyOutput WLCOutputPtr
| SpawnCommand String
| Focus Dir
| Swap Dir
| Output Dir
| Move Dir
| Split
| ViewWorkspace String
| ChangeResolution WLCOutputPtr WLCSize
| Cycle
| MoveViewUp
| Close
-- | A batch of actions to execute in order.
type Actions = [Action]
-- | Direction used by focus/swap/move commands.
data Dir = Up | Down
| cocreature/reactand | src/Types.hs | isc | 1,856 | 0 | 12 | 360 | 523 | 305 | 218 | 66 | 0 |
-- | Sort a list with naive (non-in-place) quicksort: the head is the pivot,
-- and the tail is partitioned into smaller, equal and larger parts which
-- are sorted recursively. Stable for equal elements by construction.
quicksort :: (Ord a) => [a] -> [a]
quicksort [] = []
quicksort (pivot:rest) =
    quicksort smaller ++ (pivot : same) ++ quicksort larger
  where
    smaller = [y | y <- rest, y <  pivot]
    same    = [y | y <- rest, y == pivot]
    larger  = [y | y <- rest, y >  pivot]
| maggy96/haskell | smaller snippets/qs.hs | mit | 212 | 0 | 8 | 54 | 123 | 67 | 56 | 6 | 1 |
{-# OPTIONS_GHC -Wall #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE TypeFamilies #-}
module Observable.Metropolis where
import Control.Comonad
import Control.Comonad.Cofree
import Control.Monad (forever)
import Control.Monad.Primitive (PrimMonad, PrimState)
import Control.Monad.Trans.State.Strict
import Data.Dynamic
import Data.Maybe (fromJust)
import GHC.Prim (RealWorld)
import Observable.Core hiding (Parameter)
import Observable.Distribution
import System.IO.Unsafe (unsafePerformIO)
import qualified System.Random.MWC.Probability as P
import qualified System.Random.MWC as MWC
import Pipes (Producer, Consumer, (>->), runEffect, yield, lift)
import qualified Pipes.Prelude as Pipes
import Prelude hiding (Foldable)
import Data.Functor.Foldable
import Control.Monad.Free
-- | An execution of a program.
type Execution a = Cofree ModelF (Node a, Dynamic, Double)
-- | A transition operator between executions.
type Transition m a = StateT (Chain a) m [Parameter]
-- | State of a Markov chain over executions.
data Chain a = Chain {
chainScore :: Double
-- ^ Score of the current execution (log-domain, judging by the
-- exp/sum arithmetic in 'transition' — confirm).
, chainExecution :: Execution a
-- ^ The current program execution.
}
-- | Initialize a Markov chain over executions.
initializeChain
:: Typeable a
=> Conditioned a
-> Chain a
initializeChain prog = Chain score initd where
initd = execute prog
score = scoreExecution initd
-- | Run the Metropolis algorithm and print positions to stdout.
-- Draws @n@ samples with proposal step size @step@.
metropolis
  :: Typeable a
  => Int
  -> Double
  -> P.Gen RealWorld
  -> Conditioned a
  -> IO ()
metropolis n step gen model = runEffect $
      generate step gen model
  >-> Pipes.take n
  >-> display
-- | Print each sample on its own line, stripping the enclosing list
-- brackets from its 'show' representation.
display :: Show a => Consumer a IO r
display = Pipes.mapM_ (putStrLn . init . drop 1 . show)
-- | Perform a Metropolis transition and yield its result downstream.
-- Runs forever; downstream (e.g. 'Pipes.take') limits the stream.
generate
  :: (PrimMonad m, Typeable a)
  => Double
  -> P.Gen (PrimState m)
  -> Conditioned a
  -> Producer [Parameter] m ()
generate step gen model = flip evalStateT initd . forever $ do
    proposal <- transition step gen
    lift (yield proposal)
  where
    initd = initializeChain model
-- | A Metropolis transition: propose a perturbed execution, score it,
-- and accept or reject it with the Metropolis-Hastings acceptance
-- probability (computed in log space and clamped with @min 0@ before
-- exponentiating).
transition
  :: (PrimMonad m, Typeable a)
  => Double
  -> P.Gen (PrimState m)
  -> Transition (Producer [Parameter] m) a
transition step gen = do
  Chain currentScore current <- get
  let proposal = perturbExecution step current
      proposalScore = scoreExecution proposal
      -- Hastings correction terms for an asymmetric proposal
      currentToProposal = transitionProbability step current proposal
      proposalToCurrent = transitionProbability step proposal current
      ratio = exp . min 0 $
        proposalScore + proposalToCurrent - currentScore - currentToProposal
      -- a NaN ratio (e.g. -inf minus -inf) rejects the proposal outright
      acceptanceProbability
        | isNaN ratio = 0
        | otherwise = ratio
  zc <- lift . lift $ MWC.uniform gen
  if zc < acceptanceProbability
    then do
      put (Chain proposalScore proposal)
      return $! collectPositions proposal
    else return $! collectPositions current
-- | Execute a program.  'extend' applies 'initialize' at every node of
-- the conditioned AST, so each node carries its own sampled value and
-- log score.
execute
  :: Typeable a
  => Conditioned a
  -> Cofree ModelF (Node a, Dynamic, Double)
execute = extend initialize
-- | Return execution information from the root node of a conditioned AST.
-- Unobserved nodes draw a value from their prior (via the system PRNG,
-- see 'unsafeGen'); observed nodes keep their observations and sum the
-- observations' scores; closed nodes score 0.
initialize
  :: Typeable a
  => Conditioned a
  -> (Node a, Dynamic, Double)
initialize w = (ann, z1, p1) where
  (ann, etc) = (extract w, unwrap w)
  (z1, p1) = case ann of
    Unconditioned -> case etc of
      BetaF a b _ ->
        let z = toDyn (unsafeGen $ P.sample (P.beta a b))
        in (z, scoreNode z etc)
      BinomialF n p _ ->
        let z = toDyn (unsafeGen $ P.sample (P.binomial n p))
        in (z, scoreNode z etc)
      StandardF _ ->
        let z = toDyn (unsafeGen $ P.sample P.standard)
        in (z, scoreNode z etc)
      NormalF m s _ ->
        let z = toDyn (unsafeGen $ P.sample (P.normal m s))
        in (z, scoreNode z etc)
      StudentF a b _ ->
        let z = toDyn (unsafeGen $ P.sample (P.t a 1 b))
        in (z, scoreNode z etc)
      GammaF a b _ ->
        let z = toDyn (unsafeGen $ P.sample (P.gamma a b))
        in (z, scoreNode z etc)
      InvGammaF a b _ ->
        let z = toDyn (unsafeGen $ P.sample (P.inverseGamma a b))
        in (z, scoreNode z etc)
      UniformF a b _ ->
        let z = toDyn (unsafeGen $ P.sample (P.uniformR (a, b)))
        in (z, scoreNode z etc)
      DirichletF vs _ ->
        let z = toDyn (unsafeGen $ P.sample (P.dirichlet vs))
        in (z, scoreNode z etc)
      SymmetricDirichletF n a _ ->
        let z = toDyn (unsafeGen $ P.sample (P.symmetricDirichlet n a))
        in (z, scoreNode z etc)
      CategoricalF vs _ ->
        let z = toDyn (unsafeGen $ P.sample (P.categorical vs))
        in (z, scoreNode z etc)
      DiscreteUniformF n _ ->
        let z = toDyn (unsafeGen $ P.sample (P.discreteUniform [1..n]))
        in (z, scoreNode z etc)
      IsoGaussF ms s _ ->
        let z = toDyn (unsafeGen $ P.sample (P.isoGauss ms s))
        in (z, scoreNode z etc)
      PoissonF l _ ->
        let z = toDyn (unsafeGen $ P.sample (P.poisson l))
        in (z, scoreNode z etc)
      ExponentialF l _ ->
        let z = toDyn (unsafeGen $ P.sample (P.exponential l))
        in (z, scoreNode z etc)
      -- an unconditioned node can never be a bare condition marker
      ConditionF -> error "impossible"
    Conditioned cs ->
      (toDyn cs, sum $ map (\z -> scoreNode (toDyn z) etc) cs)
    Closed -> (toDyn (), 0)
-- | Perturb the execution of a program's root node and record the perturbed
-- execution information.  Continuous nodes are jittered with Gaussian
-- noise of scale @step@; discrete (Binomial) nodes move by a uniform
-- step in {-1, 0, 1}.  Only the distributions listed below are
-- perturbed; others raise a pattern-match failure.
perturb
  :: Typeable a
  => Double
  -> Execution a
  -> (Node a, Dynamic, Double)
perturb step w = (ann, z1, p1) where
  ((ann, z0, _), etc) = (extract w, unwrap w)
  u = unsafeGen (P.sample (P.normal 0 step))
  d = unsafeGen (P.sample (P.uniformR (-1, 1 :: Int)))
  (z1, p1) = case ann of
    Unconditioned -> case etc of
      BetaF {} -> let z = toDyn (unsafeFromDyn z0 + u) in (z, scoreNode z etc)
      NormalF {} -> let z = toDyn (unsafeFromDyn z0 + u) in (z, scoreNode z etc)
      UniformF {} -> let z = toDyn (unsafeFromDyn z0 + u) in (z, scoreNode z etc)
      GammaF {} -> let z = toDyn (unsafeFromDyn z0 + u) in (z, scoreNode z etc)
      BinomialF {} -> let z = toDyn (unsafeFromDyn z0 + d) in (z, scoreNode z etc)
    Conditioned cs -> (toDyn cs, sum $ map (\z -> scoreNode (toDyn z) etc) cs)
    Closed -> (toDyn (), 0)
-- | Perturb a program's execution and return the perturbed execution.
perturbExecution
  :: Typeable a
  => Double
  -> Execution a
  -> Execution a
perturbExecution step = extend (perturb step)
-- | Calculate a log probability mass/density for a given distribution and
-- observation.
-- NOTE(review): 'fromJust' crashes when the Dynamic has the wrong type,
-- and the case is incomplete (see FIXME) -- unsupported distributions
-- raise a pattern-match failure.
scoreNode :: Dynamic -> ModelF t -> Double
scoreNode z term = fromJust $ case term of
  BetaF a b _ -> fmap (log . densityBeta a b) (fromDynamic z)
  BinomialF n p _ -> fmap (log . densityBinomial n p) (fromDynamic z)
  NormalF m s _ -> fmap (log . densityNormal m s) (fromDynamic z)
  GammaF a b _ -> fmap (log . densityGamma a b) (fromDynamic z)
  UniformF a b _ -> fmap (log . densityUniform a b) (fromDynamic z)
  -- FIXME remainder (tedious)
  ConditionF -> Just 0
-- | Score the execution of a program: walk the annotated AST, feeding
-- each node's recorded value to its continuation and summing the
-- recorded log scores.
scoreExecution :: Cofree ModelF (Node a, Dynamic, Double) -> Double
scoreExecution = go where
  go ((_, a, s) :< f) = case f of
    BetaF _ _ k -> s + go (k (unsafeFromDyn a))
    BinomialF _ _ k -> s + go (k (unsafeFromDyn a))
    NormalF _ _ k -> s + go (k (unsafeFromDyn a))
    GammaF _ _ k -> s + go (k (unsafeFromDyn a))
    UniformF _ _ k -> s + go (k (unsafeFromDyn a))
    -- FIXME remainder (tedious)
    ConditionF -> s
-- | Calculate a probability of transitioning between two executions.
-- Continuous nodes score the proposed value under a Normal centred at
-- the current value with scale equal to the step size; Binomial nodes
-- use a uniform 1/3 mass (matching the three-valued discrete step in
-- 'perturb').  Unsupported node types contribute 0 (final catch-all).
transitionProbability
  :: Double -- ^ Step size
  -> Execution a -- ^ Execution at current execution
  -> Execution a -- ^ Execution at proposed execution
  -> Double -- ^ Transition probability
transitionProbability s = go where
  go :: Execution a -> Execution a -> Double
  go ((_, z0, _) :< f) ((_, z1, _) :< _) = case f of
    BetaF _ _ k ->
      let (u0, u1) = (unsafeFromDyn z0 :: Double, unsafeFromDyn z1 :: Double)
      in log (densityNormal u0 u1 s) + go (k u0) (k u1)
    NormalF _ _ k ->
      let (u0, u1) = (unsafeFromDyn z0 :: Double, unsafeFromDyn z1 :: Double)
      in log (densityNormal u0 u1 s) + go (k u0) (k u1)
    GammaF _ _ k ->
      let (u0, u1) = (unsafeFromDyn z0 :: Double, unsafeFromDyn z1 :: Double)
      in log (densityNormal u0 u1 s) + go (k u0) (k u1)
    UniformF _ _ k ->
      let (u0, u1) = (unsafeFromDyn z0 :: Double, unsafeFromDyn z1 :: Double)
      in log (densityNormal u0 u1 s) + go (k u0) (k u1)
    BinomialF _ _ k ->
      let (u0, u1) = (unsafeFromDyn z0 :: Int, unsafeFromDyn z1 :: Int)
      in log (1 / 3) + go (k u0) (k u1)
    ConditionF -> 0
    -- FIXME remainder (tedious)
    _ -> 0
-- | Collect the execution trace of a program as a list.  Only
-- 'Unconditioned' (sampled) nodes contribute; any other node tag, or
-- an unsupported distribution, ends the walk via the final catch-all.
collectPositions :: Execution a -> [Parameter]
collectPositions = go where
  go ((Unconditioned, a, _) :< f) = case f of
    BetaF _ _ k -> toParameter a : go (k (unsafeFromDyn a))
    BinomialF _ _ k -> toParameter a : go (k (unsafeFromDyn a))
    GammaF _ _ k -> toParameter a : go (k (unsafeFromDyn a))
    UniformF _ _ k -> toParameter a : go (k (unsafeFromDyn a))
    NormalF _ _ k -> toParameter a : go (k (unsafeFromDyn a))
    ConditionF -> []
    -- FIXME remainder (tedious)
  go _ = []
-- | Safely coerce a dynamic value to a showable parameter.
-- Supports Int and Double payloads only; anything else is an error.
toParameter :: Dynamic -> Parameter
toParameter z
  | dynTypeRep z == typeOf (0 :: Int) = Parameter (unsafeFromDyn z :: Int)
  | dynTypeRep z == typeOf (0 :: Double) = Parameter (unsafeFromDyn z :: Double)
  | otherwise = error "toParameter: unsupported type"
-- | A showable parameter: existentially hides the payload type, keeping
-- only its 'Show' instance.
data Parameter = forall a. Show a => Parameter a
instance Show Parameter where
  show (Parameter s) = show s
-- | Unsafely coerce a dynamic value to some type.
-- Crashes (via 'fromJust') when the wrapped value does not have the
-- requested type; callers are responsible for keeping types in sync.
unsafeFromDyn :: Typeable a => Dynamic -> a
unsafeFromDyn dyn = fromJust (fromDynamic dyn)
-- | Unsafely use the system's PRNG for randomness.
-- NOTE(review): 'unsafePerformIO' makes sampling referentially opaque;
-- callers depend on re-evaluation at every use site, which is fragile
-- under optimisation (no NOINLINE pragma is attached).
unsafeGen :: (P.Gen RealWorld -> IO a) -> a
unsafeGen = unsafePerformIO . P.withSystemRandom . P.asGenIO
| jtobin/observable | Observable/Metropolis.hs | mit | 10,242 | 0 | 23 | 2,672 | 3,845 | 1,944 | 1,901 | 230 | 18 |
{-# LANGUAGE ScopedTypeVariables #-}
module Random
( random
) where
import Test.Tasty (testGroup, TestTree)
import Test.Tasty.QuickCheck (testProperty, Arbitrary, (==>))
import System.Random (Random, randoms, randomRs, mkStdGen)
-- | Property tests for a fractional type's 'Random' instance.  The
-- argument is only a type proxy and is never evaluated.  Checks that
-- randomR stays within its bounds, that equal bounds pin the value,
-- and that unbounded 'randoms' fall in [0, 1).
random :: forall a. (Arbitrary a, Show a, Ord a, Fractional a, Random a) => a -> TestTree
random _ = testGroup "Test Random instance" ts
  where ts = [ testProperty "randomR range"
               (\s l u -> let rs = take 100 (randomRs (l :: a, u) (mkStdGen s))
                          in l <= u ==> (all (>= l) rs && all (<= u) rs))
             , testProperty "randomR zero bounds"
               (\s l -> let rs = take 100 (randomRs (l :: a, l) (mkStdGen s))
                        in all (== l) rs)
             , testProperty "random range"
               (\s -> let rs = take 100 (randoms (mkStdGen s)) :: [a]
                      in all (>= 0) rs && all (< 1) rs)
             ]
| expipiplus1/exact-real | test/Random.hs | mit | 937 | 0 | 19 | 317 | 358 | 197 | 161 | 17 | 1 |
-- | Project Euler 30: sum all numbers (>= 2) that equal the sum of the
-- fifth powers of their digits.  6*9^5 is a safe upper bound: beyond
-- it, any number exceeds its maximum possible digit-power sum.
main :: IO ()
main = print (sum [n | n <- [2 .. 6 * 9 ^ 5], digitPowerSum n == n])
  where
    -- sum of the fifth powers of the decimal digits
    digitPowerSum 0 = 0
    digitPowerSum n = d ^ 5 + digitPowerSum rest
      where (rest, d) = divMod n 10
| dpieroux/euler | 0/0030.hs | mit | 140 | 0 | 12 | 56 | 99 | 50 | 49 | 4 | 2 |
module Constructions
where
import Shape
import Centres
--------------------------------------------------------------------------------
-- | Intersection point of two (infinite) lines, each given by two
-- points, via Cramer's rule on the implicit line equations.
-- NOTE(review): parallel lines give det == 0 and hence a division by
-- zero / non-finite coordinates; callers must avoid that case.
linesIntersect :: Line -> Line -> Point
linesIntersect (Line (Point x1 y1) (Point x2 y2)) (Line (Point x3 y3) (Point x4 y4)) =
    Point x y
  where a = y1-y2
        b = x2-x1
        c = y3-y4
        d = x4-x3
        det = a*d - b*c
        v1 = y1*x2-x1*y2
        v2 = y3*x4-x3*y4
        x = (d*v1-b*v2)/det
        y = (a*v2-c*v1)/det
-- | The three cevians of a triangle through a common point x: each line
-- joins a vertex to the intersection of the line (vertex, x) with the
-- opposite side.
ceviansThrough :: Triangle -> Point -> [Line]
ceviansThrough (Triangle pa pb pc) x = [Line pa ia, Line pb ib, Line pc ic]
  where ia = linesIntersect (Line pa x) (Line pb pc)
        ib = linesIntersect (Line pb x) (Line pc pa)
        ic = linesIntersect (Line pc x) (Line pa pb)
-- | The feet of those cevians (the endpoint of each cevian on the
-- opposite side).
cevianIntersects :: Triangle -> Point -> [Point]
cevianIntersects t x = map e $ ceviansThrough t x
  where e (Line a b) = b
-- | Segments from a common origin to each of the given points.
rays :: Point -> [Point] -> [Line]
rays origin = map (Line origin)

-- | Midpoint of a line segment.
midpoint :: Line -> Point
midpoint (Line (Point ax ay) (Point bx by)) =
    Point ((ax + bx) / 2) ((ay + by) / 2)

-- | Midpoints of the segments from a point to each of the given points.
raysMidpoints :: Point -> [Point] -> [Point]
raysMidpoints origin = map midpoint . rays origin
--------------------------------------------------------------------------------
-- Given two points a and b, return c such that (a,b,c) are 3 corners of an equilateral triangle going counterclockwise.
completeEquilateralTriangle :: Point -> Point -> Point
completeEquilateralTriangle (Point a b) (Point c d) = Point e f
  -- (x, y) is the vector from the first point to the second, rotated by
  -- 60 degrees (cos 60 = 1/2, sin 60 = sqrt 3 / 2)
  where x = (c-a)/2 - (d-b)*(sqrt 3)/2
        y = (c-a)*(sqrt 3)/2 + (d-b)/2
        e = a + x
        f = b + y
-- Bool argument is true for outer, false for inner.
-- | Third corner of the equilateral triangle erected on each side.
-- The triangle's orientation decides which winding puts the new corners
-- on the requested (outer/inner) side.
sideEquilateralTriangleCorners :: Bool -> Triangle -> [Point]
sideEquilateralTriangleCorners isouter (Triangle a b c)
  | isCounterClockwise a b c == isouter = [completeEquilateralTriangle c b, completeEquilateralTriangle a c, completeEquilateralTriangle b a]
  | otherwise = [completeEquilateralTriangle b c, completeEquilateralTriangle c a, completeEquilateralTriangle a b]
-- | Equilateral triangles erected on each side of the given triangle.
sideEquilateralTriangles :: Bool -> Triangle -> [Triangle]
sideEquilateralTriangles isouter t@(Triangle a b c) = [Triangle b c x, Triangle c a y, Triangle a b z]
  where [x,y,z] = sideEquilateralTriangleCorners isouter t
-- | Napoleon's theorem: the centroids of the erected side triangles
-- form an equilateral triangle.
napoleonTriangle :: Bool -> Triangle -> Triangle
napoleonTriangle isouter t = pointsToTriangle $ map centroid $ sideEquilateralTriangles isouter t
-- The "outer" constructions are exactly the @isouter = True@ instances
-- of the side-triangle constructions above; delegate instead of
-- duplicating the geometry (behaviour is unchanged).
outerEquilateralTriangleCorners :: Triangle -> [Point]
outerEquilateralTriangleCorners = sideEquilateralTriangleCorners True

-- | Equilateral triangles erected outward on each side.
outerEquilateralTriangles :: Triangle -> [Triangle]
outerEquilateralTriangles = sideEquilateralTriangles True

-- | Napoleon triangle built on the outward-facing equilateral triangles.
outerNapoleonTriangle :: Triangle -> Triangle
outerNapoleonTriangle = napoleonTriangle True
--------------------------------------------------------------------------------
| clauderichard/Euclaude | Constructions.hs | mit | 3,316 | 0 | 12 | 744 | 1,186 | 614 | 572 | 53 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module BT.ZMQ where
import qualified System.ZMQ3 as ZMQ
import qualified Data.ByteString as B
import Control.Monad.IO.Class (liftIO)
import Data.Pool (withResource, Pool)
import BT.Types
import Control.Monad (when)
import Control.Exception (throw)
-- | Send a request over a pooled ZMQ REQ socket and wait for the reply.
-- Throws 'BackendException' when the backend answers "error".
sendraw :: Data.Pool.Pool (ZMQ.Socket ZMQ.Req) -> B.ByteString -> IO B.ByteString
sendraw conn msg = liftIO $ withResource conn (\s -> do
    liftIO $ ZMQ.send s [] msg
    recv <- liftIO $ ZMQ.receive s
    when (recv == "error") $ do throw $ BackendException "zmq error"
    return recv
    )
-- | Send via the general-purpose socket pool.
send :: PersistentConns -> B.ByteString -> IO B.ByteString
send conn = sendraw (pool conn)
-- | Send via the mining socket pool.
sendmine :: PersistentConns -> B.ByteString -> IO B.ByteString
sendmine conn = sendraw (minePool conn)
| c00w/BitToll | haskell/BT/ZMQ.hs | mit | 798 | 0 | 15 | 153 | 277 | 145 | 132 | 19 | 1 |
module Main where
import Language.Haskell.Exts
import Language.Haskell.Exts.GenericPretty.Instances
import Shared
import System.Environment
import System.IO
import Text.PrettyPrint.GenericPretty
-- | Parse a Haskell source file and pretty-print its AST; on failure,
-- print the parse error instead.
prettyPrintHs :: String -> IO ()
prettyPrintHs path = do
  result <- parseFileWithMode myParseMode path
  case result of
    ParseOk ast -> putStrLn $ pretty ast
    -- srcLoc is unused; only the error message is reported
    ParseFailed srcLoc err -> putStrLn $ show err
-- | Entry point: expects exactly one argument, the file to dump.
main :: IO ()
main = do
  argv <- getArgs
  case argv of
    [path] -> prettyPrintHs path
    _ -> putStrLn "usage: pretty-ast <file.hs>"
| adarqui/haskell-src-exts-genericpretty | examples/pretty-ast.hs | mit | 624 | 0 | 11 | 174 | 162 | 83 | 79 | 19 | 2 |
module HaskChat where
import Network.Socket
import System.IO
import Control.Concurrent
import Control.Concurrent.Chan
import Control.Monad
import Control.Monad.Fix (fix)
import Control.Exception
-- | A chat message tagged with the sender's client number, so clients
-- can skip echoing their own messages.
type Msg = (Int,String)
-- | Open a listening socket on port 4242 and start the accept loop.
-- A broadcast channel fans messages out to every connected client.
main = do
    chan <- newChan
    sock <- socket AF_INET Stream 0
    setSocketOption sock ReuseAddr 1
    -- NOTE(review): bindSocket/iNADDR_ANY are deprecated in newer
    -- versions of the network package; this targets the old API.
    bindSocket sock (SockAddrInet 4242 iNADDR_ANY)
    -- listen backlog: up to 5 pending connections
    listen sock 5
    -- drain the original channel forever so unread broadcasts do not
    -- accumulate (clients read from duplicates, see runConn)
    forkIO $ fix $ \loop -> do
        (_, msg) <- readChan chan
        loop
    mainLoop sock chan 0
-- | Accept connections forever, handing each one to 'runConn' on its
-- own thread with a unique client number.
mainLoop sock chan nr = do
    conn <- accept sock
    forkIO (runConn conn chan nr)
    mainLoop sock chan $! nr+1
-- | Serve one client: greet them, announce their arrival, mirror other
-- clients' broadcasts to their socket, and broadcast their own lines
-- until they type "quit" or disconnect.
runConn (sock, _) chan nr = do
    let broadcast msg = writeChan chan (nr,msg)
    hdl <- socketToHandle sock ReadWriteMode
    hSetBuffering hdl NoBuffering
    hPutStrLn hdl "Hi How Are YOU ?"
    -- the first line is the client's name ('init' strips the trailing
    -- \r of telnet-style line endings)
    name <- liftM init (hGetLine hdl)
    broadcast ("--> " ++ name ++ " entered,")
    chan' <- dupChan chan
    -- reader thread: copy everyone else's messages to this client
    reader <- forkIO $ fix $ \loop -> do
        (nr', line) <- readChan chan'
        when (nr /= nr') $ hPutStrLn hdl line
        loop
    -- writer loop: any exception (e.g. EOF on disconnect) ends the loop
    handle (\(SomeException _ ) -> return ()) $ fix $ \loop -> do
        line <- liftM init (hGetLine hdl)
        case line of
         "quit" -> hPutStrLn hdl "Bye!"
         _ -> do
            broadcast (name ++ ": " ++ line)
            loop
    killThread reader
    broadcast ("<-- " ++ name ++ " left.")
    hClose hdl
| HaskellForCats/HaskellForCats | 30MinHaskell/_Progs/HaskChat01.hs | mit | 1,626 | 0 | 19 | 622 | 522 | 249 | 273 | 45 | 2 |
-- Haskell Revision
-- note
-- whereのありがたみをちゃんと理解できていない
-- Pattern matchの基礎であるMaybe型をしっかりやり直す
-- folrの考え方をやり直す
-- 型シノニム
-- uncurry忘れかけてない?
-- 次回復習はここからrepeat replicate cycle
-- new workはパーサ: 計算機
-- https://skami.iocikun.jp/computer/haskell/web_lecture/for_programmer/parser_calculator.html
import Prelude hiding((*>))
import Data.Maybe
import Data.Char
-- ----------------------------------------
-- Parser : Number parser
-- ----------------------------------------
-- | Parse one or more digits into an Integer.
number :: Parse Integer
number = list1 (check isDigit) `build` read
-- | Run a parser against a string, requiring the whole input to be
-- consumed; the first complete parse (if any) is returned.
parse :: Parse a -> String -> Maybe a
-- parse p = listToMaybe . map fst . (p >* eof)
-- parse p = ((listToMaybe . map fst) . ) $ p >* eof
-- parse p = ((listToMaybe . map fst) . ) $ (>* eof) p
parse = ((listToMaybe . map fst) . ) .( >* eof)
-- Whitespace-separated list of numbers
-- | Skip one or more whitespace characters.
spaces1 :: Parse ()
-- spaces1 " 123 456 789"
-- [((),"123 456 789")]
spaces1 = list1 (check isSpace) `build` const ()
-- | One or more numbers separated by whitespace.
numbers :: Parse [Integer]
numbers = (number >*> list (spaces1 *> number)) `build` uncurry (:)
-- Comma-separated list of numbers
-- | Skip zero or more whitespace characters.
spaces :: Parse()
spaces = list (check isSpace) `build` const ()
-- | A comma with optional surrounding whitespace.
comma :: Parse ()
comma = (spaces >*> char ',' >*> spaces) `build` const ()
-- | One or more numbers separated by commas.
cnumbers :: Parse [Integer]
cnumbers = (number >*> list (comma *> number)) `build` uncurry (:)
-- ----------------------------------------
-- Parser : Parse list
-- ----------------------------------------
-- uncurry (:) (1,[2])
-- > [1,2]
-- (check isDigit >*> succeed "a") "1223"
-- > [(('1',"a"),"223")]
-- (check isDigit >*> succeed "a") `build` uncurry (:) $ "1223"
-- > [("1a","223")]
-- | 'list' parses zero or more occurrences of p; 'list1' one or more.
list, list1 :: Parse a -> Parse [a]
list p = succeed [] `alt` list1 p
list1 p = (p >*> list p) `build` uncurry (:)
-- list1 p = (p >*> (succeed [] `alt` list1 p)) `build` uncurry (:)
-- ----------------------------------------
-- Parser : basic function
-- ----------------------------------------
-- | A parser yields every possible (result, remaining input) pair;
-- an empty list means the parse failed.
type Parse a = String -> [(a, String)]
-- | Always succeed with the given value, consuming no input.
-- succeed 123 "hello"
succeed :: a -> Parse a
succeed v i = [(v, i)]
-- read 1 char
-- | Consume one character if it satisfies the predicate.
-- check isDigit "123"
check :: (Char -> Bool) -> Parse Char
check p (c : cs) | p c = [(c, cs)]
check _ _ = []
-- read target char
-- | Consume exactly the given character.
-- char 'a' "abc"
char :: Char -> Parse Char
char = check . (==)
-- return two parse result
-- | Alternation: collect the results of both parsers.
-- (char 'a' `alt` check isDigit) "123"
-- alt (char 'a') (check isDigit) "123"
-- alt :: (String -> [(a, String)]) -> (String -> [(a, String)]) -> String -> [(a, String)]
alt :: Parse a -> Parse a -> Parse a
(p1 `alt` p2) i = p1 i ++ p2 i
-- Modify parser return
-- | Map a function over the result of a parser.
-- build :: Parse a -> (a -> b) -> String -> [(b, String)]
build :: Parse a -> (a -> b) -> Parse b
build p f i = [(f x, r) | (x, r) <- p i]
-- Parser chain
-- | Sequence two parsers, pairing their results.
-- (>*>) :: (String -> [(a, String)]) -> (String -> [(b, String)]) -> String -> [((a,b), String)]
-- (char 'a' >*> check isDigit) "a123"
(>*>) :: Parse a -> Parse b -> Parse (a,b )
(p1 >*> p2) i = [((x,y), r') | (x, r) <- p1 i, (y, r') <- p2 r]
-- | Sequence, keeping only the first result.
(>*) :: Parse a -> Parse b -> Parse a
-- (p1 >* p2) i = ((p1 >*> p2) `build` fst) i
p1 >* p2 = (p1 >*> p2) `build` fst
-- | Sequence, keeping only the second result (shadows Prelude's *>).
(*>) :: Parse a -> Parse b -> Parse b
p1 *> p2 = (p1 >*> p2) `build` snd
-- check the string EOF
-- eof "" -> at end of input
-- eof "a" -> not at end of input
eof :: Parse ()
-- eof :: String -> [((), String)]
eof "" = [((), "")]
eof _ = []
-- ----------------------------------------
-- concatMap
-- ----------------------------------------
-- Three equivalent spellings of Prelude's concatMap: composed,
-- foldr-based, and explicitly recursive.
concatMap' :: (a -> [b]) -> [a] -> [b]
concatMap' f xs = concat (map f xs)

concatMapF, concatMapRaw :: (a -> [b]) -> [a] -> [b]
-- fold each mapped chunk onto the rest with (++)
concatMapF f = foldr (\x acc -> f x ++ acc) []
-- plain structural recursion over the input list
concatMapRaw _ [] = []
concatMapRaw f (x : xs) = f x ++ concatMapRaw f xs
| threetreeslight/learning-haskell | practice/old/20151018.hs | mit | 4,032 | 2 | 10 | 754 | 1,052 | 596 | 456 | 47 | 1 |
module Fortran where
import Prelude ()
import Common hiding (Integer, Complex)
import qualified C
-- | Letter case used for the mangled Fortran symbol name.
data LetterCase = LowerCase
                | UpperCase
                deriving (Eq, Read, Show)
-- | Map a 'LetterCase' to the corresponding character transformer.
toCase LowerCase = toLower
toCase UpperCase = toUpper
data NameSuffix = NameSuffix String -- ^ fixed suffix
                | Underscore' -- ^ double underscore if name already
                              -- contains underscore; otherwise single
                              -- underscore
                deriving (Eq, Read, Show)
-- | Append the configured suffix to a symbol name.
addSuffix (NameSuffix s) name = name <> s
addSuffix Underscore' name = name <> if '_' `elem` name then "__" else "_"
-- | Performs name-mangling on a Fortran name: applies the configured
-- letter case to every character, then appends the configured suffix.
-- (Was "-- ^"; Haddock's before-declaration marker is "-- |".)
mangle :: Config -> String -> String
mangle Config { nameCase = nameCase, nameSuffix = nameSuffix } name =
  addSuffix nameSuffix (toCase nameCase <$> name)
-- | The Fortran value types this binding generator understands.
data Type = Character
          | Integer
          | Real
          | DoublePrecision
          | Complex
          | DoubleComplex
          deriving (Eq, Read, Show)
-- | Default mapping from a Fortran type to the C type used in the
-- generated declaration.
defaultTypeMap t = C.Type $ case t of
  Character -> "char"
  Integer -> "int"
  Real -> "float"
  DoublePrecision -> "double"
  Complex -> "complex_float"
  DoubleComplex -> "complex_double"
-- | Fortran argument intent.
data Intent = In
            | Out
            | InOut
            deriving (Eq, Read, Show)
-- | A declared argument type: base type plus array/intent attributes.
data DeclType = DeclType
  { dType :: Type
  , dArray :: Bool
  , dIntent :: Intent
  } deriving (Eq, Read, Show)
-- | A scalar, in/out declaration of the given base type (the most
-- permissive default).
declType t = DeclType
  { dType = t
  , dArray = False
  , dIntent = InOut
  }
-- | A Fortran function: name, parameters (type and name), and optional
-- return type (Nothing for subroutines).
data FunctionDecl = FunctionDecl String [(DeclType, String)] (Maybe Type)
                  deriving (Eq, Read, Show)
-- | How a parameter is passed through the C interface.
data ParamConvention = ByValue
                     | ByPointer
                     deriving (Eq, Read, Show)
applyParamConvention ByValue t = t
applyParamConvention ByPointer t = C.Pointer t
-- Fortran passes arguments by reference by default.
defaultParamConventionMap _ = ByPointer
-- | How the result is returned through the C interface.
data ReturnConvention = ReturnValue
                      | FirstParamByPointer
                      deriving (Eq, Read, Show)
-- | Build the C declaration for the chosen return convention; the
-- hidden-result convention prepends a "ret" pointer parameter and
-- makes the C function void.
applyReturnConvention c name params ret = case c of
  ReturnValue -> C.FunctionDecl name params ret
  FirstParamByPointer -> C.FunctionDecl name
                                        ([(C.Pointer $ ret, "ret")] <> params)
                                        C.Void
-- Complex results are conventionally returned via a hidden pointer.
defaultReturnConventionMap Complex = FirstParamByPointer
defaultReturnConventionMap DoubleComplex = FirstParamByPointer
defaultReturnConventionMap _ = ReturnValue
-- | Everything that varies between Fortran compilers/platforms.
data Config = Config
  { nameCase :: LetterCase
  , nameSuffix :: NameSuffix
  , typeMap :: Type -> C.Type
  , paramConventionMap :: Type -> ParamConvention
  , returnConventionMap :: Type -> ReturnConvention
  }
defaultConfig = Config
  { nameCase = LowerCase
  , nameSuffix = NameSuffix "_"
  , typeMap = defaultTypeMap
  , paramConventionMap = defaultParamConventionMap
  , returnConventionMap = defaultReturnConventionMap
  }
-- default configurations for specific compilers (and platforms)
gfortranConfig = defaultConfig
ifortConfig = defaultConfig
-- ifort on Windows upper-cases symbols and adds no suffix
ifortWindowsConfig = defaultConfig
  { nameCase = UpperCase
  , nameSuffix = NameSuffix ""
  }
| Rufflewind/blas-shim | Fortran.hs | mit | 3,330 | 0 | 14 | 1,117 | 737 | 415 | 322 | 80 | 6 |
{-# LANGUAGE DeriveGeneric, TemplateHaskell #-}
import Game.Cosanostra.Glue.Instances.JSON ()
import Game.Cosanostra.Glue.Lenses
import Game.Cosanostra.Game
import Game.Cosanostra.Plan
import Control.Lens
import Control.Monad.State
import qualified Data.ByteString.Char8 as BS
import Data.Maybe
import qualified Data.Map.Strict as M
import Data.Yaml
import System.Environment
import System.IO
main :: IO ()
main = do
  -- paths of the three YAML state files, given on the command line
  [gamePath, actionGroupsPath, minutesPath] <- getArgs
  -- decode failures abort with the parse error message
  game <- either error id <$> decodeEither <$> BS.readFile gamePath
  groups <- either error id <$> decodeEither <$> BS.readFile actionGroupsPath
  minutes <- either error id <$> decodeEither <$> BS.readFile minutesPath
  -- the plan arguments arrive as YAML on stdin
  args <- either error id <$> decodeEither <$> BS.hGetContents stdin
  let m = plan groups
               (args ^. planArgsActionGroup)
               (args ^. planArgsAction)
               (args ^. planArgsSource)
               (fromJust $ args ^. planArgsTargets)
  case evalState (runPlanner m minutes) game of
    Left e -> error (show e)
    Right (_, minutes') -> do
      -- re-run the planner without minutes to obtain the plan itself,
      -- then execute it against the game state
      let (_, p) = either (error . show) id
                          (evalState (runPlanner m M.empty) game)
      let (_, game') = runState (runPlan p) game
      -- force before writing so errors surface prior to truncation
      game' `seq` BS.writeFile gamePath $ encode game'
      minutes' `seq` BS.writeFile minutesPath $ encode minutes'
| rfw/cosanostra | glue/impulse.hs | mit | 1,415 | 0 | 20 | 363 | 434 | 225 | 209 | 33 | 2 |
module Lwch003 where
-- fun with GHC
-- : retab
-- : set +s
-- set expandtab ts=4 ruler number spell linebreak
------------------
-- Lists --
------------------
-- no "any" type lists
-- all lists must be SAME type
-- recursion is how we loop through lists
--------Lists--------------------
{-
---------------------------------
-- lists are one of the primary data structures
-- but list elements most be of the same type.
-- hetrogenious elements go into tuples.
ghci> [] == []
True
ghci> [] == [[]]
False
ghci> [3,4,2] == [3,4,2]
True
ghci> head [5,4,3,2,1]
5
ghci> tail [5,4,3,2,1]
[4,3,2,1]
ghci> last [5,4,3,2,1]
1
ghci> init [5,4,3,2,1]
[5,4,3,2]
ghci> head []
*** Exception: Prelude.head: empty list
ghci> length [5,4,3,2,1]
5
ghci> null [1,2,3]
False
ghci> null []
True
ghci> reverse [5,4,3,2,1]
[1,2,3,4,5]
ghci> take 3 [5,4,3,2,1]
[5,4,3]
ghci> take 5 [1,2]
[1,2]
ghci> take 0 [6,6,6]
[]
ghci> drop 3 [8,4,2,1,5,6]
[1,5,6]
ghci> minimum [8,4,2,1,5,6]
1
ghci> maximum [1,9,2,3,4]
9
ghci> sum [5,2,1,6,3,2,5,7]
31
ghci> product [6,2,1,2]
24
ghci> product [1,2,5,6,7,9,2,0]
0
ghci> 10 `elem` [3,4,5,6]
False
ghci> [1..20]
[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20]
ghci> ['a'..'z']
"abcdefghijklmnopqrstuvwxyz"
ghci> ['K'..'Z']
"KLMNOPQRSTUVWXYZ"
ghci> [2,4..20]
[2,4,6,8,10,12,14,16,18,20]
-- Floating Point weirdness
-- !!!!!!!!!!!!!!!!!!!!!!!!
ghci> [0.1, 0.3 .. 1]
[0.1,0.3,0.5,0.7,0.8999999999999999,1.0999999999999999]
ghci> take 10 (cycle [1,2,3])
[1,2,3,1,2,3,1,2,3,1]
ghci> take 10 (repeat 5)
[5,5,5,5,5,5,5,5,5,5]
ghci> zip [1..] ["apple", "orange", "cherry", "mango"]
[(1,"apple"),(2,"orange"),(3,"cherry"),(4,"mango")]
---------------------------
-}
--------------------------
-- Small cons-cell demos: every list is built from (:) and [].
empty = []
x = [1,2,3]
y = 0 : x
z = empty:empty:empty
xa= 1:(2:(3:(4:[])))
-- (:) is right-associative, so the parentheses above are redundant:
xb= 1:2:3:4:[]
xc :: [Int]
xc = 1:2:3:4:[]
-- :t 0:1:2:3:4:empt
-- Strings are just lists of Char:
str = "I am a list of characters"
str' = 'I':' ':'a':'m':' ':'a':' ':'l':'i':'s':'t':' ':'t':'o':'o':'!':[]
-- length ['a'..'z']
-- zip [1..12] "Call me Ishmael"
-- | Count how many elements of the list equal the given value.
count :: Eq a => a -> [a] -> Int
count target = length . filter (== target)
-- count 'a' "Call me Ishmael"
----------------
-- recursion --
----------------
-- a function calling itself.
-- | Double every element of a list.
-- Pattern matching replaces the partial head/tail calls of the
-- original; behaviour is unchanged (the [] case was already handled
-- first, so head/tail never actually failed).
doubleList :: Num a => [a] -> [a]
doubleList [] = []
doubleList (x:xs) = 2 * x : doubleList xs
-- | Double every element of a list (guard-based variant of the same
-- recursion as 'doubleList').
dubNum :: Num a => [a] -> [a]
dubNum xs
  | null xs   = []
  | otherwise = 2 * head xs : dubNum (tail xs)
-- | Keep only the even elements ("odds gone").
-- 'filter even' is the stdlib spelling of the original manual
-- recursion; @mod n 2 == 0@ and @even n@ agree for every integer,
-- negatives included, so behaviour is unchanged.
oddGone :: Integral a => [a] -> [a]
oddGone = filter even
| HaskellForCats/HaskellForCats | ladiesWhoCodeHaskell/ladiesWhoCodeHaskell003.hs | mit | 2,594 | 8 | 20 | 532 | 432 | 242 | 190 | 24 | 3 |
-- | n! for n >= 0.
--
-- Uses 'product' (a fold with identity 1) instead of the original
-- @foldl1 (*)@, which crashed on the empty list produced by
-- @[1..0]@ -- so @factorial 0@ now correctly returns 1 instead of
-- raising an exception.  (Negative inputs also yield 1 via the empty
-- range rather than crashing.)
factorial :: Integer -> Integer
factorial n = product [1..n]
| butchhoward/xhaskell | factorial_fold.hs | mit | 132 | 0 | 6 | 22 | 32 | 18 | 14 | 2 | 1 |
{-# LANGUAGE GeneralizedNewtypeDeriving, OverloadedStrings #-}
module Component.Name (Name (..)) where
import Core.ECS (Component, stock)
import Data.Typeable
import Data.Text (Text)
import qualified Data.Aeson as J
-- | A display name for an entity, wrapped for type safety.
-- GeneralizedNewtypeDeriving reuses Text's instances; note that the
-- derived Monoid's mempty is @Name ""@ while the ECS default below is
-- @Name "Unnamed"@.
newtype Name = Name Text deriving (Typeable, Monoid, Eq)
instance J.ToJSON Name where
  toJSON (Name n) = J.toJSON n
instance Component Name where
  stock = Name "Unnamed"
| jameshsmith/HRL | Server/Component/Name.hs | mit | 394 | 0 | 8 | 61 | 120 | 69 | 51 | 11 | 0 |
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.BeforeUnloadEvent
(js_setReturnValue, setReturnValue, js_getReturnValue,
getReturnValue, BeforeUnloadEvent, castToBeforeUnloadEvent,
gTypeBeforeUnloadEvent)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSVal(..), JSString)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSVal(..), FromJSVal(..))
import GHCJS.Marshal.Pure (PToJSVal(..), PFromJSVal(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.JSFFI.Generated.Enums
-- Raw JS setter for the event's returnValue property.
foreign import javascript unsafe "$1[\"returnValue\"] = $2;"
        js_setReturnValue :: BeforeUnloadEvent -> JSString -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/BeforeUnloadEvent.returnValue Mozilla BeforeUnloadEvent.returnValue documentation>
setReturnValue ::
               (MonadIO m, ToJSString val) => BeforeUnloadEvent -> val -> m ()
setReturnValue self val
  = liftIO (js_setReturnValue (self) (toJSString val))
-- Raw JS getter for the event's returnValue property.
foreign import javascript unsafe "$1[\"returnValue\"]"
        js_getReturnValue :: BeforeUnloadEvent -> IO JSString
-- | <https://developer.mozilla.org/en-US/docs/Web/API/BeforeUnloadEvent.returnValue Mozilla BeforeUnloadEvent.returnValue documentation>
getReturnValue ::
               (MonadIO m, FromJSString result) => BeforeUnloadEvent -> m result
getReturnValue self
  = liftIO (fromJSString <$> (js_getReturnValue (self)))
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
module Data.Track.Commitment where
import Control.DeepSeq
import Data.Aeson
import Data.Data
import Data.Text
import Data.Time
import Data.Track.Rules
import GHC.Generics
-- | A tracked commitment: a promise from a debtor to a creditor.
data Commitment
    = Commitment
    { commIssueTime :: UTCTime -- ^ when the commitment was made
    , commEstimatedResolveTime :: UTCTime -- ^ promised resolution time
    , commResolveTime :: Maybe UTCTime -- ^ actual resolution, if resolved
    , commCreditor :: Text
    , commDebtor :: Text
    , commDescription :: Text
    }
    deriving (Eq, Show, Read, Data, Typeable, Generic)
instance NFData Commitment
-- JSON codecs use the shared conventions from Data.Track.Rules.
instance ToJSON Commitment where toJSON = trackToJSON
instance FromJSON Commitment where parseJSON = trackParseJSON
| AndrewRademacher/track | src/Data/Track/Commitment.hs | mit | 831 | 0 | 9 | 277 | 153 | 90 | 63 | 22 | 0 |
type Subject = String
type Verb = String
type Object = String
data Sentence =
  Sentence Subject Verb Object
  deriving (Eq, Show)
-- NOTE(review): contrary to the original comment, this DOES typecheck.
-- Applying only two of the three arguments is ordinary partial
-- application: s1 :: Object -> Sentence (a function, not a Sentence).
s1 = Sentence "dogs" "drool"
-- All 3 arguments applied, so s2 :: Sentence
s2 = Sentence "Julie" "loves" "dogs"
| Numberartificial/workflow | haskell-first-principles/haskell-from-first-principles-master/06/06.14.04-does-it-typecheck4.hs | mit | 314 | 0 | 6 | 61 | 67 | 38 | 29 | 8 | 1 |
-- Das Module \hsSource{Circuit.Splice} bietet nach außen hin nur eine
-- Funktion an, nämlich \hsSource{splice}. Diese Funktion führt zwei
-- Schaltungen zu einer neuen zusammen. Dabei ist noch nicht
-- festgelegt, wie dieses Zusammenführen tatsächlich aussieht.
module System.ArrowVHDL.Circuit.Splice
where
-- Verwendet werden die Standard-Definitionen, sowie eine Sensor und
-- einer Worker Funktion.
import Data.List (nub)
import System.ArrowVHDL.Circuit.Graphs
import System.ArrowVHDL.Circuit.Descriptor
import System.ArrowVHDL.Circuit.Workers (alterCompIDs)
import System.ArrowVHDL.Circuit.Sensors (maxCompID)
-- Although two functions appear here, 'splice' is regarded as a single
-- unit: 'splice'' carries the actual functionality, while 'splice' is the
-- public name, which additionally performs a basic sanity check (a
-- 'NoDescriptor' argument is passed through unchanged).
-- 'splice' is handed a rewire function. That function encodes the logic
-- by which the two circuits are "wired" together — e.g. sequentially or
-- in parallel. 'splice' also expects the two circuits that are to be
-- merged. They are connected in the chosen fashion, the remaining "wires"
-- are routed to the outside, a new name is generated, and the resulting
-- circuit is returned.
splice :: ((CircuitDescriptor -> CircuitDescriptor -> ([Edge], (Pins, Pins))), String)
       -> CircuitDescriptor -> CircuitDescriptor -> CircuitDescriptor
splice _ sg NoDescriptor = sg
splice _ NoDescriptor sg = sg
splice (rewire, s) cd_f cd_g = splice' (rewire, s) cd_f cd_g
-- | Worker behind 'splice': builds the combined combinatorial node. The
-- label is the concatenation of both sub-labels joined by the operator
-- string @s@; cycle and space estimates are summed.
splice' :: ((CircuitDescriptor -> CircuitDescriptor -> ([Edge], (Pins, Pins))), String)
       -> CircuitDescriptor -> CircuitDescriptor -> CircuitDescriptor
splice' (rewire, s) cd_f cd_g
    = MkCombinatorial
      { nodeDesc = MkNode
          { label = (label.nodeDesc $ cd_f') ++ s ++ (label.nodeDesc $ cd_g')
          , nodeId = 0
          , sinks = srcs
          , sources = snks
          }
      , nodes = cd_f': cd_g' : []
      , edges = es
      , cycles = (cycles cd_f) + (cycles cd_g)
      , space = (space cd_f) + (space cd_g)
      }
    where cd_f' = alterCompIDs 1 cd_f -- shift f's component ids away from 0 (the new parent)
          cd_g' = alterCompIDs (maxCompID cd_f' +1) cd_g -- and g's past f's, so ids never clash
          (es, (srcs, snks)) = rewire cd_f' cd_g'
-- \subsection{Wiring variants} The 'splice' function from module
-- \ref{mod:Circuit.Splice} uses one of the following rewire functions for
-- the "wiring". Each is paired with a string that is later used to build
-- labels / debug output.
-- 'connect' joins two circuits into one, wiring them sequentially; the
-- string is the same operator known from Arrow notation.
connect :: CircuitDescriptor -> CircuitDescriptor -> CircuitDescriptor
connect = splice (seqRewire, ">>>")
-- Besides sequential composition, circuits can also be combined in
-- parallel with 'combine'. The operator symbol from Arrow notation is
-- used here as well.
-- %%% TODO : combine = frame???
combine :: CircuitDescriptor -> CircuitDescriptor -> CircuitDescriptor
combine = splice (parRewire, "&&&")
-- 'dupCombine' is a variant of 'combine': the inputs are first
-- duplicated and then wired to both circuits in parallel.
dupCombine :: CircuitDescriptor -> CircuitDescriptor -> CircuitDescriptor
dupCombine = splice (dupParRewire, ">2>")
-- To realise a delay in hardware the data must be stored. This is done
-- with a register: after the desired number of cycles the value can be
-- read back out of the register and used further in the circuit.
-- %%% TODO : Schaltwerke
-- NOTE(review): this only matches 'MkCombinatorial'; any other
-- 'CircuitDescriptor' constructor causes a pattern-match failure —
-- confirm callers never pass those.
delayByRegister :: CircuitDescriptor -> CircuitDescriptor
delayByRegister cd@(MkCombinatorial nd _ _ _ _)
    = MkComposite (cd : reg : [])
    where reg = mkRegister nd
-- To build a loop, the feedback path is routed through a register that
-- delays by one clock tick. The function that wraps a component in such a
-- register loop is 'registerLoopRewire'; via 'splice'-style plumbing it
-- becomes the externally used 'loopWithRegister'.
loopWithRegister :: CircuitDescriptor -> CircuitDescriptor
loopWithRegister cd
    = MkLoop
      { nodeDesc = MkNode
          { label = "loop(" ++ (label.nodeDesc $ cd) ++ ")"
          , nodeId = 0
          , sinks = srcs
          , sources = snks
          }
      , nodes = [alterCompIDs 1 cd]
      , edges = es
      , space = space cd
      }
    where (es, (srcs, snks)) = registerLoopRewire cd
-- Unter den \hsSource{rewire}-Funktionen sind Funktionen zu
-- verstehen, die eine Vorstufe für die eigentliche Verbindung (das
-- \begriff{splicen}) darstellen. Zwei Schaltkreise werden jeweils in
-- eine Zwischendarstellung überführt. Die Zwischendarstellung besteht
-- aus einer Liste von neuen Kanten (\hsSource{[Edge]}), zusammen mit
-- den überbleibenden Ein- und Ausgangspins.
--
-- Alle \hsSource{rewire}-Funktionen nutzen eine Funktion, nämlich
-- \hsSource{wire}. Das verbinden von Drähten mit Komponenten ist,
-- unabhängig davon ob sequentiell oder parallel verbunden werden
-- soll, immer gleich. Eingehende Parameter zu \hsSource{wire} sind
-- die beiden Komponenten Nummern, sowie die Pin-Listen. Auch diese
-- Funktion erzeugt die schon beschriebene Zwischendarstellung.
-- | Pair up the pins of two components one-for-one into edges. Pairing
-- stops when the shorter pin list runs out; the pins left over on either
-- side are returned next to the freshly built edges. A component id of
-- 'Nothing' denotes the outside world.
wire :: Maybe CompID -> Maybe CompID -> Pins -> Pins -> ([Edge], (Pins, Pins))
wire cid_l cid_r pins_l pins_r = (new_edges, (leftover pins_l, leftover pins_r))
    where new_edges = zipWith mkE pins_l pins_r
          mkE pin_l pin_r = MkEdge (cid_l, pin_l) (cid_r, pin_r)
          leftover = drop (length new_edges)
-- | Like 'wire', but keep only the generated edges and discard the
-- leftover pins (a synonym for @fst . wire@).
wire_ :: Maybe CompID -> Maybe CompID -> Pins -> Pins -> [Edge]
wire_ cid_l cid_r pins_l pins_r =
    let (es, _leftovers) = wire cid_l cid_r pins_l pins_r
    in  es
-- 'seqRewire' creates the connections sequentially: the left circuit's
-- sources feed the right circuit's sinks, and any inputs or outputs left
-- over are routed to the combined circuit's overall inputs and outputs.
seqRewire :: CircuitDescriptor -> CircuitDescriptor -> ([Edge], (Pins, Pins))
seqRewire sg_l sg_r
    = ( fromOuterToL ++ fromOuterToR ++ edgs ++ fromRToOuter ++ fromLToOuter
      , (super_srcs, super_snks)
      )
    where (edgs, (srcs_l', snks_r')) = wire (Just $ nodeId.nodeDesc $ sg_l) (Just $ nodeId.nodeDesc $ sg_r) (sources.nodeDesc $ sg_l) (sinks.nodeDesc $ sg_r)
          super_srcs = [0..(length.sinks.nodeDesc $ sg_l) + length snks_r' -1]
          super_snks = [0..(length.sources.nodeDesc $ sg_r) + length srcs_l' -1]
          ( fromOuterToL, (super_srcs', _)) = wire Nothing (Just $ nodeId.nodeDesc $ sg_l) super_srcs (sinks.nodeDesc $ sg_l)
          ( fromOuterToR, (_ , _)) = wire Nothing (Just $ nodeId.nodeDesc $ sg_r) super_srcs' (drop (length fromOuterToL) $ sinks.nodeDesc $ sg_r)
          ( fromRToOuter, (_, super_snks')) = wire (Just $ nodeId.nodeDesc $ sg_r) Nothing (sources.nodeDesc $ sg_r) super_snks
          ( fromLToOuter, (_, _)) = wire (Just $ nodeId.nodeDesc $ sg_l) Nothing (drop (length fromRToOuter) $ sources.nodeDesc $ sg_l) super_snks'
-- With 'parRewire' both components are stacked "on top of each other":
-- the inputs of both components, and likewise their outputs, are wired
-- straight out in parallel.
parRewire :: CircuitDescriptor -> CircuitDescriptor -> ([Edge], (Pins, Pins))
parRewire sg_u sg_d
    = ( goingIn_edges ++ goingOut_edges
      , (super_srcs, super_snks)
      )
    where super_srcs = [0..(length $ (sinks.nodeDesc $ sg_u) ++ (sinks.nodeDesc $ sg_d)) -1]
          super_snks = [0..(length $ (sources.nodeDesc $ sg_u) ++ (sources.nodeDesc $ sg_d)) -1]
          goingIn_edges = (wire_ Nothing (Just $ nodeId.nodeDesc $ sg_u) (super_srcs) (sinks.nodeDesc $ sg_u))
                       ++ (wire_ Nothing (Just $ nodeId.nodeDesc $ sg_d) (drop (length.sinks.nodeDesc $ sg_u) super_srcs) (sinks.nodeDesc $ sg_d))
          goingOut_edges = (wire_ (Just $ nodeId.nodeDesc $ sg_u) Nothing (sources.nodeDesc $ sg_u) (super_snks))
                        ++ (wire_ (Just $ nodeId.nodeDesc $ sg_d) Nothing (sources.nodeDesc $ sg_d) (drop (length.sources.nodeDesc $ sg_u) super_snks))
-- 'dupParRewire' works analogously to 'parRewire', except that the inputs
-- are first duplicated and then fed to both components.
dupParRewire :: CircuitDescriptor -> CircuitDescriptor -> ([Edge], (Pins, Pins))
dupParRewire sg_u sg_d
    = ( goingIn_edges ++ goingOut_edges
      , (super_srcs, super_snks)
      )
    where super_srcs = [0..(length.sinks.nodeDesc $ sg_u) -1]
          super_snks = [0..(length $ (sources.nodeDesc $ sg_u) ++ (sources.nodeDesc $ sg_d)) -1]
          goingIn_edges = (wire_ Nothing (Just $ nodeId.nodeDesc $ sg_u) super_srcs (sinks.nodeDesc $ sg_u))
                       ++ (wire_ Nothing (Just $ nodeId.nodeDesc $ sg_d) super_srcs (sinks.nodeDesc $ sg_d))
          goingOut_edges = (wire_ (Just $ nodeId.nodeDesc $ sg_u) Nothing (sources.nodeDesc $ sg_u) (super_snks))
                        ++ (wire_ (Just $ nodeId.nodeDesc $ sg_d) Nothing (sources.nodeDesc $ sg_d) (drop (length.sources.nodeDesc $ sg_u) super_snks))
-- | Wire a component into a loop through a one-tick register: the
-- component is sequenced with a fresh register in both directions and the
-- pins common to both wirings become the loop's external pins.
registerLoopRewire :: CircuitDescriptor -> ([Edge], (Pins, Pins))
registerLoopRewire cd
    = (es, (srcs, snks))
    where reg = mkRegister $ nodeDesc emptyCircuit
          (es1, (srcs1, snks1)) = seqRewire cd reg
          (es2, (srcs2, snks2)) = seqRewire reg cd
          es = es1 ++ es2
          -- pins present in both directions of the loop (symmetric
          -- intersection, de-duplicated); quadratic, but pin lists are small
          srcs = nub $ filter (flip elem srcs2) srcs1 ++ filter (flip elem srcs1) srcs2
          snks = nub $ filter (flip elem snks2) snks1 ++ filter (flip elem snks1) snks2
| frosch03/arrowVHDL | src/System/ArrowVHDL/Circuit/Splice.hs | cc0-1.0 | 10,669 | 0 | 15 | 2,305 | 2,208 | 1,251 | 957 | 98 | 1 |
{-# language TemplateHaskell #-}
{-# language DeriveDataTypeable #-}
{-# language FlexibleInstances #-}
{-# language MultiParamTypeClasses #-}
module CSP.Derive.Quiz where
import CSP.Syntax
import CSP.Step
import CSP.Property
import qualified CSP.Roll
import qualified CSP.Property.Guarded
import qualified Data.Set as S
import qualified Data.Map as M
import Control.Monad ( guard )
import Autolib.Reader
import Autolib.ToDoc
import Autolib.Size
import Data.Typeable
import Data.List ( maximumBy )
import Data.Ord ( comparing )
import Control.Monad ( forM, when )
import System.IO ( hFlush, stdout )
-- | Breadth-first layers of the derivation tree: the first layer is the
-- process itself, each next layer the de-duplicated successors of the
-- previous one. The list ends when a layer is empty.
levels :: Ord a => Process a -> [[ Process a ]]
levels p = expand [p]
    where
      expand [] = []
      expand layer = layer : expand (nextLayer layer)
      nextLayer layer =
          S.toList . S.fromList . map snd $ concatMap successors layer
-- | Candidate derivation targets reachable from @p@ within @cut@ steps,
-- each paired with a "distance" key (whether its level was wider than
-- @minw@, and the earliest level at which it appears). Levels wider than
-- @maxw@ are cut off; only terms no larger than @p@ itself are kept.
interesting_terms p (minw,maxw) cut = do
    ( r, dist ) <- M.toList $
         M.fromListWith min $ do
         ( k , rs ) <- zip [ 0 .. cut ]
              $ takeWhile ( \ qs -> length qs < maxw )
              $ levels p
         r <- rs
         guard $ size r <= size p
         return ( r, ( length rs > minw, k ) )
    return ( dist, (p, r ))
-- | Repeatedly roll random guarded processes until at least one yields
-- interesting derivation targets, then return the best-scored
-- (process, target) pair. Recurses until something interesting turns up.
target_roller :: ( ToDoc a, Ord a )
       => Config a
       -> IO ( (Bool,Int), (Process a, Process a ))
target_roller conf = do
    outs <- forM [ 1 .. generator_repeats conf ] $ \ k -> do
        p <- CSP.Roll.roll_free
                 ( how_to_iterate conf )
                 ( process_alphabet conf )
                 ( process_size conf )
        let ok = CSP.Property.Guarded.ok p
        let cut = max_derivation_length conf
            minw = min_derivation_tree_width conf
            maxw = max_derivation_tree_width conf
        return $ if not ok then [] else
            interesting_terms p (minw, maxw) cut
    case concat outs of
        [] -> target_roller conf
        co -> return $ maximumBy ( comparing fst ) co
-- | For each level (up to the given width bound), the sets of alphabet
-- symbols that stable processes (no tau step possible) on that level
-- cannot accept.
rejects :: Ord a => Int
        -> S.Set a
        -> Process a -> [[ S.Set a ]]
rejects width sigma p = do
    qs <- takeWhile ( \ qs -> length qs < width ) $ levels p
    return $ do
        q <- qs
        guard $ null $ tau q
        let accepts = S.fromList $ map fst $ real q
        return $ S.difference sigma accepts
-- | Like 'interesting_terms', but for rejection sets: each reachable
-- rejection set paired with its distance key.
interesting_rejects sigma p (width, wadth) cut = do
    ( r, dist ) <- M.toList $
         M.fromListWith min $ do
             ( k , rs ) <- zip [ 0 .. cut ]
                  $ rejects wadth sigma p
             r <- rs
             return ( r, (length rs > width, k ) )
    return ( dist, (p, r ))
-- | What kind of quiz goal to generate: a reachable target process, or a
-- rejection set.
data Goal_Type = Target | Rejects deriving Typeable
-- | Generator parameters for quiz instances.
data Config a = Config
         { goal_type :: Goal_Type
         , process_alphabet :: [ a ]          -- ^ symbols the rolled process may use
         , process_size :: Int                -- ^ size of the rolled process term
         , how_to_iterate :: Iteration
         , max_derivation_length :: Int       -- ^ derivation depth cut-off
         , max_derivation_tree_width :: Int   -- ^ stop expanding wider levels
         , min_derivation_tree_width :: Int   -- ^ "interesting" width threshold
         , generator_repeats :: Int           -- ^ random rolls per attempt
         } deriving Typeable
$(derives [makeReader, makeToDoc] [''Config, ''Goal_Type])
-- | Default quiz configuration.
config = Config
         { goal_type = Rejects
         , process_alphabet = "abc"
         , process_size = 6
         , how_to_iterate = Iteration_Star
         , max_derivation_length = 10
         , min_derivation_tree_width = 2
         , max_derivation_tree_width = 100
         , generator_repeats = 100
         }
-- | Roll random guarded processes and return the best-scored
-- (process, rejection set) pair. Unlike 'target_roller' this does not
-- retry on an all-empty round; 'maximumBy' on an empty list would then
-- fail — NOTE(review): confirm the configuration guarantees at least one
-- candidate.
reject_roller :: ( ToDoc a, Ord a )
       => Config a
       -> IO ( (Bool, Int), (Process a, S.Set a ))
reject_roller conf = do
    outs <- forM [ 1 .. generator_repeats conf ] $ \ k -> do
        p <- CSP.Roll.roll_free
                 ( how_to_iterate conf )
                 ( process_alphabet conf )
                 ( process_size conf )
        let sigma = CSP.Syntax.alphabet p
        let ok = CSP.Property.Guarded.ok p
        let cut = max_derivation_length conf
            wid = min_derivation_tree_width conf
            wad = max_derivation_tree_width conf
        return $ if not ok then [] else
            interesting_rejects sigma p (wid, wad) cut
    return $ maximumBy ( comparing fst ) $ concat outs
-- | Hand-written sample process over the alphabet \"abc\", useful for
-- manual experiments in GHCi.
t1 = Fix (Ext (Ext (Pre 'a' (Fix Point))
              (Par [ 'b' ] (Pre 'c' Point) (Pre 'c' Point)))
         (Pre 'b' (Pre 'b' (Seq (Pre 'b' Point) (Pre 'b' Point)))))
| marcellussiegburg/autotool | collection/src/CSP/Derive/Quiz.hs | gpl-2.0 | 4,358 | 0 | 18 | 1,515 | 1,445 | 756 | 689 | 116 | 3 |
module Forging where
import Prelude
import qualified Postructures as PS
import qualified Constants as Const
-- | Convert a non-negative 'Integer' to a Peano natural.
-- NOTE(review): a negative argument never reaches the base case and
-- diverges — confirm callers only ever pass non-negative values.
int2nat :: Integer -> PS.Nat
int2nat 0 = PS.O
int2nat m = PS.S (int2nat (m-1))
-- | An infinite stream: one element followed by the rest of the stream.
data Stream a =
   Cons0 a (Stream a)

-- | First element of a stream.
hd :: (Stream a1) -> a1
hd (Cons0 first _) = first

-- | Everything after the first element.
tl :: (Stream a1) -> Stream a1
tl (Cons0 _ rest) = rest

-- | Drop the first @n@ elements of a stream.
str_nth_tl :: Integer -> (Stream a1) -> Stream a1
str_nth_tl 0 stream = stream
str_nth_tl n stream = str_nth_tl (n - 1) (tl stream)

-- | Zero-based indexing into a stream.
str_nth :: Integer -> (Stream a1) -> a1
str_nth n = hd . str_nth_tl n
-- | Machine-level number type used throughout this module.
type BN = Integer
type Digest = PS.HexN
-- Identity stand-ins: serialisation and hashing are stubbed out here.
dig2string = id
hashfun = id
-- Numeric primitives handed to the extracted Postructures code.
modN = mod
multN = (*)
divN = div
doubleN = (*) 2
succN = (+) 1
-- | Convert a Peano natural back to an 'Integer'.
nat2bn :: PS.Nat -> Integer
nat2bn PS.O = 0
nat2bn (PS.S n') = 1 + (nat2bn n')
bN0 = 0
bN1 = 1
-- | Bridge from Prelude booleans to the Postructures 'PS.Bool'.
bool2bool :: Prelude.Bool -> PS.Bool
bool2bool True = PS.True
bool2bool False = PS.False
-- | Greater-or-equal on 'BN', as a Postructures boolean.
geN :: BN -> BN -> PS.Bool
geN x y = bool2bool (x>=y)
plusN = (+)
-- | Infinite pseudo-random stream: each element is the previous one
-- multiplied by @n1@ and reduced modulo @n2@.
rand :: Integer -> Integer -> Integer -> Stream Integer
rand seed n1 n2 = let seed' = modN seed n2 in
                      Cons0 seed' (rand (seed' * n1) n1 n2)
-- | 'rand' specialised to the module-wide constants.
randStream :: Integer -> Stream Integer
randStream n = rand n Const.iterRand Const.maxRand
-- Shorthands for the Postructures types instantiated at 'BN'.
type Block = PS.Block0 BN
type Account = PS.Account0 BN
type Timestamp = PS.Timestamp BN
type Transaction = PS.Transaction0 BN
type Currency = PS.Currency BN
type System = PS.System0 BN
-- | Transaction validation is stubbed: every transaction is accepted.
validate :: Transaction -> PS.Bool
validate _ = PS.True
initialBaseTarget = PS.initialBaseTarget multN divN doubleN Const.systemBalance Const.goalBlockTime Const.maxRand
maxBaseTarget = PS.maxBaseTarget multN divN doubleN Const.systemBalance Const.goalBlockTime Const.maxRand
calcGenerationSignature = PS.calcGenerationSignature dig2string hashfun
-- Block difficulty, computed as (maxRand / baseTarget)^2.
sqr = \n -> n*n
difficulty_fun = \n -> sqr (divN Const.maxRand n)
block_difficulty = \b -> difficulty_fun (PS.baseTarget b)
-- | Form a new block on top of @pb@: the base target is retargeted
-- proportionally to the elapsed time (clamped between half and double the
-- parent's target, and by the global bounds), the parent's difficulty is
-- accumulated, and the generation signature is derived from parent and
-- forging account.
formBlock_orig :: Block -> Account -> Timestamp -> PS.List Transaction -> PS.HexSPK -> Block
formBlock_orig pb acc ts txs pk =
   let pbt = PS.baseTarget pb in
   let vtxs = PS.filter validate txs in
   let maxTarget = min (2*pbt) maxBaseTarget in
   let minTarget = max (div pbt 2) 1 in
   let candidate = div (pbt*(ts-(PS.btimestamp pb))) Const.goalBlockTime in
   let bt = min (max minTarget candidate) maxTarget in
   let gs = calcGenerationSignature pb acc in
   PS.Block vtxs PS.O bt (plusN (PS.totalDifficulty pb) (difficulty_fun bt)) acc gs ts
-- Damping parameters for the alternative retargeting scheme below.
mthcl_gamma = 0.1
mthcl_invbeta = 1.0-mthcl_gamma/2.0
-- | Alternative retargeting: the parent's target is scaled by a factor
-- derived from the elapsed time measured in goal block times (capped at
-- 2.0, damped by 'mthcl_gamma' below 1.0), with a floor of 100.
formBlock_mthcl :: Block -> Account -> Timestamp -> PS.List Transaction -> PS.HexSPK -> Block
formBlock_mthcl pb acc ts txs pk =
   let pbt = PS.baseTarget pb in
   let vtxs = PS.filter validate txs in
   let dt = fromInteger (ts-(PS.btimestamp pb)) / (fromInteger Const.goalBlockTime) in
   let kbt = if dt >= 2.0 then 2.0 else if dt > 1.0 then dt else if dt>0.5 then (1.0-mthcl_gamma*(1.0-dt)) else mthcl_invbeta in
   let bt = max 100 (truncate (kbt*(fromInteger pbt))) in
   let gs = calcGenerationSignature pb acc in
   PS.Block vtxs PS.O bt (plusN (PS.totalDifficulty pb) (difficulty_fun bt)) acc gs ts
-- The scheme actually used by the rest of the module.
formBlock = formBlock_orig
-- block_difficulty = \ _ -> 1
-- block_difficulty = PS.baseTarget
minRand = div Const.maxRand 3
-- | An account's hit against a block: element of the account-seeded
-- pseudo-random stream selected by the parent's timestamp (offset 29).
calculateHit_orig :: Account -> Block -> BN
calculateHit_orig acc pb =
    str_nth (PS.btimestamp pb + 29) (randStream (100+nat2bn (PS.hex2nat (PS.publicKey acc))))
-- | Alternative hit: log-transformed draw from the same kind of stream,
-- scaled by 10^15 and capped at 2*maxRand.
calculateHit_mthcl :: Account -> Block -> BN
calculateHit_mthcl acc pb =
    let pp = str_nth (PS.btimestamp pb + 1) (randStream (nat2bn (PS.hex2nat (PS.publicKey acc)))) in
    let ppd = fromInteger pp in
    let lpd = (log ((fromInteger Const.maxRand)/ ppd))*1000000000000000 in
    let hitd = min (truncate lpd) (2*Const.maxRand) in
    hitd
-- The hit function actually used by the rest of the module.
calculateHit = calculateHit_orig
-- | Check whether a hit qualifies for forging on top of block @pb@ at
-- time @ts@: some time must have elapsed since the parent block, and the
-- hit must be strictly below the target, which grows with effective
-- balance, the parent's base target, and the elapsed time.
verifyHit :: BN -> Block -> Timestamp -> Currency -> PS.Bool
verifyHit hit pb ts effpb = bool2bool (elapsed > 0 && hit < target)
    where elapsed = ts - PS.btimestamp pb
          target  = effpb * PS.baseTarget pb * elapsed
type Node = PS.Node0 BN
-- | A node may forge on top of @pb@ at time @ts@ iff its account's hit
-- passes 'verifyHit' against the account's effective balance.
canforge :: Node -> Timestamp -> Block -> PS.Bool
canforge n ts pb = verifyHit (calculateHit (PS.node_account n) pb) pb ts (PS.effectiveBalance (PS.node_account n))
-- Instantiations of the extracted Postructures machinery with this
-- module's primitives and the constants from "Constants".
systemTransform = PS.systemTransform
  plusN succN dig2string hashfun formBlock block_difficulty bN0 geN Const.markTimestamp canforge (int2nat Const.lengthConfirmation) --plusN succN dig2string hashfun formBlock block_difficulty bN0 geN markTimestamp canforge lengthConfirmation
genesisBlock = PS.genesisBlock multN divN doubleN bN0 Const.systemBalance Const.goalBlockTime Const.maxRand
genesisState = \nFixAccounts -> \accountParams -> PS.genesisState multN divN doubleN plusN bN0 bN1 nFixAccounts accountParams
     Const.systemBalance Const.goalBlockTime Const.maxRand
sys = \nFixAccounts -> \accountParams -> PS.sys
  multN divN doubleN plusN succN dig2string hashfun formBlock block_difficulty bN0 bN1 geN Const.markTimestamp canforge
  (int2nat Const.lengthConfirmation) nFixAccounts accountParams Const.systemBalance Const.goalBlockTime Const.maxRand
signs = \nFixAccounts -> \accountParams -> PS.signs
  multN divN doubleN plusN succN dig2string hashfun formBlock block_difficulty bN0 bN1 geN Const.markTimestamp canforge
  (int2nat Const.lengthConfirmation) nFixAccounts accountParams Const.systemBalance Const.goalBlockTime Const.maxRand
rebalance_sys = PS.rebalance_sys plusN block_difficulty bN0 geN
sysigns = PS.sysigns
sysblocks = PS.sysblocks
generators = PS.generators | ConsensusResearch/MultiBranch | forging.hs | gpl-2.0 | 6,003 | 0 | 24 | 1,493 | 2,082 | 1,065 | 1,017 | 119 | 4 |
{-# LANGUAGE ScopedTypeVariables #-}
-- enough rope to evaluate Salsa20 algebraically
module Algebraic where {
import Data.Maybe(fromJust);
import Data.Bits(Bits(..));
-- | Symbolic word expressions: terms built from opaque atoms combined
-- with xor, rotation, and addition (the Salsa20 primitives). Only these
-- constructors are supported; every other 'Num' / 'Bits' method is
-- deliberately left as 'error'.
data Algebraic a = Atom a | Xor (Algebraic a) (Algebraic a) | Rotate Int (Algebraic a) | Add (Algebraic a) (Algebraic a) deriving (Show,Eq);

-- | '+' builds an 'Add' node; all other 'Num' methods are unsupported.
instance Num (Algebraic a) where {
(+) = Add;
(*) = error "Algebraic *";
abs = error "Algebraic abs";
signum = error "Algebraic signum";
negate = error "Algebraic negate";
fromInteger = error "Algebraic fromInteger";
};

-- | 'xor' and 'rotate' build syntax nodes; the bit-size queries report
-- the term's node count; everything else is unsupported.
instance (Eq a) => Bits (Algebraic a) where {
xor = Xor;
rotate = flip Rotate;
(.&.) = error "Algebraic &";
(.|.) = error "Algebraic |";
complement = error "Algebraic complement";
shift = error "Algebraic shift";
bitSize = fromJust . bitSizeMaybe;
bitSizeMaybe = Just . fromIntegral . size;
isSigned = error "Algebraic isSigned";
testBit = error "Algebraic testBit";
bit = error "Algebraic bit";
popCount = error "Algebraic popCount";
};

-- | Number of constructor nodes in a term.
size :: Algebraic a -> Integer;
size term = case term of {
Atom _ -> 1;
Xor lhs rhs -> 1 + size lhs + size rhs;
Add lhs rhs -> 1 + size lhs + size rhs;
Rotate _ inner -> 1 + size inner;
};

-- | @simplify (n, small) large@ rewrites every subterm of @large@ that is
-- equal to @small@ into @Atom n@, recursing through all constructors.
simplify :: (Eq a) => (a,Algebraic a) -> (Algebraic a) -> (Algebraic a);
simplify rule@(name, pattern) term
 | term == pattern = Atom name
 | otherwise = case term of {
Atom _ -> term;
Xor lhs rhs -> Xor (simplify rule lhs) (simplify rule rhs);
Rotate k inner -> Rotate k (simplify rule inner);
Add lhs rhs -> Add (simplify rule lhs) (simplify rule rhs);
};
}
| kenta2/yescrypt | Algebraic.hs | gpl-3.0 | 1,457 | 0 | 11 | 275 | 647 | 354 | 293 | 37 | 5 |
{-# LANGUAGE OverloadedStrings #-}
import Control.Applicative
import Data.Aeson
import Data.Attoparsec.Text as AP
import Data.Text as T
import Data.Text.IO as TIO
import System.Environment
import System.Exit
-- | One nick-change record extracted from an IRC log line.
data Nick = Nick
    { nick :: Text      -- ^ first name on the line (before \"is now known as\")
    , alias_of :: Text  -- ^ second name on the line (after the phrase)
    } deriving Show
-- Serialises to {"nick": ..., "alias_of": ...}.
instance ToJSON Nick where
    toJSON (Nick name alias) = object [ "nick" .= name, "alias_of" .= alias ]
type Nicks = [Nick]
-- | Parse one \"X is now known as Y\" line from an IRC log.
parseLog :: Parser Nick
parseLog = do
  _ <- AP.take 20                       -- fixed-width timestamp prefix
  _ <- AP.string "--"                   -- status-message marker
  _ <- AP.take 1                        -- separator after the marker
  alias <- AP.takeTill (\x -> x == ' ') -- name before the phrase (the old nick)
  _ <- AP.string " is now known as "
  nick <- AP.takeTill AP.isEndOfLine    -- name after the phrase (the new nick)
  -- NOTE(review): the record stores the old name in the 'nick' field and
  -- the new name in 'alias_of', which is the opposite of what the local
  -- variable names suggest — confirm which orientation consumers expect.
  return $ Nick { nick = alias, alias_of = nick }
-- | Parse zero or more newline-terminated nick-change lines.
parseNicks :: Parser Nicks
parseNicks = many $ parseLog <* endOfLine
-- | Read the log file named by the first command-line argument and print
-- the result of parsing it. The previous version indexed @args !! 0@,
-- which crashed with a partial-function error when no argument was
-- supplied; a missing argument now prints a usage message and exits with
-- a non-zero status instead.
main :: IO ()
main = do
  args <- getArgs
  case args of
    (path : _) -> do
      TIO.readFile path >>= print . parseOnly parseNicks
      exitWith ExitSuccess
    [] -> do
      putStrLn "usage: normalizer <irc-log-file>"
      exitWith (ExitFailure 1)
| qpfiffer/normalizer | normalize.hs | gpl-3.0 | 893 | 0 | 11 | 209 | 314 | 165 | 149 | 31 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeFamilies #-}
-- Copyright (C) 2010-2011 John Millikin <jmillikin@gmail.com>
--
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program. If not, see <http://www.gnu.org/licenses/>.
module Anansi.Types
( Block (..)
, Content (..)
, Position (..)
, ParseError (..)
, Document (..)
, Loom
, LoomM
, LoomOptions (..)
, parseLoomOptions
, weave
) where
import Prelude hiding (FilePath)
import Control.Applicative (Applicative, pure, (<*>))
import Control.Monad (ap, liftM)
import qualified Control.Monad.Reader as Reader
import Control.Monad.Reader (ReaderT, EnvType, runReaderT)
import qualified Control.Monad.Writer as Writer
import Control.Monad.Writer (Writer, WriterType, execWriter)
import Data.ByteString (ByteString)
import qualified Data.Map
import Data.Map (Map)
import qualified Data.Text
import Data.Text (Text)
import Filesystem.Path.CurrentOS (FilePath)
-- | One top-level unit of an Anansi document: plain prose, a named output
-- file definition, or a named macro definition.
data Block
	= BlockText Text
	| BlockFile Text [Content]
	| BlockDefine Text [Content]
	deriving (Eq, Ord, Show)
-- | A line inside a file or macro block: literal text, or a macro
-- reference.
data Content
	= ContentText Position Text
	-- | A macro reference within a content block. The first 'Text' is
	-- any indentation found before the first @\'|\'@, and the second is
	-- the name of the macro.
	| ContentMacro Position Text Text
	deriving (Eq, Ord, Show)
-- | A source location: file and 1-based line number.
data Position = Position
	{ positionFile :: FilePath
	, positionLine :: Integer
	}
	deriving (Eq, Ord, Show)
-- | A parse failure, with the position at which it occurred.
data ParseError = ParseError
	{ parseErrorPosition :: Position
	, parseErrorMessage :: Text
	}
	deriving (Eq, Show)
data Document = Document
	{ documentBlocks :: [Block]
	-- | A map of @:option@ commands found in the document. If
	-- the same option is specified multiple times, the most recent will
	-- be used.
	, documentOptions :: Map Text Text
	-- | The last @:loom@ command given, if any. A document does not
	-- require a loom name if it's just going to be tangled, or will be
	-- woven by the user calling 'weave'. Documents woven by
	-- 'defaultMain' do require a loom name.
	, documentLoomName :: Maybe Text
	}
	deriving (Eq, Show)
-- | A loom contains all the logic required to convert a 'Document' into
-- markup suitable for processing with an external documentation tool.
--
-- Within a loom, use 'Reader.ask' to retrieve the 'LoomOptions', and
-- 'Writer.tell' to append data to the output.
type Loom = Document -> LoomM ()
-- | Loom computations run with read access to 'LoomOptions' and append
-- raw output bytes via a 'Writer'.
newtype LoomM a = LoomM { unLoomM :: ReaderT LoomOptions (Writer ByteString) a }
instance Functor LoomM where
	fmap = liftM
instance Applicative LoomM where
	pure = return
	(<*>) = ap
instance Monad LoomM where
	return = LoomM . return
	(LoomM m) >>= f = LoomM $ do
		x <- m
		unLoomM (f x)
-- The reader/writer instances use the monads-tf style associated type
-- families ('EnvType' / 'WriterType') and simply delegate through the
-- 'LoomM' newtype wrapper.
instance Reader.MonadReader LoomM where
	type EnvType LoomM = LoomOptions
	ask = LoomM Reader.ask
	local f (LoomM m) = LoomM (Reader.local f m)
instance Writer.MonadWriter LoomM where
	type WriterType LoomM = ByteString
	tell = LoomM . Writer.tell
	listen (LoomM m) = LoomM (Writer.listen m)
	pass m = LoomM (Writer.pass (unLoomM m))
-- | Write a document to some sort of document markup. This will typically be
-- rendered into documentation by external tools, such as LaTeX or a web
-- browser.
--
-- This writes a 'ByteString' rather than 'Text' so that looms have full
-- control over character encoding.
weave :: Loom -> Document -> ByteString
weave loom doc = execWriter (runReaderT
	(unLoomM (loom doc))
	(parseLoomOptions (documentOptions doc)))
-- | A set of processed @:option@ commands related to looms. Looms are always
-- free to check options manually, but this simplifies common cases.
data LoomOptions = LoomOptions
	{ loomOptionTabSize :: Integer
	}
	deriving (Eq, Show)
-- | Interpret the document's generic @:option@ map as loom settings.
--
-- The tab size is taken from @anansi.tab-size@ when present, otherwise
-- from @tab-size@, otherwise it defaults to 8. A value that does not
-- parse fully as an integer is skipped rather than crashing: the previous
-- implementation used the partial 'read', which raised an exception on
-- malformed input.
parseLoomOptions :: Map Text Text -> LoomOptions
parseLoomOptions opts = LoomOptions
	{ loomOptionTabSize = tabSize
	}
	where
	tabSize = case parsedValues of
		(n : _) -> n
		[] -> 8
	-- Values of the recognized keys, in priority order, that parse
	-- completely as integers ('reads' with no leftover input).
	parsedValues =
		[ n
		| key <- ["anansi.tab-size", "tab-size"]
		, Just val <- [Data.Map.lookup key opts]
		, (n, rest) <- reads (Data.Text.unpack val)
		, null rest
		]
| jmillikin/anansi | lib/Anansi/Types.hs | gpl-3.0 | 4,591 | 47 | 16 | 900 | 948 | 547 | 401 | 83 | 3 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Directory.Schemas.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves all schemas for a customer.
--
-- /See:/ <https://developers.google.com/admin-sdk/ Admin SDK API Reference> for @directory.schemas.list@.
module Network.Google.Resource.Directory.Schemas.List
(
-- * REST Resource
SchemasListResource
-- * Creating a Request
, schemasList
, SchemasList
-- * Request Lenses
, slXgafv
, slUploadProtocol
, slAccessToken
, slUploadType
, slCustomerId
, slCallback
) where
import Network.Google.Directory.Types
import Network.Google.Prelude
-- | A resource alias for @directory.schemas.list@ method which the
-- 'SchemasList' request conforms to.
--
-- NOTE: this module is machine-generated (stability \"auto-generated\");
-- regenerate it rather than editing by hand.
type SchemasListResource =
     "admin" :>
       "directory" :>
         "v1" :>
           "customer" :>
             Capture "customerId" Text :>
               "schemas" :>
                 QueryParam "$.xgafv" Xgafv :>
                   QueryParam "upload_protocol" Text :>
                     QueryParam "access_token" Text :>
                       QueryParam "uploadType" Text :>
                         QueryParam "callback" Text :>
                           QueryParam "alt" AltJSON :> Get '[JSON] Schemas
-- | Retrieves all schemas for a customer.
--
-- Each field below corresponds to one query parameter (or the
-- @customerId@ path capture) of 'SchemasListResource'.
--
-- /See:/ 'schemasList' smart constructor.
data SchemasList =
  SchemasList'
    { _slXgafv :: !(Maybe Xgafv)
    , _slUploadProtocol :: !(Maybe Text)
    , _slAccessToken :: !(Maybe Text)
    , _slUploadType :: !(Maybe Text)
    , _slCustomerId :: !Text
    , _slCallback :: !(Maybe Text)
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'SchemasList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'slXgafv'
--
-- * 'slUploadProtocol'
--
-- * 'slAccessToken'
--
-- * 'slUploadType'
--
-- * 'slCustomerId'
--
-- * 'slCallback'
schemasList
    :: Text -- ^ 'slCustomerId'
    -> SchemasList
schemasList pSlCustomerId_ =
  SchemasList'
    { _slXgafv = Nothing
    , _slUploadProtocol = Nothing
    , _slAccessToken = Nothing
    , _slUploadType = Nothing
    , _slCustomerId = pSlCustomerId_
    , _slCallback = Nothing
    }
-- | V1 error format.
slXgafv :: Lens' SchemasList (Maybe Xgafv)
slXgafv = lens _slXgafv (\ s a -> s{_slXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
slUploadProtocol :: Lens' SchemasList (Maybe Text)
slUploadProtocol
  = lens _slUploadProtocol
      (\ s a -> s{_slUploadProtocol = a})
-- | OAuth access token.
slAccessToken :: Lens' SchemasList (Maybe Text)
slAccessToken
  = lens _slAccessToken
      (\ s a -> s{_slAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
slUploadType :: Lens' SchemasList (Maybe Text)
slUploadType
  = lens _slUploadType (\ s a -> s{_slUploadType = a})
-- | Immutable ID of the Google Workspace account.
slCustomerId :: Lens' SchemasList Text
slCustomerId
  = lens _slCustomerId (\ s a -> s{_slCustomerId = a})
-- | JSONP callback parameter.
slCallback :: Lens' SchemasList (Maybe Text)
slCallback
  = lens _slCallback (\ s a -> s{_slCallback = a})
-- Turns a 'SchemasList' value into the actual servant client call against
-- 'SchemasListResource'.
instance GoogleRequest SchemasList where
        type Rs SchemasList = Schemas
        type Scopes SchemasList =
             '["https://www.googleapis.com/auth/admin.directory.userschema",
               "https://www.googleapis.com/auth/admin.directory.userschema.readonly"]
        requestClient SchemasList'{..}
          = go _slCustomerId _slXgafv _slUploadProtocol
              _slAccessToken
              _slUploadType
              _slCallback
              (Just AltJSON)
              directoryService
          where go
                  = buildClient (Proxy :: Proxy SchemasListResource)
                      mempty
| brendanhay/gogol | gogol-admin-directory/gen/Network/Google/Resource/Directory/Schemas/List.hs | mpl-2.0 | 4,493 | 0 | 19 | 1,119 | 713 | 415 | 298 | 104 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.ContainerBuilder.Types.Product
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.ContainerBuilder.Types.Product where
import Network.Google.ContainerBuilder.Types.Sum
import Network.Google.Prelude
-- | BuildStep describes a step to perform in the build pipeline.
--
-- /See:/ 'buildStep' smart constructor.
-- All fields are strict and optional; 'Nothing' fields are omitted from the
-- JSON encoding (see the 'ToJSON' instance below).
data BuildStep = BuildStep'
    { _bsDir :: !(Maybe Text)
    , _bsArgs :: !(Maybe [Text])
    , _bsEnv :: !(Maybe [Text])
    , _bsEntrypoint :: !(Maybe Text)
    , _bsWaitFor :: !(Maybe [Text])
    , _bsName :: !(Maybe Text)
    , _bsId :: !(Maybe Text)
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'BuildStep' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'bsDir'
--
-- * 'bsArgs'
--
-- * 'bsEnv'
--
-- * 'bsEntrypoint'
--
-- * 'bsWaitFor'
--
-- * 'bsName'
--
-- * 'bsId'
buildStep
    :: BuildStep
buildStep =
    BuildStep'
    { _bsDir = Nothing
    , _bsArgs = Nothing
    , _bsEnv = Nothing
    , _bsEntrypoint = Nothing
    , _bsWaitFor = Nothing
    , _bsName = Nothing
    , _bsId = Nothing
    }
-- | Working directory (relative to project source root) to use when running
-- this operation\'s container.
bsDir :: Lens' BuildStep (Maybe Text)
bsDir = lens _bsDir (\ s a -> s{_bsDir = a})
-- | A list of arguments that will be presented to the step when it is
-- started. If the image used to run the step\'s container has an
-- entrypoint, these args will be used as arguments to that entrypoint. If
-- the image does not define an entrypoint, the first element in args will
-- be used as the entrypoint, and the remainder will be used as arguments.
-- The '_Default' prism below treats an absent list as the empty list, so the
-- lens exposes @[Text]@ rather than @Maybe [Text]@.
bsArgs :: Lens' BuildStep [Text]
bsArgs
  = lens _bsArgs (\ s a -> s{_bsArgs = a}) . _Default .
      _Coerce
-- | A list of environment variable definitions to be used when running a
-- step. The elements are of the form \"KEY=VALUE\" for the environment
-- variable \"KEY\" being given the value \"VALUE\".
bsEnv :: Lens' BuildStep [Text]
bsEnv
  = lens _bsEnv (\ s a -> s{_bsEnv = a}) . _Default .
      _Coerce
-- | Optional entrypoint to be used instead of the build step image\'s
-- default If unset, the image\'s default will be used.
bsEntrypoint :: Lens' BuildStep (Maybe Text)
bsEntrypoint
  = lens _bsEntrypoint (\ s a -> s{_bsEntrypoint = a})
-- | The ID(s) of the step(s) that this build step depends on. This build
-- step will not start until all the build steps in wait_for have completed
-- successfully. If wait_for is empty, this build step will start when all
-- previous build steps in the Build.Steps list have completed
-- successfully.
bsWaitFor :: Lens' BuildStep [Text]
bsWaitFor
  = lens _bsWaitFor (\ s a -> s{_bsWaitFor = a}) .
      _Default
      . _Coerce
-- | The name of the container image that will run this particular build
-- step. If the image is already available in the host\'s Docker daemon\'s
-- cache, it will be run directly. If not, the host will attempt to pull
-- the image first, using the builder service account\'s credentials if
-- necessary. The Docker daemon\'s cache will already have the latest
-- versions of all of the officially supported build steps
-- (https:\/\/github.com\/GoogleCloudPlatform\/cloud-builders). The Docker
-- daemon will also have cached many of the layers for some popular images,
-- like \"ubuntu\", \"debian\", but they will be refreshed at the time you
-- attempt to use them. If you built an image in a previous build step, it
-- will be stored in the host\'s Docker daemon\'s cache and is available to
-- use as the name for a later build step.
bsName :: Lens' BuildStep (Maybe Text)
bsName = lens _bsName (\ s a -> s{_bsName = a})
-- | Optional unique identifier for this build step, used in wait_for to
-- reference this build step as a dependency.
bsId :: Lens' BuildStep (Maybe Text)
bsId = lens _bsId (\ s a -> s{_bsId = a})
-- NOTE: the parser order below must match the positional order of the
-- record's fields, since 'BuildStep'' is applied applicatively.
-- '.!= mempty' makes a missing JSON array decode as the empty list.
instance FromJSON BuildStep where
        parseJSON
          = withObject "BuildStep"
              (\ o ->
                 BuildStep' <$>
                   (o .:? "dir") <*> (o .:? "args" .!= mempty) <*>
                     (o .:? "env" .!= mempty)
                     <*> (o .:? "entrypoint")
                     <*> (o .:? "waitFor" .!= mempty)
                     <*> (o .:? "name")
                     <*> (o .:? "id"))
-- Absent ('Nothing') fields are dropped from the object via 'catMaybes'.
instance ToJSON BuildStep where
        toJSON BuildStep'{..}
          = object
              (catMaybes
                 [("dir" .=) <$> _bsDir, ("args" .=) <$> _bsArgs,
                  ("env" .=) <$> _bsEnv,
                  ("entrypoint" .=) <$> _bsEntrypoint,
                  ("waitFor" .=) <$> _bsWaitFor,
                  ("name" .=) <$> _bsName, ("id" .=) <$> _bsId])
-- | Provenance of the source. Ways to find the original source, or verify
-- that some source was used for this build.
--
-- /See:/ 'sourceProvenance' smart constructor.
data SourceProvenance = SourceProvenance'
    { _spResolvedRepoSource :: !(Maybe RepoSource)
    , _spResolvedStorageSource :: !(Maybe StorageSource)
    , _spFileHashes :: !(Maybe SourceProvenanceFileHashes)
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'SourceProvenance' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'spResolvedRepoSource'
--
-- * 'spResolvedStorageSource'
--
-- * 'spFileHashes'
sourceProvenance
    :: SourceProvenance
sourceProvenance =
    SourceProvenance'
    { _spResolvedRepoSource = Nothing
    , _spResolvedStorageSource = Nothing
    , _spFileHashes = Nothing
    }
-- | A copy of the build\'s source.repo_source, if exists, with any revisions
-- resolved.
spResolvedRepoSource :: Lens' SourceProvenance (Maybe RepoSource)
spResolvedRepoSource
  = lens _spResolvedRepoSource
      (\ s a -> s{_spResolvedRepoSource = a})
-- | A copy of the build\'s source.storage_source, if exists, with any
-- generations resolved.
spResolvedStorageSource :: Lens' SourceProvenance (Maybe StorageSource)
spResolvedStorageSource
  = lens _spResolvedStorageSource
      (\ s a -> s{_spResolvedStorageSource = a})
-- | Hash(es) of the build source, which can be used to verify that the
-- original source integrity was maintained in the build. Note that
-- FileHashes will only be populated if BuildOptions has requested a
-- SourceProvenanceHash. The keys to this map are file paths used as build
-- source and the values contain the hash values for those files. If the
-- build source came in a single package such as a gzipped tarfile
-- (.tar.gz), the FileHash will be for the single path to that file.
-- \'OutputOnly
spFileHashes :: Lens' SourceProvenance (Maybe SourceProvenanceFileHashes)
spFileHashes
  = lens _spFileHashes (\ s a -> s{_spFileHashes = a})
-- NOTE: the parser order below must match the positional order of the
-- record's fields, since 'SourceProvenance'' is applied applicatively.
instance FromJSON SourceProvenance where
        parseJSON
          = withObject "SourceProvenance"
              (\ o ->
                 SourceProvenance' <$>
                   (o .:? "resolvedRepoSource") <*>
                     (o .:? "resolvedStorageSource")
                     <*> (o .:? "fileHashes"))
-- Absent ('Nothing') fields are dropped from the object via 'catMaybes'.
instance ToJSON SourceProvenance where
        toJSON SourceProvenance'{..}
          = object
              (catMaybes
                 [("resolvedRepoSource" .=) <$> _spResolvedRepoSource,
                  ("resolvedStorageSource" .=) <$>
                    _spResolvedStorageSource,
                  ("fileHashes" .=) <$> _spFileHashes])
-- | Response including listed builds.
--
-- /See:/ 'listBuildsResponse' smart constructor.
data ListBuildsResponse = ListBuildsResponse'
    { _lbrNextPageToken :: !(Maybe Text)
    , _lbrBuilds :: !(Maybe [Build])
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ListBuildsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lbrNextPageToken'
--
-- * 'lbrBuilds'
listBuildsResponse
    :: ListBuildsResponse
listBuildsResponse = ListBuildsResponse' Nothing Nothing
-- | Token to receive the next page of results.
lbrNextPageToken :: Lens' ListBuildsResponse (Maybe Text)
lbrNextPageToken = lens _lbrNextPageToken setField
  where setField rsp v = rsp{_lbrNextPageToken = v}
-- | Builds will be sorted by create_time, descending.
lbrBuilds :: Lens' ListBuildsResponse [Build]
lbrBuilds = lens _lbrBuilds setField . _Default . _Coerce
  where setField rsp v = rsp{_lbrBuilds = v}
instance FromJSON ListBuildsResponse where
        parseJSON =
          withObject "ListBuildsResponse" $ \ o ->
            ListBuildsResponse' <$> (o .:? "nextPageToken") <*>
              (o .:? "builds" .!= mempty)
instance ToJSON ListBuildsResponse where
        toJSON ListBuildsResponse'{..} =
          object . catMaybes $
            [ ("nextPageToken" .=) <$> _lbrNextPageToken
            , ("builds" .=) <$> _lbrBuilds
            ]
-- | The \`Status\` type defines a logical error model that is suitable for
-- different programming environments, including REST APIs and RPC APIs. It
-- is used by [gRPC](https:\/\/github.com\/grpc). The error model is
-- designed to be: - Simple to use and understand for most users - Flexible
-- enough to meet unexpected needs # Overview The \`Status\` message
-- contains three pieces of data: error code, error message, and error
-- details. The error code should be an enum value of google.rpc.Code, but
-- it may accept additional error codes if needed. The error message should
-- be a developer-facing English message that helps developers *understand*
-- and *resolve* the error. If a localized user-facing error message is
-- needed, put the localized message in the error details or localize it in
-- the client. The optional error details may contain arbitrary information
-- about the error. There is a predefined set of error detail types in the
-- package \`google.rpc\` which can be used for common error conditions. #
-- Language mapping The \`Status\` message is the logical representation of
-- the error model, but it is not necessarily the actual wire format. When
-- the \`Status\` message is exposed in different client libraries and
-- different wire protocols, it can be mapped differently. For example, it
-- will likely be mapped to some exceptions in Java, but more likely mapped
-- to some error codes in C. # Other uses The error model and the
-- \`Status\` message can be used in a variety of environments, either with
-- or without APIs, to provide a consistent developer experience across
-- different environments. Example uses of this error model include: -
-- Partial errors. If a service needs to return partial errors to the
-- client, it may embed the \`Status\` in the normal response to indicate
-- the partial errors. - Workflow errors. A typical workflow has multiple
-- steps. Each step may have a \`Status\` message for error reporting
-- purpose. - Batch operations. If a client uses batch request and batch
-- response, the \`Status\` message should be used directly inside batch
-- response, one for each error sub-response. - Asynchronous operations. If
-- an API call embeds asynchronous operation results in its response, the
-- status of those operations should be represented directly using the
-- \`Status\` message. - Logging. If some API errors are stored in logs,
-- the message \`Status\` could be used directly after any stripping needed
-- for security\/privacy reasons.
--
-- /See:/ 'status' smart constructor.
-- The code is stored as @Textual Int32@ so it round-trips whether the wire
-- value is a JSON number or a numeric string.
data Status = Status'
    { _sDetails :: !(Maybe [StatusDetailsItem])
    , _sCode :: !(Maybe (Textual Int32))
    , _sMessage :: !(Maybe Text)
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Status' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sDetails'
--
-- * 'sCode'
--
-- * 'sMessage'
status
    :: Status
status =
    Status'
    { _sDetails = Nothing
    , _sCode = Nothing
    , _sMessage = Nothing
    }
-- | A list of messages that carry the error details. There will be a common
-- set of message types for APIs to use.
sDetails :: Lens' Status [StatusDetailsItem]
sDetails
  = lens _sDetails (\ s a -> s{_sDetails = a}) .
      _Default
      . _Coerce
-- | The status code, which should be an enum value of google.rpc.Code.
-- 'mapping _Coerce' unwraps the 'Textual' newtype so callers see a plain
-- 'Int32'.
sCode :: Lens' Status (Maybe Int32)
sCode
  = lens _sCode (\ s a -> s{_sCode = a}) .
      mapping _Coerce
-- | A developer-facing error message, which should be in English. Any
-- user-facing error message should be localized and sent in the
-- google.rpc.Status.details field, or localized by the client.
sMessage :: Lens' Status (Maybe Text)
sMessage = lens _sMessage (\ s a -> s{_sMessage = a})
-- NOTE: the parser order below must match the positional order of the
-- record's fields, since 'Status'' is applied applicatively.
instance FromJSON Status where
        parseJSON
          = withObject "Status"
              (\ o ->
                 Status' <$>
                   (o .:? "details" .!= mempty) <*> (o .:? "code") <*>
                     (o .:? "message"))
-- Absent ('Nothing') fields are dropped from the object via 'catMaybes'.
instance ToJSON Status where
        toJSON Status'{..}
          = object
              (catMaybes
                 [("details" .=) <$> _sDetails,
                  ("code" .=) <$> _sCode,
                  ("message" .=) <$> _sMessage])
-- | The response message for Operations.ListOperations.
--
-- /See:/ 'listOperationsResponse' smart constructor.
data ListOperationsResponse = ListOperationsResponse'
    { _lorNextPageToken :: !(Maybe Text)
    , _lorOperations :: !(Maybe [Operation])
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ListOperationsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lorNextPageToken'
--
-- * 'lorOperations'
listOperationsResponse
    :: ListOperationsResponse
listOperationsResponse =
    ListOperationsResponse'
    { _lorNextPageToken = Nothing
    , _lorOperations = Nothing
    }
-- | The standard List next-page token.
lorNextPageToken :: Lens' ListOperationsResponse (Maybe Text)
lorNextPageToken
  = lens _lorNextPageToken
      (\ s a -> s{_lorNextPageToken = a})
-- | A list of operations that matches the specified filter in the request.
-- '_Default' treats an absent list as empty, so the lens exposes
-- @[Operation]@ rather than @Maybe [Operation]@.
lorOperations :: Lens' ListOperationsResponse [Operation]
lorOperations
  = lens _lorOperations
      (\ s a -> s{_lorOperations = a})
      . _Default
      . _Coerce
-- NOTE: the parser order below must match the positional order of the
-- record's fields, since 'ListOperationsResponse'' is applied applicatively.
instance FromJSON ListOperationsResponse where
        parseJSON
          = withObject "ListOperationsResponse"
              (\ o ->
                 ListOperationsResponse' <$>
                   (o .:? "nextPageToken") <*>
                     (o .:? "operations" .!= mempty))
-- Absent ('Nothing') fields are dropped from the object via 'catMaybes'.
instance ToJSON ListOperationsResponse where
        toJSON ListOperationsResponse'{..}
          = object
              (catMaybes
                 [("nextPageToken" .=) <$> _lorNextPageToken,
                  ("operations" .=) <$> _lorOperations])
-- | The request message for Operations.CancelOperation.
--
-- /See:/ 'cancelOperationRequest' smart constructor.
data CancelOperationRequest =
    CancelOperationRequest'
    deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'CancelOperationRequest' with the minimum fields required to make a request.
--
cancelOperationRequest
    :: CancelOperationRequest
cancelOperationRequest = CancelOperationRequest'
-- Decoding accepts any JSON object and ignores its contents.
instance FromJSON CancelOperationRequest where
        parseJSON =
          withObject "CancelOperationRequest" $ \ _ ->
            pure CancelOperationRequest'
-- Encoding always produces the empty JSON object.
instance ToJSON CancelOperationRequest where
        toJSON _ = emptyObject
-- | Container message for hash values.
--
-- /See:/ 'hash' smart constructor.
data Hash = Hash'
    { _hValue :: !(Maybe Bytes)
    , _hType :: !(Maybe HashType)
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Hash' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'hValue'
--
-- * 'hType'
hash
    :: Hash
hash = Hash' Nothing Nothing
-- | The hash value.
hValue :: Lens' Hash (Maybe ByteString)
hValue = lens _hValue setField . mapping _Bytes
  where setField h v = h{_hValue = v}
-- | The type of hash that was performed.
hType :: Lens' Hash (Maybe HashType)
hType = lens _hType setField
  where setField h v = h{_hType = v}
instance FromJSON Hash where
        parseJSON =
          withObject "Hash" $ \ o ->
            Hash' <$> (o .:? "value") <*> (o .:? "type")
instance ToJSON Hash where
        toJSON Hash'{..} =
          object . catMaybes $
            [ ("value" .=) <$> _hValue
            , ("type" .=) <$> _hType
            ]
-- | Results describes the artifacts created by the build pipeline.
--
-- /See:/ 'results' smart constructor.
data Results = Results'
    { _rImages :: !(Maybe [BuiltImage])
    , _rBuildStepImages :: !(Maybe [Text])
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Results' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rImages'
--
-- * 'rBuildStepImages'
results
    :: Results
results = Results' Nothing Nothing
-- | Images that were built as a part of the build.
rImages :: Lens' Results [BuiltImage]
rImages = lens _rImages setField . _Default . _Coerce
  where setField r v = r{_rImages = v}
-- | List of build step digests, in order corresponding to build step
-- indices.
rBuildStepImages :: Lens' Results [Text]
rBuildStepImages = lens _rBuildStepImages setField . _Default . _Coerce
  where setField r v = r{_rBuildStepImages = v}
instance FromJSON Results where
        parseJSON =
          withObject "Results" $ \ o ->
            Results' <$> (o .:? "images" .!= mempty) <*>
              (o .:? "buildStepImages" .!= mempty)
instance ToJSON Results where
        toJSON Results'{..} =
          object . catMaybes $
            [ ("images" .=) <$> _rImages
            , ("buildStepImages" .=) <$> _rBuildStepImages
            ]
-- | RepoSource describes the location of the source in a Google Cloud Source
-- Repository.
--
-- /See:/ 'repoSource' smart constructor.
-- Exactly one of commit SHA \/ branch name \/ tag name is expected by the
-- service, but that constraint is not enforced by this type.
data RepoSource = RepoSource'
    { _rsRepoName :: !(Maybe Text)
    , _rsCommitSha :: !(Maybe Text)
    , _rsBranchName :: !(Maybe Text)
    , _rsTagName :: !(Maybe Text)
    , _rsProjectId :: !(Maybe Text)
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'RepoSource' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rsRepoName'
--
-- * 'rsCommitSha'
--
-- * 'rsBranchName'
--
-- * 'rsTagName'
--
-- * 'rsProjectId'
repoSource
    :: RepoSource
repoSource =
    RepoSource'
    { _rsRepoName = Nothing
    , _rsCommitSha = Nothing
    , _rsBranchName = Nothing
    , _rsTagName = Nothing
    , _rsProjectId = Nothing
    }
-- | Name of the repo. If omitted, the name \"default\" is assumed.
rsRepoName :: Lens' RepoSource (Maybe Text)
rsRepoName
  = lens _rsRepoName (\ s a -> s{_rsRepoName = a})
-- | Explicit commit SHA to build.
rsCommitSha :: Lens' RepoSource (Maybe Text)
rsCommitSha
  = lens _rsCommitSha (\ s a -> s{_rsCommitSha = a})
-- | Name of the branch to build.
rsBranchName :: Lens' RepoSource (Maybe Text)
rsBranchName
  = lens _rsBranchName (\ s a -> s{_rsBranchName = a})
-- | Name of the tag to build.
rsTagName :: Lens' RepoSource (Maybe Text)
rsTagName
  = lens _rsTagName (\ s a -> s{_rsTagName = a})
-- | ID of the project that owns the repo. If omitted, the project ID
-- requesting the build is assumed.
rsProjectId :: Lens' RepoSource (Maybe Text)
rsProjectId
  = lens _rsProjectId (\ s a -> s{_rsProjectId = a})
-- NOTE: the parser order below must match the positional order of the
-- record's fields, since 'RepoSource'' is applied applicatively.
instance FromJSON RepoSource where
        parseJSON
          = withObject "RepoSource"
              (\ o ->
                 RepoSource' <$>
                   (o .:? "repoName") <*> (o .:? "commitSha") <*>
                     (o .:? "branchName")
                     <*> (o .:? "tagName")
                     <*> (o .:? "projectId"))
-- Absent ('Nothing') fields are dropped from the object via 'catMaybes'.
instance ToJSON RepoSource where
        toJSON RepoSource'{..}
          = object
              (catMaybes
                 [("repoName" .=) <$> _rsRepoName,
                  ("commitSha" .=) <$> _rsCommitSha,
                  ("branchName" .=) <$> _rsBranchName,
                  ("tagName" .=) <$> _rsTagName,
                  ("projectId" .=) <$> _rsProjectId])
-- | This resource represents a long-running operation that is the result of
-- a network API call.
--
-- /See:/ 'operation' smart constructor.
data Operation = Operation'
    { _oDone :: !(Maybe Bool)
    , _oError :: !(Maybe Status)
    , _oResponse :: !(Maybe OperationResponse)
    , _oName :: !(Maybe Text)
    , _oMetadata :: !(Maybe OperationMetadata)
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Operation' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'oDone'
--
-- * 'oError'
--
-- * 'oResponse'
--
-- * 'oName'
--
-- * 'oMetadata'
operation
    :: Operation
operation =
    Operation'
    { _oDone = Nothing
    , _oError = Nothing
    , _oResponse = Nothing
    , _oName = Nothing
    , _oMetadata = Nothing
    }
-- | If the value is \`false\`, it means the operation is still in progress.
-- If true, the operation is completed, and either \`error\` or
-- \`response\` is available.
oDone :: Lens' Operation (Maybe Bool)
oDone = lens _oDone (\ s a -> s{_oDone = a})
-- | The error result of the operation in case of failure or cancellation.
oError :: Lens' Operation (Maybe Status)
oError = lens _oError (\ s a -> s{_oError = a})
-- | The normal response of the operation in case of success. If the original
-- method returns no data on success, such as \`Delete\`, the response is
-- \`google.protobuf.Empty\`. If the original method is standard
-- \`Get\`\/\`Create\`\/\`Update\`, the response should be the resource.
-- For other methods, the response should have the type \`XxxResponse\`,
-- where \`Xxx\` is the original method name. For example, if the original
-- method name is \`TakeSnapshot()\`, the inferred response type is
-- \`TakeSnapshotResponse\`.
oResponse :: Lens' Operation (Maybe OperationResponse)
oResponse
  = lens _oResponse (\ s a -> s{_oResponse = a})
-- | The server-assigned name, which is only unique within the same service
-- that originally returns it. If you use the default HTTP mapping, the
-- \`name\` should have the format of \`operations\/some\/unique\/name\`.
oName :: Lens' Operation (Maybe Text)
oName = lens _oName (\ s a -> s{_oName = a})
-- | Service-specific metadata associated with the operation. It typically
-- contains progress information and common metadata such as create time.
-- Some services might not provide such metadata. Any method that returns a
-- long-running operation should document the metadata type, if any.
oMetadata :: Lens' Operation (Maybe OperationMetadata)
oMetadata
  = lens _oMetadata (\ s a -> s{_oMetadata = a})
-- NOTE: the parser order below must match the positional order of the
-- record's fields, since 'Operation'' is applied applicatively.
instance FromJSON Operation where
        parseJSON
          = withObject "Operation"
              (\ o ->
                 Operation' <$>
                   (o .:? "done") <*> (o .:? "error") <*>
                     (o .:? "response")
                     <*> (o .:? "name")
                     <*> (o .:? "metadata"))
-- Absent ('Nothing') fields are dropped from the object via 'catMaybes'.
instance ToJSON Operation where
        toJSON Operation'{..}
          = object
              (catMaybes
                 [("done" .=) <$> _oDone, ("error" .=) <$> _oError,
                  ("response" .=) <$> _oResponse,
                  ("name" .=) <$> _oName,
                  ("metadata" .=) <$> _oMetadata])
-- | A generic empty message that you can re-use to avoid defining duplicated
-- empty messages in your APIs. A typical example is to use it as the
-- request or the response type of an API method. For instance: service Foo
-- { rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); } The
-- JSON representation for \`Empty\` is empty JSON object \`{}\`.
--
-- /See:/ 'empty' smart constructor.
data Empty =
    Empty'
    deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Empty' with the minimum fields required to make a request.
--
empty
    :: Empty
empty = Empty'
-- Decoding accepts any JSON object and ignores its contents.
instance FromJSON Empty where
        parseJSON = withObject "Empty" (\ _ -> pure Empty')
-- Encoding always produces the empty JSON object.
instance ToJSON Empty where
        toJSON _ = emptyObject
--
-- /See:/ 'statusDetailsItem' smart constructor.
-- A free-form key\/value detail entry carried inside a 'Status'.
-- NOTE(review): the generated name \'Addtional\' (sic) is part of the
-- exported API surface; renaming it would break downstream users.
newtype StatusDetailsItem = StatusDetailsItem'
    { _sdiAddtional :: HashMap Text JSONValue
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'StatusDetailsItem' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sdiAddtional'
statusDetailsItem
    :: HashMap Text JSONValue -- ^ 'sdiAddtional'
    -> StatusDetailsItem
statusDetailsItem pSdiAddtional_ =
    StatusDetailsItem'
    { _sdiAddtional = _Coerce # pSdiAddtional_
    }
-- | Properties of the object. Contains field \'type with type URL.
sdiAddtional :: Lens' StatusDetailsItem (HashMap Text JSONValue)
sdiAddtional
  = lens _sdiAddtional (\ s a -> s{_sdiAddtional = a})
      . _Coerce
-- The whole JSON object becomes the map: decode via 'parseJSONObject',
-- encode by serialising the wrapped map directly (no named fields).
instance FromJSON StatusDetailsItem where
        parseJSON
          = withObject "StatusDetailsItem"
              (\ o -> StatusDetailsItem' <$> (parseJSONObject o))
instance ToJSON StatusDetailsItem where
        toJSON = toJSON . _sdiAddtional
-- | A build resource in the Container Builder API. At a high level, a Build
-- describes where to find source code, how to build it (for example, the
-- builder image to run on the source), and what tag to apply to the built
-- image when it is pushed to Google Container Registry. Fields can include
-- the following variables which will be expanded when the build is
-- created: - $PROJECT_ID: the project ID of the build. - $BUILD_ID: the
-- autogenerated ID of the build. - $REPO_NAME: the source repository name
-- specified by RepoSource. - $BRANCH_NAME: the branch name specified by
-- RepoSource. - $TAG_NAME: the tag name specified by RepoSource. -
-- $REVISION_ID or $COMMIT_SHA: the commit SHA specified by RepoSource or
-- resolved from the specified branch or tag.
--
-- /See:/ 'build' smart constructor.
-- All fields are strict and optional; timestamps use the 'DateTime''
-- wrapper and the timeout uses 'Duration' for wire-format round-tripping.
data Build = Build'
    { _bImages :: !(Maybe [Text])
    , _bStatus :: !(Maybe BuildStatus)
    , _bSourceProvenance :: !(Maybe SourceProvenance)
    , _bLogURL :: !(Maybe Text)
    , _bResults :: !(Maybe Results)
    , _bStartTime :: !(Maybe DateTime')
    , _bLogsBucket :: !(Maybe Text)
    , _bSteps :: !(Maybe [BuildStep])
    , _bStatusDetail :: !(Maybe Text)
    , _bSource :: !(Maybe Source)
    , _bId :: !(Maybe Text)
    , _bOptions :: !(Maybe BuildOptions)
    , _bProjectId :: !(Maybe Text)
    , _bBuildTriggerId :: !(Maybe Text)
    , _bTimeout :: !(Maybe Duration)
    , _bFinishTime :: !(Maybe DateTime')
    , _bCreateTime :: !(Maybe DateTime')
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Build' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'bImages'
--
-- * 'bStatus'
--
-- * 'bSourceProvenance'
--
-- * 'bLogURL'
--
-- * 'bResults'
--
-- * 'bStartTime'
--
-- * 'bLogsBucket'
--
-- * 'bSteps'
--
-- * 'bStatusDetail'
--
-- * 'bSource'
--
-- * 'bId'
--
-- * 'bOptions'
--
-- * 'bProjectId'
--
-- * 'bBuildTriggerId'
--
-- * 'bTimeout'
--
-- * 'bFinishTime'
--
-- * 'bCreateTime'
build
    :: Build
build =
    Build'
    { _bImages = Nothing
    , _bStatus = Nothing
    , _bSourceProvenance = Nothing
    , _bLogURL = Nothing
    , _bResults = Nothing
    , _bStartTime = Nothing
    , _bLogsBucket = Nothing
    , _bSteps = Nothing
    , _bStatusDetail = Nothing
    , _bSource = Nothing
    , _bId = Nothing
    , _bOptions = Nothing
    , _bProjectId = Nothing
    , _bBuildTriggerId = Nothing
    , _bTimeout = Nothing
    , _bFinishTime = Nothing
    , _bCreateTime = Nothing
    }
-- | A list of images to be pushed upon the successful completion of all
-- build steps. The images will be pushed using the builder service
-- account\'s credentials. The digests of the pushed images will be stored
-- in the Build resource\'s results field. If any of the images fail to be
-- pushed, the build is marked FAILURE.
bImages :: Lens' Build [Text]
bImages
  = lens _bImages (\ s a -> s{_bImages = a}) . _Default
      . _Coerce
-- | Status of the build. \'OutputOnly
bStatus :: Lens' Build (Maybe BuildStatus)
bStatus = lens _bStatus (\ s a -> s{_bStatus = a})
-- | A permanent fixed identifier for source. \'OutputOnly
bSourceProvenance :: Lens' Build (Maybe SourceProvenance)
bSourceProvenance
  = lens _bSourceProvenance
      (\ s a -> s{_bSourceProvenance = a})
-- | URL to logs for this build in Google Cloud Logging. \'OutputOnly
bLogURL :: Lens' Build (Maybe Text)
bLogURL = lens _bLogURL (\ s a -> s{_bLogURL = a})
-- | Results of the build. \'OutputOnly
bResults :: Lens' Build (Maybe Results)
bResults = lens _bResults (\ s a -> s{_bResults = a})
-- | Time at which execution of the build was started. \'OutputOnly
bStartTime :: Lens' Build (Maybe UTCTime)
bStartTime
  = lens _bStartTime (\ s a -> s{_bStartTime = a}) .
      mapping _DateTime
-- | Google Cloud Storage bucket where logs should be written (see [Bucket
-- Name
-- Requirements](https:\/\/cloud.google.com\/storage\/docs\/bucket-naming#requirements)).
-- Logs file names will be of the format
-- \`${logs_bucket}\/log-${build_id}.txt\`.
bLogsBucket :: Lens' Build (Maybe Text)
bLogsBucket
  = lens _bLogsBucket (\ s a -> s{_bLogsBucket = a})
-- | Describes the operations to be performed on the workspace.
bSteps :: Lens' Build [BuildStep]
bSteps
  = lens _bSteps (\ s a -> s{_bSteps = a}) . _Default .
      _Coerce
-- | Customer-readable message about the current status. \'OutputOnly
bStatusDetail :: Lens' Build (Maybe Text)
bStatusDetail
  = lens _bStatusDetail
      (\ s a -> s{_bStatusDetail = a})
-- | Describes where to find the source files to build.
bSource :: Lens' Build (Maybe Source)
bSource = lens _bSource (\ s a -> s{_bSource = a})
-- | Unique identifier of the build. \'OutputOnly
bId :: Lens' Build (Maybe Text)
bId = lens _bId (\ s a -> s{_bId = a})
-- | Special options for this build.
bOptions :: Lens' Build (Maybe BuildOptions)
bOptions = lens _bOptions (\ s a -> s{_bOptions = a})
-- | ID of the project. \'OutputOnly.
bProjectId :: Lens' Build (Maybe Text)
bProjectId
  = lens _bProjectId (\ s a -> s{_bProjectId = a})
-- | The ID of the BuildTrigger that triggered this build, if it was
-- triggered automatically. \'OutputOnly
bBuildTriggerId :: Lens' Build (Maybe Text)
bBuildTriggerId
  = lens _bBuildTriggerId
      (\ s a -> s{_bBuildTriggerId = a})
-- | Amount of time that this build should be allowed to run, to second
-- granularity. If this amount of time elapses, work on the build will
-- cease and the build status will be TIMEOUT. Default time is ten minutes.
bTimeout :: Lens' Build (Maybe Scientific)
bTimeout
  = lens _bTimeout (\ s a -> s{_bTimeout = a}) .
      mapping _Duration
-- | Time at which execution of the build was finished. The difference
-- between finish_time and start_time is the duration of the build\'s
-- execution. \'OutputOnly
bFinishTime :: Lens' Build (Maybe UTCTime)
bFinishTime
  = lens _bFinishTime (\ s a -> s{_bFinishTime = a}) .
      mapping _DateTime
-- | Time at which the request to create the build was received. \'OutputOnly
bCreateTime :: Lens' Build (Maybe UTCTime)
bCreateTime
  = lens _bCreateTime (\ s a -> s{_bCreateTime = a}) .
      mapping _DateTime
-- NOTE: the parser order below must match the positional order of the
-- record's fields, since 'Build'' is applied applicatively.
instance FromJSON Build where
        parseJSON
          = withObject "Build"
              (\ o ->
                 Build' <$>
                   (o .:? "images" .!= mempty) <*> (o .:? "status") <*>
                     (o .:? "sourceProvenance")
                     <*> (o .:? "logUrl")
                     <*> (o .:? "results")
                     <*> (o .:? "startTime")
                     <*> (o .:? "logsBucket")
                     <*> (o .:? "steps" .!= mempty)
                     <*> (o .:? "statusDetail")
                     <*> (o .:? "source")
                     <*> (o .:? "id")
                     <*> (o .:? "options")
                     <*> (o .:? "projectId")
                     <*> (o .:? "buildTriggerId")
                     <*> (o .:? "timeout")
                     <*> (o .:? "finishTime")
                     <*> (o .:? "createTime"))
-- Absent ('Nothing') fields are dropped from the object via 'catMaybes'.
instance ToJSON Build where
        toJSON Build'{..}
          = object
              (catMaybes
                 [("images" .=) <$> _bImages,
                  ("status" .=) <$> _bStatus,
                  ("sourceProvenance" .=) <$> _bSourceProvenance,
                  ("logUrl" .=) <$> _bLogURL,
                  ("results" .=) <$> _bResults,
                  ("startTime" .=) <$> _bStartTime,
                  ("logsBucket" .=) <$> _bLogsBucket,
                  ("steps" .=) <$> _bSteps,
                  ("statusDetail" .=) <$> _bStatusDetail,
                  ("source" .=) <$> _bSource, ("id" .=) <$> _bId,
                  ("options" .=) <$> _bOptions,
                  ("projectId" .=) <$> _bProjectId,
                  ("buildTriggerId" .=) <$> _bBuildTriggerId,
                  ("timeout" .=) <$> _bTimeout,
                  ("finishTime" .=) <$> _bFinishTime,
                  ("createTime" .=) <$> _bCreateTime])
-- | Hash(es) of the build source, which can be used to verify that the
-- original source integrity was maintained in the build. Note that
-- FileHashes will only be populated if BuildOptions has requested a
-- SourceProvenanceHash. The keys to this map are file paths used as build
-- source and the values contain the hash values for those files. If the
-- build source came in a single package such as a gzipped tarfile
-- (.tar.gz), the FileHash will be for the single path to that file.
-- \'OutputOnly
--
-- /See:/ 'sourceProvenanceFileHashes' smart constructor.
newtype SourceProvenanceFileHashes = SourceProvenanceFileHashes'
    { _spfhAddtional :: HashMap Text FileHashes
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'SourceProvenanceFileHashes' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'spfhAddtional'
sourceProvenanceFileHashes
    :: HashMap Text FileHashes -- ^ 'spfhAddtional'
    -> SourceProvenanceFileHashes
sourceProvenanceFileHashes pSpfhAddtional_ =
    SourceProvenanceFileHashes'
    { _spfhAddtional = _Coerce # pSpfhAddtional_
    }
-- | Map from source file path to that file's 'FileHashes'.
spfhAddtional :: Lens' SourceProvenanceFileHashes (HashMap Text FileHashes)
spfhAddtional
  = lens _spfhAddtional
      (\ s a -> s{_spfhAddtional = a})
      . _Coerce
-- The whole JSON object becomes the map: decode via 'parseJSONObject',
-- encode by serialising the wrapped map directly (no named fields).
instance FromJSON SourceProvenanceFileHashes where
        parseJSON
          = withObject "SourceProvenanceFileHashes"
              (\ o ->
                 SourceProvenanceFileHashes' <$> (parseJSONObject o))
instance ToJSON SourceProvenanceFileHashes where
        toJSON = toJSON . _spfhAddtional
-- | Request to cancel an ongoing build.
--
-- /See:/ 'cancelBuildRequest' smart constructor.
data CancelBuildRequest =
    CancelBuildRequest'
    deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'CancelBuildRequest' with the minimum fields required to make a request.
--
cancelBuildRequest
    :: CancelBuildRequest
cancelBuildRequest = CancelBuildRequest'
-- Decoding accepts any JSON object and ignores its contents.
instance FromJSON CancelBuildRequest where
        parseJSON =
          withObject "CancelBuildRequest" $ \ _ ->
            pure CancelBuildRequest'
-- Encoding always produces the empty JSON object.
instance ToJSON CancelBuildRequest where
        toJSON _ = emptyObject
-- | StorageSource describes the location of the source in an archive file in
-- Google Cloud Storage.
--
-- /See:/ 'storageSource' smart constructor.
data StorageSource = StorageSource'
    { _ssBucket     :: !(Maybe Text)
    , _ssObject     :: !(Maybe Text)
    , _ssGeneration :: !(Maybe (Textual Int64))
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'StorageSource' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ssBucket'
--
-- * 'ssObject'
--
-- * 'ssGeneration'
storageSource :: StorageSource
storageSource =
    StorageSource'
        { _ssBucket = Nothing
        , _ssObject = Nothing
        , _ssGeneration = Nothing
        }

-- | Google Cloud Storage bucket containing source (see [Bucket Name
-- Requirements](https:\/\/cloud.google.com\/storage\/docs\/bucket-naming#requirements)).
ssBucket :: Lens' StorageSource (Maybe Text)
ssBucket = lens _ssBucket (\s new -> s {_ssBucket = new})

-- | Google Cloud Storage object containing source. This object must be a
-- gzipped archive file (.tar.gz) containing source to build.
ssObject :: Lens' StorageSource (Maybe Text)
ssObject = lens _ssObject (\s new -> s {_ssObject = new})

-- | Google Cloud Storage generation for the object. If the generation is
-- omitted, the latest generation will be used.
ssGeneration :: Lens' StorageSource (Maybe Int64)
ssGeneration =
    lens _ssGeneration (\s new -> s {_ssGeneration = new}) . mapping _Coerce

instance FromJSON StorageSource where
    parseJSON =
        withObject "StorageSource" $ \o ->
            StorageSource'
                <$> (o .:? "bucket")
                <*> (o .:? "object")
                <*> (o .:? "generation")

instance ToJSON StorageSource where
    toJSON StorageSource'{..} =
        object
            (catMaybes
                 [ ("bucket" .=) <$> _ssBucket
                 , ("object" .=) <$> _ssObject
                 , ("generation" .=) <$> _ssGeneration
                 ])
-- | Response containing existing BuildTriggers.
--
-- /See:/ 'listBuildTriggersResponse' smart constructor.
newtype ListBuildTriggersResponse = ListBuildTriggersResponse'
    { _lbtrTriggers :: Maybe [BuildTrigger]
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'ListBuildTriggersResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lbtrTriggers'
listBuildTriggersResponse :: ListBuildTriggersResponse
listBuildTriggersResponse =
    ListBuildTriggersResponse' {_lbtrTriggers = Nothing}

-- | BuildTriggers for the project, sorted by create_time descending.
lbtrTriggers :: Lens' ListBuildTriggersResponse [BuildTrigger]
lbtrTriggers =
    lens _lbtrTriggers (\s new -> s {_lbtrTriggers = new})
        . _Default
        . _Coerce

instance FromJSON ListBuildTriggersResponse where
    parseJSON =
        withObject "ListBuildTriggersResponse" $ \o ->
            ListBuildTriggersResponse' <$> (o .:? "triggers" .!= mempty)

instance ToJSON ListBuildTriggersResponse where
    toJSON ListBuildTriggersResponse'{..} =
        object (catMaybes [("triggers" .=) <$> _lbtrTriggers])
-- | Container message for hashes of byte content of files, used in
-- SourceProvenance messages to verify integrity of source input to the
-- build.
--
-- /See:/ 'fileHashes' smart constructor.
newtype FileHashes = FileHashes'
    { _fhFileHash :: Maybe [Hash]
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'FileHashes' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'fhFileHash'
fileHashes :: FileHashes
fileHashes = FileHashes' {_fhFileHash = Nothing}

-- | Collection of file hashes.
fhFileHash :: Lens' FileHashes [Hash]
fhFileHash =
    lens _fhFileHash (\s new -> s {_fhFileHash = new})
        . _Default
        . _Coerce

instance FromJSON FileHashes where
    parseJSON =
        withObject "FileHashes" $ \o ->
            FileHashes' <$> (o .:? "fileHash" .!= mempty)

instance ToJSON FileHashes where
    toJSON FileHashes'{..} =
        object (catMaybes [("fileHash" .=) <$> _fhFileHash])
-- | Source describes the location of the source in a supported storage
-- service.
--
-- /See:/ 'source' smart constructor.
data Source = Source'
    { _sRepoSource    :: !(Maybe RepoSource)
    , _sStorageSource :: !(Maybe StorageSource)
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'Source' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sRepoSource'
--
-- * 'sStorageSource'
source :: Source
source = Source' {_sRepoSource = Nothing, _sStorageSource = Nothing}

-- | If provided, get source from this location in a Cloud Repo.
sRepoSource :: Lens' Source (Maybe RepoSource)
sRepoSource = lens _sRepoSource (\s new -> s {_sRepoSource = new})

-- | If provided, get the source from this location in Google Cloud
-- Storage.
sStorageSource :: Lens' Source (Maybe StorageSource)
sStorageSource =
    lens _sStorageSource (\s new -> s {_sStorageSource = new})

instance FromJSON Source where
    parseJSON =
        withObject "Source" $ \o ->
            Source' <$> (o .:? "repoSource") <*> (o .:? "storageSource")

instance ToJSON Source where
    toJSON Source'{..} =
        object
            (catMaybes
                 [ ("repoSource" .=) <$> _sRepoSource
                 , ("storageSource" .=) <$> _sStorageSource
                 ])
-- | Service-specific metadata associated with the operation. It typically
-- contains progress information and common metadata such as create time.
-- Some services might not provide such metadata. Any method that returns a
-- long-running operation should document the metadata type, if any.
--
-- /See:/ 'operationMetadata' smart constructor.
newtype OperationMetadata = OperationMetadata'
    { _omAddtional :: HashMap Text JSONValue
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'OperationMetadata' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'omAddtional'
operationMetadata
    :: HashMap Text JSONValue -- ^ 'omAddtional'
    -> OperationMetadata
operationMetadata additional =
    OperationMetadata' {_omAddtional = _Coerce # additional}

-- | Properties of the object. Contains field \'type with type URL.
omAddtional :: Lens' OperationMetadata (HashMap Text JSONValue)
omAddtional =
    lens _omAddtional (\s new -> s {_omAddtional = new}) . _Coerce

instance FromJSON OperationMetadata where
    parseJSON =
        withObject "OperationMetadata" $ \o ->
            OperationMetadata' <$> parseJSONObject o

instance ToJSON OperationMetadata where
    toJSON = toJSON . _omAddtional
-- | Metadata for build operations.
--
-- /See:/ 'buildOperationMetadata' smart constructor.
newtype BuildOperationMetadata = BuildOperationMetadata'
    { _bomBuild :: Maybe Build
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'BuildOperationMetadata' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'bomBuild'
buildOperationMetadata :: BuildOperationMetadata
buildOperationMetadata = BuildOperationMetadata' {_bomBuild = Nothing}

-- | The build that the operation is tracking.
bomBuild :: Lens' BuildOperationMetadata (Maybe Build)
bomBuild = lens _bomBuild (\s new -> s {_bomBuild = new})

instance FromJSON BuildOperationMetadata where
    parseJSON =
        withObject "BuildOperationMetadata" $ \o ->
            BuildOperationMetadata' <$> (o .:? "build")

instance ToJSON BuildOperationMetadata where
    toJSON BuildOperationMetadata'{..} =
        object (catMaybes [("build" .=) <$> _bomBuild])
-- | Optional arguments to enable specific features of builds.
--
-- /See:/ 'buildOptions' smart constructor.
data BuildOptions = BuildOptions'
    { _boRequestedVerifyOption :: !(Maybe BuildOptionsRequestedVerifyOption)
    , _boSourceProvenanceHash  :: !(Maybe [Text])
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'BuildOptions' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'boRequestedVerifyOption'
--
-- * 'boSourceProvenanceHash'
buildOptions :: BuildOptions
buildOptions =
    BuildOptions'
        { _boRequestedVerifyOption = Nothing
        , _boSourceProvenanceHash = Nothing
        }

-- | Requested verifiability options.
boRequestedVerifyOption :: Lens' BuildOptions (Maybe BuildOptionsRequestedVerifyOption)
boRequestedVerifyOption =
    lens _boRequestedVerifyOption
        (\s new -> s {_boRequestedVerifyOption = new})

-- | Requested hash for SourceProvenance.
boSourceProvenanceHash :: Lens' BuildOptions [Text]
boSourceProvenanceHash =
    lens _boSourceProvenanceHash
        (\s new -> s {_boSourceProvenanceHash = new})
        . _Default
        . _Coerce

instance FromJSON BuildOptions where
    parseJSON =
        withObject "BuildOptions" $ \o ->
            BuildOptions'
                <$> (o .:? "requestedVerifyOption")
                <*> (o .:? "sourceProvenanceHash" .!= mempty)

instance ToJSON BuildOptions where
    toJSON BuildOptions'{..} =
        object
            (catMaybes
                 [ ("requestedVerifyOption" .=) <$> _boRequestedVerifyOption
                 , ("sourceProvenanceHash" .=) <$> _boSourceProvenanceHash
                 ])
-- | The normal response of the operation in case of success. If the original
-- method returns no data on success, such as \`Delete\`, the response is
-- \`google.protobuf.Empty\`. If the original method is standard
-- \`Get\`\/\`Create\`\/\`Update\`, the response should be the resource.
-- For other methods, the response should have the type \`XxxResponse\`,
-- where \`Xxx\` is the original method name. For example, if the original
-- method name is \`TakeSnapshot()\`, the inferred response type is
-- \`TakeSnapshotResponse\`.
--
-- /See:/ 'operationResponse' smart constructor.
newtype OperationResponse = OperationResponse'
    { _orAddtional :: HashMap Text JSONValue
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'OperationResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'orAddtional'
operationResponse
    :: HashMap Text JSONValue -- ^ 'orAddtional'
    -> OperationResponse
operationResponse additional =
    OperationResponse' {_orAddtional = _Coerce # additional}

-- | Properties of the object. Contains field \'type with type URL.
orAddtional :: Lens' OperationResponse (HashMap Text JSONValue)
orAddtional =
    lens _orAddtional (\s new -> s {_orAddtional = new}) . _Coerce

instance FromJSON OperationResponse where
    parseJSON =
        withObject "OperationResponse" $ \o ->
            OperationResponse' <$> parseJSONObject o

instance ToJSON OperationResponse where
    toJSON = toJSON . _orAddtional
-- | Configuration for an automated build in response to source repository
-- changes.
--
-- /See:/ 'buildTrigger' smart constructor.
data BuildTrigger = BuildTrigger'
    { _btDisabled        :: !(Maybe Bool)
    , _btTriggerTemplate :: !(Maybe RepoSource)
    , _btBuild           :: !(Maybe Build)
    , _btId              :: !(Maybe Text)
    , _btDescription     :: !(Maybe Text)
    , _btFilename        :: !(Maybe Text)
    , _btCreateTime      :: !(Maybe DateTime')
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'BuildTrigger' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'btDisabled'
--
-- * 'btTriggerTemplate'
--
-- * 'btBuild'
--
-- * 'btId'
--
-- * 'btDescription'
--
-- * 'btFilename'
--
-- * 'btCreateTime'
buildTrigger :: BuildTrigger
buildTrigger =
    BuildTrigger'
        { _btDisabled = Nothing
        , _btTriggerTemplate = Nothing
        , _btBuild = Nothing
        , _btId = Nothing
        , _btDescription = Nothing
        , _btFilename = Nothing
        , _btCreateTime = Nothing
        }

-- | If true, the trigger will never result in a build.
btDisabled :: Lens' BuildTrigger (Maybe Bool)
btDisabled = lens _btDisabled (\s new -> s {_btDisabled = new})

-- | Template describing the types of source changes to trigger a build.
-- Branch and tag names in trigger templates are interpreted as regular
-- expressions. Any branch or tag change that matches that regular
-- expression will trigger a build.
btTriggerTemplate :: Lens' BuildTrigger (Maybe RepoSource)
btTriggerTemplate =
    lens _btTriggerTemplate (\s new -> s {_btTriggerTemplate = new})

-- | Contents of the build template.
btBuild :: Lens' BuildTrigger (Maybe Build)
btBuild = lens _btBuild (\s new -> s {_btBuild = new})

-- | Unique identifier of the trigger. \'OutputOnly
btId :: Lens' BuildTrigger (Maybe Text)
btId = lens _btId (\s new -> s {_btId = new})

-- | Human-readable description of this trigger.
btDescription :: Lens' BuildTrigger (Maybe Text)
btDescription = lens _btDescription (\s new -> s {_btDescription = new})

-- | Path, from the source root, to a file whose contents is used for the
-- template.
btFilename :: Lens' BuildTrigger (Maybe Text)
btFilename = lens _btFilename (\s new -> s {_btFilename = new})

-- | Time when the trigger was created. \'OutputOnly
btCreateTime :: Lens' BuildTrigger (Maybe UTCTime)
btCreateTime =
    lens _btCreateTime (\s new -> s {_btCreateTime = new})
        . mapping _DateTime

instance FromJSON BuildTrigger where
    parseJSON =
        withObject "BuildTrigger" $ \o ->
            BuildTrigger'
                <$> (o .:? "disabled")
                <*> (o .:? "triggerTemplate")
                <*> (o .:? "build")
                <*> (o .:? "id")
                <*> (o .:? "description")
                <*> (o .:? "filename")
                <*> (o .:? "createTime")

instance ToJSON BuildTrigger where
    toJSON BuildTrigger'{..} =
        object
            (catMaybes
                 [ ("disabled" .=) <$> _btDisabled
                 , ("triggerTemplate" .=) <$> _btTriggerTemplate
                 , ("build" .=) <$> _btBuild
                 , ("id" .=) <$> _btId
                 , ("description" .=) <$> _btDescription
                 , ("filename" .=) <$> _btFilename
                 , ("createTime" .=) <$> _btCreateTime
                 ])
-- | BuiltImage describes an image built by the pipeline.
--
-- /See:/ 'builtImage' smart constructor.
data BuiltImage = BuiltImage'
    { _biName   :: !(Maybe Text)
    , _biDigest :: !(Maybe Text)
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'BuiltImage' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'biName'
--
-- * 'biDigest'
builtImage :: BuiltImage
builtImage = BuiltImage' {_biName = Nothing, _biDigest = Nothing}

-- | Name used to push the container image to Google Container Registry, as
-- presented to \`docker push\`.
biName :: Lens' BuiltImage (Maybe Text)
biName = lens _biName (\s new -> s {_biName = new})

-- | Docker Registry 2.0 digest.
biDigest :: Lens' BuiltImage (Maybe Text)
biDigest = lens _biDigest (\s new -> s {_biDigest = new})

instance FromJSON BuiltImage where
    parseJSON =
        withObject "BuiltImage" $ \o ->
            BuiltImage' <$> (o .:? "name") <*> (o .:? "digest")

instance ToJSON BuiltImage where
    toJSON BuiltImage'{..} =
        object
            (catMaybes
                 [("name" .=) <$> _biName, ("digest" .=) <$> _biDigest])
| rueshyna/gogol | gogol-containerbuilder/gen/Network/Google/ContainerBuilder/Types/Product.hs | mpl-2.0 | 52,868 | 0 | 28 | 12,873 | 9,233 | 5,345 | 3,888 | 988 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.FirewallPolicies.AddRule
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Inserts a rule into a firewall policy.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.firewallPolicies.addRule@.
module Network.Google.Resource.Compute.FirewallPolicies.AddRule
(
-- * REST Resource
FirewallPoliciesAddRuleResource
-- * Creating a Request
, firewallPoliciesAddRule
, FirewallPoliciesAddRule
-- * Request Lenses
, fparRequestId
, fparFirewallPolicy
, fparPayload
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.firewallPolicies.addRule@ method which the
-- 'FirewallPoliciesAddRule' request conforms to.
--
-- Path shape:
-- @compute\/v1\/locations\/global\/firewallPolicies\/{firewallPolicy}\/addRule@
-- with an optional @requestId@ query parameter, a 'FirewallPolicyRule'
-- JSON request body, and an 'Operation' JSON response.
type FirewallPoliciesAddRuleResource =
     "compute" :>
       "v1" :>
         "locations" :>
           "global" :>
             "firewallPolicies" :>
               Capture "firewallPolicy" Text :>
                 "addRule" :>
                   QueryParam "requestId" Text :>
                     QueryParam "alt" AltJSON :>
                       ReqBody '[JSON] FirewallPolicyRule :>
                         Post '[JSON] Operation
-- | Inserts a rule into a firewall policy.
--
-- /See:/ 'firewallPoliciesAddRule' smart constructor.
data FirewallPoliciesAddRule =
  FirewallPoliciesAddRule'
    { _fparRequestId :: !(Maybe Text) -- ^ optional idempotency token (@requestId@ query parameter)
    , _fparFirewallPolicy :: !Text -- ^ policy name, used as the path capture
    , _fparPayload :: !FirewallPolicyRule -- ^ rule sent as the JSON body
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'FirewallPoliciesAddRule' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'fparRequestId'
--
-- * 'fparFirewallPolicy'
--
-- * 'fparPayload'
firewallPoliciesAddRule
    :: Text -- ^ 'fparFirewallPolicy'
    -> FirewallPolicyRule -- ^ 'fparPayload'
    -> FirewallPoliciesAddRule
firewallPoliciesAddRule policy rule =
  FirewallPoliciesAddRule'
    { _fparRequestId = Nothing
    , _fparFirewallPolicy = policy
    , _fparPayload = rule
    }
-- | An optional request ID to identify requests. Specify a unique request ID
-- so that if you must retry your request, the server will know to ignore
-- the request if it has already been completed. For example, consider a
-- situation where you make an initial request and the request times out.
-- If you make the request again with the same request ID, the server can
-- check if original operation with the same request ID was received, and
-- if so, will ignore the second request. This prevents clients from
-- accidentally creating duplicate commitments. The request ID must be a
-- valid UUID with the exception that zero UUID is not supported
-- (00000000-0000-0000-0000-000000000000).
fparRequestId :: Lens' FirewallPoliciesAddRule (Maybe Text)
fparRequestId =
  lens _fparRequestId (\s new -> s {_fparRequestId = new})

-- | Name of the firewall policy to update.
fparFirewallPolicy :: Lens' FirewallPoliciesAddRule Text
fparFirewallPolicy =
  lens _fparFirewallPolicy (\s new -> s {_fparFirewallPolicy = new})

-- | Multipart request metadata.
fparPayload :: Lens' FirewallPoliciesAddRule FirewallPolicyRule
fparPayload = lens _fparPayload (\s new -> s {_fparPayload = new})
-- The request is dispatched against 'FirewallPoliciesAddRuleResource':
-- the policy name fills the path capture, the request ID the query
-- parameter, and the payload the JSON body.
instance GoogleRequest FirewallPoliciesAddRule where
        type Rs FirewallPoliciesAddRule = Operation
        type Scopes FirewallPoliciesAddRule =
             '["https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/compute"]
        requestClient FirewallPoliciesAddRule'{..} =
            buildClient
                (Proxy :: Proxy FirewallPoliciesAddRuleResource)
                mempty
                _fparFirewallPolicy
                _fparRequestId
                (Just AltJSON)
                _fparPayload
                computeService
import Test.Hspec
-- | The 20x20 grid of two-digit numbers from Project Euler problem 11.
input :: [[Int]]
input = [ [08, 02, 22, 97, 38, 15, 00, 40, 00, 75, 04, 05, 07, 78, 52, 12, 50, 77, 91, 08],
          [49, 49, 99, 40, 17, 81, 18, 57, 60, 87, 17, 40, 98, 43, 69, 48, 04, 56, 62, 00],
          [81, 49, 31, 73, 55, 79, 14, 29, 93, 71, 40, 67, 53, 88, 30, 03, 49, 13, 36, 65],
          [52, 70, 95, 23, 04, 60, 11, 42, 69, 24, 68, 56, 01, 32, 56, 71, 37, 02, 36, 91],
          [22, 31, 16, 71, 51, 67, 63, 89, 41, 92, 36, 54, 22, 40, 40, 28, 66, 33, 13, 80],
          [24, 47, 32, 60, 99, 03, 45, 02, 44, 75, 33, 53, 78, 36, 84, 20, 35, 17, 12, 50],
          [32, 98, 81, 28, 64, 23, 67, 10, 26, 38, 40, 67, 59, 54, 70, 66, 18, 38, 64, 70],
          [67, 26, 20, 68, 02, 62, 12, 20, 95, 63, 94, 39, 63, 08, 40, 91, 66, 49, 94, 21],
          [24, 55, 58, 05, 66, 73, 99, 26, 97, 17, 78, 78, 96, 83, 14, 88, 34, 89, 63, 72],
          [21, 36, 23, 09, 75, 00, 76, 44, 20, 45, 35, 14, 00, 61, 33, 97, 34, 31, 33, 95],
          [78, 17, 53, 28, 22, 75, 31, 67, 15, 94, 03, 80, 04, 62, 16, 14, 09, 53, 56, 92],
          [16, 39, 05, 42, 96, 35, 31, 47, 55, 58, 88, 24, 00, 17, 54, 24, 36, 29, 85, 57],
          [86, 56, 00, 48, 35, 71, 89, 07, 05, 44, 44, 37, 44, 60, 21, 58, 51, 54, 17, 58],
          [19, 80, 81, 68, 05, 94, 47, 69, 28, 73, 92, 13, 86, 52, 17, 77, 04, 89, 55, 40],
          [04, 52, 08, 83, 97, 35, 99, 16, 07, 97, 57, 32, 16, 26, 26, 79, 33, 27, 98, 66],
          [88, 36, 68, 87, 57, 62, 20, 72, 03, 46, 33, 67, 46, 55, 12, 32, 63, 93, 53, 69],
          [04, 42, 16, 73, 38, 25, 39, 11, 24, 94, 72, 18, 08, 46, 29, 32, 40, 62, 76, 36],
          [20, 69, 36, 41, 72, 30, 23, 88, 34, 62, 99, 69, 82, 67, 59, 85, 74, 04, 36, 16],
          [20, 73, 35, 29, 78, 31, 90, 01, 74, 31, 49, 71, 48, 86, 81, 16, 23, 57, 05, 54],
          [01, 70, 54, 71, 83, 51, 54, 69, 16, 92, 33, 48, 61, 43, 52, 01, 89, 19, 67, 48] ]
-- | Greatest product obtainable by starting at the given cell and taking
-- @len@ steps, each step moving to any of the eight surrounding cells.
-- Out-of-range starting cells contribute 0; a chain of length 0 is 1.
-- Note that paths may revisit cells, so this explores more than straight
-- lines; 'res' below uses 'neighbours_dir' instead.
max_neighbours :: [[Int]] -> (Int, Int) -> Int -> (Int, Int) -> Int
max_neighbours _ _ 0 _ = 1
max_neighbours grid (row, col) steps (rows, cols)
    | row < 0 || col < 0 || row >= rows || col >= cols = 0
    | otherwise = cell * maximum branches
    where
      cell = grid !! row !! col
      branches =
          [ max_neighbours grid (row + dr, col + dc) (steps - 1) (rows, cols)
          | (dr, dc) <- [ (1, 0), (1, 1), (1, -1)
                        , (-1, 0), (-1, 1), (-1, -1)
                        , (0, 1), (0, -1) ] ]
-- | Product of @len@ cells running straight down from @(x, y)@,
-- or 0 if the run would leave the grid.
neighbours_dir_down :: [[Int]] -> (Int, Int) -> Int -> (Int, Int) -> Int
neighbours_dir_down m (x, y) len (mx, _)
  | x + len > mx = 0
  | otherwise    = product [m !! xd !! y | xd <- [x .. x + len - 1]]

-- | Product of @len@ cells running straight right from @(x, y)@,
-- or 0 if the run would leave the grid.
neighbours_dir_right :: [[Int]] -> (Int, Int) -> Int -> (Int, Int) -> Int
neighbours_dir_right m (x, y) len (_, my)
  | y + len > my = 0
  | otherwise    = product [m !! x !! yd | yd <- [y .. y + len - 1]]

-- | Product of @len@ cells running diagonally down-left from @(x, y)@,
-- or 0 if the run would leave the grid.
neighbours_dir_diag_left :: [[Int]] -> (Int, Int) -> Int -> (Int, Int) -> Int
neighbours_dir_diag_left m (x, y) len (mx, _)
  | x + len > mx    = 0
  | y - len + 1 < 0 = 0
  | otherwise       = product [m !! (x + d) !! (y - d) | d <- [0 .. len - 1]]

-- | Product of @len@ cells running diagonally down-right from @(x, y)@,
-- or 0 if the run would leave the grid.
neighbours_dir_diag_right :: [[Int]] -> (Int, Int) -> Int -> (Int, Int) -> Int
neighbours_dir_diag_right m (x, y) len (mx, my)
  | x + len > mx = 0
  | y + len > my = 0
  | otherwise    = product [m !! (x + d) !! (y + d) | d <- [0 .. len - 1]]

-- | Best product of a straight run of @len@ cells starting at @(x, y)@
-- in any of the four downward/rightward directions.  (Upward/leftward runs
-- are covered by the corresponding run started from their other end.)
neighbours_dir :: [[Int]] -> (Int, Int) -> Int -> (Int, Int) -> Int
neighbours_dir m xy len lims =
  maximum
    [ neighbours_dir_down m xy len lims
    , neighbours_dir_right m xy len lims
    , neighbours_dir_diag_right m xy len lims
    , neighbours_dir_diag_left m xy len lims
    ]
-- | Project Euler 11 answer: greatest product of four adjacent grid
-- numbers in any straight direction, scanned from every starting cell.
res = maximum [ neighbours_dir input (x,y) 4 (20,20) | x <- [0..19], y <- [0..19]]
-- Tests + result print
main = hspec $ do
  describe "Dummy" $
    it "dummy test" $
      True `shouldBe` True
  describe "Euler test" $
    describe "Euler actual problem" $
      it "max production of 4 nearby numbers" $
        putStrLn ("res = " ++ show res)
| orbitgray/ProjectEuler | haskell/011.hs | lgpl-3.0 | 4,631 | 9 | 20 | 1,596 | 2,614 | 1,573 | 1,041 | -1 | -1 |
-- Copyright (c) 2014-2015 Jonathan M. Lange <jml@mumak.net>
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE NoImplicitPrelude #-}
module Haverer.PlayerSet (
PlayerSet
, PlayerSetError(..)
, toPlayerSet
, toPlayers
, randomize
, rotate
) where
import BasicPrelude
import Control.Monad.Except
import Control.Monad.Random
import System.Random.Shuffle
-- | Ways a list of players can fail to form a valid 'PlayerSet':
-- an unsupported player count, or repeated entries.
data PlayerSetError a = InvalidNumPlayers Int | DuplicatePlayers [a] deriving (Show, Eq)
-- | An ordered collection of 2-4 distinct players; build one with
-- 'toPlayerSet' (the raw constructor is not exported — see export list).
newtype PlayerSet a = PlayerSet { toPlayers :: [a] } deriving (Show, Eq, Functor, Traversable, Foldable)
-- | Validate a list of players: it must contain 2-4 entries with no
-- duplicates.  Returns the offending condition on the 'Left'.
toPlayerSet :: Ord a => [a] -> Either (PlayerSetError a) (PlayerSet a)
toPlayerSet players
  | hasDuplicates = throwError (DuplicatePlayers players)
  | outOfRange    = throwError (InvalidNumPlayers count)
  | otherwise     = return (PlayerSet players)
  where
    count         = length players
    hasDuplicates = count /= (length . nub . sort) players
    outOfRange    = count < 2 || count > 4
-- | Rotate the order of the PlayerSet
--
-- The player who was first is now last, whoever was second is now third,
-- whoever was third is now second, etc.
--
-- Since 0.3
rotate :: PlayerSet a -> PlayerSet a
rotate (PlayerSet players) =
  case players of
    (leader:rest) -> PlayerSet (rest ++ [leader])
    -- 'toPlayerSet' guarantees at least two players, so this is unreachable.
    []            -> error "Empty PlayerSet is impossible"
-- | Randomize the order of the PlayerSet
--
-- Since 0.3
randomize :: MonadRandom m => PlayerSet a -> m (PlayerSet a)
randomize ps = PlayerSet <$> shuffleM (toPlayers ps)
| jml/haverer | lib/Haverer/PlayerSet.hs | apache-2.0 | 2,006 | 0 | 10 | 351 | 389 | 218 | 171 | 27 | 1 |
{-# LANGUAGE FlexibleContexts, MultiParamTypeClasses, OverloadedStrings #-}
module Database.MongoDB.Query.Typesafe where
import Control.Monad.IO.Class
import Control.Monad.Trans.Control
import Data.Bson
import Data.Word
import qualified Database.MongoDB.Query as DB
import TSQuery.Query
-- | Comparison operators supported by the typed query layer.
data Op = Eq | Neq | Gt | Lt | In deriving (Show, Eq)
-- | A typed query expression specialised to the MongoDB operator set.
type QueryExp = QueryExpOp Op
-- | Typed counterpart of 'DB.Query': the untyped selector is replaced by
-- a 'QueryExp' tagged with the record type being queried.
data QueryTs a = QueryTs
  { options :: [DB.QueryOption] -- ^ Default = []
  , selection :: QueryExp a
  , skip :: Word32 -- ^ Number of initial matching documents to skip. Default = 0
  , limit :: DB.Limit -- ^ Maximum number of documents to return, 0 = no limit. Default = 0
  , sort :: DB.Order -- ^ Sort results by this order, [] = no sort. Default = []
  , snapshot :: Bool -- ^ If true assures no duplicates are returned, or objects missed, which were present at both the start and end of the query's execution (even if the object were updated). If an object is new during the query, or deleted during the query, it may or may not be returned, even with snapshot mode. Note that short query responses (less than 1MB) are always effectively snapshotted. Default = False
  , batchSize :: DB.BatchSize -- ^ The number of document to return in each batch response from the server. 0 means use Mongo default. Default = 0
  , hint :: DB.Order -- ^ Force MongoDB to use this index, [] = no hint. Default = []
  }
-- | Field equality test (serialised as a plain @field: value@ match).
eq :: (Val b, Eq b) => Entity a b -> b -> QueryExp a
eq = QBin Eq
-- | Field inequality test (@$ne@).
neq :: (Val b, Eq b) => Entity a b -> b -> QueryExp a
neq = QBin Neq
-- | Greater-than test (@$gt@).
gt :: (Val b, Ord b) => Entity a b -> b -> QueryExp a
gt = QBin Gt
-- | Less-than test (@$lt@).
lt :: (Val b, Ord b) => Entity a b -> b -> QueryExp a
lt = QBin Lt
-- | Membership test (@$in@).
cnt :: (Val b, Eq b) => Entity a b -> b -> QueryExp a
cnt = QBin In
-- | Build an untyped 'DB.Query' from a selector and collection, leaving
-- every other option at its default.
query :: DB.Selector -> DB.Collection -> DB.Query
query sel col = DB.Query [] (DB.Select sel col) [] 0 0 [] False 0 []
-- | Translate a typed query expression into a raw BSON selector.
tsQueryToSelector :: QueryExp a -> DB.Selector
tsQueryToSelector QAll = []
tsQueryToSelector (QBin Eq (Entity fld) v) = [ fld =: v ]
tsQueryToSelector (QBin op (Entity fld) v) = [ fld =: [ (opText op) =: v ] ]
  where
    -- The Eq case is unreachable here (handled by the clause above); it is
    -- kept so opText stays total over 'Op'.
    opText Eq = "$eq"
    opText Neq = "$ne"
    opText Gt = "$gt"
    opText Lt = "$lt"
    opText In = "$in"
tsQueryToSelector (QOr lhs rhs) = [ "$or" =: [ tsQueryToSelector lhs, tsQueryToSelector rhs ] ]
tsQueryToSelector (QAnd lhs rhs) = [ "$and" =: [ tsQueryToSelector lhs, tsQueryToSelector rhs ] ]
tsQueryToSelector (QNot q) = [ "$not" =: tsQueryToSelector q ]
--
-- | Phantom-type helper: never evaluated, only names the result type of a
-- query expression.  NOTE(review): assumes 'name' inspects only the type,
-- never the value — confirm 'Nameable' instances are lazy in the argument.
qtype :: QueryExp a -> a
qtype = undefined
-- | Run a typed query against the collection named after its result type.
find :: (Nameable a, MonadIO m, MonadBaseControl IO m) => QueryExp a -> DB.Action m DB.Cursor
find tsq = DB.find (query (tsQueryToSelector tsq) (name $ qtype tsq))
| pkamenarsky/typesafe-query-mongodb | src/Database/MongoDB/Query/Typesafe.hs | apache-2.0 | 2,736 | 0 | 10 | 633 | 789 | 423 | 366 | 47 | 5 |
module Graham.A305677 (a305677) where
import Graham.A321482 (a321482)
-- import Data.List (subsequences)
-- import HelperSequences.A007913 (a007913)
-- This counts the number of subsets of [n + 1..a072905 n - 1] whose product has
-- the same squarefree part as n (equivalently a072905 n).
-- 1,2,8,1,64,256,2048,4,1,...
-- bruteForce :: Integer -> Int
-- bruteForce n = length $ hasSameSquarefreePart $ map a007913 allSubsetProducts where
-- hasSameSquarefreePart = filter (== a007913 n)
-- allSubsetProducts = map product $ subsequences [n + 1..a072905 n - 1]
--
-- (!!) This does not work if A305677(n) = 0.
-- | a(n) = 2 ^ A321482(n).
a305677 :: Integer -> Integer
a305677 n = 2 ^ a321482 n
| peterokagey/haskellOEIS | src/Graham/A305677.hs | apache-2.0 | 671 | 0 | 6 | 109 | 54 | 37 | 17 | 4 | 1 |
module Main where
import Test.Framework
import Test.Framework.Providers.HUnit
import Test.HUnit
import TestXml
import TestXmlInternal
import TestTemplateBasic
import TestTemplateRepeat
import TestTemplateHiding
import TestTemplateBlogExample
-- All suite fragments gathered into one HUnit test list for test-framework.
tests = hUnitTestToTests . TestList . concat $
    [ TestXml.xmlTests
    , TestXmlInternal.xmlTests
    , TestTemplateBasic.templateTests
    , TestTemplateRepeat.templateTests
    , TestTemplateHiding.templateTests
    , TestTemplateBlogExample.templateTests
    ]
main = defaultMain tests
import Control.Exception (bracket)
import Control.Monad
import Data.String.Utils
import System.Directory
import System.Environment
import System.FilePath
import System.Find
import System.Process
import System.Exit
import System.IO
import Text.Printf
import Text.Regex.Posix.Wrap
main :: IO ()
main = do
  args <- getArgs
  when (length args < 3) (usage >> exitFailure)
  -- Only the last three arguments are used: regex, replacement, root path.
  let lastThree = reverse . take 3 . reverse $ args
      regex     = head lastThree
      template  = lastThree !! 1
      root      = lastThree !! 2
  ------------------------------------------------------------------------
  files <- find root doesFileExist
  dirs  <- find root doesDirectoryExist
  -- Files are numbered 1..k and directories continue at k+1, so the %n
  -- placeholder stays unique across the whole run.
  mapM_ (\(path, n) -> doReplace regex template n path) (zip files [1 ..])
  mapM_ (\(path, n) -> doReplace regex template n path) (zip dirs [(length files + 1) ..])
-- | Substitute numbered placeholders: the i-th element of @ls@ replaces
-- every occurrence of @%i@ (1-based) in the input string.
numReplace :: [String] -> String -> String
numReplace ls str = foldr substOne str (zip [1 :: Int ..] ls)
  where
    substOne (n, val) acc = replace ("%" ++ show n) val acc
-- | Apply the regex rename to one path.  @n@ fills the @%n@ placeholder;
-- regex capture groups fill @%1@, @%2@, ...  Asks for confirmation before
-- every move and skips when the target already exists.
doReplace regex replacement n fpath = do
  let fname = takeFileName fpath
      dname = takeDirectory fpath
  when (fname =~ regex) $ do
    let groups  = fname =~ regex :: [[String]]
        matched = head groups
        filled  = numReplace (tail matched) (replace "%n" (show n) replacement)
        target  = dname </> replace (head matched) filled fname
    askPermission
      (printf "Move '%s' to '%s'?" fpath target)
      (do fileThere <- doesFileExist target
          dirThere  <- doesDirectoryExist target
          isDir     <- doesDirectoryExist fpath
          if fileThere || dirThere
            then putStrLn " - File exist, skipping."
            else do
              putStrLn (printf "%s -> %s" fpath target)
              if isDir
                then renameDirectory fpath target
                else renameFile fpath target)
      (putStrLn " - Skipping")
-- | Read one character without waiting for Enter and without echo.
-- Terminal modes are flipped via @stty@ and restored by 'bracket' even if
-- reading fails.  (Relies on @stty@ being available on the PATH.)
getChar' :: IO Char
getChar' = bracket setRaw restore (const getChar)
  where
    setRaw    = system "stty raw" >> system "stty -echo"
    restore _ = system "stty sane" >> system "stty echo"
-- | Print a yes/no prompt and run the first action on \'y\', the second
-- on \'n\'; any other key re-asks.
askPermission :: String -> IO () -> IO () -> IO ()
askPermission prompt onYes onNo = do
  putStrLn (prompt ++ " (y/n)")
  answer <- getChar'
  case answer of
    'y' -> onYes
    'n' -> onNo
    _   -> do
      putStrLn "Answer must be 'y' or 'n'."
      askPermission prompt onYes onNo
-- | Print the command-line usage message to stderr.
usage :: IO ()
usage = do
  progname <- getProgName
  hPutStrLn stderr
    $ printf "Usage: %s <find_regex> <replacement> <filepath>" progname
module Problem74 where
import Data.Array
import Data.Char
-- | Search bound: chain lengths are tabulated for starting values 1..'lim'.
lim :: Int
lim = 1000000
-- Count how many starting values below one million yield a chain of
-- exactly sixty non-repeating terms (Project Euler 74).
main :: IO ()
main = print (length (filter (== 60) (elems chainLengths)))
-- | Memo table of chain lengths for every starting value in @[1 .. lim]@.
chainLengths :: Array Int Int
chainLengths = listArray (1, lim) (map chainLengths' [1 .. lim])
-- | Chain length for @k@: read from the memo table when in bounds, else
-- computed directly (digit-factorial sums can exceed 'lim' — up to
-- 6 * 9! = 2177280 for six-digit inputs).
getChainLengths :: Int -> Int
getChainLengths k = if k <= lim then chainLengths ! k else chainLengths' k
-- | Length of the non-repeating prefix of the digit-factorial-sum chain
-- starting at the argument.
chainLengths' :: Int -> Int
-- it turns out that there are only three such loops that exist:
-- 169 -> 363601 -> 1454 -> 169
-- 871 -> 45361 -> 871
-- 872 -> 45362 -> 872
chainLengths' n
  | sumFactorialDigit n == n          = 1
  | n == 169                          = 3
  | n `elem` [871, 872, 45361, 45362] = 2
  | otherwise                         = 1 + getChainLengths (sumFactorialDigit n)
-- | Sum of the factorials of the decimal digits of the argument.
sumFactorialDigit :: Int -> Int
sumFactorialDigit n = sum [product [1 .. digitToInt d] | d <- show n]
-- | @n!@ for non-negative @n@ (diverges on negative input, as before).
factorial :: Int -> Int
factorial n
  | n == 0    = 1
  | otherwise = n * factorial (n - 1)
| adityagupta1089/Project-Euler-Haskell | src/problems/Problem74.hs | bsd-3-clause | 971 | 0 | 10 | 268 | 331 | 174 | 157 | 24 | 1 |
-- | API entry point
module Web.MangoPay (
-- generic functions
MangoPayT
,runMangoPayT
,runResourceInMp
,MpException
,getAll
-- useful types
,Credentials(..)
,AccessPoint(..)
,AccessToken(..)
,OAuthToken(..)
,Pagination(..)
,PagedList(..)
,MpTime(..)
,MPUsableMonad
,ToHtQuery(..)
,CardExpiration(..)
,readCardExpiration
,writeCardExpiration
,KindOfAuthentication(..)
,SortDirection(..)
,GenericSort(..)
-- access
,createCredentialsSecret
,oauthLogin
,toAccessToken
-- Users
,NaturalUser(..)
,IncomeRange(..)
,incomeBounds
,incomeRange
,NaturalUserId
,LegalUser(..)
,LegalUserType(..)
,LegalUserId
,UserRef(..)
,PersonType(..)
,AnyUserId
,createNaturalUser
,modifyNaturalUser
,fetchNaturalUser
,createLegalUser
,modifyLegalUser
,fetchLegalUser
,getUser
,listUsers
,getExistingUserId
-- Wallets
,Wallet(..)
,Amount(..)
,WalletId
,Currency
,createWallet
,modifyWallet
,fetchWallet
,listWallets
,Transfer(..)
,TransferId
,TransferStatus(..)
,Transaction(..)
,TransactionId
,TransactionType(..)
,TransactionNature(..)
,TransactionFilter(..)
,TransactionSort(..)
,createTransfer
,fetchTransfer
,listTransactions
,listTransactionsForUser
-- Events and Hooks
,Event(..)
,EventType(..)
,EventSearchParams(..)
,searchEvents
,searchAllEvents
,checkEvent
,HookStatus(..)
,HookValidity(..)
,HookId
,Hook(..)
,createHook
,modifyHook
,fetchHook
,listHooks
,eventFromQueryString
,eventFromQueryStringT
-- Documents and pages
,Document(..)
,DocumentId
,DocumentType(..)
,DocumentStatus(..)
,DocumentFilter(..)
,createDocument
,modifyDocument
,fetchDocument
,createPage
,getKindOfAuthentication
,getRequiredDocumentTypes
,listDocuments
,listAllDocuments
-- Accounts
,BankAccount(..)
,BankAccountId
,BankAccountDetails(..)
,PaymentType(..)
,createAccount
,fetchAccount
,listAccounts
,accountCountry
-- Payins
,PaymentExecution(..)
,BankWireId
,BankWire(..)
,createBankWirePayIn
,fetchBankWirePayIn
,mkBankWire
,CardPayinId
,CardPayin(..)
,createCardPayin
,fetchCardPayin
,mkCardPayin
-- Payouts
,PayoutId
,Payout(..)
,mkPayout
,createPayout
,fetchPayout
-- Cards
,CardRegistration(..)
,CardRegistrationId
,CardId
,CardInfo(..)
,Card(..)
,CardValidity(..)
,mkCardRegistration
,createCardRegistration
,modifyCardRegistration
,fetchCard
,listCards
-- Refunds
,RefundId
,Refund(..)
,RefundRequest(..)
,RefundReason(..)
,RefundReasonType(..)
,refundTransfer
,refundPayin
,fetchRefund
)
where
import Web.MangoPay.Access
import Web.MangoPay.Accounts
import Web.MangoPay.Cards
import Web.MangoPay.Documents
import Web.MangoPay.Events
import Web.MangoPay.Monad
import Web.MangoPay.Payins
import Web.MangoPay.Payouts
import Web.MangoPay.Refunds
import Web.MangoPay.Users
import Web.MangoPay.Types
import Web.MangoPay.Wallets
| prowdsponsor/mangopay | mangopay/src/Web/MangoPay.hs | bsd-3-clause | 3,894 | 0 | 5 | 1,396 | 709 | 494 | 215 | 149 | 0 |
-----------------------------------------------------------------------------
-- |
-- Module : TestSuite.Uninterpreted.Axioms
-- Copyright : (c) Levent Erkok
-- License : BSD3
-- Maintainer : erkokl@gmail.com
-- Stability : experimental
--
-- Test suite for basic axioms and uninterpreted functions
-----------------------------------------------------------------------------
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveDataTypeable #-}
module TestSuite.Uninterpreted.Axioms(tests) where
import Utils.SBVTestFramework
import Data.Generics
-- | Test tree for this module: a single case checking that 'p0' is
-- provable once the quantified axiom 'axE' has been installed.
tests :: TestTree
tests =
  testGroup "Uninterpreted.Axioms"
    [ testCase "unint-axioms" (assertIsThm p0) ]
-- Example provided by Thomas DuBuisson:

-- | Empty carrier type used purely as an uninterpreted sort on the SMT
-- side; the unit payload is never inspected.
newtype Bitstring = Bitstring () deriving (Eq, Ord, Show, Read, Data, SymWord, HasKind)

-- | Symbolic values of the uninterpreted 'Bitstring' sort.
type SBitstring = SBV Bitstring
-- | Uninterpreted predicate over bitstrings.
a :: SBitstring -> SBool
a = uninterpret "a"

-- | Uninterpreted binary operation on bitstrings.
e :: SBitstring -> SBitstring -> SBitstring
e = uninterpret "e"
axE :: [String]
axE = [ "(assert (forall ((p Bitstring) (k Bitstring))"
, " (=> (and (a k) (a p)) (a (e k p)))))"
]
-- | For two free bitstrings, constrain @a@ to hold of both and claim
-- @a (e k p)@ — a theorem exactly because of the 'axE' axiom.
p0 :: Symbolic SBool
p0 = do
  bsP <- free "p" :: Symbolic SBitstring
  bsK <- free "k" :: Symbolic SBitstring
  addAxiom "axE" axE
  constrain (a bsP)
  constrain (a bsK)
  return (a (e bsK bsP))
| josefs/sbv | SBVTestSuite/TestSuite/Uninterpreted/Axioms.hs | bsd-3-clause | 1,292 | 0 | 10 | 253 | 262 | 141 | 121 | 26 | 1 |
-- Copyright (c) 2009-2010
-- The President and Fellows of Harvard College.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions
-- are met:
-- 1. Redistributions of source code must retain the above copyright
-- notice, this list of conditions and the following disclaimer.
-- 2. Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
-- 3. Neither the name of the University nor the names of its contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
-- THIS SOFTWARE IS PROVIDED BY THE UNIVERSITY AND CONTRIBUTORS ``AS IS'' AND
-- ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-- IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-- ARE DISCLAIMED. IN NO EVENT SHALL THE UNIVERSITY OR CONTRIBUTORS BE LIABLE
-- FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-- DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
-- OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
-- HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
-- LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
-- OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
-- SUCH DAMAGE.
--------------------------------------------------------------------------------
-- |
-- Module : Data.Symbol
-- Copyright : (c) Harvard University 2009-2010
-- License : BSD-style
-- Maintainer : mainland@eecs.harvard.edu
--
--------------------------------------------------------------------------------
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
module Data.Symbol (
Symbol,
intern,
intern',
unintern,
unintern'
) where
import Control.Concurrent.MVar
import Control.DeepSeq ( NFData, rnf )
#if __GLASGOW_HASKELL__ >= 608
import Data.String
#endif /* __GLASGOW_HASKELL__ >= 608 */
import qualified Data.Bimap as BM
import System.IO.Unsafe (unsafePerformIO)
-- | An interned string.  Equality and ordering are O(1) comparisons of
-- the unique id — note the 'Ord' instance therefore orders symbols by
-- allocation order, not lexicographically.
newtype Symbol = Symbol Int

instance Eq Symbol where
    (Symbol i1) == (Symbol i2) = i1 == i2

instance Ord Symbol where
    compare (Symbol i1) (Symbol i2) = compare i1 i2

instance NFData Symbol where
    rnf (Symbol a) = rnf a

-- | Global interning state: the next fresh id and a bidirectional
-- string\/id table.
data SymbolEnv = SymbolEnv
    { uniq    :: {-# UNPACK #-} !Int            -- ^ next unused id
    , symbols :: !(BM.Bimap String Int)         -- ^ string <-> id mapping
    }
-- | The global symbol table shared by 'intern' and 'unintern''.
--
-- The NOINLINE pragma is required: a top-level binding defined with
-- 'unsafePerformIO' must not be inlined, otherwise GHC may duplicate the
-- 'newMVar' call at each use site, creating several independent symbol
-- tables and silently breaking interning.
{-# NOINLINE symbolEnv #-}
symbolEnv :: MVar SymbolEnv
symbolEnv = unsafePerformIO $ newMVar $ SymbolEnv 1 BM.empty
-- We @seq@ @s@ so that we can guarantee that when we perform the lookup we
-- won't potentially have to evaluate a thunk that might itself call @intern@,
-- leading to a deadlock.

-- |Intern a string to produce a 'Symbol'.  The same string always yields
-- the same id within one process run; unseen strings are assigned the
-- next fresh id under the 'symbolEnv' lock.
{-# NOINLINE intern #-}
intern :: String -> Symbol
intern s = s `seq` unsafePerformIO $ modifyMVar symbolEnv $ \env -> do
    case BM.lookup s (symbols env) of
      Nothing  -> do let sym = uniq env
                     let env' = env { uniq = uniq env + 1,
                                      symbols = BM.insert s sym
                                                          (symbols env)
                                    }
                     -- Force the updated environment before publishing it,
                     -- so no thunks accumulate inside the MVar.
                     env' `seq` return (env', Symbol sym)
      Just sym -> return (env, Symbol sym)
-- | Intern a string and return the raw unique id instead of a 'Symbol'.
intern' :: String -> Int
intern' s =
    case intern s of
      Symbol i -> i
-- NOINLINE pragmas: both bindings are observers of the 'unsafePerformIO'
-- symbol table and must not be inlined/duplicated by the optimiser.
-- (Fix: the original file only carried a pragma for 'unintern', although
-- the binding that actually uses 'unsafePerformIO' is 'unintern''.)
{-# NOINLINE unintern #-}
{-# NOINLINE unintern' #-}
-- | Return the 'String' associated with an interned id.  The string is
-- forced before the MVar is released.
unintern' :: Int -> String
unintern' i = unsafePerformIO $ withMVar symbolEnv $ \env ->
    let str = symbols env BM.!> i
    in str `seq` return str
-- | Return the 'String' that was interned to produce the given 'Symbol'.
unintern :: Symbol -> String
unintern sym =
    case sym of
      Symbol i -> unintern' i
| jsa/symbol | Data/Symbol.hs | bsd-3-clause | 3,894 | 4 | 21 | 915 | 562 | 313 | 249 | 44 | 2 |
module Envirius.Commands
(
Cmd(..),
cmdParse,
cmdShowHelp,
getCommandsDesc
)
where
import Data.List (find)
import Envirius.Types.Command
import Envirius.Commands.Ls as Ls
import Envirius.Commands.Mk as Mk
import Envirius.Commands.Current as Current
import Envirius.Commands.LsCommands as LsCommands
import Envirius.Util (getAppName, rpad)
-- | All subcommands known to the application.  The @lscommands@ entry
-- gets its action overridden here so that it can print the full command
-- listing via 'getCommandsDesc'.
getCommands :: [Cmd]
getCommands = [Ls.cmd, Mk.cmd, Current.cmd,
               -- Replace default action with function from current module.
               -- getCommandsDesc can not be used in Envirius.Commands.LsCommands
               -- as it would be a recursive import
               LsCommands.cmd {cmdAction = (\_ -> putStrLn $ unlines $ getCommandsDesc)}]
-- | Look up a subcommand by name; 'Nothing' when no command matches.
cmdParse :: String -> Maybe Cmd
cmdParse wanted = find matches getCommands
  where
    matches c = cmdName c == wanted
-- | Print a subcommand's usage line, short description and detailed help.
cmdShowHelp :: Cmd -> IO()
cmdShowHelp x = putStrLn (unlines helpText)
  where
    helpText =
        [ "Usage: $ " ++ getAppName ++ " " ++ cmdName x ++ " " ++ cmdUsage x
        , "Description: " ++ cmdDesc x
        , ""
        ] ++ cmdHelp x
-- | One formatted line per known subcommand: indented, padded command
-- name followed by its short description.
getCommandsDesc :: [String]
getCommandsDesc =
    [ "    " ++ rpad 15 (cmdName c) ++ cmdDesc c | c <- getCommands ]
| ekalinin/envirius.hs | src/Envirius/Commands.hs | bsd-3-clause | 1,237 | 0 | 13 | 329 | 335 | 193 | 142 | 28 | 1 |
--------------------------------------------------------------------------
-- --
-- MinimiseDfa.hs --
-- --
-- Minimising a DFA. --
-- --
-- Regular expressions are defined in RegExp, and the type of --
-- NFAs in NfaTypes. The implementation of sets used is in --
-- Sets. --
-- --
-- (c) Simon Thompson, 1995, 2000 --
-- --
--------------------------------------------------------------------------
module Language.Mira.MinimiseDfa where
import qualified Data.Set as Set
import Data.Set ( Set, member )
import Language.Mira.RegExp
import Language.Mira.NfaTypes
--------------------------------------------------------------------------
-- --
-- Minimising the nfa - uses the equivalence classes generated --
-- by the function eqclasses. Replaces each state by the minimum --
-- state equivalent to it. The set functions clear up repeats etc. --
-- --
--------------------------------------------------------------------------
-- | Minimise a DFA: every state is replaced by the minimum element of its
-- equivalence class of indistinguishable states (from 'eqclasses'), so
-- equivalent states collapse into one; the Set operations remove the
-- duplicates this creates.  The empty machine is returned unchanged.
minimise :: Ord a => Nfa a -> Nfa a
minimise mach@(NFA states _ _ _) | Set.null states = mach
minimise mach = replace mini mach
  where
    -- Rebuild the automaton with every state pushed through f.
    replace f (NFA states moves start finish)
      = NFA states' moves' start' finish'
      where
        states' = Set.map f states
        moves' = Set.fromList [ Move (f a) c (f b) |
                                Move a c b <- Set.toList moves ]
        start' = f start
        finish' = Set.map f finish
    -- Canonical representative: the smallest state equivalent to a.
    mini a = minimum (Set.toList (eqclass a))
    -- Every state of the machine belongs to some class, so the [] case
    -- is unreachable for states actually in the automaton.
    eqclass a = case [ b | b <- Set.toList classes , a `member` b ] of
                  [] -> error "minimise eqclass"
                  (x:_) -> x
    -- fun (the predicate form of the partition) is unused here.
    (classes,fun) = eqclasses mach
--------------------------------------------------------------------------
-- --
-- Partition takes a binary predicate, represented by a function --
-- of type --
-- a -> a -> Bool --
-- assumed to represent an equivalence relation, and a (Set a), --
-- and returns the set of the equivalence classes under the --
-- relation. --
-- --
-- Implemented using the function part which does the same --
-- operation, except that it works over sets. --
-- --
--------------------------------------------------------------------------
-- | Split a set into the set of its equivalence classes under the given
-- binary predicate (assumed to be an equivalence relation).
partition :: Ord a => (a -> a -> Bool) -> Set a -> Set (Set a)
partition eq = Set.fromList . map Set.fromList . part eq . Set.toList
--------------------------------------------------------------------------
-- --
-- Partitions a list into a list of equivalence classes (lists) --
-- by folding in the addtoclass function. --
-- --
--------------------------------------------------------------------------
-- | Split a list into equivalence classes (as lists) under the given
-- relation, inserting one element at a time with 'addtoclass'.
part :: (a -> a -> Bool) -> [a] -> [[a]]
part eq = go
  where
    go []       = []
    go (x:rest) = addtoclass eq x (go rest)
--------------------------------------------------------------------------
-- --
-- addtoclass will add an element to the (first) equivalence --
-- class to which the element belongs, creating a new class if --
-- necessary. --
-- --
--------------------------------------------------------------------------
-- | Insert an element into the first equivalence class it belongs to
-- (tested against that class's representative, i.e. its first element),
-- creating a new singleton class if it matches none.
--
-- Fix: the original tested @f a (head c)@ — a partial function.  The
-- representative is now extracted by pattern matching, and the
-- impossible empty-class case (classes built by 'part' are never empty)
-- fails with a descriptive error instead of "Prelude.head: empty list".
addtoclass :: (a -> a -> Bool) -> a -> [[a]] -> [[a]]
addtoclass _ a [] = [[a]]
addtoclass f a (c@(rep:_):r)
  | f a rep   = (a:c) : r
  | otherwise = c : addtoclass f a r
addtoclass _ _ ([]:_) = error "addtoclass: empty equivalence class"
--------------------------------------------------------------------------
-- --
-- Given an nfa will return the set of sets of indistinguishable --
-- states, from which the minimal DFA can be constructed. --
-- --
-- This function simply strips off one half of the pair --
-- returned by eqclasses. --
-- --
--------------------------------------------------------------------------
-- | The set of sets of mutually indistinguishable states of an NFA —
-- just the set half of the pair produced by 'eqclasses'.
-- NOTE(review): the name looks like a typo of "equivclasses", but it is
-- part of the exported interface and therefore kept.
eqivclasses :: Ord a => Nfa a -> Set (Set a)
eqivclasses mach = fst (eqclasses mach)
--------------------------------------------------------------------------
-- --
-- eqclasses returns a pair, which consists of two --
-- representations of the partition of the states into indistin- --
-- guishable classes: --
-- the set of classes, as sets --
-- the boolean valued function representing the --
-- relation. --
-- --
-- These are found by iterating the function step which takes one --
-- such pair to the next, where at the next stage we distinguish --
-- previously indistinguishable states if and only if a transition --
-- from them on a particular character gives states distinguished --
-- by the previous partition. --
-- Can see from this that we generate the stages simply from the --
-- function representation; we carry the set representation so --
-- that we can compare two partitions for equality: can't compare --
-- functions for equality, so compare the set representations. --
-- --
-- set representations of the partitions are compared by the --
-- function eqpart, which compares sets of sets for (true) set --
-- equality. --
-- --
-- The starting value for the iteration is given by the function --
-- firstpartfun, which distinguishes the states in finish, i.e. --
-- the terminal states, from the rest. --
-- --
--------------------------------------------------------------------------
-- | Compute the indistinguishability partition of an NFA's states, in two
-- redundant forms: the set of classes and the relation as a predicate.
-- The pair is refined by fixpoint iteration; the set form exists solely
-- so two consecutive partitions can be compared for equality (functions
-- cannot be compared).
eqclasses :: Ord a => Nfa a -> ( Set (Set a) , a -> a -> Bool )
eqclasses mach
  = to_limit step start
  where
    -- Initial partition: accepting vs non-accepting states.
    start = ( firstpart , firstpartfun )
    firstpart = partition firstpartfun states
    firstpartfun a b = ( (a `member` finish) == (b `member` finish) )
    (NFA states moves startst finish) = mach
    -- One refinement step: a and b stay related only if, for every pair
    -- of moves from a and b on the same symbol, the target states were
    -- related in the previous partition.
    step ( part , partfun )
      = ( newpart , newpartfun )
      where
        newpart = partition newpartfun states
        newpartfun a b
          = and [ partfun c d | (Move a' y c) <- movelist , a==a' ,
                                (Move b' z d) <- movelist , b==b' ,
                                y==z ]
            && partfun a b
        movelist = Set.toList moves
    -- Iterate until the set form of the partition stops changing.
    to_limit f (a,b)
      | (eqpart a a') = (a,b)
      | otherwise = to_limit f next
      where
        next = f (a,b)
        (a',b') = next
    -- True equality of sets of sets, by mutual containment.
    eqpart a a' = and ( Set.toList (Set.map (setmemSet a') a) ) &&
                  and ( Set.toList (Set.map (setmemSet a) a') )
    -- Is the set a an element of the set-of-sets x?
    setmemSet x a = or ( Set.toList (Set.map (== a) x) )
| AidanDelaney/Mira | src/Language/Mira/MinimiseDfa.hs | bsd-3-clause | 6,159 | 34 | 14 | 1,487 | 1,194 | 655 | 539 | 55 | 2 |
-- |
-- Module: Config
-- Description: Representation and parsing of configuration file for cd
-- subcommand
-- Copyright: (c) 2018-2020 Peter Trško
-- License: BSD3
--
-- Maintainer: peter.trsko@gmail.com
-- Stability: experimental
-- Portability: GHC specific language extensions.
--
-- Representation and parsing of configuration file for @cd@ subcommand.
module Config
( Config(..)
, defConfig
, readConfig
, ShellCommand(..)
, shellCommand
)
where
import Data.Functor ((<$>))
import Data.Maybe (Maybe(Nothing), fromMaybe)
import GHC.Generics (Generic)
import System.IO (IO)
import Data.Text (Text)
import Dhall (FromDhall, ToDhall)
import qualified Dhall (auto)
import CommandWrapper.Subcommand.Prelude (Params, inputConfig)
import CommandWrapper.Toolset.Config.Command (SimpleCommand)
-- | Representation of @cd@ subcommand configuration file.
data Config = Config
{ directories :: [Text]
-- ^ List of directories for the user to choose from.
, menuTool :: Maybe (Maybe Text -> SimpleCommand)
-- ^ Command to use when requesting user to select a directory from a list.
--
-- If 'Nothing' then very basic TUI implementation is used. Tools that can
-- easily be used for this purpose are @fzf@ and @fzy@
, shell :: Maybe Text
-- ^ Shell to use when spawning a new shell. If 'Nothing' then the value
-- from @SHELL@ environment variable is used.
, terminalEmulator :: Maybe (Text -> Maybe ShellCommand -> SimpleCommand)
-- ^ Terminal emulator to run when such action is requested.
--
-- If 'Nothing' then we don'd assume anything and die with appropriate
-- error message.
}
deriving stock (Generic)
deriving anyclass (Dhall.FromDhall)
-- | Fallback configuration used when no configuration file is available:
-- no known directories, and every optional tool left unset.
defConfig :: Config
defConfig = Config
    { menuTool = Nothing
    , shell = Nothing
    , terminalEmulator = Nothing
    , directories = []
    }
-- | Representation of command that can be passed to terminal emulator to be
-- executed.
data ShellCommand = ShellCommand
    { command :: Text      -- ^ Name of the command\/program to run.
    , arguments :: [Text]  -- ^ Arguments passed to 'command'.
    }
  deriving stock (Generic)
  deriving anyclass (Dhall.ToDhall)
-- | Smart constructor: a 'ShellCommand' that runs the given shell with no
-- extra arguments.
shellCommand :: Text -> ShellCommand
shellCommand cmd = ShellCommand{command = cmd, arguments = []}
-- | Read and parse the subcommand's configuration file, falling back to
-- 'defConfig' when no configuration file is present.
readConfig :: Params -> IO Config
readConfig params =
    (\possiblyConfig -> fromMaybe defConfig possiblyConfig)
        <$> inputConfig Dhall.auto params
| trskop/command-wrapper | command-wrapper/app-cd/Config.hs | bsd-3-clause | 2,502 | 0 | 13 | 517 | 392 | 243 | 149 | -1 | -1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.