code
stringlengths 5
1.03M
| repo_name
stringlengths 5
90
| path
stringlengths 4
158
| license
stringclasses 15
values | size
int64 5
1.03M
| n_ast_errors
int64 0
53.9k
| ast_max_depth
int64 2
4.17k
| n_whitespaces
int64 0
365k
| n_ast_nodes
int64 3
317k
| n_ast_terminals
int64 1
171k
| n_ast_nonterminals
int64 1
146k
| loc
int64 -1
37.3k
| cycloplexity
int64 -1
1.31k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE TypeOperators #-}
module Demo where
import Control.Applicative ((<$>))
import Data.Word
import Language.Embedded.Imperative
import Language.Embedded.Backend.C
import Language.Embedded.CExp
-- | Custom instruction type with: references, control structures and file I/O
type CMD
= RefCMD
:+: ControlCMD
:+: FileCMD
-- | Interactive summing program: repeatedly prompts for a number on
-- @stdin@, accumulating the total until the user enters 0, then prints
-- the sum.  Expressed in the embedded imperative DSL over 'CMD'.
sumInput :: Program CMD (Param2 CExp CType) ()
sumInput = do
    done  <- initRef false
    total <- initRef (0 :: CExp Word32)
    while (not_ <$> getRef done) $ do
        printf "Enter a number (0 means done): "
        n <- fget stdin
        iff (n #== 0)
            (setRef done true)
            (modifyRef total (+n))
    printf "The sum of your numbers is %d.\n" =<< getRef sum'
  where
    sum' = undefined
-- | Compile 'sumInput' to C and run the resulting executable.
-- NOTE(review): no type signature — presumably @IO ()@; confirm.
run_sumInput = runCompiled sumInput
-- | Regression check: the compiled and interpreted runs of 'sumInput'
-- must agree when fed the numbers 20,19,..,0 one per line (the trailing
-- 0 terminates the loop).
testAll = do
tag "sumInput" >> compareCompiled sumInput (runIO sumInput) (unlines $ map show $ reverse [0..20])
where
-- Print a banner identifying which example is being tested.
tag str = putStrLn $ "---------------- examples/Demo.hs/" ++ str ++ "\n"
|
kmate/imperative-edsl
|
examples/Demo.hs
|
bsd-3-clause
| 1,036
| 0
| 13
| 245
| 279
| 143
| 136
| 26
| 1
|
module Distribution.Client.Dependency.Modular.Flag where
import Data.Map as M
import Prelude hiding (pi)
import Distribution.PackageDescription hiding (Flag) -- from Cabal
import Distribution.Client.Dependency.Modular.Package
import Distribution.Client.Types (OptionalStanza(..))
-- | Flag name. Consists of a package instance and the flag identifier itself.
data FN qpn = FN (PI qpn) Flag
deriving (Eq, Ord, Show)
-- | Extract the package name from a flag name.
getPN :: FN qpn -> qpn
getPN (FN (PI qpn _) _) = qpn
-- Maps over the package-name part only; the flag identifier is untouched.
instance Functor FN where
fmap f (FN x y) = FN (fmap f x) y
-- | Flag identifier. Just a string.
type Flag = FlagName
-- | Unwrap a 'FlagName' to its raw string for display.
unFlag :: Flag -> String
unFlag (FlagName fn) = fn
-- | Flag info. Default value, and whether the flag is manual.
-- Manual flags can only be set explicitly.
data FInfo = FInfo { fdefault :: Bool, fmanual :: Bool }
deriving (Eq, Ord, Show)
-- | Flag defaults.
type FlagInfo = Map Flag FInfo
-- | Qualified flag name.
type QFN = FN QPN
-- | Stanza name. Paired with a package name, much like a flag.
data SN qpn = SN (PI qpn) OptionalStanza
deriving (Eq, Ord, Show)
-- Maps over the package-name part only, mirroring 'Functor FN'.
instance Functor SN where
fmap f (SN x y) = SN (fmap f x) y
-- | Qualified stanza name.
type QSN = SN QPN
-- | Render a stanza kind for user-facing output.
unStanza :: OptionalStanza -> String
unStanza TestStanzas = "test"
unStanza BenchStanzas = "bench"
-- | Render a qualified flag assignment, e.g. @pkg:+flag@ or @pkg:-flag@.
showQFNBool :: QFN -> Bool -> String
showQFNBool qfn@(FN pi _f) b = showPI pi ++ ":" ++ showFBool qfn b
-- | Render a qualified stanza assignment, e.g. @pkg:*test@ or @pkg:!test@.
showQSNBool :: QSN -> Bool -> String
showQSNBool qsn@(SN pi _f) b = showPI pi ++ ":" ++ showSBool qsn b
-- | Unqualified flag assignment: @+@ means enabled, @-@ disabled.
showFBool :: FN qpn -> Bool -> String
showFBool (FN _ f) True = "+" ++ unFlag f
showFBool (FN _ f) False = "-" ++ unFlag f
-- | Unqualified stanza assignment: @*@ means enabled, @!@ disabled.
showSBool :: SN qpn -> Bool -> String
showSBool (SN _ s) True = "*" ++ unStanza s
showSBool (SN _ s) False = "!" ++ unStanza s
-- | Render a qualified flag name as @package:flag@.
showQFN :: QFN -> String
showQFN (FN pi flag) = showPI pi ++ ":" ++ unFlag flag

-- | Render a qualified stanza name as @package:stanza@.
showQSN :: QSN -> String
showQSN (SN pi stanza) = showPI pi ++ ":" ++ unStanza stanza
|
jwiegley/ghc-release
|
libraries/Cabal/cabal-install/Distribution/Client/Dependency/Modular/Flag.hs
|
gpl-3.0
| 1,946
| 0
| 9
| 394
| 678
| 361
| 317
| 41
| 1
|
{- |
Module : $Header$
Description : This module is for selecting the favoured EP representation
Copyright : (c) Ewaryst Schulz, DFKI Bremen 2010
License : GPLv2 or higher, see LICENSE.txt
Maintainer : ewaryst.schulz@dfki.de
Stability : experimental
Portability : portable
This module re-exports one of the following modules
GeneralExtendedParameter
SimpleExtendedParameter
-}
module CSL.ExtendedParameter ( module EP ) where
-- import CSL.GeneralExtendedParameter
import CSL.SimpleExtendedParameter as EP
|
mariefarrell/Hets
|
CSL/ExtendedParameter.hs
|
gpl-2.0
| 533
| 0
| 4
| 92
| 21
| 15
| 6
| 2
| 0
|
import Distribution.Simple
-- | Plain Cabal build with the default hooks; the commented-out code below
-- is a previously considered git-based versioning alternative.
main = defaultMain
{- Inferring the package version from git. Posted by https://github.com/hvr
-
- https://gist.github.com/656738
import Control.Exception
import Control.Monad
import Data.Maybe
import Data.Version
import Distribution.PackageDescription (PackageDescription(..), HookedBuildInfo, GenericPackageDescription(..))
import Distribution.Package (PackageIdentifier(..))
import Distribution.Simple (defaultMainWithHooks, simpleUserHooks, UserHooks(..))
import Distribution.Simple.LocalBuildInfo (LocalBuildInfo(..))
import Distribution.Simple.Setup (BuildFlags(..), ConfigFlags(..))
import Distribution.Simple.Utils (die)
import System.Process (readProcess)
import Text.ParserCombinators.ReadP (readP_to_S)
main :: IO ()
main = defaultMainWithHooks simpleUserHooks
{ confHook = myConfHook
, buildHook = myBuildHook
}
-- configure hook
myConfHook :: (GenericPackageDescription, HookedBuildInfo)
-> ConfigFlags
-> IO LocalBuildInfo
myConfHook (gpdesc, hbinfo) cfg = do
let GenericPackageDescription {
packageDescription = pdesc@PackageDescription {
package = pkgIden }} = gpdesc
gitVersion <- inferVersionFromGit (pkgVersion (package pdesc))
let gpdesc' = gpdesc {
packageDescription = pdesc {
package = pkgIden { pkgVersion = gitVersion } } }
-- putStrLn $ showVersion gitVersion
confHook simpleUserHooks (gpdesc', hbinfo) cfg
-- build hook
myBuildHook :: PackageDescription
-> LocalBuildInfo
-> UserHooks
-> BuildFlags
-> IO ()
myBuildHook pdesc lbinfo uhooks bflags = do
let lastVersion = pkgVersion $ package pdesc
gitVersion <- inferVersionFromGit lastVersion
when (gitVersion /= lastVersion) $
die("The version reported by git '" ++ showVersion gitVersion ++
"' has changed since last time this package was configured (version was '" ++
showVersion lastVersion ++ "' back then), please re-configure package")
buildHook simpleUserHooks pdesc lbinfo uhooks bflags
-- |Infer package version from Git tags. Uses `git describe` to infer 'Version'.
inferVersionFromGit :: Version -> IO Version
inferVersionFromGit version0 = do
ver_line <- init `liftM` readProcess "git"
[ "describe"
, "--abbrev=5"
, "--tags"
, "--match=v[0-9].[0-9][0-9]"
, "--dirty"
, "--long"
, "--always"
] ""
-- ver_line <- return "v0.1-42-gf9f4eb3-dirty"
putStrLn ver_line
-- let versionStr = ver_line -- (head ver_line == 'v') `assert` replaceFirst '-' '.' (tail ver_line)
-- Just version = listToMaybe [ p | (p, "") <- readP_to_S parseVersion versionStr ]
return version0
{-
-- | Helper for replacing first occurrence of character by another one.
replaceFirst :: Eq a => a -> a -> [a] -> [a]
replaceFirst _ _ [] = []
replaceFirst o r (x:xs) | o == x = r : xs
| otherwise = x : replaceFirst o r xs
-}
-}
|
rsasse/tamarin-prover
|
Setup.hs
|
gpl-3.0
| 3,059
| 0
| 4
| 703
| 12
| 7
| 5
| 2
| 1
|
-- C->Haskell Compiler: pretty printing of C abstract syntax
--
-- Author : Manuel M T Chakravarty
-- Created: 25 August 1
--
-- Version $Revision: 1.3 $ from $Date: 2005/06/22 16:01:21 $
--
-- Copyright (c) [2001..2004] Manuel M T Chakravarty
--
-- This file is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This file is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
--- DESCRIPTION ---------------------------------------------------------------
--
-- Pretty printing support for abstract C trees.
--
--- DOCU ----------------------------------------------------------------------
--
-- language: Haskell 98
--
--- TODO ----------------------------------------------------------------------
--
-- * So far, only covers a small fraction of the abstract tree definition
--
module CPretty (
-- we are just providing instances to the class `Pretty'
) where
import Idents (Ident, identToLexeme)
import Text.PrettyPrint.HughesPJ
import CAST
-- pretty printing of AST nodes
-- ----------------------------
-- Render a 'CDecl' through its 'Pretty' instance; precedence is ignored.
instance Show CDecl where
showsPrec _ = showString . render . pretty
-- overloaded pretty-printing function (EXPORTED)
--
-- Minimal complete definition: either 'pretty' or 'prettyPrec' — the two
-- defaults are mutually recursive, so defining neither loops forever.
class Pretty a where
pretty :: a -> Doc
prettyPrec :: Int -> a -> Doc
pretty = prettyPrec 0
prettyPrec _ = pretty
-- actual structure tree traversals
-- --------------------------------
-- A declaration: the specifiers, then the comma-separated declarators hung
-- two columns deeper, terminated by a semicolon.
instance Pretty CDecl where
pretty (CDecl specs declrs _) =
hsep (map pretty specs) `hang` 2 $
hsep (punctuate comma (map prettyDeclr declrs)) <> semi
-- Dispatch a declaration specifier to its storage/type/qualifier printer.
instance Pretty CDeclSpec where
pretty (CStorageSpec sspec) = pretty sspec
pretty (CTypeSpec tspec) = pretty tspec
pretty (CTypeQual qspec) = pretty qspec
-- C storage-class keywords.
instance Pretty CStorageSpec where
pretty (CAuto _) = text "auto"
pretty (CRegister _) = text "register"
pretty (CStatic _) = text "static"
pretty (CExtern _) = text "extern"
pretty (CTypedef _) = text "typedef"
-- | C type-specifier keywords.  Struct/union and enum specifiers are not
-- implemented yet and render as placeholder markers; their payloads are
-- deliberately ignored (bound as @_@ rather than named, which also avoids
-- unused-binding warnings under -Wall).
instance Pretty CTypeSpec where
  pretty (CVoidType _)    = text "void"
  pretty (CCharType _)    = text "char"
  pretty (CShortType _)   = text "short"
  pretty (CIntType _)     = text "int"
  pretty (CLongType _)    = text "long"
  pretty (CFloatType _)   = text "float"
  pretty (CDoubleType _)  = text "double"
  pretty (CSignedType _)  = text "signed"
  pretty (CUnsigType _)   = text "unsigned"
  pretty (CSUType _ _)    = text "<<CPretty: CSUType not yet implemented!>>"
  pretty (CEnumType _ _)  = text "<<CPretty: CEnumType not yet implemented!>>"
  pretty (CTypeDef ide _) = ident ide
-- C type-qualifier keywords.
instance Pretty CTypeQual where
pretty (CConstQual _) = text "const"
pretty (CVolatQual _) = text "volatile"
pretty (CRestrQual _) = text "restrict"
-- | Print one declarator triple: the declarator itself, an optional
-- @= initializer@, and an optional @: bit-field-width@ expression.
prettyDeclr :: (Maybe CDeclr, Maybe CInit, Maybe CExpr) -> Doc
prettyDeclr (odeclr, oinit, oexpr) =
maybe empty pretty odeclr
<+> maybe empty ((text "=" <+>) . pretty) oinit
<+> maybe empty ((text ":" <+>) . pretty) oexpr
instance Pretty CDeclr where
-- Plain variable declarator: just the (optional) identifier.
pretty (CVarDeclr oide _) = maybe empty ident oide
-- Pointer declarator: wraps the inner declarator in parens with the
-- qualifiers and a @*@.
-- NOTE(review): 'oneLevel' reads as handling a single indirection level
-- (it binds @ind@) but is applied to the whole @inds@ list rather than
-- folded per level — looks suspicious; confirm against CAST's layout.
pretty (CPtrDeclr inds declr _) =
let
oneLevel ind = parens . (hsep (map pretty ind) <+>) . (text "*" <>)
in
oneLevel inds (pretty declr)
-- Array declarator: inner declarator followed by @[size]@.
pretty (CArrDeclr declr _ oexpr _) =
pretty declr <> brackets (maybe empty pretty oexpr)
-- Function declarator: parameter list, with @, ...@ when variadic.
pretty (CFunDeclr declr decls isVariadic _) =
let
varDoc = if isVariadic then text ", ..." else empty
in
pretty declr
<+> parens (hsep (punctuate comma (map pretty decls)) <> varDoc)
-- Initializers and expressions are not implemented; placeholder markers.
instance Pretty CInit where
pretty _ = text "<<CPretty: CInit not yet implemented!>>"
instance Pretty CExpr where
pretty _ = text "<<CPretty: CExpr not yet implemented!>>"
-- auxilliary functions
-- --------------------
-- | Print an identifier by its source lexeme.
ident :: Ident -> Doc
ident = text . identToLexeme
|
k0001/gtk2hs
|
tools/c2hs/c/CPretty.hs
|
gpl-3.0
| 4,172
| 0
| 16
| 889
| 1,016
| 521
| 495
| 66
| 1
|
{-+ PFE command line parsing utilities
-}
module PfeParse(module PfeParse,arg,(<@),( #@ ),many) where
import Data.Char(isUpper)
import Control.Monad(when)
import Data.Maybe(isJust)
import HsName(ModuleName(..),sameModuleName,parseModuleName,isMainModule)
import TypedIds(NameSpace(..))
import PFE0(getCurrentModuleGraph,projectStatus)
import PrettyPrint(pp,(<+>),fsep)
import CmdLineParser3 as P
import MUtils((@@),( # ),concatMapM,swap,apBoth)
-- | Run the PFE driver with a command table; the project-status command is
-- the default when no command matches.
runCmds run cmds = run $ doCmd (cmds, projectStatus)
--type Cmd r = (String,(P r,String))
--doCmd :: ([Cmd (m ()], (m ())) -> String -> m ()
-- Parse the full command line against the command grammar.
doCmd cmds _ = parseAll (cmdGrammar cmds)
-- Build the command-line grammar: one alternative per command keyword,
-- falling through to the default command.
cmdGrammar (cmds,default_cmd) =
named "command" $
foldr (!) (nil default_cmd)
[nil id `chk` kw cmd <@ p :-- usage|(cmd,(p,usage))<-cmds]
-- Print usage text for the command table.
usage prg cmds = P.usage prg (cmdGrammar (cmds,projectStatus))
-- Boolean option: present iff the keyword appeared.
kwOption w = isJust # opt (kw w)
noArgs = nil
args s f = f # many (arg s) -- s should now be in singular form!
-- Common argument shapes: filenames and lists thereof.
filename = arg "<filename>"
filenames = many filename
fileArgs f = f # filenames
fileArg f = fileArgs (mapM_ f)
-- Module-name argument combinators; names are validated against the
-- current module graph before the command body runs.
moduleArg f = moduleArgs (mapM_ f)
moduleArgs f = f @@ checkModuleNames # many (arg "<module>")
moduleArg' opts f = moduleArgs' opts (mapM_ . f)
moduleArgs' opts f = f' #@ opts <@ many (arg "<module>")
where f' o = f o @@ checkModuleNames
checkModuleNames = concatMapM checkModuleName
-- Resolve one module-name string to the matching modules in the project
-- graph; fails if there is no match.
checkModuleName s =
do ms <- filter sameModule . map (fst.snd) # getCurrentModuleGraph
when (null ms) $ fail (s++": unknown module")
return ms
where
m = parseModuleName s
sameModule = if isMainModule m then (==) m else sameModuleName s
-- "Main{-file.hs-}" selects one particular Main module,
-- "Main" select all main modules in a project
just ms = if null ms then Nothing else Just ms
-- Identifier and qualified-name argument combinators.
idArgs f = f # many (arg "<identifier>")
qualIds f = (f @@ parseQualIds) # many (arg "<M.x>")
qualId f = (f @@ parseQualId) # arg "<M.x>"
parseQualIds = mapM parseQualId
{-
parseOneQualId = parseQualId @@ one
where
one [q] = return q
one qs = fail $ "Exactly one qualified name is required: "++unwords qs
-}
-- | Parse a qualified identifier of the form @M.x@: resolve the module
-- part (which must name exactly one project module) and return it paired
-- with the unqualified name.  Fails on input without a dot.
-- (Fixes the misspelled error message "Qaulified" in the previous version.)
parseQualId s =
  case splitQualName s of
    Just (m,n) -> flip (,) n # checkModuleName1 m
    -- TODO: also check that m.n is defined!
    _ -> fail $ "Qualified name required: "++s
  where
    -- Split at the *last* dot, so @A.B.c@ yields @("A.B","c")@.
    splitQualName = fmap (apBoth reverse . swap) . split . reverse
    split s = case break (=='.') s of
      (s1,'.':s2) -> Just (s1,s2)
      _ -> Nothing
{-
isQual s =
case break (=='.') s of
(c:_,'.':_:_) -> isUpper c
_ -> False
-}
-- | Resolve a module name that must match exactly one project module.
checkModuleName1 = one @@ checkModuleName
where
one [q] = return q
one qs = fail $ pp $ "Ambiguous module name:"<+>fsep qs
-- | Entity argument: an optional namespace keyword followed by @M.x@.
entityId f = (f' # opt idty) <@ arg "<M.x>"
where
f' ns = f . (,) ns @@ parseQualId
-- This could be done with cmd and !,
-- but the usage printer isn't good enough yet.
idty = Token conv "type|class|value|con"
-- Map the namespace keyword to its 'NameSpace'; Nothing rejects the token.
conv arg | isClassOrType arg = Just ClassOrTypeNames
| isValue arg = Just ValueNames
| otherwise = Nothing
isClassOrType arg = arg `elem` ["type","class"]
isValue arg = arg `elem` ["value","con"]
{-
entityId f = Args "[type|class|value] <M.x>" (f @@ parseEntId)
where
parseEntId args0 = (,) ns # parseOneQualId args1
where
(ns,args1) =
case args0 of
arg:args | isClassOrType arg -> (Just ClassOrTypeNames,args)
| isValue arg -> (Just ValueNames,args)
_ -> (Nothing,args0)
isClassOrType arg = arg `elem` ["type","class"]
isValue arg = arg `elem` ["value","con"]
-}
|
kmate/HaRe
|
old/tools/pfe/PfeParse.hs
|
bsd-3-clause
| 3,636
| 0
| 12
| 854
| 1,048
| 555
| 493
| -1
| -1
|
module Stockfighter
( module Stockfighter.Client
, module Stockfighter.Types
) where
import Stockfighter.Client
import Stockfighter.Types
|
cnr/stockfighter-hs
|
src/Stockfighter.hs
|
mit
| 146
| 0
| 5
| 22
| 28
| 18
| 10
| 5
| 0
|
{-
Requirements: youtube-dl must be installed
Run this program from the directory where you want to download all the videos.
Pass in the csv file containing all the video ids.
This will download all the videos sequentially, skipping over videos that
have already been downloaded.
-}
module Main where
import Text.CSV
import Data.List (group)
import Data.List.Split (chunksOf)
import System.Environment (getArgs)
import System.Process (readProcessWithExitCode)
import System.FilePath.Glob (namesMatching)
import System.Exit
import System.IO (hFlush, stdout)
-- Class identifier compared against the CSV's class-id column.
-- NOTE(review): presumably "0" is the "person" class in this dataset
-- (the constant's name suggests so) — confirm against the CSV schema.
person_class_id :: String
person_class_id = "0"
-- | Collapse runs of equal adjacent strings to a single occurrence.
-- Note: this removes only *consecutive* duplicates — it behaves like
-- 'nub' only when equal elements are already adjacent (e.g. sorted input).
fastNub :: [String] -> [String]
fastNub (x : rest@(y : _))
  | x == y    = fastNub rest
  | otherwise = x : fastNub rest
fastNub short = short
-- | Extract the class-id column (third field) from one YouTube-BB CSV row.
-- A row of any shape other than the expected ten fields is a fatal error,
-- reported by showing the offending row.
csv_class_id :: Show t => [t] -> t
csv_class_id row = case row of
  [ _youtubeId, _timestampMs, classId, _className, _objectId
    , _objectPresence, _xmin, _xmax, _ymin, _ymax ] -> classId
  malformed -> error (show malformed)
-- | Parse the CSV and return the de-duplicated video ids (first column) of
-- rows whose class id matches 'person_class_id'.
-- NOTE(review): partial — the @Right csv <-@ pattern crashes on a parse
-- failure, and @map head@ assumes every row is non-empty; also 'fastNub'
-- only merges *adjacent* duplicates, so rows for the same video are
-- assumed contiguous in the file — confirm.
extractVideoNames :: FilePath -> IO [String]
extractVideoNames fname = do {Right csv <- parseCSVFromFile fname; return (fastNub $ map head (filter ((person_class_id ==) . csv_class_id) csv))}
-- | For each video id listed in the CSV passed as the first argument:
-- skip it if some file named @<id>.*@ already exists, otherwise invoke
-- @youtube-dl@ to fetch it, printing "." on success or " failed".
-- Downloads run strictly sequentially.
main :: IO ()
main = do
(fname:_) <- getArgs
videoNames <- extractVideoNames fname
sequence_ [
do
-- Any existing file whose name starts with the id counts as downloaded.
ns <- namesMatching (vidName++".*")
if null ns
then do
putStr vidName
hFlush stdout
(exitCode,_,_) <- readProcessWithExitCode "youtube-dl" ("--prefer-free-formats":"-k":"--id":vidName:[]) []
if exitCode == ExitSuccess then putStrLn "." else putStrLn " failed"
else putStrLn ("skipping " ++ vidName) | vidName <- videoNames]
|
rzil/honours
|
DeepLearning/DownloadVideos.hs
|
mit
| 1,528
| 0
| 20
| 253
| 443
| 237
| 206
| 32
| 3
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ExtendedDefaultRules #-}
module Web.Kirstie.Hooks.RelatedPosts (relatedPosts) where
import Control.Exception
import Control.Monad
import Control.Monad.Error
import Data.List (sort, sortBy)
import Data.Ord (comparing)
import Data.Text (Text)
import Database.MongoDB hiding (sort, lookup, count)
import Web.Kirstie.Model
import Web.Kirstie.Util
import Web.Kirstie.IO
import Web.Kirstie.DB
-- Logging helpers that prefix every message with this hook's name.
ioeLogger' = ioeLoggerWithLabel "RelatedPosts: "
putLog' level = putLog level . (++) "RelatedPosts: "
-- | Hook entry point: for every article, score all articles sharing a tag
-- and persist the top related ids (up to 6, excluding the article itself)
-- to the @related_posts@ collection.
-- NOTE(review): the MongoDB pipe opened here is never explicitly closed —
-- confirm whether that is acceptable for this one-shot hook.
relatedPosts :: Configure -> [Article] -> IO ()
relatedPosts conf articles = ioeLogger' . runErrorT $ do
let pairs = map article2pair articles
pipe <- liftIO . runIOE $ connect (host $ databaseHost conf)
scores <- getScoreList conf pipe
result <- forM pairs $ \(aid, tags) -> do
docs <- findByTags conf pipe tags
return $ (aid, take' 6 aid $ calcRelated scores tags docs)
liftIO $ do
saveToDB conf pipe result
putLog' InfoLog $ "Successfully updated"
-- | The top @n@ article ids by descending score, excluding @aid@ itself.
-- Descending order is obtained via @reverse . sortBy (comparing snd)@;
-- note that for equal scores this also reverses their original order.
take' :: Int -> ArticleId -> [(ArticleId, Float)] -> [ArticleId]
take' n aid = take n . filter (/= aid) .
map fst . reverse . sortBy (comparing snd)
-- | Fetch every article document that carries at least one of the tags.
findByTags :: Configure -> Pipe -> [Text] -> ErrorT String IO [Document]
findByTags conf pipe tags = ErrorT $ do
e <- access pipe master (databaseName conf) $
rest =<< find (select (buildSelectorByTags tags) "articles")
return $ strError e
-- | Score candidate documents against an article's tags.
calcRelated :: [(Text, Float)] -> [Text] -> [Document] -> [(ArticleId, Float)]
calcRelated scores tags = calcScores scores tags . map doc2pair
-- io
-- | Build the tag-weight list from the tags of every stored article
-- (only the "tags" field is projected from the collection).
getScoreList :: Configure -> Pipe -> ErrorT String IO [(Text, Float)]
getScoreList conf pipe = ErrorT $ do
e <- access pipe master (databaseName conf) $
rest =<< find (select [] "articles") {project = ["tags" =: 1]}
return $ strError $ fmap (generateScoreList . concatMap doc2tags) e
-- | MongoDB selector matching documents that hold any of the given tags.
-- NOTE(review): an empty tag list produces @$or: []@, which MongoDB
-- rejects — confirm callers never pass an empty list.
buildSelectorByTags :: [Text] -> Document
buildSelectorByTags tags = ["$or" =: map (\t -> ["tags" =: t]) tags]
-- | Upsert one @related_posts@ document per article (keyed on "id").
saveToDB :: Configure -> Pipe -> [(ArticleId, [ArticleId])] -> IO ()
saveToDB conf pipe pairs = mapM_ repsert' pairs
where access' = access pipe master (databaseName conf)
repsert' (aid, aids) = access' $ repsert
(select ["id" =: aid] "related_posts")
["id" =: aid, "relateds" =: aids]
-- supplements
-- Conversions between domain values and BSON documents.
-- NOTE(review): `at` fails if the field is absent from the document.
article2pair :: Article -> (ArticleId, [Text])
article2pair a = (articleIdNum a, articleTags a)
doc2pair :: Document -> (ArticleId, [Text])
doc2pair d = ("id" `at` d, "tags" `at` d)
doc2tags :: Document -> [Text]
doc2tags d = "tags" `at` d
-- | Apply a function to the second component of every pair, keeping the
-- first component untouched.
sndMap :: (b -> c) -> [(a, b)] -> [(a, c)]
sndMap f pairs = [ (key, f value) | (key, value) <- pairs ]
-- | Occurrence count of each distinct element, ascending by element.
--
-- The previous version crashed on the empty list: @let (y:ys) = sort xs@
-- is an irrefutable match that fails for @sort [] == []@.  An empty input
-- now simply yields no counts.
count :: (Eq a, Ord a) => [a] -> [(a, Int)]
count xs = case sort xs of
  []       -> []
  (y : ys) -> f 1 y ys
  where
    -- Walk the sorted list, accumulating the run length of the current value.
    f c x [] = [(x, c)]
    f c x (z : zs) | x == z = f (c+1) x zs
                   | otherwise = (x, c) : f 1 z zs
-- | Look up a tag's weight, defaulting to 0 for unknown tags.
-- (@maybe 0 id@ is @fromMaybe 0@; Data.Maybe is not imported here.)
getScore :: [(Text, Float)] -> Text -> Float
getScore scores = maybe 0 id . flip lookup scores
-- | Score every candidate by its tag overlap with the reference tags.
calcScores :: [(Text, Float)] -> [Text] -> [(ArticleId, [Text])]
-> [(ArticleId, Float)]
calcScores dic xs = sndMap (calcScore dic xs)
-- | Sum the weights of the reference tags also present in the candidate.
calcScore :: [(Text, Float)] -> [Text] -> [Text] -> Float
calcScore dic xs ys = sum . map (getScore dic) $ filter (`elem` ys) xs
-- | Weight each distinct tag by inverse frequency — rarer tags score
-- higher (total tag occurrences divided by this tag's occurrences).
generateScoreList :: [Text] -> [(Text, Float)]
generateScoreList tags = map calc pairs
where pairs = count tags
total = sum $ map snd pairs
calc (t, n) = (t, fromIntegral total / fromIntegral n)
|
hekt/blog-system
|
src/Web/Kirstie/Hooks/RelatedPosts.hs
|
mit
| 3,590
| 0
| 16
| 858
| 1,481
| 792
| 689
| 75
| 2
|
-- Nth Root of a Number Redux
-- http://www.codewars.com/kata/552679ea44a9e400b600124f/
module Codewars.Kata.NthRoot where
import Prelude hiding ((**))
-- | N-th root via the logarithmic identity @x ** (1/n) == exp (log x / n)@.
-- Only meaningful for positive @x@ ('log' of a non-positive 'Double' is
-- NaN or negative infinity).
root :: Double -> Double -> Double
root x n = exp scaledLog
  where scaledLog = log x / n
|
gafiatulin/codewars
|
src/6 kyu/NthRoot.hs
|
mit
| 216
| 0
| 7
| 35
| 54
| 32
| 22
| 4
| 1
|
{-# LANGUAGE CPP #-}
module GHCJS.DOM.AudioListener (
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
module GHCJS.DOM.JSFFI.Generated.AudioListener
#else
#endif
) where
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
import GHCJS.DOM.JSFFI.Generated.AudioListener
#else
#endif
|
plow-technologies/ghcjs-dom
|
src/GHCJS/DOM/AudioListener.hs
|
mit
| 352
| 0
| 5
| 33
| 33
| 26
| 7
| 4
| 0
|
{-# LANGUAGE CPP #-}
module GHCJS.DOM.WebKitTransitionEvent (
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
module GHCJS.DOM.JSFFI.Generated.WebKitTransitionEvent
#else
#endif
) where
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
import GHCJS.DOM.JSFFI.Generated.WebKitTransitionEvent
#else
#endif
|
plow-technologies/ghcjs-dom
|
src/GHCJS/DOM/WebKitTransitionEvent.hs
|
mit
| 376
| 0
| 5
| 33
| 33
| 26
| 7
| 4
| 0
|
{-# LANGUAGE BangPatterns, Rank2Types, TypeSynonymInstances,
TemplateHaskell #-}
module Motherboard.NES
(
Mirroring(..),
System(..),
State(..),
HardwareState(..),
SoftwareState(..),
MonadicState(..),
runMonadicState,
runCPU,
runPPU,
powerOnSoftwareState,
cycle,
getAtCPUCycle,
getAboutToBeginInstruction,
disassembleUpcomingInstruction,
)
where
import Control.DeepSeq
import Data.Array.Unboxed
import qualified Data.ByteString as BS
import Data.List hiding (cycle)
import Data.Word
import Prelude hiding (cycle, Maybe(..))
import qualified Control.Monad.State.Strict as State
import Assembly
import Data.Strict.Maybe
import qualified Processor.CPU_6502 as CPU
import qualified PPU.PPU_NES as PPU
-- | Nametable mirroring arrangement of the cartridge.
data Mirroring = HorizontalMirroring
| VerticalMirroring
| FourScreenMirroring
deriving (Eq, Show)
-- | Hardware family the cartridge targets — presumably taken from the ROM
-- header (TODO confirm where it is populated).
data System = PlainSystem
| VersusUnisystem
| PlayChoice10
deriving (Eq, Show)
-- | Complete machine state: the fixed cartridge/hardware description plus
-- the mutable software (CPU/PPU/memory) state.
data State =
State {
stateHardwareState :: ! HardwareState,
stateSoftwareState :: ! SoftwareState
-- stateConsoleOutputBuffer :: ByteString
}
-- | Immutable description of the inserted cartridge: ROM images, optional
-- trainer/hint-screen blobs, mapper number, mirroring, battery and system.
data HardwareState =
HardwareState {
hardwareStateProgramReadOnlyMemory :: ! (UArray Int Word8),
hardwareStateCharacterReadOnlyMemory :: ! (UArray Int Word8),
hardwareStateTrainer :: ! (Maybe (UArray Int Word8)),
hardwareStatePlayChoice10HintScreen :: ! (Maybe (UArray Int Word8)),
hardwareStateMapperNumber :: ! Word8,
hardwareStateMirroringType :: ! Mirroring,
hardwareStateBatteryPresent :: ! Bool,
hardwareStateSystem :: ! System
}
-- | Everything that changes while the machine runs: clock count, the last
-- value seen on each data bus, the CPU and PPU core states, and the
-- motherboard-owned memories.
data SoftwareState =
SoftwareState {
softwareStateMotherboardClockCount :: ! Int,
softwareStateLastCPUDataBusValue :: ! Word8,
softwareStateLastPPUDataBusValue :: ! Word8,
softwareStateCPUState :: ! CPU.CPU_6502_State,
softwareStatePPUState :: ! PPU.PPU_NES_State,
softwareStateMotherboardCPUMemory :: ! (UArray Int Word8),
softwareStateMotherboardPPUTableMemory :: ! (UArray Int Word8),
softwareStateMotherboardPPUPaletteMemory :: ! (UArray Int Word8),
softwareStateMotherboardPPUSpriteMemory :: ! (UArray Int Word8)
}
-- | Which data bus a fetch/store travels on.
data DataBus = CPUDataBus
| PPUDataBus
deriving (Eq, Show)
-- | Result of address decoding: which memory (or register bank) a bus
-- address maps to; 'NoMemory' for unmapped addresses.
data AddressMapping = MotherboardCPUMemory
| MotherboardPPUTableMemory
| MotherboardPPUPaletteMemory
| MotherboardPPUSpriteMemory
| ProgramReadOnlyMemory
| CharacterReadOnlyMemory
| PPURegisters
| NoMemory
deriving (Eq, Show)
-- | The two processors on the motherboard.
data Processor = CPU_6502
| PPU_NES
deriving (Eq, Show)
-- Deep-evaluation instances: force every field so a 'force'd 'State'
-- carries no thunks.
instance NFData State where
rnf state =
(rnf $ stateHardwareState state)
`seq` (rnf $ stateSoftwareState state)
instance NFData HardwareState where
rnf hardwareState =
(rnf $ hardwareStateProgramReadOnlyMemory hardwareState)
`seq` (rnf $ hardwareStateCharacterReadOnlyMemory hardwareState)
`seq` (rnf $ hardwareStateTrainer hardwareState)
`seq` (rnf $ hardwareStatePlayChoice10HintScreen hardwareState)
`seq` (rnf $ hardwareStateMapperNumber hardwareState)
`seq` (rnf $ hardwareStateMirroringType hardwareState)
`seq` (rnf $ hardwareStateBatteryPresent hardwareState)
`seq` (rnf $ hardwareStateSystem hardwareState)
instance NFData SoftwareState where
rnf softwareState =
(rnf $ softwareStateMotherboardClockCount softwareState)
`seq` (rnf $ softwareStateLastCPUDataBusValue softwareState)
`seq` (rnf $ softwareStateLastPPUDataBusValue softwareState)
`seq` (rnf $ softwareStateCPUState softwareState)
`seq` (rnf $ softwareStatePPUState softwareState)
`seq` (rnf $ softwareStateMotherboardCPUMemory softwareState)
`seq` (rnf $ softwareStateMotherboardPPUTableMemory softwareState)
`seq` (rnf $ softwareStateMotherboardPPUPaletteMemory softwareState)
`seq` (rnf $ softwareStateMotherboardPPUSpriteMemory softwareState)
-- Empty instances: 'rnf' falls back to its default (WHNF evaluation),
-- which suffices for these field-free enumeration types.
instance NFData Mirroring where
instance NFData System where
-- | State-threading monad in which the 6502 CPU core runs; wraps a strict
-- 'State.State' over the whole motherboard 'State'.  'runCPU' (the
-- unwrapper used below) is defined elsewhere in this module.
-- NOTE(review): on GHC >= 7.10 this Monad instance also needs Functor and
-- Applicative instances to compile — confirm the targeted GHC version.
newtype CPUMonad a = CPUMonad (State.State State a)
instance Monad CPUMonad where
return = CPUMonad . return
a >>= b = CPUMonad ((runCPU a) >>= (runCPU . b))
-- Private helpers shared by the CPU.MonadChip methods below: every accessor
-- reads, and every mutator rewrites, a single field of the 'SoftwareState'
-- nested inside the motherboard 'State'.
cpuReadSoftwareState :: (SoftwareState -> a) -> CPUMonad a
cpuReadSoftwareState field =
  CPUMonad $ State.gets (field . stateSoftwareState)

cpuUpdateSoftwareState :: (SoftwareState -> SoftwareState) -> CPUMonad ()
cpuUpdateSoftwareState update = CPUMonad $ do
  oldState <- State.get
  State.put $ oldState {
      stateSoftwareState = update (stateSoftwareState oldState)
    }

-- | Wires the 6502 core to the NES motherboard: address decoding and bus
-- fetch/store, interrupt lines, and get/put accessors for every CPU field
-- persisted in 'SoftwareState'.
--
-- Fixes relative to the previous version: the record updates contained
-- stray duplicated field-name lines (syntactically invalid), and
-- 'getAccumulator' read the stack-pointer field (copy/paste bug).
instance CPU.MonadChip CPUMonad where
  debugFetchByte address = CPUMonad $ do
    (addressMapping, localAddress) <- cpuDecodeAddress address
    debugFetch CPUDataBus addressMapping localAddress
  fetchByte address = CPUMonad $ do
    (addressMapping, localAddress) <- cpuDecodeAddress address
    fetch CPUDataBus addressMapping localAddress
  storeByte address value = CPUMonad $ do
    (addressMapping, localAddress) <- cpuDecodeAddress address
    store CPUDataBus addressMapping localAddress value
  -- The IRQ line is never reported asserted in this revision.
  getIRQAsserted = CPUMonad $ return False
  -- The PPU decides when the NMI line is asserted.
  getNMIAsserted = CPUMonad $ runPPU PPU.assertingNMI
  getProgramCounter =
    cpuReadSoftwareState CPU.cpu6502StateProgramCounter
  putProgramCounter newValue =
    cpuUpdateSoftwareState $ \ss ->
      ss { CPU.cpu6502StateProgramCounter = newValue }
  getStackPointer =
    cpuReadSoftwareState CPU.cpu6502StateStackPointer
  putStackPointer newValue =
    cpuUpdateSoftwareState $ \ss ->
      ss { CPU.cpu6502StateStackPointer = newValue }
  -- Fixed: previously read 'CPU.cpu6502StateStackPointer'.
  getAccumulator =
    cpuReadSoftwareState CPU.cpu6502StateAccumulator
  putAccumulator newValue =
    cpuUpdateSoftwareState $ \ss ->
      ss { CPU.cpu6502StateAccumulator = newValue }
  getXIndexRegister =
    cpuReadSoftwareState CPU.cpu6502StateXIndexRegister
  putXIndexRegister newValue =
    cpuUpdateSoftwareState $ \ss ->
      ss { CPU.cpu6502StateXIndexRegister = newValue }
  getYIndexRegister =
    cpuReadSoftwareState CPU.cpu6502StateYIndexRegister
  putYIndexRegister newValue =
    cpuUpdateSoftwareState $ \ss ->
      ss { CPU.cpu6502StateYIndexRegister = newValue }
  getStatusRegister =
    cpuReadSoftwareState CPU.cpu6502StateStatusRegister
  putStatusRegister newValue =
    cpuUpdateSoftwareState $ \ss ->
      ss { CPU.cpu6502StateStatusRegister = newValue }
  getInternalOverflow =
    cpuReadSoftwareState CPU.cpu6502StateInternalOverflow
  putInternalOverflow newValue =
    cpuUpdateSoftwareState $ \ss ->
      ss { CPU.cpu6502StateInternalOverflow = newValue }
  getInternalNegative =
    cpuReadSoftwareState CPU.cpu6502StateInternalNegative
  putInternalNegative newValue =
    cpuUpdateSoftwareState $ \ss ->
      ss { CPU.cpu6502StateInternalNegative = newValue }
  getInternalStoredAddress =
    cpuReadSoftwareState CPU.cpu6502StateInternalStoredAddress
  putInternalStoredAddress newValue =
    cpuUpdateSoftwareState $ \ss ->
      ss { CPU.cpu6502StateInternalStoredAddress = newValue }
  getInternalLatch =
    cpuReadSoftwareState CPU.cpu6502StateInternalLatch
  putInternalLatch newValue =
    cpuUpdateSoftwareState $ \ss ->
      ss { CPU.cpu6502StateInternalLatch = newValue }
  getMicrocodeInstructionQueue =
    cpuReadSoftwareState CPU.cpu6502StateMicrocodeInstructionQueue
  putMicrocodeInstructionQueue newValue =
    cpuUpdateSoftwareState $ \ss ->
      ss { CPU.cpu6502StateMicrocodeInstructionQueue = newValue }
  getInterruptNoticed =
    cpuReadSoftwareState CPU.cpu6502StateInterruptNoticed
  putInterruptNoticed newValue =
    cpuUpdateSoftwareState $ \ss ->
      ss { CPU.cpu6502StateInterruptNoticed = newValue }
  getInterruptAlreadyProcessed =
    cpuReadSoftwareState CPU.cpu6502StateInterruptAlreadyProcessed
  putInterruptAlreadyProcessed newValue =
    cpuUpdateSoftwareState $ \ss ->
      ss { CPU.cpu6502StateInterruptAlreadyProcessed = newValue }
  getNonMaskableInterruptAlreadyProcessed =
    cpuReadSoftwareState CPU.cpu6502StateNonMaskableInterruptAlreadyProcessed
  putNonMaskableInterruptAlreadyProcessed newValue =
    cpuUpdateSoftwareState $ \ss ->
      ss { CPU.cpu6502StateNonMaskableInterruptAlreadyProcessed = newValue }
newtype PPUMonad a = PPUMonad (State.State State a)
-- Shared plumbing for the 'PPU.MonadChip' instance below.  The PPU chip
-- state lives nested inside the software state, so every field getter and
-- setter threads through 'stateSoftwareState' and 'softwareStatePPUState'.
-- (No type signatures: the PPU chip-state type is not named in this file.)
--
-- NOTE(review): the original putters all wrote the copy-pasted field
-- CPU.cpu6502StateInterruptAlreadyProcessed; each now updates the PPU
-- field matching its own name.
ppuGetField accessor = PPUMonad $
  State.get >>= return . accessor
                       . softwareStatePPUState
                       . stateSoftwareState

ppuPutField transform = PPUMonad $ do
  oldState <- State.get
  let oldSoftwareState = stateSoftwareState oldState
  State.put $ oldState {
                  stateSoftwareState =
                    oldSoftwareState {
                        softwareStatePPUState =
                          transform (softwareStatePPUState oldSoftwareState)
                      }
                }

instance PPU.MonadChip PPUMonad where
  debugFetchByte address = PPUMonad $ do
    -- NOTE(review): this decodes through the CPU address map, while
    -- 'fetchByte' below uses the PPU map; confirm that is intentional.
    (addressMapping, localAddress) <- cpuDecodeAddress address
    debugFetch PPUDataBus addressMapping localAddress
  fetchByte address = PPUMonad $ do
    (addressMapping, localAddress) <- ppuDecodeAddress address
    fetch PPUDataBus addressMapping localAddress
  storeByte address value = PPUMonad $ do
    (addressMapping, localAddress) <- ppuDecodeAddress address
    store PPUDataBus addressMapping localAddress value
  getTableMemory = PPUMonad $ do
    state <- State.get
    -- ($ not .): we are applying the accessor to the software state.
    let memory = softwareStateMotherboardPPUTableMemory
                   $ stateSoftwareState state
    return (\offset -> memory ! fromIntegral offset)
  getHorizontalClock = ppuGetField PPU.ppuNESStateHorizontalClock
  putHorizontalClock newValue = ppuPutField
    (\chip -> chip { PPU.ppuNESStateHorizontalClock = newValue })
  getVerticalClock = ppuGetField PPU.ppuNESStateVerticalClock
  putVerticalClock newValue = ppuPutField
    (\chip -> chip { PPU.ppuNESStateVerticalClock = newValue })
  getStillPoweringUp = ppuGetField PPU.ppuNESStateStillPoweringUp
  putStillPoweringUp newValue = ppuPutField
    (\chip -> chip { PPU.ppuNESStateStillPoweringUp = newValue })
  getWantsToAssertNMI = ppuGetField PPU.ppuNESStateWantsToAssertNMI
  putWantsToAssertNMI newValue = ppuPutField
    (\chip -> chip { PPU.ppuNESStateWantsToAssertNMI = newValue })
  getAllowedToAssertNMI = ppuGetField PPU.ppuNESStateAllowedToAssertNMI
  putAllowedToAssertNMI newValue = ppuPutField
    (\chip -> chip { PPU.ppuNESStateAllowedToAssertNMI = newValue })
  getTallSprites = ppuGetField PPU.ppuNESStateTallSprites
  putTallSprites newValue = ppuPutField
    (\chip -> chip { PPU.ppuNESStateTallSprites = newValue })
  getPatternTableForBackground =
    ppuGetField PPU.ppuNESStatePatternTableForBackground
  putPatternTableForBackground newValue = ppuPutField
    (\chip -> chip { PPU.ppuNESStatePatternTableForBackground = newValue })
  getPatternTableForSprites =
    ppuGetField PPU.ppuNESStatePatternTableForSprites
  putPatternTableForSprites newValue = ppuPutField
    (\chip -> chip { PPU.ppuNESStatePatternTableForSprites = newValue })
  getAddressIncrementVertically =
    ppuGetField PPU.ppuNESStateAddressIncrementVertically
  putAddressIncrementVertically newValue = ppuPutField
    (\chip -> chip { PPU.ppuNESStateAddressIncrementVertically = newValue })
  getPaletteMonochrome = ppuGetField PPU.ppuNESStatePaletteMonochrome
  putPaletteMonochrome newValue = ppuPutField
    (\chip -> chip { PPU.ppuNESStatePaletteMonochrome = newValue })
  getBackgroundClipped = ppuGetField PPU.ppuNESStateBackgroundClipped
  putBackgroundClipped newValue = ppuPutField
    (\chip -> chip { PPU.ppuNESStateBackgroundClipped = newValue })
  getSpritesClipped = ppuGetField PPU.ppuNESStateSpritesClipped
  putSpritesClipped newValue = ppuPutField
    (\chip -> chip { PPU.ppuNESStateSpritesClipped = newValue })
  getBackgroundVisible = ppuGetField PPU.ppuNESStateBackgroundVisible
  putBackgroundVisible newValue = ppuPutField
    (\chip -> chip { PPU.ppuNESStateBackgroundVisible = newValue })
  getSpritesVisible = ppuGetField PPU.ppuNESStateSpritesVisible
  putSpritesVisible newValue = ppuPutField
    (\chip -> chip { PPU.ppuNESStateSpritesVisible = newValue })
  getIntensifiedColor = ppuGetField PPU.ppuNESStateIntensifiedColor
  putIntensifiedColor newValue = ppuPutField
    (\chip -> chip { PPU.ppuNESStateIntensifiedColor = newValue })
  getWrittenOddNumberOfTimesToAddresses =
    ppuGetField PPU.ppuNESStateWrittenOddNumberOfTimesToAddresses
  putWrittenOddNumberOfTimesToAddresses newValue = ppuPutField
    (\chip ->
       chip { PPU.ppuNESStateWrittenOddNumberOfTimesToAddresses = newValue })
  getPermanentAddress = ppuGetField PPU.ppuNESStatePermanentAddress
  putPermanentAddress newValue = ppuPutField
    (\chip -> chip { PPU.ppuNESStatePermanentAddress = newValue })
  getTemporaryAddress = ppuGetField PPU.ppuNESStateTemporaryAddress
  putTemporaryAddress newValue = ppuPutField
    (\chip -> chip { PPU.ppuNESStateTemporaryAddress = newValue })
  getXOffset = ppuGetField PPU.ppuNESStateXOffset
  putXOffset newValue = ppuPutField
    (\chip -> chip { PPU.ppuNESStateXOffset = newValue })
  getLatestCompleteFrame = ppuGetField PPU.ppuNESStateLatestCompleteFrame
  putLatestCompleteFrame newValue = ppuPutField
    (\chip -> chip { PPU.ppuNESStateLatestCompleteFrame = newValue })
  getIncompleteVideoFrameNameTableMemory =
    ppuGetField PPU.ppuNESStateIncompleteFrameNameTableMemory
  putIncompleteVideoFrameNameTableMemory newValue = ppuPutField
    (\chip ->
       chip { PPU.ppuNESStateIncompleteFrameNameTableMemory = newValue })
-- Pre-AMP style Monad instance for 'PPUMonad', delegating to the
-- underlying state monad.
-- NOTE(review): modern GHC (7.10+) also requires Functor/Applicative
-- instances; none are visible in this part of the file -- confirm they
-- exist or add them.
instance Monad PPUMonad where
  return = PPUMonad . return
  a >>= b = PPUMonad ((runPPU a) >>= (runPPU . b))
-- | Unwrap a 'CPUMonad' action so it runs in the shared machine state.
runCPU :: CPUMonad a -> MonadicState a
runCPU (CPUMonad action) = action
-- | Unwrap a 'PPUMonad' action so it runs in the shared machine state.
runPPU :: PPUMonad a -> MonadicState a
runPPU (PPUMonad action) = action
-- | The software-visible machine state at power-on: clock count and both
-- data-bus latches zeroed, both chips in their model's power-on states,
-- and every motherboard RAM region (2 KB CPU RAM, 2 KB PPU table RAM,
-- 32-byte palette RAM, 256-byte sprite RAM) cleared to 0x00.
powerOnSoftwareState :: SoftwareState
powerOnSoftwareState =
  SoftwareState {
      softwareStateMotherboardClockCount = 0,
      softwareStateLastCPUDataBusValue = 0x00,
      softwareStateLastPPUDataBusValue = 0x00,
      softwareStateCPUState = CPU.powerOnState,
      softwareStatePPUState = PPU.powerOnState,
      softwareStateMotherboardCPUMemory = array (0x0000, 0x07FF)
                                                $ zip [0x0000 .. 0x07FF]
                                                $ repeat 0x00,
      softwareStateMotherboardPPUTableMemory =
        array (0x0000, 0x07FF)
              $ zip [0x0000 .. 0x07FF]
              $ repeat 0x00,
      softwareStateMotherboardPPUPaletteMemory =
        array (0x0000, 0x001F)
              $ zip [0x0000 .. 0x001F]
              $ repeat 0x00,
      softwareStateMotherboardPPUSpriteMemory =
        array (0x0000, 0x00FF)
              $ zip [0x0000 .. 0x00FF]
              $ repeat 0x00
    }
-- | Map a CPU-bus address to a memory region and an offset within it.
--
--   * 0x0000-0x1FFF: motherboard RAM, mirrored every 0x0800.
--   * 0x2000-0x3FFF: the eight PPU registers, mirrored every 8 bytes.
--   * 0x4000-0x7FFF: unmapped in this model ('NoMemory').
--     NOTE(review): real hardware has APU/input registers and cartridge
--     RAM in this range -- confirm 'NoMemory' is intended here.
--   * 0x8000-0xFFFF: two 16 KB PRG-ROM banks; with a single-bank
--     cartridge the same bank appears at both 0x8000 and 0xC000.
cpuDecodeAddress :: Word16 -> MonadicState (AddressMapping, Int)
cpuDecodeAddress address = do
  hardwareState <- getHardwareState
  let programReadOnlyMemory = hardwareStateProgramReadOnlyMemory hardwareState
      programReadOnlyMemoryBankSize = 0x4000
      nProgramReadOnlyMemoryBanks =
        -- Bank count derived from the ROM array's upper bound.
        div (1 + (snd $ bounds programReadOnlyMemory))
            programReadOnlyMemoryBankSize
      bankOffset bankIndex = bankIndex * programReadOnlyMemoryBankSize
      lowBankIndex = 0
      highBankIndex = if nProgramReadOnlyMemoryBanks < 2
                        then 0
                        else 1
  case () of
    () | address < 0x2000 -> do
           return (MotherboardCPUMemory,
                   fromIntegral $ mod address 0x0800)
       | address < 0x4000 -> do
           return (PPURegisters,
                   fromIntegral $ mod address 0x0008)
       | address < 0x8000 -> do
           return (NoMemory, 0)
       | address < 0xC000 -> do
           return (ProgramReadOnlyMemory,
                   (fromIntegral $ address - 0x8000)
                   + bankOffset lowBankIndex)
       | otherwise -> do
           return (ProgramReadOnlyMemory,
                   (fromIntegral $ address - 0xC000)
                   + bankOffset highBankIndex)
-- | Map a PPU-bus address to a memory region and an offset within it.
-- The PPU address space wraps at 0x4000: 0x0000-0x1FFF is CHR ROM,
-- 0x2000-0x3EFF the name/attribute tables (mirrored every 0x1000 and
-- folded onto the motherboard's table RAM), 0x3F00-0x3FFF the palette,
-- mirrored every 0x20.
--
-- NOTE(review): the 0/2 and 1/3 table folding hard-codes one nametable
-- mirroring arrangement even though cartridges select mirroring; palette
-- entries 0x10/0x14/0x18/0x1C are also not folded onto 0x00/0x04/0x08/
-- 0x0C -- verify both against the PPU model.
ppuDecodeAddress :: Word16 -> MonadicState (AddressMapping, Int)
ppuDecodeAddress address = do
  case mod address 0x4000 of
    address'
      | address' < 0x2000 -> do
          return (CharacterReadOnlyMemory, fromIntegral address')
      | address' < 0x3F00 -> do
          let tableIndex = div (mod (address' - 0x2000) 0x1000) 0x0400
              tableOffset = fromIntegral $ mod (address' - 0x2000) 0x0400
          case tableIndex of
            0 -> return (MotherboardPPUTableMemory,
                         0x0000 + tableOffset)
            1 -> return (MotherboardPPUTableMemory,
                         0x0400 + tableOffset)
            2 -> return (MotherboardPPUTableMemory,
                         0x0000 + tableOffset)
            -- tableIndex is always in 0..3; the wildcard keeps this match
            -- total without changing behaviour for index 3.
            _ -> return (MotherboardPPUTableMemory,
                         0x0400 + tableOffset)
      | otherwise -> do
          return (MotherboardPPUPaletteMemory,
                  fromIntegral $ mod (address' - 0x3F00) 0x20)
-- | Read a byte for debugging: like 'fetch' but with no bus side effects
-- -- PPU registers read as a constant 0x00 and the data-bus latch is left
-- untouched.
debugFetch :: DataBus
           -> AddressMapping
           -> Int
           -> MonadicState Word8
debugFetch dataBus addressMapping offset =
  case addressMapping of
    MotherboardCPUMemory ->
      getSoftwareStateMotherboardCPUMemory
        >>= \mem -> return (mem ! offset)
    MotherboardPPUTableMemory ->
      getSoftwareStateMotherboardPPUTableMemory
        >>= \mem -> return (mem ! offset)
    MotherboardPPUPaletteMemory ->
      getSoftwareStateMotherboardPPUPaletteMemory
        >>= \mem -> return (mem ! offset)
    MotherboardPPUSpriteMemory ->
      getSoftwareStateMotherboardPPUSpriteMemory
        >>= \mem -> return (mem ! offset)
    ProgramReadOnlyMemory ->
      getHardwareStateProgramReadOnlyMemory
        >>= \mem -> return (mem ! offset)
    CharacterReadOnlyMemory ->
      getHardwareStateCharacterReadOnlyMemory
        >>= \mem -> return (mem ! offset)
    PPURegisters ->
      -- Register reads can have side effects; the debugger gets a
      -- constant instead.
      return 0x00
    NoMemory ->
      getLastDataBusValue dataBus
-- | Read one byte over a data bus as a real bus cycle.  Readable PPU
-- registers run their register-read action (which may mutate PPU state);
-- unreadable registers and unmapped addresses yield the value last
-- latched on this bus (open-bus behaviour); the fetched value is then
-- latched on the bus.
fetch :: DataBus
      -> AddressMapping
      -> Int
      -> MonadicState Word8
fetch dataBus addressMapping offset = do
  value <- case addressMapping of
             MotherboardCPUMemory -> do
               memory <- getSoftwareStateMotherboardCPUMemory
               return $ memory ! offset
             MotherboardPPUTableMemory -> do
               memory <- getSoftwareStateMotherboardPPUTableMemory
               return $ memory ! offset
             MotherboardPPUPaletteMemory -> do
               memory <- getSoftwareStateMotherboardPPUPaletteMemory
               return $ memory ! offset
             MotherboardPPUSpriteMemory -> do
               memory <- getSoftwareStateMotherboardPPUSpriteMemory
               return $ memory ! offset
             ProgramReadOnlyMemory -> do
               memory <- getHardwareStateProgramReadOnlyMemory
               return $ memory ! offset
             CharacterReadOnlyMemory -> do
               memory <- getHardwareStateCharacterReadOnlyMemory
               return $ memory ! offset
             PPURegisters -> do
               -- Strict: decode the register before branching.
               let !register = PPU.decodeRegister offset
                   !readable = PPU.registerReadable register
               if readable
                 then runPPU $ PPU.registerFetch register
                 else getLastDataBusValue dataBus
             NoMemory -> do
               getLastDataBusValue dataBus
  -- The read value is always latched, whatever its source.
  putLastDataBusValue dataBus value
  return value
-- | Write one byte over a data bus.  Writes to the ROM regions are
-- silently ignored; a writeable PPU register runs its register-store
-- action; in every case the written value is latched on the bus
-- afterwards.
store :: DataBus
      -> AddressMapping
      -> Int
      -> Word8
      -> MonadicState ()
store dataBus addressMapping offset value = do
  case addressMapping of
    MotherboardCPUMemory -> do
      memory <- getSoftwareStateMotherboardCPUMemory
      let memory' = memory // [(offset, value)]
      putSoftwareStateMotherboardCPUMemory memory'
    MotherboardPPUTableMemory -> do
      memory <- getSoftwareStateMotherboardPPUTableMemory
      let memory' = memory // [(offset, value)]
      putSoftwareStateMotherboardPPUTableMemory memory'
    MotherboardPPUPaletteMemory -> do
      memory <- getSoftwareStateMotherboardPPUPaletteMemory
      let memory' = memory // [(offset, value)]
      putSoftwareStateMotherboardPPUPaletteMemory memory'
    MotherboardPPUSpriteMemory -> do
      memory <- getSoftwareStateMotherboardPPUSpriteMemory
      let memory' = memory // [(offset, value)]
      putSoftwareStateMotherboardPPUSpriteMemory memory'
    ProgramReadOnlyMemory -> return ()
    CharacterReadOnlyMemory -> return ()
    PPURegisters -> do
      -- Strict: decode the register before branching.
      let !register = PPU.decodeRegister offset
          !writeable = PPU.registerWriteable register
      if writeable
        then runPPU $ PPU.registerStore register value
        else return ()
    NoMemory -> return ()
  -- The written value is always latched, even for ignored writes.
  putLastDataBusValue dataBus value
-- | The value most recently latched on the given data bus.
getLastDataBusValue :: DataBus -> MonadicState Word8
getLastDataBusValue CPUDataBus = getSoftwareStateLastCPUDataBusValue
getLastDataBusValue PPUDataBus = getSoftwareStateLastPPUDataBusValue
-- | Latch a value on the given data bus.
putLastDataBusValue :: DataBus -> Word8 -> MonadicState ()
putLastDataBusValue CPUDataBus value =
  putSoftwareStateLastCPUDataBusValue value
putLastDataBusValue PPUDataBus value =
  putSoftwareStateLastPPUDataBusValue value
-- | Advance the machine by one motherboard-clock tick.  The PPU steps on
-- every tick divisible by 4 and the CPU on every tick divisible by 12;
-- the PPU is listed first so it runs before the CPU on ticks where both
-- fire.  The counter wraps at 12, which all divisors divide, so the
-- 'mod' tests stay correct across the wrap.  Shadows 'Prelude.cycle'.
cycle :: MonadicState ()
{-# INLINE cycle #-}
cycle = do
  clockCount <- getSoftwareStateMotherboardClockCount
  mapM_ (\(divisor, chip) -> do
           if mod clockCount divisor == 0
             then case chip of
                    CPU_6502 -> runCPU CPU.cycle
                    PPU_NES -> runPPU PPU.cycle
             else return ())
        [(4, PPU_NES),
         (12, CPU_6502)]
  let clockCount' = mod (clockCount + 1) 12
  putSoftwareStateMotherboardClockCount clockCount'
-- | True when the motherboard clock is on a tick where the CPU steps.
getAtCPUCycle :: MonadicState Bool
getAtCPUCycle =
  getSoftwareStateMotherboardClockCount
    >>= \clockCount -> return (mod clockCount 12 == 0)
-- | True when the next tick is both a CPU tick and the start of a fresh
-- CPU instruction.  Both reads are performed unconditionally, as in the
-- original.
getAboutToBeginInstruction :: MonadicState Bool
getAboutToBeginInstruction = do
  onCPUTick <- getAtCPUCycle
  atInstructionBoundary <- runCPU CPU.getAtInstructionStart
  return (onCPUTick && atInstructionBoundary)
-- | Disassemble the instruction the CPU is about to execute, annotating
-- it with the current PPU timing: horizontal clock (CYC, left-padded to
-- width 3), scanline (SL), and whether a complete frame has been
-- produced yet (F).
disassembleUpcomingInstruction :: MonadicState String
disassembleUpcomingInstruction = do
  horizontalClock <- getSoftwareStatePPUStateHorizontalClock
  verticalClock <- getSoftwareStatePPUStateVerticalClock
  latestCompleteFrame <- getSoftwareStatePPUStateLatestCompleteFrame
  runCPU $ CPU.disassembleInstruction
             [("CYC", leftPad (show horizontalClock) 3),
              ("SL", show verticalClock),
              ("F", case latestCompleteFrame of
                      Nothing -> "no"
                      Just _ -> "yes")]
|
IreneKnapp/legal-emulator
|
BackEnd/Motherboard/NES.hs
|
mit
| 33,227
| 38
| 20
| 11,212
| 6,334
| 3,253
| 3,081
| 747
| 9
|
module Harmonic where
-- | The n-th partial sum of the harmonic series: 1 + 1/2 + ... + 1/n.
-- Returns 0 for n <= 0.  (Replaces the original lazy 'foldl' over a
-- hand-rolled foldr-as-map with the direct 'sum . map recip'.)
harmonic :: (Fractional a, Enum a) => Int -> a
harmonic n = sum (map recip (take n [1 ..]))
-- runHarmonic :: Fractional a => Int -> [a]
-- runHarmonic n = take n sumHarmonic
-- sumHarmonic :: Fractional a => a -> a
-- sumHarmonic 1 = 1
-- sumHarmonic i = 1/i + sumHarmonic (i-1)
|
raventid/coursera_learning
|
haskell/will_kurt/q9.3_harmonic.hs
|
mit
| 354
| 0
| 11
| 72
| 92
| 52
| 40
| 3
| 1
|
{-# htermination floor :: RealFrac a => a -> Int #-}
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/full_haskell/Prelude_floor__1.hs
|
mit
| 54
| 0
| 2
| 12
| 3
| 2
| 1
| 1
| 0
|
{-# LANGUAGE CPP #-}
-- | Compatibility shim: re-exports one key-map module under a stable
-- name.  With aeson >= 2.0 that is 'Data.Aeson.KeyMap'; older aeson
-- versions represented JSON objects with 'Data.HashMap.Strict'.
module Data.Aeson.Config.KeyMap (module KeyMap) where
#if MIN_VERSION_aeson(2,0,0)
import Data.Aeson.KeyMap as KeyMap
#else
import Data.HashMap.Strict as KeyMap
#endif
|
sol/hpack
|
src/Data/Aeson/Config/KeyMap.hs
|
mit
| 210
| 0
| 4
| 42
| 26
| 20
| 6
| 3
| 0
|
module Test.Data.Metafield where
import Test.QuickCheck
import Test.Tasty.HUnit
import Text.RawString.QQ
import Data.Either
-- /admin/customers/342305289/metafields.json
-- | Raw JSON fixture: a customer's metafield list as returned by the
-- endpoint above (one integer-valued and one string-valued metafield).
-- The body is a raw quasiquote; its exact bytes are the test data.
metafields :: String
metafields = [r|
{
  "metafields":[
    {
      "created_at":"2014-12-15T21:00:27-05:00",
      "description":null,
      "id":1475823249,
      "key":"Bar",
      "namespace":"Bar",
      "owner_id":342305289,
      "updated_at":"2014-12-15T21:00:27-05:00",
      "value":123,
      "value_type":"integer",
      "owner_resource":"customer"
    },
    {
      "created_at":"2014-12-15T21:00:20-05:00",
      "description":null,
      "id":1475823149,
      "key":"Foo",
      "namespace":"Foo",
      "owner_id":342305289,
      "updated_at":"2014-12-15T21:00:20-05:00",
      "value":"Foo value",
      "value_type":"string",
      "owner_resource":"customer"
    }
  ]
}
|]
-- /admin/metafields/1475823249.json
-- | Raw JSON fixture: a single metafield as returned by the endpoint
-- above.  The body is a raw quasiquote; its exact bytes are the test
-- data.
metafield :: String
metafield = [r|
{
  "metafield":{
    "created_at":"2014-12-15T21:00:27-05:00",
    "description":null,
    "id":1475823249,
    "key":"Bar",
    "namespace":"Bar",
    "owner_id":342305289,
    "updated_at":"2014-12-15T21:00:27-05:00",
    "value":123,
    "value_type":"integer",
    "owner_resource":"customer"
  }
}
|]
|
aaronlevin/haskell-shopify
|
test/Data/Metafield.hs
|
mit
| 1,356
| 0
| 4
| 364
| 59
| 41
| 18
| -1
| -1
|
import Data.List (tails)
-- | All ways of choosing @n@ elements from a list, each paired with the
-- elements left over (order preserved in both halves).  Calls 'error'
-- when the list has fewer than @n@ elements.
combinations :: Int -> [a] -> [([a], [a])]
combinations 0 xs = [([], xs)]
combinations _ [] = error "not enough elements"
combinations n xs@(x:rest)
  | length xs == n = [(xs, [])]
  | otherwise =
      [ (x : chosen, leftover)
      | (chosen, leftover) <- combinations (n - 1) rest ]
        ++ [ (chosen, x : leftover)
           | (chosen, leftover) <- combinations n rest ]
-- | All ways of splitting a list into consecutive disjoint groups of the
-- given sizes.  Note: an empty size list yields @[[]]@ even when
-- elements remain unassigned, matching the original behaviour.
group :: [Int] -> [a] -> [[[a]]]
group [] _ = [[]]
group (n:ns) xs =
  [ chosen : remainingGroups
  | (chosen, rest) <- combinations n xs
  , remainingGroups <- group ns rest
  ]
-- | Print every way of splitting the nine names into groups of 2, 3, 4.
main :: IO ()
main =
  print (group [2,3,4]
               ["aldo","beat","carla","david","evi","flip","gary","hugo","ida"])
|
zeyuanxy/haskell-playground
|
ninety-nine-haskell-problems/vol3/27.hs
|
mit
| 623
| 0
| 12
| 142
| 371
| 207
| 164
| 13
| 2
|
module Instances.Classes (
druid,
primalPredator
) where
import Character.Types (
Player(..),
wearingHeavyArmor,
ClassSpec(..),
Defense(..),
Class(..),
newClass)
-- | Build the Druid class: +1 to Ref and Will defenses, specialised by
-- the given 'ClassSpec'.  NOTE(review): assuming the numeric arguments
-- (12 + 9 and 5) are base hit points and surges -- confirm against
-- 'newClass'.
druid :: ClassSpec -> Class
druid = newClass "Druid" [(Ref, 1), (Will, 1)] (12 + 9) 5
-- | The "Primal Predator" class spec: appends a speed modifier that
-- grants +1 speed unless the player wears heavy armor (see
-- 'oneSpeedIfLightArmor').
primalPredator :: ClassSpec
primalPredator = ClassSpec "Primal Predator"
    (\player ->
        let miscSpeedPlus = (getSpeedMisc player) ++ [oneSpeedIfLightArmor] in
        player { getSpeedMisc = miscSpeedPlus })
-- | +1 speed unless the player is wearing heavy armor.
oneSpeedIfLightArmor :: Player -> Int
oneSpeedIfLightArmor player
    | wearingHeavyArmor player = 0
    | otherwise                = 1
|
quintenpalmer/dungeons
|
server/src/Instances/Classes.hs
|
mit
| 615
| 0
| 14
| 132
| 186
| 110
| 76
| 19
| 2
|
module Cryptography.SymmetricCryptography.Macro where
import Types
import Macro.MetaMacro
import Macro.Tuple
import Functions.Application.Macro
import Macro.Sets.Macro
-- * Concatenation of messages
-- | Typeset message concatenation as @ | @ between the operands.
-- NOTE(review): this top-level (++) clashes with 'Prelude.(++)'; uses in
-- this module must disambiguate.
(++) :: Note -> Note -> Note
(++) = binop $ commS " " <> "|" <> commS " "
-- conccmp :: Note -> Note -> Note
-- conccmp =
-- | Encryption function
-- The trailing-underscore name is the bare symbol; 'enc' applies it.
enc_ :: Note
enc_ = "e"
-- | Encryption applied to message, key, and randomness.
enc :: Note -> Note -> Note -> Note
enc = fn3 enc_
-- | Encryption with the randomness argument left as a blank subscript.
enc' :: Note -> Note -> Note
enc' m k = enc m k $ comm1 "text" "_"
-- | Decryption function
dec_ :: Note
dec_ = "d"
dec :: Note -> Note -> Note
dec = fn2 dec_
-- | Symmetric cryptosystem: typeset as the pair of its encryption and
-- decryption functions.
scs :: Note -> Note -> Note
scs = tuple
-- | Concrete Symmetric cryptosystem built from 'enc_' and 'dec_'.
scs_ :: Note
scs_ = scs enc_ dec_
-- The four spaces below are calligraphic single letters.
-- | Message space
msp_ :: Note
msp_ = mathcal "M"
-- | Ciphertext space
csp_ :: Note
csp_ = mathcal "C"
-- | Key space
ksp_ :: Note
ksp_ = mathcal "K"
-- | Randomness space
rsp_ :: Note
rsp_ = mathcal "R"
-- * Pseudorandom generator
-- | The generator symbol.
gen_ :: Note
gen_ = "g"
-- | The generator applied to an argument.
gen :: Note -> Note
gen = fn gen_
-- * Bitfield
-- | The set {0, 1}.
bits :: Note
bits = setofs [0, 1]
-- | The set {0, 1}^n of bitstrings of length n.
bitss :: Note -> Note
bitss n = bits ^: n
-- * Ternary field
-- | The set {0, 1, 2}.
terns :: Note
terns = setofs [0, 1, 2]
-- | The set {0, 1, 2}^n.
ternss :: Note -> Note
ternss n = terns ^: n
-- | Length of a bitstring, typeset with vertical bars.
len :: Note -> Note
len = autoBrackets "|" "|"
|
NorfairKing/the-notes
|
src/Cryptography/SymmetricCryptography/Macro.hs
|
gpl-2.0
| 1,380
| 0
| 8
| 346
| 406
| 231
| 175
| 44
| 1
|
{-# LANGUAGE CPP, PackageImports #-}
#if __GLASGOW_HASKELL__ >= 701
{-# LANGUAGE Safe #-}
#endif
{- |
The module "Foreign.Marshal.Array" provides operations for marshalling Haskell
lists into monolithic arrays and vice versa. Most functions come in two
flavours: one for arrays terminated by a special termination element and one
where an explicit length parameter is used to determine the extent of an
array. The typical example for the former case are C's NUL terminated
strings. However, please note that C strings should usually be marshalled
using the functions provided by "Foreign.C.String" as
the Unicode encoding has to be taken into account. All functions specifically
operating on arrays that are terminated by a special termination element have
a name ending on @0@---e.g., 'mallocArray' allocates space for an
array of the given size, whereas 'mallocArray0' allocates space for one
more element to ensure that there is room for the terminator.
-}
module Foreign.Marshal.Array (
-- * Marshalling arrays
-- ** Allocation
--
mallocArray, -- :: Storable a => Int -> IO (Ptr a)
mallocArray0, -- :: Storable a => Int -> IO (Ptr a)
allocaArray, -- :: Storable a => Int -> (Ptr a -> IO b) -> IO b
allocaArray0, -- :: Storable a => Int -> (Ptr a -> IO b) -> IO b
reallocArray, -- :: Storable a => Ptr a -> Int -> IO (Ptr a)
reallocArray0, -- :: Storable a => Ptr a -> Int -> IO (Ptr a)
-- ** Marshalling
--
peekArray, -- :: Storable a => Int -> Ptr a -> IO [a]
peekArray0, -- :: (Storable a, Eq a) => a -> Ptr a -> IO [a]
pokeArray, -- :: Storable a => Ptr a -> [a] -> IO ()
pokeArray0, -- :: Storable a => a -> Ptr a -> [a] -> IO ()
-- ** Combined allocation and marshalling
--
newArray, -- :: Storable a => [a] -> IO (Ptr a)
newArray0, -- :: Storable a => a -> [a] -> IO (Ptr a)
withArray, -- :: Storable a => [a] -> (Ptr a -> IO b) -> IO b
withArray0, -- :: Storable a => a -> [a] -> (Ptr a -> IO b) -> IO b
withArrayLen, -- :: Storable a => [a] -> (Int -> Ptr a -> IO b) -> IO b
withArrayLen0, -- :: Storable a => a -> [a] -> (Int -> Ptr a -> IO b) -> IO b
-- ** Copying
-- | (argument order: destination, source)
copyArray, -- :: Storable a => Ptr a -> Ptr a -> Int -> IO ()
moveArray, -- :: Storable a => Ptr a -> Ptr a -> Int -> IO ()
-- ** Finding the length
--
lengthArray0, -- :: (Storable a, Eq a) => a -> Ptr a -> IO Int
-- ** Indexing
--
advancePtr, -- :: Storable a => Ptr a -> Int -> Ptr a
) where
import qualified "base" Foreign.Marshal.Array as Base
import "base" Foreign.Marshal.Array hiding (peekArray)
#if __GLASGOW_HASKELL__ >= 701
import "base" Foreign.Safe hiding (peekArray)
#else
import "base" Foreign hiding (peekArray)
#endif
-- |Convert an array of given length into a Haskell list.
--
-- Delegates directly to the @base@ implementation; a local binding
-- exists so this compatibility module controls what it exports.
peekArray :: Storable a => Int -> Ptr a -> IO [a]
peekArray = Base.peekArray
|
jwiegley/ghc-release
|
libraries/haskell2010/Foreign/Marshal/Array.hs
|
gpl-3.0
| 2,990
| 0
| 9
| 734
| 184
| 133
| 51
| 27
| 1
|
{-
# Hassoc
Author: Bob Desaunois
Description:
1. Reads file
2. Splits content into words
3. generates PHP Associative Array
4. writes the array to file
-}
module Hassoc
(
run
)
where
import System.IO
import Data.List
-----------------------------------------------------------
-- Constants
-----------------------------------------------------------
inputFile :: FilePath
inputFile = "input.txt"
outputFile :: FilePath
outputFile = "output.txt"
-----------------------------------------------------------
-- Functions
-----------------------------------------------------------
-- | Read the word list from 'inputFile' and write a PHP
-- associative-array literal for it to 'outputFile'.
run :: IO ()
run = do
  fileContents <- readFile inputFile
  let arrayHeader = "$array = array (\n"
  writeFile outputFile (arrayHeader ++ generateOutput 0 (words fileContents) "")
-- | Render one PHP array entry: a tab, the quoted key, and an empty
-- string value.
codeify :: String -> String
codeify key = concat ["\t\"", key, "\" => \"\""]
-- | Append the entry delimiter to a rendered line: @\");\"@ when @x@
-- indexes the last property (closing the PHP array), @\",\"@ otherwise.
-- (Rewritten with guards; the original wrapped pure code in a needless
-- @do@ block.)
appendDelimiter :: Int -> [String] -> String -> String
appendDelimiter x properties codeString
  | x >= length properties - 1 = codeString ++ ");"
  | otherwise                  = codeString ++ ","
-- | Render one @"key" => ""@ line per property, each terminated by
-- 'appendDelimiter', appending to the accumulated output.  @x@ is the
-- index of the next property to render; callers start it at 0.
-- (Fixed: the second guard was @x < length properties@, leaving the
-- guard pair formally non-exhaustive; it is now @otherwise@.)
generateOutput :: Int -> [String] -> String -> String
generateOutput x properties outputString
  | x >= length properties = outputString
  | otherwise =
      generateOutput (x + 1) properties (outputString ++ line ++ "\n")
  where
    line = appendDelimiter x properties (codeify (properties !! x))
|
bobdesaunois/hassoc
|
Hassoc.hs
|
gpl-3.0
| 1,622
| 0
| 14
| 330
| 370
| 196
| 174
| 30
| 2
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeOperators #-}
-- |
-- Copyright : (c) 2010-2012 Benedikt Schmidt & Simon Meier
-- License : GPL v3 (see LICENSE)
--
-- Maintainer : Simon Meier <iridcode@gmail.com>
-- Portability : portable
--
-- Signatures for the terms and multiset rewriting rules used to model and
-- reason about a security protocol.
-- modulo the full Diffie-Hellman equational theory and once modulo AC.
module Theory.Model.Signature (
-- * Signature type
Signature(..)
-- ** Pure signatures
, SignaturePure
, emptySignaturePure
, sigpMaudeSig
-- ** Using Maude to handle operations relative to a 'Signature'
, SignatureWithMaude
, toSignatureWithMaude
, toSignaturePure
, sigmMaudeHandle
-- ** Pretty-printing
, prettySignaturePure
, prettySignatureWithMaude
) where
import Data.Binary
import qualified Data.Label as L
-- import Control.Applicative
import Control.DeepSeq
import System.IO.Unsafe (unsafePerformIO)
import Term.Maude.Process (MaudeHandle, mhFilePath, mhMaudeSig, startMaude)
import Term.Maude.Signature (MaudeSig, minimalMaudeSig, prettyMaudeSig)
import Theory.Text.Pretty
-- | A theory signature.
-- | A theory signature, parameterised by the representation of the
-- message-algebra signature: 'MaudeSig' for the pure form, 'MaudeHandle'
-- when a running Maude process is attached.
data Signature a = Signature
    { -- The signature of the message algebra
      _sigMaudeInfo :: a
    }
-- Generate fclabels lenses for the record above.
$(L.mkLabels [''Signature])
------------------------------------------------------------------------------
-- Pure Signatures
------------------------------------------------------------------------------
-- | A 'Signature' without an associated Maude process.
type SignaturePure = Signature MaudeSig
-- | Access the maude signature.
sigpMaudeSig:: SignaturePure L.:-> MaudeSig
sigpMaudeSig = sigMaudeInfo
-- | The empty pure signature.  NOTE(review): the 'Bool' is forwarded to
-- 'minimalMaudeSig'; its meaning is defined there.
emptySignaturePure :: Bool -> SignaturePure
emptySignaturePure flag = Signature (minimalMaudeSig flag)
-- Instances
------------
deriving instance Eq SignaturePure
deriving instance Ord SignaturePure
deriving instance Show SignaturePure
-- Serialise only the wrapped 'MaudeSig'.
instance Binary SignaturePure where
    put sig = put (L.get sigMaudeInfo sig)
    get = Signature <$> get
instance NFData SignaturePure where
    rnf (Signature y) = rnf y
------------------------------------------------------------------------------
-- Signatures with an attached Maude process
------------------------------------------------------------------------------
-- | A 'Signature' with an associated, running Maude process.
type SignatureWithMaude = Signature MaudeHandle
-- | Access the maude handle in a signature.
sigmMaudeHandle :: SignatureWithMaude L.:-> MaudeHandle
sigmMaudeHandle = sigMaudeInfo
-- | Ensure that maude is running and configured with the current signature.
toSignatureWithMaude :: FilePath -- ^ Path to Maude executable.
                     -> SignaturePure
                     -> IO (SignatureWithMaude)
toSignatureWithMaude maudePath sig = do
    -- Start (or reuse) a Maude process loaded with this signature.
    hnd <- startMaude maudePath (L.get sigMaudeInfo sig)
    return $ sig { _sigMaudeInfo = hnd }
-- | The pure signature of a 'SignatureWithMaude' (drops the handle,
-- keeping only the 'MaudeSig' it carries).
toSignaturePure :: SignatureWithMaude -> SignaturePure
toSignaturePure sig = sig { _sigMaudeInfo = mhMaudeSig $ L.get sigMaudeInfo sig }
{- TODO: There should be a finalizer in place such that as soon as the
MaudeHandle is garbage collected, the appropriate command is sent to Maude
The code below is a crutch and leads to unnecessary complication.
-- | Stop the maude process. This operation is unsafe, as there still might be
-- thunks that rely on the MaudeHandle to refer to a running Maude process.
unsafeStopMaude :: SignatureWithMaude -> IO (SignaturePure)
unsafeStopMaude = error "unsafeStopMaude: implement"
-- | Run an IO action with maude running and configured with a specific
-- signature. As there must not be any part of the return value that depends
-- on unevaluated calls to the Maude process provided to the inner IO action.
unsafeWithMaude :: FilePath -- ^ Path to Maude executable
-> SignaturePure -- ^ Signature to use
-> (SignatureWithMaude -> IO a) -> IO a
unsafeWithMaude maudePath sig =
bracket (startMaude maudePath sig) unsafeStopMaude
-}
-- Instances
------------
-- Eq/Ord/Show all compare or render via the pure projection, ignoring
-- the process handle.
instance Eq SignatureWithMaude where
    x == y = toSignaturePure x == toSignaturePure y
instance Ord SignatureWithMaude where
    compare x y = compare (toSignaturePure x) (toSignaturePure y)
instance Show SignatureWithMaude where
    show = show . toSignaturePure
-- Serialises the Maude executable path plus the pure signature;
-- deserialisation restarts Maude via 'unsafePerformIO'.
instance Binary SignatureWithMaude where
    put sig@(Signature maude) = do
        put (mhFilePath maude)
        put (toSignaturePure sig)
    -- FIXME: reload the right signature
    get = unsafePerformIO <$> (toSignatureWithMaude <$> get <*> get)
instance NFData SignatureWithMaude where
    rnf (Signature _maude) = ()
------------------------------------------------------------------------------
-- Pretty-printing
------------------------------------------------------------------------------
-- | Pretty-print a pure signature.
-- (The Haddock comments on these two functions were swapped; fixed.)
prettySignaturePure :: HighlightDocument d => SignaturePure -> d
prettySignaturePure sig =
    prettyMaudeSig $ L.get sigpMaudeSig sig
-- | Pretty-print a signature with an attached Maude handle.
prettySignatureWithMaude :: HighlightDocument d => SignatureWithMaude -> d
prettySignatureWithMaude sig =
    prettyMaudeSig $ mhMaudeSig $ L.get sigmMaudeHandle sig
|
tamarin-prover/tamarin-prover
|
lib/theory/src/Theory/Model/Signature.hs
|
gpl-3.0
| 5,601
| 0
| 11
| 1,069
| 709
| 399
| 310
| 70
| 1
|
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveAnyClass #-}
-- |
-- Copyright : (c) 2010-2012 Benedikt Schmidt & Simon Meier
-- License : GPL v3 (see LICENSE)
--
-- Maintainer : Simon Meier <iridcode@gmail.com>
-- Portability : GHC only
--
-- Common types for our constraint solver. They must be declared jointly
-- because there is a recursive dependency between goals, proof contexts, and
-- case distinctions.
-- Needed to move common types to System, now this modul is just passing them through.
module Theory.Constraint.Solver.Heuristics (
GoalRanking(..)
, Heuristic(..)
, goalRankingIdentifiers
, goalRankingIdentifiersDiff
, charToGoalRankingMay
, charToGoalRanking
, charToGoalRankingDiffMay
, charToGoalRankingDiff
, listGoalRankings
, listGoalRankingsDiff
, goalRankingName
) where
import GHC.Generics (Generic)
import Data.Binary
import Control.DeepSeq
import Data.Maybe (fromMaybe)
import qualified Data.Map as M
import Theory.Text.Pretty
-- | The different available functions to rank goals with respect to their
-- order of solving in a constraint system.
-- (Constructor summaries below mirror 'goalRankingName'.)
data GoalRanking =
    GoalNrRanking                -- ^ order of creation
  | OracleRanking String         -- ^ external oracle script at the given path
  | OracleSmartRanking String    -- ^ oracle layered over the 'smart' heuristic
  | SapicRanking                 -- ^ tuned to the output of the SAPIC tool
  | SapicLivenessRanking         -- ^ SAPIC output, liveness properties
  | SapicPKCS11Ranking           -- ^ SAPIC-translated PKCS#11 models
  | UsefulGoalNrRanking          -- ^ usefulness, then order of creation
  | SmartRanking Bool            -- ^ 'smart' heuristic; True = loop breakers allowed
  | SmartDiffRanking             -- ^ 'smart' heuristic for diff proofs
  | InjRanking Bool              -- ^ stateful injective protocols; True = loop breakers allowed
    deriving( Eq, Ord, Show, Generic, NFData, Binary )
-- | A heuristic: a sequence of 'GoalRanking's.  (How the list is consumed
-- is decided by the proof search, which is not visible in this module.)
newtype Heuristic = Heuristic [GoalRanking]
    deriving( Eq, Ord, Show, Generic, NFData, Binary )
-- | Single-character selectors for the rankings available in normal
-- (non-diff) mode.  Keys must remain unique; entry order is irrelevant
-- to 'M.fromList'.
goalRankingIdentifiers :: M.Map Char GoalRanking
goalRankingIdentifiers = M.fromList
    [ ('C', GoalNrRanking)
    , ('I', InjRanking True)
    , ('O', OracleSmartRanking "./oracle")
    , ('P', SapicPKCS11Ranking)
    , ('S', SmartRanking True)
    , ('c', UsefulGoalNrRanking)
    , ('i', InjRanking False)
    , ('l', SapicLivenessRanking)
    , ('o', OracleRanking "./oracle")
    , ('p', SapicRanking)
    , ('s', SmartRanking False)
    ]
-- | Single-character selectors for the rankings available in diff mode.
goalRankingIdentifiersDiff :: M.Map Char GoalRanking
goalRankingIdentifiersDiff = M.fromList
    [ ('C', GoalNrRanking)
    , ('O', OracleSmartRanking "./oracle")
    , ('c', UsefulGoalNrRanking)
    , ('o', OracleRanking "./oracle")
    , ('s', SmartDiffRanking)
    ]
-- | Resolve a selector character in normal mode, if it is known.
charToGoalRankingMay :: Char -> Maybe GoalRanking
charToGoalRankingMay = flip M.lookup goalRankingIdentifiers
-- | Resolve a selector character in normal mode; aborts with a help
-- message listing all valid selectors when the character is unknown.
charToGoalRanking :: Char -> GoalRanking
charToGoalRanking c =
    case charToGoalRankingMay c of
      Just ranking -> ranking
      Nothing      -> error $ render $ sep $ map text $ lines
                        $ "Unknown goal ranking '" ++ [c]
                          ++ "'. Use one of the following:\n" ++ listGoalRankings
-- | Resolve a selector character in diff mode, if it is known.
charToGoalRankingDiffMay :: Char -> Maybe GoalRanking
charToGoalRankingDiffMay = flip M.lookup goalRankingIdentifiersDiff
-- | Resolve a selector character in diff mode; aborts with a help
-- message listing all valid selectors when the character is unknown.
charToGoalRankingDiff :: Char -> GoalRanking
charToGoalRankingDiff c =
    case charToGoalRankingDiffMay c of
      Just ranking -> ranking
      Nothing      -> error $ render $ sep $ map text $ lines
                        $ "Unknown goal ranking '" ++ [c]
                          ++ "'. Use one of the following:\n" ++ listGoalRankingsDiff
-- | One help line per normal-mode selector, in ascending key order
-- (same order 'M.foldMapWithKey' would visit).
listGoalRankings :: String
listGoalRankings = concatMap describe (M.toAscList goalRankingIdentifiers)
  where
    describe (k, v) = "'"++[k]++"': " ++ goalRankingName v ++ "\n"
-- | One help line per diff-mode selector, in ascending key order.
listGoalRankingsDiff :: String
listGoalRankingsDiff = concatMap describe (M.toAscList goalRankingIdentifiersDiff)
  where
    describe (k, v) = "'"++[k]++"': " ++ goalRankingName v ++ "\n"
-- | The name/explanation of a 'GoalRanking'.
goalRankingName :: GoalRanking -> String
goalRankingName ranking = "Goals sorted according to " ++ explain ranking
  where
    explain GoalNrRanking                   = "their order of creation"
    explain (OracleRanking oracleName)      = "an oracle for ranking, located at " ++ oracleName
    explain (OracleSmartRanking oracleName) = "an oracle for ranking based on 'smart' heuristic, located at " ++ oracleName
    explain UsefulGoalNrRanking             = "their usefulness and order of creation"
    explain SapicRanking                    = "heuristics adapted to the output of the SAPIC tool"
    explain SapicLivenessRanking            = "heuristics adapted to the output of the SAPIC tool for liveness properties"
    explain SapicPKCS11Ranking              = "heuristics adapted to a model of PKCS#11 translated using the SAPIC tool"
    explain (SmartRanking useLoopBreakers)  = "the 'smart' heuristic" ++ loopStatus useLoopBreakers
    explain SmartDiffRanking                = "the 'smart' heuristic (for diff proofs)"
    explain (InjRanking useLoopBreakers)    = "heuristics adapted to stateful injective protocols" ++ loopStatus useLoopBreakers
    loopStatus allowed
      | allowed   = " (loop breakers allowed)"
      | otherwise = " (loop breakers delayed)"
|
rsasse/tamarin-prover
|
lib/theory/src/Theory/Constraint/Solver/Heuristics.hs
|
gpl-3.0
| 5,019
| 0
| 15
| 1,381
| 867
| 486
| 381
| 89
| 11
|
-- This module defines a Tree datatype and instance for the Show and the Edit type classes.
module TreeManipLib (Tree(Tip, (:+:)), treeSize, treeSubst, treeFlip, treeFlattens) where {
-- | A binary tree carrying values at its leaves ('Tip').
data Tree a = Tip a
            | Tree a :+: Tree a;
-- NOTE(review): 'shows' is not a method of the standard 'Show' class
-- (its methods are showsPrec/show/showList); this module appears to target
-- a dialect where binding it in the instance is legal.
instance (Show a) => Show (Tree a) where {
  shows tr s =
    case tr of {
      Tip x -> "Tip " ++ shows x s;
      tr1 :+: tr2 -> "(" ++ shows tr1 (" :+: " ++ (shows tr2 (")" ++ s)))
    };
  show tr = shows tr ""
  };
-- Editor integration.  The 'Edit' class is provided by the surrounding
-- framework (not imported here); 'menu' lists the commands and 'update'
-- dispatches on the selected index.  Menu order must stay in sync with
-- the indices below.
instance Edit (Tree a) where {
  menu tr = [
        "Copy",
        "Copy by value",
        "Paste",
        "Flip",
        "Evaluate",
        "Copy as list",
        "Report size of subtree"
        ];
  -- NOTE(review): the case is non-exhaustive -- an index outside 0..6
  -- fails at runtime.
  update tr menuIndex path lhs rhs clip =
    case menuIndex of {
      0 -> fCopy tr path lhs rhs clip;
      1 -> fCopyByValue tr path lhs rhs clip;
      2 -> fPaste tr path lhs rhs clip;
      3 -> fFlip tr path lhs rhs clip;
      4 -> fEval tr path lhs rhs clip;
      5 -> fCopyAsList tr path lhs rhs clip;
      6 -> (Nothing, Nothing, Just ("Size is: " ++ show (treeSize (treeSelect path tr))))
    }
  };
-- | Number of leaves ('Tip's) in a tree.
treeSize :: Tree a -> Int;
treeSize (Tip _) = 1;
treeSize (l :+: r) = treeSize l + treeSize r;
-- | Walk a path down to a subtree: 0 selects the left child, any other
-- number the right; a non-empty path at a 'Tip' is an error.
treeSelect :: [Int] -> Tree a -> Tree a;
treeSelect [] t = t;
treeSelect (n:rest) (l :+: r) = treeSelect rest (if n == 0 then l else r);
treeSelect _ _ = error "Tree.treeSelect: bad path";
-- Insert tr' in tr at location defined by path
-- (here 0 selects the left child and 1 the right; any other step errors).
treeSubst :: [Int] -> Tree a -> Tree a -> Tree a;
treeSubst [] new _ = new;
treeSubst (0:rest) new (l :+: r) = treeSubst rest new l :+: r;
treeSubst (1:rest) new (l :+: r) = l :+: treeSubst rest new r;
treeSubst _ _ _ = error "Tree.treeSubst: bad path";
-- Mirror the subtree reached by the path (0 = left, 1 = right).
treeFlip [] t = treeFlip' t;
treeFlip (0:rest) (l :+: r) = treeFlip rest l :+: r;
treeFlip (1:rest) (l :+: r) = l :+: treeFlip rest r;
treeFlip _ _ = error "Tree.treeFlip: bad path";
-- | Flatten the leaf values left-to-right onto an accumulator list.
treeFlattens :: Tree a -> [a] -> [a];
treeFlattens (Tip x) acc = x : acc;
treeFlattens (l :+: r) acc = treeFlattens l (treeFlattens r acc);
-- Functions evaluated by Menu commands ----------------------------------------------------
-- Each handler returns a triple
--   (Maybe newExpression, Maybe newClipboard, Maybe statusMessage);
-- 'Nothing' leaves the corresponding editor component unchanged
-- (inferred from 'update' above -- TODO confirm against the Edit class).
-- Copy "by reference": puts a treeSelect *expression* on the clipboard.
fCopy tr path lhs rhs clip =
  let { ref = if null lhs then paren rhs else lhs;
        sClip = "treeSelect " ++ show path ++ " " ++ ref
      } in (Nothing, Just sClip, Just "Copied!");
-- Copy by value: shows the selected subtree itself.
fCopyByValue tr path lhs rhs clip =
  let { subTree = treeSelect path tr;
        sClip = show subTree
      } in (Nothing, Just sClip, Just "Copied by value!");
-- Paste: builds a treeSubst expression splicing the clipboard into rhs.
fPaste tr path lhs rhs clip =
  let { sExp = "treeSubst " ++ show path ++ paren clip ++ paren rhs;
        sWarn = "Pasted!"
      } in (Just sExp, Nothing, Just sWarn);
-- Flip: refuses on a leaf, otherwise builds a treeFlip expression.
fFlip tr path lhs rhs clip =
  case tr of {
    Tip _ -> (Nothing, Nothing, Just "Cannot flip this element!");
    _ ->
     let { sExp = "treeFlip " ++ show path ++ paren rhs;
           sWarn = "Flipped!"
         } in (Just sExp, Nothing, Just sWarn)
  };
-- Evaluate: replaces the expression by the shown value of the tree.
fEval tr path lhs rhs clip =
  let { sExp = show tr;
        sWarn = "Evaluated!"
      } in (Just sExp, Nothing, Just sWarn);
-- Copy the flattened leaf values as a list literal.
fCopyAsList tr path lhs rhs clip =
  let { sClip = show (treeFlattens tr []);
        sWarn = "Copied to clipboard as a list!"
      } in (Nothing, Just sClip, Just sWarn);
-- Subsidiary functions -------------------------------------------------------------------
-- Wrap a string in spaced parentheses (for building expressions).
paren s = " (" ++ s ++ ") ";
-- Mirror a whole tree recursively.
treeFlip' t@(Tip _) = t;
treeFlip' (l :+: r) = treeFlip' r :+: treeFlip' l
}
|
ckaestne/CIDE
|
CIDE_Language_Haskell/test/fromviral/TreeManipLib.hs
|
gpl-3.0
| 4,240
| 0
| 17
| 1,626
| 1,466
| 789
| 677
| 90
| 4
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.LatencyTest.Types
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.LatencyTest.Types
(
-- * Service Configuration
latencyTestService
-- * OAuth Scopes
, monitoringReadOnlyScope
-- * IntValue
, IntValue
, intValue
, ivValue
, ivLabel
-- * DoubleValue
, DoubleValue
, doubleValue
, dvValue
, dvLabel
-- * StringValue
, StringValue
, stringValue
, svValue
, svLabel
-- * AggregatedStatsReply
, AggregatedStatsReply
, aggregatedStatsReply
, asrTestValue
-- * Stats
, Stats
, stats
, sTime
, sDoubleValues
, sStringValues
, sIntValues
-- * AggregatedStats
, AggregatedStats
, aggregatedStats
, asStats
-- * StatsReply
, StatsReply
, statsReply
, srTestValue
) where
import Network.Google.LatencyTest.Types.Product
import Network.Google.LatencyTest.Types.Sum
import Network.Google.Prelude
-- | Default request referring to version 'v2' of the Google Cloud Network Performance Monitoring API. This contains the host and root path used as a starting point for constructing service requests.
-- (Generated code -- the module header marks stability as auto-generated.)
latencyTestService :: ServiceConfig
latencyTestService
  = defaultService (ServiceId "cloudlatencytest:v2")
      "cloudlatencytest-pa.googleapis.com"
-- | View monitoring data for all of your Google Cloud and API projects
-- (OAuth scope carried at the type level via 'Proxy').
monitoringReadOnlyScope :: Proxy '["https://www.googleapis.com/auth/monitoring.readonly"]
-- NOTE(review): the trailing semicolon is a redundant (empty) declaration
-- but harmless.
monitoringReadOnlyScope = Proxy;
|
rueshyna/gogol
|
gogol-latencytest/gen/Network/Google/LatencyTest/Types.hs
|
mpl-2.0
| 1,998
| 0
| 7
| 464
| 184
| 128
| 56
| 46
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.EC2.AuthorizeSecurityGroupIngress
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Adds one or more ingress rules to a security group.
--
-- EC2-Classic: You can have up to 100 rules per group.
--
-- EC2-VPC: You can have up to 50 rules per group (covering both ingress and
-- egress rules).
--
-- Rule changes are propagated to instances within the security group as
-- quickly as possible. However, a small delay might occur.
--
-- [EC2-Classic] This action gives one or more CIDR IP address ranges
-- permission to access a security group in your account, or gives one or more
-- security groups (called the /source groups/) permission to access a security
-- group for your account. A source group can be for your own AWS account, or
-- another.
--
-- [EC2-VPC] This action gives one or more CIDR IP address ranges permission to
-- access a security group in your VPC, or gives one or more other security
-- groups (called the /source groups/) permission to access a security group for
-- your VPC. The security groups must all be for the same VPC.
--
-- <http://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-AuthorizeSecurityGroupIngress.html>
module Network.AWS.EC2.AuthorizeSecurityGroupIngress
(
-- * Request
AuthorizeSecurityGroupIngress
-- ** Request constructor
, authorizeSecurityGroupIngress
-- ** Request lenses
, asgiCidrIp
, asgiDryRun
, asgiFromPort
, asgiGroupId
, asgiGroupName
, asgiIpPermissions
, asgiIpProtocol
, asgiSourceSecurityGroupName
, asgiSourceSecurityGroupOwnerId
, asgiToPort
-- * Response
, AuthorizeSecurityGroupIngressResponse
-- ** Response constructor
, authorizeSecurityGroupIngressResponse
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.EC2.Types
import qualified GHC.Exts
-- | Query parameters of the AuthorizeSecurityGroupIngress request; each
-- field is serialised by the 'ToQuery' instance further below.
data AuthorizeSecurityGroupIngress = AuthorizeSecurityGroupIngress
    { _asgiCidrIp                     :: Maybe Text
    , _asgiDryRun                     :: Maybe Bool
    , _asgiFromPort                   :: Maybe Int
    , _asgiGroupId                    :: Maybe Text
    , _asgiGroupName                  :: Maybe Text
    , _asgiIpPermissions              :: List "item" IpPermission
    , _asgiIpProtocol                 :: Maybe Text
    , _asgiSourceSecurityGroupName    :: Maybe Text
    , _asgiSourceSecurityGroupOwnerId :: Maybe Text
    , _asgiToPort                     :: Maybe Int
    } deriving (Eq, Read, Show)
-- | 'AuthorizeSecurityGroupIngress' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'asgiCidrIp' @::@ 'Maybe' 'Text'
--
-- * 'asgiDryRun' @::@ 'Maybe' 'Bool'
--
-- * 'asgiFromPort' @::@ 'Maybe' 'Int'
--
-- * 'asgiGroupId' @::@ 'Maybe' 'Text'
--
-- * 'asgiGroupName' @::@ 'Maybe' 'Text'
--
-- * 'asgiIpPermissions' @::@ ['IpPermission']
--
-- * 'asgiIpProtocol' @::@ 'Maybe' 'Text'
--
-- * 'asgiSourceSecurityGroupName' @::@ 'Maybe' 'Text'
--
-- * 'asgiSourceSecurityGroupOwnerId' @::@ 'Maybe' 'Text'
--
-- * 'asgiToPort' @::@ 'Maybe' 'Int'
--
-- Smart constructor: every optional field defaults to 'Nothing' and the
-- permission list to 'mempty'; populate via the lenses below.
authorizeSecurityGroupIngress :: AuthorizeSecurityGroupIngress
authorizeSecurityGroupIngress = AuthorizeSecurityGroupIngress
    { _asgiDryRun                     = Nothing
    , _asgiGroupName                  = Nothing
    , _asgiGroupId                    = Nothing
    , _asgiSourceSecurityGroupName    = Nothing
    , _asgiSourceSecurityGroupOwnerId = Nothing
    , _asgiIpProtocol                 = Nothing
    , _asgiFromPort                   = Nothing
    , _asgiToPort                     = Nothing
    , _asgiCidrIp                     = Nothing
    , _asgiIpPermissions              = mempty
    }
-- | The CIDR IP address range. You can't specify this parameter when specifying a
-- source security group.
asgiCidrIp :: Lens' AuthorizeSecurityGroupIngress (Maybe Text)
asgiCidrIp = lens _asgiCidrIp (\s a -> s { _asgiCidrIp = a })
-- | Checks whether you have the required permissions for the action,
-- without actually making the request (per the EC2 API reference's
-- common @DryRun@ parameter).
asgiDryRun :: Lens' AuthorizeSecurityGroupIngress (Maybe Bool)
asgiDryRun = lens _asgiDryRun (\s a -> s { _asgiDryRun = a })
-- | The start of port range for the TCP and UDP protocols, or an ICMP type
-- number. For the ICMP type number, use '-1' to specify all ICMP types.
asgiFromPort :: Lens' AuthorizeSecurityGroupIngress (Maybe Int)
asgiFromPort = lens _asgiFromPort (\s a -> s { _asgiFromPort = a })
-- | The ID of the security group. Required for a nondefault VPC.
asgiGroupId :: Lens' AuthorizeSecurityGroupIngress (Maybe Text)
asgiGroupId = lens _asgiGroupId (\s a -> s { _asgiGroupId = a })
-- | [EC2-Classic, default VPC] The name of the security group.
asgiGroupName :: Lens' AuthorizeSecurityGroupIngress (Maybe Text)
asgiGroupName = lens _asgiGroupName (\s a -> s { _asgiGroupName = a })
-- | A set of IP permissions. Can be used to specify multiple rules in a single
-- command.
asgiIpPermissions :: Lens' AuthorizeSecurityGroupIngress [IpPermission]
asgiIpPermissions =
    lens _asgiIpPermissions (\s a -> s { _asgiIpPermissions = a })
        . _List
-- | The IP protocol name ('tcp', 'udp', 'icmp') or number (see <http://www.iana.org/assignments/protocol-numbers/protocol-numbers.xhtml Protocol Numbers>). (VPC
-- only) Use '-1' to specify all.
asgiIpProtocol :: Lens' AuthorizeSecurityGroupIngress (Maybe Text)
asgiIpProtocol = lens _asgiIpProtocol (\s a -> s { _asgiIpProtocol = a })
-- | [EC2-Classic, default VPC] The name of the source security group. You can't
-- specify a source security group and a CIDR IP address range.
asgiSourceSecurityGroupName :: Lens' AuthorizeSecurityGroupIngress (Maybe Text)
asgiSourceSecurityGroupName =
    lens _asgiSourceSecurityGroupName
        (\s a -> s { _asgiSourceSecurityGroupName = a })
-- | [EC2-Classic] The AWS account ID that owns the source security group
-- (per the EC2 API reference; the original comment described the group ID).
-- You can't specify a source security group and a CIDR IP address range.
asgiSourceSecurityGroupOwnerId :: Lens' AuthorizeSecurityGroupIngress (Maybe Text)
asgiSourceSecurityGroupOwnerId =
    lens _asgiSourceSecurityGroupOwnerId
        (\s a -> s { _asgiSourceSecurityGroupOwnerId = a })
-- | The end of port range for the TCP and UDP protocols, or an ICMP code number.
-- For the ICMP code number, use '-1' to specify all ICMP codes for the ICMP type.
asgiToPort :: Lens' AuthorizeSecurityGroupIngress (Maybe Int)
asgiToPort = lens _asgiToPort (\s a -> s { _asgiToPort = a })
-- | The call returns no payload, so the response type is a bare marker.
data AuthorizeSecurityGroupIngressResponse = AuthorizeSecurityGroupIngressResponse
    deriving (Eq, Ord, Read, Show, Generic)
-- | 'AuthorizeSecurityGroupIngressResponse' constructor.
authorizeSecurityGroupIngressResponse :: AuthorizeSecurityGroupIngressResponse
authorizeSecurityGroupIngressResponse = AuthorizeSecurityGroupIngressResponse
-- Query-style EC2 call: everything goes into the query string, rooted at "/".
instance ToPath AuthorizeSecurityGroupIngress where
    toPath = const "/"
instance ToQuery AuthorizeSecurityGroupIngress where
    toQuery AuthorizeSecurityGroupIngress{..} = mconcat
        [ "CidrIp"                     =? _asgiCidrIp
        , "DryRun"                     =? _asgiDryRun
        , "FromPort"                   =? _asgiFromPort
        , "GroupId"                    =? _asgiGroupId
        , "GroupName"                  =? _asgiGroupName
        , "IpPermissions"              `toQueryList` _asgiIpPermissions
        , "IpProtocol"                 =? _asgiIpProtocol
        , "SourceSecurityGroupName"    =? _asgiSourceSecurityGroupName
        , "SourceSecurityGroupOwnerId" =? _asgiSourceSecurityGroupOwnerId
        , "ToPort"                     =? _asgiToPort
        ]
instance ToHeaders AuthorizeSecurityGroupIngress
instance AWSRequest AuthorizeSecurityGroupIngress where
    type Sv AuthorizeSecurityGroupIngress = EC2
    type Rs AuthorizeSecurityGroupIngress = AuthorizeSecurityGroupIngressResponse
    request  = post "AuthorizeSecurityGroupIngress"
    -- No body to parse: succeed with the empty response marker.
    response = nullResponse AuthorizeSecurityGroupIngressResponse
|
kim/amazonka
|
amazonka-ec2/gen/Network/AWS/EC2/AuthorizeSecurityGroupIngress.hs
|
mpl-2.0
| 8,775
| 0
| 10
| 1,997
| 998
| 603
| 395
| 104
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Calendar.Settings.Watch
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Watch for changes to Settings resources.
--
-- /See:/ <https://developers.google.com/google-apps/calendar/firstapp Calendar API Reference> for @calendar.settings.watch@.
module Network.Google.Resource.Calendar.Settings.Watch
(
-- * REST Resource
SettingsWatchResource
-- * Creating a Request
, settingsWatch
, SettingsWatch
-- * Request Lenses
, swSyncToken
, swPayload
, swPageToken
, swMaxResults
) where
import Network.Google.AppsCalendar.Types
import Network.Google.Prelude
-- | A resource alias for @calendar.settings.watch@ method which the
-- 'SettingsWatch' request conforms to.
-- Servant-style type: the nested path segments, query parameters, JSON
-- body and POST result of the call, all at the type level.
type SettingsWatchResource =
     "calendar" :>
       "v3" :>
         "users" :>
           "me" :>
             "settings" :>
               "watch" :>
                 QueryParam "syncToken" Text :>
                   QueryParam "pageToken" Text :>
                     QueryParam "maxResults" (Textual Int32) :>
                       QueryParam "alt" AltJSON :>
                         ReqBody '[JSON] Channel :> Post '[JSON] Channel
-- | Watch for changes to Settings resources.
--
-- /See:/ 'settingsWatch' smart constructor.
-- Request record; fields are strict and populated via the lenses below.
data SettingsWatch =
  SettingsWatch'
    { _swSyncToken  :: !(Maybe Text)
    , _swPayload    :: !Channel
    , _swPageToken  :: !(Maybe Text)
    , _swMaxResults :: !(Maybe (Textual Int32))
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'SettingsWatch' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'swSyncToken'
--
-- * 'swPayload'
--
-- * 'swPageToken'
--
-- * 'swMaxResults'
-- Smart constructor: only the channel payload is mandatory.
settingsWatch
    :: Channel -- ^ 'swPayload'
    -> SettingsWatch
settingsWatch pSwPayload_ =
  SettingsWatch'
    { _swSyncToken = Nothing
    , _swPayload = pSwPayload_
    , _swPageToken = Nothing
    , _swMaxResults = Nothing
    }
-- | Token obtained from the nextSyncToken field returned on the last page of
-- results from the previous list request. It makes the result of this list
-- request contain only entries that have changed since then. If the
-- syncToken expires, the server will respond with a 410 GONE response code
-- and the client should clear its storage and perform a full
-- synchronization without any syncToken. Learn more about incremental
-- synchronization. Optional. The default is to return all entries.
swSyncToken :: Lens' SettingsWatch (Maybe Text)
swSyncToken
  = lens _swSyncToken (\ s a -> s{_swSyncToken = a})
-- | Multipart request metadata.
swPayload :: Lens' SettingsWatch Channel
swPayload
  = lens _swPayload (\ s a -> s{_swPayload = a})
-- | Token specifying which result page to return. Optional.
swPageToken :: Lens' SettingsWatch (Maybe Text)
swPageToken
  = lens _swPageToken (\ s a -> s{_swPageToken = a})
-- | Maximum number of entries returned on one result page. By default the
-- value is 100 entries. The page size can never be larger than 250
-- entries. Optional.
-- ('mapping _Coerce' unwraps the 'Textual' newtype so callers see 'Int32'.)
swMaxResults :: Lens' SettingsWatch (Maybe Int32)
swMaxResults
  = lens _swMaxResults (\ s a -> s{_swMaxResults = a})
      . mapping _Coerce
-- Wires the record to 'SettingsWatchResource': argument order must match
-- the query parameters declared in the resource type.
instance GoogleRequest SettingsWatch where
        type Rs SettingsWatch = Channel
        type Scopes SettingsWatch =
             '["https://www.googleapis.com/auth/calendar",
               "https://www.googleapis.com/auth/calendar.readonly",
               "https://www.googleapis.com/auth/calendar.settings.readonly"]
        requestClient SettingsWatch'{..}
          = go _swSyncToken _swPageToken _swMaxResults
              (Just AltJSON)
              _swPayload
              appsCalendarService
          where go
                  = buildClient (Proxy :: Proxy SettingsWatchResource)
                      mempty
|
brendanhay/gogol
|
gogol-apps-calendar/gen/Network/Google/Resource/Calendar/Settings/Watch.hs
|
mpl-2.0
| 4,539
| 0
| 18
| 1,067
| 592
| 349
| 243
| 86
| 1
|
{-
Habit of Fate, a game to incentivize habit formation.
Copyright (C) 2019 Gregory Crosswhite
This program is free software: you can redistribute it and/or modify
it under version 3 of the terms of the GNU Affero General Public License.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE UnicodeSyntax #-}
module HabitOfFate.Quests where
import HabitOfFate.Prelude
import Control.Monad.Catch (MonadThrow)
import Control.Monad.Random (MonadRandom, uniform)
import Data.Coerce
import HabitOfFate.Data.Markdown
import HabitOfFate.Data.QuestState
import HabitOfFate.Quest
import HabitOfFate.Story
import qualified HabitOfFate.Quests.Forest as Forest
import qualified HabitOfFate.Quests.DarkLord as DarkLord
import qualified HabitOfFate.Quests.TheLongVoyageHome as TheLongVoyageHome
-- | All quests a random quest state may be drawn from; keep in sync with
-- the quest modules imported above.
quests ∷ [Quest]
quests =
  [ Forest.quest
  , DarkLord.quest
  , TheLongVoyageHome.quest
  ]
-- | Pick one of 'quests' uniformly at random and build its initial state.
randomQuestState ∷ (MonadRandom m, MonadThrow m) ⇒ m (QuestState Markdown)
randomQuestState = randomQuestStateFor =<< uniform quests
-- | Interlude texts shown between quests; the quasiquoter splits the
-- block into one entry per dashed section (the text below is runtime
-- data, not code).
interludes ∷ [Markdown]
interludes = coerce $ [dashed_sections|
You stare at the world from above and consider who to follow next.
Ah, that mortal could be interesting.
|]
|
gcross/habit-of-fate
|
sources/library/HabitOfFate/Quests.hs
|
agpl-3.0
| 1,652
| 0
| 8
| 268
| 184
| 117
| 67
| 24
| 1
|
module NetAux where
import Graphics.UI.WX
import Control.Concurrent
import NetData
import WindowContext
version = "HCPN NetEdit v0.1"
-- auxiliary functions -- {{{
-- | Scale a vector to length @s@, preserving its direction.
-- NOTE(review): divides by 'vecLength v', so a zero-length vector causes
-- a division by zero -- callers must avoid it.
scaleVec s v = vec (myround $ (fromIntegral $ vecX v) * f)
                   (myround $ (fromIntegral $ vecY v) * f)
  where
  f = s/vecLength v
myround x = truncate (0.5*signum x + x)
-- | First object under the cursor: a node within 'nodeUnit' of the click,
-- or an arc whose polyline is hit while none of its endpoints or
-- intermediate points are (those are handled by 'findAux').
findObject pt ((Left (pt',nV)):ns)
  | closeByPt nodeUnit pt pt'
  = Just nV
findObject pt ((Right ((f,ps',t),aV)):ns)
  | not (closeByPt nodeUnit f pt) && not (closeByPt nodeUnit t pt)
    && or cbls && not (any (closeByPt arcPointUnit pt) ps)
  = Just aV
  where
  ps = map snd ps'
  pts = f:(ps++[t])
  -- one segment per consecutive pair of polyline points
  cbls = zipWith (closeByLine pt) (init pts) (tail pts)
findObject pt (_:ns) = findObject pt ns
findObject pt [] = Nothing
-- | First arc whose *intermediate* point is under the cursor (endpoints
-- excluded).
findAux pt ((Right ((f,ps',t),aV)):ns)
  | not (closeByPt nodeUnit f pt) && not (closeByPt nodeUnit t pt)
    && any (closeByPt arcPointUnit pt) ps
  = Just aV
  where
  ps = map snd ps'
findAux pt (_:ns) = findAux pt ns
findAux pt [] = Nothing
closeByPt unit pt pt' = unit > (abs $ vecLength $ vecBetween pt pt')
-- | Is @pt@ within 'nodeUnit' of the segment from @f@ to @t@?
-- (@x@ = perpendicular offset, @y@ = position along the segment.)
closeByLine pt f t = (abs x) < nodeUnit && 0 <= y && y <= l
  where (x,y) = betweenPtLine pt f t
        l = vecLength $ vecBetween f t
-- | Decompose @pt@ relative to the line through @f@ and @t@:
-- first component is the (signed, scaled) perpendicular offset,
-- second the position along the line; both scaled by the line length.
-- NOTE(review): when @f == t@ the divisors below are zero -- TODO confirm
-- that callers never pass a degenerate segment.
betweenPtLine pt f t = (a*ll,b*ll)
  where
  (fx,fy) = (fromIntegral $ pointX f,fromIntegral $ pointY f)
  (tx,ty) = (fromIntegral $ pointX t,fromIntegral $ pointY t)
  (px,py) = (fromIntegral $ pointX pt,fromIntegral $ pointY pt)
  (lx,ly) = (tx-fx,ty-fy)
  ll = sqrt (lx*lx+ly*ly)
  (fpx,fpy) = (fx-px,fy-py)
  -- special-case a vertical direction vector to avoid dividing by lx
  (a,b) = if lx==0 && ly/=0
          then (-fpx/ly,-fpy/ly)
          else ((lx*fpy-ly*fpx)/(lx*lx+ly*ly)
               ,(-a*ly-fpx)/lx)
-- | Midpoint of the straight line between the two points.
midArc from to =
    let d = vecBetween from to
    in  pointMove (scaleVec (vecLength d / 2) d) from
-- Read the endpoint positions of an arc, normalised to (from, points, to)
-- order.  NOTE(review): the local bindings deliberately shadow the
-- pattern variables p/t (node refs) with their positions.
getArcPos a@(NetObject{object=(PT p t l ps)}) = do
  (p,_) <- getNPos p
  (t,_) <- getNPos t
  return $ Right ((p,ps,t),a)
getArcPos a@(NetObject{object=(TP t p l ps)}) = do
  (p,_) <- getNPos p
  (t,_) <- getNPos t
  return $ Right ((t,ps,p),a)
-- Label access for both arc directions.
getLabel a@(NetObject{object=(TP t p l _)}) = return l
getLabel a@(NetObject{object=(PT p t l _)}) = return l
setLabel a@(NetObject{object=(TP t p _ ps)}) l = return a{object=TP t p l ps}
setLabel a@(NetObject{object=(PT p t _ ps)}) l = return a{object=PT p t l ps}
-- Direction-independent projections of an arc's components.
getArcPlace (NetObject{object=(TP _ p _ _)}) = p
getArcPlace (NetObject{object=(PT p _ _ _)}) = p
getArcTrans (NetObject{object=(TP t _ _ _)}) = t
getArcTrans (NetObject{object=(PT _ t _ _)}) = t
getArcLabel (NetObject{object=(TP _ _ l _)}) = l
getArcLabel (NetObject{object=(PT _ _ l _)}) = l
-- Apply a function to an arc's intermediate point list.
modArcPoints f (no@NetObject{object=(TP t p l ps)})
  = no{object=(TP t p l $ f ps)}
modArcPoints f (no@NetObject{object=(PT p t l ps)})
  = no{object=(PT p t l $ f ps)}
-- MVar-based accessors: each reads the shared node and pairs the
-- requested attribute with the node reference.
getPos nV = readMVar nV >>= \no->return $ Left (nPos $ object no,nV)
getNPos nV = readMVar nV >>= \no->return (nPos $ object no,nV)
getName nV = readMVar nV >>= \no->return (nName $ object no,nV)
getId nV = readMVar nV >>= \no->return (nId no,nV)
getPlaceType nV = readMVar nV >>= \no->return $ (placeType $ nType $ object no,nV)
setPlaceType nV t = updateObject nV $ \o->o{nType=(nType o){pType=t}}
getPlaceInit nV = readMVar nV >>= \no->return $ (placeInit $ nType $ object no,nV)
setPlaceInit nV i = updateObject nV $ \o->o{nType=(nType o){pInit=i}}
setPlaceMark nV m = updateObject nV $ \o->o{nType=(nType o){pInit=m}} -- add separate field later...
getTransGuard nV = readMVar nV >>= \no->return $ (transGuard $ nType $ object no,nV)
setTransGuard nV g = updateObject nV $ \o->o{nType=(nType o){tGuard=g}}
getNodeObject nV = readMVar nV >>= \no->return $ object no
-- Atomically rewrite the object stored in the node's MVar.
updateObject nV f = modifyMVar_ nV $ \no->return no{object=f (object no)}
-- Optional attributes shared by both node kinds: a 'Place' has no guard,
-- a 'Trans' has no type/initial marking -- expressed via 'Maybe'.
class TransAttr a where transGuard :: a -> Maybe String
instance TransAttr Trans where transGuard = Just . tGuard
instance TransAttr Place where transGuard = const Nothing
class PlaceAttr a where placeType :: a -> Maybe String
                        placeInit :: a -> Maybe String
instance PlaceAttr Place where placeType = Just . pType
                               placeInit = Just . pInit
instance PlaceAttr Trans where placeType = const Nothing
                               placeInit = const Nothing
-- }}}
-- defaults-- {{{
-- default editor properties
-- Number of undo states kept by the editor.
historyLength :: Int
historyLength = 10
-- default node attributes
placeColour = green
placeNameColour = rgb 0 150 0
placeTypeColour = rgb 0 150 0
placeInitColour = rgb 0 150 0
transColour = grey
transNameColour = darkgrey
transGuardColour = darkgrey
arcLabelColour = darkgrey
-- Arrow-head geometry (kept polymorphic so they combine with any numeric
-- coordinate type).
arrowWidth,arrowRatio :: Num a => a
arrowWidth = 3
arrowRatio = 5
arrowColour = black
-- Hit-test radii used by findObject/findAux (pixels).
nodeUnit, arcPointUnit :: Num a => a
nodeUnit = 10
arcPointUnit = 5
-- Transition box half-height/half-width in pixels.
-- Fixed signature: the original 'Num a => Int' put a constraint on a type
-- variable that does not occur in the type, which standard Haskell
-- rejects; plain 'Int' is the intended (and equivalent) type.
tHHeight, tHWidth :: Int
tHHeight = 2
tHWidth = 10
-- Place circle radius equals the node hit-test radius.
placeRadius :: Int
placeRadius = nodeUnit
-- Horizontal and vertical transition rectangles, with the offsets that
-- centre them on the node position.
transSize = sz (2*tHWidth) (2*tHHeight)
transOffset = vec (-tHWidth) (-tHHeight)
transVSize = sz (2*tHHeight) (2*tHWidth)
transVOffset = vec (-tHHeight) (-tHWidth)
-- Offsets of the first and second attribute labels next to a node.
nodeAttrOffset1 = vec (08::Int) (12::Int)
nodeAttrOffset2 = vec (08::Int) (24::Int)
-- 4x4 rectangle centred on an arc's intermediate point.
arcPointRect p = rect (pointMove (vec (negate 2) (negate 2)) p) (sz 4 4)
-- }}}
|
haroldcarr/learn-haskell-coq-ml-etc
|
haskell/playpen/hcpn/src/NetAux.hs
|
unlicense
| 5,313
| 0
| 13
| 1,232
| 2,650
| 1,396
| 1,254
| 119
| 2
|
#!/usr/bin/env stack
-- stack --resolver lts-3.15 --install-ghc runghc --package turtle
{-# LANGUAGE OverloadedStrings #-}
import Turtle
-- | The current working directory, rendered as 'Text'.
currentPath :: (MonadIO io) => io Text
currentPath = fmap (format fp) pwd
-- | Print the current working directory (via 'show', so the Text is quoted).
-- Added the top-level type signature the original omitted; the file
-- annotates its other binding, and -Wall warns on missing signatures.
main :: IO ()
main = do
  dir <- currentPath
  print dir
|
NeQuissimus/HaskellShell
|
printpwd.hs
|
apache-2.0
| 279
| 0
| 9
| 58
| 68
| 34
| 34
| 9
| 1
|
module GuiTest where
import Graphics.UI.Gtk
import Data.IORef
-- | Build and run a one-button window: each click bumps a counter held
-- in an 'IORef' and rewrites the button label.
-- NOTE(review): 'onClicked'/'onDestroy' are the old gtk2hs event API --
-- newer gtk2hs uses 'on widget signal'; confirm against the pinned
-- gtk version before modernising.
gui :: IO ()
gui = do initGUI
         ioref <- newIORef 0
         window <- windowNew
         button <- buttonNew
         set window [ containerBorderWidth := 10,
                      containerChild := button ]
         set button [ buttonLabel := "Button clicked 0 time(s)" ]
         onClicked button (whatToDoWith ioref button)
         onDestroy window mainQuit
         widgetShowAll window
         mainGUI
-- | Increment the click counter and show the new count in the label.
whatToDoWith :: (IORef Int) -> Button -> IO ()
whatToDoWith counter btn = do
    clicks <- readIORef counter
    let next = clicks + 1
    set btn [ buttonLabel := ("Button clicked " ++ show next ++ " time(s)") ]
    writeIORef counter next
|
alexandersgreen/alex-haskell
|
GuiTest/GuiTest.hs
|
apache-2.0
| 753
| 0
| 16
| 260
| 225
| 108
| 117
| 19
| 1
|
{-# LANGUAGE OverloadedStrings #-}
import Control.Applicative
import Data.Attoparsec
import qualified Data.Attoparsec.ByteString.Char8 as AC
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as C
import Data.List
import Data.Monoid
import System.Environment
import System.IO
--
import Prelude hiding (take)
-- | Usage: simplereplace ORIGINALSTR NEWSTR SRCFILE TGTFILE
-- Splits SRCFILE on ORIGINALSTR, rejoins with NEWSTR, writes TGTFILE.
-- The '!!' indexing is safe: it only runs after the length-4 check.
main :: IO ()
main = do
  args <- getArgs
  if length args /= 4
    then putStrLn "simplereplace originalstr newstr srcfile tgtfile"
    else do
      bstr <- B.readFile (args !! 2)
      let r = parseOnly (findparser (C.pack (args !! 0))) bstr
      case r of
        Left err -> print err
        Right lst -> let nlst = intersperse (C.pack (args !! 1)) lst
                     in B.writeFile (args !! 3) (mconcat nlst)
-- | Split the input into the chunks between occurrences of @bstr@
-- (matches themselves are dropped); the final chunk runs to end of
-- input, so the result has one more element than there are matches.
-- Removed the redundant 'try': attoparsec parsers always backtrack,
-- and its 'try' is documented as a Parsec-compatibility no-op.
findparser :: B.ByteString -> Parser [B.ByteString]
findparser bstr = do
    strs <- many $ manyTill AC.anyChar (AC.string bstr)
    str <- manyTill AC.anyChar endOfInput
    return $ (map C.pack strs) ++ [C.pack str]
|
wavewave/misc-utils
|
oldcode/exe/simplereplace.hs
|
bsd-2-clause
| 1,062
| 0
| 21
| 295
| 347
| 180
| 167
| 28
| 3
|
module Turnpike.Error where
-- | Placeholder error type with a single constructor; presumably to be
-- fleshed out as the library grows -- TODO confirm intended use.
data T = T
  deriving (Eq, Ord, Show)
|
massysett/turnpike
|
library/lib/Turnpike/Error.hs
|
bsd-3-clause
| 67
| 0
| 6
| 14
| 28
| 16
| 12
| 3
| 0
|
-- |
-- Module: Control.Wire.TimedMap
-- Copyright: (c) 2012 Ertugrul Soeylemez
-- License: BSD3
-- Maintainer: Ertugrul Soeylemez <es@ertes.de>
--
-- Timed maps for efficient cleanups in the context wires.
module Control.Wire.TimedMap
( -- * Timed maps
TimedMap,
-- * Queries
findWithDefault,
lookup,
-- * Construction
empty,
-- * Insertion
insert,
-- * Deletion
cleanup,
cut,
delete
)
where
import qualified Data.Map as M
import qualified Data.Set as S
import Control.Monad
import Data.Data
import Data.Map (Map)
import Data.Set (Set)
import Prelude hiding (lookup)
-- | A timed map is a map with an additional index based on time.
-- Invariant maintained by the operations below: a key @k@ stored with
-- time @t@ in the first map also appears in the key set at @t@ in the
-- second.
data TimedMap t k a =
    TimedMap !(Map k (a, t)) !(Map t (Set k))
    deriving (Data, Show, Typeable)
-- | Remove all elements whose time is older than *or equal to* the given
-- time.  (The @t0@ bucket found by 'M.splitLookup' is folded into the
-- removed part below, so @t0@ itself is not kept.)
cleanup :: (Ord k, Ord t) => t -> TimedMap t k a -> TimedMap t k a
cleanup t0 (TimedMap mk' mt') = TimedMap mk mt
    where
    -- splitLookup: (< t0, entry at t0 if any, > t0); only the "greater"
    -- part survives as the new time index.
    (older', middle, mt) = M.splitLookup t0 mt'
    -- All keys stored at removed times, rebuilt as a map so 'M.\\' can
    -- subtract them from the key index in one pass.
    older =
        M.fromDistinctAscList .
        map (, ()) .
        S.toList .
        M.foldl' S.union S.empty .
        maybe id (M.insert t0) middle $ older'
    mk = mk' M.\\ older
-- | Remove all but the given number of latest elements.
-- Repeatedly deletes one key from the oldest time bucket until the size
-- bound holds (O(d log n) for d deletions).
cut :: (Ord k, Ord t) => Int -> TimedMap t k a -> TimedMap t k a
cut n !tm@(TimedMap mk mt)
    | M.size mk > n =
        let k = S.findMin . snd . M.findMin $ mt in
        cut n (delete k tm)
    | otherwise = tm
-- | Deletes the given key from the timed map.
-- Also removes the key from its time bucket, dropping the bucket
-- entirely when it becomes empty (the 'guard' below).
delete :: (Ord k, Ord t) => k -> TimedMap t k a -> TimedMap t k a
delete k (TimedMap mk' mt') = TimedMap mk mt
    where
    mk = M.delete k mk'
    mt = case M.lookup k mk' of
           Nothing -> mt'
           Just (_, t') ->
               let alter Nothing = Nothing
                   alter (Just s') = do
                       let s = S.delete k s'
                       guard (not (S.null s))
                       return s
               in M.alter alter t' mt'
-- | Like 'lookup', but with a default value, if the key is not in the
-- map.
findWithDefault :: (Ord k) => (a, t) -> k -> TimedMap t k a -> (a, t)
findWithDefault def k tm =
    case lookup k tm of
      Nothing    -> def
      Just found -> found
-- | Empty timed map (both indices empty -- trivially satisfies the
-- invariant).
empty :: TimedMap t k a
empty = TimedMap M.empty M.empty
-- | Insert into the timed map.
-- Overwrites any existing entry for the key and, if the key was stored
-- under an older time, moves it out of that time bucket (the 'alter'
-- helper mirrors the one in 'delete').
insert :: (Ord k, Ord t) => t -> k -> a -> TimedMap t k a -> TimedMap t k a
insert t k x (TimedMap mk' mt') = TimedMap mk mt
    where
    mk = M.insert k (x, t) mk'
    mt = case M.lookup k mk' of
           Nothing -> M.insertWith S.union t (S.singleton k) mt'
           Just (_, t') ->
               let alter Nothing = Nothing
                   alter (Just s') = do
                       let s = S.delete k s'
                       guard (not (S.null s))
                       return s
               in M.insertWith S.union t (S.singleton k) .
                  M.alter alter t' $ mt'
-- | Look up the given key, returning its value together with the
-- timestamp it was inserted at.
lookup :: (Ord k) => k -> TimedMap t k a -> Maybe (a, t)
lookup key (TimedMap keyMap _timeIdx) = M.lookup key keyMap
|
MaxDaten/netwire
|
Control/Wire/TimedMap.hs
|
bsd-3-clause
| 3,120
| 0
| 21
| 1,066
| 1,072
| 554
| 518
| -1
| -1
|
----------------------------------------------------------------------------
-- |
-- Module : DART.Examples.Testing.RecurrentSMT
-- Copyright : (c) Carlos López-Camey, University of Freiburg
-- License : BSD-3
--
-- Maintainer : c.lopez@kmels.net
-- Stability : stable
--
--
-- Useful definitions to test the SMT solver
-----------------------------------------------------------------------------
module DART.Examples.Testing.RecurrentSMT (arithmeticBranches,foldOrdered3) where
-- | Branch on arithmetic comparisons of the two inputs; the final
-- branch exists for the SMT solver to try to prove reachable, and
-- raises an error if ever hit.
arithmeticBranches :: Int -> Int -> Int
arithmeticBranches x y
    | x < y         = x * 3
    | x < twiceY    = y
    | 4 * x > 5 * y = 0
    | otherwise     = error $ " If we reach this code, we've won."
    where twiceY = 2 * y
-- | Three-argument predicate (arity greater than 2), used to test
-- pretty printing to Z3: holds when the arguments strictly decrease.
isOrdered3 :: Int -> Int -> Int -> Bool
isOrdered3 hi mid lo = and [hi > mid, mid > lo]
data Ordered = Ordered | NotOrdered deriving Show
-- | Branch on 'isOrdered3': yield 'Ordered' for strictly decreasing
-- inputs, otherwise raise an error carrying the shown 'NotOrdered' tag.
foldOrdered3 :: Int -> Int -> Int -> Ordered
foldOrdered3 a b c
    | isOrdered3 a b c = Ordered
    | otherwise        = error . show $ NotOrdered
|
kmels/dart-haskell
|
examples/testing/RecurrentSMT.hs
|
bsd-3-clause
| 1,250
| 0
| 13
| 312
| 239
| 138
| 101
| 15
| 4
|
{-# LANGUAGE OverloadedStrings #-}
module Main (main)
where
import Data.ByteString.Lazy (ByteString)
import Data.ByteString.Lazy.Internal (unpackChars)
import ReadXLSX
import System.Directory
import Test.Tasty (defaultMain, testGroup)
import Test.Tasty.HUnit (testCase)
import Test.Tasty.HUnit ((@=?))
import Test.Tasty.SmallCheck (testProperty)
import WriteXLSX
import WriteXLSX.ExtractKeys
-- | Tasty entry point: runs the HUnit cases below against the fixture
-- workbooks under @test/@.
--
-- NOTE(review): '@=?' reads @expected \@=? actual@; some cases below put
-- the computed value on the left.  Equality is symmetric so results are
-- unaffected, but failure messages will label the sides backwards.
main :: IO ()
main = defaultMain $
        testGroup "Tests"
          [ testCase "extractKeys" $
              testExtractKeys @=? ["a", "\181", "\181"],
            testCase "read xl1" $ do
              json <- testRXL1
              json @=? "{\"A\":[1]}",
            testCase "read xl1 by index" $ do
              json <- testRXL1bis
              json @=? "{\"A\":[1]}",
            testCase "read xl2" $ do
              json <- testRXL2
              json @=? "{\"Col1\":[\"\195\169\",\"\194\181\",\"b\"],\"Col2\":[\"2017-01-25\",\"2017-01-26\",\"2017-01-27\"],\"\194\181\":[1,1,1]}",
            testCase "write and read" $ do
              json <- testWriteAndRead
              json @=? True -- "{\"Col1\":[1,2]}"
          ]
-- | Exercise 'extractKeys' on JSON containing the same key both as a
-- @\\u00b5@ escape and as a literal µ character; both should decode to
-- the same string (checked against @"\181"@ in 'main').
testExtractKeys :: [String]
testExtractKeys = extractKeys "{\"a\":2,\"\\u00b5\":1,\"µ\":2}"
-- | Read fixture sheets to JSON.  The Bool/Nothing arguments are passed
-- straight through to the ReadXLSX API; their meaning is defined there.
testRXL1 :: IO ByteString
testRXL1 = sheetnameToJSON "test/simpleExcel.xlsx" "Sheet1" "data" True True Nothing Nothing
-- | Same sheet as 'testRXL1', addressed by 1-based index instead of name.
testRXL1bis :: IO ByteString
testRXL1bis = sheetindexToJSON "test/simpleExcel.xlsx" 1 "data" True True Nothing Nothing
-- | UTF-8 fixture: non-ASCII column names and cell values.
testRXL2 :: IO ByteString
testRXL2 = sheetnameToJSON "test/utf8.xlsx" "Sheet1" "data" True True Nothing Nothing
-- | Round-trip check: write a one-column workbook to a temporary file,
-- read it back, and compare the JSON payloads for equality.
testWriteAndRead :: IO Bool
testWriteAndRead = do
    let json = "{\"Col1\":[1,2]}"
    tmpDir <- getTemporaryDirectory
    let tmpFile = tmpDir ++ "/xlsx.xlsx"
    -- write1's result is not needed; discard it explicitly rather than
    -- binding it to an unused name (previously "write"), which triggered
    -- an unused-binding warning.
    _ <- write1 json True tmpFile False
    jjson <- sheetnameToJSON tmpFile "Sheet1" "data" True True Nothing Nothing
    return $ json == unpackChars jjson
|
stla/jsonxlsx
|
test/Main.hs
|
bsd-3-clause
| 1,923
| 0
| 11
| 446
| 438
| 229
| 209
| 45
| 1
|
{-# LANGUAGE GADTs, ScopedTypeVariables, Rank2Types #-}
import Data.Aeson
import Data.ByteString.Lazy (ByteString)
import Data.Set (Set, toList)
import Data.Text (Text)
import Database.Riak.Messages (Key(..), BucketName(..))
infixr 9 $>
-- NOTE(review): this ($>) is a local filter-composition operator and
-- shadows/clashes with Data.Functor.($>) if that is ever imported.
-- toJSON (Finalize t) = toJSON t
-- toJSON (a :> (Finalize b)) = toJSON $ [to]
-- toJSON (a :> b) =
-- | Name of a Riak secondary index.
newtype IndexName = IndexName { fromIndexName :: ByteString }
-- | An Erlang module/function pair naming a map-reduce phase function.
data ErlangQuery = ErlangQuery { erlangModule :: ByteString
                               , function :: ByteString }
-- | A JavaScript phase function: inline source, stored under a
-- bucket/key, or a named built-in.
data JavascriptQuery = SourceQuery ByteString
                     | Stored BucketName Key
                     | BuiltIn ByteString
-- | Map-reduce input specification: a secondary-index range query, a
-- bucket with key filters, or an explicit list of bucket/key pairs
-- (each with optional per-input data).
data Inputs = Index { ixBucket :: Maybe BucketName
                    , index :: Maybe IndexName
                    , ixStart :: Maybe ByteString
                    , ixEnd :: Maybe ByteString
                    , ixKey :: Maybe Key
                    }
            | KeyFilter { keyFilterBucket :: BucketName
                        , keyFilters :: Filter String ()
                        }
            | Inputs { inputs :: [(BucketName, Key, Maybe ByteString)] }
-- | Serialize inputs to the map-reduce request JSON.  Explicit input
-- lists become [bucket, key] or [bucket, key, data] arrays.
instance ToJSON Inputs where
  toJSON (Index bucket index start end key) = object [ "bucket" .= bucket, "index" .= index, "key" .= key, "start" .= start, "end" .= end ]
  toJSON (KeyFilter bucket filter) = object [ "bucket" .= bucket, "key_filters" .= fromFilter filter ]
  toJSON (Inputs inputs) = toJSON $ map detuple inputs
    where
      -- Drop the optional data element when absent.
      detuple (b, k, Nothing) = [toJSON b, toJSON k]
      detuple (b, k, Just d) = [toJSON b, toJSON k, toJSON d]
-- Newtype wrappers serialize as their underlying ByteString.
instance ToJSON BucketName where
  toJSON = toJSON . fromBucketName
instance ToJSON Key where
  toJSON = toJSON . fromKey
instance ToJSON IndexName where
  toJSON = toJSON . fromIndexName
-- data MapReduce = MapReduce { mrInputs :: Inputs
--                            , mrQuery :: [Phase] }
-- | One phase of a map-reduce query, parameterized by the phase-function
-- representation (e.g. 'ErlangQuery' or 'JavascriptQuery').  The Keep
-- fields control whether the phase's results are returned to the client.
data Phase a = Map { mapSource :: a
                   , mapArg :: ByteString
                   , mapKeep :: Maybe Bool
                   }
             | Reduce { reduceSource :: a
                      , reduceArg :: ByteString
                      , reduceKeep :: Maybe Bool
                      }
             | Link { bucket :: Maybe ByteString
                    , tag :: Maybe ByteString
                    , linkKeep :: Maybe Bool
                    }
-- NOTE(review): the Map and Reduce cases bind src/a/k but emit empty
-- objects, discarding the phase function, argument and keep flag — this
-- looks unfinished.  Confirm the intended mapred phase schema before
-- relying on this instance.
instance ToJSON (Phase a) where
  toJSON (Map src a k) = object $ [ "map" .= object []]
  toJSON (Reduce src a k) = object $ [ "reduce" .= object []]
  toJSON (Link bkt tag k) = object $ [ "link" .= object [ "bucket" .= bkt, "tag" .= tag, "keep" .= k ]]
{-
data ReducePhase a = { language :: a
                     ,
                     }
-}
-- | Key-filter transforms, typed by their input and output value types
-- so that pipelines built with ($>) compose only when the types line up.
data Transform from to where
  IntToString :: Transform Int String
  StringToInt :: Transform String Int
  FloatToString :: Transform Double String
  StringToFloat :: Transform String Double
  ToUpper :: Transform String String
  ToLower :: Transform String String
  -- | Split the key on a separator and select the n-th token.
  Tokenize :: String -> Int -> Transform String String
  UrlDecode :: Transform String String
-- | A key-filter pipeline, phantom-typed by its input and output value
-- types; the payload is the already-serialized list of filter steps.
newtype Filter a b = Filter { fromFilter :: [Value] }
-- | Forget the pipeline's result type (the JSON payload is unchanged).
finalizeFilter :: Filter String a -> Filter String ()
finalizeFilter = Filter . fromFilter
-- | Prepend a transform onto a filter pipeline whose input type matches
-- the transform's output type.
($>) :: (ToJSON a, ToJSON b, ToJSON c) => Transform a b -> Filter b c -> Filter a c
t $> (Filter ts) = Filter (toJSON t : ts)
-- | Lift a single predicate into a one-element filter pipeline.
predicate :: ToJSON b => Predicate a b -> Filter a b
predicate p = Filter [toJSON p]
-- | Terminal key-filter predicates.  'And', 'Or' and 'Not' combine
-- sub-filters/predicates; the rest compare the (transformed) key.
data Predicate a b where
  GreaterThan :: Num b => b -> Predicate b b
  LessThan :: Num b => b -> Predicate b b
  GreaterThanEq :: Num b => b -> Predicate b b
  LessThanEq :: Num b => b -> Predicate b b
  Between :: Num b => b -> b -> Bool -> Predicate b b
  Matches :: String -> Predicate String String
  NotEqual :: b -> Predicate b b
  Equal :: b -> Predicate b b
  SetMember :: Set b -> Predicate b b
  SimilarTo :: String -> Int -> Predicate String String
  StartsWith :: String -> Predicate String String
  EndsWith :: String -> Predicate String String
  And :: (ToJSON b, ToJSON c) => Filter a b -> Filter a c -> Predicate a d
  Or :: (ToJSON b, ToJSON c) => Filter a b -> Filter a c -> Predicate a d
  Not :: ToJSON b => Predicate a b -> Predicate a b
-- | Serialize predicates to the Riak key-filter wire format: a JSON
-- array whose first element names the operation, followed by operands.
instance ToJSON b => ToJSON (Predicate a b) where
  toJSON (GreaterThan n) = toJSON [toJSON ("greater_than" :: Text), toJSON n]
  toJSON (LessThan n) = toJSON [toJSON ("less_than" :: Text), toJSON n]
  toJSON (GreaterThanEq n) = toJSON [toJSON ("greater_than_eq" :: Text), toJSON n]
  toJSON (LessThanEq n) = toJSON [toJSON ("less_than_eq" :: Text), toJSON n]
  toJSON (Between x y i) = toJSON [toJSON ("between" :: Text), toJSON x, toJSON y, toJSON i]
  toJSON (Matches str) = toJSON [toJSON ("matches" :: Text), toJSON str]
  toJSON (NotEqual x) = toJSON [toJSON ("neq" :: Text), toJSON x]
  toJSON (Equal x) = toJSON [toJSON ("eq" :: Text), toJSON x]
  toJSON (SetMember s) = toJSON [toJSON ("set_member" :: Text), toJSON $ toList s]
  toJSON (SimilarTo s d) = toJSON [toJSON ("similar_to" :: Text), toJSON s, toJSON d]
  toJSON (StartsWith str) = toJSON [toJSON ("starts_with" :: Text), toJSON str]
  toJSON (EndsWith str) = toJSON [toJSON ("ends_with" :: Text), toJSON str]
  toJSON (And l r) = toJSON [toJSON ("and" :: Text), toJSON l, toJSON r]
  toJSON (Or l r) = toJSON [toJSON ("or" :: Text), toJSON l, toJSON r]
  toJSON (Not t) = toJSON [toJSON ("not" :: Text), toJSON t]
-- | Serialize transforms to the Riak key-filter wire format: a JSON
-- array whose first element names the operation.
instance (ToJSON a, ToJSON b) => ToJSON (Transform a b) where
  toJSON IntToString = toJSON ["int_to_string" :: Text]
  toJSON StringToInt = toJSON ["string_to_int" :: Text]
  toJSON FloatToString = toJSON ["float_to_string" :: Text]
  toJSON StringToFloat = toJSON ["string_to_float" :: Text]
  toJSON ToUpper = toJSON ["to_upper" :: Text]
  toJSON ToLower = toJSON ["to_lower" :: Text]
  -- BUG FIX: the operation name was missing here, producing [sep, n]
  -- instead of ["tokenize", sep, n] as the key-filter format requires
  -- (every sibling case includes its operation tag).
  toJSON (Tokenize str i) = toJSON [toJSON ("tokenize" :: Text), toJSON str, toJSON i]
  toJSON UrlDecode = toJSON ["url_decode" :: Text]
-- | A filter pipeline serializes as the plain array of its steps.
instance (ToJSON b) => ToJSON (Filter a b) where
  toJSON = toJSON . fromFilter
-- instance ToJSON MapReduce where
--   toJSON (MapReduce inputs query) = object [ "inputs" .= inputs, "query" .= query ]
|
iand675/hiker
|
Database/Riak/MapReduce.hs
|
bsd-3-clause
| 6,132
| 16
| 11
| 1,682
| 2,142
| 1,127
| 1,015
| 105
| 1
|
--------------------------------------------------------------------
-- |
-- Module : Text.RSS.Syntax
-- Copyright : (c) Galois, Inc. 2008,
-- (c) Sigbjorn Finne 2009-
-- License : BSD3
--
-- Maintainer: Sigbjorn Finne <sof@forkIO.com>
-- Stability : provisional
--
-- The basic syntax for putting together feeds.
--
-- For instance, to create a feed with a single item item:
-- (nullRSS \"rss title\" \"link\") {rssChannel=(nullChannel \"channel title\" \"link\") {rssItems=[(nullItem \"item title\")]}}
--------------------------------------------------------------------
module Text.RSS.Syntax where
import Text.XML.Light as XML
-- * Core Types
-- ^The Radio Userland version of RSS documents\/feeds.
-- (versions 0.9x, 2.x)
data RSS
 = RSS
     { rssVersion :: String
     , rssAttrs :: [XML.Attr]
     , rssChannel :: RSSChannel
     , rssOther :: [XML.Element]
     }
     deriving (Show)
-- | Untyped URL representation; no validation is performed.
type URLString = String
-- | RFC 822 conforming.
type DateString = String
-- | Feed-level metadata plus the list of items.
data RSSChannel
 = RSSChannel
     { rssTitle :: String
     , rssLink :: URLString
     , rssDescription :: String
     , rssItems :: [RSSItem]
     , rssLanguage :: Maybe String
     , rssCopyright :: Maybe String
     , rssEditor :: Maybe String
     , rssWebMaster :: Maybe String
     , rssPubDate :: Maybe DateString -- ^ rfc 822 conforming.
     , rssLastUpdate :: Maybe DateString -- ^ rfc 822 conforming.
     , rssCategories :: [RSSCategory]
     , rssGenerator :: Maybe String
     , rssDocs :: Maybe URLString
     , rssCloud :: Maybe RSSCloud
     , rssTTL :: Maybe Integer
     , rssImage :: Maybe RSSImage
     , rssRating :: Maybe String
     , rssTextInput :: Maybe RSSTextInput
     , rssSkipHours :: Maybe [Integer]
     , rssSkipDays :: Maybe [String]
     , rssChannelOther :: [XML.Element]
     }
     deriving (Show)
-- | A single feed entry.  All fields are optional per the RSS spec,
-- except that either a title or a description should be present.
data RSSItem
 = RSSItem
     { rssItemTitle :: Maybe String
     , rssItemLink :: Maybe URLString
     , rssItemDescription :: Maybe String -- ^if not present, the title is. (per spec, at least.)
     , rssItemAuthor :: Maybe String
     , rssItemCategories :: [RSSCategory]
     , rssItemComments :: Maybe URLString
     , rssItemEnclosure :: Maybe RSSEnclosure
     , rssItemGuid :: Maybe RSSGuid
     , rssItemPubDate :: Maybe DateString
     , rssItemSource :: Maybe RSSSource
     , rssItemAttrs :: [XML.Attr]
     , rssItemOther :: [XML.Element]
     }
     deriving (Show)
-- | The channel an item was syndicated from.
data RSSSource
 = RSSSource
     { rssSourceURL :: URLString
     , rssSourceAttrs :: [XML.Attr]
     , rssSourceTitle :: String
     }
     deriving (Show)
-- | A media object attached to an item.
data RSSEnclosure
 = RSSEnclosure
     { rssEnclosureURL :: URLString
     , rssEnclosureLength :: Maybe Integer
     , rssEnclosureType :: String
     , rssEnclosureAttrs :: [XML.Attr]
     }
     deriving (Show)
-- | A category tag, optionally scoped to a domain.
data RSSCategory
 = RSSCategory
     { rssCategoryDomain :: Maybe String
     , rssCategoryAttrs :: [XML.Attr]
     , rssCategoryValue :: String
     }
     deriving (Show)
-- | A globally unique item identifier, optionally a permanent URL.
data RSSGuid
 = RSSGuid
     { rssGuidPermanentURL :: Maybe Bool
     , rssGuidAttrs :: [XML.Attr]
     , rssGuidValue :: String
     }
     deriving (Show)
-- | An image to display with the channel.
data RSSImage
 = RSSImage
     { rssImageURL :: URLString -- the URL to the image resource.
     , rssImageTitle :: String
     , rssImageLink :: URLString -- URL that the image resource should be an href to.
     , rssImageWidth :: Maybe Integer
     , rssImageHeight :: Maybe Integer
     , rssImageDesc :: Maybe String
     , rssImageOther :: [XML.Element]
     }
     deriving (Show)
-- | Registration endpoint for publish-subscribe update notification.
data RSSCloud
 = RSSCloud
     { rssCloudDomain :: Maybe String
     , rssCloudPort :: Maybe String -- on purpose (i.e., not an int)
     , rssCloudPath :: Maybe String
     , rssCloudRegisterProcedure :: Maybe String
     , rssCloudProtocol :: Maybe String
     , rssCloudAttrs :: [XML.Attr]
     }
     deriving (Show)
-- | A text input box to display with the channel.
data RSSTextInput
 = RSSTextInput
     { rssTextInputTitle :: String
     , rssTextInputDesc :: String
     , rssTextInputName :: String
     , rssTextInputLink :: URLString
     , rssTextInputAttrs :: [XML.Attr]
     , rssTextInputOther :: [XML.Element]
     }
     deriving (Show)
-- * Default Constructors:
-- | Minimal RSS 2.0 document containing an (item-less) channel.
nullRSS :: String -- ^channel title
        -> URLString -- ^channel link
        -> RSS
nullRSS title link =
  RSS
    { rssVersion = "2.0"
    , rssAttrs = []
    , rssChannel = nullChannel title link
    , rssOther = []
    }
-- | Minimal channel; the description defaults to the title.
nullChannel :: String -- ^rssTitle
            -> URLString -- ^rssLink
            -> RSSChannel
nullChannel title link =
  RSSChannel
    { rssTitle = title
    , rssLink = link
    , rssDescription = title
    , rssItems = []
    , rssLanguage = Nothing
    , rssCopyright = Nothing
    , rssEditor = Nothing
    , rssWebMaster = Nothing
    , rssPubDate = Nothing
    , rssLastUpdate = Nothing
    , rssCategories = []
    , rssGenerator = Nothing
    , rssDocs = Nothing
    , rssCloud = Nothing
    , rssTTL = Nothing
    , rssImage = Nothing
    , rssRating = Nothing
    , rssTextInput = Nothing
    , rssSkipHours = Nothing
    , rssSkipDays = Nothing
    , rssChannelOther = []
    }
-- | Minimal item carrying only a title.
nullItem :: String -- ^title
         -> RSSItem
nullItem title =
  RSSItem
    { rssItemTitle = Just title
    , rssItemLink = Nothing
    , rssItemDescription = Nothing
    , rssItemAuthor = Nothing
    , rssItemCategories = []
    , rssItemComments = Nothing
    , rssItemEnclosure = Nothing
    , rssItemGuid = Nothing
    , rssItemPubDate = Nothing
    , rssItemSource = Nothing
    , rssItemAttrs = []
    , rssItemOther = []
    }
-- | Source element with no extra attributes.
nullSource :: URLString -- ^source URL
           -> String -- ^title
           -> RSSSource
nullSource url title =
  RSSSource
    { rssSourceURL = url
    , rssSourceAttrs = []
    , rssSourceTitle = title
    }
-- | Enclosure with no extra attributes.
nullEnclosure :: URLString -- ^enclosure URL
              -> Maybe Integer -- ^enclosure length
              -> String -- ^enclosure type
              -> RSSEnclosure
nullEnclosure url mb_len ty =
  RSSEnclosure
    { rssEnclosureURL = url
    , rssEnclosureLength = mb_len
    , rssEnclosureType = ty
    , rssEnclosureAttrs = []
    }
-- | Category with no domain and no extra attributes.
newCategory :: String -- ^category Value
            -> RSSCategory
newCategory nm =
  RSSCategory
    { rssCategoryDomain = Nothing
    , rssCategoryAttrs = []
    , rssCategoryValue = nm
    }
-- | Guid with the isPermaLink flag left unspecified.
nullGuid :: String -- ^guid value
         -> RSSGuid
nullGuid v =
  RSSGuid
    { rssGuidPermanentURL = Nothing
    , rssGuidAttrs = []
    , rssGuidValue = v
    }
-- | Guid explicitly flagged as a permanent link.
nullPermaGuid :: String -- ^guid value
              -> RSSGuid
nullPermaGuid v = (nullGuid v){rssGuidPermanentURL=Just True}
-- | Image with no dimensions or description.
nullImage :: URLString -- ^imageURL
          -> String -- ^imageTitle
          -> URLString -- ^imageLink
          -> RSSImage
nullImage url title link =
  RSSImage
    { rssImageURL = url
    , rssImageTitle = title
    , rssImageLink = link
    , rssImageWidth = Nothing
    , rssImageHeight = Nothing
    , rssImageDesc = Nothing
    , rssImageOther = []
    }
-- | Cloud element with every field unset.
nullCloud :: RSSCloud
nullCloud =
  RSSCloud
    { rssCloudDomain = Nothing
    , rssCloudPort = Nothing
    , rssCloudPath = Nothing
    , rssCloudRegisterProcedure = Nothing
    , rssCloudProtocol = Nothing
    , rssCloudAttrs = []
    }
-- | Text input whose description defaults to the title.
nullTextInput :: String -- ^inputTitle
              -> String -- ^inputName
              -> URLString -- ^inputLink
              -> RSSTextInput
nullTextInput title nm link =
  RSSTextInput
    { rssTextInputTitle = title
    , rssTextInputDesc = title
    , rssTextInputName = nm
    , rssTextInputLink = link
    , rssTextInputAttrs = []
    , rssTextInputOther = []
    }
|
danfran/feed
|
src/Text/RSS/Syntax.hs
|
bsd-3-clause
| 8,123
| 0
| 10
| 2,634
| 1,559
| 952
| 607
| 223
| 1
|
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
module Data.Xml.Parser where
import Control.Applicative
import Control.Lens(Optic, Optic', Choice, prism, set, over, (^?))
import Data.Bifunctor(Bifunctor(bimap))
import Data.Bool(bool)
import Data.Digit(Digit, digitC)
import Data.Functor.Alt(Alt((<!>)))
import Data.Functor.Apply(Apply((<.>)))
import Data.Functor.Bind(Bind((>>-)))
import Data.List.NonEmpty(NonEmpty((:|)), toList)
import Data.Traversable(Traversable(traverse))
import Data.Void(Void)
import Prelude
-- | Parser input is a plain character stream.
type Input =
  String
-- | Outcome of a parse step: an error, or a value together with the
-- remaining (unconsumed) input.
data Result e a =
  ErrorResult e
  | ValueResult Input a
  deriving (Eq, Ord, Show)
-- | Map over the success value; errors pass through unchanged.
instance Functor (Result e) where
  fmap _ (ErrorResult e) =
    ErrorResult e
  fmap f (ValueResult s a) =
    ValueResult s (f a)
-- | Map over error and success independently.
instance Bifunctor Result where
  bimap f _ (ErrorResult e) =
    ErrorResult (f e)
  bimap _ g (ValueResult s a) =
    ValueResult s (g a)
-- | Types with a prism onto an error-result constructor.
class AsErrorResult p f x where
  _ErrorResult ::
    Optic p f (x e a) (x z a) e z
instance (Choice p, Applicative f) => AsErrorResult p f Result where
  _ErrorResult =
    prism
      ErrorResult
      (\r -> case r of
        ErrorResult e ->
          Right e
        ValueResult s a ->
          Left (ValueResult s a))
-- | Types with a prism onto a (remaining input, value) success pair.
class AsValueResult p f x where
  _ValueResult ::
    Optic p f (x e a) (x e b) (String, a) (String, b)
instance (Choice p, Applicative f) => AsValueResult p f Result where
  _ValueResult =
    prism
      (uncurry ValueResult)
      (\r -> case r of
        ErrorResult e ->
          Left (ErrorResult e)
        ValueResult s a ->
          Right (s, a))
-- | A stateful parser: given the input and a user state @s@, produce an
-- updated state and a 'Result'.  Note the state update from a failed
-- branch is kept (see 'Alt' below, where it is discarded on retry).
newtype Parser s e a =
  Parser (Input -> s -> (s, Result e a))
instance Functor (Parser s e) where
  fmap f (Parser p) =
    Parser (\i s ->
      let (t, r) = p i s
      in (t, fmap f r))
-- | Sequential composition: run the function parser, then the argument
-- parser on the remaining input, threading state through.
instance Apply (Parser s e) where
  Parser f <.> Parser a =
    Parser (\i s ->
      let (t, r) = f i s
      in case r of
           ErrorResult e ->
             (t, ErrorResult e)
           ValueResult j g ->
             fmap (fmap (g$)) (a j t))
instance Applicative (Parser s e) where
  (<*>) =
    (<.>)
  -- pure consumes nothing and leaves the state unchanged.
  pure a =
    Parser (\i s ->
      (s, ValueResult i a))
instance Bind (Parser s e) where
  Parser p >>- f =
    Parser (\i s ->
      let (t, r) = p i s
      in case r of
           ErrorResult e ->
             (t, ErrorResult e)
           ValueResult j a ->
             let Parser q = f a
             in q j t)
instance Monad (Parser s e) where
  (>>=) =
    (>>-)
  return =
    pure
-- | Left-biased choice: on failure of the first parser, retry the second
-- from the ORIGINAL input and state (the failed branch's state is dropped).
instance Alt (Parser s e) where
  Parser p <!> Parser q =
    Parser (\i s ->
      let (t, r) = p i s
      in case r of
           ErrorResult _ ->
             q i s
           ValueResult j a ->
             (t, ValueResult j a))
-- | Replace a parser's error with a constant.
(.=.) ::
  e
  -> Parser s x a
  -> Parser s e a
e .=. Parser p =
  Parser (\i ->
    fmap (set _ErrorResult e) . p i)
infixl 6 .=.
-- | Map a function over a parser's error.
(.~.) ::
  (x -> e)
  -> Parser s x a
  -> Parser s e a
e .~. Parser p =
  Parser (\i ->
    fmap (over _ErrorResult e) . p i)
infixl 6 .~.
-- | User states that can be advanced by each consumed character
-- (e.g. position tracking).
class SemiCharState s where
  updateCharState ::
    Char
    -> s
    -> s
instance SemiCharState () where
  updateCharState _ () =
    ()
-- | 'SemiCharState' with a designated initial state.
class SemiCharState s => CharState s where
  emptyCharState ::
    s
instance CharState () where
  emptyCharState =
    ()
-- | A parser that consumes nothing and always fails with the given error,
-- leaving the user state untouched.
failed ::
  e
  -> Parser s e a
failed err =
  Parser (\_input st -> (st, ErrorResult err))
-- | An error that is either end-of-input or some more specific error @a@.
data UnexpectedEofOr a =
  UnexpectedEof
  | UnexpectedEofOr a
  deriving (Eq, Ord, Show)
-- | Prism onto the end-of-input case.
class AsUnexpectedEof p f x where
  _UnexpectedEof ::
    Optic' p f (x a) ()
instance (Choice p, Applicative f) => AsUnexpectedEof p f UnexpectedEofOr where
  _UnexpectedEof =
    prism
      (\() -> UnexpectedEof)
      (\e -> case e of
        UnexpectedEof ->
          Right ()
        UnexpectedEofOr a ->
          Left (UnexpectedEofOr a))
-- | Prism onto the wrapped specific error.
class AsUnexpectedEofOr p f x where
  _UnexpectedEofOr ::
    Optic p f (x a) (x b) a b
instance (Choice p, Applicative f) => AsUnexpectedEofOr p f UnexpectedEofOr where
  _UnexpectedEofOr =
    prism
      UnexpectedEofOr
      (\e -> case e of
        UnexpectedEof ->
          Left UnexpectedEof
        UnexpectedEofOr a ->
          Right a)
-- | Consume a single character, updating the user state with it; fails
-- with 'UnexpectedEof' on empty input.
character ::
  SemiCharState s =>
  Parser s (UnexpectedEofOr Void) Char
character =
  Parser (\i s ->
    case i of
      [] ->
        (s, ErrorResult UnexpectedEof)
      h:t ->
        (updateCharState h s, ValueResult t h))
-- | Run the parser zero or more times; always succeeds (falls back to
-- the empty list when the first run fails).
list ::
  Parser s e a
  -- ?always succeeds -- new parser type?
  -> Parser s e [a]
list p =
  fmap toList (list1 p) <!> pure []
-- | Run the parser one or more times, collecting the results into a
-- non-empty list.
list1 ::
  Parser s e a
  -> Parser s e (NonEmpty a)
list1 p =
  p >>= \first ->
  list p >>= \rest ->
  return (first :| rest)
-- | A character that failed a predicate (the predicate is carried for
-- error reporting; it has no Show instance).
data NotSatisfied =
  NotSatisfied (Char -> Bool) Char
-- | Consume a character that satisfies the predicate; otherwise fail
-- with the offending character.
satisfy ::
  SemiCharState s =>
  (Char -> Bool)
  -> Parser s (UnexpectedEofOr NotSatisfied) Char
satisfy f =
  do c <- UnexpectedEof .=. character
     bool (failed (UnexpectedEofOr (NotSatisfied f c))) (return c) (f c)
-- | Mismatch error: the character seen and the character expected.
data NotIs =
  NotIs Char Char
  deriving (Eq, Ord, Show)
-- | Consume exactly the given character.
is ::
  SemiCharState s =>
  Char
  -> Parser s (UnexpectedEofOr NotIs) Char
is c =
  -- Re-tag the generic satisfy failure as a concrete seen/expected pair.
  over _UnexpectedEofOr (\(NotSatisfied _ x) -> NotIs x c) .~. satisfy (== c)
-- | Consume exactly the given string, character by character.
string ::
  SemiCharState s =>
  String
  -> Parser s (UnexpectedEofOr NotIs) String
string =
  traverse is
-- | A character that is not a decimal digit.
data NotDigit =
  NotDigit Char
  deriving (Eq, Ord, Show)
-- | Consume one decimal digit, via the 'digitC' prism.
digit ::
  SemiCharState s =>
  Parser s (UnexpectedEofOr NotDigit) Digit
digit =
  do c <- UnexpectedEof .=. character
     case c ^? digitC of
       Nothing ->
         failed (UnexpectedEofOr (NotDigit c))
       Just d ->
         return d
-- | The XML whitespace characters this parser recognizes.
data Space =
  Space -- 0x0020
  | LineFeed -- 0x000a
  | CarriageReturn -- 0x000d
  | Tab -- 0x0009
  | Ideographic -- 0x3000
  deriving (Eq, Ord, Show)
-- | A character that is not one of the recognized space characters.
data NotSpace =
  NotSpace Char
  deriving (Eq, Ord, Show)
-- | Consume one whitespace character, reporting which kind it was.
space ::
  SemiCharState s =>
  Parser s (UnexpectedEofOr NotSpace) Space
space =
  do c <- UnexpectedEof .=. character
     case c of
       '\x0020' ->
         return Space
       '\x000a' ->
         return LineFeed
       '\x000d' ->
         return CarriageReturn
       '\x0009' ->
         return Tab
       '\x3000' ->
         return Ideographic
       _ ->
         failed (UnexpectedEofOr (NotSpace c))
{-
Error types
* UnexpectedEof ~ ()
* UnexpectedEofOrChar ~ Maybe Char
* UnexpectedEofOrCharAnd a ~ Maybe (Char, a)
-}
|
tonymorris/exml
|
src/Data/Xml/Parser.hs
|
bsd-3-clause
| 6,520
| 0
| 18
| 2,067
| 2,490
| 1,283
| 1,207
| 253
| 6
|
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FlexibleContexts #-}
module Scheduler.Types
( JobStatus (..)
, Job-- (..)
, newJob
, JobQueue
, jobName
, jobStatus
, jobVal
, jobId
, qJobs
, qStartTime
, emptyQueue
, qStatus
, QueueException (..)
, QStatus (..)
) where
import Control.Lens
import GHC.Exception
import Prelude
import Data.Text (Text)
import GHC.Generics
import Data.Time
import Data.Aeson
import Control.Monad.State
-- | Lifecycle states of a single queued job.
data JobStatus
  = Queued
  | Running
  | Finished
  | Cancelling
  | Cancelled
  deriving (Show, Eq, Generic)
-- | A named job carrying an arbitrary payload and a numeric id.  The
-- "Internal" fields are exposed only through read-only getters
-- ('jobName', 'jobVal', 'jobId') further down; '_jobStatus' gets a full
-- lens via makeLenses.
data Job a = Job
  { _jobNameInternal :: Text
  , _jobStatus :: JobStatus
  , _jobValInternal :: a
  , _jobIdInternal :: Int
  } deriving (Show, Generic)
-- | A queue of jobs with an optional start time and a run/wait status.
data JobQueue a = JobQueue
  { _qJobs :: [Job a]
  , _qStartTime :: Maybe UTCTime
  , _qStatus :: QStatus
  } deriving (Generic, Show)
-- | Whether the queue as a whole is currently executing jobs.
data QStatus
  = QRunning
  | QWaiting
  deriving (Show, Generic)
-- | Exception type for queue-level failures, carrying a message.
data QueueException = QueueException Text
  deriving (Show, Eq, Generic)
instance Exception QueueException
instance ToJSON QueueException
instance FromJSON QueueException
-- Template Haskell lens generation for the underscore-prefixed fields.
makeLenses ''Job
makeLenses ''JobQueue
-- | A fresh queue: no jobs, no start time, waiting.
emptyQueue :: JobQueue a
emptyQueue = JobQueue mempty Nothing QWaiting
-- Generic JSON codecs for all queue types.
instance ToJSON JobStatus
instance FromJSON JobStatus
instance ToJSON QStatus
instance FromJSON QStatus
instance ToJSON a => ToJSON (Job a)
instance FromJSON a => FromJSON (Job a)
instance ToJSON a => ToJSON (JobQueue a)
instance FromJSON a => FromJSON (JobQueue a)
-- | Create a 'Job', drawing a fresh id from the 'MonadState' Int counter
-- and leaving the counter incremented.
newJob :: MonadState Int m => Text -> JobStatus -> a -> m (Job a)
newJob name status val = state mkJob
  where
    -- Pair the new job with the incremented id counter.  (The payload
    -- parameter was previously named "jobVal", shadowing the top-level
    -- 'jobVal' getter; renamed to avoid the shadowing warning.)
    mkJob nextId = (Job name status val nextId, nextId + 1)
-- Read-only getters: 'getting' restricts the generated lenses so these
-- fields cannot be modified from outside this module.
jobId :: (Functor f, Contravariant f) => LensLike' f (Job a) Int
jobId = getting jobIdInternal
jobName :: (Functor f, Contravariant f) => LensLike' f (Job a) Text
jobName = getting jobNameInternal
jobVal :: (Functor f, Contravariant f) => LensLike' f (Job a) a
jobVal = getting jobValInternal
|
limaner2002/EPC-tools
|
scheduler-ui/src/Scheduler/Types.hs
|
bsd-3-clause
| 2,053
| 0
| 11
| 391
| 651
| 351
| 300
| 76
| 1
|
{-# LANGUAGE CPP, MagicHash, UnboxedTuples #-}
{-# OPTIONS_GHC -O -funbox-strict-fields #-}
-- We always optimise this, otherwise performance of a non-optimised
-- compiler is severely affected
--
-- (c) The University of Glasgow 2002-2006
--
-- Binary I/O library, with special tweaks for GHC
--
-- Based on the nhc98 Binary library, which is copyright
-- (c) Malcolm Wallace and Colin Runciman, University of York, 1998.
-- Under the terms of the license for that software, we must tell you
-- where you can obtain the original version of the Binary library, namely
-- http://www.cs.york.ac.uk/fp/nhc98/
module Binary
( {-type-} Bin,
{-class-} Binary(..),
{-type-} BinHandle,
SymbolTable, Dictionary,
openBinMem,
-- closeBin,
seekBin,
seekBy,
tellBin,
castBin,
writeBinMem,
readBinMem,
fingerprintBinMem,
computeFingerprint,
isEOFBin,
putAt, getAt,
-- for writing instances:
putByte,
getByte,
-- lazy Bin I/O
lazyGet,
lazyPut,
#ifdef __GLASGOW_HASKELL__
-- GHC only:
ByteArray(..),
getByteArray,
putByteArray,
#endif
UserData(..), getUserData, setUserData,
newReadState, newWriteState,
putDictionary, getDictionary, putFS,
) where
#include "HsVersions.h"
-- The *host* architecture version:
#include "../includes/MachDeps.h"
import {-# SOURCE #-} Name (Name)
import FastString
import Panic
import UniqFM
import FastMutInt
import Fingerprint
import BasicTypes
import Foreign
import Data.Array
import Data.ByteString (ByteString)
import qualified Data.ByteString.Internal as BS
import qualified Data.ByteString.Unsafe as BS
import Data.IORef
import Data.Char ( ord, chr )
import Data.Time
import Data.Typeable
import Data.Typeable.Internal
import Control.Monad ( when )
import System.IO as IO
import System.IO.Unsafe ( unsafeInterleaveIO )
import System.IO.Error ( mkIOError, eofErrorType )
import GHC.Real ( Ratio(..) )
import ExtsCompat46
import GHC.Word ( Word8(..) )
import GHC.IO ( IO(..) )
-- | Raw backing storage for a handle: an unstructured byte buffer.
type BinArray = ForeignPtr Word8
---------------------------------------------------------------
-- BinHandle
---------------------------------------------------------------
-- | An in-memory binary buffer with a mutable offset and cached size;
-- grown on demand by expandBin.
data BinHandle
  = BinMem { -- binary data stored in an unboxed array
     bh_usr :: UserData, -- sigh, need parameterized modules :-)
     _off_r :: !FastMutInt, -- the current offset
     _sz_r :: !FastMutInt, -- size of the array (cached)
     _arr_r :: !(IORef BinArray) -- the array (bounds: (0,size-1))
    }
-- XXX: should really store a "high water mark" for dumping out
-- the binary data to a file.
-- | Read back the 'UserData' carried by a handle.
getUserData :: BinHandle -> UserData
getUserData = bh_usr

-- | Return a handle carrying the given 'UserData' (the buffer and
-- offset are shared with the original).
setUserData :: BinHandle -> UserData -> BinHandle
setUserData handle ud = handle { bh_usr = ud }
---------------------------------------------------------------
-- Bin
---------------------------------------------------------------
-- | A typed offset into a handle's buffer.
newtype Bin a = BinPtr Int
  deriving (Eq, Ord, Show, Bounded)
-- | Retag an offset; the numeric position is unchanged.
castBin :: Bin a -> Bin b
castBin (BinPtr i) = BinPtr i
---------------------------------------------------------------
-- class Binary
---------------------------------------------------------------
-- | Serialization to/from a 'BinHandle'.  'put' additionally reports
-- the offset the value was written at.
class Binary a where
    put_ :: BinHandle -> a -> IO ()
    put :: BinHandle -> a -> IO (Bin a)
    get :: BinHandle -> IO a
    -- define one of put_, put. Use of put_ is recommended because it
    -- is more likely that tail-calls can kick in, and we rarely need the
    -- position return value.
    put_ bh a = do _ <- put bh a; return ()
    put bh a = do p <- tellBin bh; put_ bh a; return p
-- | Seek to the given offset and write there (the offset afterwards is
-- wherever the write finished).
putAt :: Binary a => BinHandle -> Bin a -> a -> IO ()
putAt bh p x = do seekBin bh p; put_ bh x; return ()
-- | Seek to the given offset and read from there.
getAt :: Binary a => BinHandle -> Bin a -> IO a
getAt bh p = do seekBin bh p; get bh
-- | Allocate a fresh in-memory handle with the given initial buffer size
-- in bytes.  The size must be strictly positive; the buffer grows on
-- demand afterwards (see expandBin).
openBinMem :: Int -> IO BinHandle
openBinMem size
 -- BUG FIX: the message previously said ">= 0" although the guard
 -- rejects size <= 0, i.e. zero is not accepted either.
 | size <= 0 = error "Data.Binary.openBinMem: size must be > 0"
 | otherwise = do
   arr <- mallocForeignPtrBytes size
   arr_r <- newIORef arr
   ix_r <- newFastMutInt
   writeFastMutInt ix_r 0
   sz_r <- newFastMutInt
   writeFastMutInt sz_r size
   return (BinMem noUserData ix_r sz_r arr_r)
-- | Report the current offset as a typed position.
tellBin :: BinHandle -> IO (Bin a)
tellBin (BinMem _ r _ _) = do ix <- readFastMutInt r; return (BinPtr ix)
-- | Move the current offset to the given position, growing the buffer
-- first when the position lies beyond the current size.
seekBin :: BinHandle -> Bin a -> IO ()
seekBin h@(BinMem _ ix_r sz_r _) (BinPtr p) = do
  sz <- readFastMutInt sz_r
  when (p >= sz) $ expandBin h p
  writeFastMutInt ix_r p
-- | Advance the current offset by the given delta, growing the buffer
-- when the new offset lies beyond the current size.
seekBy :: BinHandle -> Int -> IO ()
seekBy h@(BinMem _ ix_r sz_r _) off = do
  sz <- readFastMutInt sz_r
  ix <- readFastMutInt ix_r
  let target = ix + off
  when (target >= sz) $ expandBin h target
  writeFastMutInt ix_r target
-- | True when the current offset has reached (or passed) the buffer end.
isEOFBin :: BinHandle -> IO Bool
isEOFBin (BinMem _ ix_r sz_r _) = do
  ix <- readFastMutInt ix_r
  sz <- readFastMutInt sz_r
  return (ix >= sz)
-- | Dump the buffer contents (up to the current offset) to a file.
writeBinMem :: BinHandle -> FilePath -> IO ()
writeBinMem (BinMem _ ix_r _ arr_r) fn = do
  h <- openBinaryFile fn WriteMode
  arr <- readIORef arr_r
  ix <- readFastMutInt ix_r
  withForeignPtr arr $ \p -> hPutBuf h p ix
  hClose h
-- | Load a whole file into a fresh handle, offset reset to zero.
readBinMem :: FilePath -> IO BinHandle
-- Return a BinHandle with a totally undefined State
readBinMem filename = do
  h <- openBinaryFile filename ReadMode
  filesize' <- hFileSize h
  let filesize = fromIntegral filesize'
  -- Over-allocate (2x) so subsequent writes have headroom before a grow.
  arr <- mallocForeignPtrBytes (filesize*2)
  count <- withForeignPtr arr $ \p -> hGetBuf h p filesize
  when (count /= filesize) $
       error ("Binary.readBinMem: only read " ++ show count ++ " bytes")
  hClose h
  arr_r <- newIORef arr
  ix_r <- newFastMutInt
  writeFastMutInt ix_r 0
  sz_r <- newFastMutInt
  writeFastMutInt sz_r filesize
  return (BinMem noUserData ix_r sz_r arr_r)
-- | Fingerprint the buffer contents up to the current offset.
fingerprintBinMem :: BinHandle -> IO Fingerprint
fingerprintBinMem (BinMem _ ix_r _ arr_r) = do
  arr <- readIORef arr_r
  ix <- readFastMutInt ix_r
  withForeignPtr arr $ \p -> fingerprintData p ix
-- | Serialize a value into a scratch buffer (using the supplied Name
-- writer) and fingerprint the resulting bytes.
computeFingerprint :: Binary a
                   => (BinHandle -> Name -> IO ())
                   -> a
                   -> IO Fingerprint
computeFingerprint put_name a = do
  bh <- openBinMem (3*1024) -- just less than a block
  bh <- return $ setUserData bh $ newWriteState put_name putFS
  put_ bh a
  fingerprintBinMem bh
-- | Expand the size of the array to include a specified offset: the size
-- is repeatedly doubled until it exceeds the offset, and the old
-- contents are copied into the new buffer.
expandBin :: BinHandle -> Int -> IO ()
expandBin (BinMem _ _ sz_r arr_r) off = do
   sz <- readFastMutInt sz_r
   let sz' = head (dropWhile (<= off) (iterate (* 2) sz))
   arr <- readIORef arr_r
   arr' <- mallocForeignPtrBytes sz'
   withForeignPtr arr $ \old ->
     withForeignPtr arr' $ \new ->
       copyBytes new old sz
   writeFastMutInt sz_r sz'
   writeIORef arr_r arr'
-- -----------------------------------------------------------------------------
-- Low-level reading/writing of bytes
-- | Write one byte at the current offset, growing the buffer (and
-- retrying) when the offset has reached the end.
putWord8 :: BinHandle -> Word8 -> IO ()
putWord8 h@(BinMem _ ix_r sz_r arr_r) w = do
    ix <- readFastMutInt ix_r
    sz <- readFastMutInt sz_r
    -- double the size of the array if it overflows
    if (ix >= sz)
        then do expandBin h ix
                putWord8 h w
        else do arr <- readIORef arr_r
                withForeignPtr arr $ \p -> pokeByteOff p ix w
                writeFastMutInt ix_r (ix+1)
                return ()
-- | Read one byte at the current offset; raises an EOF IOError when the
-- offset has reached the end of the buffer.
getWord8 :: BinHandle -> IO Word8
getWord8 (BinMem _ ix_r sz_r arr_r) = do
    ix <- readFastMutInt ix_r
    sz <- readFastMutInt sz_r
    when (ix >= sz) $
        ioError (mkIOError eofErrorType "Data.Binary.getWord8" Nothing Nothing)
    arr <- readIORef arr_r
    w <- withForeignPtr arr $ \p -> peekByteOff p ix
    writeFastMutInt ix_r (ix+1)
    return w
-- | Write a single byte (synonym for the 'Word8' 'put_').
putByte :: BinHandle -> Word8 -> IO ()
putByte = put_

-- | Read a single byte.
getByte :: BinHandle -> IO Word8
getByte = getWord8
-- -----------------------------------------------------------------------------
-- Primitve Word writes
-- Multi-byte words are written big-endian, one byte at a time.
instance Binary Word8 where
  put_ = putWord8
  get  = getWord8
instance Binary Word16 where
  put_ h w = do -- XXX too slow.. inline putWord8?
    putByte h (fromIntegral (w `shiftR` 8))
    putByte h (fromIntegral (w .&. 0xff))
  get h = do
    w1 <- getWord8 h
    w2 <- getWord8 h
    return $! ((fromIntegral w1 `shiftL` 8) .|. fromIntegral w2)
-- Big-endian 32-bit encoding: most significant byte first.
instance Binary Word32 where
  put_ h w = do
    putByte h (fromIntegral (w `shiftR` 24))
    putByte h (fromIntegral ((w `shiftR` 16) .&. 0xff))
    putByte h (fromIntegral ((w `shiftR` 8) .&. 0xff))
    putByte h (fromIntegral (w .&. 0xff))
  get h = do
    w1 <- getWord8 h
    w2 <- getWord8 h
    w3 <- getWord8 h
    w4 <- getWord8 h
    return $! ((fromIntegral w1 `shiftL` 24) .|.
               (fromIntegral w2 `shiftL` 16) .|.
               (fromIntegral w3 `shiftL` 8) .|.
               (fromIntegral w4))
-- Big-endian 64-bit encoding: most significant byte first.
instance Binary Word64 where
  put_ h w = do
    putByte h (fromIntegral (w `shiftR` 56))
    putByte h (fromIntegral ((w `shiftR` 48) .&. 0xff))
    putByte h (fromIntegral ((w `shiftR` 40) .&. 0xff))
    putByte h (fromIntegral ((w `shiftR` 32) .&. 0xff))
    putByte h (fromIntegral ((w `shiftR` 24) .&. 0xff))
    putByte h (fromIntegral ((w `shiftR` 16) .&. 0xff))
    putByte h (fromIntegral ((w `shiftR` 8) .&. 0xff))
    putByte h (fromIntegral (w .&. 0xff))
  get h = do
    w1 <- getWord8 h
    w2 <- getWord8 h
    w3 <- getWord8 h
    w4 <- getWord8 h
    w5 <- getWord8 h
    w6 <- getWord8 h
    w7 <- getWord8 h
    w8 <- getWord8 h
    return $! ((fromIntegral w1 `shiftL` 56) .|.
               (fromIntegral w2 `shiftL` 48) .|.
               (fromIntegral w3 `shiftL` 40) .|.
               (fromIntegral w4 `shiftL` 32) .|.
               (fromIntegral w5 `shiftL` 24) .|.
               (fromIntegral w6 `shiftL` 16) .|.
               (fromIntegral w7 `shiftL` 8) .|.
               (fromIntegral w8))
-- -----------------------------------------------------------------------------
-- Primitve Int writes
-- Signed integers reuse the unsigned encodings of the same width.
instance Binary Int8 where
  put_ h w = put_ h (fromIntegral w :: Word8)
  get h    = do w <- get h; return $! (fromIntegral (w::Word8))
instance Binary Int16 where
  put_ h w = put_ h (fromIntegral w :: Word16)
  get h    = do w <- get h; return $! (fromIntegral (w::Word16))
instance Binary Int32 where
  put_ h w = put_ h (fromIntegral w :: Word32)
  get h    = do w <- get h; return $! (fromIntegral (w::Word32))
instance Binary Int64 where
  put_ h w = put_ h (fromIntegral w :: Word64)
  get h    = do w <- get h; return $! (fromIntegral (w::Word64))
-- -----------------------------------------------------------------------------
-- Instances for standard types
-- () occupies no bytes; Bool is one byte; Char is its 32-bit code point;
-- Int is serialized as a fixed 64-bit value regardless of host word size.
instance Binary () where
    put_ _ () = return ()
    get  _    = return ()
instance Binary Bool where
    put_ bh b = putByte bh (fromIntegral (fromEnum b))
    get  bh   = do x <- getWord8 bh; return $! (toEnum (fromIntegral x))
instance Binary Char where
    put_  bh c = put_ bh (fromIntegral (ord c) :: Word32)
    get  bh   = do x <- get bh; return $! (chr (fromIntegral (x :: Word32)))
instance Binary Int where
    put_ bh i = put_ bh (fromIntegral i :: Int64)
    get  bh = do
        x <- get bh
        return $! (fromIntegral (x :: Int64))
-- Lists: a one-byte length when < 0xff, otherwise the 0xff marker byte
-- followed by the length as a Word32, then the elements in order.
instance Binary a => Binary [a] where
    put_ bh l = do
        let len = length l
        if (len < 0xff)
          then putByte bh (fromIntegral len :: Word8)
          else do putByte bh 0xff; put_ bh (fromIntegral len :: Word32)
        mapM_ (put_ bh) l
    get bh = do
        b <- getByte bh
        len <- if b == 0xff
                  then get bh
                  else return (fromIntegral b :: Word32)
        let loop 0 = return []
            loop n = do a <- get bh; as <- loop (n-1); return (a:as)
        loop len
-- Tuples are stored componentwise, left to right, with no tag or length.
instance (Binary a, Binary b) => Binary (a,b) where
    put_ bh (a,b) = do put_ bh a; put_ bh b
    get bh = do a <- get bh
                b <- get bh
                return (a,b)
instance (Binary a, Binary b, Binary c) => Binary (a,b,c) where
    put_ bh (a,b,c) = do put_ bh a; put_ bh b; put_ bh c
    get bh = do a <- get bh
                b <- get bh
                c <- get bh
                return (a,b,c)
instance (Binary a, Binary b, Binary c, Binary d) => Binary (a,b,c,d) where
    put_ bh (a,b,c,d) = do put_ bh a; put_ bh b; put_ bh c; put_ bh d
    get bh = do a <- get bh
                b <- get bh
                c <- get bh
                d <- get bh
                return (a,b,c,d)
instance (Binary a, Binary b, Binary c, Binary d, Binary e) => Binary (a,b,c,d, e) where
    put_ bh (a,b,c,d, e) = do put_ bh a; put_ bh b; put_ bh c; put_ bh d; put_ bh e;
    get bh = do a <- get bh
                b <- get bh
                c <- get bh
                d <- get bh
                e <- get bh
                return (a,b,c,d,e)
instance (Binary a, Binary b, Binary c, Binary d, Binary e, Binary f) => Binary (a,b,c,d, e, f) where
    put_ bh (a,b,c,d, e, f) = do put_ bh a; put_ bh b; put_ bh c; put_ bh d; put_ bh e; put_ bh f;
    get bh = do a <- get bh
                b <- get bh
                c <- get bh
                d <- get bh
                e <- get bh
                f <- get bh
                return (a,b,c,d,e,f)
-- Maybe and Either use a one-byte constructor tag followed by the payload.
-- On decode, any non-zero tag selects the second constructor.
instance Binary a => Binary (Maybe a) where
    put_ bh Nothing = putByte bh 0
    put_ bh (Just a) = do putByte bh 1; put_ bh a
    get bh = do h <- getWord8 bh
                case h of
                  0 -> return Nothing
                  _ -> do x <- get bh; return (Just x)
instance (Binary a, Binary b) => Binary (Either a b) where
    put_ bh (Left a) = do putByte bh 0; put_ bh a
    put_ bh (Right b) = do putByte bh 1; put_ bh b
    get bh = do h <- getWord8 bh
                case h of
                  0 -> do a <- get bh ; return (Left a)
                  _ -> do b <- get bh ; return (Right b)
-- Time types are serialised structurally: a UTCTime is its Day plus the
-- DiffTime within the day; a Day is its Modified Julian Day number; a
-- DiffTime goes through Rational (exact, no precision loss).
instance Binary UTCTime where
    put_ bh u = do put_ bh (utctDay u)
                   put_ bh (utctDayTime u)
    get bh = do day <- get bh
                dayTime <- get bh
                return $ UTCTime { utctDay = day, utctDayTime = dayTime }
instance Binary Day where
    put_ bh d = put_ bh (toModifiedJulianDay d)
    get bh = do i <- get bh
                return $ ModifiedJulianDay { toModifiedJulianDay = i }
instance Binary DiffTime where
    put_ bh dt = put_ bh (toRational dt)
    get bh = do r <- get bh
                return $ fromRational r
#if defined(__GLASGOW_HASKELL__) || 1
--to quote binary-0.3 on this code idea,
--
-- TODO This instance is not architecture portable. GMP stores numbers as
-- arrays of machine sized words, so the byte format is not portable across
-- architectures with different endianess and word size.
--
-- This makes it hard (impossible) to make an equivalent instance
-- with code that is compilable with non-GHC. Do we need any instance
-- Binary Integer, and if so, does it have to be blazing fast? Or can
-- we just change this instance to be portable like the rest of the
-- instances? (binary package has code to steal for that)
--
-- yes, we need Binary Integer and Binary Rational in basicTypes/Literal.lhs
-- Integer is serialised as its decimal 'show' string and parsed back
-- with 'reads'.  Inefficient, but portable across word sizes and
-- endianness (see the commentary above for why the GMP-internal
-- representation is not used).
instance Binary Integer where
    -- XXX This is hideous
    put_ bh i = put_ bh (show i)
    get bh = do str <- get bh
                case reads str of
                  [(i, "")] -> return i
                  _ -> fail ("Binary Integer: got " ++ show str)
{-
put_ bh (S# i#) = do putByte bh 0; put_ bh (I# i#)
put_ bh (J# s# a#) = do
putByte bh 1
put_ bh (I# s#)
let sz# = sizeofByteArray# a# -- in *bytes*
put_ bh (I# sz#) -- in *bytes*
putByteArray bh a# sz#
get bh = do
b <- getByte bh
case b of
0 -> do (I# i#) <- get bh
return (S# i#)
_ -> do (I# s#) <- get bh
sz <- get bh
(BA a#) <- getByteArray bh sz
return (J# s# a#)
-}
-- As for the rest of this code, even though this module
-- exports it, it doesn't seem to be used anywhere else
-- in GHC!
-- Write the first s# bytes of an unboxed ByteArray#, one byte at a time.
putByteArray :: BinHandle -> ByteArray# -> Int# -> IO ()
putByteArray bh a s# = loop 0#
  where loop n#
           | n# ==# s# = return ()
           | otherwise = do
                putByte bh (indexByteArray a n#)
                loop (n# +# 1#)
-- Read back a byte array of the given size: allocate a mutable array,
-- fill it byte-by-byte from the handle, then freeze it.
getByteArray :: BinHandle -> Int -> IO ByteArray
getByteArray bh (I# sz) = do
  (MBA arr) <- newByteArray sz
  let loop n
           | n ==# sz = return ()
           | otherwise = do
                w <- getByte bh
                writeByteArray arr n w
                loop (n +# 1#)
  loop 0#
  freezeByteArray arr
-- Boxed wrappers so the unboxed array types can appear in IO results.
data ByteArray = BA ByteArray#
data MBA = MBA (MutableByteArray# RealWorld)
-- Allocate a fresh mutable byte array of sz bytes (contents undefined).
newByteArray :: Int# -> IO MBA
newByteArray sz = IO $ \s ->
  case newByteArray# sz s of { (# s, arr #) ->
  (# s, MBA arr #) }
-- Freeze without copying; the caller must not mutate the array afterwards.
freezeByteArray :: MutableByteArray# RealWorld -> IO ByteArray
freezeByteArray arr = IO $ \s ->
  case unsafeFreezeByteArray# arr s of { (# s, arr #) ->
  (# s, BA arr #) }
writeByteArray :: MutableByteArray# RealWorld -> Int# -> Word8 -> IO ()
writeByteArray arr i (W8# w) = IO $ \s ->
  case writeWord8Array# arr i w s of { s ->
  (# s, () #) }
indexByteArray :: ByteArray# -> Int# -> Word8
indexByteArray a# n# = W8# (indexWord8Array# a# n#)
-- A Ratio is its numerator followed by its denominator.
instance (Integral a, Binary a) => Binary (Ratio a) where
    put_ bh (a :% b) = do put_ bh a; put_ bh b
    get bh = do a <- get bh; b <- get bh; return (a :% b)
#endif
-- A Bin pointer is stored as a 32-bit offset into the stream.
instance Binary (Bin a) where
  put_ bh (BinPtr i) = put_ bh (fromIntegral i :: Int32)
  get bh = do i <- get bh; return (BinPtr (fromIntegral (i :: Int32)))
-- -----------------------------------------------------------------------------
-- Instances for Data.Typeable stuff
-- A TyCon is its (package, module, name) triple; the cached hash (the
-- ignored first field) is recomputed by mkTyCon3 on the way back in.
instance Binary TyCon where
    put_ bh (TyCon _ p m n) = do
        put_ bh (p,m,n)
    get bh = do
        (p,m,n) <- get bh
        return (mkTyCon3 p m n)
-- A TypeRep is stored structurally: head TyCon plus argument TypeReps,
-- and rebuilt with mkTyConApp.
instance Binary TypeRep where
    put_ bh type_rep = do
        let (ty_con, child_type_reps) = splitTyConApp type_rep
        put_ bh ty_con
        put_ bh child_type_reps
    get bh = do
        ty_con <- get bh
        child_type_reps <- get bh
        return (mkTyConApp ty_con child_type_reps)
-- -----------------------------------------------------------------------------
-- Lazy reading/writing
-- Write an object preceded by a pointer to the position just after it,
-- so a reader can skip the object without deserialising it.
lazyPut :: Binary a => BinHandle -> a -> IO ()
lazyPut bh a = do
    -- output the obj with a ptr to skip over it:
    pre_a <- tellBin bh
    put_ bh pre_a       -- save a slot for the ptr
    put_ bh a           -- dump the object
    q <- tellBin bh     -- q = ptr to after object
    putAt bh pre_a q    -- fill in slot before a with ptr to q
    seekBin bh q        -- finally carry on writing at q
-- Counterpart of lazyPut: skip over the object now and deserialise it
-- lazily, only if/when the result is demanded.
lazyGet :: Binary a => BinHandle -> IO a
lazyGet bh = do
    p <- get bh -- a BinPtr
    p_a <- tellBin bh
    a <- unsafeInterleaveIO $ do
        -- NB: Use a fresh off_r variable in the child thread, for thread
        -- safety.
        off_r <- newFastMutInt
        getAt bh { _off_r = off_r } p_a
    seekBin bh p -- skip over the object for now
    return a
-- -----------------------------------------------------------------------------
-- UserData
-- -----------------------------------------------------------------------------
-- Callbacks hung off a BinHandle for reading/writing Names and
-- FastStrings via the symbol table and dictionary.  A handle is created
-- for either reading or writing, never both: the unused half of the
-- record is filled with 'undef' and panics if touched.
data UserData =
   UserData {
        -- for *deserialising* only:
        ud_get_name :: BinHandle -> IO Name,
        ud_get_fs   :: BinHandle -> IO FastString,
        -- for *serialising* only:
        ud_put_name :: BinHandle -> Name -> IO (),
        ud_put_fs   :: BinHandle -> FastString -> IO ()
   }
-- UserData for a read-only handle; the put callbacks panic if used.
newReadState :: (BinHandle -> IO Name)
             -> (BinHandle -> IO FastString)
             -> UserData
newReadState get_name get_fs
  = UserData { ud_get_name = get_name,
               ud_get_fs   = get_fs,
               ud_put_name = undef "put_name",
               ud_put_fs   = undef "put_fs"
             }
-- UserData for a write-only handle; the get callbacks panic if used.
newWriteState :: (BinHandle -> Name -> IO ())
              -> (BinHandle -> FastString -> IO ())
              -> UserData
newWriteState put_name put_fs
  = UserData { ud_get_name = undef "get_name",
               ud_get_fs   = undef "get_fs",
               ud_put_name = put_name,
               ud_put_fs   = put_fs
             }
-- Placeholder UserData for handles that never touch Names/FastStrings.
noUserData :: a
noUserData = undef "UserData"
-- Panic with a message identifying which missing callback was hit.
undef :: String -> a
undef s = panic ("Binary.UserData: no " ++ s)
---------------------------------------------------------
-- The Dictionary
---------------------------------------------------------
type Dictionary = Array Int FastString -- The dictionary
                                       -- Should be 0-indexed
-- Write the dictionary: its size, then the sz FastStrings in index
-- order (the UniqFM maps each string to its slot, inverted via 'array').
putDictionary :: BinHandle -> Int -> UniqFM (Int,FastString) -> IO ()
putDictionary bh sz dict = do
  put_ bh sz
  mapM_ (putFS bh) (elems (array (0,sz-1) (eltsUFM dict)))
-- Read back a dictionary written by 'putDictionary': a size prefix
-- followed by that many FastStrings, returned as a 0-indexed array.
getDictionary :: BinHandle -> IO Dictionary
getDictionary bh = do
    count <- get bh
    strs <- sequence (replicate count (getFS bh))
    return (listArray (0, count - 1) strs)
---------------------------------------------------------
-- The Symbol Table
---------------------------------------------------------
-- On disk, the symbol table is an array of IfaceExtName, when
-- reading it in we turn it into a SymbolTable.
-- In-memory form of the on-disk symbol table: a 0-indexed array of Names.
type SymbolTable = Array Int Name
---------------------------------------------------------
-- Reading and writing FastStrings
---------------------------------------------------------
-- FastStrings are serialised as their UTF-8 ByteString payload.
putFS :: BinHandle -> FastString -> IO ()
putFS bh fs = putBS bh $ fastStringToByteString fs
getFS :: BinHandle -> IO FastString
getFS bh = do bs <- getBS bh
              return $! mkFastStringByteString bs
-- Write a strict ByteString: its length, then the raw bytes copied out
-- one at a time through the pinned CString view.
putBS :: BinHandle -> ByteString -> IO ()
putBS bh bs =
  BS.unsafeUseAsCStringLen bs $ \(ptr, l) -> do
    put_ bh l
    let
        go n | n == l = return ()
             | otherwise = do
                b <- peekElemOff (castPtr ptr) n
                putByte bh b
                go (n+1)
    go 0
{- -- possible faster version, not quite there yet:
getBS bh@BinMem{} = do
(I# l) <- get bh
arr <- readIORef (arr_r bh)
off <- readFastMutInt (off_r bh)
return $! (mkFastSubBytesBA# arr off l)
-}
-- Read a length-prefixed ByteString written by 'putBS': allocate a
-- buffer of the announced length and fill it byte-by-byte.
getBS :: BinHandle -> IO ByteString
getBS bh = do
  l <- get bh
  fp <- mallocForeignPtrBytes l
  withForeignPtr fp $ \ptr -> do
    let go n | n == l = return $ BS.fromForeignPtr fp 0 l
             | otherwise = do
                b <- getByte bh
                pokeElemOff ptr n b
                go (n+1)
    --
    go 0
-- Strict ByteStrings use the length-prefixed encoding of putBS/getBS.
instance Binary ByteString where
  put_ = putBS
  get = getBS
-- FastStrings defer to the dictionary callbacks installed in the
-- handle's UserData, so each distinct string is stored only once.
instance Binary FastString where
  put_ bh f =
    case getUserData bh of
        UserData { ud_put_fs = put_fs } -> put_fs bh f
  get bh =
    case getUserData bh of
        UserData { ud_get_fs = get_fs } -> get_fs bh
-- Here to avoid loop
-- A Fingerprint is its two Word64 halves, in order.
instance Binary Fingerprint where
  put_ h (Fingerprint w1 w2) = do put_ h w1; put_ h w2
  get h = do w1 <- get h; w2 <- get h; return (Fingerprint w1 w2)
-- One tag byte: 0 = IsFunction, 1 = IsData; any other byte is a panic.
instance Binary FunctionOrData where
    put_ bh IsFunction = putByte bh 0
    put_ bh IsData     = putByte bh 1
    get bh = do
        h <- getByte bh
        case h of
          0 -> return IsFunction
          1 -> return IsData
          _ -> panic "Binary FunctionOrData"
-- One tag byte: 0 = Boxed, 1 = Unboxed; any other tag (written as 2)
-- decodes as ConstraintTuple.
instance Binary TupleSort where
    put_ bh BoxedTuple      = putByte bh 0
    put_ bh UnboxedTuple    = putByte bh 1
    put_ bh ConstraintTuple = putByte bh 2
    get bh = do
      h <- getByte bh
      case h of
        0 -> do return BoxedTuple
        1 -> do return UnboxedTuple
        _ -> do return ConstraintTuple
-- Tag byte 0-3; ActiveBefore/ActiveAfter (tags 2/3) carry a phase
-- payload after the tag.
instance Binary Activation where
    put_ bh NeverActive = do
            putByte bh 0
    put_ bh AlwaysActive = do
            putByte bh 1
    put_ bh (ActiveBefore aa) = do
            putByte bh 2
            put_ bh aa
    put_ bh (ActiveAfter ab) = do
            putByte bh 3
            put_ bh ab
    get bh = do
            h <- getByte bh
            case h of
              0 -> do return NeverActive
              1 -> do return AlwaysActive
              2 -> do aa <- get bh
                      return (ActiveBefore aa)
              _ -> do ab <- get bh
                      return (ActiveAfter ab)
-- An InlinePragma is its four fields written in declaration order.
instance Binary InlinePragma where
    put_ bh (InlinePragma a b c d) = do
            put_ bh a
            put_ bh b
            put_ bh c
            put_ bh d
    get bh = do
           a <- get bh
           b <- get bh
           c <- get bh
           d <- get bh
           return (InlinePragma a b c d)
-- FunLike is tagged 0, ConLike 1; on decode, any tag other than 1 is
-- treated as FunLike (matching the original if/else fallback).
instance Binary RuleMatchInfo where
    put_ bh FunLike = putByte bh 0
    put_ bh ConLike = putByte bh 1
    get bh = do
            tag <- getByte bh
            case tag of
              1 -> return ConLike
              _ -> return FunLike
-- One tag byte per constructor; on decode, the last constructor absorbs
-- any unrecognised tag (wildcard case), as in the sibling instances.
instance Binary InlineSpec where
    put_ bh EmptyInlineSpec = putByte bh 0
    put_ bh Inline          = putByte bh 1
    put_ bh Inlinable       = putByte bh 2
    put_ bh NoInline        = putByte bh 3
    get bh = do h <- getByte bh
                case h of
                  0 -> return EmptyInlineSpec
                  1 -> return Inline
                  2 -> return Inlinable
                  _ -> return NoInline
instance Binary DefMethSpec where
    put_ bh NoDM      = putByte bh 0
    put_ bh VanillaDM = putByte bh 1
    put_ bh GenericDM = putByte bh 2
    get bh = do
            h <- getByte bh
            case h of
              0 -> return NoDM
              1 -> return VanillaDM
              _ -> return GenericDM
instance Binary RecFlag where
    put_ bh Recursive = do
            putByte bh 0
    put_ bh NonRecursive = do
            putByte bh 1
    get bh = do
            h <- getByte bh
            case h of
              0 -> do return Recursive
              _ -> do return NonRecursive
-- One tag byte per mode (0-4); unlike the other enum instances this one
-- panics on an unknown tag rather than defaulting.
instance Binary OverlapMode where
    put_ bh NoOverlap    = putByte bh 0
    put_ bh Overlaps     = putByte bh 1
    put_ bh Incoherent   = putByte bh 2
    put_ bh Overlapping  = putByte bh 3
    put_ bh Overlappable = putByte bh 4
    get bh = do
        h <- getByte bh
        case h of
            0 -> return NoOverlap
            1 -> return Overlaps
            2 -> return Incoherent
            3 -> return Overlapping
            4 -> return Overlappable
            _ -> panic ("get OverlapMode" ++ show h)
-- An OverlapFlag is its mode followed by its safe-overlap Bool.
instance Binary OverlapFlag where
    put_ bh flag = do put_ bh (overlapMode flag)
                      put_ bh (isSafeOverlap flag)
    get bh = do
        h <- get bh
        b <- get bh
        return OverlapFlag { overlapMode = h, isSafeOverlap = b }
-- Tag byte 0/1/2 for InfixL/InfixR/InfixN; unknown tags decode as InfixN.
instance Binary FixityDirection where
    put_ bh InfixL = do
            putByte bh 0
    put_ bh InfixR = do
            putByte bh 1
    put_ bh InfixN = do
            putByte bh 2
    get bh = do
            h <- getByte bh
            case h of
              0 -> do return InfixL
              1 -> do return InfixR
              _ -> do return InfixN
-- A Fixity is its direction and precedence, in declaration order.
instance Binary Fixity where
    put_ bh (Fixity aa ab) = do
            put_ bh aa
            put_ bh ab
    get bh = do
          aa <- get bh
          ab <- get bh
          return (Fixity aa ab)
-- Tag 0 = WARNING pragma text, tag 1 = DEPRECATED pragma text; unknown
-- tags decode as DeprecatedTxt.
instance Binary WarningTxt where
    put_ bh (WarningTxt w) = do
            putByte bh 0
            put_ bh w
    put_ bh (DeprecatedTxt d) = do
            putByte bh 1
            put_ bh d
    get bh = do
            h <- getByte bh
            case h of
              0 -> do w <- get bh
                      return (WarningTxt w)
              _ -> do d <- get bh
                      return (DeprecatedTxt d)
|
spacekitteh/smcghc
|
compiler/utils/Binary.hs
|
bsd-3-clause
| 27,832
| 0
| 19
| 8,926
| 8,979
| 4,378
| 4,601
| 644
| 2
|
module Yesod.ReCAPTCHA
( YesodReCAPTCHA(..)
, recaptchaAForm
, recaptchaMForm
, recaptchaOptions
, RecaptchaOptions(..)
) where
import Control.Applicative
import Data.Typeable (Typeable)
import Yesod.Core (whamlet)
import qualified Control.Exception.Lifted as E
import qualified Control.Monad.Trans.Resource as R
import qualified Data.ByteString.Char8 as B8
import qualified Data.ByteString.Lazy.Char8 as L8
import qualified Data.Default as D
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import qualified Data.Text.Encoding.Error as TEE
import qualified Data.Text.Lazy as TL
import qualified Data.Text.Lazy.Encoding as TLE
import qualified Network.HTTP.Conduit as H
import qualified Network.HTTP.Types as HT
import qualified Network.Info as NI
import qualified Network.Socket as HS
import qualified Network.Wai as W
import qualified Yesod.Auth as YA
import qualified Yesod.Core as YC
import qualified Yesod.Form.Fields as YF
import qualified Yesod.Form.Functions as YF
import qualified Yesod.Form.Types as YF
-- | Class used by @yesod-recaptcha@'s fields. It should be
-- fairly easy to implement a barebones instance of this class
-- for your foundation data type:
--
-- > instance YesodReCAPTCHA MyType where
-- > recaptchaPublicKey = return "[your public key]"
-- > recaptchaPrivateKey = return "[your private key]"
--
-- You may also write a more sophisticated instance. For
-- example, you may get these values from your @settings.yml@
-- instead of hardcoding them. Or you may give different keys
-- depending on the request (maybe you're serving to two
-- different domains in the same application).
--
-- The 'YA.YesodAuth' superclass is used only for the HTTP
-- request. Please fill a bug report if you think that this
-- @YesodReCAPTCHA@ may be useful without @YesodAuth@.
--
-- /Minimum complete definition:/ 'recaptchaPublicKey' and
-- 'recaptchaPrivateKey'.
class YA.YesodAuth site => YesodReCAPTCHA site where
      -- | Your reCAPTCHA public key.
      recaptchaPublicKey :: YC.HandlerT site IO T.Text
      -- | Your reCAPTCHA private key.
      recaptchaPrivateKey :: YC.HandlerT site IO T.Text
      -- | A backdoor to the reCAPTCHA mechanism.  While doing
      -- automated tests you may need to fill a form that is
      -- protected by a CAPTCHA.  The whole point of using a
      -- CAPTCHA is disallowing access to non-humans, which
      -- hopefully your test suite is.
      --
      -- In order to solve this problem, you may define
      --
      -- > insecureRecaptchaBackdoor = return (Just "<secret CAPTCHA>")
      --
      -- Now, whenever someone fills @\<secret CAPTCHA\>@ as the
      -- CAPTCHA, the @yesod-recaptcha@ library will /not/ contact
      -- reCAPTCHA's servers and instead will blindly accept the
      -- secret CAPTCHA.
      --
      -- Note that this is a *huge* security hole in the wrong
      -- hands.  We /do not/ recommend using this function on a
      -- production environment without a good reason.  If for
      -- whatever reason you must use this function on a production
      -- environment, please make use of its access to 'GHandler'
      -- in order to return @Just@ only when strictly necessary.
      -- For example, you may return @Just@ only when the request
      -- comes from @localhost@ and read its contents from a secret
      -- file accessible only by SSH which is afterwards removed.
      --
      -- By default, this function returns @Nothing@, which
      -- completely disables the backdoor.
      insecureRecaptchaBackdoor :: YC.HandlerT site IO (Maybe T.Text)
      insecureRecaptchaBackdoor = return Nothing
-- | A reCAPTCHA field. This 'YF.AForm' returns @()@ because
-- CAPTCHAs give no useful information besides having being typed
-- correctly or not. When the user does not type the CAPTCHA
-- correctly, this 'YF.AForm' will automatically fail in the same
-- way as any other @yesod-form@ widget fails, so you may just
-- ignore the @()@ value.
recaptchaAForm :: YesodReCAPTCHA site => YF.AForm (YC.HandlerT site IO) ()
-- Thin applicative-form wrapper: all the work happens in 'recaptchaMForm'.
recaptchaAForm = YF.formToAForm recaptchaMForm
-- | Same as 'recaptchaAForm', but instead of being an
-- 'YF.AForm', it's an 'YF.MForm'.
recaptchaMForm :: YesodReCAPTCHA site =>
                  YF.MForm (YC.HandlerT site IO)
                           ( YF.FormResult ()
                           , [YF.FieldView site] )
recaptchaMForm = do
  -- Pull the two reCAPTCHA request parameters; either may be absent on
  -- the initial (GET) render of the form.
  challengeField <- fakeField "recaptcha_challenge_field"
  responseField <- fakeField "recaptcha_response_field"
  -- Only contact the verification server when both fields are present.
  ret <- maybe (return Nothing)
               (YC.lift . fmap Just . uncurry check)
               ((,) <$> challengeField <*> responseField)
  -- Render the widget, feeding any error code back so reCAPTCHA can
  -- display it; map the check result onto the standard FormResult.
  let view = recaptchaWidget $ case ret of
                                 Just (Error err) -> Just err
                                 _ -> Nothing
      formRet = case ret of
                  Nothing -> YF.FormMissing
                  Just Ok -> YF.FormSuccess ()
                  Just (Error _) -> YF.FormFailure []
      formView = YF.FieldView
                   { YF.fvLabel = ""
                   , YF.fvTooltip = Nothing
                   , YF.fvId = "recaptcha_challenge_field"
                   , YF.fvInput = view
                   , YF.fvErrors = Nothing
                   , YF.fvRequired = True
                   }
  return (formRet, [formView])
-- | Widget with reCAPTCHA's HTML.
recaptchaWidget :: YesodReCAPTCHA site =>
                   Maybe T.Text -- ^ Error code, if any.
                -> YC.WidgetT site IO ()
recaptchaWidget merr = do
  publicKey <- YC.handlerToWidget recaptchaPublicKey
  -- Match the page's scheme so the embedded script is not blocked as
  -- mixed content when serving over TLS.
  isSecure <- W.isSecure <$> YC.waiRequest
  let proto | isSecure = "https"
            | otherwise = "http" :: T.Text
      err = maybe "" (T.append "&error=") merr
  [whamlet|
  <script src="#{proto}://www.google.com/recaptcha/api/challenge?k=#{publicKey}#{err}">
  <noscript>
     <iframe src="#{proto}://www.google.com/recaptcha/api/noscript?k=#{publicKey}#{err}"
             height="300" width="500" frameborder="0">
     <br>
     <textarea name="recaptcha_challenge_field" rows="3" cols="40">
     <input type="hidden" name="recaptcha_response_field" value="manual_challenge">
|]
-- | Contact reCAPTCHA servers and find out if the user correctly
-- guessed the CAPTCHA. Unfortunately, reCAPTCHA doesn't seem to
-- provide an HTTPS endpoint for this API even though we need to
-- send our private key.
check :: YesodReCAPTCHA site =>
         T.Text -- ^ @recaptcha_challenge_field@
      -> T.Text -- ^ @recaptcha_response_field@
      -> YC.HandlerT site IO CheckRet
-- Empty fields are rejected locally with the corresponding reCAPTCHA
-- error codes, avoiding a pointless network round-trip.
check "" _ = return $ Error "invalid-request-cookie"
check _ "" = return $ Error "incorrect-captcha-sol"
check challenge response = do
  -- Honour the test backdoor first (see 'insecureRecaptchaBackdoor').
  backdoor <- insecureRecaptchaBackdoor
  if Just response == backdoor
    then return Ok
    else do
      manager <- YA.authHttpManager <$> YC.getYesod
      privateKey <- recaptchaPrivateKey
      -- The verify API requires the client's IP; render it from the
      -- socket address.  Anything else (e.g. a Unix socket behind a
      -- reverse proxy) cannot be handled here and aborts the handler.
      sockaddr <- W.remoteHost <$> YC.waiRequest
      remoteip <- case sockaddr of
                    HS.SockAddrInet _ hostAddr ->
                        return . show $ NI.IPv4 hostAddr
                    HS.SockAddrInet6 _ _ (w1, w2, w3, w4) _ ->
                        return . show $ NI.IPv6 w1 w2 w3 w4
                    _ -> do
                      $(YC.logError) $ "Yesod.ReCAPTCHA: Couldn't find out remote IP, \
                                       \are you using a reverse proxy? If yes, then \
                                       \please file a bug report at \
                                       \<https://github.com/meteficha/yesod-recaptcha>."
                      fail "Could not find remote IP address for reCAPTCHA."
      let req = D.def
            { H.method = HT.methodPost
            , H.host = "www.google.com"
            , H.path = "/recaptcha/api/verify"
            , H.queryString = HT.renderSimpleQuery False query
            }
          query = [ ("privatekey", TE.encodeUtf8 privateKey)
                  , ("remoteip", B8.pack remoteip)
                  , ("challenge", TE.encodeUtf8 challenge)
                  , ("response", TE.encodeUtf8 response)
                  ]
      -- Any network exception is logged and mapped onto the generic
      -- "recaptcha-not-reachable" error instead of crashing the handler.
      eresp <- E.try $ R.runResourceT $ H.httpLbs req manager
      -- The verify API answers with a line "true" or "false"; on
      -- "false" the second line carries the error code.
      case (L8.lines . H.responseBody) <$> eresp of
        Right ("true":_) -> return Ok
        Right ("false":why:_) -> return . Error . TL.toStrict $
                                 TLE.decodeUtf8With TEE.lenientDecode why
        Right other -> do
          $(YC.logError) $ T.concat [ "Yesod.ReCAPTCHA: could not parse "
                                    , T.pack (show other) ]
          return (Error "recaptcha-not-reachable")
        Left exc -> do
          $(YC.logError) $ T.concat [ "Yesod.ReCAPTCHA: could not contact server ("
                                    , T.pack (show (exc :: E.SomeException))
                                    , ")" ]
          return (Error "recaptcha-not-reachable")
-- | See 'check'.
-- Verification result: 'Ok' on success, or 'Error' with the reCAPTCHA
-- error code to feed back into the widget.
data CheckRet = Ok | Error T.Text
-- | A fake field. Just returns the value of a field.
-- | Look up a request parameter by name: GET parameters take
-- precedence, falling back to POST parameters when absent.
fakeField :: (YC.RenderMessage site YF.FormMessage) =>
             T.Text -- ^ Field id.
          -> YF.MForm (YC.HandlerT site IO) (Maybe T.Text)
fakeField fid = YC.lift $ do
    fromGet <- YC.lookupGetParam fid
    maybe (YC.lookupPostParam fid) (return . Just) fromGet
-- | Define the given 'RecaptchaOptions' for all forms declared
-- after this widget. This widget may be used anywhere, on the
-- @<head>@ or on the @<body>@.
--
-- Note that this is /not/ required to use 'recaptchaAForm' or
-- 'recaptchaMForm'.
recaptchaOptions :: YC.Yesod site =>
                    RecaptchaOptions
                 -> YC.WidgetT site IO ()
-- Emit nothing at all when the options are all defaults.
recaptchaOptions s | s == D.def = return ()
recaptchaOptions s =
  [whamlet|
    <script>
      var RecaptchaOptions = {
         $maybe t <- theme s
             theme : '#{t}',
         $maybe l <- lang s
             lang : '#{l}',
         x : 'x'
      };
  |]
-- | Options that may be given to reCAPTCHA. In order to use
-- them on your site, use `recaptchaOptions` anywhere before the
-- form that contains the `recaptchaField`.
--
-- Note that there's an instance for 'D.Default', so you may use
-- 'D.def'.
data RecaptchaOptions =
  RecaptchaOptions {
    -- | Theme of the reCAPTCHA field.  Currently may be
    -- @\"red\"@, @\"white\"@, @\"blackglass\"@ or @\"clean\"@.
    -- A value of @Nothing@ uses the default.
    theme :: Maybe T.Text
    -- | Language.
  , lang :: Maybe T.Text
  }
    deriving (Eq, Ord, Show, Typeable)
-- | Allows you to use 'D.def' and get sane default values.
instance D.Default RecaptchaOptions where
    -- Both fields default to Nothing, i.e. reCAPTCHA's own defaults.
    def = RecaptchaOptions Nothing Nothing
|
prowdsponsor/yesod-recaptcha
|
src/Yesod/ReCAPTCHA.hs
|
bsd-3-clause
| 10,804
| 0
| 22
| 3,085
| 1,715
| 952
| 763
| -1
| -1
|
{-# language QuasiQuotes #-}
{-# language TemplateHaskell #-}
module OpenCV.Internal.Core.Types.Point.TH
( mkPointType
) where
import "base" Data.List ( intercalate )
import "base" Data.Monoid ( (<>) )
import "base" Foreign.Marshal.Alloc ( alloca )
import "base" Foreign.Storable ( peek )
import "base" System.IO.Unsafe ( unsafePerformIO )
import qualified "inline-c" Language.C.Inline.Unsafe as CU
import "linear" Linear ( V2(..), V3(..) )
import "template-haskell" Language.Haskell.TH
import "template-haskell" Language.Haskell.TH.Quote ( quoteExp )
import "this" OpenCV.Internal.C.PlacementNew.TH ( mkPlacementNewInstance )
import "this" OpenCV.Internal.C.Types
import "this" OpenCV.Internal.Core.Types.Point
import "this" OpenCV.Internal
mkPointType
    :: String  -- ^ Point type name, for both Haskell and C
    -> Integer -- ^ Point dimension
    -> String  -- ^ Point template name in C
    -> Name    -- ^ Depth type name in Haskell
    -> String  -- ^ Depth type name in C
    -> Q [Dec]
-- Generates, for a 2D or 3D point type: a type synonym, a 'FromPtr'
-- instance (with a C++ delete finalizer), 'IsPoint' instances for both
-- the OpenCV point and the matching linear V2/V3 vector, and a
-- placement-new instance.
mkPointType pTypeNameStr dim cTemplateStr depthTypeName cDepthTypeStr
      | dim < 2 || dim > 3 = fail $ "mkPointType: Unsupported dimension: " <> show dim
      | otherwise =
          fmap concat . sequence $
            [ pure <$> pointTySynD
            , fromPtrDs
            , isPointOpenCVInstanceDs
            , isPointHaskellInstanceDs
            , mkPlacementNewInstance pTypeName
            ]
  where
    pTypeName :: Name
    pTypeName = mkName pTypeNameStr
    -- In C the generated point type carries the same name as in Haskell.
    cPointTypeStr :: String
    cPointTypeStr = pTypeNameStr
    pTypeQ :: Q Type
    pTypeQ = conT pTypeName
    depthTypeQ :: Q Type
    depthTypeQ = conT depthTypeName
    dimTypeQ :: Q Type
    dimTypeQ = litT (numTyLit dim)
    -- type <P> = Point <dim> <depth>
    pointTySynD :: Q Dec
    pointTySynD =
        tySynD pTypeName
               []
               ([t|Point|] `appT` dimTypeQ `appT` depthTypeQ)
    -- FromPtr instance whose finalizer deletes the C++ object.
    fromPtrDs :: Q [Dec]
    fromPtrDs =
        [d|
        instance FromPtr $(pTypeQ) where
          fromPtr = objFromPtr Point $ $(finalizerExpQ)
        |]
      where
        finalizerExpQ :: Q Exp
        finalizerExpQ = do
          ptr <- newName "ptr"
          lamE [varP ptr] $
            quoteExp CU.exp $
              "void { delete $(" <> cPointTypeStr <> " * " <> nameBase ptr <> ") }"
    -- The OpenCV point is trivially its own IsPoint witness.
    isPointOpenCVInstanceDs :: Q [Dec]
    isPointOpenCVInstanceDs =
        [d|
        instance IsPoint (Point $(dimTypeQ)) $(depthTypeQ) where
          toPoint   = id
          toPointIO = pure
          fromPoint = id
        |]
    -- IsPoint instance for the matching linear vector (V2 for dim 2,
    -- V3 for dim 3), marshalling through the C++ constructor/fields.
    isPointHaskellInstanceDs :: Q [Dec]
    isPointHaskellInstanceDs =
        let ix = fromInteger dim - 2
        in withLinear (linearTypeQs   !! ix)
                      (linearConNames !! ix)
      where
        linearTypeQs :: [Q Type]
        linearTypeQs = map conT [''V2, ''V3]
        linearConNames :: [Name]
        linearConNames = ['V2, 'V3]
        withLinear :: Q Type -> Name -> Q [Dec]
        withLinear lpTypeQ lvConName =
            [d|
            instance IsPoint $(lpTypeQ) $(depthTypeQ) where
              toPoint   = unsafePerformIO . toPointIO
              toPointIO = $(toPointIOExpQ)
              fromPoint = $(fromPointExpQ)
            |]
          where
            -- \(V_n x y [z]) -> fromPtr (new cv::Point_<depth>(x, y[, z]))
            toPointIOExpQ :: Q Exp
            toPointIOExpQ = do
                ns <- mapM newName elemNames
                lamE [conP lvConName $ map varP ns]
                  $ appE [e|fromPtr|]
                  $ quoteExp CU.exp
                  $ inlineCStr ns
              where
                inlineCStr :: [Name] -> String
                inlineCStr ns = concat
                    [ cPointTypeStr
                    , " * { new cv::" <> cTemplateStr
                    , "<" <> cDepthTypeStr <> ">"
                    , "(" <> intercalate ", " (map elemQuote ns) <> ")"
                    , " }"
                    ]
                  where
                    elemQuote :: Name -> String
                    elemQuote n = "$(" <> cDepthTypeStr <> " " <> nameBase n <> ")"
            -- \point -> unsafePerformIO: copy the C++ fields out through
            -- per-element alloca'd pointers, then rebuild the vector.
            fromPointExpQ :: Q Exp
            fromPointExpQ = do
                point    <- newName "point"
                pointPtr <- newName "pointPtr"
                ptrNames <- mapM (newName . (<> "Ptr")) elemNames
                withPtrNames point pointPtr ptrNames
              where
                withPtrNames :: Name -> Name -> [Name] -> Q Exp
                withPtrNames point pointPtr ptrNames =
                    lamE [varP point]
                      $ appE [e|unsafePerformIO|]
                      $ withPtrVarsExpQ ptrNames
                  where
                    withPtrVarsExpQ :: [Name] -> Q Exp
                    withPtrVarsExpQ = foldr (\p -> appE [e|alloca|] . lamE [varP p]) withAllocatedVars
                    withAllocatedVars :: Q Exp
                    withAllocatedVars =
                        appE ([e|withPtr|] `appE` varE point)
                          $ lamE [varP pointPtr]
                          $ doE
                            [ noBindS $ quoteExp CU.block inlineCStr
                            , noBindS extractExpQ
                            ]
                    inlineCStr :: String
                    inlineCStr = unlines $
                        concat
                          [ "void {"
                          , "const cv::" <> cTemplateStr
                          , "<" <> cDepthTypeStr <> ">"
                          , " & p = *$("
                          , cPointTypeStr
                          , " * "
                          , nameBase pointPtr
                          , ");"
                          ]
                        : map ptrLine (zip [0..] ptrNames)
                        <> ["}"]
                      where
                        ptrLine :: (Int, Name) -> String
                        ptrLine (ix, ptrName) =
                            "*$(" <> cDepthTypeStr <> " * " <> nameBase ptrName <> ") = p." <> elemNames !! ix <> ";"
                    -- Applies the constructor to the values that are
                    -- read from the pointers.
                    extractExpQ :: Q Exp
                    extractExpQ = foldl (\acc peekExp -> [e|(<*>)|] `appE` acc `appE` peekExp)
                                        ([e|pure|] `appE` conE lvConName)
                                        peekExpQs
                      where
                        peekExpQs :: [Q Exp]
                        peekExpQs = map (\p -> [e|peek|] `appE` varE p) ptrNames
    -- Field names of the C++ point, truncated to the dimension.
    elemNames :: [String]
    elemNames = take (fromInteger dim)
                     ["x", "y", "z"]
|
Cortlandd/haskell-opencv
|
src/OpenCV/Internal/Core/Types/Point/TH.hs
|
bsd-3-clause
| 6,560
| 0
| 23
| 2,785
| 1,388
| 774
| 614
| -1
| -1
|
-- xmobar config used by Cedric Fung on 15" rMBP
-- Origin: Vic Fryzel (http://github.com/vicfryzel/xmonad-config)
-- Modifier: Cedric Fung (http://github.com/vec.io/.linux/xmonad)
-- Top-level xmobar configuration record.
Config {
    -- Bar font and colours.
    font = "xft:WenQuanYi Micro Hei Mono:size=7:antialias=true",
    bgColor = "#000000",
    fgColor = "#ffffff",
    -- Fixed geometry: a full-width 32px bar at the top-left corner.
    position = Static { xpos = 0, ypos = 0, width = 2716, height = 32 },
    lowerOnStart = True,
    -- Monitor plugins; the trailing integer is the refresh interval in
    -- tenths of a second.
    commands = [
        Run MultiCpu ["-t","CPU: <total0> <total1> <total2> <total3> <total4> <total5> <total6> <total7>","-L","30","-H","60","-h","#FFB6B0","-l","#CEFFAC","-n","#FFFFCC"] 10,
        Run CoreTemp ["-t", "T: <core0> <core1> <core2> <core3> <core4>", "-L", "40", "-H", "60", "-l", "lightblue", "-n", "gray90", "-h", "red"] 50,
        Run Memory ["-t","M: <usedratio>%","-H","8192","-L","4096","-h","#FFB6B0","-l","#CEFFAC","-n","#FFFFCC"] 10,
        Run Battery ["-t", "<left>% <timeleft>", "-L", "20", "-H", "75", "-h", "green", "-n", "yellow", "-l", "red"] 60,
        Run DiskIO [("/", "R:<total>"), ("/home", "H:<total>")] [] 10,
        Run DynNetwork [] 10,
        Run Date "%a %b %_d %l:%M" "date" 10,
        Run StdinReader
    ],
    -- Template markers: %...% are plugin outputs, }{ splits left/right.
    sepChar = "%",
    alignSep = "}{",
    template = "%StdinReader% }{ %dynnetwork% %multicpu% %coretemp% %memory% %diskio% %battery% <fc=#FFFFCC>%date%</fc>"
}
|
vecio/.linux
|
xmonad/xmobar.hs
|
bsd-3-clause
| 1,332
| 0
| 10
| 262
| 335
| 208
| 127
| -1
| -1
|
-- | Follow-up from meetings with Chung-cheih shan and Co., where we
-- discussed how to do a search over splitting strategy.
module GenNeighborhood where
-- IR datatype from last meeting.
-- | A program is a flat list of variable bindings.
type MidIR = [([Var], AExp)]
-- An expression binds one or more variables: a plain variable reference,
-- a map/fold of a named function over a variable, a split of a variable
-- into two halves, or a concatenation of two variables.
data AExp = V Var
          | Map Exp Var | Fold Exp Var
          | Split Var | Concat Var Var
  deriving (Show)
-- Variables are just numbers; expressions are function names.
type Var = Integer
type Exp = String
-- Example function names used by the sample programs below.
f :: Exp
f = "f"
g :: Exp
g = "g"
-- one example program and a permutation
-- Example: fold g over input 0, then map f over the result.
prog1 :: MidIR
prog1 = [([1], Fold g 0),
         ([2], Map f 1)]
-- The same program with the map split into two halves and re-concatenated.
prog2 :: MidIR
prog2 = [([1], Fold g 0),
         ([3,2], Split 1),
         ([5], (Map f 2)),
         ([4], (Map f 3)),
         ([6], Concat 4 5)]
-- generating some neighbors
-- | Smallest variable number not used anywhere in the program: one more
-- than the maximum variable occurring on either side of any binding.
-- Partial on the empty program (inherited from 'maximum').
nextNum :: [([Var], AExp)] -> Var
nextNum = (+1) . maximum . map (maximum . nums)
  -- Collect every variable mentioned in a binding: the bound variables
  -- plus the variables read by the right-hand side.  The original
  -- version omitted the V case, crashing on programs containing a
  -- plain variable binding.
  where nums (l, V v)        = v : l
        nums (l, Fold _ v)   = v : l
        nums (l, Split v)    = v : l
        nums (l, Map _ v)    = v : l
        nums (l, Concat v n) = n : v : l
-- walkCode :: (a -> Maybe [a]) -> [a] -> [a]
-- walkCode :: (a -> Maybe [a]) -> [a] -> [a]
-- | Walk the bindings in order, replacing the FIRST binding for which
-- 'gen' produces an expansion; everything after it is kept unchanged.
-- Rewritten with head-pattern matching instead of partial head/tail,
-- and without shadowing Prelude's 'elem'; behaviour is unchanged.
walkCode :: (([Var], AExp) -> Maybe MidIR) -> MidIR -> MidIR
walkCode _ [] = []
walkCode gen (b:rest) =
    case gen b of
      Just expansion -> expansion ++ rest
      Nothing        -> b : walkCode gen rest
-- splits :: [([Var], AExp)] -> [([Var], AExp)]
-- | Rewrite the first splittable binding: a Map or Fold over x becomes
-- split x, apply to each half, concat the halves.  Fresh variable
-- numbers start at nextNum of the whole program.
splits :: MidIR -> MidIR
splits code = walkCode gen code
  where num = nextNum code
        -- NOTE: the pattern variables named 'f' below shadow the
        -- top-level 'f'; they stand for whatever function the binding
        -- applies, not the constant "f".
        gen ([n], Map f n1) = Just [(makeSplit n1),
                                    ([num+2], Map f num),
                                    ([num+3], Map f (num+1)),
                                    ([n], Concat (num+2) (num+3))]
        gen ([n], Fold f n1) = Just [(makeSplit n1),
                                     ([num+2], Fold f num),
                                     ([num+3], Fold f (num+1)),
                                     ([n], Concat (num+2) (num+3))]
        gen _ = Nothing
        -- Bind the two fresh halves num and num+1 to the split of n.
        makeSplit n = ([num, num+1], Split n)
|
iu-parfunc/AutoObsidian
|
interface_brainstorming/01_early_sketch/GenNeighborhood.hs
|
bsd-3-clause
| 2,016
| 0
| 13
| 761
| 808
| 455
| 353
| 47
| 4
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE TypeFamilies #-}
module Network.Kafka.Primitive.OffsetCommit where
import Control.Lens
import qualified Data.Vector as V
import Network.Kafka.Exports
import Network.Kafka.Types
-- | Per-partition payload of an OffsetCommit v0 request.
data CommitPartitionV0 = CommitPartitionV0
  { commitPartitionV0Partition :: !PartitionId
  , commitPartitionV0Offset    :: !Int64
  , commitPartitionV0Metadata  :: !Utf8
  } deriving (Eq, Show, Generic)

instance Binary CommitPartitionV0 where
  get = CommitPartitionV0 <$> get <*> get <*> get
  -- Wire order: partition, offset, metadata.
  put (CommitPartitionV0 p o m) =
    put p >> put o >> put m

instance ByteSize CommitPartitionV0 where
  byteSize (CommitPartitionV0 p o m) =
    byteSize p + byteSize o + byteSize m

-- | Per-topic group of partition commits (v0).
data CommitV0 = CommitV0
  { commitV0Topic      :: !Utf8
  , commitV0Partitions :: !(V.Vector CommitPartitionV0)
  } deriving (Eq, Show, Generic)

instance Binary CommitV0 where
  -- The partition vector is wrapped in 'Array' on the wire.
  get = CommitV0 <$> get <*> (fromArray <$> get)
  put (CommitV0 t ps) =
    put t >> put (Array ps)

instance ByteSize CommitV0 where
  byteSize (CommitV0 t ps) = byteSize t + byteSize ps

-- | OffsetCommit request, version 0: consumer group plus commits.
data OffsetCommitRequestV0 = OffsetCommitRequestV0
  { offsetCommitRequestV0ConsumerGroup :: !Utf8
  , offsetCommitRequestV0Commits       :: !(V.Vector CommitV0)
  } deriving (Eq, Show, Generic)

instance Binary OffsetCommitRequestV0 where
  get = OffsetCommitRequestV0 <$> get <*> (fromArray <$> get)
  put (OffsetCommitRequestV0 grp cs) =
    put grp >> put (Array cs)

instance ByteSize OffsetCommitRequestV0 where
  byteSize (OffsetCommitRequestV0 grp cs) =
    byteSize grp + byteSize cs
-- | Per-partition payload of an OffsetCommit v1 request; v1 adds a
-- per-partition timestamp relative to v0.
data CommitPartitionV1 = CommitPartitionV1
  { commitPartitionV1Partition :: !PartitionId
  , commitPartitionV1Offset    :: !Int64
  , commitPartitionV1Timestamp :: !Int64
  , commitPartitionV1Metadata  :: !Utf8
  } deriving (Eq, Show, Generic)

instance Binary CommitPartitionV1 where
  get = CommitPartitionV1 <$> get <*> get <*> get <*> get
  -- Wire order: partition, offset, timestamp, metadata.
  put (CommitPartitionV1 p o ts m) =
    put p >> put o >> put ts >> put m

instance ByteSize CommitPartitionV1 where
  byteSize (CommitPartitionV1 p o ts m) =
    byteSize p + byteSize o + byteSize ts + byteSize m

-- | Per-topic group of partition commits (v1).
data CommitV1 = CommitV1
  { commitV1Topic      :: !Utf8
  , commitV1Partitions :: !(V.Vector CommitPartitionV1)
  } deriving (Eq, Show, Generic)

instance Binary CommitV1 where
  get = CommitV1 <$> get <*> (fromArray <$> get)
  put (CommitV1 t ps) =
    put t >> put (Array ps)

instance ByteSize CommitV1 where
  byteSize (CommitV1 t ps) = byteSize t + byteSize ps

-- | OffsetCommit request, version 1: v0 plus group generation and
-- consumer id.
data OffsetCommitRequestV1 = OffsetCommitRequestV1
  { offsetCommitRequestV1ConsumerGroup :: !Utf8
  , offsetCommitRequestV1Generation    :: !GenerationId
  , offsetCommitRequestV1Consumer      :: !ConsumerId
  , offsetCommitRequestV1Commits       :: !(V.Vector CommitV1)
  } deriving (Eq, Show, Generic)

instance Binary OffsetCommitRequestV1 where
  get = OffsetCommitRequestV1 <$> get <*> get <*> get <*> (fromArray <$> get)
  put (OffsetCommitRequestV1 grp gen cid cs) =
    put grp >> put gen >> put cid >> put (Array cs)

instance ByteSize OffsetCommitRequestV1 where
  byteSize (OffsetCommitRequestV1 grp gen cid cs) =
    byteSize grp + byteSize gen + byteSize cid + byteSize cs
-- | Per-partition payload of an OffsetCommit v2 request; v2 drops the
-- per-partition timestamp again (cf. v1).
data CommitPartitionV2 = CommitPartitionV2
  { commitPartitionV2Partition :: !PartitionId
  , commitPartitionV2Offset    :: !Int64
  , commitPartitionV2Metadata  :: !Utf8
  } deriving (Eq, Show, Generic)

instance Binary CommitPartitionV2 where
  get = CommitPartitionV2 <$> get <*> get <*> get
  -- Wire order: partition, offset, metadata.
  put (CommitPartitionV2 p o m) =
    put p >> put o >> put m

instance ByteSize CommitPartitionV2 where
  byteSize (CommitPartitionV2 p o m) =
    byteSize p + byteSize o + byteSize m

-- | Per-topic group of partition commits (v2).
data CommitV2 = CommitV2
  { commitV2Topic      :: !Utf8
  , commitV2Partitions :: !(V.Vector CommitPartitionV2)
  } deriving (Eq, Show, Generic)

instance Binary CommitV2 where
  get = CommitV2 <$> get <*> (fromArray <$> get)
  put (CommitV2 t ps) =
    put t >> put (Array ps)

instance ByteSize CommitV2 where
  byteSize (CommitV2 t ps) = byteSize t + byteSize ps

-- | OffsetCommit request, version 2: v1 plus a request-level
-- retention time.
data OffsetCommitRequestV2 = OffsetCommitRequestV2
  { offsetCommitRequestV2ConsumerGroup :: !Utf8
  , offsetCommitRequestV2Generation    :: !GenerationId
  , offsetCommitRequestV2Consumer      :: !ConsumerId
  , offsetCommitRequestV2RetentionTime :: !Int64
  , offsetCommitRequestV2Commits       :: !(V.Vector CommitV2)
  } deriving (Eq, Show, Generic)

instance Binary OffsetCommitRequestV2 where
  get = OffsetCommitRequestV2 <$> get <*> get <*> get <*> get <*> (fromArray <$> get)
  put (OffsetCommitRequestV2 grp gen cid ret cs) =
    put grp >> put gen >> put cid >> put ret >> put (Array cs)

instance ByteSize OffsetCommitRequestV2 where
  byteSize (OffsetCommitRequestV2 grp gen cid ret cs) =
    byteSize grp + byteSize gen + byteSize cid + byteSize ret + byteSize cs
-- | Per-partition outcome of an OffsetCommit (shared by all response
-- versions).
data CommitPartitionResult = CommitPartitionResult
  { commitPartitionResultPartition :: !PartitionId
  , commitPartitionResultErrorCode :: !ErrorCode
  } deriving (Eq, Show, Generic)

instance Binary CommitPartitionResult where
  get = CommitPartitionResult <$> get <*> get
  put (CommitPartitionResult p ec) =
    put p >> put ec

instance ByteSize CommitPartitionResult where
  byteSize (CommitPartitionResult p ec) =
    byteSize p + byteSize ec

-- | Per-topic group of partition results.
data CommitTopicResult = CommitTopicResult
  { commitTopicResultTopic   :: !Utf8
  , commitTopicResultResults :: !(V.Vector CommitPartitionResult)
  } deriving (Eq, Show, Generic)

instance Binary CommitTopicResult where
  -- Note: results use 'FixedArray', unlike the request side's 'Array';
  -- 'byteSize' below wraps accordingly to stay in sync with 'put'.
  get = CommitTopicResult <$> get <*> (fromFixedArray <$> get)
  put (CommitTopicResult t rs) =
    put t >> put (FixedArray rs)

instance ByteSize CommitTopicResult where
  byteSize (CommitTopicResult t rs) =
    byteSize t + byteSize (FixedArray rs)
-- | OffsetCommit responses: each version is just a vector of
-- per-topic results, encoded as an 'Array'.
newtype OffsetCommitResponseV0 = OffsetCommitResponseV0
  { offsetCommitResponseV0Results :: V.Vector CommitTopicResult
  } deriving (Eq, Show, Generic)

instance Binary OffsetCommitResponseV0 where
  get = OffsetCommitResponseV0 . fromArray <$> get
  put (OffsetCommitResponseV0 rs) = put (Array rs)

instance ByteSize OffsetCommitResponseV0 where
  byteSize (OffsetCommitResponseV0 rs) = byteSize rs

newtype OffsetCommitResponseV1 = OffsetCommitResponseV1
  { offsetCommitResponseV1Results :: V.Vector CommitTopicResult
  } deriving (Eq, Show, Generic)

instance Binary OffsetCommitResponseV1 where
  get = OffsetCommitResponseV1 . fromArray <$> get
  put (OffsetCommitResponseV1 rs) = put (Array rs)

instance ByteSize OffsetCommitResponseV1 where
  byteSize (OffsetCommitResponseV1 rs) = byteSize rs

newtype OffsetCommitResponseV2 = OffsetCommitResponseV2
  { offsetCommitResponseV2Results :: V.Vector CommitTopicResult
  } deriving (Eq, Show, Generic)

instance Binary OffsetCommitResponseV2 where
  get = OffsetCommitResponseV2 . fromArray <$> get
  put (OffsetCommitResponseV2 rs) = put (Array rs)

instance ByteSize OffsetCommitResponseV2 where
  byteSize (OffsetCommitResponseV2 rs) = byteSize rs
-- All OffsetCommit request versions share Kafka API key 8; each
-- request type reports its own protocol version number.
instance RequestApiKey OffsetCommitRequestV0 where
  apiKey = theApiKey 8

instance RequestApiVersion OffsetCommitRequestV0 where
  apiVersion = const 0

instance RequestApiKey OffsetCommitRequestV1 where
  apiKey = theApiKey 8

instance RequestApiVersion OffsetCommitRequestV1 where
  apiVersion = const 1

instance RequestApiKey OffsetCommitRequestV2 where
  apiKey = theApiKey 8

instance RequestApiVersion OffsetCommitRequestV2 where
  apiVersion = const 2
|
iand675/hs-kafka
|
src/Network/Kafka/Primitive/OffsetCommit.hs
|
bsd-3-clause
| 9,205
| 0
| 12
| 1,801
| 2,216
| 1,121
| 1,095
| 270
| 0
|
{-# LANGUAGE MultiParamTypeClasses, DeriveDataTypeable #-}
module Reactor.Task
( Task
, run
, spawn
, io
) where
import Control.Applicative
import Control.Monad
import Control.Exception
import Control.Monad.Reader.Class
import Control.Monad.Error.Class
import Control.Monad.IO.Class
import Data.Array.IO
import Data.Functor.Bind
import Data.Functor.Plus
import Reactor.Deque (Deque)
import Data.Data
import qualified Reactor.Deque as Deque
-- | Scheduler state shared by tasks: the deque of pending tasks.
newtype Env = Env { envDeque :: Deque IOArray (Task ()) }

-- | Allocate a fresh environment with an empty task deque.
mkEnv :: IO Env
mkEnv = Env <$> Deque.empty

-- | A CPS-encoded task: given a success continuation, a failure
-- continuation (for 'SomeException'), and the scheduler environment,
-- produce the IO action that runs the task.
newtype Task a = Task
  { runTask :: (a -> IO ()) ->
               (SomeException -> IO ()) ->
               (Env -> IO ())
  } deriving Typeable
-- The instances below only thread the success continuation @ks@, the
-- failure continuation @kf@, and the environment @e@ through the CPS
-- encoding; none of them perform IO themselves.

instance Functor Task where
  fmap f (Task m) = Task $ \ks -> m (ks . f)

instance Apply Task where
  Task mf <.> Task ma = Task $ \ks kf e -> mf (\f -> ma (ks . f) kf e) kf e

instance Applicative Task where
  pure a = Task (\ks _kf _e -> ks a)
  (<*>) = (<.>)

instance Bind Task where
  Task mf >>- k = Task (\ks kf e -> mf (\a -> runTask (k a) ks kf e) kf e)

instance Monad Task where
  return = pure
  (>>=) = (>>-)

instance MonadReader Env Task where
  -- 'ask' hands the environment to the success continuation.
  ask = Task (\ks _kf e -> ks e)
  local f (Task ma) = Task (\ks kf e -> ma ks kf (f e))

instance MonadIO Task where
  liftIO = io

-- | Lift an IO action into 'Task'.
-- NOTE(review): @act@ runs outside any handler here, so an exception
-- it throws bypasses the task's failure continuation — confirm this
-- is the intended behaviour (cf. 'catchError').
io :: IO a -> Task a
io act = Task (\ks _kf _e -> act >>= ks)

instance MonadError SomeException Task where
  throwError err = Task (\_ks kf _e -> kf err)
  -- On failure of @m@, run the handler with the same continuations.
  catchError (Task m) h = Task (\ks kf e -> m ks (\err -> runTask (h err) ks kf e) e)

-- '<!>' tries the left task and falls back to the right one if the
-- left invokes its failure continuation.
instance Alt Task where
  Task ma <!> Task mb = Task (\ks kf e -> ma ks (\_ -> mb ks kf e) e)

instance Plus Task where
  zero = Task (\_ks kf _e -> kf (toException (ErrorCall "empty")))

instance Alternative Task where
  (<|>) = (<!>)
  empty = zero

instance MonadPlus Task where
  mzero = zero
  mplus = (<!>)
-- | Queue a task on the local deque, to be run later by the pump.
-- NOTE(review): the success continuation @_ks@ is never invoked, so
-- anything sequenced after 'spawn' in the current task is dropped —
-- confirm whether 'spawn' is meant to end the current task or should
-- call @ks ()@ after pushing.
spawn :: Task () -> Task ()
spawn task = Task (\_ks _kf e -> Deque.push task (envDeque e))
-- run a single threaded pump, all tasks are placed locally
-- | Run a task (and everything it spawns) to completion on the
-- current thread.
--
-- BUG FIX: 'bracket_' takes @before@, @after@, @thing@ in that order.
-- The original call passed the pump ('go env task0') as the *cleanup*
-- action and 'unregister' as the guarded computation, so the pump ran
-- outside the bracket's protection.  Now: register, run the pump,
-- always unregister.
run :: Task () -> IO ()
run task0 = do
    env <- mkEnv
    bracket_
      (register env)
      (unregister env)
      (go env task0)
  where
    go :: Env -> Task () -> IO ()
    go env (Task m) = m (success env) (failure env) env
    -- When a task finishes, pop and run the next queued task, if any.
    success env _ = Deque.pop (envDeque env) >>= maybe (return ()) (go env)
    -- Rethrow from IO with 'throwIO' (precise ordering) rather than
    -- the lazy 'throw'.
    failure _env = throwIO -- TODO: shut down workers?
    register _env = return () -- TODO: start up if necessary and tell worker threads about us
    unregister _env = return () -- TODO: shutdown if necessary and tell worker threads about us
|
ekmett/reactor
|
Reactor/Task.hs
|
bsd-3-clause
| 2,556
| 0
| 14
| 615
| 1,083
| 571
| 512
| 73
| 1
|
-- | Thin re-export shim: the whole implementation (Bernstein
-- coefficient transforms, sign-variation counting, root finding)
-- lives in "AERN2.Poly.Power.RootsIntVector".
module AERN2.Poly.Power.RootsInt
(
  initialBernsteinCoefs
, bernsteinCoefs
, signVars
, reflect
, contract
, translate
, transform
, findRoots
--, reduce
, Terms
)
where

import AERN2.Poly.Power.RootsIntVector
|
michalkonecny/aern2
|
aern2-fun-univariate/src/AERN2/Poly/Power/RootsInt.hs
|
bsd-3-clause
| 231
| 0
| 4
| 49
| 45
| 31
| 14
| 12
| 0
|
{-# LANGUAGE CPP #-}
#ifdef STRICT
import Data.Map.Strict as Data.Map
#else
import Data.Map.Lazy as Data.Map
#endif
import Data.Monoid
import Data.Maybe hiding (mapMaybe)
import qualified Data.Maybe as Maybe (mapMaybe)
import Data.Ord
import Data.Function
import Prelude hiding (lookup, null, map, filter, foldr, foldl)
import qualified Prelude (map)
import Data.List (nub,sort)
import qualified Data.List as List
import qualified Data.Set
import Test.Framework
import Test.Framework.Providers.HUnit
import Test.Framework.Providers.QuickCheck2
import Test.HUnit hiding (Test, Testable)
import Test.QuickCheck
import Text.Show.Functions ()
default (Int)
-- | Test-suite entry point: registers every HUnit example test and
-- every QuickCheck property with test-framework.
main :: IO ()
main = defaultMain
         [ -- HUnit example tests
           testCase "ticket4242" test_ticket4242
         , testCase "index" test_index
         , testCase "size" test_size
         , testCase "size2" test_size2
         , testCase "member" test_member
         , testCase "notMember" test_notMember
         , testCase "lookup" test_lookup
         , testCase "findWithDefault" test_findWithDefault
         , testCase "lookupLT" test_lookupLT
         , testCase "lookupGT" test_lookupGT
         , testCase "lookupLE" test_lookupLE
         , testCase "lookupGE" test_lookupGE
         , testCase "empty" test_empty
         , testCase "mempty" test_mempty
         , testCase "singleton" test_singleton
         , testCase "insert" test_insert
         , testCase "insertWith" test_insertWith
         , testCase "insertWithKey" test_insertWithKey
         , testCase "insertLookupWithKey" test_insertLookupWithKey
         , testCase "delete" test_delete
         , testCase "adjust" test_adjust
         , testCase "adjustWithKey" test_adjustWithKey
         , testCase "update" test_update
         , testCase "updateWithKey" test_updateWithKey
         , testCase "updateLookupWithKey" test_updateLookupWithKey
         , testCase "alter" test_alter
         , testCase "union" test_union
         , testCase "mappend" test_mappend
         , testCase "unionWith" test_unionWith
         , testCase "unionWithKey" test_unionWithKey
         , testCase "unions" test_unions
         , testCase "mconcat" test_mconcat
         , testCase "unionsWith" test_unionsWith
         , testCase "difference" test_difference
         , testCase "differenceWith" test_differenceWith
         , testCase "differenceWithKey" test_differenceWithKey
         , testCase "intersection" test_intersection
         , testCase "intersectionWith" test_intersectionWith
         , testCase "intersectionWithKey" test_intersectionWithKey
         , testCase "map" test_map
         , testCase "mapWithKey" test_mapWithKey
         , testCase "mapAccum" test_mapAccum
         , testCase "mapAccumWithKey" test_mapAccumWithKey
         , testCase "mapAccumRWithKey" test_mapAccumRWithKey
         , testCase "mapKeys" test_mapKeys
         , testCase "mapKeysWith" test_mapKeysWith
         , testCase "mapKeysMonotonic" test_mapKeysMonotonic
         , testCase "elems" test_elems
         , testCase "keys" test_keys
         , testCase "assocs" test_assocs
         , testCase "keysSet" test_keysSet
         , testCase "fromSet" test_fromSet
         , testCase "toList" test_toList
         , testCase "fromList" test_fromList
         , testCase "fromListWith" test_fromListWith
         , testCase "fromListWithKey" test_fromListWithKey
         , testCase "toAscList" test_toAscList
         , testCase "toDescList" test_toDescList
         , testCase "showTree" test_showTree
         , testCase "showTree'" test_showTree'
         , testCase "fromAscList" test_fromAscList
         , testCase "fromAscListWith" test_fromAscListWith
         , testCase "fromAscListWithKey" test_fromAscListWithKey
         , testCase "fromDistinctAscList" test_fromDistinctAscList
         , testCase "filter" test_filter
         , testCase "filterWithKey" test_filteWithKey -- (sic: "filte")
         , testCase "partition" test_partition
         , testCase "partitionWithKey" test_partitionWithKey
         , testCase "mapMaybe" test_mapMaybe
         , testCase "mapMaybeWithKey" test_mapMaybeWithKey
         , testCase "mapEither" test_mapEither
         , testCase "mapEitherWithKey" test_mapEitherWithKey
         , testCase "split" test_split
         , testCase "splitLookup" test_splitLookup
         , testCase "isSubmapOfBy" test_isSubmapOfBy
         , testCase "isSubmapOf" test_isSubmapOf
         , testCase "isProperSubmapOfBy" test_isProperSubmapOfBy
         , testCase "isProperSubmapOf" test_isProperSubmapOf
         , testCase "lookupIndex" test_lookupIndex
         , testCase "findIndex" test_findIndex
         , testCase "elemAt" test_elemAt
         , testCase "updateAt" test_updateAt
         , testCase "deleteAt" test_deleteAt
         , testCase "findMin" test_findMin
         , testCase "findMax" test_findMax
         , testCase "deleteMin" test_deleteMin
         , testCase "deleteMax" test_deleteMax
         , testCase "deleteFindMin" test_deleteFindMin
         , testCase "deleteFindMax" test_deleteFindMax
         , testCase "updateMin" test_updateMin
         , testCase "updateMax" test_updateMax
         , testCase "updateMinWithKey" test_updateMinWithKey
         , testCase "updateMaxWithKey" test_updateMaxWithKey
         , testCase "minView" test_minView
         , testCase "maxView" test_maxView
         , testCase "minViewWithKey" test_minViewWithKey
         , testCase "maxViewWithKey" test_maxViewWithKey
         , testCase "valid" test_valid
           -- QuickCheck properties
         , testProperty "valid" prop_valid
         , testProperty "insert to singleton" prop_singleton
         , testProperty "insert" prop_insert
         , testProperty "insert then lookup" prop_insertLookup
         , testProperty "insert then delete" prop_insertDelete
         , testProperty "insert then delete2" prop_insertDelete2
         , testProperty "delete non member" prop_deleteNonMember
         , testProperty "deleteMin" prop_deleteMin
         , testProperty "deleteMax" prop_deleteMax
         , testProperty "split" prop_split
         , testProperty "split then join" prop_join
         , testProperty "split then merge" prop_merge
         , testProperty "union" prop_union
         , testProperty "union model" prop_unionModel
         , testProperty "union singleton" prop_unionSingleton
         , testProperty "union associative" prop_unionAssoc
         , testProperty "union+unionWith" prop_unionWith
         , testProperty "unionWith" prop_unionWith2
         , testProperty "union sum" prop_unionSum
         , testProperty "difference" prop_difference
         , testProperty "difference model" prop_differenceModel
         , testProperty "intersection" prop_intersection
         , testProperty "intersection model" prop_intersectionModel
         , testProperty "intersectionWith" prop_intersectionWith
         , testProperty "intersectionWithModel" prop_intersectionWithModel
         , testProperty "intersectionWithKey" prop_intersectionWithKey
         , testProperty "intersectionWithKeyModel" prop_intersectionWithKeyModel
         , testProperty "mergeWithKey model" prop_mergeWithKeyModel
         , testProperty "fromAscList" prop_ordered
         , testProperty "fromList then toList" prop_list
         , testProperty "toDescList" prop_descList
         , testProperty "toAscList+toDescList" prop_ascDescList
         , testProperty "fromList" prop_fromList
         , testProperty "alter" prop_alter
         , testProperty "index" prop_index
         , testProperty "null" prop_null
         , testProperty "member" prop_member
         , testProperty "notmember" prop_notmember
         , testProperty "lookup" prop_lookup
         , testProperty "find" prop_find
         , testProperty "findWithDefault" prop_findWithDefault
         , testProperty "lookupLT" prop_lookupLT
         , testProperty "lookupGT" prop_lookupGT
         , testProperty "lookupLE" prop_lookupLE
         , testProperty "lookupGE" prop_lookupGE
         , testProperty "findIndex" prop_findIndex
         , testProperty "lookupIndex" prop_lookupIndex
         , testProperty "findMin" prop_findMin
         , testProperty "findMax" prop_findMax
         , testProperty "deleteMin" prop_deleteMinModel
         , testProperty "deleteMax" prop_deleteMaxModel
         , testProperty "filter" prop_filter
         , testProperty "partition" prop_partition
         , testProperty "map" prop_map
         , testProperty "fmap" prop_fmap
         , testProperty "mapkeys" prop_mapkeys
         , testProperty "split" prop_splitModel
         , testProperty "foldr" prop_foldr
         , testProperty "foldr'" prop_foldr'
         , testProperty "foldl" prop_foldl
         , testProperty "foldl'" prop_foldl'
         , testProperty "keysSet" prop_keysSet
         , testProperty "fromSet" prop_fromSet
         ]
{--------------------------------------------------------------------
Arbitrary, reasonably balanced trees
--------------------------------------------------------------------}
-- | Generate maps via 'arbtree'.  Keys are 'Enum' so they can be
-- produced from 'Int' indices; the key space is capped at 10^5.
instance (Enum k,Arbitrary a) => Arbitrary (Map k a) where
  arbitrary = sized (arbtree 0 maxkey)
    where maxkey = 10^5
-- | Generate a random tree of roughly size @n@ with keys drawn from
-- @(lo,hi)@, retrying from scratch until 'balanced' accepts it.
arbtree :: (Enum k, Arbitrary a) => Int -> Int -> Int -> Gen (Map k a)
arbtree lo hi n = do t <- gentree lo hi n
                     if balanced t then return t else arbtree lo hi n
  where gentree lo hi n -- deliberately shadows the outer lo/hi/n
          | n <= 0 = return Tip
          | lo >= hi = return Tip
          | otherwise = do{ x <- arbitrary
                          ; i <- choose (lo,hi)
                          ; m <- choose (1,70)
                            -- m picks how the remaining size is split
                            -- between the two subtrees (mostly evenly).
                          ; let (ml,mr) | m==(1::Int)= (1,2)
                                        | m==2 = (2,1)
                                        | m==3 = (1,1)
                                        | otherwise = (2,2)
                          ; l <- gentree lo (i-1) (n `div` ml)
                          ; r <- gentree (i+1) hi (n `div` mr)
                          ; return (bin (toEnum i) x l r)
                          }
------------------------------------------------------------------------
-- Shorthands used throughout the suite.
type UMap = Map Int ()     -- unit values
type IMap = Map Int Int
type SMap = Map Int String
----------------------------------------------------------------
-- Unit tests
----------------------------------------------------------------
-- | Regression for ticket 4242: two 'deleteMin's on this 12-element
-- map must leave a structurally valid tree.
test_ticket4242 :: Assertion
test_ticket4242 = (valid $ deleteMin $ deleteMin $ fromList [ (i, ()) | i <- [0,2,5,1,6,4,8,9,7,11,10,3] :: [Int] ]) @?= True

----------------------------------------------------------------
-- Operators

test_index :: Assertion
test_index = fromList [(5,'a'), (3,'b')] ! 5 @?= 'a'

----------------------------------------------------------------
-- Query

-- NOTE(review): despite the name, this exercises 'null'; 'size' is
-- covered by 'test_size2' below.
test_size :: Assertion
test_size = do
    null (empty) @?= True
    null (singleton 1 'a') @?= False

test_size2 :: Assertion
test_size2 = do
    size empty @?= 0
    size (singleton 1 'a') @?= 1
    size (fromList([(1,'a'), (2,'c'), (3,'b')])) @?= 3

test_member :: Assertion
test_member = do
    member 5 (fromList [(5,'a'), (3,'b')]) @?= True
    member 1 (fromList [(5,'a'), (3,'b')]) @?= False

test_notMember :: Assertion
test_notMember = do
    notMember 5 (fromList [(5,'a'), (3,'b')]) @?= False
    notMember 1 (fromList [(5,'a'), (3,'b')]) @?= True

-- | 'lookup' chained through Maybe: employee -> dept -> country ->
-- currency.
test_lookup :: Assertion
test_lookup = do
    employeeCurrency "John" @?= Just "Euro"
    employeeCurrency "Pete" @?= Nothing
  where
    employeeDept = fromList([("John","Sales"), ("Bob","IT")])
    deptCountry = fromList([("IT","USA"), ("Sales","France")])
    countryCurrency = fromList([("USA", "Dollar"), ("France", "Euro")])
    employeeCurrency :: String -> Maybe String
    employeeCurrency name = do
        dept <- lookup name employeeDept
        country <- lookup dept deptCountry
        lookup country countryCurrency

test_findWithDefault :: Assertion
test_findWithDefault = do
    findWithDefault 'x' 1 (fromList [(5,'a'), (3,'b')]) @?= 'x'
    findWithDefault 'x' 5 (fromList [(5,'a'), (3,'b')]) @?= 'a'

test_lookupLT :: Assertion
test_lookupLT = do
    lookupLT 3 (fromList [(3,'a'), (5,'b')]) @?= Nothing
    lookupLT 4 (fromList [(3,'a'), (5,'b')]) @?= Just (3, 'a')

test_lookupGT :: Assertion
test_lookupGT = do
    lookupGT 4 (fromList [(3,'a'), (5,'b')]) @?= Just (5, 'b')
    lookupGT 5 (fromList [(3,'a'), (5,'b')]) @?= Nothing

test_lookupLE :: Assertion
test_lookupLE = do
    lookupLE 2 (fromList [(3,'a'), (5,'b')]) @?= Nothing
    lookupLE 4 (fromList [(3,'a'), (5,'b')]) @?= Just (3, 'a')
    lookupLE 5 (fromList [(3,'a'), (5,'b')]) @?= Just (5, 'b')

test_lookupGE :: Assertion
test_lookupGE = do
    lookupGE 3 (fromList [(3,'a'), (5,'b')]) @?= Just (3, 'a')
    lookupGE 4 (fromList [(3,'a'), (5,'b')]) @?= Just (5, 'b')
    lookupGE 6 (fromList [(3,'a'), (5,'b')]) @?= Nothing
----------------------------------------------------------------
-- Construction

test_empty :: Assertion
test_empty = do
    (empty :: UMap) @?= fromList []
    size empty @?= 0

test_mempty :: Assertion
test_mempty = do
    (mempty :: UMap) @?= fromList []
    size (mempty :: UMap) @?= 0

test_singleton :: Assertion
test_singleton = do
    singleton 1 'a' @?= fromList [(1, 'a')]
    size (singleton 1 'a') @?= 1

test_insert :: Assertion
test_insert = do
    insert 5 'x' (fromList [(5,'a'), (3,'b')]) @?= fromList [(3, 'b'), (5, 'x')]
    insert 7 'x' (fromList [(5,'a'), (3,'b')]) @?= fromList [(3, 'b'), (5, 'a'), (7, 'x')]
    insert 5 'x' empty @?= singleton 5 'x'

-- | On collision the combining function receives new value first.
test_insertWith :: Assertion
test_insertWith = do
    insertWith (++) 5 "xxx" (fromList [(5,"a"), (3,"b")]) @?= fromList [(3, "b"), (5, "xxxa")]
    insertWith (++) 7 "xxx" (fromList [(5,"a"), (3,"b")]) @?= fromList [(3, "b"), (5, "a"), (7, "xxx")]
    insertWith (++) 5 "xxx" empty @?= singleton 5 "xxx"

test_insertWithKey :: Assertion
test_insertWithKey = do
    insertWithKey f 5 "xxx" (fromList [(5,"a"), (3,"b")]) @?= fromList [(3, "b"), (5, "5:xxx|a")]
    insertWithKey f 7 "xxx" (fromList [(5,"a"), (3,"b")]) @?= fromList [(3, "b"), (5, "a"), (7, "xxx")]
    insertWithKey f 5 "xxx" empty @?= singleton 5 "xxx"
  where
    f key new_value old_value = (show key) ++ ":" ++ new_value ++ "|" ++ old_value

-- | Returns the previous value (if any) alongside the updated map.
test_insertLookupWithKey :: Assertion
test_insertLookupWithKey = do
    insertLookupWithKey f 5 "xxx" (fromList [(5,"a"), (3,"b")]) @?= (Just "a", fromList [(3, "b"), (5, "5:xxx|a")])
    insertLookupWithKey f 2 "xxx" (fromList [(5,"a"), (3,"b")]) @?= (Nothing,fromList [(2,"xxx"),(3,"b"),(5,"a")])
    insertLookupWithKey f 7 "xxx" (fromList [(5,"a"), (3,"b")]) @?= (Nothing, fromList [(3, "b"), (5, "a"), (7, "xxx")])
    insertLookupWithKey f 5 "xxx" empty @?= (Nothing, singleton 5 "xxx")
  where
    f key new_value old_value = (show key) ++ ":" ++ new_value ++ "|" ++ old_value
----------------------------------------------------------------
-- Delete/Update

test_delete :: Assertion
test_delete = do
    delete 5 (fromList [(5,"a"), (3,"b")]) @?= singleton 3 "b"
    delete 7 (fromList [(5,"a"), (3,"b")]) @?= fromList [(3, "b"), (5, "a")]
    delete 5 empty @?= (empty :: IMap)

test_adjust :: Assertion
test_adjust = do
    adjust ("new " ++) 5 (fromList [(5,"a"), (3,"b")]) @?= fromList [(3, "b"), (5, "new a")]
    adjust ("new " ++) 7 (fromList [(5,"a"), (3,"b")]) @?= fromList [(3, "b"), (5, "a")]
    adjust ("new " ++) 7 empty @?= empty

test_adjustWithKey :: Assertion
test_adjustWithKey = do
    adjustWithKey f 5 (fromList [(5,"a"), (3,"b")]) @?= fromList [(3, "b"), (5, "5:new a")]
    adjustWithKey f 7 (fromList [(5,"a"), (3,"b")]) @?= fromList [(3, "b"), (5, "a")]
    adjustWithKey f 7 empty @?= empty
  where
    f key x = (show key) ++ ":new " ++ x

-- | 'update': Nothing from the function deletes the key.
test_update :: Assertion
test_update = do
    update f 5 (fromList [(5,"a"), (3,"b")]) @?= fromList [(3, "b"), (5, "new a")]
    update f 7 (fromList [(5,"a"), (3,"b")]) @?= fromList [(3, "b"), (5, "a")]
    update f 3 (fromList [(5,"a"), (3,"b")]) @?= singleton 5 "a"
  where
    f x = if x == "a" then Just "new a" else Nothing

test_updateWithKey :: Assertion
test_updateWithKey = do
    updateWithKey f 5 (fromList [(5,"a"), (3,"b")]) @?= fromList [(3, "b"), (5, "5:new a")]
    updateWithKey f 7 (fromList [(5,"a"), (3,"b")]) @?= fromList [(3, "b"), (5, "a")]
    updateWithKey f 3 (fromList [(5,"a"), (3,"b")]) @?= singleton 5 "a"
  where
    f k x = if x == "a" then Just ((show k) ++ ":new a") else Nothing

test_updateLookupWithKey :: Assertion
test_updateLookupWithKey = do
    updateLookupWithKey f 5 (fromList [(5,"a"), (3,"b")]) @?= (Just "5:new a", fromList [(3, "b"), (5, "5:new a")])
    updateLookupWithKey f 7 (fromList [(5,"a"), (3,"b")]) @?= (Nothing, fromList [(3, "b"), (5, "a")])
    updateLookupWithKey f 3 (fromList [(5,"a"), (3,"b")]) @?= (Just "b", singleton 5 "a")
  where
    f k x = if x == "a" then Just ((show k) ++ ":new a") else Nothing

-- | 'alter' covers all four cases: absent/present x keep/change.
test_alter :: Assertion
test_alter = do
    alter f 7 (fromList [(5,"a"), (3,"b")]) @?= fromList [(3, "b"), (5, "a")]
    alter f 5 (fromList [(5,"a"), (3,"b")]) @?= singleton 3 "b"
    alter g 7 (fromList [(5,"a"), (3,"b")]) @?= fromList [(3, "b"), (5, "a"), (7, "c")]
    alter g 5 (fromList [(5,"a"), (3,"b")]) @?= fromList [(3, "b"), (5, "c")]
  where
    f _ = Nothing
    g _ = Just "c"
----------------------------------------------------------------
-- Combine

-- 'union' is left-biased: on key collision the left map's value wins.
test_union :: Assertion
test_union = union (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (7, "C")]) @?= fromList [(3, "b"), (5, "a"), (7, "C")]

test_mappend :: Assertion
test_mappend = mappend (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (7, "C")]) @?= fromList [(3, "b"), (5, "a"), (7, "C")]

test_unionWith :: Assertion
test_unionWith = unionWith (++) (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (7, "C")]) @?= fromList [(3, "b"), (5, "aA"), (7, "C")]

test_unionWithKey :: Assertion
test_unionWithKey = unionWithKey f (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (7, "C")]) @?= fromList [(3, "b"), (5, "5:a|A"), (7, "C")]
  where
    f key left_value right_value = (show key) ++ ":" ++ left_value ++ "|" ++ right_value

test_unions :: Assertion
test_unions = do
    unions [(fromList [(5, "a"), (3, "b")]), (fromList [(5, "A"), (7, "C")]), (fromList [(5, "A3"), (3, "B3")])]
        @?= fromList [(3, "b"), (5, "a"), (7, "C")]
    unions [(fromList [(5, "A3"), (3, "B3")]), (fromList [(5, "A"), (7, "C")]), (fromList [(5, "a"), (3, "b")])]
        @?= fromList [(3, "B3"), (5, "A3"), (7, "C")]

test_mconcat :: Assertion
test_mconcat = do
    mconcat [(fromList [(5, "a"), (3, "b")]), (fromList [(5, "A"), (7, "C")]), (fromList [(5, "A3"), (3, "B3")])]
        @?= fromList [(3, "b"), (5, "a"), (7, "C")]
    mconcat [(fromList [(5, "A3"), (3, "B3")]), (fromList [(5, "A"), (7, "C")]), (fromList [(5, "a"), (3, "b")])]
        @?= fromList [(3, "B3"), (5, "A3"), (7, "C")]

test_unionsWith :: Assertion
test_unionsWith = unionsWith (++) [(fromList [(5, "a"), (3, "b")]), (fromList [(5, "A"), (7, "C")]), (fromList [(5, "A3"), (3, "B3")])]
    @?= fromList [(3, "bB3"), (5, "aAA3"), (7, "C")]

test_difference :: Assertion
test_difference = difference (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (7, "C")]) @?= singleton 3 "b"

test_differenceWith :: Assertion
test_differenceWith = differenceWith f (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (3, "B"), (7, "C")])
    @?= singleton 3 "b:B"
  where
    f al ar = if al== "b" then Just (al ++ ":" ++ ar) else Nothing

test_differenceWithKey :: Assertion
test_differenceWithKey = differenceWithKey f (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (3, "B"), (10, "C")])
    @?= singleton 3 "3:b|B"
  where
    f k al ar = if al == "b" then Just ((show k) ++ ":" ++ al ++ "|" ++ ar) else Nothing

test_intersection :: Assertion
test_intersection = intersection (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (7, "C")]) @?= singleton 5 "a"

test_intersectionWith :: Assertion
test_intersectionWith = intersectionWith (++) (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (7, "C")]) @?= singleton 5 "aA"

test_intersectionWithKey :: Assertion
test_intersectionWithKey = intersectionWithKey f (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (7, "C")]) @?= singleton 5 "5:a|A"
  where
    f k al ar = (show k) ++ ":" ++ al ++ "|" ++ ar
----------------------------------------------------------------
-- Traversal
-- Unit tests for map/mapAccum/mapKeys and friends on small literal maps.

test_map :: Assertion
test_map = map (++ "x") (fromList [(5,"a"), (3,"b")]) @?= fromList [(3, "bx"), (5, "ax")]

test_mapWithKey :: Assertion
test_mapWithKey = mapWithKey f (fromList [(5,"a"), (3,"b")]) @?= fromList [(3, "3:b"), (5, "5:a")]
  where
    f key x = (show key) ++ ":" ++ x

test_mapAccum :: Assertion
test_mapAccum = mapAccum f "Everything: " (fromList [(5,"a"), (3,"b")]) @?= ("Everything: ba", fromList [(3, "bX"), (5, "aX")])
  where
    -- threads the accumulator left-to-right in ascending key order
    f a b = (a ++ b, b ++ "X")

test_mapAccumWithKey :: Assertion
test_mapAccumWithKey = mapAccumWithKey f "Everything:" (fromList [(5,"a"), (3,"b")]) @?= ("Everything: 3-b 5-a", fromList [(3, "bX"), (5, "aX")])
  where
    f a k b = (a ++ " " ++ (show k) ++ "-" ++ b, b ++ "X")

test_mapAccumRWithKey :: Assertion
test_mapAccumRWithKey = mapAccumRWithKey f "Everything:" (fromList [(5,"a"), (3,"b")]) @?= ("Everything: 5-a 3-b", fromList [(3, "bX"), (5, "aX")])
  where
    -- the R variant visits keys in descending order
    f a k b = (a ++ " " ++ (show k) ++ "-" ++ b, b ++ "X")

test_mapKeys :: Assertion
test_mapKeys = do
    mapKeys (+ 1) (fromList [(5,"a"), (3,"b")]) @?= fromList [(4, "b"), (6, "a")]
    -- on key collisions the value of the greatest original key survives
    mapKeys (\ _ -> 1) (fromList [(1,"b"), (2,"a"), (3,"d"), (4,"c")]) @?= singleton 1 "c"
    mapKeys (\ _ -> 3) (fromList [(1,"b"), (2,"a"), (3,"d"), (4,"c")]) @?= singleton 3 "c"

test_mapKeysWith :: Assertion
test_mapKeysWith = do
    mapKeysWith (++) (\ _ -> 1) (fromList [(1,"b"), (2,"a"), (3,"d"), (4,"c")]) @?= singleton 1 "cdab"
    mapKeysWith (++) (\ _ -> 3) (fromList [(1,"b"), (2,"a"), (3,"d"), (4,"c")]) @?= singleton 3 "cdab"

test_mapKeysMonotonic :: Assertion
test_mapKeysMonotonic = do
    mapKeysMonotonic (+ 1) (fromList [(5,"a"), (3,"b")]) @?= fromList [(4, "b"), (6, "a")]
    mapKeysMonotonic (\ k -> k * 2) (fromList [(5,"a"), (3,"b")]) @?= fromList [(6, "b"), (10, "a")]
    -- a non-monotonic key function silently breaks the tree invariants
    valid (mapKeysMonotonic (\ k -> k * 2) (fromList [(5,"a"), (3,"b")])) @?= True
    valid (mapKeysMonotonic (\ _ -> 1) (fromList [(5,"a"), (3,"b")])) @?= False
----------------------------------------------------------------
-- Conversion
-- Extraction into lists/sets; results are always in ascending key order.

test_elems :: Assertion
test_elems = do
    elems (fromList [(5,"a"), (3,"b")]) @?= ["b","a"]
    elems (empty :: UMap) @?= []

test_keys :: Assertion
test_keys = do
    keys (fromList [(5,"a"), (3,"b")]) @?= [3,5]
    keys (empty :: UMap) @?= []

test_assocs :: Assertion
test_assocs = do
    assocs (fromList [(5,"a"), (3,"b")]) @?= [(3,"b"), (5,"a")]
    assocs (empty :: UMap) @?= []

test_keysSet :: Assertion
test_keysSet = do
    keysSet (fromList [(5,"a"), (3,"b")]) @?= Data.Set.fromList [3,5]
    keysSet (empty :: UMap) @?= Data.Set.empty

test_fromSet :: Assertion
test_fromSet = do
    fromSet (\k -> replicate k 'a') (Data.Set.fromList [3, 5]) @?= fromList [(5,"aaaaa"), (3,"aaa")]
    -- the value function must never be forced for an empty set
    fromSet undefined Data.Set.empty @?= (empty :: IMap)

----------------------------------------------------------------
-- Lists
test_toList :: Assertion
test_toList = do
    toList (fromList [(5,"a"), (3,"b")]) @?= [(3,"b"), (5,"a")]
    toList (empty :: SMap) @?= []

test_fromList :: Assertion
test_fromList = do
    -- on duplicate keys the last occurrence in the list wins
    fromList [] @?= (empty :: SMap)
    fromList [(5,"a"), (3,"b"), (5, "c")] @?= fromList [(5,"c"), (3,"b")]
    fromList [(5,"c"), (3,"b"), (5, "a")] @?= fromList [(5,"a"), (3,"b")]

test_fromListWith :: Assertion
test_fromListWith = do
    fromListWith (++) [(5,"a"), (5,"b"), (3,"b"), (3,"a"), (5,"a")] @?= fromList [(3, "ab"), (5, "aba")]
    fromListWith (++) [] @?= (empty :: SMap)

test_fromListWithKey :: Assertion
test_fromListWithKey = do
    fromListWithKey f [(5,"a"), (5,"b"), (3,"b"), (3,"a"), (5,"a")] @?= fromList [(3, "3ab"), (5, "5a5ba")]
    fromListWithKey f [] @?= (empty :: SMap)
  where
    f k a1 a2 = (show k) ++ a1 ++ a2

----------------------------------------------------------------
-- Ordered lists
test_toAscList :: Assertion
test_toAscList = toAscList (fromList [(5,"a"), (3,"b")]) @?= [(3,"b"), (5,"a")]

test_toDescList :: Assertion
test_toDescList = toDescList (fromList [(5,"a"), (3,"b")]) @?= [(5,"a"), (3,"b")]

-- Golden tests for the ASCII tree renderings; the expected strings
-- encode the exact balanced shape produced by 'fromDistinctAscList'.
test_showTree :: Assertion
test_showTree =
    (let t = fromDistinctAscList [(x,()) | x <- [1..5]]
     in showTree t) @?= "4:=()\n+--2:=()\n|  +--1:=()\n|  +--3:=()\n+--5:=()\n"

test_showTree' :: Assertion
test_showTree' =
    (let t = fromDistinctAscList [(x,()) | x <- [1..5]]
     in s t ) @?= "+--5:=()\n|\n4:=()\n|\n|  +--3:=()\n|  |\n+--2:=()\n   |\n   +--1:=()\n"
  where
    showElem k x = show k ++ ":=" ++ show x
    s = showTreeWith showElem False True

test_fromAscList :: Assertion
test_fromAscList = do
    fromAscList [(3,"b"), (5,"a")] @?= fromList [(3, "b"), (5, "a")]
    fromAscList [(3,"b"), (5,"a"), (5,"b")] @?= fromList [(3, "b"), (5, "b")]
    -- the precondition (ascending input) is unchecked: violating it
    -- yields an invalid map rather than an error
    valid (fromAscList [(3,"b"), (5,"a"), (5,"b")]) @?= True
    valid (fromAscList [(5,"a"), (3,"b"), (5,"b")]) @?= False

test_fromAscListWith :: Assertion
test_fromAscListWith = do
    fromAscListWith (++) [(3,"b"), (5,"a"), (5,"b")] @?= fromList [(3, "b"), (5, "ba")]
    valid (fromAscListWith (++) [(3,"b"), (5,"a"), (5,"b")]) @?= True
    valid (fromAscListWith (++) [(5,"a"), (3,"b"), (5,"b")]) @?= False

test_fromAscListWithKey :: Assertion
test_fromAscListWithKey = do
    fromAscListWithKey f [(3,"b"), (5,"a"), (5,"b"), (5,"b")] @?= fromList [(3, "b"), (5, "5:b5:ba")]
    valid (fromAscListWithKey f [(3,"b"), (5,"a"), (5,"b"), (5,"b")]) @?= True
    valid (fromAscListWithKey f [(5,"a"), (3,"b"), (5,"b"), (5,"b")]) @?= False
  where
    f k a1 a2 = (show k) ++ ":" ++ a1 ++ a2

test_fromDistinctAscList :: Assertion
test_fromDistinctAscList = do
    fromDistinctAscList [(3,"b"), (5,"a")] @?= fromList [(3, "b"), (5, "a")]
    valid (fromDistinctAscList [(3,"b"), (5,"a")]) @?= True
    -- duplicates violate the "distinct" precondition and corrupt the map
    valid (fromDistinctAscList [(3,"b"), (5,"a"), (5,"b")]) @?= False
----------------------------------------------------------------
-- Filter
test_filter :: Assertion
test_filter = do
    filter (> "a") (fromList [(5,"a"), (3,"b")]) @?= singleton 3 "b"
    filter (> "x") (fromList [(5,"a"), (3,"b")]) @?= empty
    filter (< "a") (fromList [(5,"a"), (3,"b")]) @?= empty

-- NOTE(review): name has a typo ("filteWithKey"); kept as-is because
-- it is referenced by name from the test-list elsewhere in this file.
test_filteWithKey :: Assertion
test_filteWithKey = filterWithKey (\k _ -> k > 4) (fromList [(5,"a"), (3,"b")]) @?= singleton 5 "a"

test_partition :: Assertion
test_partition = do
    -- first component: entries satisfying the predicate
    partition (> "a") (fromList [(5,"a"), (3,"b")]) @?= (singleton 3 "b", singleton 5 "a")
    partition (< "x") (fromList [(5,"a"), (3,"b")]) @?= (fromList [(3, "b"), (5, "a")], empty)
    partition (> "x") (fromList [(5,"a"), (3,"b")]) @?= (empty, fromList [(3, "b"), (5, "a")])

test_partitionWithKey :: Assertion
test_partitionWithKey = do
    partitionWithKey (\ k _ -> k > 3) (fromList [(5,"a"), (3,"b")]) @?= (singleton 5 "a", singleton 3 "b")
    partitionWithKey (\ k _ -> k < 7) (fromList [(5,"a"), (3,"b")]) @?= (fromList [(3, "b"), (5, "a")], empty)
    partitionWithKey (\ k _ -> k > 7) (fromList [(5,"a"), (3,"b")]) @?= (empty, fromList [(3, "b"), (5, "a")])

test_mapMaybe :: Assertion
test_mapMaybe = mapMaybe f (fromList [(5,"a"), (3,"b")]) @?= singleton 5 "new a"
  where
    f x = if x == "a" then Just "new a" else Nothing

test_mapMaybeWithKey :: Assertion
test_mapMaybeWithKey = mapMaybeWithKey f (fromList [(5,"a"), (3,"b")]) @?= singleton 3 "key : 3"
  where
    f k _ = if k < 5 then Just ("key : " ++ (show k)) else Nothing

test_mapEither :: Assertion
test_mapEither = do
    -- Lefts land in the first map, Rights in the second
    mapEither f (fromList [(5,"a"), (3,"b"), (1,"x"), (7,"z")])
        @?= (fromList [(3,"b"), (5,"a")], fromList [(1,"x"), (7,"z")])
    mapEither (\ a -> Right a) (fromList [(5,"a"), (3,"b"), (1,"x"), (7,"z")])
        @?= ((empty :: SMap), fromList [(5,"a"), (3,"b"), (1,"x"), (7,"z")])
  where
    f a = if a < "c" then Left a else Right a

test_mapEitherWithKey :: Assertion
test_mapEitherWithKey = do
    mapEitherWithKey f (fromList [(5,"a"), (3,"b"), (1,"x"), (7,"z")])
        @?= (fromList [(1,2), (3,6)], fromList [(5,"aa"), (7,"zz")])
    mapEitherWithKey (\_ a -> Right a) (fromList [(5,"a"), (3,"b"), (1,"x"), (7,"z")])
        @?= ((empty :: SMap), fromList [(1,"x"), (3,"b"), (5,"a"), (7,"z")])
  where
    f k a = if k < 5 then Left (k * 2) else Right (a ++ a)

test_split :: Assertion
test_split = do
    -- the pivot key itself is dropped from both halves
    split 2 (fromList [(5,"a"), (3,"b")]) @?= (empty, fromList [(3,"b"), (5,"a")])
    split 3 (fromList [(5,"a"), (3,"b")]) @?= (empty, singleton 5 "a")
    split 4 (fromList [(5,"a"), (3,"b")]) @?= (singleton 3 "b", singleton 5 "a")
    split 5 (fromList [(5,"a"), (3,"b")]) @?= (singleton 3 "b", empty)
    split 6 (fromList [(5,"a"), (3,"b")]) @?= (fromList [(3,"b"), (5,"a")], empty)

test_splitLookup :: Assertion
test_splitLookup = do
    -- like 'split', but also reports the pivot's value if present
    splitLookup 2 (fromList [(5,"a"), (3,"b")]) @?= (empty, Nothing, fromList [(3,"b"), (5,"a")])
    splitLookup 3 (fromList [(5,"a"), (3,"b")]) @?= (empty, Just "b", singleton 5 "a")
    splitLookup 4 (fromList [(5,"a"), (3,"b")]) @?= (singleton 3 "b", Nothing, singleton 5 "a")
    splitLookup 5 (fromList [(5,"a"), (3,"b")]) @?= (singleton 3 "b", Just "a", empty)
    splitLookup 6 (fromList [(5,"a"), (3,"b")]) @?= (fromList [(3,"b"), (5,"a")], Nothing, empty)
----------------------------------------------------------------
-- Submap
-- A submap relation holds when every key of the first map occurs in the
-- second and the corresponding values are related by the given predicate.

test_isSubmapOfBy :: Assertion
test_isSubmapOfBy = do
    isSubmapOfBy (==) (fromList [('a',1)]) (fromList [('a',1),('b',2)]) @?= True
    isSubmapOfBy (<=) (fromList [('a',1)]) (fromList [('a',1),('b',2)]) @?= True
    isSubmapOfBy (==) (fromList [('a',1),('b',2)]) (fromList [('a',1),('b',2)]) @?= True
    isSubmapOfBy (==) (fromList [('a',2)]) (fromList [('a',1),('b',2)]) @?= False
    isSubmapOfBy (<) (fromList [('a',1)]) (fromList [('a',1),('b',2)]) @?= False
    isSubmapOfBy (==) (fromList [('a',1),('b',2)]) (fromList [('a',1)]) @?= False

test_isSubmapOf :: Assertion
test_isSubmapOf = do
    isSubmapOf (fromList [('a',1)]) (fromList [('a',1),('b',2)]) @?= True
    isSubmapOf (fromList [('a',1),('b',2)]) (fromList [('a',1),('b',2)]) @?= True
    isSubmapOf (fromList [('a',2)]) (fromList [('a',1),('b',2)]) @?= False
    isSubmapOf (fromList [('a',1),('b',2)]) (fromList [('a',1)]) @?= False

test_isProperSubmapOfBy :: Assertion
test_isProperSubmapOfBy = do
    -- "proper" additionally requires the first map to be strictly smaller
    isProperSubmapOfBy (==) (fromList [(1,1)]) (fromList [(1,1),(2,2)]) @?= True
    isProperSubmapOfBy (<=) (fromList [(1,1)]) (fromList [(1,1),(2,2)]) @?= True
    isProperSubmapOfBy (==) (fromList [(1,1),(2,2)]) (fromList [(1,1),(2,2)]) @?= False
    isProperSubmapOfBy (==) (fromList [(1,1),(2,2)]) (fromList [(1,1)]) @?= False
    isProperSubmapOfBy (<) (fromList [(1,1)]) (fromList [(1,1),(2,2)]) @?= False

test_isProperSubmapOf :: Assertion
test_isProperSubmapOf = do
    isProperSubmapOf (fromList [(1,1)]) (fromList [(1,1),(2,2)]) @?= True
    isProperSubmapOf (fromList [(1,1),(2,2)]) (fromList [(1,1),(2,2)]) @?= False
    isProperSubmapOf (fromList [(1,1),(2,2)]) (fromList [(1,1)]) @?= False

----------------------------------------------------------------
-- Indexed
-- Positional access by ascending-key index (0-based).

test_lookupIndex :: Assertion
test_lookupIndex = do
    isJust (lookupIndex 2 (fromList [(5,"a"), (3,"b")])) @?= False
    fromJust (lookupIndex 3 (fromList [(5,"a"), (3,"b")])) @?= 0
    fromJust (lookupIndex 5 (fromList [(5,"a"), (3,"b")])) @?= 1
    isJust (lookupIndex 6 (fromList [(5,"a"), (3,"b")])) @?= False

test_findIndex :: Assertion
test_findIndex = do
    findIndex 3 (fromList [(5,"a"), (3,"b")]) @?= 0
    findIndex 5 (fromList [(5,"a"), (3,"b")]) @?= 1

test_elemAt :: Assertion
test_elemAt = do
    elemAt 0 (fromList [(5,"a"), (3,"b")]) @?= (3,"b")
    elemAt 1 (fromList [(5,"a"), (3,"b")]) @?= (5, "a")

test_updateAt :: Assertion
test_updateAt = do
    updateAt (\ _ _ -> Just "x") 0 (fromList [(5,"a"), (3,"b")]) @?= fromList [(3, "x"), (5, "a")]
    updateAt (\ _ _ -> Just "x") 1 (fromList [(5,"a"), (3,"b")]) @?= fromList [(3, "b"), (5, "x")]
    updateAt (\_ _ -> Nothing) 0 (fromList [(5,"a"), (3,"b")]) @?= singleton 5 "a"
    updateAt (\_ _ -> Nothing) 1 (fromList [(5,"a"), (3,"b")]) @?= singleton 3 "b"
    -- out-of-range index is an error, hence left disabled:
    -- updateAt (\_ _ -> Nothing) 7 (fromList [(5,"a"), (3,"b")]) @?= singleton 3 "b"

test_deleteAt :: Assertion
test_deleteAt = do
    deleteAt 0 (fromList [(5,"a"), (3,"b")]) @?= singleton 5 "a"
    deleteAt 1 (fromList [(5,"a"), (3,"b")]) @?= singleton 3 "b"
----------------------------------------------------------------
-- Min/Max
-- Extremal-element access; the delete* variants are total (no-ops on
-- empty maps), while findMin/findMax on empty maps would error.

test_findMin :: Assertion
test_findMin = findMin (fromList [(5,"a"), (3,"b")]) @?= (3,"b")

test_findMax :: Assertion
test_findMax = findMax (fromList [(5,"a"), (3,"b")]) @?= (5,"a")

test_deleteMin :: Assertion
test_deleteMin = do
    deleteMin (fromList [(5,"a"), (3,"b"), (7,"c")]) @?= fromList [(5,"a"), (7,"c")]
    deleteMin (empty :: SMap) @?= empty

test_deleteMax :: Assertion
test_deleteMax = do
    deleteMax (fromList [(5,"a"), (3,"b"), (7,"c")]) @?= fromList [(3,"b"), (5,"a")]
    deleteMax (empty :: SMap) @?= empty

test_deleteFindMin :: Assertion
test_deleteFindMin = deleteFindMin (fromList [(5,"a"), (3,"b"), (10,"c")]) @?= ((3,"b"), fromList[(5,"a"), (10,"c")])

test_deleteFindMax :: Assertion
test_deleteFindMax = deleteFindMax (fromList [(5,"a"), (3,"b"), (10,"c")]) @?= ((10,"c"), fromList [(3,"b"), (5,"a")])

test_updateMin :: Assertion
test_updateMin = do
    updateMin (\ a -> Just ("X" ++ a)) (fromList [(5,"a"), (3,"b")]) @?= fromList [(3, "Xb"), (5, "a")]
    updateMin (\ _ -> Nothing) (fromList [(5,"a"), (3,"b")]) @?= singleton 5 "a"

test_updateMax :: Assertion
test_updateMax = do
    updateMax (\ a -> Just ("X" ++ a)) (fromList [(5,"a"), (3,"b")]) @?= fromList [(3, "b"), (5, "Xa")]
    updateMax (\ _ -> Nothing) (fromList [(5,"a"), (3,"b")]) @?= singleton 3 "b"

test_updateMinWithKey :: Assertion
test_updateMinWithKey = do
    updateMinWithKey (\ k a -> Just ((show k) ++ ":" ++ a)) (fromList [(5,"a"), (3,"b")]) @?= fromList [(3,"3:b"), (5,"a")]
    updateMinWithKey (\ _ _ -> Nothing) (fromList [(5,"a"), (3,"b")]) @?= singleton 5 "a"

test_updateMaxWithKey :: Assertion
test_updateMaxWithKey = do
    updateMaxWithKey (\ k a -> Just ((show k) ++ ":" ++ a)) (fromList [(5,"a"), (3,"b")]) @?= fromList [(3,"b"), (5,"5:a")]
    updateMaxWithKey (\ _ _ -> Nothing) (fromList [(5,"a"), (3,"b")]) @?= singleton 3 "b"

-- The *View variants are the safe (Maybe-returning) counterparts.

test_minView :: Assertion
test_minView = do
    minView (fromList [(5,"a"), (3,"b")]) @?= Just ("b", singleton 5 "a")
    minView (empty :: SMap) @?= Nothing

test_maxView :: Assertion
test_maxView = do
    maxView (fromList [(5,"a"), (3,"b")]) @?= Just ("a", singleton 3 "b")
    maxView (empty :: SMap) @?= Nothing

test_minViewWithKey :: Assertion
test_minViewWithKey = do
    minViewWithKey (fromList [(5,"a"), (3,"b")]) @?= Just ((3,"b"), singleton 5 "a")
    minViewWithKey (empty :: SMap) @?= Nothing

test_maxViewWithKey :: Assertion
test_maxViewWithKey = do
    maxViewWithKey (fromList [(5,"a"), (3,"b")]) @?= Just ((5,"a"), singleton 3 "b")
    maxViewWithKey (empty :: SMap) @?= Nothing

----------------------------------------------------------------
-- Debug
test_valid :: Assertion
test_valid = do
    valid (fromAscList [(3,"b"), (5,"a")]) @?= True
    valid (fromAscList [(5,"a"), (3,"b")]) @?= False
----------------------------------------------------------------
-- QuickCheck
----------------------------------------------------------------

-- | Every arbitrary map satisfies the balance/ordering invariants.
prop_valid :: UMap -> Bool
prop_valid = valid

-- | Inserting into the empty map builds the same thing as 'singleton'.
prop_singleton :: Int -> Int -> Bool
prop_singleton k x = singleton k x == insert k x empty

-- | Insertion preserves the tree invariants.
prop_insert :: Int -> UMap -> Bool
prop_insert k = valid . insert k ()

-- | A freshly inserted key can always be looked up again.
prop_insertLookup :: Int -> UMap -> Bool
prop_insertLookup k t = isJust (lookup k (insert k () t))

-- | Insert followed by delete of the same key keeps the invariants.
prop_insertDelete :: Int -> UMap -> Bool
prop_insertDelete k t = valid (delete k (insert k () t))

-- | For a key not already present, insert-then-delete is the identity.
prop_insertDelete2 :: Int -> UMap -> Property
prop_insertDelete2 k t = (lookup k t == Nothing) ==> (delete k (insert k () t) == t)

-- | Deleting an absent key is the identity.
prop_deleteNonMember :: Int -> UMap -> Property
prop_deleteNonMember k t = (lookup k t == Nothing) ==> (delete k t == t)

-- | Deleting the minimum twice keeps the invariants (covers small maps).
prop_deleteMin :: UMap -> Bool
prop_deleteMin = valid . deleteMin . deleteMin

-- | Deleting the maximum twice keeps the invariants (covers small maps).
prop_deleteMax :: UMap -> Bool
prop_deleteMax = valid . deleteMax . deleteMax
----------------------------------------------------------------

-- | Splitting at any key must leave two structurally valid halves.
prop_split :: Int -> UMap -> Bool
prop_split k t = valid l && valid r
  where
    (l, r) = split k t

-- | Rejoining the halves of a split (re-inserting the pivot) stays valid.
prop_join :: Int -> UMap -> Bool
prop_join k t = valid (join k () l r)
  where
    (l, r) = split k t

-- | Merging the halves of a split (pivot discarded) stays valid.
prop_merge :: Int -> UMap -> Bool
prop_merge k t = valid (merge l r)
  where
    (l, r) = split k t
----------------------------------------------------------------
-- Properties of the set operations, checked both for structural
-- validity and against a simple list-based model.

prop_union :: UMap -> UMap -> Bool
prop_union t1 t2 = valid (union t1 t2)

prop_unionModel :: [(Int,Int)] -> [(Int,Int)] -> Bool
prop_unionModel xs ys
    = sort (keys (union (fromList xs) (fromList ys)))
      == sort (nub (Prelude.map fst xs ++ Prelude.map fst ys))

prop_unionSingleton :: IMap -> Int -> Int -> Bool
prop_unionSingleton t k x = union (singleton k x) t == insert k x t

prop_unionAssoc :: IMap -> IMap -> IMap -> Bool
prop_unionAssoc t1 t2 t3 = union t1 (union t2 t3) == union (union t1 t2) t3

-- left-biased union equals right-biased unionWith with arguments flipped
prop_unionWith :: IMap -> IMap -> Bool
prop_unionWith t1 t2 = (union t1 t2 == unionWith (\_ y -> y) t2 t1)

prop_unionWith2 :: IMap -> IMap -> Bool
prop_unionWith2 t1 t2 = valid (unionWithKey (\_ x y -> x+y) t1 t2)

-- summing values through unionWith (+) preserves the total sum
prop_unionSum :: [(Int,Int)] -> [(Int,Int)] -> Bool
prop_unionSum xs ys
    = sum (elems (unionWith (+) (fromListWith (+) xs) (fromListWith (+) ys)))
      == (sum (Prelude.map snd xs) + sum (Prelude.map snd ys))

prop_difference :: IMap -> IMap -> Bool
prop_difference t1 t2 = valid (difference t1 t2)

prop_differenceModel :: [(Int,Int)] -> [(Int,Int)] -> Bool
prop_differenceModel xs ys
    = sort (keys (difference (fromListWith (+) xs) (fromListWith (+) ys)))
      == sort ((List.\\) (nub (Prelude.map fst xs)) (nub (Prelude.map fst ys)))

prop_intersection :: IMap -> IMap -> Bool
prop_intersection t1 t2 = valid (intersection t1 t2)

prop_intersectionModel :: [(Int,Int)] -> [(Int,Int)] -> Bool
prop_intersectionModel xs ys
    = sort (keys (intersection (fromListWith (+) xs) (fromListWith (+) ys)))
      == sort (nub ((List.intersect) (Prelude.map fst xs) (Prelude.map fst ys)))

prop_intersectionWith :: (Int -> Int -> Maybe Int) -> IMap -> IMap -> Bool
prop_intersectionWith f t1 t2 = valid (intersectionWith f t1 t2)

-- inputs are de-duplicated on keys first so the list model is unambiguous
prop_intersectionWithModel :: [(Int,Int)] -> [(Int,Int)] -> Bool
prop_intersectionWithModel xs ys
    = toList (intersectionWith f (fromList xs') (fromList ys'))
      == [(kx, f vx vy) | (kx, vx) <- List.sort xs', (ky, vy) <- ys', kx == ky]
  where xs' = List.nubBy ((==) `on` fst) xs
        ys' = List.nubBy ((==) `on` fst) ys
        f l r = l + 2 * r

prop_intersectionWithKey :: (Int -> Int -> Int -> Maybe Int) -> IMap -> IMap -> Bool
prop_intersectionWithKey f t1 t2 = valid (intersectionWithKey f t1 t2)

prop_intersectionWithKeyModel :: [(Int,Int)] -> [(Int,Int)] -> Bool
prop_intersectionWithKeyModel xs ys
    = toList (intersectionWithKey f (fromList xs') (fromList ys'))
      == [(kx, f kx vx vy) | (kx, vx) <- List.sort xs', (ky, vy) <- ys', kx == ky]
  where xs' = List.nubBy ((==) `on` fst) xs
        ys' = List.nubBy ((==) `on` fst) ys
        f k l r = k + 2 * l + 3 * r
-- | Exhaustive model check for 'mergeWithKey': for a small family of
-- combining functions and all four keep/drop choices for the
-- "only-left"/"only-right" tactics, the result must match a naive
-- list-based emulation over the union of the key sets.
prop_mergeWithKeyModel :: [(Int,Int)] -> [(Int,Int)] -> Bool
prop_mergeWithKeyModel xs ys
    = and [ testMergeWithKey f keep_x keep_y
          | f <- [ \_k x1 _x2 -> Just x1
                 , \_k _x1 x2 -> Just x2
                 , \_k _x1 _x2 -> Nothing
                 , \k x1 x2 -> if k `mod` 2 == 0 then Nothing else Just (2 * x1 + 3 * x2)
                 ]
          , keep_x <- [ True, False ]
          , keep_y <- [ True, False ]
          ]
  where xs' = List.nubBy ((==) `on` fst) xs
        ys' = List.nubBy ((==) `on` fst) ys
        xm = fromList xs'
        ym = fromList ys'
        -- run the real 'mergeWithKey' and compare against the emulation
        testMergeWithKey f keep_x keep_y
            = toList (mergeWithKey f (keep keep_x) (keep keep_y) xm ym) == emulateMergeWithKey f keep_x keep_y
          where keep False _ = empty
                keep True m = m
        -- naive reference semantics over the sorted union of all keys
        emulateMergeWithKey f keep_x keep_y
            = Maybe.mapMaybe combine (sort $ List.union (List.map fst xs') (List.map fst ys'))
          where combine k = case (List.lookup k xs', List.lookup k ys') of
                              (Nothing, Just y) -> if keep_y then Just (k, y) else Nothing
                              (Just x, Nothing) -> if keep_x then Just (k, x) else Nothing
                              (Just x, Just y) -> (\v -> (k, v)) `fmap` f k x y
        -- We prevent inlining testMergeWithKey to disable the SpecConstr
        -- optimalization. There are too many call patterns here so several
        -- warnings are issued if testMergeWithKey gets inlined.
        {-# NOINLINE testMergeWithKey #-}
----------------------------------------------------------------
-- List-conversion round-trip properties.

prop_ordered :: Property
prop_ordered
    = forAll (choose (5,100)) $ \n ->
        let xs = [(x,()) | x <- [0..n::Int]]
        in fromAscList xs == fromList xs

prop_list :: [Int] -> Bool
prop_list xs = (sort (nub xs) == [x | (x,()) <- toList (fromList [(x,()) | x <- xs])])

prop_descList :: [Int] -> Bool
prop_descList xs = (reverse (sort (nub xs)) == [x | (x,()) <- toDescList (fromList [(x,()) | x <- xs])])

prop_ascDescList :: [Int] -> Bool
prop_ascDescList xs = toAscList m == reverse (toDescList m)
  where m = fromList $ zip xs $ repeat ()

-- all construction paths (fromList / fromAscList / fromDistinctAscList /
-- repeated insert) must agree
prop_fromList :: [Int] -> Bool
prop_fromList xs
    = case fromList (zip xs xs) of
        t -> t == fromAscList (zip sort_xs sort_xs) &&
             t == fromDistinctAscList (zip nub_sort_xs nub_sort_xs) &&
             t == List.foldr (uncurry insert) empty (zip xs xs)
  where sort_xs = sort xs
        nub_sort_xs = List.map List.head $ List.group sort_xs

----------------------------------------------------------------
-- | 'alter' with a presence-toggling function changes the size by
-- exactly one in the appropriate direction and keeps the tree balanced.
prop_alter :: UMap -> Int -> Bool
prop_alter t k = balanced t' && case lookup k t of
    Just _  -> (size t - 1) == size t' && lookup k t' == Nothing
    Nothing -> (size t + 1) == size t' && lookup k t' /= Nothing
  where
    t' = alter f k t
    f Nothing = Just ()
    f (Just ()) = Nothing
------------------------------------------------------------------------
-- Compare against the list model (after nub on keys)

prop_index :: [Int] -> Property
prop_index xs = length xs > 0 ==>
    let m = fromList (zip xs xs)
    in xs == [ m ! i | i <- xs ]

prop_null :: IMap -> Bool
prop_null m = null m == (size m == 0)

prop_member :: [Int] -> Int -> Bool
prop_member xs n =
    let m = fromList (zip xs xs)
    in all (\k -> k `member` m == (k `elem` xs)) (n : xs)

prop_notmember :: [Int] -> Int -> Bool
prop_notmember xs n =
    let m = fromList (zip xs xs)
    in all (\k -> k `notMember` m == (k `notElem` xs)) (n : xs)

prop_lookup :: [(Int, Int)] -> Int -> Bool
prop_lookup xs n =
    let xs' = List.nubBy ((==) `on` fst) xs
        m = fromList xs'
    in all (\k -> lookup k m == List.lookup k xs') (n : List.map fst xs')

prop_find :: [(Int, Int)] -> Bool
prop_find xs =
    let xs' = List.nubBy ((==) `on` fst) xs
        m = fromList xs'
    in all (\(k, v) -> m ! k == v) xs'

prop_findWithDefault :: [(Int, Int)] -> Int -> Int -> Bool
prop_findWithDefault xs n x =
    let xs' = List.nubBy ((==) `on` fst) xs
        m = fromList xs'
    in all (\k -> findWithDefault x k m == maybe x id (List.lookup k xs')) (n : List.map fst xs')

-- | Shared driver for lookupLT/GT/LE/GE: build a map from every other
-- element of the de-duplicated sorted input (so some probe keys are
-- guaranteed absent) and compare each lookup against the filtered list.
test_lookupSomething :: (Int -> Map Int Int -> Maybe (Int, Int)) -> (Int -> Int -> Bool) -> [(Int, Int)] -> Bool
test_lookupSomething lookup' cmp xs =
    let odd_sorted_xs = filter_odd $ sort $ List.nubBy ((==) `on` fst) xs
        t = fromList odd_sorted_xs
        test k = case List.filter ((`cmp` k) . fst) odd_sorted_xs of
                   [] -> lookup' k t == Nothing
                   cs | 0 `cmp` 1 -> lookup' k t == Just (last cs) -- we want largest such element
                      | otherwise -> lookup' k t == Just (head cs) -- we want smallest such element
    in all test (List.map fst xs)
  where filter_odd [] = []
        filter_odd [_] = []
        filter_odd (_ : o : xs) = o : filter_odd xs

prop_lookupLT :: [(Int, Int)] -> Bool
prop_lookupLT = test_lookupSomething lookupLT (<)

prop_lookupGT :: [(Int, Int)] -> Bool
prop_lookupGT = test_lookupSomething lookupGT (>)

prop_lookupLE :: [(Int, Int)] -> Bool
prop_lookupLE = test_lookupSomething lookupLE (<=)

prop_lookupGE :: [(Int, Int)] -> Bool
prop_lookupGE = test_lookupSomething lookupGE (>=)
-- More list-model properties; each precondition `length ys > 0` guards
-- partial operations (head/findMin/...) on the generated input.

prop_findIndex :: [(Int, Int)] -> Property
prop_findIndex ys = length ys > 0 ==>
    let m = fromList ys
    in findIndex (fst (head ys)) m `seq` True  -- only checks it does not error

prop_lookupIndex :: [(Int, Int)] -> Property
prop_lookupIndex ys = length ys > 0 ==>
    let m = fromList ys
    in isJust (lookupIndex (fst (head ys)) m)

prop_findMin :: [(Int, Int)] -> Property
prop_findMin ys = length ys > 0 ==>
    let xs = List.nubBy ((==) `on` fst) ys
        m = fromList xs
    in findMin m == List.minimumBy (comparing fst) xs

prop_findMax :: [(Int, Int)] -> Property
prop_findMax ys = length ys > 0 ==>
    let xs = List.nubBy ((==) `on` fst) ys
        m = fromList xs
    in findMax m == List.maximumBy (comparing fst) xs

prop_deleteMinModel :: [(Int, Int)] -> Property
prop_deleteMinModel ys = length ys > 0 ==>
    let xs = List.nubBy ((==) `on` fst) ys
        m = fromList xs
    in toAscList (deleteMin m) == tail (sort xs)

prop_deleteMaxModel :: [(Int, Int)] -> Property
prop_deleteMaxModel ys = length ys > 0 ==>
    let xs = List.nubBy ((==) `on` fst) ys
        m = fromList xs
    in toAscList (deleteMax m) == init (sort xs)

prop_filter :: (Int -> Bool) -> [(Int, Int)] -> Property
prop_filter p ys = length ys > 0 ==>
    let xs = List.nubBy ((==) `on` fst) ys
        m = fromList xs
    in filter p m == fromList (List.filter (p . snd) xs)

prop_partition :: (Int -> Bool) -> [(Int, Int)] -> Property
prop_partition p ys = length ys > 0 ==>
    let xs = List.nubBy ((==) `on` fst) ys
        m = fromList xs
    in partition p m == let (a,b) = (List.partition (p . snd) xs) in (fromList a, fromList b)

prop_map :: (Int -> Int) -> [(Int, Int)] -> Property
prop_map f ys = length ys > 0 ==>
    let xs = List.nubBy ((==) `on` fst) ys
        m = fromList xs
    in map f m == fromList [ (a, f b) | (a,b) <- xs ]

prop_fmap :: (Int -> Int) -> [(Int, Int)] -> Property
prop_fmap f ys = length ys > 0 ==>
    let xs = List.nubBy ((==) `on` fst) ys
        m = fromList xs
    in fmap f m == fromList [ (a, f b) | (a,b) <- xs ]

-- the reverse-after-sort models mapKeys' "greatest original key wins"
-- behaviour on key collisions
prop_mapkeys :: (Int -> Int) -> [(Int, Int)] -> Property
prop_mapkeys f ys = length ys > 0 ==>
    let xs = List.nubBy ((==) `on` fst) ys
        m = fromList xs
    in mapKeys f m == (fromList $ List.nubBy ((==) `on` fst) $ reverse [ (f a, b) | (a,b) <- sort xs])

prop_splitModel :: Int -> [(Int, Int)] -> Property
prop_splitModel n ys = length ys > 0 ==>
    let xs = List.nubBy ((==) `on` fst) ys
        (l, r) = split n $ fromList xs
    in toAscList l == sort [(k, v) | (k,v) <- xs, k < n] &&
       toAscList r == sort [(k, v) | (k,v) <- xs, k > n]
-- Fold properties: each fold variant must agree with the corresponding
-- list fold over the (sorted) association list. foldr visits ascending,
-- foldl descending-into-accumulator; the primed versions are the strict
-- variants with identical results.

prop_foldr :: Int -> [(Int, Int)] -> Property
prop_foldr n ys = length ys > 0 ==>
    let xs = List.nubBy ((==) `on` fst) ys
        m = fromList xs
    in foldr (+) n m == List.foldr (+) n (List.map snd xs) &&
       foldr (:) [] m == List.map snd (List.sort xs) &&
       foldrWithKey (\_ a b -> a + b) n m == List.foldr (+) n (List.map snd xs) &&
       foldrWithKey (\k _ b -> k + b) n m == List.foldr (+) n (List.map fst xs) &&
       foldrWithKey (\k x xs -> (k,x):xs) [] m == List.sort xs

prop_foldr' :: Int -> [(Int, Int)] -> Property
prop_foldr' n ys = length ys > 0 ==>
    let xs = List.nubBy ((==) `on` fst) ys
        m = fromList xs
    in foldr' (+) n m == List.foldr (+) n (List.map snd xs) &&
       foldr' (:) [] m == List.map snd (List.sort xs) &&
       foldrWithKey' (\_ a b -> a + b) n m == List.foldr (+) n (List.map snd xs) &&
       foldrWithKey' (\k _ b -> k + b) n m == List.foldr (+) n (List.map fst xs) &&
       foldrWithKey' (\k x xs -> (k,x):xs) [] m == List.sort xs

prop_foldl :: Int -> [(Int, Int)] -> Property
prop_foldl n ys = length ys > 0 ==>
    let xs = List.nubBy ((==) `on` fst) ys
        m = fromList xs
    in foldl (+) n m == List.foldr (+) n (List.map snd xs) &&
       foldl (flip (:)) [] m == reverse (List.map snd (List.sort xs)) &&
       foldlWithKey (\b _ a -> a + b) n m == List.foldr (+) n (List.map snd xs) &&
       foldlWithKey (\b k _ -> k + b) n m == List.foldr (+) n (List.map fst xs) &&
       foldlWithKey (\xs k x -> (k,x):xs) [] m == reverse (List.sort xs)

prop_foldl' :: Int -> [(Int, Int)] -> Property
prop_foldl' n ys = length ys > 0 ==>
    let xs = List.nubBy ((==) `on` fst) ys
        m = fromList xs
    in foldl' (+) n m == List.foldr (+) n (List.map snd xs) &&
       foldl' (flip (:)) [] m == reverse (List.map snd (List.sort xs)) &&
       foldlWithKey' (\b _ a -> a + b) n m == List.foldr (+) n (List.map snd xs) &&
       foldlWithKey' (\b k _ -> k + b) n m == List.foldr (+) n (List.map fst xs) &&
       foldlWithKey' (\xs k x -> (k,x):xs) [] m == reverse (List.sort xs)

prop_keysSet :: [(Int, Int)] -> Bool
prop_keysSet xs =
    keysSet (fromList xs) == Data.Set.fromList (List.map fst xs)

-- fromSet over the key set with the original lookup reconstructs the map
prop_fromSet :: [(Int, Int)] -> Bool
prop_fromSet ys =
    let xs = List.nubBy ((==) `on` fst) ys
    in fromSet (\k -> fromJust $ List.lookup k xs) (Data.Set.fromList $ List.map fst xs) == fromList xs
|
ekmett/containers
|
tests/map-properties.hs
|
bsd-3-clause
| 50,903
| 580
| 20
| 12,086
| 21,456
| 12,018
| 9,438
| 910
| 7
|
{-# LANGUAGE CPP,TemplateHaskell,DeriveDataTypeable #-}
-- | ISO 8859-5 (Latin/Cyrillic) single-byte character encoding.
--
-- The encode/decode tables are generated at compile time by Template
-- Haskell splices reading the Unicode mapping file @8859-5.TXT@ (the
-- splices are hidden from Haddock via the @__HADDOCK__@ CPP guard).
module Data.Encoding.ISO88595
    (ISO88595(..)) where

import Data.Array ((!),Array)
import Data.Word (Word8)
import Data.Map (Map,lookup,member)
import Data.Encoding.Base
import Prelude hiding (lookup)
import Control.OldException (throwDyn)
import Data.Typeable

-- | Witness type selecting the ISO 8859-5 encoding.
data ISO88595 = ISO88595 deriving (Eq,Show,Typeable)

instance Encoding ISO88595 where
    -- Throws 'HasNoRepresentation' (as a dynamic exception) for
    -- characters outside the ISO 8859-5 repertoire.
    encode _ = encodeSinglebyte (\c -> case lookup c encodeMap of
                                           Just v -> v
                                           Nothing -> throwDyn (HasNoRepresentation c))
    encodable _ c = member c encodeMap
    -- Decoding is total: every byte maps to some character via the array.
    decode _ = decodeSinglebyte (decodeArr!)

-- | Byte-to-character table, built at compile time from @8859-5.TXT@.
decodeArr :: Array Word8 Char
#ifndef __HADDOCK__
decodeArr = $(decodingArray "8859-5.TXT")
#endif

-- | Character-to-byte table, built at compile time from @8859-5.TXT@.
encodeMap :: Map Char Word8
#ifndef __HADDOCK__
encodeMap = $(encodingMap "8859-5.TXT")
#endif
|
abuiles/turbinado-blog
|
tmp/dependencies/encoding-0.4.1/Data/Encoding/ISO88595.hs
|
bsd-3-clause
| 804
| 2
| 14
| 108
| 250
| 140
| 110
| 21
| 1
|
module Test where
import WCC.Utils --(fileToMatrix)
import WCC.Parse --(parseFile)
import Png(png)
import WCC.Types
import WCC
import Control.Applicative ((<$>))
import qualified Data.ByteString.Lazy as B
import System.Process(system)
import Control.Monad(when)
import System.Exit(ExitCode (ExitFailure),exitWith)
import Math.Statistics(pearson)
-- Historical scratch definitions, kept for reference:
-- r = replicate 120 (255,0,0)
-- b = replicate 120 (0,255,0)
-- g = replicate 120 (0,0,255)
-- imgData = replicate 40 r ++ replicate 40 b ++ replicate 40 g
-- img = png imgData
-- orgParams = Param { windowIncrement = 20
--                   , windowSize = 120
--                   , maxLag = 400
--                   , lagIncrement = 10
--                   }
--testData = concat . replicate 20 $ replicate 15 1 ++ replicate 5 0

-- | Two-band test image: 20 rows of red above 20 rows of green.
testImg = repl (255,0,0) ++ repl (0,255,0) -- ++ repl (0,0,0) ++ repl (0,0,0)

-- | A band of 20 rows, each 41 pixels of the given colour.
repl = replicate 20 . replicate 41

-- | Minimal parameter set for quick experiments.
testParams' = Param 1 2 2 1

-- | Alternating 0/1 signal of length 16.
testData' = take 16 . cycle $ [0,1]

-- | Render a 'Param' record as command-line flags for the C binary.
paramtoC (Param wInc wMax tSteps tInc) = unwords ["-wInc",show wInc
                                                 ,"-wMax",show wMax
                                                 ,"-tMax",show (tInc*tSteps)
                                                 ,"-tInc",show tInc
                                                 ]

--params = Param 20 120 400 10
testFile = "example_dyad_data.txt"

-- | Path to the reference C implementation.
-- NOTE(review): machine-specific absolute path; will not exist elsewhere.
wcc_C = "/home/rune/Dropbox/wcc/c/windcross"

--testImg = repl (255,0,0) ++ repl (0,255,0) ++ repl (255,255,255) ++ repl (0,0,0)

-- | Correlate a 20-sample window starting at offset 2n against 40
-- subsequent windows of the same signal.
-- NOTE(review): 'testData' is not defined in this module's visible code
-- (the local definition above is commented out); presumably it is
-- re-exported by one of the unqualified WCC imports -- confirm.
row n = map (pearson $ take 20 dat) $ (take 40 $ wccSplit 20 2 dat)
  where dat = drop (n*2) testData
-- | Smoke test: validate the hand-built test image matrix and render
-- it to @testImg.png@.
main = do when (not $ checkMatrix testImg) $ error "wrong Hs matrix"
          B.writeFile "testImg.png" . png $ testImg
-- | Collapse an RGB triple to a single grayscale intensity using the
-- Rec. 601 luma weights (0.30 R + 0.59 G + 0.11 B), rounding down.
colorToGray :: (Int,Int,Int) -> Int
colorToGray (red, green, blue) =
    floor (0.3 * toD red + 0.59 * toD green + 0.11 * toD blue)
  where
    toD :: Int -> Double
    toD = fromIntegral
-- | End-to-end comparison harness: run the Haskell WCC implementation
-- and the reference C implementation on the same generated data set,
-- validating and rendering both result matrices.
--
-- NOTE(review): 'testParams' and 'testData' are not defined in this
-- module's visible code; presumably provided by a WCC import -- confirm.
main' =
    do putStrLn "writing testData to file"
       writeFile "testData.txt" $ dataToFile testData testData
       let result = wcc testParams testData testData
       putStrLn "writing results to file"
       matrixToFile "testResultHs.txt" result
       putStrLn "checking Results"
       when (not $ checkMatrix result) $ error "wrong Hs matrix"
       putStrLn "Writing image to file"
       B.writeFile "testImgHs.png" $ matrixToPng result
       putStrLn "running C command"
       let c_command = unwords $ [wcc_C
                                 ,paramtoC testParams
                                 ,"-i testData.txt"
                                 ,"-o testResultsC.txt"
                                 ]
       putStrLn c_command
       errorCode <- system c_command
       case errorCode of
         ExitFailure x -> putStrLn $ "C_code ended with exit code " ++ show x
         _ -> do putStrLn "writing C results to imagefile"
                 c_res <- readMatrix <$> readFile "testResultsC.txt"
                 -- BUG FIX: this branch previously re-validated the
                 -- Haskell matrix ('result') before reporting
                 -- "wrong C matrix"; the C output is what must be checked.
                 when (not $ checkMatrix c_res) $ error "wrong C matrix"
                 B.writeFile "testImgC.png" $ matrixToPng c_res
|
runebak/wcc
|
Source/Garbage/Test.hs
|
bsd-3-clause
| 3,008
| 0
| 16
| 945
| 713
| 367
| 346
| 54
| 2
|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
-- | Handles template instantiation and project descriptors.
module Yate.Template(doTemplate,
readDescription, readJSONDescription,
ProjectDescription) where
import Control.Applicative ((<|>))
import Control.Monad (mplus)
import Data.Aeson (FromJSON(..), Value(Object, String), eitherDecode, withObject)
import qualified Data.ByteString.Lazy as LBS
import qualified Data.HashMap.Strict as H
import qualified Data.Text as Text
import Text.Regex.Base.RegexLike
import Text.Regex.PCRE (compDotAll, (=~))
-- | A single "node"
data Node a = S a
            -- ^A leaf node
            | L [ Tree a ]
            -- ^An intermediate node leading to children `Tree`s
  deriving (Eq, Show, Read)
-- | Parse a 'Node' from JSON: an object becomes an 'L' branch with one
-- labelled subtree per key, a string becomes an 'S' leaf; any other
-- JSON value is rejected with a descriptive parse failure.
instance FromJSON (Node String) where
  parseJSON (Object obj) =
    -- Fixed: the original bound the whole value as @o@ but never used
    -- it, which triggers an unused-binding warning under -Wall.
    L <$> mapM toTree (H.toList obj)
    where
      toTree (k, v) = (Text.unpack k :>:) <$> parseJSON v
  parseJSON (String s) = pure $ S (Text.unpack s)
  parseJSON other = fail $ "cannot parse Node from " <> show other
-- | A tree whose leaves are key/values pairs and nodes are labelled n-way trees.
-- | A labelled n-way tree: a 'String' key paired with either a leaf
-- value or a list of labelled subtrees (via 'Node').
data Tree a = String :>: Node a
  deriving (Eq, Show, Read)
-- | A 'Tree' is encoded as a JSON object with exactly one key; the key
-- becomes the label and its value the subtree.
instance FromJSON (Tree String) where
  parseJSON = withObject "Tree" parseSingleton
    where
      parseSingleton obj =
        case H.toList obj of
          [(key, val)] -> (Text.unpack key :>:) <$> parseJSON val
          pairs        -> fail $ "cannot parse Tree from " <> show pairs
-- | A project descriptor: a labelled tree of string values used as the
-- lookup context during template instantiation.
newtype ProjectDescription = Project { projectTree :: Tree String }
  deriving (Eq)
-- | Shown as its underlying tree, mirroring the 'Read' instance.
instance Show ProjectDescription where
  show = show . projectTree
-- | Read a 'ProjectDescription' from the 'Read' syntax of its
-- underlying 'Tree'; the input must be consumed entirely.
instance Read ProjectDescription where
  readsPrec n s =
    case readsPrec n s of
      [(tree,"")] -> [(Project tree, "")]
      -- NOTE(review): in the ReadS list monad 'fail' is just [], so the
      -- message below is silently discarded; a bad parse simply yields
      -- no candidates rather than an error string.
      other -> fail $ "cannot read ProjectDescription from " <> show other
-- | Delegates to the 'Tree' parser and wraps the result.
instance FromJSON ProjectDescription where
  parseJSON v = Project <$> parseJSON v
-- | Read a project description in JSON format
--
-- Input JSON must be an object with a single root key and then a recursive
-- structure of objects and strings.
--
-- >>> :set -XOverloadedStrings
-- >>> readJSONDescription "{\"project\": { \"name\": \"myproj\"}}"
-- Right "project" :>: L ["name" :>: S "myproj"]
--
-- >>> readJSONDescription "{\"project\": { \"name\": [ \"myproj\" ]}}"
-- Left "Error in $: cannot parse Node from Array [String \"myproj\"]"
--
-- >>> readJSONDescription "{\"project\": \"foo\", \"bar\" : { \"name\": [ \"myproj\" ]}}"
-- Left "Error in $: cannot parse Tree from [(\"project\",String \"foo\"),(\"bar\",Object (fromList [(\"name\",Array [String \"myproj\"])]))]"
-- | Decode a JSON project description, surfacing aeson's error message
-- on failure (see the doctests above for the accepted shape).
readJSONDescription ::
  LBS.ByteString -> Either String ProjectDescription
readJSONDescription bytes = eitherDecode bytes
-- | Read a project description as a labelled tree.
--
-- >>> readDescription "\"b\" :>: S \"foo\""
-- Right "b" :>: S "foo"
--
-- >>> readDescription "\"project\" :>: L [ \"name\" :>: S \"myproj\"]"
-- Right "project" :>: L ["name" :>: S "myproj"]
--
-- >>> readDescription "\"a\" :>: L [ \"b\" :>: S \"foo\", \"c\" :>: S \"bar\"]"
-- Right "a" :>: L ["b" :>: S "foo","c" :>: S "bar"]
readDescription :: String -> Either String ProjectDescription
readDescription input =
  -- Accept only an unambiguous parse that consumes all of the input;
  -- otherwise surface the raw candidate list for debugging.
  case readsPrec 0 input of
    [(description, "")] -> Right description
    candidates          -> Left (show candidates)
-- | A dotted lookup path into a 'Tree': either a final key, or a key
-- followed by the remainder of the path.
data Path = K String
          | String :.: Path
  deriving (Eq, Show, Read)
-- |Build a path from a string description
--
-- >>> path "foo"
-- K "foo"
-- >>> path "foo.bar"
-- "foo" :.: K "bar"
-- >>> path "foo.bar.baz"
-- "foo" :.: ("bar" :.: K "baz")
path :: String -> Path
path input =
  -- Split on the first dot; the cons pattern drops the dot itself
  -- before recursing on the remainder.
  case break (== '.') input of
    (segment, "")               -> K segment
    (segment, _dot : remainder) -> segment :.: path remainder
-- | Extracts a single value from tree given a path
--
-- >>> select (K "bar") ("bar" :>: S "foo")
-- Just "foo"
-- >>> select (K "bar") ("baz" :>: S "foo")
-- Nothing
-- >>> select ("foo" :.: K "bar") ("foo" :>: L ["bar" :>: S "baz" ])
-- Just "baz"
-- >>> select ("foo" :.: K "bar") ("foo" :>: L ["qix" :>: S "foo", "bar" :>: S "baz" ])
-- Just "baz"
select :: Path -> Tree a -> Maybe a
-- A terminal key must land on a matching leaf; an intermediate key must
-- match an inner node, whose children are searched left to right (the
-- first hit wins via 'mplus').
select (K key) (label :>: S value)
  | key == label = Just value
select (key :.: remainder) (label :>: L subtrees)
  | key == label =
      foldl mplus Nothing [ select remainder subtree | subtree <- subtrees ]
select _ _ = Nothing
-- |Select several subtrees of a tree.
--
-- >>> selectMult (K "bar") ("bar" :>: L [ "foo" :>: S "bar", "foo" :>: S "baz" ])
-- ["foo" :>: S "bar","foo" :>: S "baz"]
-- >>> selectMult ("bar" :.: K "foo") ("bar" :>: L [ "foo" :>: S "bar", "foo" :>: S "baz" ])
-- []
-- >>> selectMult ("bar" :.: K "foo") ("bar" :>: L [ "foo" :>: L [ "baz" :>: S "bar", "foo" :>: S "baz" ]])
-- ["baz" :>: S "bar","foo" :>: S "baz"]
-- >>> selectMult ("bar" :.: K "foo") ("bar" :>: L [ "foo" :>: L [ "baz" :>: S "bar"], "foo" :>: L [ "baz" :>: S "baz" ]])
-- ["baz" :>: S "bar","baz" :>: S "baz"]
selectMult :: Path -> Tree a -> [ Tree a ]
-- Like 'select' but the path must end on an inner node, whose whole
-- child list is returned; descent collects children from every match.
selectMult (K key) (label :>: L children)
  | key == label = children
selectMult (key :.: remainder) (label :>: L children)
  | key == label =
      foldl mplus [] [ selectMult remainder child | child <- children ]
selectMult _ _ = []
-- |Instantiate a template given some project description
--
-- Replaces all occurrences of {{var}} with their value.
--
-- >>> instantiate (Project ("foo" :>: L ["qix" :>: S "foo", "bar" :>: S "baz" ])) "foo"
-- "foo"
-- >>> instantiate (Project ("foo" :>: L ["qix" :>: S "foo", "bar" :>: S "baz" ])) "{{foo.qix}}"
-- "foo"
-- >>> instantiate (Project ("foo" :>: L ["qix" :>: S "foo", "bar" :>: S "baz" ])) "baz{{foo.qix}}bar"
-- "bazfoobar"
-- >>> instantiate (Project ("foo" :>: L ["qix" :>: S "foo", "bar" :>: S "baz" ])) "baz{{foo.qix}}bar{{foo.bar}}"
-- "bazfoobarbaz"
-- >>> instantiate (Project ("foo" :>: L ["qix" :>: S "foo", "bar" :>: S "baz" ])) "baz{{foo}}bar{{foo.bar}}"
-- "bazbarbaz"
instantiate :: ProjectDescription -> String -> String
-- Repeatedly replace the leftmost {{var}} with its looked-up value;
-- an unresolvable variable simply disappears from the output.
instantiate project input =
  case ((input =~ "{{([^}]*)}}") :: (String, String, String, [String])) of
    (_, "", _, _) -> input
    (prefix, _, suffix, groups) ->
      prefix ++ expansion ++ instantiate project suffix
      where
        expansion =
          maybe "" id (select (path (head groups)) (projectTree project))
-- |Instantiate list templates, eg. templates with multiple values
--
-- >>> instantiateMult (Project ("foo" :>: L ["name" :>: S "foo", "name" :>: S "baz" ])) "{{foo.name}}{{#foo}}name: {{name}}\n{{/foo}}"
-- "fooname: foo\nname: baz\n"
-- >>> instantiateMult (Project ("project" :>: L ["name" :>: S "foo", "authors" :>: L ["author" :>: S "baz" ]])) "{{#project.authors}}name: {{author}}\n{{/project.authors}}"
-- "name: baz\n"
instantiateMult :: ProjectDescription -> String -> String
-- Expand the leftmost {{#path}}...{{/path}} block.  compDotAll lets the
-- body span newlines; the \1 backreference ensures matching open/close
-- tags.  subs !! 0 is the path, subs !! 1 the block body, which is
-- instantiated once per subtree selected by the path.
instantiateMult project input =
  let regex = makeRegexOpts (defaultCompOpt + compDotAll) defaultExecOpt "{{#([^}]*)}}(.*){{/\\1}}"
      (beg,found,end,subs) = match regex input :: (String, String, String, [String])
  in case found of
       "" -> input
       _ -> case selectMult (path (head subs)) (projectTree project) of
              -- NOTE(review): in this branch 'end' is only
              -- variable-expanded, so any further {{#...}} blocks after
              -- an unresolvable one stay unexpanded — confirm intended.
              [] -> instantiate project beg ++ instantiate project end
              rest -> instantiate project beg
                      ++ concatMap (flip instantiate (head $ tail subs) . Project) rest
                      ++ instantiateMult project end
-- |Main template function
--
-- Instantiate variables and block references found in @input@ with the content of @descriptor@
doTemplate :: ProjectDescription -- ^Context for template instantiation
           -> String -- ^Input string
           -> String -- ^Output string
-- Block sections are expanded first, then plain {{var}} references in
-- whatever text remains.
doTemplate descriptor input =
  instantiate descriptor (instantiateMult descriptor input)
|
abailly/yate
|
src/Yate/Template.hs
|
bsd-3-clause
| 7,660
| 0
| 21
| 1,734
| 1,464
| 808
| 656
| 88
| 3
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveFunctor #-}
module TcCanonical(
canonicalize,
unifyDerived,
makeSuperClasses, maybeSym,
StopOrContinue(..), stopWith, continueWith,
solveCallStack -- For TcSimplify
) where
#include "HsVersions.h"
import GhcPrelude
import Constraint
import Predicate
import TcOrigin
import TcUnify( swapOverTyVars, metaTyVarUpdateOK )
import TcType
import Type
import TcFlatten
import TcSMonad
import TcEvidence
import TcEvTerm
import Class
import TyCon
import TyCoRep -- cleverly decomposes types, good for completeness checking
import Coercion
import CoreSyn
import Id( idType, mkTemplateLocals )
import FamInstEnv ( FamInstEnvs )
import FamInst ( tcTopNormaliseNewTypeTF_maybe )
import Var
import VarEnv( mkInScopeSet )
import VarSet( delVarSetList )
import OccName ( OccName )
import Outputable
import DynFlags( DynFlags )
import NameSet
import RdrName
import GHC.Hs.Types( HsIPName(..) )
import Pair
import Util
import Bag
import MonadUtils
import Control.Monad
import Data.Maybe ( isJust )
import Data.List ( zip4 )
import BasicTypes
import Data.Bifunctor ( bimap )
import Data.Foldable ( traverse_ )
{-
************************************************************************
* *
* The Canonicaliser *
* *
************************************************************************
Note [Canonicalization]
~~~~~~~~~~~~~~~~~~~~~~~
Canonicalization converts a simple constraint to a canonical form. It is
unary (i.e. treats individual constraints one at a time).
Constraints originating from user-written code come into being as
CNonCanonicals (except for CHoleCans, arising from holes). We know nothing
about these constraints. So, first:
    Classify CNonCanonical constraints, depending on whether they
are equalities, class predicates, or other.
Then proceed depending on the shape of the constraint. Generally speaking,
each constraint gets flattened and then decomposed into one of several forms
(see type Ct in TcRnTypes).
When an already-canonicalized constraint gets kicked out of the inert set,
it must be recanonicalized. But we know a bit about its shape from the
last time through, so we can skip the classification step.
-}
-- Top-level canonicalization
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- | Convert one constraint to canonical form (see Note [Canonicalization]).
-- Non-canonical constraints are classified first; already-canonical ones
-- skip classification but are still re-processed against the current
-- inert set.
canonicalize :: Ct -> TcS (StopOrContinue Ct)
canonicalize (CNonCanonical { cc_ev = ev })
  = {-# SCC "canNC" #-}
    case classifyPredType pred of
      ClassPred cls tys     -> do traceTcS "canEvNC:cls" (ppr cls <+> ppr tys)
                                  canClassNC ev cls tys
      EqPred eq_rel ty1 ty2 -> do traceTcS "canEvNC:eq" (ppr ty1 $$ ppr ty2)
                                  canEqNC ev eq_rel ty1 ty2
      IrredPred {}          -> do traceTcS "canEvNC:irred" (ppr pred)
                                  canIrred ev
      ForAllPred _ _ pred   -> do traceTcS "canEvNC:forall" (ppr pred)
                                  canForAll ev (isClassPred pred)
  where
    pred = ctEvPred ev

canonicalize (CQuantCan (QCI { qci_ev = ev, qci_pend_sc = pend_sc }))
  = canForAll ev pend_sc

canonicalize (CIrredCan { cc_ev = ev })
  | EqPred eq_rel ty1 ty2 <- classifyPredType (ctEvPred ev)
  = -- For insolubles (all of which are equalities, do /not/ flatten the arguments
    -- In #14350 doing so led entire-unnecessary and ridiculously large
    -- type function expansion. Instead, canEqNC just applies
    -- the substitution to the predicate, and may do decomposition;
    --   e.g. a ~ [a], where [G] a ~ [Int], can decompose
    canEqNC ev eq_rel ty1 ty2
  | otherwise
  = canIrred ev

canonicalize (CDictCan { cc_ev = ev, cc_class = cls
                       , cc_tyargs = xis, cc_pend_sc = pend_sc })
  = {-# SCC "canClass" #-}
    canClass ev cls xis pend_sc

canonicalize (CTyEqCan { cc_ev = ev
                       , cc_tyvar = tv
                       , cc_rhs = xi
                       , cc_eq_rel = eq_rel })
  = {-# SCC "canEqLeafTyVarEq" #-}
    canEqNC ev eq_rel (mkTyVarTy tv) xi
    -- NB: Don't use canEqTyVar because that expects flattened types,
    -- and tv and xi may not be flat w.r.t. an updated inert set

canonicalize (CFunEqCan { cc_ev = ev
                        , cc_fun = fn
                        , cc_tyargs = xis1
                        , cc_fsk = fsk })
  = {-# SCC "canEqLeafFunEq" #-}
    canCFunEqCan ev fn xis1 fsk

canonicalize (CHoleCan { cc_ev = ev, cc_occ = occ, cc_hole = hole })
  = canHole ev occ hole
{-
************************************************************************
* *
* Class Canonicalization
* *
************************************************************************
-}
-- | Canonicalise a class constraint arriving in non-canonical form.
-- Givens eagerly emit their superclasses; Wanted CallStack constraints
-- from function occurrences get the call site pushed first.
canClassNC :: CtEvidence -> Class -> [Type] -> TcS (StopOrContinue Ct)
-- "NC" means "non-canonical"; that is, we have got here
-- from a NonCanonical constraint, not from a CDictCan
-- Precondition: EvVar is class evidence
canClassNC ev cls tys
  | isGiven ev  -- See Note [Eagerly expand given superclasses]
  = do { sc_cts <- mkStrictSuperClasses ev [] [] cls tys
       ; emitWork sc_cts
       ; canClass ev cls tys False }

  | isWanted ev
  , Just ip_name <- isCallStackPred cls tys
  , OccurrenceOf func <- ctLocOrigin loc
  -- If we're given a CallStack constraint that arose from a function
  -- call, we need to push the current call-site onto the stack instead
  -- of solving it directly from a given.
  -- See Note [Overview of implicit CallStacks] in TcEvidence
  -- and Note [Solving CallStack constraints] in TcSMonad
  = do { -- First we emit a new constraint that will capture the
         -- given CallStack.
       ; let new_loc = setCtLocOrigin loc (IPOccOrigin (HsIPName ip_name))
                       -- We change the origin to IPOccOrigin so
                       -- this rule does not fire again.
                       -- See Note [Overview of implicit CallStacks]

       ; new_ev <- newWantedEvVarNC new_loc pred

         -- Then we solve the wanted by pushing the call-site
         -- onto the newly emitted CallStack
       ; let ev_cs = EvCsPushCall func (ctLocSpan loc) (ctEvExpr new_ev)
       ; solveCallStack ev ev_cs
       ; canClass new_ev cls tys False }

  | otherwise
  = canClass ev cls tys (has_scs cls)

  where
    -- Only classes with a non-empty superclass context have pending
    -- superclasses to expand later.
    has_scs cls = not (null (classSCTheta cls))
    loc  = ctEvLoc ev
    pred = ctEvPred ev
solveCallStack :: CtEvidence -> EvCallStack -> TcS ()
-- Also called from TcSimplify when defaulting call stacks
solveCallStack ev ev_cs = do
  -- The raw CallStack evidence must be coerced into a dictionary for
  -- `IP ip CallStack` before it can discharge the constraint.
  -- See Note [Overview of implicit CallStacks]
  stack_tm <- evCallStack ev_cs
  let dict_tm = mkEvCast stack_tm (wrapIP (ctEvPred ev))
  setEvBindIfWanted ev dict_tm
-- | Canonicalise a class constraint: flatten the argument types against
-- the inert set, rewrite the evidence, and (if still unsolved) continue
-- with a CDictCan carrying the flattened arguments.
canClass :: CtEvidence
         -> Class -> [Type]
         -> Bool            -- True <=> un-explored superclasses
         -> TcS (StopOrContinue Ct)
-- Precondition: EvVar is class evidence
canClass ev cls tys pend_sc
  = -- all classes do *nominal* matching
    ASSERT2( ctEvRole ev == Nominal, ppr ev $$ ppr cls $$ ppr tys )
    do { (xis, cos, _kind_co) <- flattenArgsNom ev cls_tc tys
       ; MASSERT( isTcReflCo _kind_co )
       ; let co = mkTcTyConAppCo Nominal cls_tc cos
             xi = mkClassPred cls xis
             mk_ct new_ev = CDictCan { cc_ev = new_ev
                                     , cc_tyargs = xis
                                     , cc_class = cls
                                     , cc_pend_sc = pend_sc }
       ; mb <- rewriteEvidence ev xi co
       ; traceTcS "canClass" (vcat [ ppr ev
                                   , ppr xi, ppr mb ])
       ; return (fmap mk_ct mb) }
  where
    cls_tc = classTyCon cls
{- Note [The superclass story]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We need to add superclass constraints for two reasons:
* For givens [G], they give us a route to proof. E.g.
f :: Ord a => a -> Bool
f x = x == x
We get a Wanted (Eq a), which can only be solved from the superclass
of the Given (Ord a).
* For wanteds [W], and deriveds [WD], [D], they may give useful
functional dependencies. E.g.
class C a b | a -> b where ...
class C a b => D a b where ...
Now a [W] constraint (D Int beta) has (C Int beta) as a superclass
and that might tell us about beta, via C's fundeps. We can get this
by generating a [D] (C Int beta) constraint. It's derived because
we don't actually have to cough up any evidence for it; it's only there
to generate fundep equalities.
See Note [Why adding superclasses can help].
For these reasons we want to generate superclass constraints for both
Givens and Wanteds. But:
* (Minor) they are often not needed, so generating them aggressively
is a waste of time.
* (Major) if we want recursive superclasses, there would be an infinite
number of them. Here is a real-life example (#10318);
class (Frac (Frac a) ~ Frac a,
Fractional (Frac a),
IntegralDomain (Frac a))
=> IntegralDomain a where
type Frac a :: *
Notice that IntegralDomain has an associated type Frac, and one
of IntegralDomain's superclasses is another IntegralDomain constraint.
So here's the plan:
1. Eagerly generate superclasses for given (but not wanted)
constraints; see Note [Eagerly expand given superclasses].
This is done using mkStrictSuperClasses in canClassNC, when
we take a non-canonical Given constraint and cannonicalise it.
However stop if you encounter the same class twice. That is,
mkStrictSuperClasses expands eagerly, but has a conservative
termination condition: see Note [Expanding superclasses] in TcType.
2. Solve the wanteds as usual, but do no further expansion of
superclasses for canonical CDictCans in solveSimpleGivens or
solveSimpleWanteds; Note [Danger of adding superclasses during solving]
However, /do/ continue to eagerly expand superclasses for new /given/
/non-canonical/ constraints (canClassNC does this). As #12175
showed, a type-family application can expand to a class constraint,
and we want to see its superclasses for just the same reason as
Note [Eagerly expand given superclasses].
3. If we have any remaining unsolved wanteds
(see Note [When superclasses help] in Constraint)
try harder: take both the Givens and Wanteds, and expand
superclasses again. See the calls to expandSuperClasses in
TcSimplify.simpl_loop and solveWanteds.
This may succeed in generating (a finite number of) extra Givens,
and extra Deriveds. Both may help the proof.
3a An important wrinkle: only expand Givens from the current level.
Two reasons:
- We only want to expand it once, and that is best done at
the level it is bound, rather than repeatedly at the leaves
of the implication tree
- We may be inside a type where we can't create term-level
evidence anyway, so we can't superclass-expand, say,
(a ~ b) to get (a ~# b). This happened in #15290.
4. Go round to (2) again. This loop (2,3,4) is implemented
in TcSimplify.simpl_loop.
The cc_pend_sc flag in a CDictCan records whether the superclasses of
this constraint have been expanded. Specifically, in Step 3 we only
expand superclasses for constraints with cc_pend_sc set to true (i.e.
isPendingScDict holds).
Why do we do this? Two reasons:
* To avoid repeated work, by repeatedly expanding the superclasses of
same constraint,
* To terminate the above loop, at least in the -XNoRecursiveSuperClasses
case. If there are recursive superclasses we could, in principle,
expand forever, always encountering new constraints.
When we take a CNonCanonical or CIrredCan, but end up classifying it
as a CDictCan, we set the cc_pend_sc flag to False.
Note [Superclass loops]
~~~~~~~~~~~~~~~~~~~~~~~
Suppose we have
class C a => D a
class D a => C a
Then, when we expand superclasses, we'll get back to the self-same
predicate, so we have reached a fixpoint in expansion and there is no
point in fruitlessly expanding further. This case just falls out from
our strategy. Consider
f :: C a => a -> Bool
f x = x==x
Then canClassNC gets the [G] d1: C a constraint, and eager emits superclasses
[G] d2: D a, [G] d3: C a (psc).  (The "psc" means it has its sc_pend flag set.)
When processing d3 we find a match with d1 in the inert set, and we always
keep the inert item (d1) if possible: see Note [Replacement vs keeping] in
TcInteract. So d3 dies a quick, happy death.
Note [Eagerly expand given superclasses]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In step (1) of Note [The superclass story], why do we eagerly expand
Given superclasses by one layer? (By "one layer" we mean expand transitively
until you meet the same class again -- the conservative criterion embodied
in expandSuperClasses. So a "layer" might be a whole stack of superclasses.)
We do this eagerly for Givens mainly because of some very obscure
cases like this:
instance Bad a => Eq (T a)
f :: (Ord (T a)) => blah
f x = ....needs Eq (T a), Ord (T a)....
Here if we can't satisfy (Eq (T a)) from the givens we'll use the
instance declaration; but then we are stuck with (Bad a). Sigh.
This is really a case of non-confluent proofs, but to stop our users
complaining we expand one layer in advance.
See also Note [Instance and Given overlap] in TcInteract.
We also want to do this if we have
f :: F (T a) => blah
where
type instance F (T a) = Ord (T a)
So we may need to do a little work on the givens to expose the
class that has the superclasses. That's why the superclass
expansion for Givens happens in canClassNC.
Note [Why adding superclasses can help]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Examples of how adding superclasses can help:
--- Example 1
class C a b | a -> b
Suppose we want to solve
[G] C a b
[W] C a beta
Then adding [D] beta~b will let us solve it.
-- Example 2 (similar but using a type-equality superclass)
class (F a ~ b) => C a b
  And try to solve:
[G] C a b
[W] C a beta
Follow the superclass rules to add
[G] F a ~ b
[D] F a ~ beta
Now we get [D] beta ~ b, and can solve that.
-- Example (tcfail138)
class L a b | a -> b
class (G a, L a b) => C a b
instance C a b' => G (Maybe a)
instance C a b => C (Maybe a) a
instance L (Maybe a) a
When solving the superclasses of the (C (Maybe a) a) instance, we get
  [G] C a b, and hence by superclasses, [G] G a, [G] L a b
[W] G (Maybe a)
Use the instance decl to get
[W] C a beta
Generate its derived superclass
[D] L a beta. Now using fundeps, combine with [G] L a b to get
[D] beta ~ b
which is what we want.
Note [Danger of adding superclasses during solving]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Here's a serious, but now out-dated example, from #4497:
class Num (RealOf t) => Normed t
type family RealOf x
Assume the generated wanted constraint is:
[W] RealOf e ~ e
[W] Normed e
If we were to be adding the superclasses during simplification we'd get:
[W] RealOf e ~ e
[W] Normed e
[D] RealOf e ~ fuv
[D] Num fuv
==>
e := fuv, Num fuv, Normed fuv, RealOf fuv ~ fuv
Which looks exactly like our original constraint. If we add the
superclass of (Normed fuv) again we'd loop. By adding superclasses
definitely only once, during canonicalisation, this situation can't
happen.
Mind you, now that Wanteds cannot rewrite Derived, I think this particular
situation can't happen.
-}
makeSuperClasses :: [Ct] -> TcS [Ct]
-- Returns strict superclasses, transitively; see Note [The superclass story]
-- The loop-breaking here follows Note [Expanding superclasses] in TcType
-- Specifically, for an incoming (C t) constraint, we return all of (C t)'s
-- superclasses, up to /and including/ the first repetition of C
--
-- Example:  class D a => C a
--           class C [a] => D a
-- makeSuperClasses (C x) will return (D x, C [x])
--
-- NB: the incoming constraints have had their cc_pend_sc flag already
--     flipped to False, by isPendingScDict, so we are /obliged/ to at
--     least produce the immediate superclasses
makeSuperClasses cts = concatMapM go cts
  where
    -- Dictionary constraints expand directly; quantified constraints
    -- are first split into (tvs, theta, class head).
    go (CDictCan { cc_ev = ev, cc_class = cls, cc_tyargs = tys })
      = mkStrictSuperClasses ev [] [] cls tys
    go (CQuantCan (QCI { qci_pred = pred, qci_ev = ev }))
      = ASSERT2( isClassPred pred, ppr pred )  -- The cts should all have
                                               -- class pred heads
        mkStrictSuperClasses ev tvs theta cls tys
      where
        (tvs, theta, cls, tys) = tcSplitDFunTy (ctEvPred ev)
    go ct = pprPanic "makeSuperClasses" (ppr ct)
mkStrictSuperClasses
    :: CtEvidence
    -> [TyVar] -> ThetaType  -- These two args are non-empty only when taking
                             -- superclasses of a /quantified/ constraint
    -> Class -> [Type] -> TcS [Ct]
-- Return constraints for the strict superclasses of
--   ev :: forall as. theta => cls tys
mkStrictSuperClasses ev tvs theta cls tys
  = mk_strict_superclasses seeded ev tvs theta cls tys
  where
    -- Seed the loop-breaker set with this class so expansion stops when
    -- it is met again (Note [Expanding superclasses] in TcType).
    seeded = unitNameSet (className cls)
-- | Worker for 'mkStrictSuperClasses'.  Givens build real superclass
-- selection evidence; Wanted/Derived constraints only emit Derived
-- superclasses that can drive fundep improvement.
mk_strict_superclasses :: NameSet -> CtEvidence
                       -> [TyVar] -> ThetaType
                       -> Class -> [Type] -> TcS [Ct]
-- Always return the immediate superclasses of (cls tys);
-- and expand their superclasses, provided none of them are in rec_clss
-- nor are repeated
mk_strict_superclasses rec_clss ev tvs theta cls tys
  | CtGiven { ctev_evar = evar, ctev_loc = loc } <- ev
  = concatMapM (do_one_given evar (mk_given_loc loc)) $
    classSCSelIds cls
  where
    dict_ids = mkTemplateLocals theta
    size = sizeTypes tys

    do_one_given evar given_loc sel_id
      | isUnliftedType sc_pred
      , not (null tvs && null theta)
      = -- See Note [Equality superclasses in quantified constraints]
        return []
      | otherwise
      = do { given_ev <- newGivenEvVar given_loc $
                         (given_ty, mk_sc_sel evar sel_id)
           ; mk_superclasses rec_clss given_ev tvs theta sc_pred }
      where
        sc_pred = funResultTy (piResultTys (idType sel_id) tys)
        given_ty = mkInfSigmaTy tvs theta sc_pred

    -- Apply the superclass selector to the given dictionary, under the
    -- quantified binders when present.
    mk_sc_sel evar sel_id
      = EvExpr $ mkLams tvs $ mkLams dict_ids $
        Var sel_id `mkTyApps` tys `App`
        (evId evar `mkTyApps` mkTyVarTys tvs `mkVarApps` dict_ids)

    mk_given_loc loc
      | isCTupleClass cls
      = loc  -- For tuple predicates, just take them apart, without
             -- adding their (large) size into the chain. When we
             -- get down to a base predicate, we'll include its size.
             -- #10335
      | GivenOrigin skol_info <- ctLocOrigin loc
        -- See Note [Solving superclass constraints] in TcInstDcls
        -- for explanation of this transformation for givens
      = case skol_info of
          InstSkol -> loc { ctl_origin = GivenOrigin (InstSC size) }
          InstSC n -> loc { ctl_origin = GivenOrigin (InstSC (n `max` size)) }
          _        -> loc
      | otherwise  -- Probably doesn't happen, since this function
      = loc        -- is only used for Givens, but does no harm

mk_strict_superclasses rec_clss ev tvs theta cls tys
  | all noFreeVarsOfType tys
  = return [] -- Wanteds with no variables yield no deriveds.
              -- See Note [Improvement from Ground Wanteds]

  | otherwise -- Wanted/Derived case, just add Derived superclasses
              -- that can lead to improvement.
  = ASSERT2( null tvs && null theta, ppr tvs $$ ppr theta )
    concatMapM do_one_derived (immSuperClasses cls tys)
  where
    loc = ctEvLoc ev
    do_one_derived sc_pred
      = do { sc_ev <- newDerivedNC loc sc_pred
           ; mk_superclasses rec_clss sc_ev [] [] sc_pred }
{- Note [Improvement from Ground Wanteds]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose class C b a => D a b
and consider
[W] D Int Bool
Is there any point in emitting [D] C Bool Int? No! The only point of
emitting superclass constraints for W/D constraints is to get
improvement, extra unifications that result from functional
dependencies. See Note [Why adding superclasses can help] above.
But no variables means no improvement; case closed.
-}
mk_superclasses :: NameSet -> CtEvidence
                -> [TyVar] -> ThetaType -> PredType -> TcS [Ct]
-- Return this constraint, plus its superclasses, if any
mk_superclasses rec_clss ev tvs theta pred
  = case classifyPredType pred of
      -- Only class predicates can be expanded further; anything else
      -- is returned as-is in non-canonical form.
      ClassPred cls tys -> mk_superclasses_of rec_clss ev tvs theta cls tys
      _                 -> return [mkNonCanonical ev]
-- | Emit the canonical constraint for (cls tys) and, unless this class
-- has already been seen on the way down (loop), its expanded
-- superclasses as well.
mk_superclasses_of :: NameSet -> CtEvidence
                   -> [TyVar] -> ThetaType -> Class -> [Type]
                   -> TcS [Ct]
-- Always return this class constraint,
-- and expand its superclasses
mk_superclasses_of rec_clss ev tvs theta cls tys
  | loop_found = do { traceTcS "mk_superclasses_of: loop" (ppr cls <+> ppr tys)
                    ; return [this_ct] }  -- cc_pend_sc of this_ct = True
  | otherwise  = do { traceTcS "mk_superclasses_of" (vcat [ ppr cls <+> ppr tys
                                                          , ppr (isCTupleClass cls)
                                                          , ppr rec_clss
                                                          ])
                    ; sc_cts <- mk_strict_superclasses rec_clss' ev tvs theta cls tys
                    ; return (this_ct : sc_cts) }
                    -- cc_pend_sc of this_ct = False
  where
    cls_nm     = className cls
    loop_found = not (isCTupleClass cls) && cls_nm `elemNameSet` rec_clss
                 -- Tuples never contribute to recursion, and can be nested
    rec_clss'  = rec_clss `extendNameSet` cls_nm

    this_ct | null tvs, null theta
            = CDictCan { cc_ev = ev, cc_class = cls, cc_tyargs = tys
                       , cc_pend_sc = loop_found }
            -- NB: If there is a loop, we cut off, so we have not
            --     added the superclasses, hence cc_pend_sc = True
            | otherwise
            = CQuantCan (QCI { qci_tvs = tvs, qci_pred = mkClassPred cls tys
                             , qci_ev = ev
                             , qci_pend_sc = loop_found })
{- Note [Equality superclasses in quantified constraints]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider (#15359, #15593, #15625)
f :: (forall a. theta => a ~ b) => stuff
It's a bit odd to have a local, quantified constraint for `(a~b)`,
but some people want such a thing (see the tickets). And for
Coercible it is definitely useful
f :: forall m. (forall p q. Coercible p q => Coercible (m p) (m q)))
=> stuff
Moreover it's not hard to arrange; we just need to look up /equality/
constraints in the quantified-constraint environment, which we do in
TcInteract.doTopReactOther.
There is a wrinkle though, in the case where 'theta' is empty, so
we have
f :: (forall a. a~b) => stuff
Now, potentially, the superclass machinery kicks in, in
makeSuperClasses, giving us a second quantified constraint
(forall a. a ~# b)
BUT this is an unboxed value! And nothing has prepared us for
dictionary "functions" that are unboxed. Actually it does just
about work, but the simplifier ends up with stuff like
case (/\a. eq_sel d) of df -> ...(df @Int)...
and fails to simplify that any further. And it doesn't satisfy
isPredTy any more.
So for now we simply decline to take superclasses in the quantified
case. Instead we have a special case in TcInteract.doTopReactOther,
which looks for primitive equalities specially in the quantified
constraints.
See also Note [Evidence for quantified constraints] in Predicate.
************************************************************************
* *
* Irreducibles canonicalization
* *
************************************************************************
-}
-- | Canonicalise an irreducible constraint: flatten fully, then
-- re-classify in case flattening exposed a class or equality predicate.
canIrred :: CtEvidence -> TcS (StopOrContinue Ct)
-- Precondition: ty not a tuple and no other evidence form
canIrred ev
  = do { let pred = ctEvPred ev
       ; traceTcS "can_pred" (text "IrredPred = " <+> ppr pred)
       ; (xi,co) <- flatten FM_FlattenAll ev pred -- co :: xi ~ pred
       ; rewriteEvidence ev xi co `andWhenContinue` \ new_ev ->
    do { -- Re-classify, in case flattening has improved its shape
       ; case classifyPredType (ctEvPred new_ev) of
           ClassPred cls tys     -> canClassNC new_ev cls tys
           EqPred eq_rel ty1 ty2 -> canEqNC new_ev eq_rel ty1 ty2
           _                     -> continueWith $
                                    mkIrredCt new_ev } }
-- | Canonicalise a hole constraint: substitute (no type-family
-- reduction), stash it in the inert irreducibles, and stop.
canHole :: CtEvidence -> OccName -> HoleSort -> TcS (StopOrContinue Ct)
canHole ev occ hole_sort
  = do { let pred = ctEvPred ev
       ; (xi,co) <- flatten FM_SubstOnly ev pred -- co :: xi ~ pred
       ; rewriteEvidence ev xi co `andWhenContinue` \ new_ev ->
    do { updInertIrreds (`snocCts` (CHoleCan { cc_ev = new_ev
                                             , cc_occ = occ
                                             , cc_hole = hole_sort }))
       ; stopWith new_ev "Emit insoluble hole" } }
{- *********************************************************************
* *
* Quantified predicates
* *
********************************************************************* -}
{- Note [Quantified constraints]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The -XQuantifiedConstraints extension allows type-class contexts like this:
data Rose f x = Rose x (f (Rose f x))
instance (Eq a, forall b. Eq b => Eq (f b))
=> Eq (Rose f a) where
(Rose x1 rs1) == (Rose x2 rs2) = x1==x2 && rs1 == rs2
Note the (forall b. Eq b => Eq (f b)) in the instance contexts.
This quantified constraint is needed to solve the
[W] (Eq (f (Rose f x)))
constraint which arises form the (==) definition.
The wiki page is
https://gitlab.haskell.org/ghc/ghc/wikis/quantified-constraints
which in turn contains a link to the GHC Proposal where the change
is specified, and a Haskell Symposium paper about it.
We implement two main extensions to the design in the paper:
1. We allow a variable in the instance head, e.g.
f :: forall m a. (forall b. m b) => D (m a)
Notice the 'm' in the head of the quantified constraint, not
a class.
2. We support superclasses to quantified constraints.
For example (contrived):
f :: (Ord b, forall b. Ord b => Ord (m b)) => m a -> m a -> Bool
f x y = x==y
Here we need (Eq (m a)); but the quantified constraint deals only
with Ord. But we can make it work by using its superclass.
Here are the moving parts
* Language extension {-# LANGUAGE QuantifiedConstraints #-}
and add it to ghc-boot-th:GHC.LanguageExtensions.Type.Extension
* A new form of evidence, EvDFun, that is used to discharge
such wanted constraints
* checkValidType gets some changes to accept forall-constraints
only in the right places.
* Predicate.Pred gets a new constructor ForAllPred, and
and classifyPredType analyses a PredType to decompose
the new forall-constraints
* TcSMonad.InertCans gets an extra field, inert_insts,
which holds all the Given forall-constraints. In effect,
such Given constraints are like local instance decls.
* When trying to solve a class constraint, via
TcInteract.matchInstEnv, use the InstEnv from inert_insts
so that we include the local Given forall-constraints
in the lookup. (See TcSMonad.getInstEnvs.)
* TcCanonical.canForAll deals with solving a
forall-constraint. See
Note [Solving a Wanted forall-constraint]
* We augment the kick-out code to kick out an inert
forall constraint if it can be rewritten by a new
type equality; see TcSMonad.kick_out_rewritable
Note that a quantified constraint is never /inferred/
(by TcSimplify.simplifyInfer). A function can only have a
quantified constraint in its type if it is given an explicit
type signature.
For the details of exactly what we implement, see the GHC Proposal and the
wiki page linked above.
-}
-- | Canonicalise a quantified constraint @forall as. blah => C tys@.
-- See Note [Quantified constraints] above.  The Bool records whether
-- superclass expansion is still pending for this constraint
-- (it is threaded through to 'solveForAll', which stores it in qci_pend_sc).
canForAll :: CtEvidence -> Bool -> TcS (StopOrContinue Ct)
-- We have a constraint (forall as. blah => C tys)
canForAll ev pend_sc
  = do { -- First rewrite it to apply the current substitution
         -- Do not bother with type-family reductions; we can't
         -- do them under a forall anyway (c.f. Flatten.flatten_one
         -- on a forall type)
         let pred = ctEvPred ev
       ; (xi,co) <- flatten FM_SubstOnly ev pred -- co :: xi ~ pred
         -- rewriteEvidence may solve the constraint outright (e.g. by
         -- unification); only if it continues do we decompose further
       ; rewriteEvidence ev xi co `andWhenContinue` \ new_ev ->
    do { -- Now decompose into its pieces and solve it
         -- (It takes a lot less code to flatten before decomposing.)
       ; case classifyPredType (ctEvPred new_ev) of
           ForAllPred tv_bndrs theta pred
              -> solveForAll new_ev tv_bndrs theta pred pend_sc
           -- canForAll is only called on forall-constraints, and flattening
           -- with FM_SubstOnly cannot change that outer shape, so any other
           -- classification indicates a solver invariant violation
           _  -> pprPanic "canForAll" (ppr new_ev)
    } }
-- | Solve a (flattened, decomposed) quantified constraint
-- @forall tvs. theta => pred@, dispatching on the constraint's flavour:
-- Wanted constraints build an implication; Given constraints become
-- local "instances"; Derived constraints are simply dropped.
solveForAll :: CtEvidence -> [TyVarBinder] -> TcThetaType -> PredType -> Bool
            -> TcS (StopOrContinue Ct)
solveForAll ev tv_bndrs theta pred pend_sc
  | CtWanted { ctev_dest = dest } <- ev
  = -- See Note [Solving a Wanted forall-constraint]
    do { let skol_info = QuantCtxtSkol
             -- The in-scope set must cover the free vars of the constraint,
             -- minus the binders we are about to skolemise
             empty_subst = mkEmptyTCvSubst $ mkInScopeSet $
                           tyCoVarsOfTypes (pred:theta) `delVarSetList` tvs
       ; (subst, skol_tvs) <- tcInstSkolTyVarsX empty_subst tvs
         -- One Given evidence variable per element of theta, instantiated
         -- with the skolems
       ; given_ev_vars <- mapM newEvVar (substTheta subst theta)

         -- Build and solve the implication
         --   forall skol_tvs. given_ev_vars => pred[skol_tvs/tvs]
         -- returning the inner Wanted's evidence id and its bindings
       ; (w_id, ev_binds)
             <- checkConstraintsTcS skol_info skol_tvs given_ev_vars $
                do { wanted_ev <- newWantedEvVarNC loc $
                                  substTy subst pred
                   ; return ( ctEvEvId wanted_ev
                            , unitBag (mkNonCanonical wanted_ev)) }

         -- Discharge the original Wanted with the evidence function
         --   /\skol_tvs. \given_ev_vars. let ev_binds in w_id
       ; setWantedEvTerm dest $
         EvFun { et_tvs = skol_tvs, et_given = given_ev_vars
               , et_binds = ev_binds, et_body = w_id }

       ; stopWith ev "Wanted forall-constraint" }

  | isGiven ev   -- See Note [Solving a Given forall-constraint]
  = do { addInertForAll qci
       ; stopWith ev "Given forall-constraint" }

  | otherwise
  = stopWith ev "Derived forall-constraint"
  where
    loc = ctEvLoc ev
    tvs = binderVars tv_bndrs
    qci = QCI { qci_ev = ev, qci_tvs = tvs
              , qci_pred = pred, qci_pend_sc = pend_sc }
{- Note [Solving a Wanted forall-constraint]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Solving a wanted forall (quantified) constraint
[W] df :: forall ab. (Eq a, Ord b) => C x a b
is delightfully easy. Just build an implication constraint
forall ab. (g1::Eq a, g2::Ord b) => [W] d :: C x a
and discharge df thus:
df = /\ab. \g1 g2. let <binds> in d
where <binds> is filled in by solving the implication constraint.
All the machinery is to hand; there is little to do.
Note [Solving a Given forall-constraint]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
For a Given constraint
[G] df :: forall ab. (Eq a, Ord b) => C x a b
we just add it to TcS's local InstEnv of known instances,
via addInertForall. Then, if we look up (C x Int Bool), say,
we'll find a match in the InstEnv.
************************************************************************
* *
* Equalities
* *
************************************************************************
Note [Canonicalising equalities]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In order to canonicalise an equality, we look at the structure of the
two types at hand, looking for similarities. A difficulty is that the
types may look dissimilar before flattening but similar after flattening.
However, we don't just want to jump in and flatten right away, because
this might be wasted effort. So, after looking for similarities and failing,
we flatten and then try again. Of course, we don't want to loop, so we
track whether or not we've already flattened.
It is conceivable to do a better job at tracking whether or not a type
is flattened, but this is left as future work. (Mar '15)
Note [FunTy and decomposing tycon applications]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When can_eq_nc' attempts to decompose a tycon application we haven't yet zonked.
This means that we may very well have a FunTy containing a type of some unknown
kind. For instance, we may have,
FunTy (a :: k) Int
Where k is a unification variable. tcRepSplitTyConApp_maybe panics in the event
that it sees such a type as it cannot determine the RuntimeReps which the (->)
is applied to. Consequently, it is vital that we instead use
tcRepSplitTyConApp_maybe', which simply returns Nothing in such a case.
When this happens can_eq_nc' will fail to decompose, zonk, and try again.
Zonking should fill the variable k, meaning that decomposition will succeed the
second time around.
-}
-- | Canonicalise an equality constraint between two types.
-- First tries the cheap 'zonk_eq_types' check, which may discover the two
-- sides are already equal without any flattening; otherwise hands the
-- (partially zonked) pair to the main worker 'can_eq_nc'.
canEqNC :: CtEvidence -> EqRel -> Type -> Type -> TcS (StopOrContinue Ct)
canEqNC ev eq_rel ty1 ty2
  = zonk_eq_types ty1 ty2 >>= \ zonk_res ->
    case zonk_res of
      Right ty              -> canEqReflexive ev eq_rel ty
      Left (Pair ty1' ty2') -> can_eq_nc False ev eq_rel ty1' ty1 ty2' ty2
-- | Main worker for equality canonicalisation.  Gathers the environments
-- that 'can_eq_nc'' needs (reader env for newtype-constructor visibility,
-- family-instance envs for unwrapping data instances) and delegates to it.
can_eq_nc
   :: Bool            -- True <=> both types are already flat
   -> CtEvidence
   -> EqRel
   -> Type -> Type    -- LHS, after and before type-synonym expansion, resp
   -> Type -> Type    -- RHS, after and before type-synonym expansion, resp
   -> TcS (StopOrContinue Ct)
can_eq_nc flat ev eq_rel ty1 ps_ty1 ty2 ps_ty2 = do
  -- Trace all the inputs; invaluable when debugging the solver
  traceTcS "can_eq_nc" $
    vcat [ ppr flat, ppr ev, ppr eq_rel, ppr ty1, ppr ps_ty1, ppr ty2, ppr ps_ty2 ]
  gbl_rdr_env  <- getGlobalRdrEnvTcS
  fam_inst_env <- getFamInstEnvs
  can_eq_nc' flat gbl_rdr_env fam_inst_env ev eq_rel ty1 ps_ty1 ty2 ps_ty2
-- | The real canonicaliser for equalities.  NB: the order of the equations
-- below is significant — e.g. synonym expansion must come first, newtype
-- unwrapping must precede TyConApp decomposition, and the catch-all
-- flatten-and-retry case must come last.
can_eq_nc'
   :: Bool           -- True => both input types are flattened
   -> GlobalRdrEnv   -- needed to see which newtypes are in scope
   -> FamInstEnvs    -- needed to unwrap data instances
   -> CtEvidence
   -> EqRel
   -> Type -> Type    -- LHS, after and before type-synonym expansion, resp
   -> Type -> Type    -- RHS, after and before type-synonym expansion, resp
   -> TcS (StopOrContinue Ct)

-- Expand synonyms first; see Note [Type synonyms and canonicalization]
can_eq_nc' flat rdr_env envs ev eq_rel ty1 ps_ty1 ty2 ps_ty2
  | Just ty1' <- tcView ty1 = can_eq_nc' flat rdr_env envs ev eq_rel ty1' ps_ty1 ty2  ps_ty2
  | Just ty2' <- tcView ty2 = can_eq_nc' flat rdr_env envs ev eq_rel ty1  ps_ty1 ty2' ps_ty2

-- need to check for reflexivity in the ReprEq case.
-- See Note [Eager reflexivity check]
-- Check only when flat because the zonk_eq_types check in canEqNC takes
-- care of the non-flat case.
can_eq_nc' True _rdr_env _envs ev ReprEq ty1 _ ty2 _
  | ty1 `tcEqType` ty2
  = canEqReflexive ev ReprEq ty1

-- When working with ReprEq, unwrap newtypes.
-- See Note [Unwrap newtypes first]
-- (Must happen before TyConApp decomposition; see that Note.)
can_eq_nc' _flat rdr_env envs ev eq_rel ty1 ps_ty1 ty2 ps_ty2
  | ReprEq <- eq_rel
  , Just stuff1 <- tcTopNormaliseNewTypeTF_maybe envs rdr_env ty1
  = can_eq_newtype_nc ev NotSwapped ty1 stuff1 ty2 ps_ty2

  | ReprEq <- eq_rel
  , Just stuff2 <- tcTopNormaliseNewTypeTF_maybe envs rdr_env ty2
  = can_eq_newtype_nc ev IsSwapped ty2 stuff2 ty1 ps_ty1

-- Then, get rid of casts
can_eq_nc' flat _rdr_env _envs ev eq_rel (CastTy ty1 co1) _ ty2 ps_ty2
  = canEqCast flat ev eq_rel NotSwapped ty1 co1 ty2 ps_ty2
can_eq_nc' flat _rdr_env _envs ev eq_rel ty1 ps_ty1 (CastTy ty2 co2) _
  = canEqCast flat ev eq_rel IsSwapped ty2 co2 ty1 ps_ty1

-- NB: pattern match on True: we want only flat types sent to canEqTyVar.
-- See also Note [No top-level newtypes on RHS of representational equalities]
can_eq_nc' True _rdr_env _envs ev eq_rel (TyVarTy tv1) ps_ty1 ty2 ps_ty2
  = canEqTyVar ev eq_rel NotSwapped tv1 ps_ty1 ty2 ps_ty2
can_eq_nc' True _rdr_env _envs ev eq_rel ty1 ps_ty1 (TyVarTy tv2) ps_ty2
  = canEqTyVar ev eq_rel IsSwapped tv2 ps_ty2 ty1 ps_ty1

----------------------
-- Otherwise try to decompose
----------------------

-- Literals
can_eq_nc' _flat _rdr_env _envs ev eq_rel ty1@(LitTy l1) _ (LitTy l2) _
 | l1 == l2
  = do { setEvBindIfWanted ev (evCoercion $ mkReflCo (eqRelRole eq_rel) ty1)
       ; stopWith ev "Equal LitTy" }

-- Try to decompose type constructor applications
-- Including FunTy (s -> t)
can_eq_nc' _flat _rdr_env _envs ev eq_rel ty1 _ ty2 _
  --- See Note [FunTy and decomposing tycon applications].
  | Just (tc1, tys1) <- repSplitTyConApp_maybe ty1
  , Just (tc2, tys2) <- repSplitTyConApp_maybe ty2
    -- Type-family applications must not be decomposed here; they are
    -- handled via flattening
  , not (isTypeFamilyTyCon tc1)
  , not (isTypeFamilyTyCon tc2)
  = canTyConApp ev eq_rel tc1 tys1 tc2 tys2

can_eq_nc' _flat _rdr_env _envs ev eq_rel
           s1@(ForAllTy {}) _ s2@(ForAllTy {}) _
  = can_eq_nc_forall ev eq_rel s1 s2

-- See Note [Canonicalising type applications] about why we require flat types
-- AppTy decomposition is nominal-only; see Note [Decomposing equality] {4}
can_eq_nc' True _rdr_env _envs ev eq_rel (AppTy t1 s1) _ ty2 _
  | NomEq <- eq_rel
  , Just (t2, s2) <- tcSplitAppTy_maybe ty2
  = can_eq_app ev t1 s1 t2 s2
can_eq_nc' True _rdr_env _envs ev eq_rel ty1 _ (AppTy t2 s2) _
  | NomEq <- eq_rel
  , Just (t1, s1) <- tcSplitAppTy_maybe ty1
  = can_eq_app ev t1 s1 t2 s2

-- No similarity in type structure detected. Flatten and try again.
-- (The False guard ensures we flatten at most once; afterwards the
-- final catch-all below applies.)
can_eq_nc' False rdr_env envs ev eq_rel _ ps_ty1 _ ps_ty2
  = do { (xi1, co1) <- flatten FM_FlattenAll ev ps_ty1
       ; (xi2, co2) <- flatten FM_FlattenAll ev ps_ty2
       ; new_ev <- rewriteEqEvidence ev NotSwapped xi1 xi2 co1 co2
       ; can_eq_nc' True rdr_env envs new_ev eq_rel xi1 xi1 xi2 xi2 }

-- We've flattened and the types don't match. Give up.
can_eq_nc' True _rdr_env _envs ev eq_rel _ ps_ty1 _ ps_ty2
  = do { traceTcS "can_eq_nc' catch-all case" (ppr ps_ty1 $$ ppr ps_ty2)
       ; case eq_rel of -- See Note [Unsolved equalities]
           ReprEq -> continueWith (mkIrredCt ev)
           NomEq  -> continueWith (mkInsolubleCt ev) }
  -- No need to call canEqFailure/canEqHardFailure because they
  -- flatten, and the types involved here are already flat
{- Note [Unsolved equalities]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we have an unsolved equality like
(a b ~R# Int)
that is not necessarily insoluble! Maybe 'a' will turn out to be a newtype.
So we want to make it a potentially-soluble Irred not an insoluble one.
Missing this point is what caused #15431
-}
---------------------------------
-- | Canonicalise an equality between two forall-types.
-- Only Wanted constraints are decomposed; Given polytype equalities are
-- discarded (see the Note referenced in the 'otherwise' case below).
can_eq_nc_forall :: CtEvidence -> EqRel
                 -> Type -> Type    -- LHS and RHS
                 -> TcS (StopOrContinue Ct)
-- (forall as. phi1) ~ (forall bs. phi2)
-- Check for length match of as, bs
-- Then build an implication constraint: forall as. phi1 ~ phi2[as/bs]
-- But remember also to unify the kinds of as and bs
--  (this is the 'go' loop), and actually substitute phi2[as |> cos / bs]
-- Remember also that we might have forall z (a:z). blah
--  so we must proceed one binder at a time (#13879)
can_eq_nc_forall ev eq_rel s1 s2
 | CtWanted { ctev_loc = loc, ctev_dest = orig_dest } <- ev
 = do { let free_tvs       = tyCoVarsOfTypes [s1,s2]
            (bndrs1, phi1) = tcSplitForAllVarBndrs s1
            (bndrs2, phi2) = tcSplitForAllVarBndrs s2
      ; if not (equalLength bndrs1 bndrs2)
        then do { traceTcS "Forall failure" $
                     vcat [ ppr s1, ppr s2, ppr bndrs1, ppr bndrs2
                          , ppr (map binderArgFlag bndrs1)
                          , ppr (map binderArgFlag bndrs2) ]
                ; canEqHardFailure ev s1 s2 }
        else
   do { traceTcS "Creating implication for polytype equality" $ ppr ev
        -- Skolemise the binders of the LHS; the in-scope set must cover
        -- all free variables of both sides
      ; let empty_subst1 = mkEmptyTCvSubst $ mkInScopeSet free_tvs
      ; (subst1, skol_tvs) <- tcInstSkolTyVarsX empty_subst1 $
                              binderVars bndrs1

      ; let skol_info = UnifyForAllSkol phi1
            phi1' = substTy subst1 phi1

            -- Unify the kinds, extend the substitution
            -- Walks the skolems and the RHS binders in lockstep, unifying
            -- each pair of binder kinds and casting the RHS binder to the
            -- corresponding skolem; accumulates the coercion and the
            -- residual wanted constraints
            go :: [TcTyVar] -> TCvSubst -> [TyVarBinder]
               -> TcS (TcCoercion, Cts)
            go (skol_tv:skol_tvs) subst (bndr2:bndrs2)
              = do { let tv2 = binderVar bndr2
                   ; (kind_co, wanteds1) <- unify loc Nominal (tyVarKind skol_tv)
                                                  (substTy subst (tyVarKind tv2))
                   ; let subst' = extendTvSubstAndInScope subst tv2
                                       (mkCastTy (mkTyVarTy skol_tv) kind_co)
                         -- skol_tv is already in the in-scope set, but the
                         -- free vars of kind_co are not; hence "...AndInScope"
                   ; (co, wanteds2) <- go skol_tvs subst' bndrs2
                   ; return ( mkTcForAllCo skol_tv kind_co co
                            , wanteds1 `unionBags` wanteds2 ) }

            -- Done: unify phi1 ~ phi2
            go [] subst bndrs2
              = ASSERT( null bndrs2 )
                unify loc (eqRelRole eq_rel) phi1' (substTyUnchecked subst phi2)

            -- Unreachable: bndrs1 and bndrs2 have equal length (checked
            -- above), so the lists run out together.
            -- (Panic message fixed: was misspelled "cna_eq_nc_forall".)
            go _ _ _ = panic "can_eq_nc_forall"  -- case (s:ss) []

            empty_subst2 = mkEmptyTCvSubst (getTCvInScope subst1)

        -- Solve the binder-by-binder unification under an implication
        -- whose skolems are skol_tvs
      ; all_co <- checkTvConstraintsTcS skol_info skol_tvs $
                  go skol_tvs empty_subst2 bndrs2

      ; setWantedEq orig_dest all_co
      ; stopWith ev "Deferred polytype equality" } }

 | otherwise
 = do { traceTcS "Omitting decomposition of given polytype equality" $
        pprEq s1 s2    -- See Note [Do not decompose given polytype equalities]
      ; stopWith ev "Discard given polytype equality" }

  where
    unify :: CtLoc -> Role -> TcType -> TcType -> TcS (TcCoercion, Cts)
    -- This version returns the wanted constraint rather
    -- than putting it in the work list
    unify loc role ty1 ty2
      | ty1 `tcEqType` ty2
      = return (mkTcReflCo role ty1, emptyBag)
      | otherwise
      = do { (wanted, co) <- newWantedEq loc role ty1 ty2
           ; return (co, unitBag (mkNonCanonical wanted)) }
---------------------------------
-- | Compare types for equality, while zonking as necessary. Gives up
-- as soon as it finds that two types are not equal.
-- This is quite handy when some unification has made two
-- types in an inert Wanted to be equal. We can discover the equality without
-- flattening, which is sometimes very expensive (in the case of type functions).
-- In particular, this function makes a ~20% improvement in test case
-- perf/compiler/T5030.
--
-- Returns either the (partially zonked) types in the case of
-- inequality, or the one type in the case of equality. canEqReflexive is
-- a good next step in the 'Right' case. Returning 'Left' is always safe.
--
-- NB: This does *not* look through type synonyms. In fact, it treats type
-- synonyms as rigid constructors. In the future, it might be convenient
-- to look at only those arguments of type synonyms that actually appear
-- in the synonym RHS. But we're not there yet.
zonk_eq_types :: TcType -> TcType -> TcS (Either (Pair TcType) TcType)
zonk_eq_types = go
  where
    -- go returns Right ty when the two types are discovered equal,
    -- Left (Pair ty1 ty2) (partially zonked) otherwise
    go (TyVarTy tv1) (TyVarTy tv2) = tyvar_tyvar tv1 tv2
    go (TyVarTy tv1) ty2           = tyvar NotSwapped tv1 ty2
    go ty1 (TyVarTy tv2)           = tyvar IsSwapped  tv2 ty1

    -- We handle FunTys explicitly here despite the fact that they could also be
    -- treated as an application. Why? Well, for one it's cheaper to just look
    -- at two types (the argument and result types) than four (the argument,
    -- result, and their RuntimeReps). Also, we haven't completely zonked yet,
    -- so we may run into an unzonked type variable while trying to compute the
    -- RuntimeReps of the argument and result types. This can be observed in
    -- testcase tc269.
    go ty1 ty2
      | Just (arg1, res1) <- split1
      , Just (arg2, res2) <- split2
      = do { res_a <- go arg1 arg2
           ; res_b <- go res1 res2
           ; return $ combine_rev mkVisFunTy res_b res_a
           }
      -- Exactly one side is a FunTy: definitely not equal
      | isJust split1 || isJust split2
      = bale_out ty1 ty2
      where
        split1 = tcSplitFunTy_maybe ty1
        split2 = tcSplitFunTy_maybe ty2

    go ty1 ty2
      | Just (tc1, tys1) <- repSplitTyConApp_maybe ty1
      , Just (tc2, tys2) <- repSplitTyConApp_maybe ty2
      = if tc1 == tc2 && tys1 `equalLength` tys2
          -- Crucial to check for equal-length args, because
          -- we cannot assume that the two args to 'go' have
          -- the same kind. E.g go (Proxy * (Maybe Int))
          --                      (Proxy (*->*) Maybe)
          -- We'll call (go (Maybe Int) Maybe)
          -- See #13083
        then tycon tc1 tys1 tys2
        else bale_out ty1 ty2

    go ty1 ty2
      | Just (ty1a, ty1b) <- tcRepSplitAppTy_maybe ty1
      , Just (ty2a, ty2b) <- tcRepSplitAppTy_maybe ty2
      = do { res_a <- go ty1a ty2a
           ; res_b <- go ty1b ty2b
           ; return $ combine_rev mkAppTy res_b res_a }

    go ty1@(LitTy lit1) (LitTy lit2)
      | lit1 == lit2
      = return (Right ty1)

    go ty1 ty2 = bale_out ty1 ty2
      -- We don't handle more complex forms here

    -- Give up: report the (partially zonked) pair as possibly-unequal
    bale_out ty1 ty2 = return $ Left (Pair ty1 ty2)

    -- Compare a tyvar against a non-tyvar type, following at most one
    -- level of metavariable indirection
    tyvar :: SwapFlag -> TcTyVar -> TcType
          -> TcS (Either (Pair TcType) TcType)
      -- Try to do as little as possible, as anything we do here is redundant
      -- with flattening. In particular, no need to zonk kinds. That's why
      -- we don't use the already-defined zonking functions
    tyvar swapped tv ty
      = case tcTyVarDetails tv of
          MetaTv { mtv_ref = ref }
            -> do { cts <- readTcRef ref
                  ; case cts of
                      Flexi        -> give_up
                      Indirect ty' -> do { trace_indirect tv ty'
                                         ; unSwap swapped go ty' ty } }
          _ -> give_up
      where
        -- unSwap restores the original left/right orientation
        give_up = return $ Left $ unSwap swapped Pair (mkTyVarTy tv) ty

    -- Compare two tyvars: equal vars are trivially equal; otherwise
    -- follow any metavariable fillings and retry if either side moved
    tyvar_tyvar tv1 tv2
      | tv1 == tv2  = return (Right (mkTyVarTy tv1))
      | otherwise   = do { (ty1', progress1) <- quick_zonk tv1
                         ; (ty2', progress2) <- quick_zonk tv2
                         ; if progress1 || progress2
                           then go ty1' ty2'
                           else return $ Left (Pair (TyVarTy tv1) (TyVarTy tv2)) }

    trace_indirect tv ty
       = traceTcS "Following filled tyvar (zonk_eq_types)"
                  (ppr tv <+> equals <+> ppr ty)

    -- One-step zonk: returns the filling (if any) and whether progress
    -- was made
    quick_zonk tv = case tcTyVarDetails tv of
      MetaTv { mtv_ref = ref }
        -> do { cts <- readTcRef ref
              ; case cts of
                  Flexi        -> return (TyVarTy tv, False)
                  Indirect ty' -> do { trace_indirect tv ty'
                                     ; return (ty', True) } }
      _ -> return (TyVarTy tv, False)

      -- This happens for type families, too. But recall that failure
      -- here just means to try harder, so it's OK if the type function
      -- isn't injective.
    tycon :: TyCon -> [TcType] -> [TcType]
          -> TcS (Either (Pair TcType) TcType)
    tycon tc tys1 tys2
      = do { results <- zipWithM go tys1 tys2
           ; return $ case combine_results results of
               Left tys  -> Left (mkTyConApp tc <$> tys)
               Right tys -> Right (mkTyConApp tc tys) }

    -- Fold a list of per-argument results into a single result;
    -- the fold builds the lists in reverse, hence the final 'reverse's
    combine_results :: [Either (Pair TcType) TcType]
                    -> Either (Pair [TcType]) [TcType]
    combine_results = bimap (fmap reverse) reverse .
                      foldl' (combine_rev (:)) (Right [])

    -- combine (in reverse) a new result onto an already-combined result
    -- Left (inequality) is contagious: one unequal component makes the
    -- whole combination Left
    combine_rev :: (a -> b -> c)
                -> Either (Pair b) b
                -> Either (Pair a) a
                -> Either (Pair c) c
    combine_rev f (Left list) (Left elt)  = Left (f <$> elt     <*> list)
    combine_rev f (Left list) (Right ty)  = Left (f <$> pure ty <*> list)
    combine_rev f (Right tys) (Left elt)  = Left (f <$> elt     <*> pure tys)
    combine_rev f (Right tys) (Right ty)  = Right (f ty tys)
{- Note [Unwrap newtypes first]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
newtype N m a = MkN (m a)
Then N will get a conservative, Nominal role for its second parameter 'a',
because it appears as an argument to the unknown 'm'. Now consider
[W] N Maybe a ~R# N Maybe b
If we decompose, we'll get
[W] a ~N# b
But if instead we unwrap we'll get
[W] Maybe a ~R# Maybe b
which in turn gives us
[W] a ~R# b
which is easier to satisfy.
Bottom line: unwrap newtypes before decomposing them!
c.f. #9123 comment:52,53 for a compelling example.
Note [Newtypes can blow the stack]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we have
newtype X = MkX (Int -> X)
newtype Y = MkY (Int -> Y)
and now wish to prove
[W] X ~R Y
This Wanted will loop, expanding out the newtypes ever deeper looking
for a solid match or a solid discrepancy. Indeed, there is something
appropriate to this looping, because X and Y *do* have the same representation,
in the limit -- they're both (Fix ((->) Int)). However, no finitely-sized
coercion will ever witness it. This loop won't actually cause GHC to hang,
though, because we check our depth when unwrapping newtypes.
Note [Eager reflexivity check]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we have
newtype X = MkX (Int -> X)
and
[W] X ~R X
Naively, we would start unwrapping X and end up in a loop. Instead,
we do this eager reflexivity check. This is necessary only for representational
equality because the flattener technology deals with the similar case
(recursive type families) for nominal equality.
Note that this check does not catch all cases, but it will catch the cases
we're most worried about, types like X above that are actually inhabited.
Here's another place where this reflexivity check is key:
Consider trying to prove (f a) ~R (f a). The AppTys in there can't
be decomposed, because representational equality isn't congruent with respect
to AppTy. So, when canonicalising the equality above, we get stuck and
would normally produce a CIrredCan. However, we really do want to
be able to solve (f a) ~R (f a). So, in the representational case only,
we do a reflexivity check.
(This would be sound in the nominal case, but unnecessary, and I [Richard
E.] am worried that it would slow down the common case.)
-}
------------------------
-- | We're able to unwrap a newtype. Update the bits accordingly.
can_eq_newtype_nc :: CtEvidence           -- ^ :: ty1 ~ ty2
                  -> SwapFlag
                  -> TcType                                    -- ^ ty1
                  -> ((Bag GlobalRdrElt, TcCoercion), TcType)  -- ^ :: ty1 ~ ty1'
                     -- ^ the newtype constructors used to unwrap, the
                     -- coercion witnessing the unwrapping, and the
                     -- unwrapped type ty1'
                  -> TcType               -- ^ ty2
                  -> TcType               -- ^ ty2, with type synonyms
                  -> TcS (StopOrContinue Ct)
can_eq_newtype_nc ev swapped ty1 ((gres, co), ty1') ty2 ps_ty2
  = do { traceTcS "can_eq_newtype_nc" $
         vcat [ ppr ev, ppr swapped, ppr co, ppr gres, ppr ty1', ppr ty2 ]

         -- check for blowing our stack:
         -- See Note [Newtypes can blow the stack]
       ; checkReductionDepth (ctEvLoc ev) ty1

         -- Next, we record uses of newtype constructors, since coercing
         -- through newtypes is tantamount to using their constructors.
       ; addUsedGREs gre_list
         -- If a newtype constructor was imported, don't warn about not
         -- importing it...
       ; traverse_ keepAlive $ map gre_name gre_list
         -- ...and similarly, if a newtype constructor was defined in the same
         -- module, don't warn about it being unused.
         -- See Note [Tracking unused binding and imports] in TcRnTypes.

         -- Rewrite the evidence to be about ty1' ~ ty2 (using sym of the
         -- unwrapping coercion on the left, refl on the right), then
         -- restart canonicalisation on the unwrapped equality
       ; new_ev <- rewriteEqEvidence ev swapped ty1' ps_ty2
                                     (mkTcSymCo co) (mkTcReflCo Representational ps_ty2)
       ; can_eq_nc False new_ev ReprEq ty1' ty1' ty2 ps_ty2 }
  where
    gre_list = bagToList gres
---------
-- | Decompose a type application.
-- All input types must be flat. See Note [Canonicalising type applications]
-- Nominal equality only!
can_eq_app :: CtEvidence       -- :: s1 t1 ~N s2 t2
           -> Xi -> Xi         -- s1 t1
           -> Xi -> Xi         -- s2 t2
           -> TcS (StopOrContinue Ct)

-- AppTys only decompose for nominal equality, so this case just leads
-- to an irreducible constraint; see typecheck/should_compile/T10494
-- See Note [Decomposing equality], note {4}
-- NB: the guard ORDER below matters; see the comment before the
-- `mismatches` guard.
can_eq_app ev s1 t1 s2 t2
  | CtDerived {} <- ev
  = do { unifyDeriveds loc [Nominal, Nominal] [s1, t1] [s2, t2]
       ; stopWith ev "Decomposed [D] AppTy" }

  | CtWanted { ctev_dest = dest } <- ev
  = do { co_s <- unifyWanted loc Nominal s1 s2
         -- If the argument position is invisible, record that in the
         -- CtLoc so error messages are phrased accordingly
       ; let arg_loc
               | isNextArgVisible s1 = loc
               | otherwise           = updateCtLocOrigin loc toInvisibleOrigin
       ; co_t <- unifyWanted arg_loc Nominal t1 t2
       ; let co = mkAppCo co_s co_t
       ; setWantedEq dest co
       ; stopWith ev "Decomposed [W] AppTy" }

    -- If there is a ForAll/(->) mismatch, the use of the Left coercion
    -- below is ill-typed, potentially leading to a panic in splitTyConApp
    -- Test case: typecheck/should_run/Typeable1
    -- We could also include this mismatch check above (for W and D), but it's slow
    -- and we'll get a better error message not doing it
  | s1k `mismatches` s2k
  = canEqHardFailure ev (s1 `mkAppTy` t1) (s2 `mkAppTy` t2)

  | CtGiven { ctev_evar = evar } <- ev
  = do { let co   = mkTcCoVarCo evar
             co_s = mkTcLRCo CLeft  co
             co_t = mkTcLRCo CRight co
       ; evar_s <- newGivenEvVar loc ( mkTcEqPredLikeEv ev s1 s2
                                     , evCoercion co_s )
       ; evar_t <- newGivenEvVar loc ( mkTcEqPredLikeEv ev t1 t2
                                     , evCoercion co_t )
         -- Emit the argument equality as new work, and continue
         -- canonicalising the function-part equality
       ; emitWorkNC [evar_t]
       ; canEqNC evar_s NomEq s1 s2 }
  where
    loc = ctEvLoc ev

    s1k = tcTypeKind s1
    s2k = tcTypeKind s2

    -- True iff exactly one of the two kinds is a forall-kind
    k1 `mismatches` k2
      =  isForAllTy k1 && not (isForAllTy k2)
      || not (isForAllTy k1) && isForAllTy k2
-----------------------
-- | Break apart an equality over a casted type
-- looking like (ty1 |> co1) ~ ty2 (modulo a swap-flag)
-- | Strip the cast off one side of an equality (ty1 |> co1) ~ ty2
-- (modulo the swap flag), rewrite the evidence accordingly, and resume
-- canonicalisation on the cast-free equality ty1 ~ ty2.
canEqCast :: Bool         -- are both types flat?
          -> CtEvidence
          -> EqRel
          -> SwapFlag
          -> TcType -> Coercion   -- LHS (res. RHS), ty1 |> co1
          -> TcType -> TcType     -- RHS (res. LHS), ty2 both normal and pretty
          -> TcS (StopOrContinue Ct)
canEqCast flat ev eq_rel swapped ty1 co1 ty2 ps_ty2 = do
  let role = eqRelRole eq_rel
  traceTcS "Decomposing cast" $
    vcat [ ppr ev, ppr ty1 <+> text "|>" <+> ppr co1, ppr ps_ty2 ]
  -- GRefl-right on the left accounts for dropping the cast; refl on the right
  rewritten_ev <- rewriteEqEvidence ev swapped ty1 ps_ty2
                    (mkTcGReflRightCo role ty1 co1)
                    (mkTcReflCo role ps_ty2)
  can_eq_nc flat rewritten_ev eq_rel ty1 ty1 ty2 ps_ty2
------------------------
canTyConApp :: CtEvidence -> EqRel
            -> TyCon -> [TcType]
            -> TyCon -> [TcType]
            -> TcS (StopOrContinue Ct)
-- See Note [Decomposing TyConApps]
canTyConApp ev eq_rel tc1 tys1 tc2 tys2
  -- Same tycon, same arity: decompose if the injectivity/matchable-givens
  -- analysis (can_decompose) permits it; otherwise a soft failure, since
  -- more information may yet arrive
  | tc1 == tc2
  , tys1 `equalLength` tys2
  = do { inerts <- getTcSInerts
       ; if can_decompose inerts
         then do { traceTcS "canTyConApp"
                       (ppr ev $$ ppr eq_rel $$ ppr tc1 $$ ppr tys1 $$ ppr tys2)
                 ; canDecomposableTyConAppOK ev eq_rel tc1 tys1 tys2
                 ; stopWith ev "Decomposed TyConApp" }
         else canEqFailure ev eq_rel ty1 ty2 }

  -- See Note [Skolem abstract data] (at tyConSkolem)
  | tyConSkolem tc1 || tyConSkolem tc2
  = do { traceTcS "canTyConApp: skolem abstract" (ppr tc1 $$ ppr tc2)
       ; continueWith (mkIrredCt ev) }

  -- Fail straight away for better error messages
  -- See Note [Use canEqFailure in canDecomposableTyConApp]
  | eq_rel == ReprEq && not (isGenerativeTyCon tc1 Representational &&
                             isGenerativeTyCon tc2 Representational)
  = canEqFailure ev eq_rel ty1 ty2
  | otherwise
  = canEqHardFailure ev ty1 ty2
  where
    ty1 = mkTyConApp tc1 tys1
    ty2 = mkTyConApp tc2 tys2

    loc  = ctEvLoc ev
    pred = ctEvPred ev

     -- See Note [Decomposing equality]
     -- Decompose if tc1 is injective at this role, or if this is a
     -- Wanted/Derived with no Given that could later solve it
    can_decompose inerts
      =  isInjectiveTyCon tc1 (eqRelRole eq_rel)
      || (ctEvFlavour ev /= Given && isEmptyBag (matchableGivens loc pred inerts))
{-
Note [Use canEqFailure in canDecomposableTyConApp]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We must use canEqFailure, not canEqHardFailure here, because there is
the possibility of success if working with a representational equality.
Here is one case:
type family TF a where TF Char = Bool
data family DF a
newtype instance DF Bool = MkDF Int
Suppose we are canonicalising (Int ~R DF (TF a)), where we don't yet
know `a`. This is *not* a hard failure, because we might soon learn
that `a` is, in fact, Char, and then the equality succeeds.
Here is another case:
[G] Age ~R Int
where Age's constructor is not in scope. We don't want to report
an "inaccessible code" error in the context of this Given!
For example, see typecheck/should_compile/T10493, repeated here:
import Data.Ord (Down) -- no constructor
foo :: Coercible (Down Int) Int => Down Int -> Int
foo = coerce
That should compile, but only because we use canEqFailure and not
canEqHardFailure.
Note [Decomposing equality]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we have a constraint (of any flavour and role) that looks like
T tys1 ~ T tys2, what can we conclude about tys1 and tys2? The answer,
of course, is "it depends". This Note spells it all out.
In this Note, "decomposition" refers to taking the constraint
[fl] (T tys1 ~X T tys2)
(for some flavour fl and some role X) and replacing it with
[fls'] (tys1 ~Xs' tys2)
where that notation indicates a list of new constraints, where the
new constraints may have different flavours and different roles.
The key property to consider is injectivity. When decomposing a Given the
decomposition is sound if and only if T is injective in all of its type
arguments. When decomposing a Wanted, the decomposition is sound (assuming the
correct roles in the produced equality constraints), but it may be a guess --
that is, an unforced decision by the constraint solver. Decomposing Wanteds
over injective TyCons does not entail guessing. But sometimes we want to
decompose a Wanted even when the TyCon involved is not injective! (See below.)
So, in broad strokes, we want this rule:
(*) Decompose a constraint (T tys1 ~X T tys2) if and only if T is injective
at role X.
Pursuing the details requires exploring three axes:
* Flavour: Given vs. Derived vs. Wanted
* Role: Nominal vs. Representational
* TyCon species: datatype vs. newtype vs. data family vs. type family vs. type variable
(So a type variable isn't a TyCon, but it's convenient to put the AppTy case
in the same table.)
Right away, we can say that Derived behaves just as Wanted for the purposes
of decomposition. The difference between Derived and Wanted is the handling of
evidence. Since decomposition in these cases isn't a matter of soundness but of
guessing, we want the same behavior regardless of evidence.
Here is a table (discussion following) detailing where decomposition of
(T s1 ... sn) ~r (T t1 .. tn)
is allowed. The first four lines (Data types ... type family) refer
to TyConApps with various TyCons T; the last line is for AppTy, where
there is presumably a type variable at the head, so it's actually
(s s1 ... sn) ~r (t t1 .. tn)
NOMINAL GIVEN WANTED
Datatype YES YES
Newtype YES YES
Data family YES YES
Type family YES, in injective args{1} YES, in injective args{1}
Type variable YES YES
REPRESENTATIONAL GIVEN WANTED
Datatype YES YES
Newtype NO{2} MAYBE{2}
Data family NO{3} MAYBE{3}
Type family NO NO
Type variable NO{4} NO{4}
{1}: Type families can be injective in some, but not all, of their arguments,
so we want to do partial decomposition. This is quite different than the way
other decomposition is done, where the decomposed equalities replace the original
one. We thus proceed much like we do with superclasses: emitting new Givens
when "decomposing" a partially-injective type family Given and new Deriveds
when "decomposing" a partially-injective type family Wanted. (As of the time of
writing, 13 June 2015, the implementation of injective type families has not
been merged, but it should be soon. Please delete this parenthetical if the
implementation is indeed merged.)
{2}: See Note [Decomposing newtypes at representational role]
{3}: Because of the possibility of newtype instances, we must treat
data families like newtypes. See also Note [Decomposing newtypes at
representational role]. See #10534 and test case
typecheck/should_fail/T10534.
{4}: Because type variables can stand in for newtypes, we conservatively do not
decompose AppTys over representational equality.
In the implementation of can_eq_nc and friends, we don't directly pattern
match using lines like in the tables above, as those tables don't cover
all cases (what about PrimTyCon? tuples?). Instead we just ask about injectivity,
boiling the tables above down to rule (*). The exceptions to rule (*) are for
injective type families, which are handled separately from other decompositions,
and the MAYBE entries above.
Note [Decomposing newtypes at representational role]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This note discusses the 'newtype' line in the REPRESENTATIONAL table
in Note [Decomposing equality]. (At nominal role, newtypes are fully
decomposable.)
Here is a representative example of why representational equality over
newtypes is tricky:
newtype Nt a = Mk Bool -- NB: a is not used in the RHS,
type role Nt representational -- but the user gives it an R role anyway
If we have [W] Nt alpha ~R Nt beta, we *don't* want to decompose to
[W] alpha ~R beta, because it's possible that alpha and beta aren't
representationally equal. Here's another example.
newtype Nt a = MkNt (Id a)
type family Id a where Id a = a
[W] Nt Int ~R Nt Age
Because of its use of a type family, Nt's parameter will get inferred to have
a nominal role. Thus, decomposing the wanted will yield [W] Int ~N Age, which
is unsatisfiable. Unwrapping, though, leads to a solution.
Conclusion:
* Unwrap newtypes before attempting to decompose them.
This is done in can_eq_nc'.
It all comes from the fact that newtypes aren't necessarily injective
w.r.t. representational equality.
Furthermore, as explained in Note [NthCo and newtypes] in TyCoRep, we can't use
NthCo on representational coercions over newtypes. NthCo comes into play
only when decomposing givens.
Conclusion:
* Do not decompose [G] N s ~R N t
Is it sensible to decompose *Wanted* constraints over newtypes? Yes!
It's the only way we could ever prove (IO Int ~R IO Age), recalling
that IO is a newtype.
However we must be careful. Consider
type role Nt representational
[G] Nt a ~R Nt b (1)
[W] NT alpha ~R Nt b (2)
[W] alpha ~ a (3)
If we focus on (3) first, we'll substitute in (2), and now it's
identical to the given (1), so we succeed. But if we focus on (2)
first, and decompose it, we'll get (alpha ~R b), which is not soluble.
This is exactly like the question of overlapping Givens for class
constraints: see Note [Instance and Given overlap] in TcInteract.
Conclusion:
  * Decompose [W] N s ~R N t  iff there is no given constraint that could
    later solve it.
-}
-- | Decompose an equality between two applications of the same decomposable
-- TyCon into equalities between corresponding arguments, at the roles
-- dictated by the TyCon.  See Note [Decomposing equality].
canDecomposableTyConAppOK :: CtEvidence -> EqRel
                          -> TyCon -> [TcType] -> [TcType]
                          -> TcS ()
-- Precondition: tys1 and tys2 are the same length, hence "OK"
canDecomposableTyConAppOK ev eq_rel tc tys1 tys2
  = ASSERT( tys1 `equalLength` tys2 )
    case ev of
     CtDerived {}
        -> unifyDeriveds loc tc_roles tys1 tys2

     CtWanted { ctev_dest = dest }
                  -- new_locs and tc_roles are both infinite, so
                  -- we are guaranteed that cos has the same length
                  -- as tys1 and tys2
        -> do { cos <- zipWith4M unifyWanted new_locs tc_roles tys1 tys2
              ; setWantedEq dest (mkTyConAppCo role tc cos) }

     CtGiven { ctev_evar = evar }
        -- Given: project out one NthCo piece of evidence per argument,
        -- skipping Phantom arguments (no evidence needed) and coercion
        -- arguments (cannot be decomposed with NthCo).
        -> do { let ev_co = mkCoVarCo evar
              ; given_evs <- newGivenEvVars loc $
                             [ ( mkPrimEqPredRole r ty1 ty2
                               , evCoercion $ mkNthCo r i ev_co )
                             | (r, ty1, ty2, i) <- zip4 tc_roles tys1 tys2 [0..]
                             , r /= Phantom
                             , not (isCoercionTy ty1) && not (isCoercionTy ty2) ]
              ; emitWorkNC given_evs }
  where
    loc        = ctEvLoc ev
    role       = eqRelRole eq_rel

      -- infinite, as tyConRolesX returns an infinite tail of Nominal
    tc_roles   = tyConRolesX role tc

      -- Add nuances to the location during decomposition:
      --  * if the argument is a kind argument, remember this, so that error
      --    messages say "kind", not "type". This is determined based on whether
      --    the corresponding tyConBinder is named (that is, dependent)
      --  * if the argument is invisible, note this as well, again by
      --    looking at the corresponding binder
      -- For oversaturated tycons, we need the (repeat loc) tail, which doesn't
      -- do either of these changes. (Forgetting to do so led to #16188)
      --
      -- NB: infinite in length
      --
      -- NOTE(review): the guard below attaches toInvisibleOrigin when the
      -- binder is *visible*, which reads as the opposite of the comment
      -- above — confirm against isVisibleTyConBinder's semantics.
    new_locs = [ new_loc
               | bndr <- tyConBinders tc
               , let new_loc0 | isNamedTyConBinder bndr = toKindLoc loc
                              | otherwise               = loc
                     new_loc  | isVisibleTyConBinder bndr
                              = updateCtLocOrigin new_loc0 toInvisibleOrigin
                              | otherwise
                              = new_loc0 ]
               ++ repeat loc
-- | Call when canonicalizing an equality fails, but if the equality is
-- representational, there is some hope for the future.
-- Examples in Note [Use canEqFailure in canDecomposableTyConApp]
canEqFailure :: CtEvidence -> EqRel
             -> TcType -> TcType -> TcS (StopOrContinue Ct)
canEqFailure ev NomEq ty1 ty2
  = canEqHardFailure ev ty1 ty2   -- Nominal failure is unrecoverable
canEqFailure ev ReprEq ty1 ty2
  = do { (xi1, co1) <- flatten FM_FlattenAll ev ty1
       ; (xi2, co2) <- flatten FM_FlattenAll ev ty2
            -- We must flatten the types before putting them in the
            -- inert set, so that we are sure to kick them out when
            -- new equalities become available
       ; traceTcS "canEqFailure with ReprEq" $
         vcat [ ppr ev, ppr ty1, ppr ty2, ppr xi1, ppr xi2 ]
       ; new_ev <- rewriteEqEvidence ev NotSwapped xi1 xi2 co1 co2
         -- Park the constraint as Irred (not insoluble): a later newtype
         -- unwrapping or new equality may yet make it solvable
       ; continueWith (mkIrredCt new_ev) }
-- | Call when canonicalizing an equality fails with utterly no hope.
canEqHardFailure :: CtEvidence
                 -> TcType -> TcType -> TcS (StopOrContinue Ct)
-- See Note [Make sure that insolubles are fully rewritten]
canEqHardFailure ev ty1 ty2
  = do { (s1, co1) <- flatten FM_SubstOnly ev ty1  -- substitution only: no point
       ; (s2, co2) <- flatten FM_SubstOnly ev ty2  -- doing full flattening work
       ; new_ev <- rewriteEqEvidence ev NotSwapped s1 s2 co1 co2
         -- Mark insoluble so the error is reported against fully
         -- substituted types
       ; continueWith (mkInsolubleCt new_ev) }
{-
Note [Decomposing TyConApps]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we see (T s1 t1 ~ T s2 t2), then we can just decompose to
(s1 ~ s2, t1 ~ t2)
and push those back into the work list. But if
s1 = K k1 s2 = K k2
then we will just decompose s1~s2, and it might be better to
do so on the spot. An important special case is where s1=s2,
and we get just Refl.
So canDecomposableTyCon is a fast-path decomposition that uses
unifyWanted etc to short-cut that work.
Note [Canonicalising type applications]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Given (s1 t1) ~ ty2, how should we proceed?
The simple things is to see if ty2 is of form (s2 t2), and
decompose. By this time s1 and s2 can't be saturated type
function applications, because those have been dealt with
by an earlier equation in can_eq_nc, so it is always sound to
decompose.
However, over-eager decomposition gives bad error messages
for things like
a b ~ Maybe c
e f ~ p -> q
Suppose (in the first example) we already know a~Array. Then if we
decompose the application eagerly, yielding
a ~ Maybe
b ~ c
we get an error "Can't match Array ~ Maybe",
but we'd prefer to get "Can't match Array b ~ Maybe c".
So instead can_eq_wanted_app flattens the LHS and RHS, in the hope of
replacing (a b) by (Array b), before using try_decompose_app to
decompose it.
Note [Make sure that insolubles are fully rewritten]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When an equality fails, we still want to rewrite the equality
all the way down, so that it accurately reflects
(a) the mutable reference substitution in force at start of solving
(b) any ty-binds in force at this point in solving
See Note [Rewrite insolubles] in TcSMonad.
And if we don't do this there is a bad danger that
TcSimplify.applyTyVarDefaulting will find a variable
that has in fact been substituted.
Note [Do not decompose Given polytype equalities]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider [G] (forall a. t1 ~ forall a. t2). Can we decompose this?
No -- what would the evidence look like? So instead we simply discard
this given evidence.
Note [Combining insoluble constraints]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
As this point we have an insoluble constraint, like Int~Bool.
* If it is Wanted, delete it from the cache, so that subsequent
Int~Bool constraints give rise to separate error messages
* But if it is Derived, DO NOT delete from cache. A class constraint
may get kicked out of the inert set, and then have its functional
dependency Derived constraints generated a second time. In that
case we don't want to get two (or more) error messages by
generating two (or more) insoluble fundep constraints from the same
class constraint.
Note [No top-level newtypes on RHS of representational equalities]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we're in this situation:
work item: [W] c1 : a ~R b
inert: [G] c2 : b ~R Id a
where
newtype Id a = Id a
We want to make sure canEqTyVar sees [W] a ~R a, after b is flattened
and the Id newtype is unwrapped. This is assured by requiring only flat
types in canEqTyVar *and* having the newtype-unwrapping check above
the tyvar check in can_eq_nc.
Note [Occurs check error]
~~~~~~~~~~~~~~~~~~~~~~~~~
If we have an occurs check error, are we necessarily hosed? Say our
tyvar is tv1 and the type it appears in is xi2. Because xi2 is function
free, then if we're computing w.r.t. nominal equality, then, yes, we're
hosed. Nothing good can come from (a ~ [a]). If we're computing w.r.t.
representational equality, this is a little subtler. Once again, (a ~R [a])
is a bad thing, but (a ~R N a) for a newtype N might be just fine. This
means also that (a ~ b a) might be fine, because `b` might become a newtype.
So, we must check: does tv1 appear in xi2 under any type constructor
that is generative w.r.t. representational equality? That's what
isInsolubleOccursCheck does.
See also #10715, which induced this addition.
Note [canCFunEqCan]
~~~~~~~~~~~~~~~~~~~
Flattening the arguments to a type family can change the kind of the type
family application. As an easy example, consider (Any k) where (k ~ Type)
is in the inert set. The original (Any k :: k) becomes (Any Type :: Type).
The problem here is that the fsk in the CFunEqCan will have the old kind.
The solution is to come up with a new fsk/fmv of the right kind. For
givens, this is easy: just introduce a new fsk and update the flat-cache
with the new one. For wanteds, we want to solve the old one in favor of
the new one, so we use dischargeFmv. This also kicks out constraints
from the inert set; this behavior is correct, as the kind-change may
allow more constraints to be solved.
We use `isTcReflexiveCo`, to ensure that we only use the hetero-kinded case
if we really need to. Of course `flattenArgsNom` should return `Refl`
whenever possible, but #15577 was an infinite loop because even
though the coercion was homo-kinded, `kind_co` was not `Refl`, so we
made a new (identical) CFunEqCan, and then the entire process repeated.
-}
canCFunEqCan :: CtEvidence
             -> TyCon -> [TcType]   -- LHS
             -> TcTyVar             -- RHS
             -> TcS (StopOrContinue Ct)
-- ^ Canonicalise a CFunEqCan.  We know that
--     the arg types are already flat,
-- and the RHS is a fsk, which we must *not* substitute.
-- So just substitute in the LHS
canCFunEqCan ev fn tys fsk
  = do { (tys', cos, kind_co) <- flattenArgsNom ev fn tys
                        -- cos :: tys' ~ tys

       ; let lhs_co  = mkTcTyConAppCo Nominal fn cos
                        -- :: F tys' ~ F tys
             new_lhs = mkTyConApp fn tys'

             flav    = ctEvFlavour ev
       ; (ev', fsk')
           -- Homo-kinded case: the flattened arguments did not change the
           -- kind of (F tys), so keep the existing fsk.
           <- if isTcReflexiveCo kind_co   -- See Note [canCFunEqCan]
              then do { traceTcS "canCFunEqCan: refl" (ppr new_lhs)
                      ; let fsk_ty = mkTyVarTy fsk
                      ; ev' <- rewriteEqEvidence ev NotSwapped new_lhs fsk_ty
                                                 lhs_co (mkTcNomReflCo fsk_ty)
                      ; return (ev', fsk) }
              -- Hetero-kinded case: make a fresh fsk/fmv of the new kind and
              -- discharge the old one.  See Note [canCFunEqCan].
              else do { traceTcS "canCFunEqCan: non-refl" $
                        vcat [ text "Kind co:" <+> ppr kind_co
                             , text "RHS:" <+> ppr fsk <+> dcolon <+> ppr (tyVarKind fsk)
                             , text "LHS:" <+> hang (ppr (mkTyConApp fn tys))
                                              2 (dcolon <+> ppr (tcTypeKind (mkTyConApp fn tys)))
                             , text "New LHS" <+> hang (ppr new_lhs)
                                                 2 (dcolon <+> ppr (tcTypeKind new_lhs)) ]
                      ; (ev', new_co, new_fsk)
                          <- newFlattenSkolem flav (ctEvLoc ev) fn tys'
                      ; let xi = mkTyVarTy new_fsk `mkCastTy` kind_co
                               -- sym lhs_co :: F tys ~ F tys'
                               -- new_co     :: F tys' ~ new_fsk
                               -- co         :: F tys ~ (new_fsk |> kind_co)
                            co = mkTcSymCo lhs_co `mkTcTransCo`
                                 mkTcCoherenceRightCo Nominal
                                                      (mkTyVarTy new_fsk)
                                                      kind_co
                                                      new_co

                      ; traceTcS "Discharging fmv/fsk due to hetero flattening" (ppr ev)
                      ; dischargeFunEq ev fsk co xi
                      ; return (ev', new_fsk) }

       ; extendFlatCache fn tys' (ctEvCoercion ev', mkTyVarTy fsk', ctEvFlavour ev')
       ; continueWith (CFunEqCan { cc_ev = ev', cc_fun = fn
                                 , cc_tyargs = tys', cc_fsk = fsk' }) }
---------------------
-- | Canonicalise an equality whose (already flat) LHS is a type variable.
-- Dispatches to the homo-kinded or hetero-kinded case depending on whether
-- the kinds of the two sides agree (possibly after flattening the kinds).
canEqTyVar :: CtEvidence            -- ev :: lhs ~ rhs
           -> EqRel -> SwapFlag
           -> TcTyVar                 -- tv1
           -> TcType                  -- lhs: pretty lhs, already flat
           -> TcType -> TcType        -- rhs: already flat
           -> TcS (StopOrContinue Ct)
canEqTyVar ev eq_rel swapped tv1 ps_xi1 xi2 ps_xi2
  | k1 `tcEqType` k2
  = canEqTyVarHomo ev eq_rel swapped tv1 ps_xi1 xi2 ps_xi2

  -- So the LHS and RHS don't have equal kinds
  -- Note [Flattening] in TcFlatten gives us (F2), which says that
  -- flattening is always homogeneous (doesn't change kinds). But
  -- perhaps by flattening the kinds of the two sides of the equality
  -- at hand makes them equal. So let's try that.
  | otherwise
  = do { (flat_k1, k1_co) <- flattenKind loc flav k1  -- k1_co :: flat_k1 ~N kind(xi1)
       ; (flat_k2, k2_co) <- flattenKind loc flav k2  -- k2_co :: flat_k2 ~N kind(xi2)
       ; traceTcS "canEqTyVar tried flattening kinds"
                  (vcat [ sep [ parens (ppr tv1 <+> dcolon <+> ppr k1)
                              , text "~"
                              , parens (ppr xi2 <+> dcolon <+> ppr k2) ]
                        , ppr flat_k1
                        , ppr k1_co
                        , ppr flat_k2
                        , ppr k2_co ])

         -- We know the LHS is a tyvar. So let's dump all the coercions on the RHS
         -- If flat_k1 == flat_k2, let's dump all the coercions on the RHS and
         -- then call canEqTyVarHomo. If they aren't equal, just rewriteEqEvidence
         -- (as an optimization, so that we don't have to flatten the kinds again)
         -- and then emit a kind equality in canEqTyVarHetero.
         -- See Note [Equalities with incompatible kinds]

       ; let role = eqRelRole eq_rel
       ; if flat_k1 `tcEqType` flat_k2
         then do { let rhs_kind_co = mkTcSymCo k2_co `mkTcTransCo` k1_co
                         -- :: kind(xi2) ~N kind(xi1)
                       new_rhs     = xi2 `mkCastTy` rhs_kind_co
                       ps_rhs      = ps_xi2 `mkCastTy` rhs_kind_co
                       rhs_co      = mkTcGReflLeftCo role xi2 rhs_kind_co

                 ; new_ev <- rewriteEqEvidence ev swapped xi1 new_rhs
                                               (mkTcReflCo role xi1) rhs_co
                       -- NB: rewriteEqEvidence executes a swap, if any, so we're
                       -- NotSwapped now.
                 ; canEqTyVarHomo new_ev eq_rel NotSwapped tv1 ps_xi1 new_rhs ps_rhs }
         else
    do { let sym_k1_co = mkTcSymCo k1_co  -- :: kind(xi1) ~N flat_k1
             sym_k2_co = mkTcSymCo k2_co  -- :: kind(xi2) ~N flat_k2

             new_lhs = xi1 `mkCastTy` sym_k1_co  -- :: flat_k1
             new_rhs = xi2 `mkCastTy` sym_k2_co  -- :: flat_k2
             ps_rhs  = ps_xi2 `mkCastTy` sym_k2_co

             lhs_co = mkTcGReflLeftCo role xi1 sym_k1_co
             rhs_co = mkTcGReflLeftCo role xi2 sym_k2_co
               -- lhs_co :: (xi1 |> sym k1_co) ~ xi1
               -- rhs_co :: (xi2 |> sym k2_co) ~ xi2

       ; new_ev <- rewriteEqEvidence ev swapped new_lhs new_rhs lhs_co rhs_co
             -- no longer swapped, due to rewriteEqEvidence
       ; canEqTyVarHetero new_ev eq_rel tv1 sym_k1_co flat_k1 ps_xi1
                                        new_rhs flat_k2 ps_rhs } }
  where
    xi1 = mkTyVarTy tv1
    k1  = tyVarKind tv1
    k2  = tcTypeKind xi2

    loc  = ctEvLoc ev
    flav = ctEvFlavour ev
-- | Handle a tyvar equality whose two sides have different (flattened) kinds.
-- For Givens we derive a Given kind equality and homogenise; for Wanteds and
-- Deriveds we emit a Derived kind equality and park the constraint as Irred.
-- See Note [Equalities with incompatible kinds].
canEqTyVarHetero :: CtEvidence   -- :: (tv1 |> co1 :: ki1) ~ (xi2 :: ki2)
                 -> EqRel
                 -> TcTyVar -> TcCoercionN -> TcKind  -- tv1 |> co1 :: ki1
                 -> TcType            -- pretty tv1 (*without* the coercion)
                 -> TcType -> TcKind  -- xi2 :: ki2
                 -> TcType            -- pretty xi2
                 -> TcS (StopOrContinue Ct)
canEqTyVarHetero ev eq_rel tv1 co1 ki1 ps_tv1 xi2 ki2 ps_xi2
  -- See Note [Equalities with incompatible kinds]
  | CtGiven { ctev_evar = evar } <- ev
    -- unswapped: tm :: (lhs :: ki1) ~ (rhs :: ki2)
    -- swapped  : tm :: (rhs :: ki2) ~ (lhs :: ki1)
  = do { let kind_co = mkTcKindCo (mkTcCoVarCo evar)
       ; kind_ev <- newGivenEvVar kind_loc (kind_pty, evCoercion kind_co)
       ; let  -- kind_ev :: (ki1 :: *) ~ (ki2 :: *)   (whether swapped or not)
              -- co1     :: kind(tv1) ~N ki1
              -- homo_co :: ki2 ~N kind(tv1)
             homo_co = mkTcSymCo (ctEvCoercion kind_ev) `mkTcTransCo` mkTcSymCo co1
             rhs'    = mkCastTy xi2 homo_co     -- :: kind(tv1)
             ps_rhs' = mkCastTy ps_xi2 homo_co  -- :: kind(tv1)
             rhs_co  = mkTcGReflLeftCo role xi2 homo_co
               -- rhs_co :: (xi2 |> homo_co :: kind(tv1)) ~ xi2

             lhs'   = mkTyVarTy tv1  -- :: kind(tv1)
             lhs_co = mkTcGReflRightCo role lhs' co1
               -- lhs_co :: (tv1 :: kind(tv1)) ~ (tv1 |> co1 :: ki1)

       ; traceTcS "Hetero equality gives rise to given kind equality"
           (ppr kind_ev <+> dcolon <+> ppr kind_pty)
       ; emitWorkNC [kind_ev]
       ; type_ev <- rewriteEqEvidence ev NotSwapped lhs' rhs' lhs_co rhs_co
       ; canEqTyVarHomo type_ev eq_rel NotSwapped tv1 ps_tv1 rhs' ps_rhs' }

  -- See Note [Equalities with incompatible kinds]
  | otherwise   -- Wanted and Derived
                -- NB: all kind equalities are Nominal
  = do { emitNewDerivedEq kind_loc Nominal ki1 ki2
             -- kind_ev :: (ki1 :: *) ~ (ki2 :: *)
       ; traceTcS "Hetero equality gives rise to derived kind equality" $
           ppr ev
       ; continueWith (mkIrredCt ev) }

  where
    kind_pty = mkHeteroPrimEqPred liftedTypeKind liftedTypeKind ki1 ki2
    kind_loc = mkKindLoc (mkTyVarTy tv1 `mkCastTy` co1) xi2 loc

    loc  = ctev_loc ev
    role = eqRelRole eq_rel
-- guaranteed that tcTypeKind lhs == tcTypeKind rhs
-- | Handle a tyvar equality whose two sides have equal kinds.  Detects
-- reflexivity, decides orientation via swapOverTyVars, then finishes in
-- canEqTyVar2.
canEqTyVarHomo :: CtEvidence
               -> EqRel -> SwapFlag
               -> TcTyVar                -- lhs: tv1
               -> TcType                 -- pretty lhs, flat
               -> TcType -> TcType       -- rhs, flat
               -> TcS (StopOrContinue Ct)
canEqTyVarHomo ev eq_rel swapped tv1 ps_xi1 xi2 _
  | Just (tv2, _) <- tcGetCastedTyVar_maybe xi2
  , tv1 == tv2
  = canEqReflexive ev eq_rel (mkTyVarTy tv1)
    -- we don't need to check co because it must be reflexive

  | Just (tv2, co2) <- tcGetCastedTyVar_maybe xi2
  , swapOverTyVars tv1 tv2
  = do { traceTcS "canEqTyVar swapOver" (ppr tv1 $$ ppr tv2 $$ ppr swapped)
       -- FM_Avoid commented out: see Note [Lazy flattening] in TcFlatten
       -- let fmode = FE { fe_ev = ev, fe_mode = FM_Avoid tv1' True }
       -- Flatten the RHS less vigorously, to avoid gratuitous flattening
       -- True <=> xi2 should not itself be a type-function application

       ; let role    = eqRelRole eq_rel
             sym_co2 = mkTcSymCo co2
             ty1     = mkTyVarTy tv1
             new_lhs = ty1 `mkCastTy` sym_co2
             lhs_co  = mkTcGReflLeftCo role ty1 sym_co2

             new_rhs = mkTyVarTy tv2
             rhs_co  = mkTcGReflRightCo role new_rhs co2

       ; new_ev <- rewriteEqEvidence ev swapped new_lhs new_rhs lhs_co rhs_co

       ; dflags <- getDynFlags
       ; canEqTyVar2 dflags new_ev eq_rel IsSwapped tv2 (ps_xi1 `mkCastTy` sym_co2) }

canEqTyVarHomo ev eq_rel swapped tv1 _ _ ps_xi2
  = do { dflags <- getDynFlags
       ; canEqTyVar2 dflags ev eq_rel swapped tv1 ps_xi2 }
-- The RHS here is either not a casted tyvar, or it's a tyvar but we want
-- to rewrite the LHS to the RHS (as per swapOverTyVars)
-- | Final step for a tyvar equality: either produce a canonical CTyEqCan
-- (if the occurs/forall check passes), or keep it as Insoluble/Irred.
canEqTyVar2 :: DynFlags
            -> CtEvidence   -- lhs ~ rhs (or, if swapped, orhs ~ olhs)
            -> EqRel
            -> SwapFlag
            -> TcTyVar      -- lhs = tv, flat
            -> TcType       -- rhs, flat
            -> TcS (StopOrContinue Ct)
-- LHS is an inert type variable,
-- and RHS is fully rewritten, but with type synonyms
-- preserved as much as possible
canEqTyVar2 dflags ev eq_rel swapped tv1 rhs
  | Just rhs' <- metaTyVarUpdateOK dflags tv1 rhs  -- No occurs check
     -- Must do the occurs check even on tyvar/tyvar
     -- equalities, in case have  x ~ (y :: ..x...)
     -- #12593
  = do { new_ev <- rewriteEqEvidence ev swapped lhs rhs' rewrite_co1 rewrite_co2
       ; continueWith (CTyEqCan { cc_ev = new_ev, cc_tyvar = tv1
                                , cc_rhs = rhs', cc_eq_rel = eq_rel }) }

  | otherwise  -- For some reason (occurs check, or forall) we can't unify
               -- We must not use it for further rewriting!
  = do { traceTcS "canEqTyVar2 can't unify" (ppr tv1 $$ ppr rhs)
       ; new_ev <- rewriteEqEvidence ev swapped lhs rhs rewrite_co1 rewrite_co2
       ; if isInsolubleOccursCheck eq_rel tv1 rhs
         then continueWith (mkInsolubleCt new_ev)
             -- If we have a ~ [a], it is not canonical, and in particular
             -- we don't want to rewrite existing inerts with it, otherwise
             -- we'd risk divergence in the constraint solver
         else continueWith (mkIrredCt new_ev) }
             -- A representational equality with an occurs-check problem isn't
             -- insoluble! For example:
             --   a ~R b a
             -- We might learn that b is the newtype Id.
             -- But, the occurs-check certainly prevents the equality from being
             -- canonical, and we might loop if we were to use it in rewriting.
  where
    role = eqRelRole eq_rel

    lhs = mkTyVarTy tv1

    -- Both coercions are Refl: the predicate itself is unchanged here
    rewrite_co1 = mkTcReflCo role lhs
    rewrite_co2 = mkTcReflCo role rhs
-- | Discharge an equality whose two sides are the same type: bind a
-- reflexive coercion as evidence (for Wanteds) and stop.
canEqReflexive :: CtEvidence    -- ty ~ ty
               -> EqRel
               -> TcType        -- ty
               -> TcS (StopOrContinue Ct)   -- always Stop
canEqReflexive ev eq_rel ty
  = do { let refl_evidence = evCoercion (mkTcReflCo (eqRelRole eq_rel) ty)
       ; setEvBindIfWanted ev refl_evidence
       ; stopWith ev "Solved by reflexivity" }
{-
Note [Canonical orientation for tyvar/tyvar equality constraints]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we have a ~ b where both 'a' and 'b' are TcTyVars, which way
round should be oriented in the CTyEqCan? The rules, implemented by
canEqTyVarTyVar, are these
* If either is a flatten-meta-variables, it goes on the left.
* Put a meta-tyvar on the left if possible
alpha[3] ~ r
* If both are meta-tyvars, put the more touchable one (deepest level
number) on the left, so there is the best chance of unifying it
alpha[3] ~ beta[2]
* If both are meta-tyvars and both at the same level, put a TyVarTv
on the right if possible
alpha[2] ~ beta[2](sig-tv)
That way, when we unify alpha := beta, we don't lose the TyVarTv flag.
* Put a meta-tv with a System Name on the left if possible so it
gets eliminated (improves error messages)
* If one is a flatten-skolem, put it on the left so that it is
     substituted out: see Note [Eliminate flat-skols] in TcUnify
fsk ~ a
Note [Equalities with incompatible kinds]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
What do we do when we have an equality
(tv :: k1) ~ (rhs :: k2)
where k1 and k2 differ? This Note explores this treacherous area.
We must proceed differently here depending on whether we have a Wanted
or a Given. Consider this:
[W] w :: (alpha :: k) ~ (Int :: Type)
where k is a skolem. One possible way forward is this:
[W] co :: k ~ Type
[W] w :: (alpha :: k) ~ (Int |> sym co :: k)
The next step will be to unify
alpha := Int |> sym co
Now, consider what error we'll report if we can't solve the "co"
wanted. Its CtOrigin is the w wanted... which now reads (after zonking)
Int ~ Int. The user thus sees that GHC can't solve Int ~ Int, which
is embarrassing. See #11198 for more tales of destruction.
The reason for this odd behavior is much the same as
Note [Wanteds do not rewrite Wanteds] in Constraint: note that the
new `co` is a Wanted.
The solution is then not to use `co` to "rewrite" -- that is, cast -- `w`, but
instead to keep `w` heterogeneous and irreducible. Given that we're not using
`co`, there is no reason to collect evidence for it, so `co` is born a
Derived, with a CtOrigin of KindEqOrigin. When the Derived is solved (by
unification), the original wanted (`w`) will get kicked out. We thus get
[D] _ :: k ~ Type
[W] w :: (alpha :: k) ~ (Int :: Type)
Note that the Wanted is unchanged and will be irreducible. This all happens
in canEqTyVarHetero.
Note that, if we had [G] co1 :: k ~ Type available, then we never get
to canEqTyVarHetero: canEqTyVar tries flattening the kinds first. If
we have [G] co1 :: k ~ Type, then flattening the kind of alpha would
rewrite k to Type, and we would end up in canEqTyVarHomo.
Successive canonicalizations of the same Wanted may produce
duplicate Deriveds. Similar duplications can happen with fundeps, and there
seems to be no easy way to avoid. I expect this case to be rare.
For Givens, this problem (the Wanteds-rewriting-Wanteds action of
a kind coercion) doesn't bite, so a heterogeneous Given gives
rise to a Given kind equality. No Deriveds here. We thus homogenise
the Given (see the "homo_co" in the Given case in canEqTyVarHetero) and
carry on with a homogeneous equality constraint.
Note [Type synonyms and canonicalization]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We treat type synonym applications as xi types, that is, they do not
count as type function applications. However, we do need to be a bit
careful with type synonyms: like type functions they may not be
generative or injective. However, unlike type functions, they are
parametric, so there is no problem in expanding them whenever we see
them, since we do not need to know anything about their arguments in
order to expand them; this is what justifies not having to treat them
as specially as type function applications. The thing that causes
some subtleties is that we prefer to leave type synonym applications
*unexpanded* whenever possible, in order to generate better error
messages.
If we encounter an equality constraint with type synonym applications
on both sides, or a type synonym application on one side and some sort
of type application on the other, we simply must expand out the type
synonyms in order to continue decomposing the equality constraint into
primitive equality constraints. For example, suppose we have
type F a = [Int]
and we encounter the equality
F a ~ [b]
In order to continue we must expand F a into [Int], giving us the
equality
[Int] ~ [b]
which we can then decompose into the more primitive equality
constraint
Int ~ b.
However, if we encounter an equality constraint with a type synonym
application on one side and a variable on the other side, we should
NOT (necessarily) expand the type synonym, since for the purpose of
good error messages we want to leave type synonyms unexpanded as much
as possible. Hence the ps_xi1, ps_xi2 argument passed to canEqTyVar.
-}
{-
************************************************************************
* *
Evidence transformation
* *
************************************************************************
-}
-- | Outcome of one canonicalisation step: either keep working on a
-- (possibly rewritten) payload, or stop because the constraint is solved.
data StopOrContinue a
  = ContinueWith a    -- The constraint was not solved, although it may have
                      --   been rewritten
  | Stop CtEvidence   -- The (rewritten) constraint was solved
         SDoc         -- Tells how it was solved
                      -- Any new sub-goals have been put on the work list
  deriving (Functor)
-- | Debug-trace rendering of a canonicalisation outcome.
instance Outputable a => Outputable (StopOrContinue a) where
  ppr (ContinueWith w) = text "ContinueWith" <+> ppr w
  ppr (Stop ev s)      = text "Stop" <> parens s <+> ppr ev
-- | Report "not yet solved": keep working on the given payload.
continueWith :: a -> TcS (StopOrContinue a)
continueWith ct = return (ContinueWith ct)
-- | Report "solved", recording a short explanation of how.
stopWith :: CtEvidence -> String -> TcS (StopOrContinue a)
stopWith ev = return . Stop ev . text
-- | Sequence two canonicalisation steps: run the first, and feed its
-- payload to the second only if the first did not stop.
andWhenContinue :: TcS (StopOrContinue a)
                -> (a -> TcS (StopOrContinue b))
                -> TcS (StopOrContinue b)
andWhenContinue tcs1 tcs2
  = tcs1 >>= step
  where
    step (Stop ev s)       = return (Stop ev s)
    step (ContinueWith ct) = tcs2 ct
infixr 0 `andWhenContinue`    -- allow chaining with ($)
-- | Transport old evidence across a coercion to evidence for a new
-- predicate, preserving the constraint's flavour.  Equation order matters:
-- the Derived case MUST come first (see the comment on that equation).
rewriteEvidence :: CtEvidence   -- old evidence
                -> TcPredType   -- new predicate
                -> TcCoercion   -- Of type :: new predicate ~ <type of old evidence>
                -> TcS (StopOrContinue CtEvidence)
-- Returns Just new_ev iff either (i)  'co' is reflexivity
--                             or (ii) 'co' is not reflexivity, and 'new_pred' not cached
-- In either case, there is nothing new to do with new_ev
{-
 rewriteEvidence old_ev new_pred co
Main purpose: create new evidence for new_pred;
              unless new_pred is cached already
* Returns a new_ev : new_pred, with same wanted/given/derived flag as old_ev
* If old_ev was wanted, create a binding for old_ev, in terms of new_ev
* If old_ev was given, AND not cached, create a binding for new_ev, in terms of old_ev
* Returns Nothing if new_ev is already cached

        Old evidence    New predicate is               Return new evidence
        flavour                                        of same flavor
        -------------------------------------------------------------------
        Wanted          Already solved or in inert     Nothing
        or Derived      Not                            Just new_evidence

        Given           Already in inert               Nothing
                        Not                            Just new_evidence

Note [Rewriting with Refl]
~~~~~~~~~~~~~~~~~~~~~~~~~~
If the coercion is just reflexivity then you may re-use the same
variable.  But be careful!  Although the coercion is Refl, new_pred
may reflect the result of unification alpha := ty, so new_pred might
not _look_ the same as old_pred, and it's vital to proceed from now on
using new_pred.

The flattener preserves type synonyms, so they should appear in new_pred
as well as in old_pred; that is important for good error messages.
 -}
rewriteEvidence old_ev@(CtDerived {}) new_pred _co
  = -- If derived, don't even look at the coercion.
    -- This is very important, DO NOT re-order the equations for
    -- rewriteEvidence to put the isTcReflCo test first!
    -- Why?  Because for *Derived* constraints, c, the coercion, which
    -- was produced by flattening, may contain suspended calls to
    -- (ctEvExpr c), which fails for Derived constraints.
    -- (Getting this wrong caused #7384.)
    continueWith (old_ev { ctev_pred = new_pred })

rewriteEvidence old_ev new_pred co
  | isTcReflCo co -- See Note [Rewriting with Refl]
  = continueWith (old_ev { ctev_pred = new_pred })

rewriteEvidence ev@(CtGiven { ctev_evar = old_evar, ctev_loc = loc }) new_pred co
  = do { new_ev <- newGivenEvVar loc (new_pred, new_tm)
       ; continueWith new_ev }
  where
    -- mkEvCast optimises ReflCo
    new_tm = mkEvCast (evId old_evar) (tcDowngradeRole Representational
                                                       (ctEvRole ev)
                                                       (mkTcSymCo co))

rewriteEvidence ev@(CtWanted { ctev_dest = dest
                             , ctev_nosh = si
                             , ctev_loc = loc }) new_pred co
  = do { mb_new_ev <- newWanted_SI si loc new_pred
               -- The "_SI" variant ensures that we make a new Wanted
               -- with the same shadow-info as the existing one (#16735)
       ; MASSERT( tcCoercionRole co == ctEvRole ev )
       ; setWantedEvTerm dest
            (mkEvCast (getEvExpr mb_new_ev)
                      (tcDowngradeRole Representational (ctEvRole ev) co))
       ; case mb_new_ev of
            Fresh  new_ev -> continueWith new_ev
            Cached _      -> stopWith ev "Cached wanted" }
rewriteEqEvidence :: CtEvidence         -- Old evidence :: olhs ~ orhs (not swapped)
                                        --              or orhs ~ olhs (swapped)
                  -> SwapFlag
                  -> TcType -> TcType   -- New predicate  nlhs ~ nrhs
                                        -- Should be zonked, because we use tcTypeKind on nlhs/nrhs
                  -> TcCoercion         -- lhs_co, of type :: nlhs ~ olhs
                  -> TcCoercion         -- rhs_co, of type :: nrhs ~ orhs
                  -> TcS CtEvidence     -- Of type nlhs ~ nrhs
-- For (rewriteEqEvidence (Given g olhs orhs) False nlhs nrhs lhs_co rhs_co)
-- we generate
-- If not swapped
--      g1 : nlhs ~ nrhs = lhs_co ; g ; sym rhs_co
-- If 'swapped'
--      g1 : nlhs ~ nrhs = lhs_co ; Sym g ; sym rhs_co
--
-- For (Wanted w) we do the dual thing.
-- New  w1 : nlhs ~ nrhs
-- If not swapped
--      w : olhs ~ orhs = sym lhs_co ; w1 ; rhs_co
-- If swapped
--      w : orhs ~ olhs = sym rhs_co ; sym w1 ; lhs_co
--
-- It's all a form of rewriteEvidence, specialised for equalities
rewriteEqEvidence old_ev swapped nlhs nrhs lhs_co rhs_co
  | CtDerived {} <- old_ev  -- Don't force the evidence for a Derived
  = return (old_ev { ctev_pred = new_pred })

  | NotSwapped <- swapped
  , isTcReflCo lhs_co      -- See Note [Rewriting with Refl]
  , isTcReflCo rhs_co
  = return (old_ev { ctev_pred = new_pred })

  | CtGiven { ctev_evar = old_evar } <- old_ev
  = do { let new_tm = evCoercion (lhs_co
                                  `mkTcTransCo` maybeSym swapped (mkTcCoVarCo old_evar)
                                  `mkTcTransCo` mkTcSymCo rhs_co)
       ; newGivenEvVar loc' (new_pred, new_tm) }

  | CtWanted { ctev_dest = dest, ctev_nosh = si } <- old_ev
  = do { (new_ev, hole_co) <- newWantedEq_SI si loc' (ctEvRole old_ev) nlhs nrhs
               -- The "_SI" variant ensures that we make a new Wanted
               -- with the same shadow-info as the existing one (#16735)
       ; let co = maybeSym swapped $
                  mkSymCo lhs_co
                  `mkTransCo` hole_co
                  `mkTransCo` rhs_co
       ; setWantedEq dest co
       ; traceTcS "rewriteEqEvidence" (vcat [ppr old_ev, ppr nlhs, ppr nrhs, ppr co])
       ; return new_ev }

#if __GLASGOW_HASKELL__ <= 810
  | otherwise
  = panic "rewriteEvidence"
#endif
  where
    new_pred = mkTcEqPredLikeEv old_ev nlhs nrhs

    -- equality is like a type class. Bumping the depth is necessary because
    -- of recursive newtypes, where "reducing" a newtype can actually make
    -- it bigger. See Note [Newtypes can blow the stack].
    loc  = ctEvLoc old_ev
    loc' = bumpCtLocDepth loc
{- Note [unifyWanted and unifyDerived]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When decomposing equalities we often create new wanted constraints for
(s ~ t). But what if s=t? Then it'd be faster to return Refl right away.
Similar remarks apply for Derived.
Rather than making an equality test (which traverses the structure of the
type, perhaps fruitlessly), unifyWanted traverses the common structure, and
bales out when it finds a difference by creating a new Wanted constraint.
But where it succeeds in finding common structure, it just builds a coercion
to reflect it.
-}
unifyWanted :: CtLoc -> Role
            -> TcType -> TcType -> TcS Coercion
-- Return coercion witnessing the equality of the two types,
-- emitting new work equalities where necessary to achieve that
-- Very good short-cut when the two types are equal, or nearly so
-- See Note [unifyWanted and unifyDerived]
-- The returned coercion's role matches the input parameter
unifyWanted loc Phantom ty1 ty2
  -- Phantom: only the kinds need to be related
  = do { kind_co <- unifyWanted loc Nominal (tcTypeKind ty1) (tcTypeKind ty2)
       ; return (mkPhantomCo kind_co ty1 ty2) }

unifyWanted loc role orig_ty1 orig_ty2
  = go orig_ty1 orig_ty2
  where
    -- Look through type synonyms first
    go ty1 ty2 | Just ty1' <- tcView ty1 = go ty1' ty2
    go ty1 ty2 | Just ty2' <- tcView ty2 = go ty1 ty2'

    go (FunTy _ s1 t1) (FunTy _ s2 t2)
      = do { co_s <- unifyWanted loc role s1 s2
           ; co_t <- unifyWanted loc role t1 t2
           ; return (mkFunCo role co_s co_t) }
    go (TyConApp tc1 tys1) (TyConApp tc2 tys2)
      | tc1 == tc2, tys1 `equalLength` tys2
      , isInjectiveTyCon tc1 role -- don't look under newtypes at Rep equality
      = do { cos <- zipWith3M (unifyWanted loc)
                              (tyConRolesX role tc1) tys1 tys2
           ; return (mkTyConAppCo role tc1 cos) }

    -- A filled-in metavariable is transparently replaced by its contents
    go ty1@(TyVarTy tv) ty2
      = do { mb_ty <- isFilledMetaTyVar_maybe tv
           ; case mb_ty of
                Just ty1' -> go ty1' ty2
                Nothing   -> bale_out ty1 ty2}
    go ty1 ty2@(TyVarTy tv)
      = do { mb_ty <- isFilledMetaTyVar_maybe tv
           ; case mb_ty of
                Just ty2' -> go ty1 ty2'
                Nothing   -> bale_out ty1 ty2 }

    go ty1@(CoercionTy {}) (CoercionTy {})
      = return (mkReflCo role ty1) -- we just don't care about coercions!

    go ty1 ty2 = bale_out ty1 ty2

    -- No common structure: emit a fresh Wanted for the *original* types
    bale_out ty1 ty2
      | ty1 `tcEqType` ty2 = return (mkTcReflCo role ty1)
        -- Check for equality; e.g. a ~ a, or (m a) ~ (m a)
      | otherwise = emitNewWantedEq loc role orig_ty1 orig_ty2
-- | Emit Derived equalities for each role/type/type triple.
-- See Note [unifyWanted and unifyDerived]
unifyDeriveds :: CtLoc -> [Role] -> [TcType] -> [TcType] -> TcS ()
unifyDeriveds loc roles tys1 tys2
  = sequence_ (zipWith3 (unify_derived loc) roles tys1 tys2)
-- | Emit a Derived equality for the two components of a 'Pair'.
-- See Note [unifyWanted and unifyDerived]
unifyDerived :: CtLoc -> Role -> Pair TcType -> TcS ()
unifyDerived loc role (Pair lhs rhs) = unify_derived loc role lhs rhs
unify_derived :: CtLoc -> Role -> TcType -> TcType -> TcS ()
-- Create new Derived and put it in the work list
-- Should do nothing if the two types are equal
-- See Note [unifyWanted and unifyDerived]
unify_derived _ Phantom _ _ = return ()   -- no Derived evidence at Phantom role
unify_derived loc role orig_ty1 orig_ty2
  = go orig_ty1 orig_ty2
  where
    -- Look through type synonyms first
    go ty1 ty2 | Just ty1' <- tcView ty1 = go ty1' ty2
    go ty1 ty2 | Just ty2' <- tcView ty2 = go ty1 ty2'

    go (FunTy _ s1 t1) (FunTy _ s2 t2)
      = do { unify_derived loc role s1 s2
           ; unify_derived loc role t1 t2 }
    go (TyConApp tc1 tys1) (TyConApp tc2 tys2)
      | tc1 == tc2, tys1 `equalLength` tys2
      , isInjectiveTyCon tc1 role
      = unifyDeriveds loc (tyConRolesX role tc1) tys1 tys2
    -- A filled-in metavariable is transparently replaced by its contents
    go ty1@(TyVarTy tv) ty2
      = do { mb_ty <- isFilledMetaTyVar_maybe tv
           ; case mb_ty of
                Just ty1' -> go ty1' ty2
                Nothing   -> bale_out ty1 ty2 }
    go ty1 ty2@(TyVarTy tv)
      = do { mb_ty <- isFilledMetaTyVar_maybe tv
           ; case mb_ty of
                Just ty2' -> go ty1 ty2'
                Nothing   -> bale_out ty1 ty2 }
    go ty1 ty2 = bale_out ty1 ty2

    -- No common structure: emit a Derived for the *original* types
    bale_out ty1 ty2
      | ty1 `tcEqType` ty2 = return ()
        -- Check for equality; e.g. a ~ a, or (m a) ~ (m a)
      | otherwise = emitNewDerivedEq loc role orig_ty1 orig_ty2
-- | Apply 'mkTcSymCo' exactly when the equality was swapped.
maybeSym :: SwapFlag -> TcCoercion -> TcCoercion
maybeSym swapped co = case swapped of
                        IsSwapped  -> mkTcSymCo co
                        NotSwapped -> co
|
sdiehl/ghc
|
compiler/typecheck/TcCanonical.hs
|
bsd-3-clause
| 103,692
| 192
| 24
| 29,470
| 12,412
| 6,722
| 5,690
| -1
| -1
|
{-# LANGUAGE AllowAmbiguousTypes #-}
{-# LANGUAGE TypeFamilies #-}
-- | Richmen part of LRC DB.
module Pos.DB.Lrc.RichmenBase
(
-- * Generalization
RichmenComponent (..)
-- * Getters
, getRichmen
-- * Operations
, putRichmen
) where
import Universum
import Pos.Binary.Class (Bi, serialize')
import Pos.Chain.Lrc (FullRichmenData, RichmenComponent (..))
import Pos.Core.Slotting (EpochIndex)
import Pos.DB.Class (MonadDB, MonadDBRead)
import Pos.DB.Lrc.Common (getBi, putBi)
----------------------------------------------------------------------------
-- Getters
----------------------------------------------------------------------------
-- | Look up the richmen snapshot stored for the given component at the
-- given epoch.  'Nothing' means no snapshot has been stored yet.
getRichmen
    :: (Bi richmenData, MonadDBRead m)
    => RichmenComponent richmenData
    -> EpochIndex
    -> m (Maybe richmenData)
-- NB: '(...)' is Universum's compose-after-two-arguments operator,
-- i.e. @getRichmen rc e = getBi (richmenKey rc e)@.
getRichmen = getBi ... richmenKey
----------------------------------------------------------------------------
-- Operations
----------------------------------------------------------------------------
-- | Persist the richmen data computed for a component at the given
-- epoch, after projecting it through the component's 'rcToData'.
putRichmen
    :: (Bi richmenData, MonadDB m)
    => RichmenComponent richmenData
    -> EpochIndex
    -> FullRichmenData
    -> m ()
putRichmen comp epoch frd = putBi (richmenKey comp epoch) (rcToData comp frd)
-- | DB key under which a component's richmen for an epoch live:
-- @"r/\<tag\>/\<serialised epoch\>"@.
richmenKey :: RichmenComponent richmenData -> EpochIndex -> ByteString
richmenKey comp epoch = "r/" <> rcTag comp <> "/" <> serialize' epoch
|
input-output-hk/pos-haskell-prototype
|
db/src/Pos/DB/Lrc/RichmenBase.hs
|
mit
| 1,438
| 0
| 10
| 307
| 274
| 159
| 115
| 28
| 1
|
{-- snippet all --}
import qualified Data.Map as Map
-- Functions to generate a Map that represents an association list
-- as a map
-- | The association list all three map-building styles below share.
al = [(1, "one"), (2, "two"), (3, "three"), (4, "four")]
{- | Create a map representation of 'al' by converting the association
 - list using Map.fromList -}
mapFromAL =
    Map.fromList al
{- | Build the map by folding 'Map.insert' over the association list.
   (Accumulator renamed so it no longer shadows Prelude's 'map'.) -}
mapFold =
    foldl (\acc (key, val) -> Map.insert key val acc) Map.empty al
{- | Manually create a map holding the elements of 'al', inserting the
   pairs one at a time.  All keys are distinct, so the insertion order
   does not affect the final map. -}
mapManual =
    Map.insert 2 "two"
      (Map.insert 4 "four"
        (Map.insert 1 "one"
          (Map.insert 3 "three" Map.empty)))
{-- /snippet all --}
|
binesiyu/ifl
|
examples/ch13/buildmap.hs
|
mit
| 665
| 4
| 10
| 149
| 170
| 90
| 80
| 11
| 1
|
{-# LANGUAGE PackageImports #-}
import "MyBlog" Application (develMain)
import Prelude (IO)
-- | Entry point used by @yesod devel@: delegates to the app's 'develMain'.
main :: IO ()
main = develMain
|
MaxGabriel/YesodScreencast
|
app/devel.hs
|
cc0-1.0
| 124
| 0
| 7
| 19
| 39
| 21
| 18
| 5
| 1
|
-- (c) The FFI task force, 2001
--
-- Provides fixed sized, signed integral types
module Int (
Int8, Int16, Int32, Int64
) where
-- Constraints applying to all of the following types:
--
-- * For any types, all arithmetic is performed modulo 2^n, where n is the
-- number of bit width of the type minus one (for the sign).
--
-- * The rules that hold for Enum instances over a bounded type such as Int
-- (see the section of the Haskell report dealing with arithmetic sequences)
-- also hold for the Enum instances over the various Int types defined here.
-- 8 bit integers
--
data Int8 = -128 | ... | -1 | 0 | 1 | ... | 127
deriving (Eq, Ord, Enum, Bounded, Show, Read)
instance Num Int8 where ...
instance Real Int8 where ...
instance Integral Int8 where ...
instance Ix Int8 where ...
-- 16 bit integers
--
data Int16 = -32768 | ... | -1 | 0 | 1 | ... | 32767
deriving (Eq, Ord, Enum, Bounded, Show, Read)
instance Num Int16 where ...
instance Real Int16 where ...
instance Integral Int16 where ...
instance Ix Int16 where ...
-- 32 bit integers
--
data Int32 = -2147483648 | ... | -1 | 0 | 1 | ... | 2147483647
deriving (Eq, Ord, Enum, Bounded, Show, Read)
instance Num Int32 where ...
instance Real Int32 where ...
instance Integral Int32 where ...
instance Ix Int32 where ...
-- 64 bit integers
--
data Int64 = -9223372036854775808 | ... | -1 | 0 | 1 | ... | 9223372036854775807
deriving (Eq, Ord, Enum, Bounded, Show, Read)
instance Num Int64 where ...
instance Real Int64 where ...
instance Integral Int64 where ...
instance Ix Int64 where ...
|
mimi1vx/gtk2hs
|
tools/c2hs/doc/c2hs/lib/Int.hs
|
gpl-3.0
| 1,673
| 10
| 13
| 418
| 436
| 247
| 189
| -1
| -1
|
module Ex14 where
-- NOTE(review): the recursive call passes a Bool ('n==1') where a
-- numeric argument is expected -- this looks like a deliberate
-- type-error fixture (a correct factorial would recurse on 'n-1');
-- confirm the intent before "fixing".
fac n = if n == 0 then 1 else n * ( fac (n==1))
|
roberth/uu-helium
|
test/typeerrors/Edinburgh/Ex14.hs
|
gpl-3.0
| 67
| 0
| 10
| 19
| 41
| 23
| 18
| 2
| 2
|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE RecordWildCards #-}
-- |
-- Module : Network.AWS.Env
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : provisional
-- Portability : non-portable (GHC extensions)
--
-- Environment and AWS specific configuration for the
-- 'Network.AWS.AWS' and 'Control.Monad.Trans.AWS.AWST' monads.
module Network.AWS.Env
(
-- * Creating the Environment
newEnv
, newEnvWith
, Env (..)
, HasEnv (..)
-- * Overriding Default Configuration
, override
, configure
-- * Scoped Actions
, reconfigure
, within
, once
, timeout
) where
import Control.Applicative
import Control.Lens
import Control.Monad
import Control.Monad.Catch
import Control.Monad.IO.Class
import Control.Monad.Reader
import Control.Retry
import Data.Function (on)
import Data.IORef
import Data.Monoid
import Network.AWS.Auth
import Network.AWS.Internal.Logger
import Network.AWS.Types
import Network.HTTP.Conduit
import Prelude
-- | The environment containing the parameters required to make AWS requests.
data Env = Env
    { _envRegion     :: !Region   -- ^ Region requests are signed for and routed to.
    , _envLogger     :: !Logger   -- ^ Sink for log output.
    , _envRetryCheck :: !(Int -> HttpException -> Bool)
      -- ^ Given the retry count and the exception, decide whether to retry.
    , _envOverride   :: !(Dual (Endo Service))
      -- ^ Accumulated stack of 'Service' configuration overrides.
    , _envManager    :: !Manager  -- ^ Shared HTTP connection manager.
    , _envEC2        :: !(IORef (Maybe Bool))
      -- ^ Memoised "is this host an EC2 instance?" answer (see 'envEC2').
    , _envAuth       :: !Auth     -- ^ Credentials used to sign requests.
    }

-- Note: The strictness annotations above are applied to ensure
-- total field initialisation.
-- | Classy access to an 'Env', so a larger application environment can
-- embed one and still use the functions in this module.
class HasEnv a where
    environment :: Lens' a Env
    {-# MINIMAL environment #-}

    -- | The current region.
    envRegion :: Lens' a Region

    -- | The function used to output log messages.
    envLogger :: Lens' a Logger

    -- | The function used to determine if an 'HttpException' should be retried.
    envRetryCheck :: Lens' a (Int -> HttpException -> Bool)

    -- | The currently applied overrides to all 'Service' configuration.
    envOverride :: Lens' a (Dual (Endo Service))

    -- | The 'Manager' used to create and manage open HTTP connections.
    envManager :: Lens' a Manager

    -- | The credentials used to sign requests for authentication with AWS.
    envAuth :: Lens' a Auth

    -- | A memoised predicate for whether the underlying host is an EC2 instance.
    envEC2 :: Getter a (IORef (Maybe Bool))

    -- Default implementations: compose 'environment' with a lens onto
    -- the corresponding 'Env' field.
    envRegion = environment . lens _envRegion (\s a -> s { _envRegion = a })
    envLogger = environment . lens _envLogger (\s a -> s { _envLogger = a })
    envRetryCheck = environment . lens _envRetryCheck (\s a -> s { _envRetryCheck = a })
    envOverride = environment . lens _envOverride (\s a -> s { _envOverride = a })
    envManager = environment . lens _envManager (\s a -> s { _envManager = a })
    envAuth = environment . lens _envAuth (\s a -> s { _envAuth = a })
    envEC2 = environment . to _envEC2
-- | An 'Env' trivially contains itself.
instance HasEnv Env where
    environment = id
-- | Render the environment for logging: a bracketed header with the
-- region, followed by the authentication info's own rendering.
instance ToLog Env where
    build Env{..} = b <> "\n" <> build _envAuth
      where
        b = buildLines
            [ "[Amazonka Env] {"
            , "  region = " <> build _envRegion
            , "}"
            ]
-- | Push a function onto the existing stack of overrides applied to
-- all service configuration.
--
-- To override a specific service, it's suggested you use
-- either 'configure' or 'reconfigure' with a modified version of the default
-- service, such as @Network.AWS.DynamoDB.dynamoDB@.
override :: HasEnv a => (Service -> Service) -> a -> a
override f = over envOverride (<> Dual (Endo f))
-- | Configure a specific service. All requests belonging to the
-- supplied service will use this configuration instead of the default.
--
-- It's suggested you use a modified version of the default service, such
-- as @Network.AWS.DynamoDB.dynamoDB@.
--
-- /See:/ 'reconfigure'.
configure :: HasEnv a => Service -> a -> a
configure s = override replace
  where
    -- Swap in @s@ for any service sharing its abbreviation.
    replace x
        | _svcAbbrev s == _svcAbbrev x = s
        | otherwise                    = x
-- | Scope an action such that all requests belonging to the supplied service
-- will use this configuration instead of the default.
--
-- It's suggested you use a modified version of the default service, such
-- as @Network.AWS.DynamoDB.dynamoDB@.
--
-- /See:/ 'configure'.
reconfigure :: (MonadReader r m, HasEnv r) => Service -> m a -> m a
reconfigure svc = local (configure svc)
-- | Run the action with the environment's region set to @r@.
within :: (MonadReader r m, HasEnv r) => Region -> m a -> m a
within r = local (set envRegion r)
-- | Scope an action such that any retry logic for the 'Service' is
-- ignored and any requests will at most be sent once.
once :: (MonadReader r m, HasEnv r) => m a -> m a
once = local (override (set (serviceRetry . retryAttempts) 0))
-- | Scope an action such that any HTTP response will use this timeout value.
--
-- Default timeouts are chosen by considering:
--
--     * This 'timeout', if set.
--
--     * The related 'Service' timeout for the sent request if set. (Usually 70s)
--
--     * The 'envManager' timeout if set.
--
--     * The default 'ClientRequest' timeout. (Approximately 30s)
timeout :: (MonadReader r m, HasEnv r) => Seconds -> m a -> m a
timeout n = local (override (serviceTimeout .~ Just n))
-- | Creates a new environment with a new 'Manager' without debug logging
-- and uses 'getAuth' to expand/discover the supplied 'Credentials'.
-- Lenses from 'HasEnv' can be used to further configure the resulting 'Env'.
--
-- Throws 'AuthError' when environment variables or IAM profiles cannot be read.
--
-- /See:/ 'newEnvWith'.
newEnv :: (Applicative m, MonadIO m, MonadCatch m)
       => Region      -- ^ Initial region to operate in.
       -> Credentials -- ^ Credential discovery mechanism.
       -> m Env
newEnv r c = do
    m <- liftIO (newManager conduitManagerSettings)
    newEnvWith r c Nothing m
-- | /See:/ 'newEnv'
--
-- Throws 'AuthError' when environment variables or IAM profiles cannot be read.
newEnvWith :: (Applicative m, MonadIO m, MonadCatch m)
           => Region      -- ^ Initial region to operate in.
           -> Credentials -- ^ Credential discovery mechanism.
           -> Maybe Bool  -- ^ Dictate if the instance is running on EC2. (Preload memoisation.)
           -> Manager
           -> m Env
newEnvWith r c p m = do
    ref  <- liftIO (newIORef p)
    auth <- getAuth m c
    return (Env r logger check mempty m ref auth)
  where
    -- Defaults: discard all log output; always allow a retry attempt.
    logger _ _ = return ()
    check  _ _ = True
|
olorin/amazonka
|
amazonka/src/Network/AWS/Env.hs
|
mpl-2.0
| 6,806
| 0
| 13
| 1,781
| 1,224
| 686
| 538
| -1
| -1
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.MachineLearning.DescribeDataSources
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Returns a list of 'DataSource' that match the search criteria in the
-- request.
--
-- /See:/ <http://http://docs.aws.amazon.com/machine-learning/latest/APIReference/API_DescribeDataSources.html AWS API Reference> for DescribeDataSources.
--
-- This operation returns paginated results.
module Network.AWS.MachineLearning.DescribeDataSources
(
-- * Creating a Request
describeDataSources
, DescribeDataSources
-- * Request Lenses
, ddsEQ
, ddsGE
, ddsPrefix
, ddsGT
, ddsNE
, ddsNextToken
, ddsSortOrder
, ddsLimit
, ddsLT
, ddsFilterVariable
, ddsLE
-- * Destructuring the Response
, describeDataSourcesResponse
, DescribeDataSourcesResponse
-- * Response Lenses
, ddssrsResults
, ddssrsNextToken
, ddssrsResponseStatus
) where
import Network.AWS.MachineLearning.Types
import Network.AWS.MachineLearning.Types.Product
import Network.AWS.Pager
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | /See:/ 'describeDataSources' smart constructor.
data DescribeDataSources = DescribeDataSources'
    { _ddsEQ             :: !(Maybe Text)       -- ^ Equal-to filter value.
    , _ddsGE             :: !(Maybe Text)       -- ^ Greater-than-or-equal filter value.
    , _ddsPrefix         :: !(Maybe Text)       -- ^ Prefix match for the filter variable.
    , _ddsGT             :: !(Maybe Text)       -- ^ Greater-than filter value.
    , _ddsNE             :: !(Maybe Text)       -- ^ Not-equal filter value.
    , _ddsNextToken      :: !(Maybe Text)       -- ^ ID of the page in the paginated results.
    , _ddsSortOrder      :: !(Maybe SortOrder)  -- ^ Ascending or descending result order.
    , _ddsLimit          :: !(Maybe Nat)        -- ^ Maximum number of results to return.
    , _ddsLT             :: !(Maybe Text)       -- ^ Less-than filter value.
    , _ddsFilterVariable :: !(Maybe DataSourceFilterVariable)  -- ^ Variable to filter on.
    , _ddsLE             :: !(Maybe Text)       -- ^ Less-than-or-equal filter value.
    } deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DescribeDataSources' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ddsEQ'
--
-- * 'ddsGE'
--
-- * 'ddsPrefix'
--
-- * 'ddsGT'
--
-- * 'ddsNE'
--
-- * 'ddsNextToken'
--
-- * 'ddsSortOrder'
--
-- * 'ddsLimit'
--
-- * 'ddsLT'
--
-- * 'ddsFilterVariable'
--
-- * 'ddsLE'
describeDataSources
:: DescribeDataSources
describeDataSources =
DescribeDataSources'
{ _ddsEQ = Nothing
, _ddsGE = Nothing
, _ddsPrefix = Nothing
, _ddsGT = Nothing
, _ddsNE = Nothing
, _ddsNextToken = Nothing
, _ddsSortOrder = Nothing
, _ddsLimit = Nothing
, _ddsLT = Nothing
, _ddsFilterVariable = Nothing
, _ddsLE = Nothing
}
-- | The equal to operator. The 'DataSource' results will have
-- 'FilterVariable' values that exactly match the value specified with
-- 'EQ'.
ddsEQ :: Lens' DescribeDataSources (Maybe Text)
ddsEQ = lens _ddsEQ (\ s a -> s{_ddsEQ = a});
-- | The greater than or equal to operator. The 'DataSource' results will
-- have 'FilterVariable' values that are greater than or equal to the value
-- specified with 'GE'.
ddsGE :: Lens' DescribeDataSources (Maybe Text)
ddsGE = lens _ddsGE (\ s a -> s{_ddsGE = a});
-- | A string that is found at the beginning of a variable, such as 'Name' or
-- 'Id'.
--
-- For example, a 'DataSource' could have the 'Name'
-- '2014-09-09-HolidayGiftMailer'. To search for this 'DataSource', select
-- 'Name' for the 'FilterVariable' and any of the following strings for the
-- 'Prefix':
--
-- - 2014-09
--
-- - 2014-09-09
--
-- - 2014-09-09-Holiday
--
ddsPrefix :: Lens' DescribeDataSources (Maybe Text)
ddsPrefix = lens _ddsPrefix (\ s a -> s{_ddsPrefix = a});
-- | The greater than operator. The 'DataSource' results will have
-- 'FilterVariable' values that are greater than the value specified with
-- 'GT'.
ddsGT :: Lens' DescribeDataSources (Maybe Text)
ddsGT = lens _ddsGT (\ s a -> s{_ddsGT = a});
-- | The not equal to operator. The 'DataSource' results will have
-- 'FilterVariable' values not equal to the value specified with 'NE'.
ddsNE :: Lens' DescribeDataSources (Maybe Text)
ddsNE = lens _ddsNE (\ s a -> s{_ddsNE = a});
-- | The ID of the page in the paginated results.
ddsNextToken :: Lens' DescribeDataSources (Maybe Text)
ddsNextToken = lens _ddsNextToken (\rq v -> rq{_ddsNextToken = v})
-- | A two-value parameter that determines the sequence of the resulting list
-- of 'DataSource'.
--
-- - 'asc' - Arranges the list in ascending order (A-Z, 0-9).
-- - 'dsc' - Arranges the list in descending order (Z-A, 9-0).
--
-- Results are sorted by 'FilterVariable'.
ddsSortOrder :: Lens' DescribeDataSources (Maybe SortOrder)
ddsSortOrder = lens _ddsSortOrder (\ s a -> s{_ddsSortOrder = a});
-- | The maximum number of 'DataSource' to include in the result.
ddsLimit :: Lens' DescribeDataSources (Maybe Natural)
ddsLimit = lens _ddsLimit (\ s a -> s{_ddsLimit = a}) . mapping _Nat;
-- | The less than operator. The 'DataSource' results will have
-- 'FilterVariable' values that are less than the value specified with
-- 'LT'.
ddsLT :: Lens' DescribeDataSources (Maybe Text)
ddsLT = lens _ddsLT (\ s a -> s{_ddsLT = a});
-- | Use one of the following variables to filter a list of 'DataSource':
--
-- - 'CreatedAt' - Sets the search criteria to 'DataSource' creation
-- dates.
-- - 'Status' - Sets the search criteria to 'DataSource' statuses.
-- - 'Name' - Sets the search criteria to the contents of 'DataSource'
-- ____ 'Name'.
-- - 'DataUri' - Sets the search criteria to the URI of data files used
-- to create the 'DataSource'. The URI can identify either a file or an
-- Amazon Simple Storage Service (Amazon S3) bucket or directory.
-- - 'IAMUser' - Sets the search criteria to the user account that
-- invoked the 'DataSource' creation.
ddsFilterVariable :: Lens' DescribeDataSources (Maybe DataSourceFilterVariable)
ddsFilterVariable = lens _ddsFilterVariable (\ s a -> s{_ddsFilterVariable = a});
-- | The less than or equal to operator. The 'DataSource' results will have
-- 'FilterVariable' values that are less than or equal to the value
-- specified with 'LE'.
ddsLE :: Lens' DescribeDataSources (Maybe Text)
ddsLE = lens _ddsLE (\ s a -> s{_ddsLE = a});
-- | Paging: keep requesting while a continuation token and results
-- are present.
instance AWSPager DescribeDataSources where
        page rq rs
          -- No continuation token: this was the last page.
          | stop (rs ^. ddssrsNextToken) = Nothing
          -- An empty result list also terminates paging.
          | stop (rs ^. ddssrsResults) = Nothing
          -- Otherwise re-issue the request carrying the next page's token.
          | otherwise =
            Just $ rq & ddsNextToken .~ rs ^. ddssrsNextToken
instance AWSRequest DescribeDataSources where
type Rs DescribeDataSources =
DescribeDataSourcesResponse
request = postJSON machineLearning
response
= receiveJSON
(\ s h x ->
DescribeDataSourcesResponse' <$>
(x .?> "Results" .!@ mempty) <*> (x .?> "NextToken")
<*> (pure (fromEnum s)))
instance ToHeaders DescribeDataSources where
toHeaders
= const
(mconcat
["X-Amz-Target" =#
("AmazonML_20141212.DescribeDataSources" ::
ByteString),
"Content-Type" =#
("application/x-amz-json-1.1" :: ByteString)])
instance ToJSON DescribeDataSources where
toJSON DescribeDataSources'{..}
= object
(catMaybes
[("EQ" .=) <$> _ddsEQ, ("GE" .=) <$> _ddsGE,
("Prefix" .=) <$> _ddsPrefix, ("GT" .=) <$> _ddsGT,
("NE" .=) <$> _ddsNE,
("NextToken" .=) <$> _ddsNextToken,
("SortOrder" .=) <$> _ddsSortOrder,
("Limit" .=) <$> _ddsLimit, ("LT" .=) <$> _ddsLT,
("FilterVariable" .=) <$> _ddsFilterVariable,
("LE" .=) <$> _ddsLE])
-- | The request is always posted to the service root.
instance ToPath DescribeDataSources where
        toPath _ = "/"
-- | No query-string parameters are used; everything travels in the body.
instance ToQuery DescribeDataSources where
        toQuery _ = mempty
-- | Represents the query results from a DescribeDataSources operation. The
-- content is essentially a list of 'DataSource'.
--
-- /See:/ 'describeDataSourcesResponse' smart constructor.
data DescribeDataSourcesResponse = DescribeDataSourcesResponse'
    { _ddssrsResults        :: !(Maybe [DataSource])  -- ^ Matching data sources, if any.
    , _ddssrsNextToken      :: !(Maybe Text)          -- ^ Token for the next page, when more follow.
    , _ddssrsResponseStatus :: !Int                   -- ^ HTTP response status code.
    } deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DescribeDataSourcesResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ddssrsResults'
--
-- * 'ddssrsNextToken'
--
-- * 'ddssrsResponseStatus'
describeDataSourcesResponse
:: Int -- ^ 'ddssrsResponseStatus'
-> DescribeDataSourcesResponse
describeDataSourcesResponse pResponseStatus_ =
DescribeDataSourcesResponse'
{ _ddssrsResults = Nothing
, _ddssrsNextToken = Nothing
, _ddssrsResponseStatus = pResponseStatus_
}
-- | A list of 'DataSource' that meet the search criteria.
ddssrsResults :: Lens' DescribeDataSourcesResponse [DataSource]
ddssrsResults = lens _ddssrsResults (\ s a -> s{_ddssrsResults = a}) . _Default . _Coerce;
-- | An ID of the next page in the paginated results that indicates at least
-- one more page follows.
ddssrsNextToken :: Lens' DescribeDataSourcesResponse (Maybe Text)
ddssrsNextToken = lens _ddssrsNextToken (\ s a -> s{_ddssrsNextToken = a});
-- | The response status code.
ddssrsResponseStatus :: Lens' DescribeDataSourcesResponse Int
ddssrsResponseStatus = lens _ddssrsResponseStatus (\rsp v -> rsp{_ddssrsResponseStatus = v})
|
fmapfmapfmap/amazonka
|
amazonka-ml/gen/Network/AWS/MachineLearning/DescribeDataSources.hs
|
mpl-2.0
| 10,112
| 0
| 14
| 2,332
| 1,625
| 972
| 653
| 172
| 1
|
{-|
Author : Luc Taesch 2003
Portability : portable ? (tested on XP)
   Illustrates more controls from wxHaskell; hacked from Controls.hs (Daan Leijen 2003):
   namely bitmap buttons, right-click menus, vertical labels on notebooks, and usage of tooltips
-}
module Main where
import Graphics.UI.WX
import Graphics.UI.WXCore
-- | Program entry point: start the wx event loop running 'gui'.
main :: IO ()
main
  = start gui
-- | Construct the demo window: a frame holding a right-labelled
-- notebook page with one bitmap button (tooltip + right-click popup
-- menu) and a read-only text control that captures the log output.
gui :: IO ()
gui = do
    -- main gui elements: frame, panel, text control, and the notebook
    frm <- frame [text := "Controls" ]
    pnl <- panel frm []
    -- route log messages into the (read-only) text control
    textlog <- textCtrl pnl [wrap := WrapLine, enabled := False]
    textCtrlMakeLogActiveTarget textlog
    logMessage "logging enabled"
    -- menu
    file <- menuPane [text := "&File"]
    aRightClick <- menuItem file [text := "Say Something\tCtrl+Q", help := "An interesting Message"]
    -- button page
    nb <- notebookRight pnl []
    page1 <- panel nb []
    ok <- bitmapButton page1
            [ picture := "../bitmaps/wxwin16.png"
            , text := "Ok"
            , on command := logMessage "bitmap button pressed"
            , tooltip := "tooltip"
            , on clickRight := (\pt -> menuPopup file pt pnl)
            ]
    -- specify layout
    set frm
        [ menuBar := [file]
        , layout := container pnl $
            column 0
              [ tabs nb
                  [ tab "buttons" (container page1 $ margin 10 $ floatCentre $ row 5 $ [widget ok]) ]
              , hfill $ widget textlog
              ]
        , on (menu aRightClick) := infoDialog frm "Say.." "Something"
        , clientSize := sz 400 300
        ]
    return ()
  where
    -- NOTE(review): 'logSelect' is unused here -- left over from the
    -- Controls.hs sample this file was hacked from; confirm before removing.
    logSelect labels w = do
        i <- get w selection
        s <- get w (item i)
        logMessage ("selected index: " ++ show i ++ ": " ++ s)
    -- Like 'notebook', but with the page labels created on the side
    -- (rather than on top): wxNB_RIGHT.
    notebookRight parent props = do
        nb <- notebookCreate parent idAny rectNull (wxCLIP_CHILDREN + wxNB_RIGHT)
        set nb props
        return nb
|
ekmett/wxHaskell
|
samples/contrib/NotebookRight.hs
|
lgpl-2.1
| 2,239
| 0
| 20
| 870
| 520
| 253
| 267
| 40
| 1
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module Blackbox.Types where
import Control.Lens
import Snap.Snaplet
import Snap.Snaplet.Heist
import Blackbox.FooSnaplet
import Blackbox.BarSnaplet
import Blackbox.EmbeddedSnaplet
import Snap.Snaplet.Session
-- | Top-level application state for the blackbox test site: the
-- composition of every snaplet the site mounts.
data App = App
    { _heist :: Snaplet (Heist App)        -- ^ Heist templating snaplet.
    , _foo :: Snaplet FooSnaplet           -- ^ Test snaplet "foo".
    , _bar :: Snaplet (BarSnaplet App)     -- ^ Test snaplet "bar".
    , _session :: Snaplet SessionManager   -- ^ Client session state.
    , _embedded :: Snaplet EmbeddedSnaplet -- ^ See 'EmbeddedSnaplet'.
    }

-- Generate lenses (heist, foo, bar, session, embedded) for the fields above.
makeLenses ''App

-- Heist handlers locate their snaplet through the 'heist' lens.
instance HasHeist App where heistLens = subSnaplet heist
|
snapframework/snap-templates
|
test/suite/Blackbox/Types.hs
|
bsd-3-clause
| 714
| 0
| 11
| 116
| 139
| 80
| 59
| 22
| 0
|
{-# LANGUAGE CPP, DeriveDataTypeable #-}
{-# OPTIONS -Wall #-}
-- | An 'Annotation' that sets the execution priority of the
-- statements. Statements with 'Balloon's will be allocated
-- as early as possible, and statements with negative balloons,
-- or @Stone@s, will be allocated as late as possible.
module Language.Paraiso.Annotation.Balloon (
Balloon(..)
) where
import Data.Dynamic
import Language.Paraiso.Prelude
import Prelude (Eq, Ord)
-- NOTE(review): the datatype context @(Ord a, Typeable a) =>@ only
-- constrains the constructor, not uses of the type, and is a
-- deprecated Haskell feature -- consider moving the constraints onto
-- the functions that need them.
data (Ord a, Typeable a) => Balloon a
  = Balloon a
  deriving (Eq, Ord, Typeable)
|
nushio3/Paraiso
|
Language/Paraiso/Annotation/Balloon.hs
|
bsd-3-clause
| 541
| 0
| 6
| 96
| 85
| 54
| 31
| 10
| 0
|
{- |
Module : Control.Monad.Cont
Copyright : (c) The University of Glasgow 2001,
(c) Jeff Newbern 2003-2007,
(c) Andriy Palamarchuk 2007
License : BSD-style (see the file LICENSE)
Maintainer : libraries@haskell.org
Stability : experimental
Portability : portable
[Computation type:] Computations which can be interrupted and resumed.
[Binding strategy:] Binding a function to a monadic value creates
a new continuation which uses the function as the continuation of the monadic
computation.
[Useful for:] Complex control structures, error handling,
and creating co-routines.
[Zero and plus:] None.
[Example type:] @'Cont' r a@
The Continuation monad represents computations in continuation-passing style
(CPS).
In continuation-passing style function result is not returned,
but instead is passed to another function,
received as a parameter (continuation).
Computations are built up from sequences
of nested continuations, terminated by a final continuation (often @id@)
which produces the final result.
Since continuations are functions which represent the future of a computation,
manipulation of the continuation functions can achieve complex manipulations
of the future of the computation,
such as interrupting a computation in the middle, aborting a portion
of a computation, restarting a computation, and interleaving execution of
computations.
The Continuation monad adapts CPS to the structure of a monad.
Before using the Continuation monad, be sure that you have
a firm understanding of continuation-passing style
and that continuations represent the best solution to your particular
design problem.
Many algorithms which require continuations in other languages do not require
them in Haskell, due to Haskell's lazy semantics.
Abuse of the Continuation monad can produce code that is impossible
to understand and maintain.
-}
module Control.Monad.Cont (
-- * MonadCont class
MonadCont(..),
-- * The Cont monad
Cont,
cont,
runCont,
evalCont,
mapCont,
withCont,
-- * The ContT monad transformer
ContT(ContT),
runContT,
evalContT,
mapContT,
withContT,
module Control.Monad,
module Control.Monad.Trans,
-- * Example 1: Simple Continuation Usage
-- $simpleContExample
-- * Example 2: Using @callCC@
-- $callCCExample
-- * Example 3: Using @ContT@ Monad Transformer
-- $ContTExample
) where
import Control.Monad.Cont.Class
import Control.Monad.Trans
import Control.Monad.Trans.Cont
import Control.Monad
{- $simpleContExample
Calculating length of a list continuation-style:
>calculateLength :: [a] -> Cont r Int
>calculateLength l = return (length l)
Here we use @calculateLength@ by making it to pass its result to @print@:
>main = do
> runCont (calculateLength "123") print
> -- result: 3
It is possible to chain 'Cont' blocks with @>>=@.
>double :: Int -> Cont r Int
>double n = return (n * 2)
>
>main = do
> runCont (calculateLength "123" >>= double) print
> -- result: 6
-}
{- $callCCExample
This example gives a taste of how escape continuations work, shows a typical
pattern for their usage.
>-- Returns a string depending on the length of the name parameter.
>-- If the provided string is empty, returns an error.
>-- Otherwise, returns a welcome message.
>whatsYourName :: String -> String
>whatsYourName name =
> (`runCont` id) $ do -- 1
> response <- callCC $ \exit -> do -- 2
> validateName name exit -- 3
> return $ "Welcome, " ++ name ++ "!" -- 4
> return response -- 5
>
>validateName name exit = do
> when (null name) (exit "You forgot to tell me your name!")
Here is what this example does:
(1) Runs an anonymous 'Cont' block and extracts value from it with
@(\`runCont\` id)@. Here @id@ is the continuation, passed to the @Cont@ block.
(1) Binds @response@ to the result of the following 'Control.Monad.Cont.Class.callCC' block,
binds @exit@ to the continuation.
(1) Validates @name@.
This approach illustrates advantage of using 'Control.Monad.Cont.Class.callCC' over @return@.
We pass the continuation to @validateName@,
and interrupt execution of the @Cont@ block from /inside/ of @validateName@.
(1) Returns the welcome message from the 'Control.Monad.Cont.Class.callCC' block.
This line is not executed if @validateName@ fails.
(1) Returns from the @Cont@ block.
-}
{- $ContTExample
'ContT' can be used to add continuation handling to other monads.
Here is an example how to combine it with @IO@ monad:
>import Control.Monad.Cont
>import System.IO
>
>main = do
> hSetBuffering stdout NoBuffering
> runContT (callCC askString) reportResult
>
>askString :: (String -> ContT () IO String) -> ContT () IO String
>askString next = do
> liftIO $ putStrLn "Please enter a string"
> s <- liftIO $ getLine
> next s
>
>reportResult :: String -> IO ()
>reportResult s = do
> putStrLn ("You entered: " ++ s)
Action @askString@ requests user to enter a string,
and passes it to the continuation.
@askString@ takes as a parameter a continuation taking a string parameter,
and returning @IO ()@.
Compare its signature to 'runContT' definition.
-}
|
ekmett/mtl
|
Control/Monad/Cont.hs
|
bsd-3-clause
| 5,231
| 0
| 5
| 1,029
| 105
| 76
| 29
| 21
| 0
|
-- -*- mode: haskell -*-
-- -- $Id$
import Challenger
import SAT.SAT
-- | Example submission: the SAT instance (two clauses) together with a
-- satisfying assignment as the proof.  x=True satisfies the first
-- clause; the second contains both x and ¬x, so it is always true.
student = Aufgabe
	 { problem = SAT
	 , instanz = [ (Pos "x", Pos "y", Pos "z")
		     , (Neg "x", Pos "x", Pos "y")
		     ]
	 , beweis = listToFM [("x", True),("y", False),("z", False)]
	 }
|
Erdwolf/autotool-bonn
|
src/SAT/doc/Aufgabe.hs
|
gpl-2.0
| 277
| 4
| 9
| 84
| 112
| 66
| 46
| 7
| 1
|
{-# LANGUAGE DeriveFunctor #-}
module Parser.AST where
-- | Literal constants that can occur in expressions.
data Const
  = ConstString String
  | ConstInt Int
  -- | ConstReal Double
  -- | ConstChar Char
  -- | ConstBool Bool
  deriving Eq

-- | A let-chain: zero or more definitions ending in the scoped expression.
data LetIn a
  = Let (Definition a) (LetIn a)
  | In (Expression a)
  deriving (Eq, Show, Functor)
-- | Wrap a body expression in a chain of 'Let's, one per definition.
makeLet :: Expression a -> [Definition a] -> LetIn a
makeLet body defs = foldr Let (In body) defs
-- | Definitions appearing inside a 'LetIn'.
data Definition a
  = Definition a [a] (LetIn a)  -- presumably: name, parameters, body -- confirm
  | Assign a (LetIn a)          -- presumably: name, bound value -- confirm
  -- | While Expression LetIn
  -- | If Expression LetIn LetIn
  deriving (Eq, Show, Functor)

-- | Expressions: applications, atoms and literal constants.
data Expression a
  = Application (Expression a) [Expression a]
  | Atom a
  | Constant Const
  deriving (Eq, Functor)
-- | Constants render via the underlying value's own 'show'.
instance Show Const where
  show c = case c of
    ConstString s -> show s
    ConstInt n    -> show n
-- | Applications render as the function followed by space-separated
-- arguments; atoms and constants delegate to their own 'show'.
instance Show a => Show (Expression a) where
  show expr = case expr of
    Constant c       -> show c
    Atom name        -> show name
    Application f xs -> show f ++ concatMap (\arg -> ' ' : show arg) xs
|
kayuri/HNC
|
Parser/AST.hs
|
lgpl-3.0
| 910
| 14
| 14
| 209
| 370
| 192
| 178
| 27
| 1
|
{-# LANGUAGE BangPatterns, CPP, DeriveFunctor #-}
-- | This module allows for streaming decoding of CSV data. This is
-- useful if you need to parse large amounts of input in constant
-- space. The API also allows you to ignore type conversion errors on
-- a per-record basis.
module Data.Csv.Streaming
(
-- * Usage example
-- $example
-- * Stream representation
-- $stream-representation
Records(..)
-- * Decoding records
-- $typeconversion
-- ** Index-based record conversion
-- $indexbased
, HasHeader(..)
, decode
, decodeWith
-- ** Name-based record conversion
-- $namebased
, decodeByName
, decodeByNameWith
) where
import Control.Applicative ((<$>), (<*>), pure)
import Control.DeepSeq (NFData(rnf))
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString.Lazy.Char8 as BL8
import Data.Foldable (Foldable(..))
import Data.Traversable (Traversable(..))
import Prelude hiding (foldr)
import Data.Csv.Conversion
import Data.Csv.Incremental hiding (decode, decodeByName, decodeByNameWith,
decodeWith)
import qualified Data.Csv.Incremental as I
import Data.Csv.Parser
import Data.Csv.Types
#if !MIN_VERSION_bytestring(0,10,0)
import qualified Data.ByteString.Lazy.Internal as BL -- for constructors
#endif
-- $example
--
-- A short usage example:
--
-- > for_ (decode NoHeader "John,27\r\nJane,28\r\n") $ \ (name, age :: Int) ->
-- > putStrLn $ name ++ " is " ++ show age ++ " years old"
--
-- N.B. The 'Foldable' instance, which is used above, skips records
-- that failed to convert. If you don't want this behavior, work
-- directly with the 'Cons' and 'Nil' constructors.
-- $stream-representation
--
-- A stream of records is represented as a (lazy) list that may
-- contain errors.
-- $typeconversion
--
-- Just like in the case of non-streaming decoding, there are two ways
-- to convert CSV records to and from and user-defined data types:
-- index-based conversion and name-based conversion.
-- $indexbased
--
-- See documentation on index-based conversion in "Data.Csv" for more
-- information.
-- $namebased
--
-- See documentation on name-based conversion in "Data.Csv" for more
-- information.
-- | A stream of parsed records. If type conversion failed for the
-- record, the error is returned as @'Left' errMsg@.
data Records a
= -- | A record or an error message, followed by more records.
Cons (Either String a) (Records a)
-- | End of stream, potentially due to a parse error. If a parse
-- error occured, the first field contains the error message.
-- The second field contains any unconsumed input.
| Nil (Maybe String) BL.ByteString
deriving (Eq, Functor, Show)
-- | Skips records that failed to convert.
instance Foldable Records where
    foldr = foldrRecords
#if MIN_VERSION_base(4,6,0)
    -- The strict left-fold method only exists in base >= 4.6.
    foldl' = foldlRecords'
#endif
-- | Right fold over the successfully-converted records; conversion
-- errors are skipped, and the end of the stream yields the seed.
foldrRecords :: (a -> b -> b) -> b -> Records a -> b
foldrRecords f = loop
  where
    loop z (Cons (Right r) more) = f r (loop z more)
    loop z _                     = z
{-# INLINE foldrRecords #-}
#if MIN_VERSION_base(4,6,0)
-- | Strict left fold over the successfully converted records.
--
-- Like 'foldrRecords', conversion failures ('Left') are now skipped
-- rather than terminating the fold, so the result agrees with the
-- documented 'Foldable' behaviour.
foldlRecords' :: (a -> b -> a) -> a -> Records b -> a
foldlRecords' f = go
  where
    go z (Cons (Right x) rs) = let z' = f z x in z' `seq` go z' rs
    go z (Cons (Left _) rs)  = go z rs
    go z (Nil _ _)           = z
{-# INLINE foldlRecords' #-}
#endif
-- | Traversal visits only the successfully converted records; 'Left'
-- error values and the terminating 'Nil' are passed through unchanged.
instance Traversable Records where
    traverse _ (Nil merr rest) = pure $ Nil merr rest
    traverse f (Cons x xs) = Cons <$> traverseElem x <*> traverse f xs
      where
        traverseElem (Left err) = pure $ Left err
        traverseElem (Right y)  = Right <$> f y

-- | Forces the whole stream, including error messages and any
-- unconsumed input carried by 'Nil'.
instance NFData a => NFData (Records a) where
    rnf (Cons r rs) = rnf r `seq` rnf rs
#if MIN_VERSION_bytestring(0,10,0)
    rnf (Nil errMsg rest) = rnf errMsg `seq` rnf rest
#else
    rnf (Nil errMsg rest) = rnf errMsg `seq` rnfLazyByteString rest

-- Older bytestring versions lack an NFData instance for lazy
-- ByteString, so walk the chunk spine by hand (each chunk is a strict
-- ByteString, already in normal form).
rnfLazyByteString :: BL.ByteString -> ()
rnfLazyByteString BL.Empty = ()
rnfLazyByteString (BL.Chunk _ b) = rnfLazyByteString b
#endif
-- | Efficiently deserialize CSV records in a streaming fashion.
-- Equivalent to @'decodeWith' 'defaultDecodeOptions'@.
decode :: FromRecord a
       => HasHeader     -- ^ Data contains header that should be
                        -- skipped
       -> BL.ByteString -- ^ CSV data
       -> Records a
decode = decodeWith defaultDecodeOptions

-- | Like 'decode', but lets you customize how the CSV data is parsed.
decodeWith :: FromRecord a
           => DecodeOptions -- ^ Decoding options
           -> HasHeader     -- ^ Data contains header that should be
                            -- skipped
           -> BL.ByteString -- ^ CSV data
           -> Records a
decodeWith !opts hasHeader s0 =
    go (BL.toChunks s0) (I.decodeWith opts hasHeader)
  where
    -- Feed the incremental parser one strict chunk at a time,
    -- reflecting its state into the lazy Records stream:
    --   Done: emit any remaining records, then end with unconsumed input.
    --   Fail: end the stream with the error and all unconsumed input.
    --   Many: emit the parsed records; an empty chunk list signals EOF
    --         by feeding B.empty to the continuation.
    go ss (Done xs)       = foldr Cons (Nil Nothing (BL.fromChunks ss)) xs
    go ss (Fail rest err) = Nil (Just err) (BL.fromChunks (rest:ss))
    go [] (Many xs k)     = foldr Cons (go [] (k B.empty)) xs
    go (s:ss) (Many xs k) = foldr Cons (go ss (k s)) xs
-- | Efficiently deserialize CSV in a streaming fashion. The data is
-- assumed to be preceeded by a header. Returns @'Left' errMsg@ if
-- parsing the header fails. Equivalent to @'decodeByNameWith'
-- 'defaultDecodeOptions'@.
decodeByName :: FromNamedRecord a
             => BL.ByteString -- ^ CSV data
             -> Either String (Header, Records a)
decodeByName = decodeByNameWith defaultDecodeOptions

-- TODO: Include something more in error messages?

-- | Like 'decodeByName', but lets you customize how the CSV data is
-- parsed.
decodeByNameWith :: FromNamedRecord a
                 => DecodeOptions -- ^ Decoding options
                 -> BL.ByteString -- ^ CSV data
                 -> Either String (Header, Records a)
decodeByNameWith !opts s0 = go (BL.toChunks s0) (I.decodeByNameWith opts)
  where
    -- Phase 1: feed chunks until the header parser finishes or fails.
    -- A header failure reports the error together with the unconsumed
    -- input for context.
    go ss (DoneH hdr p)    = Right (hdr, go2 ss p)
    go ss (FailH rest err) = Left $ err ++ " at " ++
                             show (BL8.unpack . BL.fromChunks $ rest : ss)
    go [] (PartialH k)     = go [] (k B.empty)
    go (s:ss) (PartialH k) = go ss (k s)

    -- Phase 2: stream the records; mirrors the state machine in
    -- 'decodeWith' (Done / Fail / Many with B.empty signalling EOF).
    go2 ss (Done xs)       = foldr Cons (Nil Nothing (BL.fromChunks ss)) xs
    go2 ss (Fail rest err) = Nil (Just err) (BL.fromChunks (rest:ss))
    go2 [] (Many xs k)     = foldr Cons (go2 [] (k B.empty)) xs
    go2 (s:ss) (Many xs k) = foldr Cons (go2 ss (k s)) xs
|
plow-technologies/cassava
|
Data/Csv/Streaming.hs
|
bsd-3-clause
| 6,429
| 0
| 13
| 1,554
| 1,435
| 790
| 645
| 80
| 7
|
module BuildDistr where
import System.Directory ( doesFileExist, doesDirectoryExist, removeFile
, getCurrentDirectory,setCurrentDirectory
, getDirectoryContents, createDirectoryIfMissing )
import System.Environment (getArgs)
import System.Process (system)
import Data.Time (getZonedTime, formatTime)
import BuildSpecific ( defaultHome, distrDir, srcDir, utilDir, parseDir, libDir
, manualDir, testsDir, rootDir, extension, version )
import System.Locale (defaultTimeLocale)
-- | License banner, already formatted as Haskell line comments; it is
-- prepended to every distributed source file by 'prepend'.
license =
  "-- Copyright (c) 2002-2013, Tim Sheard, Gabor Greif\n" ++
  "-- OGI School of Science & Engineering, Oregon Health & Science University\n" ++
  "-- Maseeh College of Engineering, Portland State University\n" ++
  "-- See LICENSE.txt for details.\n"
---------------------------------------------------------------------
-- | The full manifest of files that make up a distribution, as
-- (directory, base name, extension) triples. The extension selects the
-- copying strategy in 'move1file' (license-prepended source, verbatim
-- copy, or shell @cp@).
sources libDir parseDir srcDir testsDir rootDir utilDir =
  [ (libDir, "Auxillary", ".hs"),
    (libDir, "Bind", ".hs"),
    (libDir, "DepthFirstSearch", ".hs"),
    (libDir, "Monads", ".hs"),
    (libDir, "SCC", ".hs"),
    (parseDir, "StdTokenDef", ".hs"),
    (parseDir, "ParseError", ".hs"),
    (parseDir, "ParseExpr", ".hs"),
    (parseDir, "Parser", ".hs"),
    (parseDir, "ParserAll", ".hs"),
    (parseDir, "ParseToken", ".hs"),
    (parseDir, "TokenDefExample", ".hs"),
    (parseDir, "CommentDefExample", ".hs"),
    (srcDir, "ParserDef", ".hs"),
    (srcDir, "PrimParser", ".hs"),
    (srcDir, "CommentDef", ".hs"),
    (srcDir, "Encoding", ".hs"),
    (srcDir, "Infer", ".hs"),
    (srcDir, "LangEval", ".hs"),
    (srcDir, "Main", ".hs"),
    (srcDir, "RankN", ".hs"),
    (srcDir, "Syntax", ".hs"),
    (testsDir, "tests", ".prg"),
    (srcDir, "TokenDef", ".hs"),
    (srcDir, "Toplevel", ".hs"),
    (srcDir, "Value", ".hs"),
    (srcDir, "Manual", ".hs"),
    (srcDir, "Commands", ".hs"),
    (srcDir, "Narrow", ".hs"),
    (srcDir, "NarrowData", ".hs"),
    (srcDir, "Cooper", ".hs"),
    (srcDir, "SyntaxExt", ".hs"),
    (srcDir, "PureReadline",".hs"),
    (srcDir, "Version", ".hs"),
    (srcDir, "LangPrelude", ".prg"),
    (srcDir, "Parsing", ".prg"),
    (rootDir, "LICENSE", ".txt"),
    (srcDir, "Makefile",""),
    (utilDir, "omega",".cabal"),
    (utilDir, "Setup",".hs")
  ]
-- ====================================================================
-- Create the target directory if it doesn't exist, if it does then
-- remove all the files there to get ready to move new versions there
-- | Monadic if: run the boolean action, then choose between the two
-- alternatives based on its result.
ifM :: Monad m => m Bool -> m b -> m b -> m b
ifM test onTrue onFalse = test >>= \b -> if b then onTrue else onFalse
-- | Print the current working directory (GHCi convenience).
pwd = do { current <- getCurrentDirectory; putStrLn current}
-- | Short alias for 'setCurrentDirectory' (GHCi convenience).
cd s = setCurrentDirectory s
-- | Ensure the distribution directory exists and is empty: create it if
-- missing, otherwise delete every plain file inside it.
cleanTarget distrDir =
  ifM (doesDirectoryExist distrDir)
      (do { current <- getCurrentDirectory
          ; setCurrentDirectory distrDir
          ; allfiles <- getDirectoryContents distrDir
          -- Filter the special entries out explicitly instead of
          -- assuming they are the first two items: getDirectoryContents
          -- makes no ordering guarantee, so `drop 2` could have deleted
          -- nothing while leaving "." and ".." in the work list.
          ; let files = filter (`notElem` [".", ".."]) allfiles
          ; putStr (show files)
          -- Results of removeFile are unit; use mapM_ so we do not
          -- build an unused [()] list.
          ; mapM_ removeFile files
          ; setCurrentDirectory current
          })
      (createDirectoryIfMissing True distrDir)
-- | The current zoned time rendered as e.g. @"Mon Jan 01 12:00:00 UTC 2013"@.
getTime = fmap (formatTime defaultTimeLocale "%a %h %d %T %Z %Y") getZonedTime
-- | Write @targetpath@ as: the license banner, a timestamp line, a
-- version line, a blank line, then the full contents of @source2path@.
prepend time banner source2path targetpath = do
    writeFile targetpath header
    contents <- readFile source2path
    appendFile targetpath contents
  where
    header = banner ++ "-- " ++ time ++ "\n-- " ++ version ++ "\n\n"
-- | Copy a text file by reading it whole and writing it back out.
copyfile source target = readFile source >>= writeFile target
-- | Copy a file to @target@, wrapping its contents in a LaTeX
-- verbatim environment.
verbatimFile source target = do
    contents <- readFile source
    writeFile target ("\\begin{verbatim}\n" ++ contents ++ "\\end{verbatim}\n")
-- | Copy one manifest entry into the distribution directory, dispatching
-- on the extension:
--   * .txt / .cabal  - verbatim copy
--   * .ps / .pdf     - binary copy via shell @cp@ (readFile/writeFile are
--                      text-mode and would corrupt them)
--   * ""             - verbatim copy, no extension appended
--   * anything else  - source file; license header and timestamp prepended
move1file time distrDir (dir, name, typ@".txt") =
  copyfile (dir ++ name ++ typ) (distrDir ++ "/" ++ name ++ typ)
move1file time distrDir (dir, name, typ@".cabal") =
  copyfile (dir ++ name ++ typ) (distrDir ++ "/" ++ name ++ typ)
-- The cp targets now insert "/" like every other clause; previously
-- these two relied on distrDir carrying a trailing slash, unlike the
-- rest of the file. A doubled "//" is harmless if it does.
move1file time distrDir (dir, name, typ@".ps") =
  system ("cp " ++ dir ++ name ++ typ ++ " " ++ distrDir ++ "/" ++ name ++ typ) >> return ()
move1file time distrDir (dir, name, typ@".pdf") =
  system ("cp " ++ dir ++ name ++ typ ++ " " ++ distrDir ++ "/" ++ name ++ typ) >> return ()
move1file time distrDir (dir, name, "") =
  copyfile (dir ++ name) (distrDir ++ "/" ++ name)
move1file time distrDir (dir, name, typ) =
  prepend time license
          (dir ++ name ++ typ) (distrDir ++ "/" ++ name ++ typ)
-- | Run @make@ in @dir@ with the configured extension; also reports
-- which ghc is on the PATH. Exit codes from 'system' are ignored.
compile dir =
  do { setCurrentDirectory dir
     ; system "which ghc"
     ; system ("make EXT=" ++ extension)
     }
-- | Regenerate @Version.hs@ in the source directory, baking in the
-- distribution version string and the build timestamp.
writeVersionInfo time srcDir = writeFile versionfile contents
  where
    versionfile = srcDir ++ "Version.hs"
    contents = unlines
      [ "module Version where"
      , "version = \"" ++ version ++ "\""
      , "buildtime = \"" ++ time ++ "\""
      ]
-- | Manifest entries for the rendered manual, one per output format.
manuals :: String -> [(String, String, String)]
manuals manualDir = [ (manualDir, "OmegaManual", ext) | ext <- [".ps", ".pdf"] ]
-- | Build the manual via @make manual@ in @dir@, then copy the .ps/.pdf
-- outputs from @manualDir@ into the distribution directory.
makeManual dir time distrDir manualDir =
  do { system ("make -C " ++ dir ++ " manual EXT=" ++ extension)
     ; mapM (move1file time distrDir) (manuals manualDir)
     }
-- | Build a distribution: update sources, clean the target directory,
-- regenerate Version.hs, copy the manifest, build the manual, then run
-- @make clean@ in the target and report the directories used. The root
-- directory may be given as the single command-line argument; otherwise
-- 'defaultHome' from BuildSpecific is used.
main =
  do { time <- getTime
     ; putStr time
     ; home <- do { ans <- getArgs
                  ; case ans of
                      [x] -> return x
                      [] -> return defaultHome }
     ; let libDir'    = libDir home
           parseDir'  = parseDir home
           srcDir'    = srcDir home
           testsDir'  = testsDir home
           rootDir'   = rootDir home
           utilDir'   = utilDir home
           distrDir'  = distrDir home
           manualDir' = manualDir home
     ; system $ "make -C " ++ srcDir' ++ " update"
     ; cleanTarget distrDir'
     ; writeVersionInfo time srcDir'
     ; mapM (move1file time distrDir') $ sources libDir' parseDir' srcDir' testsDir' rootDir' utilDir'
     ; makeManual srcDir' time distrDir' manualDir' -- compiles, calls omega -manual, and then LaTeX
     ; setCurrentDirectory $ distrDir'
     ; system "make clean"
     ; putStr ("\n" ++ version ++ "\n" ++ time ++ "\n")
     ; putStr ("Target Directory: " ++ distrDir' ++ "\n")
     ; putStr ("Root Directory: " ++ rootDir' ++ "\n")
     ; putStr ("Source Directory: " ++ srcDir' ++ "\n")
     ; putStr ("Parse Directory: " ++ parseDir' ++ "\n")
     ; putStr ("Library Directory: " ++ libDir' ++ "\n")
     }
|
cartazio/omega
|
util/BuildDistr.hs
|
bsd-3-clause
| 6,421
| 0
| 15
| 1,576
| 1,930
| 1,075
| 855
| 143
| 2
|
{- |
Module : Neovim.Plugin.IPC
Description : Communication between Haskell processes/threads
Copyright : (c) Sebastian Witte
License : Apache-2.0
Maintainer : woozletoff@gmail.com
Stability : experimental
This module reexports publicly available means to communicate between different
plugins (or more generally threads running in the same plugin provider).
-}
module Neovim.Plugin.IPC (
SomeMessage(..),
fromMessage,
) where
import Neovim.Plugin.IPC.Classes
|
saep/nvim-hs
|
library/Neovim/Plugin/IPC.hs
|
apache-2.0
| 507
| 0
| 5
| 103
| 28
| 20
| 8
| 4
| 0
|
{-# LANGUAGE NoImplicitPrelude #-}
module ReadmeExample where
import Numeric.Units.Dimensional.Prelude
import Numeric.Units.Dimensional.NonSI (mile)
-- | One leg of the journey.
leg :: Length Double
leg = 1 *~ mile -- *~ combines a raw number and a unit to form a quantity

-- | Speeds driven on successive legs.
speeds :: [Velocity Double]
speeds = [60, 50, 40, 30] *~~ (kilo meter / hour)
-- *~~ does the same thing for a whole Functor at once
-- Parentheses are required around unit expressions that are comingled with *~, /~, *~~, or /~~ operations

-- | Total travel time: each leg has the same length but its own speed.
timeOfJourney :: Time Double
timeOfJourney = sum $ fmap (leg /) speeds
-- We can use dimensional versions of ordinary functions like / and sum to combine quantities

averageSpeed :: Velocity Double
averageSpeed = _4 * leg / timeOfJourney
-- _4 is an alias for the dimensionless number 4

-- | Journey duration in whole seconds, rounded up.
wholeSeconds :: Integer
wholeSeconds = ceiling $ timeOfJourney /~ second
-- /~ lets us recover a raw number from a quantity and a unit in which it should be expressed

-- | Print the journey statistics, demonstrating showIn with explicit
-- units as well as the SI-basis Show instance.
main :: IO ()
main = do
  putStrLn $ "Length of journey is: " ++ showIn minute timeOfJourney
  putStrLn $ "Average speed is: " ++ showIn (mile / hour) averageSpeed
  putStrLn $ "If we don't want to be explicit about units, the show instance uses the SI basis: " ++ show averageSpeed
  putStrLn $ "The journey requires " ++ show wholeSeconds ++ " seconds, rounded up to the nearest second."
|
bjornbm/dimensional-dk
|
examples/ReadmeExample.hs
|
bsd-3-clause
| 1,373
| 0
| 10
| 281
| 240
| 131
| 109
| 20
| 1
|
module MediaWiki.API.Query.DeletedRevisions.Import where
import MediaWiki.API.Types
import MediaWiki.API.Utils
import MediaWiki.API.Query.DeletedRevisions
import Text.XML.Light.Types
import Text.XML.Light.Proc ( strContent )
import Control.Monad
import Data.Maybe
-- | Parse a raw XML API response string into a
-- 'DeletedRevisionsResponse'; errors come back as a message plus a list
-- of detail strings.
stringXml :: String -> Either (String,[{-Error msg-}String]) DeletedRevisionsResponse
stringXml s = parseDoc xml s

-- | Decode the @\<api\>\<query\>\<revisions\>@ document structure,
-- collecting one 'DeletedRevision' per @\<page\>@ child and picking up
-- the optional @query-continue@ paging token.
xml :: Element -> Maybe DeletedRevisionsResponse
xml e = do
   guard (elName e == nsName "api")
   let es1 = children e
   p <- pNode "query" es1
   let es = children p
   ps <- fmap (mapMaybe xmlPage) (fmap children $ pNode "revisions" es)
   let cont = pNode "query-continue" es1 >>= xmlContinue "deletedrevs" "drstart"
   return emptyDeletedRevisionsResponse{drRevisions=ps,drContinue=cont}

-- | Decode one @\<page\>@ element into a 'DeletedRevision'. Absent
-- attributes fall back to defaults: ns \"0\", empty title, 'Nothing'
-- for the optional fields; a missing @minor@ attribute means not minor,
-- and empty element text means no content.
xmlPage :: Element -> Maybe DeletedRevision
xmlPage e = do
   guard (elName e == nsName "page")
   let ns = fromMaybe "0" $ pAttr "ns" e
   let tit = fromMaybe "" $ pAttr "title" e
   let pid = pAttr "pageid" e
   let pg = emptyPageTitle{pgNS=ns,pgTitle=tit,pgMbId=pid}
   let ts = pAttr "timestamp" e
   let re = pAttr "revid" e
   let usr = pAttr "user" e
   let co = pAttr "comment" e
   let ism = isJust $ pAttr "minor" e
   let le = pAttr "len" e >>= readMb
   let cts = case (strContent e) of { "" -> Nothing; xs -> Just xs}
   let tok = pAttr "token" e
   return emptyDeletedRevision
     { drPage = pg
     , drTimestamp = ts
     , drRevId = re
     , drUser = usr
     , drComment = co
     , drIsMinor = ism
     , drLength = le
     , drContent = cts
     , drToken = tok
     }
|
HyperGainZ/neobot
|
mediawiki/MediaWiki/API/Query/DeletedRevisions/Import.hs
|
bsd-3-clause
| 1,627
| 0
| 13
| 408
| 574
| 291
| 283
| 44
| 2
|
module IO (
Handle, HandlePosn,
IOMode(ReadMode,WriteMode,AppendMode,ReadWriteMode),
BufferMode(NoBuffering,LineBuffering,BlockBuffering),
SeekMode(AbsoluteSeek,RelativeSeek,SeekFromEnd),
stdin, stdout, stderr,
openFile, hClose, hFileSize, hIsEOF, isEOF,
hSetBuffering, hGetBuffering, hFlush,
hGetPosn, hSetPosn, hSeek,
hWaitForInput, hReady, hGetChar, hGetLine, hLookAhead, hGetContents,
hPutChar, hPutStr, hPutStrLn, hPrint,
hIsOpen, hIsClosed, hIsReadable, hIsWritable, hIsSeekable,
isAlreadyExistsError, isDoesNotExistError, isAlreadyInUseError,
isFullError, isEOFError,
isIllegalOperation, isPermissionError, isUserError,
ioeGetErrorString, ioeGetHandle, ioeGetFileName,
try, bracket, bracket_,
-- ...and what the Prelude exports
IO, FilePath, IOError, ioError, userError, interact,
putChar, putStr, putStrLn, print, getChar, getLine, getContents,
readFile, writeFile, appendFile, readIO, readLn
) where
import System.IO
import Control.Exception (try)
import System.IO.Error hiding (try)
-- | The 'bracket' function captures a common allocate, compute, deallocate
-- idiom in which the deallocation step must occur even in the case of an
-- error during computation. This is similar to try-catch-finally in Java.
--
-- This version handles only IO errors, as defined by Haskell 98.
-- The version of @bracket@ in "Control.Exception" handles all exceptions,
-- and should be used instead.
bracket :: IO a -> (a -> IO b) -> (a -> IO c) -> IO c
bracket before after m = do
        x  <- before
        rs <- try (m x)
        -- Release before inspecting the result, so cleanup runs whether
        -- or not the computation raised an IOError. (Exceptions other
        -- than IOError escape 'try' and skip the cleanup -- hence the
        -- advice above to prefer Control.Exception.bracket.)
        _ <- after x
        case rs of
           Right r -> return r
           Left  e -> ioError e

-- | A variant of 'bracket' where the middle computation doesn't want @x@.
--
-- This version handles only IO errors, as defined by Haskell 98.
-- The version of @bracket_@ in "Control.Exception" handles all exceptions,
-- and should be used instead.
bracket_ :: IO a -> (a -> IO b) -> IO c -> IO c
bracket_ before after m = do
         x  <- before
         rs <- try m
         -- Same ordering as 'bracket': cleanup runs before the result
         -- (or captured IOError) is propagated.
         _ <- after x
         case rs of
            Right r -> return r
            Left  e -> ioError e
|
m-alvarez/jhc
|
compat/haskell98/IO.hs
|
mit
| 2,203
| 0
| 10
| 507
| 495
| 290
| 205
| 45
| 2
|
module Syntax where
-- | Variable and binder names are plain strings.
type Name = String

-- | Core lambda-calculus expressions with literals and primitive
-- binary operators.
data Expr
  = Lam Name Expr      -- ^ lambda abstraction
  | App Expr Expr      -- ^ function application
  | Var Name           -- ^ variable reference
  | Lit Lit            -- ^ literal value
  | Op Binop Expr Expr -- ^ primitive binary operation
  deriving (Eq,Show)

-- | Literal values: machine integers and booleans.
data Lit
  = LInt Int
  | LBool Bool
  deriving (Show, Eq, Ord)

-- | Primitive binary operators: arithmetic plus equality.
data Binop = Add | Sub | Mul | Eql
  deriving (Eq, Ord, Show)
|
FranklinChen/write-you-a-haskell
|
chapter9/happy/Syntax.hs
|
mit
| 284
| 0
| 6
| 80
| 118
| 68
| 50
| 15
| 0
|
{-# LANGUAGE OverloadedStrings #-}
module Util.Blaze
(toStylesheet,
toScript,
toLink,
mdToHTML) where
import Text.Blaze ((!))
import qualified Text.Blaze.Html5 as H
import qualified Text.Blaze.Html5.Attributes as A
import Text.Markdown (markdown, def)
import Data.Text.Lazy (Text)
-- | A @\<link rel="stylesheet"\>@ element referencing the given CSS URL.
-- Attributes are applied in the same order as before (rel, type, href)
-- so the rendered markup is unchanged.
toStylesheet :: String -> H.Html
toStylesheet url =
    let baseLink = H.link ! A.rel "stylesheet" ! A.type_ "text/css"
    in baseLink ! A.href (H.stringValue url)
-- | A @\<script src=...\>@ element with an empty body.
toScript :: String -> H.Html
toScript url = (H.script ! A.src (H.stringValue url)) ""
-- | An anchor element pointing at @url@ whose body is the given text.
toLink :: String -> String -> H.Html
toLink url label =
    let anchor = H.a ! A.href (H.stringValue url)
    in anchor (H.toHtml label)
-- | mdToHTML takes in the contents of a file written in Mark Down and converts it to
-- blaze-HTML.
--
-- Uses the markdown package's default settings ('def').
mdToHTML :: Text -> H.Html
mdToHTML = markdown def
|
Ian-Stewart-Binks/courseography
|
hs/Util/Blaze.hs
|
gpl-3.0
| 874
| 0
| 10
| 224
| 248
| 138
| 110
| 22
| 1
|
module HAD.Y2014.M03.D25.Exercise
( Board
, board
, getList
) where
import Data.List (groupBy)
import Test.QuickCheck
-- Preamble
-- $setup
-- >>> import Control.Applicative ((<$>), (<*>))
-- >>> import Data.List (group)
-- A board is a "square list of list"
-- The "square" form, is ensure by the constructor (board)
newtype Board a = Board {getList :: [[a]]}
  deriving (Eq, Show, Read)

-- | Exercise
-- Build an Arbitrary instance for Board of any given size
--
-- The external list length is equal to the length of each internal lists
-- prop> (1==) . length . group . (flip (:) <*> length) . map length $ getList bs
--
instance Arbitrary a => Arbitrary (Board a) where
  -- Intentionally left 'undefined': filling this in is the exercise.
  arbitrary = undefined
-- Just some extra content, it isn't useful for today's exercise
-- Constructor
-- | board Yesterday's squareOf, build a square board with initial values:
-- pad the input list out with the fill value, chop it into rows of n,
-- and keep the first n rows.
board :: Int -> a -> [a] -> [[a]]
board n fill xs = take n rows
  where
    rows   = map (take n) (iterate (drop n) padded)
    padded = xs ++ repeat fill
|
1HaskellADay/1HAD
|
exercises/HAD/Y2014/M03/D25/Exercise.hs
|
mit
| 975
| 0
| 10
| 196
| 187
| 112
| 75
| 12
| 1
|
module Jhc.Numeric where
import Jhc.IO
import Jhc.Num
import Jhc.Type.Basic
import Jhc.Order
import Jhc.Type.Float
import Jhc.Class.Real
infixr 8 ^, ^^
{-# SPECIALIZE gcd :: Int -> Int -> Int #-}
{-# SPECIALIZE gcd :: Integer -> Integer -> Integer #-}
-- | Greatest common divisor via Euclid's algorithm on absolute values.
-- @gcd 0 0@ is an error, per the Haskell 98 Prelude specification.
gcd :: (Integral a) => a -> a -> a
gcd 0 0 = error "Prelude.gcd: gcd 0 0 is undefined"
gcd x y = gcd' (abs x) (abs y)
  where gcd' x 0 = x
        gcd' x y = gcd' y (x `rem` y)
{-# SPECIALIZE lcm :: Int -> Int -> Int #-}
{-# SPECIALIZE lcm :: Integer -> Integer -> Integer #-}
-- | Least common multiple; zero if either argument is zero, otherwise
-- non-negative by construction.
lcm :: (Integral a) => a -> a -> a
lcm 0 _ = 0
lcm _ 0 = 0
lcm a b = abs ((a `quot` gcd a b) * b)
{-# SPECIALIZE (^) :: Int -> Int -> Int #-}
{-# SPECIALIZE (^) :: Integer -> Int -> Integer #-}
{-# SPECIALIZE (^) :: Double -> Int -> Double #-}
-- | Raise to a non-negative integral power using binary
-- (repeated-squaring) exponentiation; a negative exponent is an error.
(^) :: (Num a, Integral b) => a -> b -> a
x ^ 0 = 1
x ^ n | n > 0 = f x (n-1) x
  where -- f carries the running product in y; g squares the base while
        -- the remaining exponent is even.
        f _ 0 y = y
        f x n y = g x n where
                  g x n | even n = g (x*x) (n `quot` 2)
                        | True = f x (n-1) (x*y)
_ ^ _ = error "Prelude.^: negative exponent"

-- | Raise to any integral power; negative exponents go through 'recip'.
(^^) :: (Fractional a, Integral b) => a -> b -> a
x ^^ n = if n >= 0 then x^n else recip (x^(-n))
|
m-alvarez/jhc
|
lib/jhc/Jhc/Numeric.hs
|
mit
| 1,414
| 0
| 12
| 586
| 496
| 263
| 233
| 34
| 2
|
module A2 where
import D2
-- NOTE(review): 'sumSq' declares two parameters but 'ys' is unused, and
-- 'main' applies only one argument. This file sits under a HaRe
-- remove-one-parameter refactoring test, so the mismatch appears to be
-- the fixture's expected AST output -- confirm against the harness
-- before "fixing".
sumSq xs ys = (sum (map sq xs)) + (sumSquares xs)

main = sumSq [1 .. 4]
|
kmate/HaRe
|
old/testing/rmOneParameter/A2_AstOut.hs
|
bsd-3-clause
| 101
| 0
| 9
| 25
| 55
| 30
| 25
| 4
| 1
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module AppCache where
import Control.Monad (when)
import Control.Monad.Trans.Writer
import Data.Hashable (hashWithSalt)
import Data.List (intercalate)
import qualified Data.Set as Set
import Data.Text (Text)
import Data.Text (pack)
import Language.Haskell.TH.Syntax
import Yesod.Core
import Yesod.Routes.TH
-- | The rendered cache-manifest text.
newtype AppCache = AppCache { unAppCache :: Text }

-- | Template Haskell splice: collect every fully-static route carrying
-- the @CACHE@ attribute and render an HTML5 appcache manifest listing
-- them. The manifest's version line is a hash of the body, so it
-- changes exactly when the cached route set changes.
appCache :: [ResourceTree String] -> Q Exp
appCache trees = do
    piecesSet <- execWriterT $ mapM_ (goTree id) trees
    let body = unlines $ map toPath $ Set.toList piecesSet
        hash = hashWithSalt 0 body
        total = concat
            [ "CACHE MANIFEST\n# Version: "
            , show hash
            , "\n\nCACHE:\n"
            , body
            ]
    [|return (AppCache (pack total))|]
  where
    -- Render a list of path pieces as an absolute URL path.
    toPath [] = "/"
    toPath x = concatMap ('/':) x

-- | Walk the route tree accumulating the piece lists of leaf routes
-- tagged with @CACHE@; @front@ prepends the pieces of enclosing
-- parents.
goTree :: Monad m
       => ([String] -> [String])
       -> ResourceTree String
       -> WriterT (Set.Set [String]) m ()
goTree front (ResourceLeaf res) = do
    pieces' <- goPieces (resourceName res) $ resourcePieces res
    when ("CACHE" `elem` resourceAttrs res) $
        tell $ Set.singleton $ front pieces'
goTree front (ResourceParent name pieces trees) = do
    pieces' <- goPieces name pieces
    mapM_ (goTree $ front . (pieces' ++)) trees

-- | Extract the static path pieces of a route; a dynamic piece is a
-- (monadic) failure, because an appcache entry must be a concrete URL.
goPieces :: Monad m => String -> [(CheckOverlap, Piece String)] -> m [String]
goPieces name =
    mapM (goPiece . snd)
  where
    goPiece (Static s) = return s
    goPiece (Dynamic _) = fail $ concat
        [ "AppCache only applies to fully-static paths, but "
        , name
        , " has dynamic pieces."
        ]

-- | Serve the manifest text as-is.
instance ToContent AppCache where
    toContent = toContent . unAppCache

-- | Manifests are served with the @text/cache-manifest@ MIME type.
instance ToTypedContent AppCache where
    toTypedContent = TypedContent "text/cache-manifest" . toContent
|
ygale/yesod
|
demo/appcache/AppCache.hs
|
mit
| 2,038
| 0
| 13
| 637
| 570
| 302
| 268
| 50
| 2
|
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE TupleSections #-}
module Main where
import Data.Ix (range)
import Data.List (iterate')
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as M
import Data.Maybe (mapMaybe)
import Data.Monoid (Sum (..))
import Linear.V2
-- | The three ground states from AoC 2018 day 18.
data Acre = Open | Tree | Yard deriving (Eq, Ord)
-- | The simulated area, keyed by (row, column) coordinates.
type Forest = Map YX Acre
-- | Coordinates as a 2-vector: V2 y x.
type YX = V2 Int
-- | Build the forest map from the puzzle text: row/column index ranges
-- are zipped against the lines and characters, and unrecognised
-- characters are simply dropped.
parseInput :: (Int, Int) -> (Int, Int) -> String -> Forest
parseInput ys xs input = M.fromList
    [ (V2 y x, acre)
    | (y, row) <- zip (range ys) (lines input)
    , (x, c)   <- zip (range xs) row
    , acre     <- acreOf c
    ]
  where
    acreOf '.' = [Open]
    acreOf '|' = [Tree]
    acreOf '#' = [Yard]
    acreOf _   = []
-- | Tally the acre kinds among the up-to-8 neighbours of @yx@;
-- positions outside the map (via 'M.!?') contribute nothing.
counts :: Forest -> YX -> Map Acre Int
counts m yx =
  M.fromListWith (+) . fmap (, 1) . mapMaybe (m M.!?) . filter (/= yx) $ range
    (yx - pure 1, yx + pure 1)

-- | Advance the simulation one minute, applying the puzzle's
-- transition rules to every acre simultaneously.
step :: Forest -> Forest
step m = M.mapWithKey f m
  where
    f yx = \case
      Open | count Tree >= 3 -> Tree
           | otherwise       -> Open
      Tree | count Yard >= 3 -> Yard
           | otherwise       -> Tree
      Yard | count Yard >= 1 && count Tree >= 1 -> Yard
           | otherwise                          -> Open
      where count b = M.findWithDefault 0 b $ counts m yx
-- | Resource value of the forest: number of wooded acres times number
-- of lumberyards.
score :: Forest -> Int
score forest = trees * yards
  where
    (Sum trees, Sum yards) = foldMap tally forest
    tally Open = mempty
    tally Tree = (Sum 1, Sum 0)
    tally Yard = (Sum 0, Sum 1)
-- | Resource value after simulating exactly 10 minutes.
part1 :: Forest -> Int
part1 forest = score (iterate' step forest !! 10)
-- | Resource value after @n@ minutes, exploiting that the simulation
-- eventually enters a cycle: record each state with the minute it first
-- appeared; when a repeat is found, reduce @n@ modulo the cycle length
-- and index into the already-computed trajectory.
part2 :: Int -> Forest -> Int
part2 n m = score . (!! go 1 (M.singleton m 0) m) $ iterate' step m
  where
    go !i !seen !x =
      let x' = step x
      in case seen M.!? x' of
           Nothing   -> go (i + 1) (M.insert x' i seen) x'
           Just time -> let extra = (n - time) `mod` (i - time) in extra + time

-- | Read the 50x50 puzzle input from @input@ and print both answers.
main :: IO ()
main = do
  m <- parseInput (0, 49) (0, 49) <$> readFile "input"
  print $ part1 m
  print $ part2 1000000000 m
|
genos/online_problems
|
advent_of_code_2018/day18/src/Main.hs
|
mit
| 1,967
| 0
| 18
| 635
| 914
| 471
| 443
| 58
| 4
|
module Feature.OptionsSpec where
import Network.Wai (Application)
import Network.Wai.Test (SResponse (..))
import Network.HTTP.Types
import Test.Hspec
import Test.Hspec.Wai
import Protolude
import SpecHelper
-- | Verify that OPTIONS requests advertise exactly the verbs PostgREST
-- supports for each kind of relation (plain table, auto-updatable view,
-- trigger-backed view). The repeated request/assert pair is factored
-- into the local 'allows' helper; each test reads as path + expectation.
spec :: SpecWith ((), Application)
spec = describe "Allow header" $ do
  context "a table" $
    it "includes read/write verbs for writeable table" $
      allows "/items" "OPTIONS,GET,HEAD,POST,PUT,PATCH,DELETE"

  context "a view" $ do
    context "auto updatable" $ do
      it "includes read/write verbs for auto updatable views with pk" $
        allows "/projects_auto_updatable_view_with_pk"
               "OPTIONS,GET,HEAD,POST,PUT,PATCH,DELETE"
      it "includes read/write verbs for auto updatable views without pk" $
        allows "/projects_auto_updatable_view_without_pk"
               "OPTIONS,GET,HEAD,POST,PATCH,DELETE"

    context "non auto updatable" $ do
      it "includes read verbs for non auto updatable views" $
        allows "/projects_view_without_triggers" "OPTIONS,GET,HEAD"
      it "includes read/write verbs for insertable, updatable and deletable views with pk" $
        allows "/projects_view_with_all_triggers_with_pk"
               "OPTIONS,GET,HEAD,POST,PUT,PATCH,DELETE"
      it "includes read/write verbs for insertable, updatable and deletable views without pk" $
        allows "/projects_view_with_all_triggers_without_pk"
               "OPTIONS,GET,HEAD,POST,PATCH,DELETE"
      it "includes read and insert verbs for insertable views" $
        allows "/projects_view_with_insert_trigger" "OPTIONS,GET,HEAD,POST"
      it "includes read and update verbs for updatable views" $
        allows "/projects_view_with_update_trigger" "OPTIONS,GET,HEAD,PATCH"
      it "includes read and delete verbs for deletable views" $
        allows "/projects_view_with_delete_trigger" "OPTIONS,GET,HEAD,DELETE"
  where
    -- Issue an OPTIONS request against @path@ and assert that the Allow
    -- header carries exactly the expected comma-separated verb list.
    allows path verbs = do
      r <- request methodOptions path [] ""
      liftIO $ simpleHeaders r `shouldSatisfy` matchHeader "Allow" verbs
|
steve-chavez/postgrest
|
test/Feature/OptionsSpec.hs
|
mit
| 2,889
| 0
| 19
| 708
| 563
| 264
| 299
| 59
| 1
|
module BACnet
(
module BACnet.Reader,
module BACnet.Writer,
) where
import BACnet.Reader
import BACnet.Writer
|
michaelgwelch/bacnet
|
src/BACnet.hs
|
mit
| 119
| 0
| 5
| 22
| 29
| 19
| 10
| 6
| 0
|
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE ConstraintKinds #-}
module Protop.Core.Compositions
( (:.)(..)
, (:.|)(..)
, (:|.)(..)
, ASS(..)
) where
import Data.Proxy (Proxy(..))
import Protop.Core.Morphisms
import Protop.Core.Proofs
import Protop.Core.Setoids
-- | Two morphisms compose when the source of the first matches the
-- target of the second.
type CComp a b = (IsMorphism a, IsMorphism b, Source a ~ Target b)

infixr 9 :.

-- | Composition of morphisms: @f :. g@ is \"f after g\".
data (:.) :: * -> * -> * where
    (:.) :: CComp a b => a -> b -> a :. b

instance Show (a :. b) where
    show (f :. g) = "(" ++ show f ++ " . " ++ show g ++ ")"

-- | A composite is itself a morphism; its setoid action is the
-- composition of the components' actions.
instance CComp a b => IsMorphism (a :. b) where
    type Source (a :. b) = Source b
    type Target (a :. b) = Target a
    onDomains (f :. g) = setComp (onDomains f) (onDomains g)
    proxy' _ = proxy' Proxy :. proxy' Proxy

-- | A morphism may be composed on the left of a proof when its source
-- matches the proof's target.
type CCOMPLEFT a b = (IsMorphism a, IsProof b, Source a ~ TARGET b)

-- No precedence digit here, so this fixity defaults to 9 (the Haskell
-- default), matching :. above.
infix :.|

-- | Whiskering: a morphism applied to both sides of a proof.
data (:.|) :: * -> * -> * where
    (:.|) :: CCOMPLEFT a b => a -> b -> a :.| b

instance Show (a :.| b) where
    show (a :.| b) = "(" ++ show a ++ " . " ++ show b ++ ")"

-- | The resulting proof equates the left-composites of the original
-- proof's two sides; pointwise, f's underlying function is applied to
-- the proof's witness.
instance CCOMPLEFT a b => IsProof (a :.| b) where
    type Lhs (a :.| b) = a :. Lhs b
    type Rhs (a :.| b) = a :. Rhs b
    proof (f :.| p) x = let Functoid _ g = onDomains f in g $ proof p x
    proxy'' _ = proxy' Proxy :.| proxy'' Proxy

-- | Symmetric case: a proof composed on the left of a morphism.
type CCOMPRIGHT a b = (IsProof a, IsMorphism b, SOURCE a ~ Target b)

infix :|.

-- | Whiskering on the other side: the morphism feeds its result into
-- the proof.
data (:|.) :: * -> * -> * where
    (:|.) :: CCOMPRIGHT a b => a -> b -> a :|. b

instance Show (a :|. b) where
    show (a :|. b) = "(" ++ show a ++ " . " ++ show b ++ ")"

instance CCOMPRIGHT a b => IsProof (a :|. b) where
    type Lhs (a :|. b) = Lhs a :. b
    type Rhs (a :|. b) = Rhs a :. b
    proof (p :|. f) x = proof p $ f .$ x
    proxy'' _ = proxy'' Proxy :|. proxy' Proxy

-- | Three pairwise-composable morphisms, for stating associativity.
type CASS a b c = (IsMorphism a, IsMorphism b, IsMorphism c, Source a ~ Target b, Source b ~ Target c)

-- | Associativity proof: @(a :. b) :. c@ equals @a :. b :. c@; the
-- witness is reflexivity since both sides compute the same element.
data ASS :: * -> * -> * -> * where
    ASS :: CASS a b c => a -> b -> c -> ASS a b c

instance Show (ASS a b c) where
    show (ASS a b c) = "(ASS " ++ show a ++ " " ++ show b ++ " " ++ show c ++ ")"

instance CASS a b c => IsProof (ASS a b c) where
    type Lhs (ASS a b c) = (a :. b) :. c
    type Rhs (ASS a b c) = a :. b :. c
    proof (ASS f g h) x = reflexivity $ f .$ g .$ h .$ x
    proxy'' _ = ASS (proxy' Proxy) (proxy' Proxy) (proxy' Proxy)
|
brunjlar/protop
|
src/Protop/Core/Compositions.hs
|
mit
| 2,415
| 0
| 12
| 712
| 1,173
| 621
| 552
| 57
| 0
|
{-|
Module: Flaw.Visual.Geometry
Description: Geometry.
License: MIT
-}
{-# LANGUAGE DeriveGeneric, FlexibleContexts #-}
module Flaw.Visual.Geometry
( Geometry(..)
, PackedGeometry(..)
, packGeometry
, packIndexedGeometry
, loadPackedGeometry
, emitGeometryAsset
, loadGeometryAsset
, indexGeometryVertices
) where
import Control.Exception
import qualified Data.ByteString as B
import qualified Data.Serialize as S
import qualified Data.Text as T
import qualified Data.Vector.Algorithms.Intro as VAI
import qualified Data.Vector.Algorithms.Search as VAS
import qualified Data.Vector.Generic as VG
import qualified Data.Vector.Generic.Mutable as VGM
import qualified Data.Vector.Storable as VS
import Data.Word
import Foreign.Storable
import GHC.Generics(Generic)
import Language.Haskell.TH
import System.IO.Unsafe
import Flaw.Asset.Collada
import Flaw.Book
import Flaw.Build
import Flaw.Exception
import Flaw.Graphics
import Flaw.Visual.Geometry.Vertex
import Flaw.Visual.Geometry.CacheOptimization
-- | Device-resident geometry: a vertex buffer, an index buffer and the
-- number of indices to draw.
data Geometry d = Geometry
  { geometryVertexBuffer :: !(VertexBufferId d)
  , geometryIndexBuffer :: !(IndexBufferId d)
  , geometryIndicesCount :: {-# UNPACK #-} !Int
  }

-- | Serializable, device-independent form of a geometry: raw vertex and
-- index bytes plus the layout metadata needed to upload them.
data PackedGeometry = PackedGeometry
  { packedGeometryVerticesBytes :: !B.ByteString
  , packedGeometryIndicesBytes :: !B.ByteString
  , packedGeometryIndicesCount :: {-# UNPACK #-} !Int
  , packedGeometryVertexStride :: {-# UNPACK #-} !Int
  , packedGeometryIndexTopology :: !IndexTopology
  , packedGeometryIndexStride :: !IndexStride
  } deriving Generic

instance S.Serialize PackedGeometry

-- | Pack raw vertices.
packGeometry :: (Ord a, Storable a, VG.Vector v a, VG.Vector v Word32) => v a -> PackedGeometry
packGeometry = uncurry packIndexedGeometry . indexGeometryVertices
-- | Pack geometry with indices.
-- Chooses indices format.
--
-- Indices are emitted as 16-bit when the (cache-optimized) vertex count
-- fits in 0x10000, otherwise 32-bit. Topology is always triangles.
--
-- NOTE(review): @sizeOf (VG.head vertices)@ assumes a non-empty vertex
-- vector -- confirm callers never pass an empty one.
packIndexedGeometry :: (Storable a, VG.Vector v a) => v a -> VS.Vector Word32 -> PackedGeometry
packIndexedGeometry vertices indices = PackedGeometry
  { packedGeometryVerticesBytes = verticesBytes
  , packedGeometryIndicesBytes = indicesBytes
  , packedGeometryIndicesCount = VG.length indices
  , packedGeometryVertexStride = sizeOf (VG.head vertices)
  , packedGeometryIndexTopology = IndexTopologyTriangles
  , packedGeometryIndexStride = indexStride
  } where
  -- Reorder vertices/indices for GPU cache friendliness before packing.
  (optimizedVertices, optimizedIndices) = optimizeGeometryLocality vertices indices
  verticesBytes = packVector optimizedVertices
  (indexStride, indicesBytes) =
    if VG.length optimizedVertices > 0x10000 then
      (IndexStride32Bit, packVector optimizedIndices)
    else
      (IndexStride16Bit, packVector (VG.map fromIntegral optimizedIndices :: VS.Vector Word16))
-- | Load geometry into device.
--
-- Creates static vertex and index buffers from the packed bytes;
-- returns the geometry together with a release action (both buffers are
-- registered in the returned book).
loadPackedGeometry :: Device d => d -> PackedGeometry -> IO (Geometry d, IO ())
loadPackedGeometry device PackedGeometry
  { packedGeometryVerticesBytes = verticesBytes
  , packedGeometryIndicesBytes = indicesBytes
  , packedGeometryIndicesCount = indicesCount
  , packedGeometryVertexStride = vertexStride
  , packedGeometryIndexTopology = indexTopology
  , packedGeometryIndexStride = indexStride
  } = withSpecialBook $ \bk -> do
  vertexBuffer <- book bk $ createStaticVertexBuffer device verticesBytes vertexStride
  indexBuffer <- book bk $ createStaticIndexBuffer device indicesBytes indexTopology indexStride
  return Geometry
    { geometryVertexBuffer = vertexBuffer
    , geometryIndexBuffer = indexBuffer
    , geometryIndicesCount = indicesCount
    }
-- | Pack geometry into bytestring.
--
-- Compile-time (Template Haskell) helper: parses the named element from
-- a Collada file and serializes the packed VertexPNT geometry. On
-- parse failure a compile error is reported and an empty bytestring is
-- returned so compilation can continue to report further errors.
emitGeometryAsset :: FilePath -> ColladaM ColladaElement -> Q B.ByteString
emitGeometryAsset fileName getElement = do
  bytes <- loadFile fileName
  let
    eitherVertices = runCollada $ do
      initColladaCache bytes
      createColladaVertices =<< parseGeometry =<< getElement
  case eitherVertices of
    Right vertices -> return $ S.encode $ packGeometry (vertices :: VS.Vector VertexPNT)
    Left err -> do
      let msg = "failed to emit geometry asset " ++ fileName ++ ": " ++ T.unpack err
      reportError msg
      return B.empty

-- | Load geometry from bytestring.
--
-- Decodes a 'PackedGeometry' produced by 'emitGeometryAsset' and
-- uploads it; throws on a malformed asset.
loadGeometryAsset :: Device d => d -> B.ByteString -> IO (Geometry d, IO ())
loadGeometryAsset device bytes = case S.decode bytes of
  Right packedGeometry -> loadPackedGeometry device packedGeometry
  Left err -> throwIO $ DescribeFirstException $ "failed to load geometry asset: " ++ err
-- | Create indices for raw vertices.
--
-- Deduplicates the vertex list (sort, then compact equal runs) and maps
-- every original vertex to the index of its unique representative via
-- binary search; degenerate triangles (two or more equal corners) are
-- dropped from the index list.
--
-- unsafePerformIO is used only to run local mutable-vector algorithms;
-- no external state is touched, so the result is deterministic for a
-- given input.
indexGeometryVertices :: (Ord a, VG.Vector v a, VG.Vector v Word32) => v a -> (v a, VS.Vector Word32)
indexGeometryVertices vertices = unsafePerformIO $ do
  mVertices <- VG.thaw vertices
  VAI.sort mVertices
  uniqueVertices <- unique mVertices
  indices <- noDegenerateTriangles <$> VG.mapM ((fromIntegral <$>) . VAS.binarySearchL uniqueVertices) vertices
  resultVertices <- VG.freeze uniqueVertices
  return (resultVertices, indices)
  where
  -- Compact a sorted mutable vector in place, returning the prefix
  -- slice that holds one copy of each distinct element.
  unique v = if VGM.null v then return v else do
    let
      n = VGM.length v
      -- p is the index of the last kept element; i scans forward.
      f p i =
        if i < n then do
          a <- VGM.unsafeRead v i
          b <- VGM.unsafeRead v p
          if a == b then f p (i + 1)
          else do
            let q = p + 1
            VGM.write v q a
            f q (i + 1)
        else return p
    end <- f 0 0
    return $ VGM.slice 0 (end + 1) v
  -- Copy indices triangle by triangle, skipping any triangle whose
  -- corners are not pairwise distinct.
  noDegenerateTriangles v = VG.create $ do
    let n = VG.length v
    indices <- VGM.new n
    let
      -- p is the write position in the output; i walks input triples.
      f p i =
        if i + 2 < n then do
          let
            a = v VG.! i
            b = v VG.! (i + 1)
            c = v VG.! (i + 2)
          if a == b || b == c || a == c then f p (i + 3)
          else do
            VGM.unsafeWrite indices p a
            VGM.unsafeWrite indices (p + 1) b
            VGM.unsafeWrite indices (p + 2) c
            f (p + 3) (i + 3)
        else return p
    end <- f 0 0
    return $ VGM.slice 0 end indices
|
quyse/flaw
|
flaw-visual/Flaw/Visual/Geometry.hs
|
mit
| 5,819
| 0
| 22
| 1,209
| 1,602
| 840
| 762
| 146
| 6
|
-- |
-- Module: Data.Vector.Extended
-- Copyright: (c) 2016 Jared Tobin
-- License: MIT
--
-- Maintainer: Jared Tobin <jared@jtobin.ca>
-- Stability: unstable
-- Portability: ghc
module Data.Vector.Extended (
ensemble
, particle
) where
import qualified Data.Vector as V (fromList, Vector)
import qualified Data.Vector.Unboxed as U (fromList, Vector)
-- | A type-specialized alias for Data.Vector.fromList.
--
-- Builds an ensemble (a boxed vector) out of a list of particles.
ensemble :: [U.Vector Double] -> V.Vector (U.Vector Double)
ensemble particles = V.fromList particles
-- | A type-specialized alias for Data.Vector.Unboxed.fromList.
--
-- Builds a particle (an unboxed vector) out of a list of coordinates.
particle :: [Double] -> U.Vector Double
particle coords = U.fromList coords
|
jtobin/flat-mcmc
|
lib/Data/Vector/Extended.hs
|
mit
| 749
| 0
| 9
| 121
| 123
| 79
| 44
| 9
| 1
|
{-# OPTIONS_GHC -fno-warn-name-shadowing -fno-warn-orphans #-}
module Hydrogen.Parsing (
module Text.Parsec.Combinator
, module Text.Parsec.Prim
, module Text.Parsec.Pos
, Parser
, ParseError
, SomethingBad
, Tokens
, runTokenParser
, mkError
, sourceToken
, manyBetween
, (>+>)
, (<+<)
, sya
, ignoreUnderscores
, tryRead
, tryReads
, tryReadDecimal
, tryReadRational
, tryReadHex
, tryReadUUID
, tryReadVersion
, tryReadDateTime
, tryReadDate
, tryReadTime
, tryReadBool
, tryReadLink
) where
import Hydrogen.Prelude
import Text.Parsec.Combinator
import Text.Parsec.Error
import Text.Parsec.Pos
import Text.Parsec.Prim
-- | Accumulated parse failures: each entry pairs a source position with a message.
type SomethingBad = [(SourcePos, String)]
-- | A parser maps a source value to either failures or a result.
type Parser source result = source -> Either SomethingBad result
-- | A token stream in which every token carries its source position.
type Tokens t = [(SourcePos, t)]
-- NOTE(review): orphan instance — neither 'Serialize' nor 'SourcePos' is
-- defined in this module. Positions are stored as (line, column, name).
instance Serialize SourcePos where
  put pos = do
    let line = sourceLine pos
        col = sourceColumn pos
        name = sourceName pos
    -- line and column are stored as 32-bit big-endian words
    putWord32be (fromIntegral line)
    putWord32be (fromIntegral col)
    put name
  get = do
    line <- fromIntegral <$> getWord32be
    col <- fromIntegral <$> getWord32be
    name <- get
    return (newPos name line col)
-- | Convert a Parsec 'ParseError' into the 'SomethingBad' failure format,
-- producing one positioned entry per underlying error message.
mkError :: ParseError -> Either SomethingBad b
mkError err = Left [ (errorPos err, describe msg) | msg <- errorMessages err ]
  where
    describe msg = case msg of
      SysUnExpect s -> "Unexpected " ++ s
      UnExpect s -> "Unexpected " ++ s
      Expect s -> "Expected " ++ s
      Message s -> s
-- | Run a Parsec parser over an input value, translating Parsec failures
-- into the local 'SomethingBad' representation.
runTokenParser :: (Stream a Identity t) => ParsecT a () Identity b -> Parser a b
runTokenParser parser input =
  either mkError Right (runIdentity (runParserT parser () "" input))
-- | Match a single positioned token with a partial recognizer; the stream
-- position advances to the position of the following token.
sourceToken :: (Show t, Stream (Tokens t) m (SourcePos, t))
            => (t -> Maybe a)
            -> ParsecT [(SourcePos, t)] u m a
sourceToken match = tokenPrim showToken advance (match . snd)
  where
    showToken = show . snd
    -- take the next token's recorded position; keep the current one at EOF
    advance current _ rest = case rest of
      ((next, _) : _) -> next
      _ -> current
-- | Parse an opener, then collect occurrences of @p@ until the closer succeeds.
manyBetween :: (Monad m, Stream s m t)
    => ParsecT s u m open -> ParsecT s u m close -> ParsecT s u m p -> ParsecT s u m [p]
manyBetween open close body = open >> manyTill body close
-- | Left-to-right composition of parsers (Kleisli composition over Either).
(>+>) :: Parser a b -> Parser b c -> Parser a c
p1 >+> p2 = \input -> p1 input >>= p2
-- | Right-to-left composition of parsers; mirror of '(>+>)'.
(<+<) :: Parser b c -> Parser a b -> Parser a c
p2 <+< p1 = p1 >+> p2
-- | Run a 'ReadS'-style reader, succeeding only on an unambiguous,
-- fully-consumed parse; otherwise fail in the ambient monad.
tryRead :: (Monad m) => ReadS a -> String -> m a
tryRead reader input =
  case reader input of
    [(value, "")] -> return value
    [] -> fail "no parse"
    _ -> fail "ambiguous parse"
-- | 'tryRead' specialized to the standard 'Read' instance.
tryReads :: (Monad m, Read a) => String -> m a
tryReads input = tryRead reads input
-- | Strip digit-grouping underscores (e.g. \"1_000\" -> \"1000\").
-- The first character is always kept verbatim; scanning stops entirely as
-- soon as an underscore is preceded by a non-alphanumeric character.
ignoreUnderscores :: String -> String
ignoreUnderscores str = case str of
  c : rest -> c : scan rest
  [] -> []
  where
    scan s = case s of
      (c : '_' : _) | not (isAlphaNum c) -> s
      ('_' : c : rest) | isAlphaNum c -> c : scan rest
      (c : rest) -> c : scan rest
      [] -> []
-- | Parse a (possibly signed) decimal literal into a 'Rational'; a leading
-- dot is interpreted with an implicit zero integer part.
tryReadDecimal :: String -> Maybe Rational
tryReadDecimal input = case input of
  '-' : rest -> negate <$> parse rest
  '+' : rest -> parse rest
  '.' : rest -> parse ("0." ++ rest)
  _ -> parse input
  where
    parse = tryRead readFloat . ignoreUnderscores
-- | Parse a fraction literal of the form \"n/d\" into a 'Rational'.
tryReadRational :: String -> Maybe Rational
tryReadRational input = case denomPart of
  _ : denomDigits -> liftM2 (%) numer denom
    where
      numer = tryRead reads numerPart
      denom = tryRead reads denomDigits
  _ -> Nothing
  where
    (numerPart, denomPart) = span (/= '/') (ignoreUnderscores input)
-- | Parse a \"0x\"-prefixed hexadecimal literal; anything else fails.
tryReadHex :: String -> Maybe Rational
tryReadHex input = tryRead readHex (ignoreUnderscores (dropHexPrefix input))
  where
    dropHexPrefix ('0' : 'x' : rest) = rest
    dropHexPrefix _ = ""
-- | Parse a UUID via its 'Read' instance.
tryReadUUID :: String -> Maybe UUID
tryReadUUID input = tryRead reads input
-- | Parse a version literal, which must start with a literal \'v\'.
tryReadVersion :: String -> Maybe Version
tryReadVersion input = case input of
  'v' : rest -> tryRead reads rest
  _ -> fail "no version"
-- | Parse an ISO-8601-like timestamp with mandatory zone designator.
--
-- Outer 'Maybe': did the regex match at all. Inner 'Maybe': were the matched
-- components a valid date/time (via the time library's *Valid smart
-- constructors). Group order follows the concatenated regex below.
tryReadDateTime :: String -> Maybe (Maybe ZonedTime)
tryReadDateTime xs = case xs =~ dateTime of
  [[_, y, m, d, h, min, _, s, s', z, zm, _, zs]]
    -> Just (liftM2 ZonedTime (liftM2 LocalTime date time) zone)
    where
      (year, month, day, hour, minute) = (read y, read m, read d, read h, read min)
      -- seconds and their fractional part are both optional in the regex
      sec = read ((if null s then "0" else s) ++ (if null s' then ".0" else s'))
      time = makeTimeOfDayValid hour minute sec
      date = fromGregorianValid year month day
      -- "Z" means UTC; otherwise an offset in hours (zm) and optional minutes (zs)
      zone = Just $ case z of
        "Z" -> utc
        ('-' : _) -> minutesToTimeZone (negate zn)
        _ -> minutesToTimeZone zn
        where
          zn = read zm * 60 + (if zs == "" then 0 else read zs)
  _ -> Nothing
  where
    date = "([0-9]{4})-?([0-9]{2})-?([0-9]{2})"
    time = "([0-9]{2}):?([0-9]{2})(:?([0-9]{2})(\\.[0-9]{1,12})?)?"
    timeZone = "(Z|[+-]([0-9]{1,2})(:?([0-9]{2}))?)"
    dateTime = concat ["^", date, "T?", time, timeZone, "$"]
-- | Parse a date as either YYYY-MM-DD or ordinal YYYY-DDD.
--
-- Outer 'Maybe': regex match; inner 'Maybe': calendar validity. The regex
-- alternation puts month/day in groups 3-4 and the ordinal day in group 5,
-- hence the two row patterns below.
tryReadDate :: String -> Maybe (Maybe Day)
tryReadDate xs = case xs =~ date of
  [[_, y, _, m, d, ""]] -> Just (fromGregorianValid year month day)
    where
      (year, month, day) = (read y, read m, read d)
  [[_, y, _, _, _, d]] -> Just (fromOrdinalDateValid year day)
    where
      (year, day) = (read y, read d)
  _ -> Nothing
  where
    date = "^([0-9]{4})-(([0-9]{2})-([0-9]{2})|([0-9]{3}))$"
-- | Parse an HH:MM[:SS] time of day. Outer 'Maybe': regex match;
-- inner 'Maybe': validity per 'makeTimeOfDayValid'.
tryReadTime :: String -> Maybe (Maybe TimeOfDay)
tryReadTime input = case input =~ timeRegex of
  [[_, hh, mm, _, ss]] -> Just (makeTimeOfDayValid hour minute second)
    where
      hour = read hh
      minute = read mm
      second = if null ss then 0 else read ss
  _ -> Nothing
  where
    timeRegex = "^([0-9]{2}):([0-9]{2})(:([0-9]{2}))?$"
-- | Parse the exact spellings true/True/TRUE and false/False/FALSE;
-- any other input (including mixed case like \"tRuE\") yields 'Nothing'.
tryReadBool :: String -> Maybe Bool
tryReadBool s
  | s `elem` ["true", "TRUE", "True"] = Just True
  | s `elem` ["false", "False", "FALSE"] = Just False
  | otherwise = Nothing
-- | Accept a string that looks like a URL (host plus at least one path
-- segment, optional query); the original string is returned unchanged.
tryReadLink :: String -> Maybe String
tryReadLink input
  | input =~ linkRegex = Just input
  | otherwise = Nothing
  where
    linkRegex = concat [
        "^[a-z](-?[a-z0-9])*(\\.[a-z](-?[a-z0-9])*)+"
      , "(/([a-z0-9_.=-]|%[a-fA-F0-9]{2}|%u[a-fA-F0-9]{4})*)+"
      , "(\\?([a-z0-9_.=-]|%[a-fA-F0-9]{2}|%u[a-fA-F0-9]{4})*)?$"
      ]
-- | Infix to postfix notation (an implementation of the Shunting-Yard-Algorithm)
--
-- Operators are popped from the stack while they outrank (or, for
-- left-associative operators, tie with) the incoming operator. There is no
-- parenthesis handling; non-operator tokens pass straight to the output.
sya :: (Ord p, Eq o)
    => (a -> Maybe o) -- ^ Determine operator
    -> (o -> Bool)    -- ^ Is left precedence?
    -> (o -> p)       -- ^ Precedence of given operator
    -> [a]            -- ^ The input stream (infix notation)
    -> [a]            -- ^ The output stream (postfix notation)
sya toOp leftAssoc prec = go []
  where
    go stack (tok : rest)
      | isOperator tok = case stack of
          (top : stack') | isOperator top && shouldPop top tok
            -> top : go stack' (tok : rest)
          _ -> go (tok : stack) rest
      | otherwise = tok : go stack rest
    go stack [] = stack
    isOperator = isJust . toOp
    shouldPop a b = leftAssoc a' && prec a' == prec b' || prec a' > prec b'
      where
        Just a' = toOp a
        Just b' = toOp b
|
hydrogen-tools/hydrogen-parsing
|
src/Hydrogen/Parsing.hs
|
mit
| 6,798
| 0
| 16
| 1,923
| 2,506
| 1,309
| 1,197
| -1
| -1
|
{-# htermination filterM :: (a -> Maybe Bool) -> [a] -> Maybe [a] #-}
import Monad
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/full_haskell/Monad_filterM_3.hs
|
mit
| 83
| 0
| 3
| 16
| 5
| 3
| 2
| 1
| 0
|
{-# LANGUAGE OverloadedStrings, ScopedTypeVariables #-}
import Text.Printf
import Data.List
import Control.Arrow
import Data.Function
import Control.Monad.State
import Control.Applicative
import Control.Monad.Primitive
import Data.BigBunnyAndDeer.DeerText
import Data.BigBunnyAndDeer.DeerInfo
import Data.BigBunnyAndDeer.Type
import Data.BigBunnyAndDeer.Util
import Data.BigBunnyAndDeer.Twitter
import Data.BigBunnyAndDeer.BigBunny
import Data.BigBunnyAndDeer.Parens
import Data.BigBunnyAndDeer.OptParse
import Options.Applicative
import Data.Random.RVar
import Data.Random.Shuffle.Weighted
import Data.Random
import System.Random.MWC
-- | Choose the next deer entry to tweet: recently-used entries are excluded,
-- and among the remainder a weighted random pick favors entries that have
-- been tweeted less often. The access time of the chosen entry is recorded.
pickNextDeer :: forall m . (Functor m, MonadIO m)
             => BigBunnyT m (DeerId,DeerText)
pickNextDeer = do
  db <- getTextDb
  di <- getDeerInfo
  -- WARNING: to get a complete set of DeerId,
  -- DO NOT use deerinfo as which might be incomplete
  -- frequency varies, so we give up its control
  let dis = allDeerIds db
      getLastTime = lastAccess . findDeerEntry di
      -- most recent tweets appear earlier, so we can drop them
      disRecentFirst = sortBy (flip compare `on` getLastTime) dis
      -- MAGIC: designed for posting 24 tweets a day,
      -- therefore set to a number slightly bigger:
      disCandidate = drop 30 disRecentFirst
      dfreqCandiate = map (findDeerEntry di >>> totalTime)
                          disCandidate
      -- add smooth to every frequency to ensure the sum isn't zero
      -- and make the distribution slightly more smooth
      smooth = 1
      adjustedFreqCandidate = map (+ smooth) dfreqCandiate
      adjustedSum = sum adjustedFreqCandidate
      -- weight = (sum - frequency): less-used entries get larger weights
      adjustedProb = map (\fq -> adjustedSum - fq) adjustedFreqCandidate
      weightedCandidate = zip adjustedProb disCandidate
      chooseId :: RVar DeerId
      chooseId = head <$> weightedSample 1 weightedCandidate
      execRVar :: forall a . RVar a -> BigBunnyT m a
      execRVar rv = liftIO $ withSystemRandom
                      thereIsNoFuckingLambdaToBeAvoided
        where
          thereIsNoFuckingLambdaToBeAvoided :: Gen (PrimState IO) -> IO a
          thereIsNoFuckingLambdaToBeAvoided gen = sampleFrom gen rv
  -- choose one entry
  pickId <- execRVar chooseId
  ts <- getCurrentTimestamp
  -- record choice
  modify (updateDeerInfo pickId ts)
  return (pickId, findDeerText db pickId)
-- | Parse CLI options, pick the next deer entry, format the tweet text
-- (numbered, with decorative parens appended) and post it.
main :: IO ()
main = do
  conf <- execParser (info (helper <*> configP) fullDesc)
  (deerId, deerText) <- runBigBunny conf pickNextDeer
  parens <- getSomeLParens
  let message = printf "%d. %s" deerId deerText ++ parens
  putStrLn ("Posting message: " ++ message)
  postTweet (authFilePath conf) message
|
Javran/BigBunnyAndDeer
|
src/Main.hs
|
mit
| 2,716
| 0
| 15
| 634
| 577
| 306
| 271
| 57
| 1
|
{-# htermination isLatin1 :: Char -> Bool #-}
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/full_haskell/Prelude_isLatin1_1.hs
|
mit
| 46
| 0
| 2
| 8
| 3
| 2
| 1
| 1
| 0
|
import Data.List.Split
-- splitOn removes every occurrence of the delimiter
example1 = splitOn "." "foo.bar.baz"
-- ["foo","bar","baz"]
-- chunksOf yields consecutive chunks; the last one may be shorter
example2 = chunksOf 10 "To be or not to be that is the question."
-- ["To be or n","ot to be t","hat is the"," question."]
|
riwsky/wiwinwlh
|
src/split.hs
|
mit
| 208
| 0
| 5
| 36
| 29
| 16
| 13
| 3
| 1
|
{- Author: Jeff Newbern
Maintainer: Jeff Newbern <jnewbern@nomaware.com>
Time-stamp: <Mon Nov 17 20:48:07 2003>
License: GPL
-}
{- DESCRIPTION
Exercise 2 - Combining Monadic Values
Write functions parent and grandparent with signature Sheep -> Maybe
Sheep. They should return one sheep selected from all sheep matching
the description, or Nothing if there is no such sheep. Hint: the mplus
operator is useful here.
-}
import Monad
-- everything you need to know about sheep
-- (a parent is Maybe because it may be unknown, as for Dolly below)
data Sheep = Sheep {name::String, mother::Maybe Sheep, father::Maybe Sheep}
-- we show sheep by name
instance Show Sheep where
  show s = show (name s)
-- lookups that may fail chain naturally with (>>=) in the Maybe monad
maternalGrandfather :: Sheep -> Maybe Sheep
maternalGrandfather s = mother s >>= father
-- father's mother's mother, failing if any link is missing
fathersMaternalGrandmother :: Sheep -> Maybe Sheep
fathersMaternalGrandmother s = father s >>= mother >>= mother
-- mother's father's father, failing if any link is missing
mothersPaternalGrandfather :: Sheep -> Maybe Sheep
mothersPaternalGrandfather s = mother s >>= father >>= father
-- here are the new functions: one parent, preferring the mother
parent :: Sheep -> Maybe Sheep
parent s = mplus (mother s) (father s)
grandparent :: Sheep -> Maybe Sheep
grandparent s = mplus (mother s >>= parent) (father s >>= parent)
-- Why couldn't we write:
-- grandparent s = do p <- parent s
-- parent p
-- Hint: What if a sheep's mother had no parents but its father did,
-- like Roger below?
-- this builds our sheep family tree, rooted at the clone Dolly
breedSheep :: Sheep
breedSheep = Sheep "Dolly" (Just molly) Nothing
  where
    adam   = Sheep "Adam" Nothing Nothing
    eve    = Sheep "Eve" Nothing Nothing
    uranus = Sheep "Uranus" Nothing Nothing
    gaea   = Sheep "Gaea" Nothing Nothing
    kronos = Sheep "Kronos" (Just gaea) (Just uranus)
    holly  = Sheep "Holly" (Just eve) (Just adam)
    roger  = Sheep "Roger" (Just eve) (Just kronos)
    molly  = Sheep "Molly" (Just holly) (Just roger)
-- print one of Dolly's grandparents
main :: IO ()
main = print (grandparent breedSheep)
-- END OF FILE
|
maurotrb/hs-exercises
|
AllAboutMonads/examples/exercise2.hs
|
mit
| 2,237
| 10
| 11
| 605
| 507
| 256
| 251
| 32
| 1
|
{-# LANGUAGE OverloadedStrings #-}
module Foundation where
import Database.Persist.Sql
import Import.NoFoundation
import Text.Hamlet (hamletFile)
import Yesod.Auth.Email
import Yesod.Auth.Message (AuthMessage (InvalidLogin))
import qualified Yesod.Core.Unsafe as Unsafe
import Yesod.Core.Types (Logger)
import Yesod.Default.Util (addStaticContentExternal)
import Yesod.Fay
import Network.Mail.Mime
import Text.Shakespeare.Text (stext)
import Data.Text.Lazy.Encoding (encodeUtf8)
import Text.Blaze.Html.Renderer.Utf8 (renderHtml)
import Yesod.Form.Nic (YesodNic)
import System.Log.FastLogger (pushLogStrLn, toLogStr, ToLogStr)
-- | The foundation datatype for your application. This can be a good place to
-- keep settings and values requiring initialization before your application
-- starts running, such as database connections. Every handler will have
-- access to the data present here.
data App = App
    { appSettings    :: AppSettings
    , appStatic      :: Static -- ^ Settings for static file serving.
    , appConnPool    :: ConnectionPool -- ^ Database connection pool.
    , appHttpManager :: Manager -- ^ Shared HTTP client manager.
    , appLogger      :: Logger
    , appFayCommandHandler :: CommandHandler App -- ^ Dispatcher for Fay commands.
    , appData        :: MVar (Maybe DomainData) -- ^ Domain data cache; Nothing until populated (presumably loaded on demand — see callers).
    }
-- | In-memory snapshot of the domain reference tables (marks, models, ages,
-- generations, regions, tags, advice texts, images) as persisted entities.
data DomainData =
  DomainData
    { getMarks :: [Entity Mark]
    , getModels :: [Entity Model]
    , getAges :: [Entity Age]
    , getLkModelAges :: [Entity LkModelAge]
    , getGenerations :: [Entity Generation]
    , getRegions :: [Entity Region]
    , getLkTags :: [Entity LkTag]
    , getTextAdvices :: [Entity TextAdvise]
    , getImages :: [Entity Image]
    }
-- Enable the Nic rich-text editor widget for this site.
instance YesodNic App
instance HasHttpManager App where
    getHttpManager = appHttpManager
-- This is where we define all of the routes in our application. For a full
-- explanation of the syntax, please see:
-- http://www.yesodweb.com/book/routing-and-handlers
--
-- Note that this is really half the story; in Application.hs, mkYesodDispatch
-- generates the rest of the code. Please see the linked documentation for an
-- explanation for this split.
--
-- This function also generates the following type synonyms:
-- type Handler = HandlerT App IO
-- type Widget = WidgetT App IO ()
-- Generates the route datatype plus Handler/Widget synonyms from config/routes.
mkYesodData "App" $(parseRoutesFile "config/routes")
-- | A convenient synonym for creating forms.
type Form x = Html -> MForm (HandlerT App IO) (FormResult x, Widget)
-- Please see the documentation for the Yesod typeclass. There are a number
-- of settings which can be configured by overriding methods here.
instance Yesod App where
    -- Controls the base of generated URLs. For more information on modifying,
    -- see: https://github.com/yesodweb/yesod/wiki/Overriding-approot
    approot = ApprootMaster $ appRoot . appSettings
    -- Store session data on the client in encrypted cookies.
    -- NOTE(review): the timeout passed below is 36000 minutes (25 days),
    -- not the scaffolding default of 120 minutes.
    makeSessionBackend _ = Just <$> defaultClientSessionBackend
        36000 -- timeout in minutes
        "config/client_session_key.aes"
    -- Larger upload limit only for the Fay subsite (photo uploads).
    maximumContentLength _ (Just (FaySiteR _)) = Just (100 * 1024 * 1024) -- 100 mb for photos
    maximumContentLength _ _ = Just (2 * 1024 * 1024)
    defaultLayout widget = do
        master <- getYesod
        mmsg <- getMessage
        -- We break up the default layout into two components:
        -- default-layout is the contents of the body tag, and
        -- default-layout-wrapper is the entire page. Since the final
        -- value passed to hamletToRepHtml cannot be a widget, this allows
        -- you to use normal widget features in default-layout.
        pc <- widgetToPageContent $ do
            -- addStylesheet $ StaticR css_bootstrap_css
            -- addStylesheet $ StaticR css_bootstrap_combobox_css
            -- addStylesheet $ StaticR css_blocks_css
            -- addStylesheet $ StaticR css_grid_css
            addScript $ StaticR js_jquery_min_js
            addScript $ StaticR js_jquery_parallax_js
            addScript $ StaticR js_bootstrap_min_js
            addScript $ StaticR js_bootstrap_combobox_js
            -- addScript $ StaticR js_blocks_js
            addScript $ StaticR js_blocks2_js
            addScript $ StaticR js_scripts2_js
            addScript $ StaticR js_forms1_js
            $(widgetFile "default-layout")
        withUrlRenderer $(hamletFile "templates/default-layout-wrapper.hamlet")
    -- The page to be redirected to when authentication is required.
    authRoute _ = Just $ AuthR LoginR
    -- Routes not requiring authentication.
    isAuthorized (AuthR _) _ = return Authorized
    isAuthorized FaviconR _ = return Authorized
    isAuthorized RobotsR _ = return Authorized
    isAuthorized (StaticR _) _ = return Authorized
    isAuthorized GoogleR _ = return Authorized
    isAuthorized SiteMapR _ = return Authorized
    -- Default to Authorized for now.
    isAuthorized HomeR _ = return Authorized
    isAuthorized AboutR _ = return Authorized
    isAuthorized TermsR _ = return Authorized
    isAuthorized (ModelsR _) _ = return Authorized
    isAuthorized (FaySiteR _) _ = return Authorized
    isAuthorized SearchR _ = return Authorized
    isAuthorized TechR _ = return Authorized
    isAuthorized (ReviewR _ _) _ = return Authorized
    isAuthorized ReviewNewR _ = return Authorized
    -- Everything else is admin-only.
    isAuthorized _ _ = isAdmin
    -- This function creates static content files in the static folder
    -- and names them based on a hash of their content. This allows
    -- expiration dates to be set far in the future without worry of
    -- users receiving stale content.
    addStaticContent ext mime content = do
        master <- getYesod
        let staticDir = appStaticDir $ appSettings master
        addStaticContentExternal
            Right
            genFileName
            staticDir
            (StaticR . flip StaticRoute [])
            ext
            mime
            content
      where
        -- Generate a unique filename based on the content itself
        genFileName lbs = "autogen-" ++ base64md5 lbs
    -- What messages should be logged. The following includes all messages when
    -- in development, and warnings and errors in production.
    shouldLog app _source level =
        appShouldLogAll (appSettings app)
            || level == LevelWarn
            || level == LevelError
    makeLogger = return . appLogger
-- | Authorize any logged-in user; anonymous visitors must authenticate.
isUser :: forall master. YesodAuth master => HandlerT master IO AuthResult
isUser = do
  authId <- maybeAuthId
  case authId of
    Just _ -> return Authorized
    Nothing -> return AuthenticationRequired
-- | Authorize only authenticated users whose stored role is "admin";
-- everyone else (anonymous, missing row, other roles) must authenticate.
isAdmin ::
  forall master.
  (YesodPersist master, YesodAuth master, AuthId master ~ Key User,
   YesodPersistBackend master ~ SqlBackend) =>
  HandlerT master IO AuthResult
isAdmin = do
  mu <- maybeAuthId
  ar <- case mu of
    Nothing -> return AuthenticationRequired
    Just uid -> do
      -- look up the user row for the authenticated id
      u <- runDB $ selectList [UserId ==. uid] []
      return $ case u of
        [] -> AuthenticationRequired
        (Entity _ u':_) ->
          case (userRole u') of
            "admin" -> Authorized
            _ -> AuthenticationRequired
  return ar
-- Serve jQuery from our own static files instead of a CDN.
instance YesodJquery App where
    urlJqueryJs _ = Left (StaticR js_jquery_min_js)
-- Route Fay AJAX commands through the handler stored in the foundation.
instance YesodFay App where
    fayRoute = FaySiteR
    yesodFayCommand render command = do
        master <- getYesod
        appFayCommandHandler master render command
-- How to run database actions: take a connection from the app's pool.
instance YesodPersist App where
    type YesodPersistBackend App = SqlBackend
    runDB action = do
        master <- getYesod
        runSqlPool action $ appConnPool master
-- Needed for streaming database responses; reuses the same pool.
instance YesodPersistRunner App where
    getDBRunner = defaultGetDBRunner appConnPool
instance YesodAuth App where
    type AuthId App = UserId
    -- Where to send a user after successful login
    loginDest _ = HomeR
    -- Where to send a user after logout
    logoutDest _ = HomeR
    -- Override the above two destinations when a Referer: header is present
    redirectToReferer _ = True
    -- A user authenticates only if a row with the given identifier exists.
    authenticate creds = runDB $ do
        x <- getBy $ UniqueUser $ credsIdent creds
        return $ case x of
            Just (Entity uid _) -> Authenticated uid
            Nothing -> UserError InvalidLogin
    -- You can add other plugins like BrowserID, email or OAuth here
    authPlugins _ = [authEmail]
    -- insertBy returns Left for an already-existing user and Right for a
    -- freshly inserted one; either way we have an id.
    getAuthId creds = runDB $ do
        x <- insertBy $ User (credsIdent creds) Nothing Nothing False "user"
        return $ Just $
            case x of
                Left (Entity userid _) -> userid
                Right userid -> userid
    -- Deliberately unused: the email plugin never calls it.
    authHttpManager = error "Email doesn't need an HTTP manager"
-- Email-based registration/verification. All user-facing text is Russian
-- and must stay byte-identical; keys are stored on the User row.
instance YesodAuthEmail App where
    type AuthEmailId App = UserId
    -- registerHandler :: YesodAuthEmail master => AuthHandler master Html
    -- Custom registration page posted back to the email plugin's route.
    registerHandler = do
        (widget, enctype) <- lift $ generateFormPost registrationForm
        toParentRoute <- getRouteToParent
        lift $ defaultLayout $ do
            setTitle "Регистрация"
            [whamlet|
                <p>Пожалуйста, оставьте свой e-mail для того, чтобы продолжить поиск без ограничений.
                <form method="post" action="@{toParentRoute $ PluginR em reg}" enctype=#{enctype}>
                    <div id="registerForm">
                        ^{widget}
                    <button .btn>Отправить
            |]
      where
        em = "email"
        reg = ["register"]
    -- Single required email field wrapped into 'UserForm'.
    registrationForm extra = do
        let emailSettings = FieldSettings
                {
                    fsLabel = "Email",
                    fsTooltip = Nothing,
                    fsId = Just "email",
                    fsName = Just "email",
                    fsAttrs = [("autofocus", "")]
                }
        (emailRes, emailView) <- mreq emailField emailSettings Nothing
        let userRes = UserForm <$> emailRes
        let widget = do
                [whamlet|
                    #{extra}
                    ^{fvLabel emailView}
                    ^{fvInput emailView}
                |]
        return (userRes, widget)
    -- New users start unverified, without a password, with role "user".
    addUnverified email verkey =
        runDB $ insert $ User email Nothing (Just verkey) False "user"
    confirmationEmailSentResponse ident = selectRep $ provideRep $ defaultLayout $ do
        setTitle "Проверьте почту"
        let a = "Письмо с подтверждением было отправлено по адресу " <> ident <> ". Пожалуйста, проверьте почту."
        [whamlet|<h2>#{a}|]
    -- Sends the verification link as a two-part (plain + HTML) message.
    sendVerifyEmail email _ verurl =
        liftIO $ renderSendMail (emptyMail $ Address Nothing "noreply")
            { mailTo = [Address Nothing email]
            , mailHeaders =
                [ ("Subject", "Подтверждение вашего email адреса")
                ]
            , mailParts = [[textPart, htmlPart']]
            }
      where
        textPart = Part
            { partType = "text/plain; charset=utf-8"
            , partEncoding = None
            , partFilename = Nothing
            , partContent = Data.Text.Lazy.Encoding.encodeUtf8 [stext|
                Пожалуйста, подтвердите ваш email адрес, перейдя по ссылке.
                \#{verurl}
                Спасибо.
            |]
            , partHeaders = []
            }
        htmlPart' = Part
            { partType = "text/html; charset=utf-8"
            , partEncoding = None
            , partFilename = Nothing
            , partContent = renderHtml [shamlet|
                <p>Пожалуйста, подтвердите ваш email адрес, перейдя по ссылке.
                <p>
                    <a href=#{verurl}>#{verurl}
                <p>Спасибо.
            |]
            , partHeaders = []
            }
    getVerifyKey = runDB . fmap (join . fmap userVerkey) . get
    setVerifyKey uid key = runDB $ update uid [UserVerkey =. Just key]
    verifyAccount uid = runDB $ do
        mu <- get uid
        case mu of
            Nothing -> return Nothing
            Just _ -> do
                update uid [UserVerified =. True]
                return $ Just uid
    getPassword = runDB . fmap (join . fmap userPassword) . get
    setPassword uid pass = runDB $ update uid [UserPassword =. Just pass]
    -- emailCredsStatus is True once a password has been set.
    getEmailCreds email = runDB $ do
        mu <- getBy $ UniqueUser email
        case mu of
            Nothing -> return Nothing
            Just (Entity uid u) -> return $ Just EmailCreds
                { emailCredsId = uid
                , emailCredsAuthId = Just uid
                , emailCredsStatus = isJust $ userPassword u
                , emailCredsVerkey = userVerkey u
                , emailCredsEmail = userEmail u
                }
    getEmail = runDB . fmap (fmap userEmail) . get
    afterPasswordRoute _ = HomeR
instance YesodAuthPersist App
-- This instance is required to use forms. You can modify renderMessage to
-- achieve customized and internationalized form validation messages.
instance RenderMessage App FormMessage where
    renderMessage _ _ = defaultFormMessage
-- | Run a handler outside of a request context (e.g. from background code).
unsafeHandler :: App -> Handler a -> IO a
unsafeHandler = Unsafe.fakeHandlerGetLogger appLogger
-- Note: Some functionality previously present in the scaffolding has been
-- moved to documentation in the Wiki. Following are some hopefully helpful
-- links:
--
-- https://github.com/yesodweb/yesod/wiki/Sending-email
-- https://github.com/yesodweb/yesod/wiki/Serve-static-files-from-a-separate-domain
-- https://github.com/yesodweb/yesod/wiki/i18n-messages-in-the-scaffolding
-- | Log a message from inside a handler by delegating to 'logMessageSite'
-- with the current foundation value.
logMessage :: forall (m :: * -> *) r.
       (IsString r, MonadHandler m, IsSequence r, ToLogStr r
       , HandlerSite m ~ App, Element r ~ Char, Semigroup r) =>
       r -> m ()
logMessage msg = do
  site <- getYesod
  liftIO (logMessageSite site msg)
-- | Write a timestamped, tab-separated line to the application's logger.
logMessageSite :: forall r. (IsString r, IsSequence r, ToLogStr r, Element r ~ Char, Semigroup r) =>
    App -> r -> IO ()
logMessageSite app msg = do
  now <- getCurrentTime
  let stamp = pack $ formatTime defaultTimeLocale "%d-%b-%Y %T" now
      line = stamp <> "\t" <> msg
  pushLogStrLn (loggerSet $ appLogger app) (toLogStr line)
-- | Result of the registration form: just the submitted email address.
data UserForm = UserForm { email :: Text }
-- | Fetch visible, promoted advice texts tagged to the generation identified
-- by mark url @ma@ and model+generation url @mog@, together with mark/model
-- names, generation label and the tagged age range (min bottom / max top).
getAdviseByReviewData :: Text -> Text -> HandlerT App IO [(Single Text, Single Text, Single Text, Single Int, Single Int, Entity TextAdvise)]
getAdviseByReviewData ma mog = do
  let sql = "SELECT DISTINCT mark.name, model.name, generation.generation,"
        <> " min(gen2.bottom_age) OVER (PARTITION BY text_advise.id),"
        <> " max(gen2.top_age) OVER (PARTITION BY text_advise.id), "
        <> " ?? FROM text_advise"
        <> " INNER JOIN lk_tag ON text_advise.id = lk_tag.text_advise_id"
        <> " INNER JOIN lk_tag lt2 ON text_advise.id = lt2.text_advise_id"
        <> " INNER JOIN generation gen2 ON lt2.generation = gen2.id"
        <> " INNER JOIN generation ON lk_tag.generation = generation.id"
        <> " INNER JOIN mark ON generation.mark_id = mark.id"
        <> " INNER JOIN model ON generation.model_id = model.id"
        <> " WHERE mark.url = ? AND text_advise.hidden = 'f' AND text_advise.promo IS NOT NULL AND model.url || generation.url = ? "
  runDB $ rawSql sql [toPersistValue ma, toPersistValue mog]
-- | For one promoted, visible advice text, return (mark url, model+generation
-- url) pairs of the generations it is tagged with (shortest generation url
-- per advice, via the window min).
getReviewDataByAdvise :: Key TextAdvise -> HandlerT App IO [(Single Text, Single Text)]
getReviewDataByAdvise taId = do
  let sql = "SELECT DISTINCT ma.url mark_url, mo.url || min(g.url) over (PARTITION BY l.text_advise_id) gen_url"
        <> " FROM text_advise ta"
        <> " INNER JOIN lk_tag l ON ta.id = l.text_advise_id"
        <> " INNER JOIN generation g ON l.generation = g.id"
        <> " INNER JOIN model mo ON g.model_id = mo.id"
        <> " INNER JOIN mark ma ON g.mark_id = ma.id"
        <> " WHERE ta.id = ? "
        <> " AND ta.promo IS NOT NULL"
        <> " AND ta.hidden = 'f'"
  runDB $ rawSql sql [toPersistValue taId]
-- | Same projection as 'getReviewDataByAdvise' but over all promoted,
-- visible advice texts (no id filter).
getReviewData :: HandlerT App IO [(Single Text, Single Text)]
getReviewData = do
  let sql = "SELECT DISTINCT ma.url mark_url, mo.url || min(g.url) over (PARTITION BY l.text_advise_id) gen_url"
        <> " FROM text_advise ta"
        <> " INNER JOIN lk_tag l ON ta.id = l.text_advise_id"
        <> " INNER JOIN generation g ON l.generation = g.id"
        <> " INNER JOIN model mo ON g.model_id = mo.id"
        <> " INNER JOIN mark ma ON g.mark_id = ma.id"
        <> " WHERE ta.promo IS NOT NULL"
        <> " AND ta.hidden = 'f'"
  runDB $ rawSql sql []
-- | Pick one random simple_advice row; randomness is computed inside SQL
-- (random row number), so no shuffling happens in Haskell.
getRandomSimpleAdvice :: forall site. (YesodPersist site, YesodPersistBackend site ~ SqlBackend)
                      => HandlerT site IO [Entity SimpleAdvice]
getRandomSimpleAdvice = do
  let sql = "select ?? from (select * from simple_advice where id in (select id from (select q.cnt, sa.id, row_number() over (order by sa.id)-1 rn from simple_advice sa, (select trunc(random(0,count(1))) cnt from simple_advice) q) simple_advice where cnt = rn)) simple_advice"
  runDB $ rawSql sql []
-- | Pick up to three random promoted, visible advice texts that have at least
-- one image; returns (mark url, model+generation url, advice, first image).
-- The UNION of three random row numbers may collapse, so fewer than three
-- rows can come back.
getRandomPromos :: forall site. (YesodPersist site, YesodPersistBackend site ~ SqlBackend)
                => HandlerT site IO [(Single Text, Single Text, Entity TextAdvise, Entity Image)]
getRandomPromos = do
  let sql = "WITH len as ("
        <> " SELECT count(1) cnt FROM text_advise ta"
        <> " WHERE hidden = 'f' AND promo IS NOT NULL"
        <> " AND EXISTS ("
        <> " SELECT 1 FROM image WHERE image.text_advise_id = ta.id"
        <> " )"
        <> " ), rans as ("
        <> " SELECT trunc(random(0, cnt)) rrn FROM len UNION"
        <> " SELECT trunc(random(0, cnt)) rrn FROM len UNION"
        <> " SELECT trunc(random(0, cnt)) rrn FROM len"
        <> " ), ext as ("
        <> " SELECT ta.id, row_number() OVER (order by ta.id)-1 rn FROM text_advise ta"
        <> " WHERE promo IS NOT NULL AND hidden = 'f'"
        <> " ), rants as ("
        <> " SELECT ext.id FROM rans, ext WHERE rans.rrn = ext.rn"
        <> " ), imgs as ("
        <> " SELECT min(id) mid, text_advise_id FROM image GROUP BY text_advise_id"
        <> " )"
        <> " SELECT DISTINCT ma.url, mo.url || q.url url2, ??, ?? FROM ("
        <> " SELECT * FROM text_advise ta WHERE id IN ("
        <> " SELECT id FROM rants"
        <> " )"
        <> " ) text_advise INNER JOIN ("
        <> " SELECT * FROM image WHERE id IN ("
        <> " SELECT mid FROM imgs"
        <> " )"
        <> " ) image ON text_advise.id = image.text_advise_id"
        <> " INNER JOIN ("
        <> " SELECT q.text_advise_id, q.generation, q.url, "
        <> " q.mark_id, q.model_id, q.top_age, q.bottom_age, "
        <> " q.id, q.generation "
        <> " FROM ("
        <> " SELECT l.text_advise_id, g.id, g.generation, g.top_age, g.bottom_age, min(g.url) over (PARTITION BY l.text_advise_id) url, g.model_id, g.mark_id, length(g.url) lurl, min(length(g.url)) over (PARTITION BY l.text_advise_id) murl "
        <> " FROM lk_tag l INNER JOIN generation g ON l.generation = g.id"
        <> " ) q WHERE q.lurl = q.murl"
        <> " ) q ON text_advise.id = q.text_advise_id"
        <> " INNER JOIN model mo ON q.model_id = mo.id"
        <> " INNER JOIN mark ma ON q.mark_id = ma.id"
  runDB $ rawSql sql []
-- | For a given mark url, list promoted, visible advice texts with their
-- model name, (mark url, model+generation url) and first image; the shortest
-- generation url per advice is used for the link.
getEModels :: forall site. (YesodPersist site, YesodPersistBackend site ~ SqlBackend)
           => Text -> HandlerT site IO [(Single Text, Single Text, Single Text, Entity Image, Entity TextAdvise)]
getEModels mu = do
  let sql = "SELECT DISTINCT mark.url, model.url || q.url, model.name, ??, ?? FROM model"
        <> " INNER JOIN ("
        <> " SELECT q.text_advise_id, q.generation, q.url, "
        <> " q.mark_id, q.model_id, q.top_age, q.bottom_age, "
        <> " q.id, q.generation"
        <> " FROM ("
        <> " SELECT l.text_advise_id, g.id, g.generation, g.top_age, g.bottom_age, min(g.url) over (PARTITION BY l.text_advise_id) url, g.model_id, g.mark_id, length(g.url) lurl, min(length(g.url)) over (PARTITION BY l.text_advise_id) murl "
        <> " FROM lk_tag l INNER JOIN generation g ON l.generation = g.id"
        <> " ) q WHERE q.lurl = q.murl"
        <> " ) q ON model.id = q.model_id"
        <> " INNER JOIN mark ON model.mark_id = mark.id"
        <> " INNER JOIN text_advise ON q.text_advise_id = text_advise.id"
        <> " INNER JOIN ("
        <> " SELECT min(id) id, text_advise_id FROM image GROUP BY text_advise_id"
        <> " ) q2 ON text_advise.id = q2.text_advise_id"
        <> " INNER JOIN image ON q2.id = image.id"
        <> " WHERE text_advise.hidden = 'f'"
        <> " AND text_advise.promo IS NOT NULL"
        <> " AND mark.url = ? "
  runDB $ rawSql sql [toPersistValue mu]
|
swamp-agr/carbuyer-advisor
|
Foundation.hs
|
mit
| 21,051
| 0
| 46
| 6,024
| 3,587
| 1,848
| 1,739
| -1
| -1
|
{-# LANGUAGE DeriveDataTypeable, MultiParamTypeClasses #-}
module NginxLint.Data where
import Data.Data (Data, Typeable)
import Data.Generics.Str (listStr, strList, Str(Zero))
import Data.Generics.Uniplate.Operations (Biplate, biplate, Uniplate, uniplate)
import qualified Text.ParserCombinators.Parsec as P
-- | Directive/block name in an nginx config.
type Ident = String
-- | A parsed nginx config file: its path plus top-level declarations.
data NgFile = NgFile FilePath [Decl]
    deriving (Eq, Show)
-- | A directive argument, each tagged with its source position.
data Arg
    = RawString P.SourcePos String
    | QuotedString P.SourcePos String
    | Integer P.SourcePos Integer
    deriving (Data, Eq, Show, Typeable)
-- | A declaration: simple directive, named block, or the special if/location
-- blocks (kept distinct so lint rules can target them).
data Decl
    = Decl P.SourcePos Ident [Arg]
    | Block P.SourcePos Ident [Arg] [Decl]
    | If P.SourcePos [Arg] [Decl]
    | Location P.SourcePos [Arg] [Decl]
    deriving (Data, Eq, Show, Typeable)
-- A file's children (for generic traversals) are its top-level declarations;
-- the rebuild function re-wraps them with the original filename.
instance Biplate NgFile Decl where
    biplate (NgFile fname ds) = (listStr ds, \newds -> NgFile fname (strList newds))
-- Children of a declaration are its nested declarations; plain directives
-- have none. Each rebuild function must reconstruct the SAME constructor
-- with its original position and arguments (the uniplate law).
instance Uniplate Decl where
    uniplate d@Decl{} = (Zero, \Zero -> d)
    uniplate (Block pos ident args children) = (listStr children, \newds -> Block pos ident args (strList newds))
    uniplate (If pos args children) = (listStr children, \newds -> If pos args (strList newds))
    -- BUG FIX: this equation previously rebuilt the node as 'If', so any
    -- uniplate transformation silently rewrote location blocks into ifs.
    uniplate (Location pos args children) = (listStr children, \newds -> Location pos args (strList newds))
-- | Things that carry a parser source position.
class NgPositioned a where
    ng_getPosition :: a -> P.SourcePos

-- Every 'Arg' constructor stores its position as the first field.
instance NgPositioned Arg where
    ng_getPosition (RawString pos _) = pos
    ng_getPosition (QuotedString pos _) = pos
    ng_getPosition (Integer pos _) = pos
-- Every 'Decl' constructor stores its position as the first field.
instance NgPositioned Decl where
    ng_getPosition (Decl pos _ _) = pos
    ng_getPosition (Block pos _ _ _) = pos
    ng_getPosition (If pos _ _) = pos
    ng_getPosition (Location pos _ _) = pos
-- | A lint finding: where it occurred, its category, its identifier, and the
-- human-readable message content.
data Hint = Hint P.SourcePos String String String

instance NgPositioned Hint where
    ng_getPosition (Hint pos _ _ _) = pos
-- | Pretty print a source position as @file:line:column@.
ppSrcPos :: P.SourcePos -> String
ppSrcPos pos =
    concat [P.sourceName pos, ":", show (P.sourceLine pos), ":", show (P.sourceColumn pos)]
-- | Pretty print a single argument back to nginx syntax.
ppArg :: Arg -> String
ppArg arg = case arg of
    RawString _ s    -> s
    QuotedString _ s -> concat ["\"", s, "\""]
    Integer _ i      -> show i
-- | Pretty print an argument list; each argument is preceded by one space.
ppArgList :: [Arg] -> String
ppArgList args = concat [' ' : ppArg a | a <- args]
-- | Pretty print a declaration back to nginx syntax; nested declarations of
-- block-like constructors are indented one level.
ppDecl :: Decl -> String
ppDecl decl = case decl of
    Decl _ name args     -> name ++ ppArgList args ++ ";\n"
    Block _ name args ds -> name ++ ppArgList args ++ " {\n" ++ body ds ++ "}\n"
    If _ args ds         -> "if ( " ++ ppArgList args ++ " ) {\n" ++ body ds ++ "}\n"
    -- a location is rendered exactly like a "location" block
    Location pos args ds -> ppDecl (Block pos "location" args ds)
  where
    body = concatMap (\d -> " " ++ ppDecl d)
-- | Pretty print a hint as @file:line:col category:id: message@.
ppHint :: Hint -> String
ppHint h@(Hint _ cat ident content) =
    concat [ppSrcPos (ng_getPosition h), " ", cat, ":", ident, ": ", content]
|
temoto/nginx-lint
|
NginxLint/Data.hs
|
mit
| 3,009
| 0
| 11
| 749
| 1,093
| 576
| 517
| 63
| 1
|
--
-- The first known prime found to exceed one million digits was discovered in
-- 1999, and is a Mersenne prime of the form 2^6972593−1; it contains exactly
-- 2,098,960 digits. Subsequently other Mersenne primes, of the form 2^p−1,
-- have been found which contain more digits.
--
-- However, in 2004 there was found a massive non-Mersenne prime which contains
-- 2,357,207 digits: 28433 x 2^7830457+1.
--
-- Find the last ten digits of this prime number.
--
-- | Last ten digits of 28433 * 2^7830457 + 1.  Computed with fast modular
-- exponentiation instead of materialising the full 2,357,207-digit number:
-- same printed result, O(log e) multiplications on 10-digit integers.
main :: IO ()
main = print $ (28433 * powMod 2 7830457 m + 1) `mod` m
  where
    m :: Integer
    m = 10000000000
    -- Right-to-left binary exponentiation: b^e `mod` md.
    powMod :: Integer -> Integer -> Integer -> Integer
    powMod _ 0 _ = 1
    powMod b e md
        | even e    = half
        | otherwise = (half * b) `mod` md
      where
        half = powMod ((b * b) `mod` md) (e `div` 2) md
|
stu-smith/project-euler-haskell
|
Euler-097.hs
|
mit
| 525
| 0
| 10
| 93
| 42
| 28
| 14
| 1
| 1
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ask-skill-skillpackage.html
module Stratosphere.ResourceProperties.ASKSkillSkillPackage where
import Stratosphere.ResourceImports
import Stratosphere.ResourceProperties.ASKSkillOverrides
-- | Full data type definition for ASKSkillSkillPackage. See
-- 'askSkillSkillPackage' for a more convenient constructor.
data ASKSkillSkillPackage =
  ASKSkillSkillPackage
  { _aSKSkillSkillPackageOverrides :: Maybe ASKSkillOverrides
    -- ^ Optional overrides applied on top of the skill package manifest.
  , _aSKSkillSkillPackageS3Bucket :: Val Text
    -- ^ S3 bucket holding the skill package (required).
  , _aSKSkillSkillPackageS3BucketRole :: Maybe (Val Text)
    -- ^ Role used to access the bucket, if any — see the linked AWS docs.
  , _aSKSkillSkillPackageS3Key :: Val Text
    -- ^ S3 object key of the skill package (required).
  , _aSKSkillSkillPackageS3ObjectVersion :: Maybe (Val Text)
    -- ^ Specific S3 object version, if bucket versioning is in use.
  } deriving (Show, Eq)
-- | Serialise to the CloudFormation JSON shape.  'Maybe' fields wrapped in
-- 'fmap' are omitted entirely when 'Nothing'; required fields are always
-- emitted via @Just@.
instance ToJSON ASKSkillSkillPackage where
  toJSON ASKSkillSkillPackage{..} =
    object $
    catMaybes
    [ fmap (("Overrides",) . toJSON) _aSKSkillSkillPackageOverrides
    , (Just . ("S3Bucket",) . toJSON) _aSKSkillSkillPackageS3Bucket
    , fmap (("S3BucketRole",) . toJSON) _aSKSkillSkillPackageS3BucketRole
    , (Just . ("S3Key",) . toJSON) _aSKSkillSkillPackageS3Key
    , fmap (("S3ObjectVersion",) . toJSON) _aSKSkillSkillPackageS3ObjectVersion
    ]
-- | Constructor for 'ASKSkillSkillPackage' taking the required fields as
-- arguments; every optional field starts out as 'Nothing'.
askSkillSkillPackage
  :: Val Text -- ^ 'asksspS3Bucket'
  -> Val Text -- ^ 'asksspS3Key'
  -> ASKSkillSkillPackage
askSkillSkillPackage bucket key =
  ASKSkillSkillPackage
  { _aSKSkillSkillPackageS3Bucket = bucket
  , _aSKSkillSkillPackageS3Key = key
  , _aSKSkillSkillPackageOverrides = Nothing
  , _aSKSkillSkillPackageS3BucketRole = Nothing
  , _aSKSkillSkillPackageS3ObjectVersion = Nothing
  }
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ask-skill-skillpackage.html#cfn-ask-skill-skillpackage-overrides
asksspOverrides :: Lens' ASKSkillSkillPackage (Maybe ASKSkillOverrides)
asksspOverrides = lens _aSKSkillSkillPackageOverrides (\rec val -> rec { _aSKSkillSkillPackageOverrides = val })

-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ask-skill-skillpackage.html#cfn-ask-skill-skillpackage-s3bucket
asksspS3Bucket :: Lens' ASKSkillSkillPackage (Val Text)
asksspS3Bucket = lens _aSKSkillSkillPackageS3Bucket (\rec val -> rec { _aSKSkillSkillPackageS3Bucket = val })

-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ask-skill-skillpackage.html#cfn-ask-skill-skillpackage-s3bucketrole
asksspS3BucketRole :: Lens' ASKSkillSkillPackage (Maybe (Val Text))
asksspS3BucketRole = lens _aSKSkillSkillPackageS3BucketRole (\rec val -> rec { _aSKSkillSkillPackageS3BucketRole = val })

-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ask-skill-skillpackage.html#cfn-ask-skill-skillpackage-s3key
asksspS3Key :: Lens' ASKSkillSkillPackage (Val Text)
asksspS3Key = lens _aSKSkillSkillPackageS3Key (\rec val -> rec { _aSKSkillSkillPackageS3Key = val })

-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ask-skill-skillpackage.html#cfn-ask-skill-skillpackage-s3objectversion
asksspS3ObjectVersion :: Lens' ASKSkillSkillPackage (Maybe (Val Text))
asksspS3ObjectVersion = lens _aSKSkillSkillPackageS3ObjectVersion (\rec val -> rec { _aSKSkillSkillPackageS3ObjectVersion = val })
|
frontrowed/stratosphere
|
library-gen/Stratosphere/ResourceProperties/ASKSkillSkillPackage.hs
|
mit
| 3,443
| 0
| 13
| 363
| 532
| 302
| 230
| 45
| 1
|
module Types
where
import qualified Data.Text as T
import qualified Data.Dates as D
import Data.Monoid
import Data.Monoid.Statistics.Numeric
-- | Ticker symbol, e.g. @\"AAPL\"@.
type Symbol = String

-- | Result of a quote query: row count plus the quote rows themselves.
data Query = Query
  { count :: Int
  , results :: [Quote]
  } deriving Show
-- | Historical price series for one symbol.
data HistoricalData = HistoricalData
  { symbol :: Symbol
  , quotes :: [Quote]
  } deriving (Eq)
-- | Compact rendering: the symbol and the number of quotes rather than the
-- full series.  Note this output is not 'read'-compatible.
instance Show HistoricalData where
  show (HistoricalData sym qs) = show (sym, length qs)
-- | Portfolio weight for one asset — presumably a fraction of total value;
-- TODO confirm whether weights are expected to sum to 1.
type Weight = Double
-- | A weighted collection of historical series.
type Portfolio = [(HistoricalData,Weight)]
-- | Series of daily values/returns.
type Performance = [Double]
-- | A single OHLCV quote for one trading day.
data Quote = Quote
  { date :: D.DateTime
  , open :: Double
  , high :: Double
  , low :: Double
  , close :: Double
  , volume :: Double
  } deriving (Show,Eq)
-- | Statistical assessment of a portfolio's performance.  'variance' and
-- 'mean' are monoidal accumulators from monoid-statistics; 'dailyValues'
-- keeps the raw series.
data Assessment = Assessment
  { variance :: Variance
  , mean :: Mean
  , cumulativeReturns :: Double
  , dailyValues :: [Double]
  } deriving (Eq)
-- | Show the summary statistics only; the raw daily values are elided.
instance Show Assessment where
  show (Assessment var avg cum _) = show (var, avg, cum)
-- | Sharpe-style ratio: mean daily value divided by its standard deviation
-- (square root of the accumulated variance).
getSharpeRatio :: Assessment -> Double
getSharpeRatio a = calcMean (mean a) / sqrt (calcVariance (variance a))
-- | Order assessments by their Sharpe ratio.
-- NOTE(review): this 'Ord' is not consistent with the structurally-derived
-- 'Eq' above (equal ratios need not mean equal assessments) — confirm the
-- Ord laws are acceptable for how this instance is used.
instance Ord Assessment where
  compare x y = compare (getSharpeRatio x) (getSharpeRatio y)
|
Dawil/SimplePortfolioOptimizer
|
src/Types.hs
|
mit
| 1,258
| 0
| 9
| 289
| 390
| 229
| 161
| 41
| 1
|
module Logic.PropositionalLogic.TruthTables where
import Prelude
import Data.List (nub, sortBy)
import Data.Ord (comparing)
import Logic.PropositionalLogic.Macro
import Logic.PropositionalLogic.Sentence
import Notes hiding (not, or)
import qualified Notes as N
-- | Render a propositional literal: the boolean constants as true/false
-- notes, and a symbol as its raw text.
renderLiteral :: Literal -> Note
renderLiteral (Lit True) = true
renderLiteral (Lit False) = false
renderLiteral (Symbol s) = raw s
-- | Render a sentence to a note, fully parenthesising compound sentences.
-- Pattern order matters: the two special 'Not' cases avoid wrapping an
-- already-atomic operand in parentheses, so they must precede the general
-- 'Not' equation.
renderSentence :: Sentence -> Note
renderSentence (Literal l) = renderLiteral l
renderSentence (Not (Literal l)) = N.not $ renderLiteral l
renderSentence (Not s@(Not _)) = N.not $ renderSentence s
renderSentence (Not s) = pars $ N.not $ renderSentence s
renderSentence (Or s1 s2) = pars $ renderSentence s1 ∨ renderSentence s2
renderSentence (And s1 s2) = pars $ renderSentence s1 ∧ renderSentence s2
renderSentence (Implies s1 s2) = pars $ renderSentence s1 ⇒ renderSentence s2
renderSentence (Equiv s1 s2) = pars $ renderSentence s1 ⇔ renderSentence s2
-- | Typeset a worked example: the Tseitin transformation of the given
-- sentence followed by a caption naming the original sentence.
tseitinTransformationExample :: Sentence -> Note
tseitinTransformationExample sen = ex $ do
    renderTransformation sen
    s ["The Tseitin transformation, applied to ", m $ renderSentence sen]
-- | Render a sentence in CNF without the usual parentheses, inserting extra
-- space around conjunctions for readability.  Non-CNF sentences fall back to
-- the normal fully-parenthesised rendering.
renderCNFSentence :: Sentence -> Note
renderCNFSentence s = if isCNF s
    then go s
    else renderSentence s
  where
    go (Or s1 s2) = go s1 ∨ go s2
    go (And s1 s2) = (go s1 <> commS " ") ∧ (commS " " <> go s2)
    go s = renderSentence s
-- | Truth table for a single sentence (and all of its subsentences).
truthTableOf :: Sentence -> Note
truthTableOf = truthTableOfExprs . (: [])
-- | Truth table over several sentences at once.  Columns are all distinct
-- subsentences sorted by depth (so the bare symbols come first); rows are
-- every truth assignment of the symbols that occur.
truthTableOfExprs :: [Sentence] -> Note
truthTableOfExprs exs = linedTable header content
  where
    -- NOTE(review): 'nub' is O(n^2); fine for small tables, worth revisiting
    -- if the sentence lists grow large.
    exprs = sortBy (comparing sentenceDepth) $ nub $ concatMap infixSubs exs
    symbols = nub $ concatMap symbolsOf exprs
    states = possibleStates symbols
    header :: [Note]
    header = map renderSentence exprs
    content :: [[Note]]
    content = map row states
    -- One table row: evaluate every column under the given assignment.
    row :: [(Text, Bool)] -> [Note]
    row vals = map (\e -> raw $ render $ evaluate $ fillInWith vals e) exprs
-- | Typeset each step of the CNF transformation as an aligned row: the
-- (CNF-rendered) sentence on the left, the step's explanation on the right.
renderTransformation :: Sentence -> Note
renderTransformation = align_ . map (\(s, e) -> renderCNFSentence s & text (raw $ " " <> e)) . cnfTransformation
|
NorfairKing/the-notes
|
src/Logic/PropositionalLogic/TruthTables.hs
|
gpl-2.0
| 2,437
| 1
| 14
| 712
| 772
| 396
| 376
| 47
| 4
|
-----------------------------------------------------------------------------
-- |
-- Module : Numeric.Transform.Fourier.DFT
-- Copyright : (c) Matthew Donadio 2003
-- License : GPL
--
-- Maintainer : m.p.donadio@ieee.org
-- Stability : experimental
-- Portability : portable
--
-- Not so naive implementation of a Discrete Fourier Transform.
--
-----------------------------------------------------------------------------
{-
We cheat in three ways from a direct translation of the DFT equation:
X(k) = sum(n=0..N-1) x(n) * e^(-2*j*pi*n*k/N)
1. We precompute all values of W_N, and exploit the periodicity.
This is just to cut down on the number of sin/cos calls.
2. We calculate X(0) seperately to prevent multiplication by 1
3. We factor out x(0) to prevent multiplication by 1
-}
module Numeric.Transform.Fourier.DFT (dft) where
import Data.Array
import Data.Complex
-- We use a helper function here because we may want to have special
-- cases for small DFT's and we want to precompute the suspension all of
-- the twiddle factors.
{-# specialize dft :: Array Int (Complex Float) -> Array Int (Complex Float) #-}
{-# specialize dft :: Array Int (Complex Double) -> Array Int (Complex Double) #-}
-- | Discrete Fourier Transform.  Precomputes the N twiddle factors
-- W_N^i = e^(-2*pi*j*i/N) once and hands them to the worker 'dft''.
dft :: (Ix a, Integral a, RealFloat b) => Array a (Complex b) -- ^ x[n]
       -> Array a (Complex b) -- ^ X[k]
dft a = dft' a w n
    where -- BUG FIX: the generator previously ran over [0..(n-0)], yielding
          -- one more element than the array bounds (0,n-1) hold; the excess
          -- element was silently dropped by listArray's internal zip.
          w = listArray (0,n-1) [ cis (-2 * pi * fromIntegral i / fromIntegral n) | i <- [0..(n-1)] ]
          n = snd (bounds a) + 1
{-# specialize dft' :: Array Int (Complex Float) -> Array Int (Complex Float) -> Int -> Array Int (Complex Float) #-}
{-# specialize dft' :: Array Int (Complex Double) -> Array Int (Complex Double) -> Int -> Array Int (Complex Double) #-}
-- | DFT worker: input array, precomputed twiddle factors, transform length.
-- X(0) is computed as a plain sum (all twiddles are 1 there) and x(0) is
-- factored out of the remaining bins — both avoid multiplications by one,
-- per the module header comment.  A length-1 transform is the identity.
dft' :: (Ix a, Integral a, RealFloat b) => Array a (Complex b) -> Array a (Complex b) -> a -> Array a (Complex b)
dft' a _ 1 = a
dft' a w n = listArray (0,n-1) (sum [ a!k | k <- [0..(n-1)] ] : [ a!0 + sum [ a!k * wik i k | k <- [1..(n-1)] ] | i <- [1..(n-1)] ])
    where -- Twiddle lookup, exploiting periodicity via (i*k `mod` n);
          -- the zero cases return 1 without touching the table.
          wik 0 _ = 1
          wik _ 0 = 1
          wik i k = w!(i*k `mod` n)
|
tolysz/dsp
|
Numeric/Transform/Fourier/DFT.hs
|
gpl-2.0
| 2,085
| 0
| 16
| 444
| 468
| 255
| 213
| 18
| 3
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE OverloadedLabels #-}
module GUI.DialogManager.Build (
buildDialogManager
, configureDialogManager
) where
import Data.Maybe(fromJust)
import Data.Text(Text)
import qualified Data.Text as T
import GI.Gtk hiding (MessageDialog)
import GUI.BuildMonad
import GUI.CanBeCast
import GUI.DialogManager
import GUI.Iteration
import GUI.HKD
import Presenter.Input
-- | Look up every dialog-related widget from the Glade 'Builder' and collect
-- them into a 'DialogManager'.  The string literals are widget ids from the
-- .ui file and must match it exactly.
buildDialogManager :: Builder -> IO DialogManager
buildDialogManager builder = do
  let getObject :: CanBeCast obj => Text -> IO obj
      -- fromJust: assumes every id below exists in the builder file — a
      -- missing id crashes here at startup.  TODO confirm against the .ui.
      getObject name = builderGetObject builder name >>= doCast . fromJust
  fromIO DialogManager {
    changeFieldFormulaDialog = getObject "changeFieldFormulaDialog"
  , changeFieldFormulaEntry = getObject "changeFieldFormulaEntry"
  , changeFieldFormulaLabel = getObject "changeFieldFormulaLabel"
  , changeFieldFormulaButton = getObject "changeFieldFormulaCheckButton"
  , confFileSaveCheckButton = getObject "confFileSaveCheckButton"
  , saveAsDialog = getObject "saveAsDialog"
  , confFileLoadCheckButton = getObject "confFileLoadCheckButton"
  , loadFileDialog = getObject "loadFileDialog"
  , importFromFileDialog = getObject "importFromFileDialog"
  , importInputSeparator = getObject "importInputSeparator"
  , importInputFormat = getObject "importInputFormat"
  , importFieldsOptionsDialog = getObject "importFieldsOptionsDialog"
  , importFieldsOptionsRows = getObject "importFieldsOptionsRows"
  , importRowsOptionsDialog = getObject "importRowsOptionsDialog"
  , importRowsOptionsRows = getObject "importRowsOptionsRows"
  , searchFieldDialog = getObject "searchFieldDialog"
  , searchFieldCombo = getObject "searchFieldCombo"
  , copyOtherDialog = getObject "copyOtherDialog"
  , copyOtherCombo = getObject "copyOtherCombo"
  , showSourcesDialog = getObject "showSourcesDialog"
  , sourcesTreeView = getObject "sourcesTreeView"
  , aboutDialog = getObject "aboutDialog"
  }
-- | Run all one-time dialog wiring; currently only the change-field-formula
-- dialog needs any configuration.
configureDialogManager :: BuildMonad()
configureDialogManager = prepareChangeFieldFormulaDialog
-- | Wire the formula dialog: the formula entry is only editable while the
-- check button is active.
prepareChangeFieldFormulaDialog :: BuildMonad ()
prepareChangeFieldFormulaDialog = do
  dmg <- getDialogManager
  let btn = changeFieldFormulaButton dmg
      entry = changeFieldFormulaEntry dmg
  -- On every toggle, mirror the button state into the entry's sensitivity.
  ioVoid $ btn `on` #toggled $ toggleButtonGetActive btn >>=
           widgetSetSensitive entry
-- | Build the input event showing a "feature not implemented" message
-- dialog.  The message text is user-facing (Spanish) and left as-is.
notImplementedDialog :: Text -> Input
notImplementedDialog f = toInput $ MessageDialog (InformationMessage $ T.concat ["Opción ", f, " no implementada"])
|
jvilar/hrows
|
lib/GUI/DialogManager/Build.hs
|
gpl-2.0
| 2,560
| 0
| 12
| 413
| 488
| 264
| 224
| 54
| 1
|
{-# LANGUAGE StandaloneDeriving #-}
module Annfail06 where
-- Testing that we don't accept Typeable or Data instances defined in the same module
import Annfail06_Help
import Data.Data
import Data.Typeable
-- Per the module comment above: the Typeable/Data instances for the
-- annotation value live here, in the module being compiled, which GHC must
-- reject when evaluating the annotations below.
deriving instance Typeable InstancesInWrongModule
instance Data InstancesInWrongModule where
    gfoldl = undefined
    gunfold = undefined

-- Annotations at module, type, and value level — each should fail the same
-- way since all use the locally-instanced type.
{-# ANN module InstancesInWrongModule #-}
{-# ANN type Foo InstancesInWrongModule #-}
data Foo = Bar

{-# ANN f InstancesInWrongModule #-}
f x = x
|
leroux/testsuite
|
tests/annotations/should_fail/annfail06.hs
|
gpl-3.0
| 498
| 0
| 5
| 81
| 63
| 38
| 25
| 14
| 1
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.