| code | repo_name | path | license | size | n_ast_errors | ast_max_depth | n_whitespaces | n_ast_nodes | n_ast_terminals | n_ast_nonterminals | loc | cycloplexity |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
| stringlengths 5 to 1.03M | stringlengths 5 to 90 | stringlengths 4 to 158 | stringclasses 15 values | int64 5 to 1.03M | int64 0 to 53.9k | int64 2 to 4.17k | int64 0 to 365k | int64 3 to 317k | int64 1 to 171k | int64 1 to 146k | int64 -1 to 37.3k | int64 -1 to 1.31k |
import Control.Monad
mean = uncurry (/) . foldr (\e (s,c) -> (e+s,c+1)) (0,0)
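-- For example, the fold threads a running (sum, count) pair, so
-- mean [2,4,6] reduces to uncurry (/) (12,3) == 4.0.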
main :: IO ()
main = do
l1 <- getLine
l2 <- getLine
l3 <- getLine
l4 <- getLine
l5 <- getLine
let note = read (words l1 !! 0) :: Float
let [mini, maxi] = readNumbers l2
let moy = read (words l3 !! 0) :: Float
let notes = (readNumbers l5) ++ [note]
putStrLn $ conclusion note mini maxi moy notes
conclusion :: Float -> Float -> Float -> Float -> [Float] -> String
conclusion note mini maxi moy notes
| (note > maxi
|| note < mini
|| mini > maxi
|| abs (mean notes) - moy > 0.02
|| not (elem mini notes)
|| not (elem maxi notes)
) = "Jack ! Viens ici !"
| otherwise = "RAS"
readNumbers :: String -> [Float]
readNumbers = map read . words
| INSAlgo/codingbattle-2017 | concours/b_bulletin/sol/sol-hs.hs | gpl-3.0 | 791 | 0 | 18 | 224 | 377 | 188 | 189 | 26 | 1 |
-- grid is a game written in Haskell
-- Copyright (C) 2018 karamellpelle@hotmail.com
--
-- This file is part of grid.
--
-- grid is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- grid is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with grid. If not, see <http://www.gnu.org/licenses/>.
--
module Game.Helpers.Friends
(
) where
-- todo: helper functions like connecting to friends
{-
friendsSelectWidget :: [Friend] -> (Friend -> a -> a) -> ScrollWidget a
-}
| karamellpelle/grid | designer/source/Game/Helpers/Friends.hs | gpl-3.0 | 928 | 0 | 3 | 173 | 30 | 27 | 3 | 2 | 0 |
data Foo = Bar
{ -- a
foo -- b
:: -- c
Baz -- d
, -- e
bars :: Bizzz
}
deriving (Show, Eq, Monad, Functor, Traversable, Foldable)
| lspitzner/brittany | data/Test65.hs | agpl-3.0 | 169 | 0 | 8 | 69 | 54 | 32 | 22 | 5 | 0 |
module Import.Base
( module Import
, apresult
, parseInt
, parseUnsigned
, parseInt'
, getJson
, unValue3
) where
import BasicPrelude as Import hiding (on, groupBy, insert, insertBy, delete,
deleteBy, length, splitAt, filter)
import Data.Either as Import
import Data.Text as Import (pack, unpack, length, splitAt, filter)
import Data.Text.Lazy as Import (toStrict, fromStrict)
import Yesod.Core as Import hiding (Header, Value)
import Yesod.Form as Import hiding (parseTime)
import Yesod.Form.Bootstrap3 as Import
import Yesod.Static as Import
import Yesod.Persist.Core as Import
import Network.HTTP.Client.Conduit as Import
import Settings as Import
import Settings.StaticFiles as Import
import Yesod.Auth as Import
import Yesod.Core.Types as Import (loggerSet)
import Yesod.Default.Config2 as Import
import Database.Esqueleto as Import hiding (isNothing)
import Data.Default as Import
import Data.Time as Import
import qualified Data.Attoparsec.Text as AP
import qualified Data.Aeson.Types as Json
apresult :: AP.Result r -> Maybe r
apresult (AP.Fail _ _ _) = Nothing
apresult (AP.Partial c) = apresult (c "")
apresult (AP.Done _ r) = Just r
parseInt :: Text -> Maybe Int
parseInt str = apresult $ AP.parse (AP.signed AP.decimal) str
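-- For example, parseInt "-42" == Just (-42) and parseInt "abc" == Nothing.
-- Trailing non-numeric input is ignored, so parseInt "12abc" == Just 12.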
parseUnsigned :: Text -> Maybe Int
parseUnsigned str = apresult $ AP.parse AP.decimal str
-- Unsafe Int read
parseInt' :: Text -> Int
parseInt' str = case parseInt str of
Just a -> a
Nothing -> error $ "unsafe use of parseInt on: " ++ (unpack str)
getJson :: FromJSON a => (a -> Json.Parser b) -> HandlerT site IO b
getJson p = do
body <- requireJsonBody
let m = Json.parseMaybe p body
maybe (invalidArgs []) return m
unValue3 :: (Value a, Value b, Value c) -> (a, b, c)
unValue3 (Value a, Value b, Value c) = (a, b, c)
| sir-murray/lol | Import/Base.hs | agpl-3.0 | 1,989 | 0 | 11 | 504 | 624 | 363 | 261 | 48 | 2 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="hu-HU">
<title>Server-Sent Events | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
| 0xkasun/security-tools | src/org/zaproxy/zap/extension/sse/resources/help_hu_HU/helpset_hu_HU.hs | apache-2.0 | 980 | 80 | 67 | 160 | 419 | 212 | 207 | -1 | -1 |
module Palindromes.A298481Spec (main, spec) where
import Test.Hspec
import Palindromes.A298481 (a298481)
main :: IO ()
main = hspec spec
spec :: Spec
spec = describe "A298481" $
it "correctly computes the first 20 elements" $
take 20 (map a298481 [1..]) `shouldBe` expectedValue where
expectedValue = [1,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1]
| peterokagey/haskellOEIS | test/Palindromes/A298481Spec.hs | apache-2.0 | 357 | 0 | 10 | 59 | 160 | 95 | 65 | 10 | 1 |
-- Copyright 2020 Google LLC
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
{-# LANGUAGE NoImplicitPrelude #-}
module Main where
import ReexportPrelude
main :: IO ()
main = return ()
| google/cabal2bazel | bzl/tests/rules/ReexportPreludeTest.hs | apache-2.0 | 698 | 0 | 6 | 120 | 41 | 29 | 12 | 5 | 1 |
{-# LANGUAGE FlexibleInstances, DeriveGeneric, DeriveAnyClass #-}
module Types where
{- ===========================================================================
Contains basic types - you'll have to extend several of the definitions below
=========================================================================== -}
import GHC.Generics
import FPPrac.Trees
-- ===================================================================
-- Example Alphabet
-- - Extend, adapt, change the non-terminals to your own needs
-- - Do NOT change the first two groups of constructors (Symbol ... Rep1)
data Alphabet = Terminal String -- Terminal symbol: WILL be included in parseTree
| Symbol String -- Terminal symbol: will NOT be included in parseTree
| SyntCat Alphabet -- Checks whether a string belongs to a syntactic category
| Alt [Alphabet] [Alphabet] -- Try both
| Opt [Alphabet] -- Optional
| Rep0 [Alphabet] -- Zero or more repetitions
| Rep1 [Alphabet] -- One or more repetitions
-- Types
| IntType -- Integer value
| BoolType -- Boolean value
| EnumType -- EnumType
| Var -- Variable name
| Pid -- Procedure name
| Type -- Type
-- Operators
| OpOrd
| OpEqual
| OpMul
| OpPlusMin
| OpAnd
| OpOr
| OpXor
| OpNot
| OpIncDec
-- Punctuation
| PreUnary
| Op
| Unary -- Unary (for now prefix) operator
| Par -- Parentheses
| Brace -- Braces
| Comma -- Comma
| Semi -- Semicolon
-- Classic Expressions
| Expr -- Expression
| Expr'
| AND
| AND'
| XOr
| XOr'
| OR
| OR'
| EQUAL
| EQUAL'
| Ord
| Ord'
| Term
| Term'
| Factor
| Ass -- Assignment expression
-- Statements
| Stat -- Statement
| Decl -- Declaration statement
| If -- If statement
| Else -- If statement
| While -- While statement
| Call -- Procedure call
| Block -- Block statement
| Fork -- Fork statement
| Join -- Join statement
| Global -- Global keyword
| Enum -- enum keyword
| Print
| Program -- Program
| Proc -- Procedure
deriving (Eq,Ord,Show,Generic,ToRoseTree)
--type TypeTable = [(String, [AST{-Var-}])]
type VariableType = (String, Alphabet{- Must be of either IntType or BoolType -})
type FunctionType = (String, [(String, Alphabet{- Must be of either IntType or BoolType -})])
type EnumCheckerType = (String, [String])
-- CheckType [Functions] [Globals] [Scopes[Variables]] [Enumerations]
-- Scopes are defined in the following manner: [deepest scope,...,shallowest scope]
type CheckType = ([FunctionType], [VariableType], [[VariableType]], [EnumCheckerType])
-- An Alphabet, in this context, is equivalent to a type
data AST = ASTProgram [AST] CheckType
-- Globals
| ASTGlobal Alphabet AST (Maybe AST) CheckType
-- Enumerations
| ASTEnum String [AST] CheckType
-- Procedures and arguments
| ASTProc String [AST] AST CheckType
| ASTArg AST AST CheckType
-- Statements
| ASTBlock [AST] CheckType
| ASTDecl Alphabet AST (Maybe AST) CheckType
| ASTIf AST AST (Maybe AST) CheckType
| ASTWhile AST AST CheckType
| ASTFork String [AST] CheckType
| ASTJoin CheckType
| ASTCall String [AST] CheckType
| ASTPrint [AST] CheckType
| ASTAss AST AST (Maybe Alphabet) CheckType
-- Expressions
| ASTExpr AST (Maybe Alphabet) CheckType
| ASTVar String CheckType
| ASTInt String CheckType
| ASTBool String CheckType
| ASTType String CheckType
| ASTOp AST String AST (Maybe Alphabet) CheckType
| ASTPreUnary String AST (Maybe Alphabet) CheckType
| ASTUnary String AST (Maybe Alphabet) CheckType
deriving Show
-- ===================================================================
-- Symbolic notation for EBNF constructors
ps <> qs = Alt ps qs
(?:) ps = Opt ps
(*:) ps = Rep0 ps
(+:) ps = Rep1 ps
-- ===================================================================
type Grammar = Alphabet -> [[Alphabet]]
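-- A minimal illustrative rule written in this notation (the non-terminals
-- chosen here are only an example, not part of any required grammar):
--
-- exampleGrammar :: Grammar
-- exampleGrammar Term = [[ Factor, (*:) [OpMul, Factor] ]]
-- exampleGrammar _ = []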
type Token = (Alphabet,String,Int) -- Alphabet: indicates the "syntactic category" to which
-- the String belongs (to distinguish, among others, between
-- reserved words and identifiers in general),
-- String: the token itself,
-- Int: the position of the token in the input token-list
-- (needed for error messages).
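-- For example, (Var, "x", 3) would be the token for an identifier "x" at
-- position 3 of the token list (the concrete values are only illustrative).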
instance ToRoseTree Token where
toRoseTree t = RoseNode (show t) []
data ParseTree = PLeaf Token
| PNode Alphabet [ParseTree]
| PError ParseTree [Alphabet] Alphabet String Int
deriving (Eq,Show,Generic,ToRoseTree)
instance Ord ParseTree where
PError _ _ _ _ k < PError _ _ _ _ k' = k < k'
_ < _ = error "ordering only in case of parse-errors"
PError _ _ _ _ k <= PError _ _ _ _ k' = k <= k'
_ <= _ = error "ordering only in case of parse-errors"
type ParseState = ( Alphabet -- Non-terminal indicating the present subexpression
, [ParseTree] -- The already produced trees within the present subexpression
, [Token] -- The remaining list of input tokens
)
-- ===================================================================
x ∈ xs = x `elem` xs
-- ===================================================================
-- Pretty Printing
toStrings tree = case tree of
PLeaf t -> ["PLeaf " ++ show t]
PNode nt ts -> ("PNode " ++ show nt) : (addSpace 7 $ concat $ addEndBrack $ addListNotation $ map toStrings ts)
where
addSpace n = map ((replicate n ' ') ++)
addListNotation ((str:strs):strss) = (("["++str):strs)
: [ (","++str'):strs' | (str':strs') <- strss ]
addEndBrack [strs] = [ strs ++ ["]"] ]
addEndBrack (strs:strss) = strs : addEndBrack strss
PError tr rule nt str k -> [ "==========="
, "Parse Error"
, "==========="
, "Recognized:"
, "-----------"
]
++ toStrings tr ++
[ "-----------"
, "Still to go: " ++ show rule
, "Expected: " ++ show nt
, "Found: " ++ str
, "At position: " ++ show k
, "==========="
]
prpr t = putStr $ ('\n':) $ (++"\n") $ unlines $ toStrings t
| wouwouwou/2017_module_8 | src/haskell/PP-project-2017/Types.hs | apache-2.0 | 8,494 | 0 | 15 | 3,826 | 1,332 | 779 | 553 | 133 | 4 |
{-# LANGUAGE Haskell2010 #-}
module PositionPragmas where
{-# LINE 8 "hypsrc-test/src/PositionPragmas.hs" #-}
foo :: String
foo = bar
{-# LINE 23 "hypsrc-test/src/PositionPragmas.hs" #-}
bar :: String
bar = foo
| haskell/haddock | hypsrc-test/src/PositionPragmas.hs | bsd-2-clause | 217 | 0 | 4 | 35 | 27 | 18 | 9 | 6 | 1 |
{-# LANGUAGE OverloadedStrings #-}
import Text.CSS.Parse
import Text.CSS.Render
import Test.Hspec
import Test.Hspec.QuickCheck (prop)
import qualified Data.Text as T
import Data.Text.Lazy.Builder (toLazyText)
import Data.Text.Lazy (toStrict)
import Data.Text (Text)
import Test.QuickCheck
import Control.Arrow ((***))
import Control.Monad (liftM)
main :: IO ()
main = hspec $ do
describe "single attribute parser" $ do
it "trimming whitespace" $
parseAttr " foo : bar " `shouldBe` Right ("foo", "bar")
describe "multiple attribute parser" $ do
it "no final semicolon" $
parseAttrs " foo: bar ; baz : bin "
`shouldBe` Right [("foo", "bar"), ("baz", "bin")]
it "final semicolon" $
parseAttrs " foo: bar ; baz : bin ;"
`shouldBe` Right [("foo", "bar"), ("baz", "bin")]
it "ignores comments" $
parseAttrs " foo: bar ; /* ignored */ baz : bin ;"
`shouldBe` Right [("foo", "bar"), ("baz", "bin")]
describe "block parser" $ do
it "multiple blocks" $
parseBlocks (T.concat
[ "foo{fooK1:fooV1;/*ignored*/fooK2:fooV2 }\n\n"
, "/*ignored*/"
, "bar{barK1:barV1;/*ignored*/barK2:barV2 ;}\n\n/*ignored*/"
]) `shouldBe` Right [
("foo", [("fooK1", "fooV1"), ("fooK2", "fooV2")])
, ("bar", [("barK1", "barV1"), ("barK2", "barV2")])
]
it "media queries" $ do
parseBlocks "@media print {* {text-shadow: none !important;} }"
`shouldBe` Right []
parseNestedBlocks "@media print {* {text-shadow: none !important; color: #000 !important; } a, a:visited { text-decoration: underline; }}"
`shouldBe` Right [NestedBlock "@media print"
[ LeafBlock ("*", [("text-shadow", "none !important"), ("color", "#000 !important")])
, LeafBlock ("a, a:visited", [("text-decoration", "underline")])
]
]
describe "render" $ -- do
it "works" $
renderBlocks [
("foo", [("bar", "baz"), ("bin", "bang")])
, ("foo2", [("x", "y")])
]
`shouldBe` "foo{bar:baz;bin:bang}foo2{x:y}"
describe "parse/render" $ do
prop "idempotent blocks" $ \bs ->
parseBlocks (toStrict $ toLazyText $ renderBlocks $ unBlocks bs) == Right (unBlocks bs)
prop "idempotent nested blocks" $ \bs ->
parseNestedBlocks (toStrict $ toLazyText $ renderNestedBlocks bs) == Right bs
newtype Blocks = Blocks { unBlocks :: [(Text, [(Text, Text)])] }
deriving (Show, Eq)
instance Arbitrary NestedBlock where
arbitrary = resize 4 $ frequency
[ (80, (LeafBlock . unBlock) `liftM` arbitrary)
, (10, do mediatype <- elements ["@print", "@screen",
"@media (min-width:768px)",
"@media screen and (max-width: 300px)"]
contents <- arbitrary
return (NestedBlock mediatype contents))
]
instance Arbitrary Blocks where
arbitrary = fmap (Blocks . map unBlock) arbitrary
newtype Block = Block { unBlock :: (Text, [(Text, Text)]) }
deriving (Show, Eq)
instance Arbitrary Block where
arbitrary = do
sel <- frequency [
(90, unT `fmap` arbitrary)
, (10, return "@font-face")
]
attrs <- arbitrary
return $ Block (sel, unAttrs attrs)
newtype Attrs = Attrs { unAttrs :: [(Text, Text)] }
instance Arbitrary Attrs where
arbitrary = fmap (Attrs . map (unT *** unT)) arbitrary
newtype T = T { unT :: Text }
instance Arbitrary T where
arbitrary = fmap (T . T.pack) $ listOf1 $ elements $ concat
[ ['A'..'Z']
, ['a'..'z']
, ['0'..'9']
, "-_"
]
| jgm/css-text | runtests.hs | bsd-2-clause | 3,753 | 0 | 22 | 1,069 | 1,078 | 601 | 477 | 85 | 1 |
{-# LANGUAGE TemplateHaskell, DeriveFunctor #-}
{-| Some common Ganeti types.
This holds types common to both core work, and to htools. Types that
are very core specific (e.g. configuration objects) should go in
'Ganeti.Objects', while types that are specific to htools in-memory
representation should go into 'Ganeti.HTools.Types'.
-}
{-
Copyright (C) 2012, 2013, 2014 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.Types
( AllocPolicy(..)
, allocPolicyFromRaw
, allocPolicyToRaw
, InstanceStatus(..)
, instanceStatusFromRaw
, instanceStatusToRaw
, DiskTemplate(..)
, diskTemplateToRaw
, diskTemplateFromRaw
, diskTemplateMovable
, TagKind(..)
, tagKindToRaw
, tagKindFromRaw
, NonNegative
, fromNonNegative
, mkNonNegative
, Positive
, fromPositive
, mkPositive
, Negative
, fromNegative
, mkNegative
, NonEmpty
, fromNonEmpty
, mkNonEmpty
, NonEmptyString
, QueryResultCode
, IPv4Address
, mkIPv4Address
, IPv4Network
, mkIPv4Network
, IPv6Address
, mkIPv6Address
, IPv6Network
, mkIPv6Network
, MigrationMode(..)
, migrationModeToRaw
, VerifyOptionalChecks(..)
, verifyOptionalChecksToRaw
, DdmSimple(..)
, DdmFull(..)
, ddmFullToRaw
, CVErrorCode(..)
, cVErrorCodeToRaw
, Hypervisor(..)
, hypervisorFromRaw
, hypervisorToRaw
, OobCommand(..)
, oobCommandToRaw
, OobStatus(..)
, oobStatusToRaw
, StorageType(..)
, storageTypeToRaw
, EvacMode(..)
, evacModeToRaw
, FileDriver(..)
, fileDriverToRaw
, InstCreateMode(..)
, instCreateModeToRaw
, RebootType(..)
, rebootTypeToRaw
, ExportMode(..)
, exportModeToRaw
, IAllocatorTestDir(..)
, iAllocatorTestDirToRaw
, IAllocatorMode(..)
, iAllocatorModeToRaw
, NICMode(..)
, nICModeToRaw
, JobStatus(..)
, jobStatusToRaw
, jobStatusFromRaw
, FinalizedJobStatus(..)
, finalizedJobStatusToRaw
, JobId
, fromJobId
, makeJobId
, makeJobIdS
, RelativeJobId
, JobIdDep(..)
, JobDependency(..)
, absoluteJobDependency
, getJobIdFromDependency
, OpSubmitPriority(..)
, opSubmitPriorityToRaw
, parseSubmitPriority
, fmtSubmitPriority
, OpStatus(..)
, opStatusToRaw
, opStatusFromRaw
, ELogType(..)
, eLogTypeToRaw
, ReasonElem
, ReasonTrail
, StorageUnit(..)
, StorageUnitRaw(..)
, StorageKey
, addParamsToStorageUnit
, diskTemplateToStorageType
, VType(..)
, vTypeFromRaw
, vTypeToRaw
, NodeRole(..)
, nodeRoleToRaw
, roleDescription
, DiskMode(..)
, diskModeToRaw
, BlockDriver(..)
, blockDriverToRaw
, AdminState(..)
, adminStateFromRaw
, adminStateToRaw
, AdminStateSource(..)
, adminStateSourceFromRaw
, adminStateSourceToRaw
, StorageField(..)
, storageFieldToRaw
, DiskAccessMode(..)
, diskAccessModeToRaw
, LocalDiskStatus(..)
, localDiskStatusFromRaw
, localDiskStatusToRaw
, localDiskStatusName
, ReplaceDisksMode(..)
, replaceDisksModeToRaw
, RpcTimeout(..)
, rpcTimeoutFromRaw -- FIXME: not used anywhere
, rpcTimeoutToRaw
, HotplugTarget(..)
, hotplugTargetToRaw
, HotplugAction(..)
, hotplugActionToRaw
, SshKeyType(..)
, sshKeyTypeToRaw
, Private(..)
, showPrivateJSObject
, Secret(..)
, showSecretJSObject
, revealValInJSObject
, redacted
, HvParams
, OsParams
, OsParamsPrivate
, TimeStampObject(..)
, UuidObject(..)
, ForthcomingObject(..)
, SerialNoObject(..)
, TagsObject(..)
) where
import Control.Applicative
import Control.Monad (liftM)
import qualified Text.JSON as JSON
import Text.JSON (JSON, readJSON, showJSON)
import Data.Ratio (numerator, denominator)
import qualified Data.Set as Set
import System.Time (ClockTime)
import qualified Ganeti.ConstantUtils as ConstantUtils
import Ganeti.JSON (Container, HasStringRepr(..))
import qualified Ganeti.THH as THH
import Ganeti.Utils
-- * Generic types
-- | Type that holds a non-negative value.
newtype NonNegative a = NonNegative { fromNonNegative :: a }
deriving (Show, Eq, Ord)
-- | Smart constructor for 'NonNegative'.
mkNonNegative :: (Monad m, Num a, Ord a, Show a) => a -> m (NonNegative a)
mkNonNegative i | i >= 0 = return (NonNegative i)
| otherwise = fail $ "Invalid value for non-negative type '" ++
show i ++ "'"
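-- For example, in the 'Maybe' monad:
-- mkNonNegative 3 == Just (NonNegative 3)
-- mkNonNegative (-1) == (Nothing :: Maybe (NonNegative Int))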
instance (JSON.JSON a, Num a, Ord a, Show a) => JSON.JSON (NonNegative a) where
showJSON = JSON.showJSON . fromNonNegative
readJSON v = JSON.readJSON v >>= mkNonNegative
-- | Type that holds a positive value.
newtype Positive a = Positive { fromPositive :: a }
deriving (Show, Eq, Ord)
-- | Smart constructor for 'Positive'.
mkPositive :: (Monad m, Num a, Ord a, Show a) => a -> m (Positive a)
mkPositive i | i > 0 = return (Positive i)
| otherwise = fail $ "Invalid value for positive type '" ++
show i ++ "'"
instance (JSON.JSON a, Num a, Ord a, Show a) => JSON.JSON (Positive a) where
showJSON = JSON.showJSON . fromPositive
readJSON v = JSON.readJSON v >>= mkPositive
-- | Type that holds a negative value.
newtype Negative a = Negative { fromNegative :: a }
deriving (Show, Eq, Ord)
-- | Smart constructor for 'Negative'.
mkNegative :: (Monad m, Num a, Ord a, Show a) => a -> m (Negative a)
mkNegative i | i < 0 = return (Negative i)
| otherwise = fail $ "Invalid value for negative type '" ++
show i ++ "'"
instance (JSON.JSON a, Num a, Ord a, Show a) => JSON.JSON (Negative a) where
showJSON = JSON.showJSON . fromNegative
readJSON v = JSON.readJSON v >>= mkNegative
-- | Type that holds a non-null list.
newtype NonEmpty a = NonEmpty { fromNonEmpty :: [a] }
deriving (Show, Eq, Ord)
-- | Smart constructor for 'NonEmpty'.
mkNonEmpty :: (Monad m) => [a] -> m (NonEmpty a)
mkNonEmpty [] = fail "Received empty value for non-empty list"
mkNonEmpty xs = return (NonEmpty xs)
instance (JSON.JSON a) => JSON.JSON (NonEmpty a) where
showJSON = JSON.showJSON . fromNonEmpty
readJSON v = JSON.readJSON v >>= mkNonEmpty
-- | A simple type alias for non-empty strings.
type NonEmptyString = NonEmpty Char
type QueryResultCode = Int
newtype IPv4Address = IPv4Address { fromIPv4Address :: String }
deriving (Show, Eq, Ord)
-- FIXME: this should check that 'address' is a valid ip
mkIPv4Address :: Monad m => String -> m IPv4Address
mkIPv4Address address =
return IPv4Address { fromIPv4Address = address }
instance JSON.JSON IPv4Address where
showJSON = JSON.showJSON . fromIPv4Address
readJSON v = JSON.readJSON v >>= mkIPv4Address
newtype IPv4Network = IPv4Network { fromIPv4Network :: String }
deriving (Show, Eq, Ord)
-- FIXME: this should check that 'address' is a valid ip
mkIPv4Network :: Monad m => String -> m IPv4Network
mkIPv4Network address =
return IPv4Network { fromIPv4Network = address }
instance JSON.JSON IPv4Network where
showJSON = JSON.showJSON . fromIPv4Network
readJSON v = JSON.readJSON v >>= mkIPv4Network
newtype IPv6Address = IPv6Address { fromIPv6Address :: String }
deriving (Show, Eq, Ord)
-- FIXME: this should check that 'address' is a valid ip
mkIPv6Address :: Monad m => String -> m IPv6Address
mkIPv6Address address =
return IPv6Address { fromIPv6Address = address }
instance JSON.JSON IPv6Address where
showJSON = JSON.showJSON . fromIPv6Address
readJSON v = JSON.readJSON v >>= mkIPv6Address
newtype IPv6Network = IPv6Network { fromIPv6Network :: String }
deriving (Show, Eq, Ord)
-- FIXME: this should check that 'address' is a valid ip
mkIPv6Network :: Monad m => String -> m IPv6Network
mkIPv6Network address =
return IPv6Network { fromIPv6Network = address }
instance JSON.JSON IPv6Network where
showJSON = JSON.showJSON . fromIPv6Network
readJSON v = JSON.readJSON v >>= mkIPv6Network
-- * Ganeti types
-- | Instance disk template type. The disk template is a name for the
-- constructor of the disk configuration 'DiskLogicalId' used for
-- serialization, configuration values, etc.
$(THH.declareLADT ''String "DiskTemplate"
[ ("DTDiskless", "diskless")
, ("DTFile", "file")
, ("DTSharedFile", "sharedfile")
, ("DTPlain", "plain")
, ("DTBlock", "blockdev")
, ("DTDrbd8", "drbd")
, ("DTRbd", "rbd")
, ("DTExt", "ext")
, ("DTGluster", "gluster")
])
$(THH.makeJSONInstance ''DiskTemplate)
instance THH.PyValue DiskTemplate where
showValue = show . diskTemplateToRaw
instance HasStringRepr DiskTemplate where
fromStringRepr = diskTemplateFromRaw
toStringRepr = diskTemplateToRaw
-- | Predicate on disk templates indicating if instances based on this
-- disk template can freely be moved (to any node in the node group).
diskTemplateMovable :: DiskTemplate -> Bool
-- Note: we deliberately do not use wildcard pattern to force an
-- update of this function whenever a new disk template is added.
diskTemplateMovable DTDiskless = True
diskTemplateMovable DTFile = False
diskTemplateMovable DTSharedFile = True
diskTemplateMovable DTPlain = False
diskTemplateMovable DTBlock = False
diskTemplateMovable DTDrbd8 = False
diskTemplateMovable DTRbd = True
diskTemplateMovable DTExt = True
diskTemplateMovable DTGluster = True
-- | Data type representing what items the tag operations apply to.
$(THH.declareLADT ''String "TagKind"
[ ("TagKindInstance", "instance")
, ("TagKindNode", "node")
, ("TagKindGroup", "nodegroup")
, ("TagKindCluster", "cluster")
, ("TagKindNetwork", "network")
])
$(THH.makeJSONInstance ''TagKind)
-- | The Group allocation policy type.
--
-- Note that the order of constructors is important as the automatic
-- Ord instance will order them in the order they are defined, so when
-- changing this data type be careful about the interaction with the
-- desired sorting order.
$(THH.declareLADT ''String "AllocPolicy"
[ ("AllocPreferred", "preferred")
, ("AllocLastResort", "last_resort")
, ("AllocUnallocable", "unallocable")
])
$(THH.makeJSONInstance ''AllocPolicy)
-- | The Instance real state type.
$(THH.declareLADT ''String "InstanceStatus"
[ ("StatusDown", "ADMIN_down")
, ("StatusOffline", "ADMIN_offline")
, ("ErrorDown", "ERROR_down")
, ("ErrorUp", "ERROR_up")
, ("NodeDown", "ERROR_nodedown")
, ("NodeOffline", "ERROR_nodeoffline")
, ("Running", "running")
, ("UserDown", "USER_down")
, ("WrongNode", "ERROR_wrongnode")
])
$(THH.makeJSONInstance ''InstanceStatus)
-- | Migration mode.
$(THH.declareLADT ''String "MigrationMode"
[ ("MigrationLive", "live")
, ("MigrationNonLive", "non-live")
])
$(THH.makeJSONInstance ''MigrationMode)
-- | Verify optional checks.
$(THH.declareLADT ''String "VerifyOptionalChecks"
[ ("VerifyNPlusOneMem", "nplusone_mem")
])
$(THH.makeJSONInstance ''VerifyOptionalChecks)
-- | Cluster verify error codes.
$(THH.declareLADT ''String "CVErrorCode"
[ ("CvECLUSTERCFG", "ECLUSTERCFG")
, ("CvECLUSTERCERT", "ECLUSTERCERT")
, ("CvECLUSTERCLIENTCERT", "ECLUSTERCLIENTCERT")
, ("CvECLUSTERFILECHECK", "ECLUSTERFILECHECK")
, ("CvECLUSTERDANGLINGNODES", "ECLUSTERDANGLINGNODES")
, ("CvECLUSTERDANGLINGINST", "ECLUSTERDANGLINGINST")
, ("CvEINSTANCEBADNODE", "EINSTANCEBADNODE")
, ("CvEINSTANCEDOWN", "EINSTANCEDOWN")
, ("CvEINSTANCELAYOUT", "EINSTANCELAYOUT")
, ("CvEINSTANCEMISSINGDISK", "EINSTANCEMISSINGDISK")
, ("CvEINSTANCEFAULTYDISK", "EINSTANCEFAULTYDISK")
, ("CvEINSTANCEWRONGNODE", "EINSTANCEWRONGNODE")
, ("CvEINSTANCESPLITGROUPS", "EINSTANCESPLITGROUPS")
, ("CvEINSTANCEPOLICY", "EINSTANCEPOLICY")
, ("CvEINSTANCEUNSUITABLENODE", "EINSTANCEUNSUITABLENODE")
, ("CvEINSTANCEMISSINGCFGPARAMETER", "EINSTANCEMISSINGCFGPARAMETER")
, ("CvENODEDRBD", "ENODEDRBD")
, ("CvENODEDRBDVERSION", "ENODEDRBDVERSION")
, ("CvENODEDRBDHELPER", "ENODEDRBDHELPER")
, ("CvENODEFILECHECK", "ENODEFILECHECK")
, ("CvENODEHOOKS", "ENODEHOOKS")
, ("CvENODEHV", "ENODEHV")
, ("CvENODELVM", "ENODELVM")
, ("CvENODEN1", "ENODEN1")
, ("CvENODENET", "ENODENET")
, ("CvENODEOS", "ENODEOS")
, ("CvENODEORPHANINSTANCE", "ENODEORPHANINSTANCE")
, ("CvENODEORPHANLV", "ENODEORPHANLV")
, ("CvENODERPC", "ENODERPC")
, ("CvENODESSH", "ENODESSH")
, ("CvENODEVERSION", "ENODEVERSION")
, ("CvENODESETUP", "ENODESETUP")
, ("CvENODETIME", "ENODETIME")
, ("CvENODEOOBPATH", "ENODEOOBPATH")
, ("CvENODEUSERSCRIPTS", "ENODEUSERSCRIPTS")
, ("CvENODEFILESTORAGEPATHS", "ENODEFILESTORAGEPATHS")
, ("CvENODEFILESTORAGEPATHUNUSABLE", "ENODEFILESTORAGEPATHUNUSABLE")
, ("CvENODESHAREDFILESTORAGEPATHUNUSABLE",
"ENODESHAREDFILESTORAGEPATHUNUSABLE")
, ("CvENODEGLUSTERSTORAGEPATHUNUSABLE",
"ENODEGLUSTERSTORAGEPATHUNUSABLE")
, ("CvEGROUPDIFFERENTPVSIZE", "EGROUPDIFFERENTPVSIZE")
, ("CvEEXTAGS", "EEXTAGS")
])
$(THH.makeJSONInstance ''CVErrorCode)
-- | Dynamic device modification, just add/remove version.
$(THH.declareLADT ''String "DdmSimple"
[ ("DdmSimpleAdd", "add")
, ("DdmSimpleAttach", "attach")
, ("DdmSimpleRemove", "remove")
, ("DdmSimpleDetach", "detach")
])
$(THH.makeJSONInstance ''DdmSimple)
-- | Dynamic device modification, all operations version.
--
-- TODO: DDM_SWAP, DDM_MOVE?
$(THH.declareLADT ''String "DdmFull"
[ ("DdmFullAdd", "add")
, ("DdmFullAttach", "attach")
, ("DdmFullRemove", "remove")
, ("DdmFullDetach", "detach")
, ("DdmFullModify", "modify")
])
$(THH.makeJSONInstance ''DdmFull)
-- | Hypervisor type definitions.
$(THH.declareLADT ''String "Hypervisor"
[ ("Kvm", "kvm")
, ("XenPvm", "xen-pvm")
, ("Chroot", "chroot")
, ("XenHvm", "xen-hvm")
, ("Lxc", "lxc")
, ("Fake", "fake")
])
$(THH.makeJSONInstance ''Hypervisor)
instance THH.PyValue Hypervisor where
showValue = show . hypervisorToRaw
instance HasStringRepr Hypervisor where
fromStringRepr = hypervisorFromRaw
toStringRepr = hypervisorToRaw
-- | Oob command type.
$(THH.declareLADT ''String "OobCommand"
[ ("OobHealth", "health")
, ("OobPowerCycle", "power-cycle")
, ("OobPowerOff", "power-off")
, ("OobPowerOn", "power-on")
, ("OobPowerStatus", "power-status")
])
$(THH.makeJSONInstance ''OobCommand)
-- | Oob command status
$(THH.declareLADT ''String "OobStatus"
[ ("OobStatusCritical", "CRITICAL")
, ("OobStatusOk", "OK")
, ("OobStatusUnknown", "UNKNOWN")
, ("OobStatusWarning", "WARNING")
])
$(THH.makeJSONInstance ''OobStatus)
-- | Storage type.
$(THH.declareLADT ''String "StorageType"
[ ("StorageFile", "file")
, ("StorageSharedFile", "sharedfile")
, ("StorageGluster", "gluster")
, ("StorageLvmPv", "lvm-pv")
, ("StorageLvmVg", "lvm-vg")
, ("StorageDiskless", "diskless")
, ("StorageBlock", "blockdev")
, ("StorageRados", "rados")
, ("StorageExt", "ext")
])
$(THH.makeJSONInstance ''StorageType)
-- | Storage keys are identifiers for storage units. Their content varies
-- depending on the storage type, for example a storage key for LVM storage
-- is the volume group name.
type StorageKey = String
-- | Storage parameters
type SPExclusiveStorage = Bool
-- | Storage units without storage-type-specific parameters
data StorageUnitRaw = SURaw StorageType StorageKey
-- | Full storage unit with storage-type-specific parameters
data StorageUnit = SUFile StorageKey
| SUSharedFile StorageKey
| SUGluster StorageKey
| SULvmPv StorageKey SPExclusiveStorage
| SULvmVg StorageKey SPExclusiveStorage
| SUDiskless StorageKey
| SUBlock StorageKey
| SURados StorageKey
| SUExt StorageKey
deriving (Eq)
instance Show StorageUnit where
show (SUFile key) = showSUSimple StorageFile key
show (SUSharedFile key) = showSUSimple StorageSharedFile key
show (SUGluster key) = showSUSimple StorageGluster key
show (SULvmPv key es) = showSULvm StorageLvmPv key es
show (SULvmVg key es) = showSULvm StorageLvmVg key es
show (SUDiskless key) = showSUSimple StorageDiskless key
show (SUBlock key) = showSUSimple StorageBlock key
show (SURados key) = showSUSimple StorageRados key
show (SUExt key) = showSUSimple StorageExt key
instance JSON StorageUnit where
showJSON (SUFile key) = showJSON (StorageFile, key, []::[String])
showJSON (SUSharedFile key) = showJSON (StorageSharedFile, key, []::[String])
showJSON (SUGluster key) = showJSON (StorageGluster, key, []::[String])
showJSON (SULvmPv key es) = showJSON (StorageLvmPv, key, [es])
showJSON (SULvmVg key es) = showJSON (StorageLvmVg, key, [es])
showJSON (SUDiskless key) = showJSON (StorageDiskless, key, []::[String])
showJSON (SUBlock key) = showJSON (StorageBlock, key, []::[String])
showJSON (SURados key) = showJSON (StorageRados, key, []::[String])
showJSON (SUExt key) = showJSON (StorageExt, key, []::[String])
-- FIXME: add readJSON implementation
readJSON = fail "Not implemented"
-- | Composes a string representation of storage types without
-- storage parameters
showSUSimple :: StorageType -> StorageKey -> String
showSUSimple st sk = show (storageTypeToRaw st, sk, []::[String])
-- | Composes a string representation of the LVM storage types
showSULvm :: StorageType -> StorageKey -> SPExclusiveStorage -> String
showSULvm st sk es = show (storageTypeToRaw st, sk, [es])
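-- For example, @showSUSimple StorageLvmVg "xenvg"@ yields the string
-- ("lvm-vg","xenvg",[]) and @showSULvm StorageLvmVg "xenvg" True@ yields
-- ("lvm-vg","xenvg",[True]) (the volume group name is only illustrative).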
-- | Mapping from disk templates to storage types.
diskTemplateToStorageType :: DiskTemplate -> StorageType
diskTemplateToStorageType DTExt = StorageExt
diskTemplateToStorageType DTFile = StorageFile
diskTemplateToStorageType DTSharedFile = StorageSharedFile
diskTemplateToStorageType DTDrbd8 = StorageLvmVg
diskTemplateToStorageType DTPlain = StorageLvmVg
diskTemplateToStorageType DTRbd = StorageRados
diskTemplateToStorageType DTDiskless = StorageDiskless
diskTemplateToStorageType DTBlock = StorageBlock
diskTemplateToStorageType DTGluster = StorageGluster
-- | Equips a raw storage unit with its parameters
addParamsToStorageUnit :: SPExclusiveStorage -> StorageUnitRaw -> StorageUnit
addParamsToStorageUnit _ (SURaw StorageBlock key) = SUBlock key
addParamsToStorageUnit _ (SURaw StorageDiskless key) = SUDiskless key
addParamsToStorageUnit _ (SURaw StorageExt key) = SUExt key
addParamsToStorageUnit _ (SURaw StorageFile key) = SUFile key
addParamsToStorageUnit _ (SURaw StorageSharedFile key) = SUSharedFile key
addParamsToStorageUnit _ (SURaw StorageGluster key) = SUGluster key
addParamsToStorageUnit es (SURaw StorageLvmPv key) = SULvmPv key es
addParamsToStorageUnit es (SURaw StorageLvmVg key) = SULvmVg key es
addParamsToStorageUnit _ (SURaw StorageRados key) = SURados key
-- | Node evac modes.
--
-- This is part of the 'IAllocator' interface and it is used, for
-- example, in 'Ganeti.HTools.Loader.RqType'. However, it must reside
-- in this module, and not in 'Ganeti.HTools.Types', because it is
-- also used by 'Ganeti.Constants'.
$(THH.declareLADT ''String "EvacMode"
[ ("ChangePrimary", "primary-only")
, ("ChangeSecondary", "secondary-only")
, ("ChangeAll", "all")
])
$(THH.makeJSONInstance ''EvacMode)
-- | The file driver type.
$(THH.declareLADT ''String "FileDriver"
[ ("FileLoop", "loop")
, ("FileBlktap", "blktap")
, ("FileBlktap2", "blktap2")
])
$(THH.makeJSONInstance ''FileDriver)
-- | The instance create mode.
$(THH.declareLADT ''String "InstCreateMode"
[ ("InstCreate", "create")
, ("InstImport", "import")
, ("InstRemoteImport", "remote-import")
])
$(THH.makeJSONInstance ''InstCreateMode)
-- | Reboot type.
$(THH.declareLADT ''String "RebootType"
[ ("RebootSoft", "soft")
, ("RebootHard", "hard")
, ("RebootFull", "full")
])
$(THH.makeJSONInstance ''RebootType)
-- | Export modes.
$(THH.declareLADT ''String "ExportMode"
[ ("ExportModeLocal", "local")
, ("ExportModeRemote", "remote")
])
$(THH.makeJSONInstance ''ExportMode)
-- | IAllocator run types (OpTestIAllocator).
$(THH.declareLADT ''String "IAllocatorTestDir"
[ ("IAllocatorDirIn", "in")
, ("IAllocatorDirOut", "out")
])
$(THH.makeJSONInstance ''IAllocatorTestDir)
-- | IAllocator mode. FIXME: use this in "HTools.Backend.IAlloc".
$(THH.declareLADT ''String "IAllocatorMode"
[ ("IAllocatorAlloc", "allocate")
, ("IAllocatorAllocateSecondary", "allocate-secondary")
, ("IAllocatorMultiAlloc", "multi-allocate")
, ("IAllocatorReloc", "relocate")
, ("IAllocatorNodeEvac", "node-evacuate")
, ("IAllocatorChangeGroup", "change-group")
])
$(THH.makeJSONInstance ''IAllocatorMode)
-- | Network mode.
$(THH.declareLADT ''String "NICMode"
[ ("NMBridged", "bridged")
, ("NMRouted", "routed")
, ("NMOvs", "openvswitch")
, ("NMPool", "pool")
])
$(THH.makeJSONInstance ''NICMode)
-- | The JobStatus data type. Note that this is ordered especially
-- such that greater\/lesser comparison on values of this type makes
-- sense.
$(THH.declareLADT ''String "JobStatus"
[ ("JOB_STATUS_QUEUED", "queued")
, ("JOB_STATUS_WAITING", "waiting")
, ("JOB_STATUS_CANCELING", "canceling")
, ("JOB_STATUS_RUNNING", "running")
, ("JOB_STATUS_CANCELED", "canceled")
, ("JOB_STATUS_SUCCESS", "success")
, ("JOB_STATUS_ERROR", "error")
])
$(THH.makeJSONInstance ''JobStatus)
-- | Finalized job status.
$(THH.declareLADT ''String "FinalizedJobStatus"
[ ("JobStatusCanceled", "canceled")
, ("JobStatusSuccessful", "success")
, ("JobStatusFailed", "error")
])
$(THH.makeJSONInstance ''FinalizedJobStatus)
-- | The Ganeti job type.
newtype JobId = JobId { fromJobId :: Int }
deriving (Show, Eq, Ord)
-- | Builds a job ID.
makeJobId :: (Monad m) => Int -> m JobId
makeJobId i | i >= 0 = return $ JobId i
| otherwise = fail $ "Invalid value for job ID ' " ++ show i ++ "'"
-- | Builds a job ID from a string.
makeJobIdS :: (Monad m) => String -> m JobId
makeJobIdS s = tryRead "parsing job id" s >>= makeJobId
-- | Parses a job ID.
parseJobId :: (Monad m) => JSON.JSValue -> m JobId
parseJobId (JSON.JSString x) = makeJobIdS $ JSON.fromJSString x
parseJobId (JSON.JSRational _ x) =
if denominator x /= 1
then fail $ "Got fractional job ID from master daemon?! Value:" ++ show x
-- FIXME: potential integer overflow here on 32-bit platforms
else makeJobId . fromIntegral . numerator $ x
parseJobId x = fail $ "Wrong type/value for job id: " ++ show x
instance JSON.JSON JobId where
showJSON = JSON.showJSON . fromJobId
readJSON = parseJobId
-- | Relative job ID type alias.
type RelativeJobId = Negative Int
-- | Job ID dependency.
data JobIdDep = JobDepRelative RelativeJobId
| JobDepAbsolute JobId
deriving (Show, Eq, Ord)
instance JSON.JSON JobIdDep where
showJSON (JobDepRelative i) = showJSON i
showJSON (JobDepAbsolute i) = showJSON i
readJSON v =
case JSON.readJSON v::JSON.Result (Negative Int) of
-- first try relative dependency, usually most common
JSON.Ok r -> return $ JobDepRelative r
JSON.Error _ -> liftM JobDepAbsolute (parseJobId v)
-- | From job ID dependency and job ID, compute the absolute dependency.
absoluteJobIdDep :: (Monad m) => JobIdDep -> JobId -> m JobIdDep
absoluteJobIdDep (JobDepAbsolute jid) _ = return $ JobDepAbsolute jid
absoluteJobIdDep (JobDepRelative rjid) jid =
liftM JobDepAbsolute . makeJobId $ fromJobId jid + fromNegative rjid
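-- For example (sketched in the 'Maybe' monad), a relative dependency of -1
-- resolved against job 42 becomes the absolute job 41:
-- absoluteJobIdDep (JobDepRelative (Negative (-1))) (JobId 42)
-- == Just (JobDepAbsolute (JobId 41))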
-- | Job Dependency type.
data JobDependency = JobDependency JobIdDep [FinalizedJobStatus]
deriving (Show, Eq, Ord)
instance JSON JobDependency where
showJSON (JobDependency dep status) = showJSON (dep, status)
readJSON = liftM (uncurry JobDependency) . readJSON
-- | From job dependency and job id compute an absolute job dependency.
absoluteJobDependency :: (Monad m) => JobDependency -> JobId -> m JobDependency
absoluteJobDependency (JobDependency jdep fstats) jid =
liftM (flip JobDependency fstats) $ absoluteJobIdDep jdep jid
-- | From a job dependency get the absolute job id it depends on,
-- if given absolutely.
getJobIdFromDependency :: JobDependency -> [JobId]
getJobIdFromDependency (JobDependency (JobDepAbsolute jid) _) = [jid]
getJobIdFromDependency _ = []
-- | Valid opcode priorities for submit.
$(THH.declareIADT "OpSubmitPriority"
[ ("OpPrioLow", 'ConstantUtils.priorityLow)
, ("OpPrioNormal", 'ConstantUtils.priorityNormal)
, ("OpPrioHigh", 'ConstantUtils.priorityHigh)
])
$(THH.makeJSONInstance ''OpSubmitPriority)
-- | Parse submit priorities from a string.
parseSubmitPriority :: (Monad m) => String -> m OpSubmitPriority
parseSubmitPriority "low" = return OpPrioLow
parseSubmitPriority "normal" = return OpPrioNormal
parseSubmitPriority "high" = return OpPrioHigh
parseSubmitPriority str = fail $ "Unknown priority '" ++ str ++ "'"
-- | Format a submit priority as string.
fmtSubmitPriority :: OpSubmitPriority -> String
fmtSubmitPriority OpPrioLow = "low"
fmtSubmitPriority OpPrioNormal = "normal"
fmtSubmitPriority OpPrioHigh = "high"
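-- 'parseSubmitPriority' and 'fmtSubmitPriority' are mutually inverse on valid
-- values; for example, parseSubmitPriority "high" == Just OpPrioHigh in the
-- 'Maybe' monad.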
-- | Our ADT for the OpCode status at runtime (while in a job).
$(THH.declareLADT ''String "OpStatus"
[ ("OP_STATUS_QUEUED", "queued")
, ("OP_STATUS_WAITING", "waiting")
, ("OP_STATUS_CANCELING", "canceling")
, ("OP_STATUS_RUNNING", "running")
, ("OP_STATUS_CANCELED", "canceled")
, ("OP_STATUS_SUCCESS", "success")
, ("OP_STATUS_ERROR", "error")
])
$(THH.makeJSONInstance ''OpStatus)
-- | Type for the job message type.
$(THH.declareLADT ''String "ELogType"
[ ("ELogMessage", "message")
, ("ELogMessageList", "message-list")
, ("ELogRemoteImport", "remote-import")
, ("ELogJqueueTest", "jqueue-test")
, ("ELogDelayTest", "delay-test")
])
$(THH.makeJSONInstance ''ELogType)
-- | Type of one element of a reason trail, of form
-- @(source, reason, timestamp)@.
type ReasonElem = (String, String, Integer)
-- | Type representing a reason trail.
type ReasonTrail = [ReasonElem]
-- | The VTYPES, a mini-type system in Python.
$(THH.declareLADT ''String "VType"
[ ("VTypeString", "string")
, ("VTypeMaybeString", "maybe-string")
, ("VTypeBool", "bool")
, ("VTypeSize", "size")
, ("VTypeInt", "int")
, ("VTypeFloat", "float")
])
$(THH.makeJSONInstance ''VType)
instance THH.PyValue VType where
showValue = THH.showValue . vTypeToRaw
-- * Node role type
$(THH.declareLADT ''String "NodeRole"
[ ("NROffline", "O")
, ("NRDrained", "D")
, ("NRRegular", "R")
, ("NRCandidate", "C")
, ("NRMaster", "M")
])
$(THH.makeJSONInstance ''NodeRole)
-- | The description of the node role.
roleDescription :: NodeRole -> String
roleDescription NROffline = "offline"
roleDescription NRDrained = "drained"
roleDescription NRRegular = "regular"
roleDescription NRCandidate = "master candidate"
roleDescription NRMaster = "master"
-- * Disk types
$(THH.declareLADT ''String "DiskMode"
[ ("DiskRdOnly", "ro")
, ("DiskRdWr", "rw")
])
$(THH.makeJSONInstance ''DiskMode)
-- | The persistent block driver type. Currently only one type is allowed.
$(THH.declareLADT ''String "BlockDriver"
[ ("BlockDrvManual", "manual")
])
$(THH.makeJSONInstance ''BlockDriver)
-- * Instance types
$(THH.declareLADT ''String "AdminState"
[ ("AdminOffline", "offline")
, ("AdminDown", "down")
, ("AdminUp", "up")
])
$(THH.makeJSONInstance ''AdminState)
$(THH.declareLADT ''String "AdminStateSource"
[ ("AdminSource", "admin")
, ("UserSource", "user")
])
$(THH.makeJSONInstance ''AdminStateSource)
instance THH.PyValue AdminStateSource where
showValue = THH.showValue . adminStateSourceToRaw
-- * Storage field type
$(THH.declareLADT ''String "StorageField"
[ ( "SFUsed", "used")
, ( "SFName", "name")
, ( "SFAllocatable", "allocatable")
, ( "SFFree", "free")
, ( "SFSize", "size")
])
$(THH.makeJSONInstance ''StorageField)
-- * Disk access protocol
$(THH.declareLADT ''String "DiskAccessMode"
[ ( "DiskUserspace", "userspace")
, ( "DiskKernelspace", "kernelspace")
])
$(THH.makeJSONInstance ''DiskAccessMode)
-- | Local disk status
--
-- Python code depends on:
-- DiskStatusOk < DiskStatusUnknown < DiskStatusFaulty
$(THH.declareILADT "LocalDiskStatus"
[ ("DiskStatusOk", 1)
, ("DiskStatusSync", 2)
, ("DiskStatusUnknown", 3)
, ("DiskStatusFaulty", 4)
])
localDiskStatusName :: LocalDiskStatus -> String
localDiskStatusName DiskStatusFaulty = "faulty"
localDiskStatusName DiskStatusOk = "ok"
localDiskStatusName DiskStatusSync = "syncing"
localDiskStatusName DiskStatusUnknown = "unknown"
-- | Replace disks type.
$(THH.declareLADT ''String "ReplaceDisksMode"
[ -- Replace disks on primary
("ReplaceOnPrimary", "replace_on_primary")
-- Replace disks on secondary
, ("ReplaceOnSecondary", "replace_on_secondary")
-- Change secondary node
, ("ReplaceNewSecondary", "replace_new_secondary")
, ("ReplaceAuto", "replace_auto")
])
$(THH.makeJSONInstance ''ReplaceDisksMode)
-- | Basic timeouts for RPC calls.
$(THH.declareILADT "RpcTimeout"
[ ("Urgent", 60) -- 1 minute
, ("Fast", 5 * 60) -- 5 minutes
, ("Normal", 15 * 60) -- 15 minutes
, ("Slow", 3600) -- 1 hour
, ("FourHours", 4 * 3600) -- 4 hours
, ("OneDay", 86400) -- 1 day
])
-- | Hotplug action.
$(THH.declareLADT ''String "HotplugAction"
[ ("HAAdd", "hotadd")
, ("HARemove", "hotremove")
, ("HAMod", "hotmod")
])
$(THH.makeJSONInstance ''HotplugAction)
-- | Hotplug Device Target.
$(THH.declareLADT ''String "HotplugTarget"
[ ("HTDisk", "disk")
, ("HTNic", "nic")
])
$(THH.makeJSONInstance ''HotplugTarget)
-- | SSH key type.
$(THH.declareLADT ''String "SshKeyType"
[ ("RSA", "rsa")
, ("DSA", "dsa")
, ("ECDSA", "ecdsa")
])
$(THH.makeJSONInstance ''SshKeyType)
-- * Private type and instances
redacted :: String
redacted = "<redacted>"
-- | A container for values that should be happy to be manipulated yet
-- refuses to be shown unless explicitly requested.
newtype Private a = Private { getPrivate :: a }
deriving (Eq, Ord, Functor)
instance (Show a, JSON.JSON a) => JSON.JSON (Private a) where
readJSON = liftM Private . JSON.readJSON
showJSON (Private x) = JSON.showJSON x
-- | "Show" the value of the field.
--
-- It would be better not to implement this at all.
-- Alas, Show OpCode requires Show Private.
instance Show a => Show (Private a) where
show _ = redacted
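-- For example, show (Private "s3cr3t") yields "<redacted>", while 'getPrivate'
-- still returns the wrapped value.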
instance THH.PyValue a => THH.PyValue (Private a) where
showValue (Private x) = "Private(" ++ THH.showValue x ++ ")"
instance Applicative Private where
pure = Private
Private f <*> Private x = Private (f x)
instance Monad Private where
(Private x) >>= f = f x
return = Private
showPrivateJSObject :: (JSON.JSON a) =>
[(String, a)] -> JSON.JSObject (Private JSON.JSValue)
showPrivateJSObject value = JSON.toJSObject $ map f value
where f (k, v) = (k, Private $ JSON.showJSON v)
-- * Secret type and instances
-- | A container for values that behaves like Private, but doesn't leak the
-- value through showJSON
newtype Secret a = Secret { getSecret :: a }
deriving (Eq, Ord, Functor)
instance (Show a, JSON.JSON a) => JSON.JSON (Secret a) where
readJSON = liftM Secret . JSON.readJSON
showJSON = const . JSON.JSString $ JSON.toJSString redacted
instance Show a => Show (Secret a) where
show _ = redacted
instance THH.PyValue a => THH.PyValue (Secret a) where
showValue (Secret x) = "Secret(" ++ THH.showValue x ++ ")"
instance Applicative Secret where
pure = Secret
Secret f <*> Secret x = Secret (f x)
instance Monad Secret where
(Secret x) >>= f = f x
return = Secret
-- | We return "\<redacted\>" here to satisfy the idempotence of serialization
-- and deserialization, although this will impact the meaningfulness of secret
-- parameters within configuration tests.
showSecretJSObject :: (JSON.JSON a) =>
[(String, a)] -> JSON.JSObject (Secret JSON.JSValue)
showSecretJSObject value = JSON.toJSObject $ map f value
where f (k, _) = (k, Secret $ JSON.showJSON redacted)
revealValInJSObject :: JSON.JSObject (Secret JSON.JSValue)
-> JSON.JSObject (Private JSON.JSValue)
revealValInJSObject object = JSON.toJSObject . map f $ JSON.fromJSObject object
where f (k, v) = (k, Private $ getSecret v)
-- | The hypervisor parameter type. This is currently a simple map,
-- without type checking on key/value pairs.
type HvParams = Container JSON.JSValue
-- | The OS parameters type. This is, and will remain, a string
-- container, since the keys are dynamically declared by the OSes, and
-- the values are always strings.
type OsParams = Container String
type OsParamsPrivate = Container (Private String)
-- | Class of objects that have timestamps.
class TimeStampObject a where
cTimeOf :: a -> ClockTime
mTimeOf :: a -> ClockTime
-- | Class of objects that have an UUID.
class UuidObject a where
uuidOf :: a -> String
-- | Class of objects that can be forthcoming.
class ForthcomingObject a where
isForthcoming :: a -> Bool
-- | Class of object that have a serial number.
class SerialNoObject a where
serialOf :: a -> Int
-- | Class of objects that have tags.
class TagsObject a where
tagsOf :: a -> Set.Set String
| yiannist/ganeti | src/Ganeti/Types.hs | bsd-2-clause | 35,219 | 0 | 11 | 6,923 | 8,176 | 4,593 | 3,583 | 718 | 2 |
module BPython.AST where
import BPython.PyType
import Data.Maybe (maybeToList)
-- FIXME : AnyFunction is a debug hack
data AST = ID String | Literal PythonType | UnaryOp String ASTLoc | BinaryOp String ASTLoc ASTLoc |
FunctionCall ASTLoc [ASTLoc] | Assignment ASTLoc ASTLoc | IfThenElse [(ASTLoc, ASTLoc)] (Maybe ASTLoc) |
While ASTLoc ASTLoc | Sequence [ASTLoc] | FunctionDefinition String [String] ASTLoc | Return ASTLoc |
List [ASTLoc] | Index ASTLoc ASTLoc | Slice ASTLoc ASTLoc ASTLoc | Break | Continue | For String ASTLoc ASTLoc | Void
deriving Show
type ASTLoc = (AST, (Int, Int))
mergeLocs :: (Int, Int) -> (Int, Int) -> (Int, Int)
mergeLocs (x, y) (x', y') = (x `min` x', y `max` y')
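-- For example, mergeLocs (3,9) (5,4) == (3,9): the minimum of the first
-- components and the maximum of the second.
-- extractNodes collects every node (with its location) whose AST satisfies the
-- predicate, walking the whole tree; e.g. with a hypothetical predicate
-- isId (ID _) = True; isId _ = False, extractNodes isId returns all identifiers.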
extractNodes :: (AST -> Bool) -> ASTLoc -> [ASTLoc]
extractNodes p al@(ast, loc)
| p ast = al : extractNodes' ast
| otherwise = extractNodes' ast
where
extractNodes' (UnaryOp _ arg) = extractNodes p arg
extractNodes' (BinaryOp _ l r) = extractNodes p l ++ extractNodes p r
extractNodes' (FunctionCall fun args) = extractNodes p fun ++ concatMap (extractNodes p) args
extractNodes' (Assignment lvalue rvalue) = extractNodes p lvalue ++ extractNodes p rvalue
extractNodes' (IfThenElse pairs elseclause) = concatMap (\(c, b) -> extractNodes p c ++ extractNodes p b) pairs ++ concat (maybeToList (fmap (extractNodes p) elseclause))
extractNodes' (While cond body) = extractNodes p cond ++ extractNodes p body
extractNodes' (Sequence stmts) = concatMap (extractNodes p) stmts
extractNodes' (FunctionDefinition _ _ body) = extractNodes p body
extractNodes' (Return arg) = extractNodes p arg
extractNodes' (List args) = concatMap (extractNodes p) args
extractNodes' (Index list index) = extractNodes p list ++ extractNodes p index
extractNodes' (Slice list start end) = extractNodes p list ++ extractNodes p start ++ extractNodes p end
extractNodes' _ = []
| feelout/mscthesiscode | BPython/AST.hs | bsd-3-clause | 1,892 | 8 | 14 | 337 | 735 | 386 | 349 | 28 | 13 |
module Flat.Instances.DList
()
where
import Flat.Class
import Flat.Instances.Mono
import Data.DList
-- $setup
-- >>> import Flat.Instances.Test
-- >>> import Flat.Instances.Base()
-- >>> import Flat.Run
-- >>> import Data.DList
-- >>> let test = tstBits
{-|
>>> test (Data.DList.fromList [7::Word,7])
(True,19,"10000011 11000001 110")
>>> let l = [7::Word,7] in flat (Data.DList.fromList l) == flat l
True
-}
instance Flat a => Flat (DList a) where
size = sizeList . toList
encode = encodeList . toList
decode = fromList <$> decodeList
| tittoassini/flat | src/Flat/Instances/DList.hs | bsd-3-clause | 581 | 0 | 7 | 124 | 81 | 49 | 32 | 9 | 0 |
{-# LANGUAGE CPP, TupleSections #-}
-- |Vectorisation of expressions.
module Vectorise.Exp
( -- * Vectorise right-hand sides of toplevel bindings
vectTopExpr
, vectTopExprs
, vectScalarFun
, vectScalarDFun
)
where
#include "HsVersions.h"
import Vectorise.Type.Type
import Vectorise.Var
import Vectorise.Convert
import Vectorise.Vect
import Vectorise.Env
import Vectorise.Monad
import Vectorise.Builtins
import Vectorise.Utils
import CoreUtils
import MkCore
import CoreSyn
import CoreFVs
import Class
import DataCon
import TyCon
import TcType
import Type
import TyCoRep
import Var
import VarEnv
import VarSet
import NameSet
import Id
import BasicTypes( isStrongLoopBreaker )
import Literal
import TysPrim
import Outputable
import FastString
import DynFlags
import Util
#if __GLASGOW_HASKELL__ < 709
import MonadUtils
#endif
import Control.Monad
import Data.Maybe
import Data.List
-- Main entry point to vectorise expressions -----------------------------------
-- |Vectorise a polymorphic expression that forms a *non-recursive* binding.
--
-- Return 'Nothing' if the expression is scalar; otherwise, the first component of the result
-- (which is of type 'Bool') indicates whether the expression is parallel (i.e., whether it is
-- tagged as 'VIParr').
--
-- We treat the non-recursive case as a special case, as it doesn't require computing
-- vectorisation information twice.
--
vectTopExpr :: Var -> CoreExpr -> VM (Maybe (Bool, Inline, CoreExpr))
vectTopExpr var expr
= do
{ exprVI <- encapsulateScalars <=< vectAvoidInfo emptyVarSet . freeVars $ expr
; if isVIEncaps exprVI
then
return Nothing
else do
{ vExpr <- closedV $
inBind var $
vectAnnPolyExpr False exprVI
; inline <- computeInline exprVI
; return $ Just (isVIParr exprVI, inline, vectorised vExpr)
}
}
-- Compute the inlining hint for the right-hand side of a top-level binding.
--
computeInline :: CoreExprWithVectInfo -> VM Inline
computeInline ((_, VIDict), _) = return $ DontInline
computeInline (_, AnnTick _ expr) = computeInline expr
computeInline expr@(_, AnnLam _ _) = Inline <$> polyArity tvs
where
(tvs, _) = collectAnnTypeBinders expr
computeInline _expr = return $ DontInline
-- |Vectorise a recursive group of top-level polymorphic expressions.
--
-- Return 'Nothing' if the expression group is scalar; otherwise, the first component of the result
-- (which is of type 'Bool') indicates whether the expressions are parallel (i.e., whether they are
-- tagged as 'VIParr').
--
vectTopExprs :: [(Var, CoreExpr)] -> VM (Maybe (Bool, [(Inline, CoreExpr)]))
vectTopExprs binds
= do
{ exprVIs <- mapM (vectAvoidAndEncapsulate emptyVarSet) exprs
; if all isVIEncaps exprVIs
-- if all bindings are scalar => don't vectorise this group of bindings
then return Nothing
else do
{ -- non-scalar bindings need to be vectorised
; let areVIParr = any isVIParr exprVIs
; revised_exprVIs <- if not areVIParr
-- if no binding is parallel => 'exprVIs' is ready for vectorisation
then return exprVIs
-- if any binding is parallel => recompute the vectorisation info
else mapM (vectAvoidAndEncapsulate (mkVarSet vars)) exprs
; vExprs <- zipWithM vect vars revised_exprVIs
; return $ Just (areVIParr, vExprs)
}
}
where
(vars, exprs) = unzip binds
vectAvoidAndEncapsulate pvs = encapsulateScalars <=< vectAvoidInfo pvs . freeVars
vect var exprVI
= do
{ vExpr <- closedV $
inBind var $
vectAnnPolyExpr (isStrongLoopBreaker $ idOccInfo var) exprVI
; inline <- computeInline exprVI
; return (inline, vectorised vExpr)
}
-- |Vectorise a polymorphic expression annotated with vectorisation information.
--
-- The special case of dictionary functions is currently handled separately. (Would be neater to
-- integrate them, though!)
--
vectAnnPolyExpr :: Bool -> CoreExprWithVectInfo -> VM VExpr
vectAnnPolyExpr loop_breaker (_, AnnTick tickish expr)
-- traverse through ticks
= vTick tickish <$> vectAnnPolyExpr loop_breaker expr
vectAnnPolyExpr loop_breaker expr
| isVIDict expr
-- special case the right-hand side of dictionary functions
= (, undefined) <$> vectDictExpr (deAnnotate expr)
| otherwise
-- collect and vectorise type abstractions; then, descend into the body
= polyAbstract tvs $ \args ->
mapVect (mkLams $ tvs ++ args) <$> vectFnExpr False loop_breaker mono
where
(tvs, mono) = collectAnnTypeBinders expr
-- Encapsulate every purely sequential subexpression of a (potentially) parallel expression into a
-- lambda abstraction over all its free variables followed by the corresponding application to those
-- variables. We can, then, avoid the vectorisation of the encapsulated subexpressions.
--
-- Preconditions:
--
-- * All free variables and the result type must be /simple/ types.
-- * The expression is sufficiently complex (to warrant special treatment). For now, that is
-- every expression that is not constant and contains at least one operation.
--
--
-- The user has an option to choose between aggressive and minimal vectorisation avoidance. With
-- minimal vectorisation avoidance, we only encapsulate individual scalar operations. With
-- aggressive vectorisation avoidance, we encapsulate subexpressions that are as big as possible.
--
encapsulateScalars :: CoreExprWithVectInfo -> VM CoreExprWithVectInfo
encapsulateScalars ce@(_, AnnType _ty)
= return ce
encapsulateScalars ce@((_, VISimple), AnnVar _v)
-- NB: diverts from the paper: encapsulate scalar variables (including functions)
= liftSimpleAndCase ce
encapsulateScalars ce@(_, AnnVar _v)
= return ce
encapsulateScalars ce@(_, AnnLit _)
= return ce
encapsulateScalars ((fvs, vi), AnnTick tck expr)
= do
{ encExpr <- encapsulateScalars expr
; return ((fvs, vi), AnnTick tck encExpr)
}
encapsulateScalars ce@((fvs, vi), AnnLam bndr expr)
= do
{ vectAvoid <- isVectAvoidanceAggressive
; varsS <- allScalarVarTypeSet fvs
-- NB: diverts from the paper: we need to check the scalarness of bound variables as well,
-- as 'vectScalarFun' will handle them just the same as those introduced for the 'fvs'
-- by encapsulation.
; bndrsS <- allScalarVarType bndrs
; case (vi, vectAvoid && varsS && bndrsS) of
(VISimple, True) -> liftSimpleAndCase ce
_ -> do
{ encExpr <- encapsulateScalars expr
; return ((fvs, vi), AnnLam bndr encExpr)
}
}
where
(bndrs, _) = collectAnnBndrs ce
encapsulateScalars ce@((fvs, vi), AnnApp ce1 ce2)
= do
{ vectAvoid <- isVectAvoidanceAggressive
; varsS <- allScalarVarTypeSet fvs
; case (vi, (vectAvoid || isSimpleApplication ce) && varsS) of
(VISimple, True) -> liftSimpleAndCase ce
_ -> do
{ encCe1 <- encapsulateScalars ce1
; encCe2 <- encapsulateScalars ce2
; return ((fvs, vi), AnnApp encCe1 encCe2)
}
}
where
isSimpleApplication :: CoreExprWithVectInfo -> Bool
isSimpleApplication (_, AnnTick _ ce) = isSimpleApplication ce
isSimpleApplication (_, AnnCast ce _) = isSimpleApplication ce
isSimpleApplication ce | isSimple ce = True
isSimpleApplication (_, AnnApp ce1 ce2) = isSimple ce1 && isSimpleApplication ce2
isSimpleApplication _ = False
--
isSimple :: CoreExprWithVectInfo -> Bool
isSimple (_, AnnType {}) = True
isSimple (_, AnnVar {}) = True
isSimple (_, AnnLit {}) = True
isSimple (_, AnnTick _ ce) = isSimple ce
isSimple (_, AnnCast ce _) = isSimple ce
isSimple _ = False
encapsulateScalars ce@((fvs, vi), AnnCase scrut bndr ty alts)
= do
{ vectAvoid <- isVectAvoidanceAggressive
; varsS <- allScalarVarTypeSet fvs
; case (vi, vectAvoid && varsS) of
(VISimple, True) -> liftSimpleAndCase ce
_ -> do
{ encScrut <- encapsulateScalars scrut
; encAlts <- mapM encAlt alts
; return ((fvs, vi), AnnCase encScrut bndr ty encAlts)
}
}
where
encAlt (con, bndrs, expr) = (con, bndrs,) <$> encapsulateScalars expr
encapsulateScalars ce@((fvs, vi), AnnLet (AnnNonRec bndr expr1) expr2)
= do
{ vectAvoid <- isVectAvoidanceAggressive
; varsS <- allScalarVarTypeSet fvs
; case (vi, vectAvoid && varsS) of
(VISimple, True) -> liftSimpleAndCase ce
_ -> do
{ encExpr1 <- encapsulateScalars expr1
; encExpr2 <- encapsulateScalars expr2
; return ((fvs, vi), AnnLet (AnnNonRec bndr encExpr1) encExpr2)
}
}
encapsulateScalars ce@((fvs, vi), AnnLet (AnnRec binds) expr)
= do
{ vectAvoid <- isVectAvoidanceAggressive
; varsS <- allScalarVarTypeSet fvs
; case (vi, vectAvoid && varsS) of
(VISimple, True) -> liftSimpleAndCase ce
_ -> do
{ encBinds <- mapM encBind binds
; encExpr <- encapsulateScalars expr
; return ((fvs, vi), AnnLet (AnnRec encBinds) encExpr)
}
}
where
encBind (bndr, expr) = (bndr,) <$> encapsulateScalars expr
encapsulateScalars ((fvs, vi), AnnCast expr coercion)
= do
{ encExpr <- encapsulateScalars expr
; return ((fvs, vi), AnnCast encExpr coercion)
}
encapsulateScalars _
= panic "Vectorise.Exp.encapsulateScalars: unknown constructor"
-- Lambda-lift the given simple expression and apply it to the abstracted free variables.
--
-- If the expression is a case expression scrutinising anything but a scalar type, then lift
-- each alternative individually.
--
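-- For example (a sketch, assuming 'Maybe Int' is not a 'Scalar' type): in
-- > case e of { Just y -> y + 1; Nothing -> 0 }
-- the case expression is not lifted as a whole; instead, each alternative body
-- ('y + 1' and '0') is lambda-lifted individually and the case structure stays in place.
--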
liftSimpleAndCase :: CoreExprWithVectInfo -> VM CoreExprWithVectInfo
liftSimpleAndCase aexpr@((fvs, _vi), AnnCase expr bndr t alts)
= do
{ vi <- vectAvoidInfoTypeOf expr
; if (vi == VISimple)
then
liftSimple aexpr -- if the scrutinee is scalar, we need no special treatment
else do
{ alts' <- mapM (\(ac, bndrs, aexpr) -> (ac, bndrs,) <$> liftSimpleAndCase aexpr) alts
; return ((fvs, vi), AnnCase expr bndr t alts')
}
}
liftSimpleAndCase aexpr = liftSimple aexpr
liftSimple :: CoreExprWithVectInfo -> VM CoreExprWithVectInfo
liftSimple ((fvs, vi), AnnVar v)
| v `elemDVarSet` fvs -- special case to avoid producing: (\v -> v) v
&& not (isToplevel v) -- NB: if 'v' not free or is toplevel, we must get the 'VIEncaps'
= return $ ((fvs, vi), AnnVar v)
liftSimple aexpr@((fvs_orig, VISimple), expr)
= do
{ let liftedExpr = mkAnnApps (mkAnnLams (reverse vars) fvs expr) vars
; traceVt "encapsulate:" $ ppr (deAnnotate aexpr) $$ text "==>" $$ ppr (deAnnotate liftedExpr)
; return $ liftedExpr
}
where
vars = dVarSetElems fvs
fvs = filterDVarSet (not . isToplevel) fvs_orig -- only include 'Id's that are not toplevel
mkAnnLams :: [Var] -> DVarSet -> AnnExpr' Var (DVarSet, VectAvoidInfo) -> CoreExprWithVectInfo
mkAnnLams [] fvs expr = ASSERT(isEmptyDVarSet fvs)
((emptyDVarSet, VIEncaps), expr)
mkAnnLams (v:vs) fvs expr = mkAnnLams vs (fvs `delDVarSet` v) (AnnLam v ((fvs, VIEncaps), expr))
mkAnnApps :: CoreExprWithVectInfo -> [Var] -> CoreExprWithVectInfo
mkAnnApps aexpr [] = aexpr
mkAnnApps aexpr (v:vs) = mkAnnApps (mkAnnApp aexpr v) vs
mkAnnApp :: CoreExprWithVectInfo -> Var -> CoreExprWithVectInfo
mkAnnApp aexpr@((fvs, _vi), _expr) v
= ((fvs `extendDVarSet` v, VISimple), AnnApp aexpr ((unitDVarSet v, VISimple), AnnVar v))
liftSimple aexpr
= pprPanic "Vectorise.Exp.liftSimple: not simple" $ ppr (deAnnotate aexpr)
isToplevel :: Var -> Bool
isToplevel v | isId v = case realIdUnfolding v of
NoUnfolding -> False
OtherCon {} -> True
DFunUnfolding {} -> True
CoreUnfolding {uf_is_top = top} -> top
| otherwise = False
-- |Vectorise an expression.
--
vectExpr :: CoreExprWithVectInfo -> VM VExpr
vectExpr aexpr
-- encapsulated expression of functional type => try to vectorise as a scalar subcomputation
| (isFunTy . annExprType $ aexpr) && isVIEncaps aexpr
= vectFnExpr True False aexpr
-- encapsulated constant => vectorise as a scalar constant
| isVIEncaps aexpr
= traceVt "vectExpr (encapsulated constant):" (ppr . deAnnotate $ aexpr) >>
vectConst (deAnnotate aexpr)
vectExpr (_, AnnVar v)
= vectVar v
vectExpr (_, AnnLit lit)
= vectConst $ Lit lit
vectExpr aexpr@(_, AnnLam _ _)
= traceVt "vectExpr [AnnLam]:" (ppr . deAnnotate $ aexpr) >>
vectFnExpr True False aexpr
-- SPECIAL CASE: Vectorise/lift 'patError @ ty err' by only vectorising/lifting the type 'ty';
-- its only purpose is to abort the program, but we need to adjust the type to keep CoreLint
-- happy.
-- FIXME: can't we do this with a VECTORISE pragma on 'pAT_ERROR_ID' now?
vectExpr (_, AnnApp (_, AnnApp (_, AnnVar v) (_, AnnType ty)) err)
| v == pAT_ERROR_ID
= do
{ (vty, lty) <- vectAndLiftType ty
; return (mkCoreApps (Var v) [Type (getRuntimeRep "vectExpr" vty), Type vty, err'], mkCoreApps (Var v) [Type lty, err'])
}
where
err' = deAnnotate err
-- type application (handle multiple consecutive type applications simultaneously to ensure the
-- PA dictionaries are put at the right places)
vectExpr e@(_, AnnApp _ arg)
| isAnnTypeArg arg
= vectPolyApp e
-- Lifted literal
vectExpr (_, AnnApp (_, AnnVar v) (_, AnnLit lit))
| Just _con <- isDataConId_maybe v
= do
{ let vexpr = App (Var v) (Lit lit)
; lexpr <- liftPD vexpr
; return (vexpr, lexpr)
}
-- value application (dictionary or user value)
vectExpr e@(_, AnnApp fn arg)
| isPredTy arg_ty -- dictionary application (whose result is not a dictionary)
= vectPolyApp e
| otherwise -- user value
= do
{ -- vectorise the types
; varg_ty <- vectType arg_ty
; vres_ty <- vectType res_ty
-- vectorise the function and argument expression
; vfn <- vectExpr fn
; varg <- vectExpr arg
-- the vectorised function is a closure; apply it to the vectorised argument
; mkClosureApp varg_ty vres_ty vfn varg
}
where
(arg_ty, res_ty) = splitFunTy . exprType $ deAnnotate fn
vectExpr (_, AnnCase scrut bndr ty alts)
| Just (tycon, ty_args) <- splitTyConApp_maybe scrut_ty
, isAlgTyCon tycon
= vectAlgCase tycon ty_args scrut bndr ty alts
| otherwise
= do
{ dflags <- getDynFlags
; cantVectorise dflags "Can't vectorise expression (no algebraic type constructor)" $
ppr scrut_ty
}
where
scrut_ty = exprType (deAnnotate scrut)
vectExpr (_, AnnLet (AnnNonRec bndr rhs) body)
= do
{ traceVt "let binding (non-recursive)" Outputable.empty
; vrhs <- localV $
inBind bndr $
vectAnnPolyExpr False rhs
; traceVt "let body (non-recursive)" Outputable.empty
; (vbndr, vbody) <- vectBndrIn bndr (vectExpr body)
; return $ vLet (vNonRec vbndr vrhs) vbody
}
vectExpr (_, AnnLet (AnnRec bs) body)
= do
{ (vbndrs, (vrhss, vbody)) <- vectBndrsIn bndrs $ do
{ traceVt "let bindings (recursive)" Outputable.empty
; vrhss <- zipWithM vect_rhs bndrs rhss
; traceVt "let body (recursive)" Outputable.empty
; vbody <- vectExpr body
; return (vrhss, vbody)
}
; return $ vLet (vRec vbndrs vrhss) vbody
}
where
(bndrs, rhss) = unzip bs
vect_rhs bndr rhs = localV $
inBind bndr $
vectAnnPolyExpr (isStrongLoopBreaker $ idOccInfo bndr) rhs
vectExpr (_, AnnTick tickish expr)
= vTick tickish <$> vectExpr expr
vectExpr (_, AnnType ty)
= vType <$> vectType ty
vectExpr e
= do
{ dflags <- getDynFlags
; cantVectorise dflags "Can't vectorise expression (vectExpr)" $ ppr (deAnnotate e)
}
-- |Vectorise an expression that *may* have an outer lambda abstraction. If the expression is marked
-- as encapsulated ('VIEncaps'), vectorise it as a scalar computation (using a generalised scalar
-- zip).
--
-- We do not handle type variables at this point, as they will already have been stripped off by
-- 'vectPolyExpr'. We also only have to worry about one set of dictionary arguments as we (1) only
-- deal with Haskell 2011 and (2) class selectors are vectorised elsewhere.
--
vectFnExpr :: Bool -- ^If we process the RHS of a binding, whether that binding
-- should be inlined
-> Bool -- ^Whether the binding is a loop breaker
-> CoreExprWithVectInfo -- ^Expression to vectorise; must have an outer `AnnLam`
-> VM VExpr
vectFnExpr inline loop_breaker aexpr@(_ann, AnnLam bndr body)
-- predicate abstraction: leave as a normal abstraction, but vectorise the predicate type
| isId bndr
&& isPredTy (idType bndr)
= do
{ vBndr <- vectBndr bndr
; vbody <- vectFnExpr inline loop_breaker body
; return $ mapVect (mkLams [vectorised vBndr]) vbody
}
-- encapsulated non-predicate abstraction: vectorise as a scalar computation
| isId bndr && isVIEncaps aexpr
= vectScalarFun . deAnnotate $ aexpr
-- non-predicate abstraction: vectorise as a non-scalar computation
| isId bndr
= vectLam inline loop_breaker aexpr
| otherwise
= do
{ dflags <- getDynFlags
; cantVectorise dflags "Vectorise.Exp.vectFnExpr: Unexpected type lambda" $
ppr (deAnnotate aexpr)
}
vectFnExpr _ _ aexpr
-- encapsulated function: vectorise as a scalar computation
| (isFunTy . annExprType $ aexpr) && isVIEncaps aexpr
= vectScalarFun . deAnnotate $ aexpr
| otherwise
-- not an abstraction: vectorise as a non-scalar vanilla expression
-- NB: we can get here due to the recursion in the first case above and from 'vectAnnPolyExpr'
= vectExpr aexpr
-- |Vectorise type and dictionary applications.
--
-- These are always headed by a variable (as we don't support higher-rank polymorphism), but may
-- involve two sets of type variables and dictionaries. Consider,
--
-- > class C a where
-- > m :: D b => b -> a
--
-- The type of 'm' is 'm :: forall a. C a => forall b. D b => b -> a'.
--
vectPolyApp :: CoreExprWithVectInfo -> VM VExpr
vectPolyApp e0
= case e4 of
(_, AnnVar var)
-> do { -- get the vectorised form of the variable
; vVar <- lookupVar var
; traceVt "vectPolyApp of" (ppr var)
-- vectorise type and dictionary arguments
; vDictsOuter <- mapM vectDictExpr (map deAnnotate dictsOuter)
; vDictsInner <- mapM vectDictExpr (map deAnnotate dictsInner)
; vTysOuter <- mapM vectType tysOuter
; vTysInner <- mapM vectType tysInner
; let reconstructOuter v = (`mkApps` vDictsOuter) <$> polyApply v vTysOuter
; case vVar of
Local (vv, lv)
-> do { MASSERT( null dictsInner ) -- local vars cannot be class selectors
; traceVt " LOCAL" (text "")
; (,) <$> reconstructOuter (Var vv) <*> reconstructOuter (Var lv)
}
Global vv
| isDictComp var -- dictionary computation
-> do { -- in a dictionary computation, the innermost, non-empty set of
-- arguments are non-vectorised arguments, where no 'PA' dictionaries
-- are needed for the type variables
; ve <- if null dictsInner
then
return $ Var vv `mkTyApps` vTysOuter `mkApps` vDictsOuter
else
reconstructOuter
(Var vv `mkTyApps` vTysInner `mkApps` vDictsInner)
; traceVt " GLOBAL (dict):" (ppr ve)
; vectConst ve
}
| otherwise -- non-dictionary computation
-> do { MASSERT( null dictsInner )
; ve <- reconstructOuter (Var vv)
; traceVt " GLOBAL (non-dict):" (ppr ve)
; vectConst ve
}
}
_ -> pprSorry "Cannot vectorise programs with higher-rank types:" (ppr . deAnnotate $ e0)
where
-- if there is only one set of variables or dictionaries, it will be the outer set
(e1, dictsOuter) = collectAnnDictArgs e0
(e2, tysOuter) = collectAnnTypeArgs e1
(e3, dictsInner) = collectAnnDictArgs e2
(e4, tysInner) = collectAnnTypeArgs e3
--
isDictComp var = (isJust . isClassOpId_maybe $ var) || isDFunId var
-- |Vectorise the body of a dfun.
--
-- Dictionary computations are special for the following reasons. The application of dictionary
-- functions is always saturated, so there is no need to create closures. Dictionary computations
-- don't depend on array values, so they are always scalar computations whose result we can
-- replicate (instead of executing them in parallel).
--
-- NB: To keep things simple, we are not rewriting any of the bindings introduced in a dictionary
-- computation. Consequently, the variable case needs to deal with cases where binders are
-- in the vectoriser environments and where that is not the case.
--
vectDictExpr :: CoreExpr -> VM CoreExpr
vectDictExpr (Var var)
= do { mb_scope <- lookupVar_maybe var
; case mb_scope of
Nothing -> return $ Var var -- binder from within the dict. computation
Just (Local (vVar, _)) -> return $ Var vVar -- local vectorised variable
Just (Global vVar) -> return $ Var vVar -- global vectorised variable
}
vectDictExpr (Lit lit)
= pprPanic "Vectorise.Exp.vectDictExpr: literal in dictionary computation" (ppr lit)
vectDictExpr (Lam bndr e)
= Lam bndr <$> vectDictExpr e
vectDictExpr (App fn arg)
= App <$> vectDictExpr fn <*> vectDictExpr arg
vectDictExpr (Case e bndr ty alts)
= Case <$> vectDictExpr e <*> pure bndr <*> vectType ty <*> mapM vectDictAlt alts
where
vectDictAlt (con, bs, e) = (,,) <$> vectDictAltCon con <*> pure bs <*> vectDictExpr e
--
vectDictAltCon (DataAlt datacon) = DataAlt <$> maybeV dataConErr (lookupDataCon datacon)
where
dataConErr = text "Cannot vectorise data constructor:" <+> ppr datacon
vectDictAltCon (LitAlt lit) = return $ LitAlt lit
vectDictAltCon DEFAULT = return DEFAULT
vectDictExpr (Let bnd body)
= Let <$> vectDictBind bnd <*> vectDictExpr body
where
vectDictBind (NonRec bndr e) = NonRec bndr <$> vectDictExpr e
vectDictBind (Rec bnds) = Rec <$> mapM (\(bndr, e) -> (bndr,) <$> vectDictExpr e) bnds
vectDictExpr e@(Cast _e _coe)
= pprSorry "Vectorise.Exp.vectDictExpr: cast" (ppr e)
vectDictExpr (Tick tickish e)
= Tick tickish <$> vectDictExpr e
vectDictExpr (Type ty)
= Type <$> vectType ty
vectDictExpr (Coercion coe)
= pprSorry "Vectorise.Exp.vectDictExpr: coercion" (ppr coe)
-- |Vectorise an expression of functional type, where all arguments and the result are of primitive
-- types (i.e., 'Int', 'Float', 'Double' etc., which have instances of the 'Scalar' type class) and
-- which does not contain any subcomputations that involve parallel arrays. Such functionals do not
-- require the full blown vectorisation transformation; instead, they can be lifted by application
-- of a member of the zipWith family (i.e., 'map', 'zipWith', 'zipWith3', etc.)
--
-- Dictionary functions are also scalar functions (as dictionaries themselves are not vectorised,
-- instead they become dictionaries of vectorised methods). We treat them differently, though see
-- "Note [Scalar dfuns]" in 'Vectorise'.
--
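-- For instance (a sketch): a function such as
-- > (\x y -> x + y * 2) :: Int -> Int -> Int
-- needs no array-level transformation; its lifted version is obtained by zipping it over
-- the argument arrays with the combinator built by 'zipScalars' below (conceptually
-- 'zipWithP (\x y -> x + y * 2)', where 'zipWithP' merely names the generated zip).
--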
vectScalarFun :: CoreExpr -> VM VExpr
vectScalarFun expr
= do
{ traceVt "vectScalarFun:" (ppr expr)
; let (arg_tys, res_ty) = splitFunTys (exprType expr)
; mkScalarFun arg_tys res_ty expr
}
-- Generate code for a scalar function by generating a scalar closure. If the function is a
-- dictionary function, vectorise it as dictionary code.
--
mkScalarFun :: [Type] -> Type -> CoreExpr -> VM VExpr
mkScalarFun arg_tys res_ty expr
| isPredTy res_ty
= do { vExpr <- vectDictExpr expr
; return (vExpr, unused)
}
| otherwise
= do { traceVt "mkScalarFun: " $ ppr expr $$ text " ::" <+>
ppr (mkFunTys arg_tys res_ty)
; fn_var <- hoistExpr (fsLit "fn") expr DontInline
; zipf <- zipScalars arg_tys res_ty
; clo <- scalarClosure arg_tys res_ty (Var fn_var) (zipf `App` Var fn_var)
; clo_var <- hoistExpr (fsLit "clo") clo DontInline
; lclo <- liftPD (Var clo_var)
; return (Var clo_var, lclo)
}
where
unused = error "Vectorise.Exp.mkScalarFun: we don't lift dictionary expressions"
-- |Vectorise a dictionary function that has a 'VECTORISE SCALAR instance' pragma.
--
-- In other words, all methods in that dictionary are scalar functions — to be vectorised with
-- 'vectScalarFun'. The dictionary "function" itself may be a constant, though.
--
-- NB: You may think that we could implement this function guided by the structure of the Core
-- expression of the right-hand side of the dictionary function. We cannot proceed like this as
-- 'vectScalarDFun' must also work for *imported* dfuns, where we don't necessarily have access
-- to the Core code of the unvectorised dfun.
--
-- Here is an example; assume,
--
-- > class Eq a where { (==) :: a -> a -> Bool }
-- > instance (Eq a, Eq b) => Eq (a, b) where { (==) = ... }
-- > {-# VECTORISE SCALAR instance Eq (a, b) }
--
-- The unvectorised dfun for the above instance has the following signature:
--
-- > $dEqPair :: forall a b. Eq a -> Eq b -> Eq (a, b)
--
-- We generate the following (scalar) vectorised dfun (liberally using TH notation):
--
-- > $v$dEqPair :: forall a b. V:Eq a -> V:Eq b -> V:Eq (a, b)
-- > $v$dEqPair = /\a b -> \dEqa :: V:Eq a -> \dEqb :: V:Eq b ->
-- > D:V:Eq $(vectScalarFun True recFns
-- > [| (==) @(a, b) ($dEqPair @a @b $(unVect dEqa) $(unVect dEqb)) |])
--
-- NB:
-- * '(,)' vectorises to '(,)' — hence, the type constructor in the result type remains the same.
-- * We share the '$(unVect di)' sub-expressions between the different selectors, but duplicate
-- the application of the unvectorised dfun, to enable the dictionary selection rules to fire.
--
vectScalarDFun :: Var -- ^ Original dfun
-> VM CoreExpr
vectScalarDFun var
= do { -- bring the type variables into scope
; mapM_ defLocalTyVar tvs
-- vectorise dictionary argument types and generate variables for them
; vTheta <- mapM vectType theta
; vThetaBndr <- mapM (newLocalVar (fsLit "vd")) vTheta
; let vThetaVars = varsToCoreExprs vThetaBndr
-- vectorise superclass dictionaries and methods as scalar expressions
; thetaVars <- mapM (newLocalVar (fsLit "d")) theta
; thetaExprs <- zipWithM unVectDict theta vThetaVars
; let thetaDictBinds = zipWith NonRec thetaVars thetaExprs
dict = Var var `mkTyApps` (mkTyVarTys tvs) `mkVarApps` thetaVars
scsOps = map (\selId -> varToCoreExpr selId `mkTyApps` tys `mkApps` [dict])
selIds
; vScsOps <- mapM (\e -> vectorised <$> vectScalarFun e) scsOps
-- vectorised applications of the class-dictionary data constructor
; Just vDataCon <- lookupDataCon dataCon
; vTys <- mapM vectType tys
; let vBody = thetaDictBinds `mkLets` mkCoreConApps vDataCon (map Type vTys ++ vScsOps)
; return $ mkLams (tvs ++ vThetaBndr) vBody
}
where
ty = varType var
(tvs, theta, pty) = tcSplitSigmaTy ty -- 'theta' is the instance context
(cls, tys) = tcSplitDFunHead pty -- 'pty' is the instance head
selIds = classAllSelIds cls
dataCon = classDataCon cls
-- Build a value of the dictionary before vectorisation from the original, unvectorised type and an
-- expression computing the vectorised dictionary.
--
-- Given the vectorised version of a dictionary 'vd :: V:C vt1..vtn', generate code that computes
-- the unvectorised version, thus:
--
-- > D:C op1 .. opm
-- > where
-- > opi = $(fromVect opTyi [| vSeli @vt1..vtk vd |])
--
-- where 'opTyi' is the type of the i-th superclass or op of the unvectorised dictionary.
--
unVectDict :: Type -> CoreExpr -> VM CoreExpr
unVectDict ty e
= do { vTys <- mapM vectType tys
; let meths = map (\sel -> Var sel `mkTyApps` vTys `mkApps` [e]) selIds
; scOps <- zipWithM fromVect methTys meths
; return $ mkCoreConApps dataCon (map Type tys ++ scOps)
}
where
(tycon, tys) = splitTyConApp ty
Just dataCon = isDataProductTyCon_maybe tycon
Just cls = tyConClass_maybe tycon
methTys = dataConInstArgTys dataCon tys
selIds = classAllSelIds cls
-- Vectorise an 'n'-ary lambda abstraction by building a set of 'n' explicit closures.
--
-- All non-dictionary free variables go into the closure's environment, whereas the dictionary
-- variables are passed explicitly (as conventional arguments) into the body during closure
-- construction.
--
vectLam :: Bool -- ^ Should the RHS of a binding be inlined?
-> Bool -- ^ Whether the binding is a loop breaker.
-> CoreExprWithVectInfo -- ^ Body of abstraction.
-> VM VExpr
vectLam inline loop_breaker expr@((fvs, _vi), AnnLam _ _)
= do { traceVt "fully vectorise a lambda expression" (ppr . deAnnotate $ expr)
; let (bndrs, body) = collectAnnValBinders expr
-- grab the in-scope type variables
; tyvars <- localTyVars
-- collect and vectorise all /local/ free variables
; vfvs <- readLEnv $ \env ->
[ (var, fromJust mb_vv)
| var <- dVarSetElems fvs
, let mb_vv = lookupVarEnv (local_vars env) var
, isJust mb_vv -- its local == is in local var env
]
-- separate dictionary from non-dictionary variables in the free variable set
; let (vvs_dict, vvs_nondict) = partition (isPredTy . varType . fst) vfvs
(_fvs_dict, vfvs_dict) = unzip vvs_dict
(fvs_nondict, vfvs_nondict) = unzip vvs_nondict
-- compute the type of the vectorised closure
; arg_tys <- mapM (vectType . idType) bndrs
; res_ty <- vectType (exprType $ deAnnotate body)
; let arity = length fvs_nondict + length bndrs
vfvs_dict' = map vectorised vfvs_dict
; buildClosures tyvars vfvs_dict' vfvs_nondict arg_tys res_ty
. hoistPolyVExpr tyvars vfvs_dict' (maybe_inline arity)
$ do { -- generate the vectorised body of the lambda abstraction
; lc <- builtin liftingContext
; (vbndrs, vbody) <- vectBndrsIn (fvs_nondict ++ bndrs) $ vectExpr body
; vbody' <- break_loop lc res_ty vbody
; return $ vLams lc vbndrs vbody'
}
}
where
maybe_inline n | inline = Inline n
| otherwise = DontInline
-- If this is the body of a binding marked as a loop breaker, add a recursion termination test
-- to the /lifted/ version of the function body. The termination test checks whether the lifting
-- context is empty. If so, it returns an empty array of the (lifted) result type instead of
-- executing the function body. This is the test from the last line (defining \mathcal{L}')
-- in Figure 6 of HtM.
break_loop lc ty (ve, le)
| loop_breaker
= do { dflags <- getDynFlags
; empty <- emptyPD ty
; lty <- mkPDataType ty
; return (ve, mkWildCase (Var lc) intPrimTy lty
[(DEFAULT, [], le),
(LitAlt (mkMachInt dflags 0), [], empty)])
}
| otherwise = return (ve, le)
vectLam _ _ _ = panic "Vectorise.Exp.vectLam: not a lambda"
-- Vectorise an algebraic case expression.
--
-- We convert
--
-- case e :: t of v { ... }
--
-- to
--
-- V: let v' = e in case v' of _ { ... }
-- L: let v' = e in case v' `cast` ... of _ { ... }
--
-- When lifting, we have to do it this way because v must have the type
-- [:V(T):] but the scrutinee must be cast to the representation type. We also
-- have to handle the case where v is a wild var correctly.
--
-- FIXME: this is too lazy...is it?
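-- In the general, multi-constructor case handled below, the lifted code takes roughly the
-- following shape (a sketch; 'PData_T' stands for the lifted data constructor returned by
-- 'pdataUnwrapScrut'):
-- > case scrut_L of
-- > PData_T sel fld_1 .. fld_n -> combinePD <res ty> lc sel [alt_1_L, .., alt_m_L]
-- where 'sel' is the selector recording which constructor each array element was built with.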
vectAlgCase :: TyCon -> [Type] -> CoreExprWithVectInfo -> Var -> Type
-> [(AltCon, [Var], CoreExprWithVectInfo)]
-> VM VExpr
vectAlgCase _tycon _ty_args scrut bndr ty [(DEFAULT, [], body)]
= do
{ traceVt "scrutinee (DEFAULT only)" Outputable.empty
; vscrut <- vectExpr scrut
; (vty, lty) <- vectAndLiftType ty
; traceVt "alternative body (DEFAULT only)" Outputable.empty
; (vbndr, vbody) <- vectBndrIn bndr (vectExpr body)
; return $ vCaseDEFAULT vscrut vbndr vty lty vbody
}
vectAlgCase _tycon _ty_args scrut bndr ty [(DataAlt _, [], body)]
= do
{ traceVt "scrutinee (one shot w/o binders)" Outputable.empty
; vscrut <- vectExpr scrut
; (vty, lty) <- vectAndLiftType ty
; traceVt "alternative body (one shot w/o binders)" Outputable.empty
; (vbndr, vbody) <- vectBndrIn bndr (vectExpr body)
; return $ vCaseDEFAULT vscrut vbndr vty lty vbody
}
vectAlgCase _tycon _ty_args scrut bndr ty [(DataAlt dc, bndrs, body)]
= do
{ traceVt "scrutinee (one shot w/ binders)" Outputable.empty
; vexpr <- vectExpr scrut
; (vty, lty) <- vectAndLiftType ty
; traceVt "alternative body (one shot w/ binders)" Outputable.empty
; (vbndr, (vbndrs, (vect_body, lift_body)))
<- vect_scrut_bndr
. vectBndrsIn bndrs
$ vectExpr body
; let (vect_bndrs, lift_bndrs) = unzip vbndrs
; (vscrut, lscrut, pdata_dc) <- pdataUnwrapScrut (vVar vbndr)
; vect_dc <- maybeV dataConErr (lookupDataCon dc)
; let vcase = mk_wild_case vscrut vty vect_dc vect_bndrs vect_body
lcase = mk_wild_case lscrut lty pdata_dc lift_bndrs lift_body
; return $ vLet (vNonRec vbndr vexpr) (vcase, lcase)
}
where
vect_scrut_bndr | isDeadBinder bndr = vectBndrNewIn bndr (fsLit "scrut")
| otherwise = vectBndrIn bndr
mk_wild_case expr ty dc bndrs body
= mkWildCase expr (exprType expr) ty [(DataAlt dc, bndrs, body)]
dataConErr = (text "vectAlgCase: data constructor not vectorised" <+> ppr dc)
vectAlgCase tycon _ty_args scrut bndr ty alts
= do
{ traceVt "scrutinee (general case)" Outputable.empty
; vexpr <- vectExpr scrut
; vect_tc <- vectTyCon tycon
; (vty, lty) <- vectAndLiftType ty
; let arity = length (tyConDataCons vect_tc)
; sel_ty <- builtin (selTy arity)
; sel_bndr <- newLocalVar (fsLit "sel") sel_ty
; let sel = Var sel_bndr
; traceVt "alternatives' body (general case)" Outputable.empty
; (vbndr, valts) <- vect_scrut_bndr
$ mapM (proc_alt arity sel vty lty) alts'
; let (vect_dcs, vect_bndrss, lift_bndrss, vbodies) = unzip4 valts
; (vect_scrut, lift_scrut, pdata_dc) <- pdataUnwrapScrut (vVar vbndr)
; let (vect_bodies, lift_bodies) = unzip vbodies
; vdummy <- newDummyVar (exprType vect_scrut)
; ldummy <- newDummyVar (exprType lift_scrut)
; let vect_case = Case vect_scrut vdummy vty
(zipWith3 mk_vect_alt vect_dcs vect_bndrss vect_bodies)
; lc <- builtin liftingContext
; lbody <- combinePD vty (Var lc) sel lift_bodies
; let lift_case = Case lift_scrut ldummy lty
[(DataAlt pdata_dc, sel_bndr : concat lift_bndrss,
lbody)]
; return . vLet (vNonRec vbndr vexpr)
$ (vect_case, lift_case)
}
where
vect_scrut_bndr | isDeadBinder bndr = vectBndrNewIn bndr (fsLit "scrut")
| otherwise = vectBndrIn bndr
alts' = sortBy (\(alt1, _, _) (alt2, _, _) -> cmp alt1 alt2) alts
cmp (DataAlt dc1) (DataAlt dc2) = dataConTag dc1 `compare` dataConTag dc2
cmp DEFAULT DEFAULT = EQ
cmp DEFAULT _ = LT
cmp _ DEFAULT = GT
cmp _ _ = panic "vectAlgCase/cmp"
proc_alt arity sel _ lty (DataAlt dc, bndrs, body@((fvs_body, _), _))
= do
dflags <- getDynFlags
vect_dc <- maybeV dataConErr (lookupDataCon dc)
let ntag = dataConTagZ vect_dc
tag = mkDataConTag dflags vect_dc
fvs = fvs_body `delDVarSetList` bndrs
sel_tags <- liftM (`App` sel) (builtin (selTags arity))
lc <- builtin liftingContext
elems <- builtin (selElements arity ntag)
(vbndrs, vbody)
<- vectBndrsIn bndrs
. localV
$ do
{ binds <- mapM (pack_var (Var lc) sel_tags tag)
. filter isLocalId
$ dVarSetElems fvs
; traceVt "case alternative:" (ppr . deAnnotate $ body)
; (ve, le) <- vectExpr body
; return (ve, Case (elems `App` sel) lc lty
[(DEFAULT, [], (mkLets (concat binds) le))])
}
-- empty <- emptyPD vty
-- return (ve, Case (elems `App` sel) lc lty
-- [(DEFAULT, [], Let (NonRec flags_var flags_expr)
-- $ mkLets (concat binds) le),
-- (LitAlt (mkMachInt 0), [], empty)])
let (vect_bndrs, lift_bndrs) = unzip vbndrs
return (vect_dc, vect_bndrs, lift_bndrs, vbody)
where
dataConErr = (text "vectAlgCase: data constructor not vectorised" <+> ppr dc)
proc_alt _ _ _ _ _ = panic "vectAlgCase/proc_alt"
mk_vect_alt vect_dc bndrs body = (DataAlt vect_dc, bndrs, body)
-- Pack a variable for a case alternative context *if* the variable is vectorised. If it
-- isn't, ignore it as scalar variables don't need to be packed.
pack_var len tags t v
= do
{ r <- lookupVar_maybe v
; case r of
Just (Local (vv, lv)) ->
do
{ lv' <- cloneVar lv
; expr <- packByTagPD (idType vv) (Var lv) len tags t
; updLEnv (\env -> env { local_vars = extendVarEnv (local_vars env) v (vv, lv') })
; return [(NonRec lv' expr)]
}
_ -> return []
}
-- Support to compute information for vectorisation avoidance ------------------
-- Annotation for Core AST nodes that describes how they should be handled during vectorisation
-- and especially if vectorisation of the corresponding computation can be avoided.
--
data VectAvoidInfo = VIParr -- tree contains parallel computations
| VISimple -- result type is scalar & no parallel subcomputation
| VIComplex -- any result type, no parallel subcomputation
| VIEncaps -- tree encapsulated by 'liftSimple'
| VIDict -- dictionary computation (never parallel)
deriving (Eq, Show)
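-- As a rough guide (a sketch, assuming 'sumP' is the parallel-array sum and that
-- 'xs :: [:Int:]' and 'n :: Int' are locals):
-- > sumP xs ~~> VIParr
-- > n + 1 ~~> VISimple
-- and a dictionary argument is annotated 'VIDict'.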
-- Core expression annotated with free variables and vectorisation-specific information.
--
type CoreExprWithVectInfo = AnnExpr Id (DVarSet, VectAvoidInfo)
-- Yield the type of an annotated core expression.
--
annExprType :: AnnExpr Var ann -> Type
annExprType = exprType . deAnnotate
-- Project the vectorisation information from an annotated Core expression.
--
vectAvoidInfoOf :: CoreExprWithVectInfo -> VectAvoidInfo
vectAvoidInfoOf ((_, vi), _) = vi
-- Is this a 'VIParr' node?
--
isVIParr :: CoreExprWithVectInfo -> Bool
isVIParr = (== VIParr) . vectAvoidInfoOf
-- Is this a 'VIEncaps' node?
--
isVIEncaps :: CoreExprWithVectInfo -> Bool
isVIEncaps = (== VIEncaps) . vectAvoidInfoOf
-- Is this a 'VIDict' node?
--
isVIDict :: CoreExprWithVectInfo -> Bool
isVIDict = (== VIDict) . vectAvoidInfoOf
-- 'VIParr' if either argument is 'VIParr'; otherwise, the first argument.
--
unlessVIParr :: VectAvoidInfo -> VectAvoidInfo -> VectAvoidInfo
unlessVIParr _ VIParr = VIParr
unlessVIParr vi _ = vi
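-- For example (directly from the equations above):
-- > VISimple `unlessVIParr` VIParr == VIParr
-- > VISimple `unlessVIParr` VIDict == VISimple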
-- 'VIParr' if either argument's vectorisation information is 'VIParr'; otherwise, the vectorisation
-- information of the first argument is produced.
--
unlessVIParrExpr :: VectAvoidInfo -> CoreExprWithVectInfo -> VectAvoidInfo
infixl `unlessVIParrExpr`
unlessVIParrExpr e1 e2 = e1 `unlessVIParr` vectAvoidInfoOf e2
-- Compute Core annotations to determine for which subexpressions we can avoid vectorisation.
--
-- * The first argument is the set of free, local variables whose evaluation may entail parallelism.
--
vectAvoidInfo :: VarSet -> CoreExprWithFVs -> VM CoreExprWithVectInfo
vectAvoidInfo pvs ce@(_, AnnVar v)
= do
{ gpvs <- globalParallelVars
; vi <- if v `elemVarSet` pvs || v `elemDVarSet` gpvs
then return VIParr
else vectAvoidInfoTypeOf ce
; viTrace ce vi []
; when (vi == VIParr) $
traceVt " reason:" $ if v `elemVarSet` pvs then text "local" else
if v `elemDVarSet` gpvs then text "global" else text "parallel type"
; return ((fvs, vi), AnnVar v)
}
where
fvs = freeVarsOf ce
vectAvoidInfo _pvs ce@(_, AnnLit lit)
= do
{ vi <- vectAvoidInfoTypeOf ce
; viTrace ce vi []
; return ((fvs, vi), AnnLit lit)
}
where
fvs = freeVarsOf ce
vectAvoidInfo pvs ce@(_, AnnApp e1 e2)
= do
{ ceVI <- vectAvoidInfoTypeOf ce
; eVI1 <- vectAvoidInfo pvs e1
; eVI2 <- vectAvoidInfo pvs e2
; let vi = ceVI `unlessVIParrExpr` eVI1 `unlessVIParrExpr` eVI2
-- ; viTrace ce vi [eVI1, eVI2]
; return ((fvs, vi), AnnApp eVI1 eVI2)
}
where
fvs = freeVarsOf ce
vectAvoidInfo pvs ce@(_, AnnLam var body)
= do
{ bodyVI <- vectAvoidInfo pvs body
; varVI <- vectAvoidInfoType $ varType var
; let vi = vectAvoidInfoOf bodyVI `unlessVIParr` varVI
-- ; viTrace ce vi [bodyVI]
; return ((fvs, vi), AnnLam var bodyVI)
}
where
fvs = freeVarsOf ce
vectAvoidInfo pvs ce@(_, AnnLet (AnnNonRec var e) body)
= do
{ ceVI <- vectAvoidInfoTypeOf ce
; eVI <- vectAvoidInfo pvs e
; isScalarTy <- isScalar $ varType var
; (bodyVI, vi) <- if isVIParr eVI && not isScalarTy
then do -- binding is parallel
{ bodyVI <- vectAvoidInfo (pvs `extendVarSet` var) body
; return (bodyVI, VIParr)
}
else do -- binding doesn't affect parallelism
{ bodyVI <- vectAvoidInfo pvs body
; return (bodyVI, ceVI `unlessVIParrExpr` bodyVI)
}
-- ; viTrace ce vi [eVI, bodyVI]
; return ((fvs, vi), AnnLet (AnnNonRec var eVI) bodyVI)
}
where
fvs = freeVarsOf ce
vectAvoidInfo pvs ce@(_, AnnLet (AnnRec bnds) body)
= do
{ ceVI <- vectAvoidInfoTypeOf ce
; bndsVI <- mapM (vectAvoidInfoBnd pvs) bnds
; parrBndrs <- map fst <$> filterM isVIParrBnd bndsVI
; if not . null $ parrBndrs
then do -- body may trigger parallelism via at least one binding
{ new_pvs <- filterM ((not <$>) . isScalar . varType) parrBndrs
; let extendedPvs = pvs `extendVarSetList` new_pvs
; bndsVI <- mapM (vectAvoidInfoBnd extendedPvs) bnds
; bodyVI <- vectAvoidInfo extendedPvs body
-- ; viTrace ce VIParr (map snd bndsVI ++ [bodyVI])
; return ((fvs, VIParr), AnnLet (AnnRec bndsVI) bodyVI)
}
else do -- demanded bindings cannot trigger parallelism
{ bodyVI <- vectAvoidInfo pvs body
; let vi = ceVI `unlessVIParrExpr` bodyVI
-- ; viTrace ce vi (map snd bndsVI ++ [bodyVI])
; return ((fvs, vi), AnnLet (AnnRec bndsVI) bodyVI)
}
}
where
fvs = freeVarsOf ce
vectAvoidInfoBnd pvs (var, e) = (var,) <$> vectAvoidInfo pvs e
isVIParrBnd (var, eVI)
= do
{ isScalarTy <- isScalar (varType var)
; return $ isVIParr eVI && not isScalarTy
}
vectAvoidInfo pvs ce@(_, AnnCase e var ty alts)
= do
{ ceVI <- vectAvoidInfoTypeOf ce
; eVI <- vectAvoidInfo pvs e
; altsVI <- mapM (vectAvoidInfoAlt (isVIParr eVI)) alts
; let alteVIs = [eVI | (_, _, eVI) <- altsVI]
vi = foldl unlessVIParrExpr ceVI (eVI:alteVIs) -- NB: same effect as in the paper
-- ; viTrace ce vi (eVI : alteVIs)
; return ((fvs, vi), AnnCase eVI var ty altsVI)
}
where
fvs = freeVarsOf ce
vectAvoidInfoAlt scrutIsPar (con, bndrs, e)
= do
{ allScalar <- allScalarVarType bndrs
; let altPvs | scrutIsPar && not allScalar = pvs `extendVarSetList` bndrs
| otherwise = pvs
; (con, bndrs,) <$> vectAvoidInfo altPvs e
}
vectAvoidInfo pvs ce@(_, AnnCast e (fvs_ann, ann))
= do
{ eVI <- vectAvoidInfo pvs e
; return ((fvs, vectAvoidInfoOf eVI), AnnCast eVI ((freeVarsOfAnn fvs_ann, VISimple), ann))
}
where
fvs = freeVarsOf ce
vectAvoidInfo pvs ce@(_, AnnTick tick e)
= do
{ eVI <- vectAvoidInfo pvs e
; return ((fvs, vectAvoidInfoOf eVI), AnnTick tick eVI)
}
where
fvs = freeVarsOf ce
vectAvoidInfo _pvs ce@(_, AnnType ty)
= return ((fvs, VISimple), AnnType ty)
where
fvs = freeVarsOf ce
vectAvoidInfo _pvs ce@(_, AnnCoercion coe)
= return ((fvs, VISimple), AnnCoercion coe)
where
fvs = freeVarsOf ce
-- Compute vectorisation avoidance information for a type.
--
vectAvoidInfoType :: Type -> VM VectAvoidInfo
vectAvoidInfoType ty
| isPredTy ty
= return VIDict
| Just (arg, res) <- splitFunTy_maybe ty
= do
{ argVI <- vectAvoidInfoType arg
; resVI <- vectAvoidInfoType res
; case (argVI, resVI) of
(VISimple, VISimple) -> return VISimple -- NB: diverts from the paper: scalar functions
(_ , VIDict) -> return VIDict
_ -> return $ VIComplex `unlessVIParr` argVI `unlessVIParr` resVI
}
| otherwise
= do
{ parr <- maybeParrTy ty
; if parr
then return VIParr
else do
{ scalar <- isScalar ty
; if scalar
then return VISimple
else return VIComplex
} }
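-- For illustration (a sketch, assuming 'Int' is an instance of the 'Scalar' class and
-- that '[::]' is registered as a parallel type constructor):
-- > Int -> Int ~~> VISimple
-- > [:Int:] -> Int ~~> VIParr
-- > Num a ~~> VIDict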
-- Compute vectorisation avoidance information for the type of a Core expression (with FVs).
--
vectAvoidInfoTypeOf :: AnnExpr Var ann -> VM VectAvoidInfo
vectAvoidInfoTypeOf = vectAvoidInfoType . annExprType
-- Checks whether the type might be a parallel array type.
--
maybeParrTy :: Type -> VM Bool
maybeParrTy ty
-- looking through newtypes
| Just ty' <- coreView ty
= (== VIParr) <$> vectAvoidInfoType ty'
-- decompose constructor applications
| Just (tc, ts) <- splitTyConApp_maybe ty
= do
{ isParallel <- (tyConName tc `elemNameSet`) <$> globalParallelTyCons
; if isParallel
then return True
else or <$> mapM maybeParrTy ts
}
-- must be a Named ForAllTy because anon ones respond to splitTyConApp_maybe
maybeParrTy (ForAllTy _ ty) = maybeParrTy ty
maybeParrTy _ = return False
-- Is every variable either of a type in the 'Scalar' class or a toplevel variable?
--
-- NB: 'liftSimple' does not abstract over toplevel variables.
--
allScalarVarType :: [Var] -> VM Bool
allScalarVarType vs = and <$> mapM isScalarOrToplevel vs
where
isScalarOrToplevel v | isToplevel v = return True
| otherwise = isScalar (varType v)
-- Is every variable in the set either of a type in the 'Scalar' class or a toplevel variable?
--
allScalarVarTypeSet :: DVarSet -> VM Bool
allScalarVarTypeSet = allScalarVarType . dVarSetElems
-- Debugging support
--
viTrace :: CoreExprWithFVs -> VectAvoidInfo -> [CoreExprWithVectInfo] -> VM ()
viTrace ce vi vTs
= traceVt ("vect info: " ++ show vi ++ "[" ++
(concat $ map ((++ " ") . show . vectAvoidInfoOf) vTs) ++ "]")
(ppr $ deAnnotate ce)
|
GaloisInc/halvm-ghc
|
compiler/vectorise/Vectorise/Exp.hs
|
bsd-3-clause
| 49,672
| 6
| 22
| 14,202
| 11,340
| 5,942
| 5,398
| -1
| -1
|
-- For the tests of things that must not compile
{-# OPTIONS_GHC -fdefer-type-errors #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedLists #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
module Sigym4.DataSpec ( spec, main ) where
import Sigym4.Data hiding (describe, map, const)
import qualified Sigym4.Data as D
import qualified Sigym4.Data.AST as AST
import Data.Functor.Identity
import Data.List (isInfixOf)
import qualified Data.Vector.Storable as St
import Data.Maybe
import Test.Hspec
import Test.ShouldNotTypecheck (shouldNotTypecheck)
main :: IO ()
main = hspec spec
newtype Temperature = Temperature Double
deriving
( Eq, Ord, Show, Num
, RealFrac, Real, Fractional, Floating, RealFloat, Storable)
spec :: Spec
spec = do
describe "prettyAST" $ do
let v :: DummyRasterVar (Epsg 23030) Observation Temperature
v = dummyRasterInput "dummy"
(const (throwError (NotAvailable "")))
(const (throwError (NotAvailable "")))
(Schedule [cron|0 0 * * *|])
it "shows description" $ do
show (prettyAST v) `shouldSatisfy` isInfixOf "dummy"
it "shows dimension" $ do
show (prettyAST v) `shouldSatisfy` isInfixOf "0 0 * * *"
it "handles cycles" $ do
let v' = D.zipWith ([fp|v1|] (+)) v v'
show (prettyAST v') `shouldSatisfy` isInfixOf "..."
describe "only shows type of \"describe\" nodes" $ do
let v' = D.describe "una variable" v
it "shows description" $ do
show (prettyAST v') `shouldSatisfy` isInfixOf "una variable"
it "shows crs" $ do
show v' `shouldSatisfy` isInfixOf "23030"
it "shows type" $ do
show v' `shouldSatisfy` isInfixOf "Temperature"
it "shows dimension type" $ do
show v' `shouldSatisfy` isInfixOf "Observation"
describe "adaptDim" $ do
let tObs :: DummyRasterVar (Epsg 23030) Observation Temperature
tObs = dummyRasterInput "temperatura observada"
(const (throwError (NotAvailable "")))
(const (throwError (NotAvailable "")))
(Schedule [cron|0 * * * *|])
tPred :: DummyRasterVar (Epsg 23030) Prediction Temperature
tPred = dummyRasterInput "temperatura inventada"
(const (throwError (NotAvailable "")))
(const (throwError (NotAvailable "")))
([0,60..24*60] :* Schedule [cron|0 0 * * *|])
sqErr = [fp|version1|] $ \o p -> o*o - p*p
predictedTime (horizon:*runTime) = addHorizon horizon runTime
closestObservedTime dObs ixPred =
let bestTime = idfloor dObs (coerceTime (predictedTime ixPred))
in maybeToList (fmap unQ bestTime)
adaptedObs = adaptDim (dimension tPred) (closestObservedTime (dimension tObs)) tObs
tErr :: DummyRasterVar (Epsg 23030) Prediction Temperature
tErr = D.describe "predErr" $
D.zipWith sqErr adaptedObs tPred
it "should not typecheck without adaptDim" $ shouldNotTypecheck $
let tErrBad :: DummyRasterVar (Epsg 23030) Prediction Temperature
tErrBad = D.describe "predErr" $ D.zipWith sqErr tObs tPred
in tErrBad
it "can be pretty printed" $ do
show (prettyAST tErr) `shouldSatisfy` isInfixOf "predErr"
show (prettyAST tErr) `shouldSatisfy` isInfixOf "AdaptDim"
show (prettyAST tErr) `shouldSatisfy` isInfixOf "ZipWith"
describe "getMissingInputs" $ do
it "can calculate" $ do
let ix = Hour 6 :* datetime 2016 11 28 0 0
Right missing = runDummy (getMissingInputs tErr ix)
length missing `shouldBe` 2
map missingIx missing `shouldMatchList` [
SomeDimensionIx (dimension tObs) (datetime 2016 11 28 6 0)
, SomeDimensionIx (dimension tPred) ix
]
it "handles cycles" $ do
let ix = Hour 6 :* datetime 2016 11 28 0 0
Right missing = runDummy (getMissingInputs v' ix)
v' = D.zipWith ([fp|v1|] (+)) tErr v'
length missing `shouldSatisfy` (>0)
it "marks failed adaptation as missing input" $ do
let tPredGood = dummyRasterInput "temperatura inventada"
(const (return undefined))
(const (throwError (NotAvailable "")))
([0,60..24*60] :* Schedule [cron|0 0 * * *|])
tObsBad = adaptDim (dimension tPred) badAdaptor tObs
where badAdaptor = const []
tPredBad = D.describe "predErr" $
D.zipWith sqErr tObsBad tPredGood
ix = Hour 6 :* datetime 2016 11 28 0 0
Right missing = runDummy (getMissingInputs tPredBad ix)
map missingIx missing `shouldMatchList` [
SomeDimensionIx (dimension tPred) ix
]
newtype DummyInterpreter a = DummyInterpreter { runDummy :: Either LoadError a }
deriving (Functor, Applicative, Monad, MonadError LoadError)
type DummyRasterVar = Variable DummyInterpreter RasterT
dummyRasterInput
:: IsInput DummyInterpreter RasterT crs dim a
=> Description
-> (DimensionIx dim -> DummyInterpreter (DummyBand crs a))
-> (DimensionIx dim -> DummyInterpreter Fingerprint)
-> dim -> DummyRasterVar crs dim a
dummyRasterInput desc res f d = input DummyRasterInput
{ rLoad = res
, rFingerprint = f
, rDimension = d
, rDescription = desc
}
data instance AST.Loader DummyInterpreter RasterT crs dim a = DummyRasterInput
{ rLoad :: DimensionIx dim -> DummyInterpreter (DummyBand crs a)
, rFingerprint :: DimensionIx dim -> DummyInterpreter Fingerprint
, rDimension :: dim
, rDescription :: Description
}
type DummyRasterInput = AST.Loader DummyInterpreter RasterT
instance IsVariable DummyInterpreter RasterT crs dim a
=> AST.HasLoad DummyInterpreter RasterT crs dim a where
load (AST.Input l) = rLoad l
data DummyBand crs a = DummyBand
{ dummyDescription :: Description
}
type instance AST.Dataset DummyInterpreter RasterT crs a = DummyBand crs a
instance AST.HasCalculateFingerprint DummyInterpreter dim (DummyRasterInput crs dim a) where
calculateFingerprint = rFingerprint
instance HasDimension (DummyRasterInput crs dim a) dim where dimension = rDimension
instance AST.HasDescription (DummyRasterInput crs dim a) where
description = rDescription
instance AST.HasDescription (DummyBand crs a) where
description = dummyDescription
|
meteogrid/sigym4-data
|
test/Sigym4/DataSpec.hs
|
bsd-3-clause
| 6,665
| 0
| 22
| 1,665
| 1,919
| 979
| 940
| 139
| 1
|
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE UndecidableInstances #-}
module Language.Granule.Syntax.Def where
import Data.List ((\\), delete)
import Data.Set (Set)
import qualified Data.Map as M
import GHC.Generics (Generic)
import Language.Granule.Context (Ctxt)
import Language.Granule.Syntax.FirstParameter
import Language.Granule.Syntax.Helpers
import Language.Granule.Syntax.Identifiers
import Language.Granule.Syntax.Span
import Language.Granule.Syntax.Expr
import Language.Granule.Syntax.Type
import Language.Granule.Syntax.Pattern
-- | Top-level ASTs
-- | Comprises a list of data type declarations and a list
-- | of expression definitions
-- | where `v` is the type of values and `a` the type of annotations
data AST v a =
AST
{ dataTypes :: [DataDecl]
, definitions :: [Def v a]
, imports :: Set Import
, hiddenNames :: M.Map Id Id -- map from names to the module hiding them
, moduleName :: Maybe Id
}
deriving instance (Show (Def v a), Show a) => Show (AST v a)
deriving instance (Eq (Def v a), Eq a) => Eq (AST v a)
type Import = FilePath
-- | Function definitions
data Def v a = Def
{ defSpan :: Span
, defId :: Id
, defEquations :: [Equation v a]
, defTypeScheme :: TypeScheme
}
deriving Generic
deriving instance (Eq v, Eq a) => Eq (Def v a)
deriving instance (Show v, Show a) => Show (Def v a)
-- | Single equation of a function
data Equation v a =
Equation {
equationSpan :: Span,
equationType :: a,
equationArguments :: [Pattern a],
equationBody :: Expr v a }
deriving Generic
deriving instance (Eq v, Eq a) => Eq (Equation v a)
deriving instance (Show v, Show a) => Show (Equation v a)
instance FirstParameter (Equation v a) Span
definitionType :: Def v a -> Type
definitionType Def { defTypeScheme = ts } =
ty where (Forall _ _ _ ty) = ts
-- | Data type declarations
data DataDecl = DataDecl
{ dataDeclSpan :: Span
, dataDeclId :: Id
, dataDeclTyVarCtxt :: Ctxt Kind
, dataDeclKindAnn :: Maybe Kind
, dataDeclDataConstrs :: [DataConstr]
}
deriving (Generic, Show, Eq)
instance FirstParameter DataDecl Span
-- | Data constructors
data DataConstr
= DataConstrIndexed
{ dataConstrSpan :: Span, dataConstrId :: Id, dataConstrTypeScheme :: TypeScheme } -- ^ GADTs
| DataConstrNonIndexed
{ dataConstrSpan :: Span, dataConstrId :: Id, dataConstrParams :: [Type] } -- ^ ADTs
deriving (Eq, Show, Generic)
-- | Is the data type an indexed data type, or just a plain ADT?
isIndexedDataType :: DataDecl -> Bool
isIndexedDataType (DataDecl _ id tyVars _ constrs) =
and (map nonIndexedConstructors constrs)
where
nonIndexedConstructors DataConstrNonIndexed{} = False
nonIndexedConstructors (DataConstrIndexed _ _ (Forall _ tyVars' _ ty)) =
noMatchOnEndType (reverse tyVars) ty
noMatchOnEndType ((v, _):tyVars) (TyApp t1 t2) =
case t2 of
TyVar v' | v == v' -> noMatchOnEndType tyVars t1
_ -> True
noMatchOnEndType tyVars (FunTy _ t) = noMatchOnEndType tyVars t
noMatchOnEndType [] (TyCon _) = False
-- Defaults to `true` (actually an ill-formed case for data types)
noMatchOnEndType _ _ = True
nonIndexedToIndexedDataConstr :: Id -> [(Id, Kind)] -> DataConstr -> DataConstr
nonIndexedToIndexedDataConstr _ _ d@DataConstrIndexed{} = d
nonIndexedToIndexedDataConstr tName tyVars (DataConstrNonIndexed sp dName params)
-- Don't push the parameters into the type scheme yet
= DataConstrIndexed sp dName (Forall sp [] [] ty)
where
ty = foldr FunTy (returnTy (TyCon tName) tyVars) params
returnTy t [] = t
returnTy t (v:vs) = returnTy (TyApp t ((TyVar . fst) v)) vs
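-- For example (a sketch with a hypothetical declaration): given
-- > data Pair a b = MkPair a b
-- the constructor 'MkPair' is parsed as 'DataConstrNonIndexed' with parameters [a, b],
-- and the conversion above produces the indexed form
-- > MkPair :: a -> b -> Pair a b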
instance FirstParameter DataConstr Span
-- | How many data constructors a type has (Nothing -> don't know)
type Cardinality = Maybe Nat
-- | Fresh a whole AST
freshenAST :: AST v a -> AST v a
freshenAST (AST dds defs imports hiddens name) =
AST dds' defs' imports hiddens name
where (dds', defs') = (map runFreshener dds, map runFreshener defs)
instance Monad m => Freshenable m DataDecl where
freshen (DataDecl s v tyVars kind ds) = do
tyVars <- mapM (\(v, k) -> freshen k >>= \k' -> return (v, k')) tyVars
kind <- freshen kind
ds <- freshen ds
return $ DataDecl s v tyVars kind ds
instance Monad m => Freshenable m DataConstr where
freshen (DataConstrIndexed sp v tys) = do
tys <- freshen tys
return $ DataConstrIndexed sp v tys
freshen (DataConstrNonIndexed sp v ts) = do
ts <- mapM freshen ts
return $ DataConstrNonIndexed sp v ts
instance Monad m => Freshenable m (Equation v a) where
freshen (Equation s a ps e) = do
ps <- mapM freshen ps
e <- freshen e
return (Equation s a ps e)
-- | Alpha-convert all bound variables of a definition to unique names.
instance Monad m => Freshenable m (Def v a) where
freshen (Def s var eqs t) = do
t <- freshen t
eqs <- mapM freshen eqs
return (Def s var eqs t)
instance Term (Equation v a) where
freeVars (Equation s a binders body) =
freeVars body \\ concatMap boundVars binders
instance Term (Def v a) where
freeVars (Def _ name equations _) =
delete name (concatMap freeVars equations)
|
dorchard/gram_lang
|
frontend/src/Language/Granule/Syntax/Def.hs
|
bsd-3-clause
| 5,364
| 0
| 15
| 1,158
| 1,690
| 897
| 793
| 119
| 6
|
module CombinatoricsKata where
import BestTravel
import Combinations
import FindTheSmallest
import Permutations
import TheObservedPIN
testKata :: IO ()
testKata = print "Hey! Another super-useless function!"
|
Eugleo/Code-Wars
|
src/CombinatoricsKata.hs
|
bsd-3-clause
| 210
| 0
| 6
| 26
| 37
| 22
| 15
| 8
| 1
|
{-# Language RankNTypes #-}
{-# Language TupleSections #-}
{-# Language DeriveDataTypeable #-}
module Concepted.Core where
import Graphics.UI.Gtk hiding
( eventKeyName, eventButton, eventModifier
, Menu, Point, Rectangle, Widget
, add, get
)
import Graphics.UI.Gtk.Gdk.Events (
eventX, eventY, eventKeyName, eventButton, eventDirection, eventModifier)
import Graphics.Rendering.Cairo hiding (
status, versionString, version)
import Control.Concurrent
import Control.Monad
import Control.Monad.Reader
import Control.Monad.State
import Data.List
import Data.Maybe
import qualified Data.IntMap as IM
import qualified Data.Map as M
import Concepted.Graphics
import Concepted.Widget
import Concepted.Syntax.Parser
import Concepted.Plane
import Concepted.State
import Concepted.Misc (snapXY)
linkSplitter :: Handler (Maybe Int)
linkSplitter (Key "s" True) Nothing = do
sel <- gets $ selection . getCurrentPlane
case sel of
[IdLink i] -> do
status "link split"
return . Continue $ Just i
_ -> return Ignored
linkSplitter (Key "s" True) (Just i) = do
s <- get
let cp = getCurrentPlane s
Just (Link p a b c d (Handle q:ps) e) = IM.lookup i $ links cp
pq = q `sub` p `divs` 2 `add` p
put $ replaceCurrentPlane s cp { links = IM.insert i (Link p a b c d (Handle pq:Handle q:ps) e) $ links cp }
return . Continue $ Just i
linkSplitter _ _ = return Ignored
data LineEditor = NewLine Int
lineEditor :: Handler LineEditor
lineEditor (Key "lmb" True) (NewLine i) = do
s <- get
let cp = getCurrentPlane s
mxy = mouseXY s
xy = screenToPlane cp mxy
Just (Line ps) = IM.lookup i $ pLines cp
put $ replaceCurrentPlane s cp { pLines = IM.insert i (Line $ ps ++ [Handle xy]) $ pLines cp }
return . Continue $ NewLine i
lineEditor (Key "Escape" True) _ = do
status "Stopped line editing"
return End
lineEditor _ _ = return Ignored
xxx :: Handler ()
xxx (Key "space" True) _ = do
sel <- gets $ selection . getCurrentPlane
status $ "Selection: " ++ show sel
return $ Continue ()
xxx e _ = do
status $ "Pressed " ++ show e
return Ignored
main' :: CState -> IO ()
main' initialState = do
let config = CConf
{ confBackground = white
}
initGUI
window <- windowNew
set window
[ windowTitle := "Concepted"
, windowDefaultWidth := 320
, windowDefaultHeight := 200
, containerBorderWidth := 0
]
canvas <- drawingAreaNew
containerAdd window canvas
widgetShowAll window
let ws = map widgets $ planes initialState
ms <- mapM newMenu ws
let ms' = M.fromList $ zip ws ms
sVar <- newMVar initialState { menus = ms' }
onKeyPress window $ \e -> do
modifyState config sVar $ myKeyPress (eventKeyName e)
widgetQueueDraw canvas
return True
onButtonPress canvas $ \e -> do
case eventButton e of
LeftButton -> do
modifyState config sVar $ myLmbPress (Control `elem` eventModifier e)
(eventX e, eventY e)
widgetQueueDraw canvas
_ -> return ()
return True
onButtonRelease canvas $ \e -> do
case eventButton e of
LeftButton -> do
modifyState config sVar $ myLmbRelease (eventX e, eventY e)
widgetQueueDraw canvas
_ -> return ()
return True
onScroll canvas $ \e -> do
case eventDirection e of
ScrollUp -> do
modifyState config sVar $ myScroll True
widgetQueueDraw canvas
ScrollDown -> do
modifyState config sVar $ myScroll False
widgetQueueDraw canvas
_ -> return ()
return True
onMotionNotify canvas False $ \e -> do
s <- takeMVar sVar
-- TODO The first time onMotionNotify is called, the computed dx
-- and dy are wrong.
let dx = eventX e - fst (mouseXY s)
dy = eventY e - snd (mouseXY s)
lmb = Button1 `elem` (eventModifier e)
rmb = Button3 `elem` (eventModifier e)
s' <- execC config s { mouseXY = (eventX e, eventY e) } $
myMotion lmb rmb (dx, dy)
putMVar sVar s'
widgetQueueDraw canvas
return True
onExpose canvas $ \_ -> do
(w, h) <- widgetGetSize canvas
drawin <- widgetGetDrawWindow canvas
s <- takeMVar sVar
let s' = s { wwidth = w, wheight = h }
putMVar sVar s'
renderWithDrawable drawin (myDraw config s')
return True
onDestroy window mainQuit
mainGUI
modifyState :: CConf -> MVar CState -> C a -> IO ()
modifyState config sVar f =
modifyMVar_ sVar $ \s -> execC config s f
----------------------------------------------------------------------
-- The main callbacks
----------------------------------------------------------------------
myKeyPress :: String -> C ()
myKeyPress k = do
b <- handle $ Key k True
if b
then return ()
else do
s <- get
case k of
"r" -> case filename s of
Nothing -> pass
Just fn -> do
c <- liftIO $ readFile fn
case unserialize c of
Left err ->
liftIO . putStrLn $ "parse error: " ++ show err
Right s' -> do
liftIO . putStrLn $ fn ++ " reloaded"
change currentPlane $ \cp -> cp
{ concepts = concepts (getCurrentPlane s')
, links = links (getCurrentPlane s')
, follow = follow (getCurrentPlane s')
}
"plus" -> change currentPlane $ zoomAt (mouseXY s) 1.1
"minus" -> change currentPlane $ zoomAt (mouseXY s) (1 / 1.1)
"l" -> put $ s { hideLinks = not (hideLinks s) }
"c" -> changeAtXY currentPlane newConcept
"n" -> do
i <- newLine
modify (\s' -> s' { handlers = HandlerState lineEditor (NewLine i) : handlers s })
status $ "Editting line #" ++ show i ++ ", press Escape to stop"
"Up" -> change currentPlane $ pan (0, 20)
"Down" -> change currentPlane $ pan (0, -20)
"Left" -> change currentPlane $ pan (20, 0)
"Right" -> change currentPlane $ pan (-20, 0)
"Print" -> do
config <- ask
liftIO $ withImageSurface FormatARGB32 (wwidth s) (wheight s) $
\surf -> do
renderWith surf $ myDraw config s
surfaceWriteToPNG surf "screenshot.png"
"Escape" -> liftIO mainQuit
_ -> pass
myLmbPress :: Bool -> Point -> C ()
myLmbPress ctrl xy = do
b <- handle $ Key "lmb" True
if b
then return ()
else do
cp <- grab currentPlane
let xy' = screenToPlane cp xy
selc = select IdConcept xy' (IM.toList $ concepts cp)
sell = select IdLink xy' (IM.toList $ links cp)
selh = selectLinksHandles xy' (IM.toList $ links cp)
sel = take 1 $ concat [selc, sell, selh]
pms <- gets planeMenuPairs
liftIO $ mapM_ (\(p, m) -> pressMenu (screenToPlane p xy) m) pms
nail currentPlane $ cp { selection = if ctrl
then nub (sel ++ selection cp)
else if null sel then selection cp else sel}
myLmbRelease :: Point -> C ()
myLmbRelease xy = do
s <- get
cmds <- liftIO $ mapM (\(p, m) -> releaseMenu (screenToPlane p xy) m) $ planeMenuPairs s
sequence_ $ mapMaybe id cmds
change currentPlane $ snapSelection' $ snapTreshold s
pass :: Monad m => m ()
pass = return ()
-- The bool specifies if it is up (true) or down (false).
myScroll :: Bool -> C ()
myScroll up = do
mxy <- gets mouseXY
change currentPlane $ zoomAt mxy (if up then 1.1 else 1 / 1.1)
-- The booleans specify if the lmb and rmb are pressed.
myMotion :: Bool -> Bool -> (Double, Double) -> C ()
myMotion True False (dx, dy) = do
cp <- grab currentPlane
let dxy' = screenToPlaneDelta cp (dx, dy)
change currentPlane $ mapSelection (move dxy')
myMotion False True dxy = change currentPlane $ pan dxy
myMotion _ _ _ = pass
myDraw :: CConf -> CState -> Render ()
myDraw config s = do
-- clear
identityMatrix
setSourceRGBA' $ confBackground config
paint
-- status bar
setFontSize 12
setSourceRGBA' black
moveTo 5 $ (fromIntegral $ wheight s) - 5
showText $ wstatus s
mapM_ (renderPlane s) $ planeMenuPairs s
renderPlane :: CState -> (Plane, Menu) -> Render ()
renderPlane s (p, m) = do
-- view the scene under the pan/zoom transform
identityMatrix
translate (fst $ panXY p) (snd $ panXY p)
scale (zoom p) (zoom p)
-- render
mapM_ (\(a,b) -> render (a `isSelectedConcept` p) b)
(IM.toList $ concepts p)
unless (hideLinks s) $
mapM_ (\(a,b) -> renderLink (a `isSelectedLink` p) b)
(IM.toList $ links p)
mapM_ (\(a,b) -> mapM_ (\(i,j) -> renderHandle (IdLinkHandle a i `elem` selection p) j) $ zip [0..] $ handles b)
(IM.toList $ links p)
mapM_ (\(_,b) -> renderLine b) (IM.toList $ pLines p)
let pos = screenToPlane p $ mouseXY s
renderMenu pos m
----------------------------------------------------------------------
-- Process the selection
----------------------------------------------------------------------
mapSelection :: (forall a . Moveable a => a -> a) -> Plane -> Plane
mapSelection f s = s
{ concepts = IM.mapWithKey fc $ concepts s
, links = IM.mapWithKey fh' $ IM.mapWithKey fl $ links s
}
where
fol = follow s
fc b n = if IdConcept b `elem` (selection s `addFollow` fol) then f n else n
fl b n = if IdLink b `elem` (selection s `addFollow` fol) then f n else n
fh' b n = mapHandles (fh b) n
fh b (i,n) = if IdLinkHandle b i `elem` (selection s `addFollow` fol) then f n else n
mapHandles :: ((Int, Handle) -> Handle) -> Link -> Link
mapHandles f (Link xy rgba from verb to hs w) =
let hs' = map f $ zip [0..] hs
in Link xy rgba from verb to hs' w
snapSelection :: Int -> Plane -> Plane
snapSelection t = mapSelection (\n -> setPosition (snapXY t $ position n) n)
snapSelection' :: Maybe Int -> Plane -> Plane
snapSelection' (Just t) = snapSelection t
snapSelection' Nothing = id
addFollow :: [Id] -> [(Id,Id)] -> [Id]
addFollow [] _ = []
addFollow sel fllw = sel ++ mapMaybe f fllw
where f (a,b) = if a `elem` sel then Just b else Nothing
-- | Similar to 'change' used in the C monad, but also provides the
-- plane-local mouse coordinates.
changeAtXY :: GN CState a -> (Point -> a -> a) -> C ()
changeAtXY gn f = do
mxy <- gets mouseXY
cp <- grab currentPlane
let xy = screenToPlane cp mxy
change gn $ f xy
newLine :: C Int
newLine = do
change currentPlane newLine'
gets $ pred . IM.size . pLines . getCurrentPlane
generate' :: Int -> Int -> FilePath -> (Int -> Int -> Render ()) -> IO ()
generate' width height fn draw =
withImageSurface FormatARGB32 width height $
\surf -> do
renderWith surf $ draw width height
surfaceWriteToPNG surf fn
example :: Int -> Int -> Render ()
example width height = do
setSourceRGBA 255 255 255 255
paint
identityMatrix
once
identityMatrix
translate w 0
once
identityMatrix
translate 0 h
once
identityMatrix
translate w h
once
where p (x, y) = do
setSourceRGBA 0 255 255 255
arc x y 1.5 0 (2 * pi)
fill
w = fromIntegral width
h = fromIntegral height
once = mapM_ p [(x, y) | x <- [0, 4..w - 5], y <- [0, 4..h - 5]]
|
noteed/concepted
|
Concepted/Core.hs
|
bsd-3-clause
| 11,154
| 0
| 29
| 2,959
| 4,273
| 2,091
| 2,182
| 297
| 16
|
{-# LANGUAGE BangPatterns #-}
module Merchandise.Merchandise
( runGame
) where
import Data.IORef
import System.Random
import qualified Merchandise.Types.Event as E
import qualified Merchandise.Types.Game as G
-- | Whatever arguments main needs to acquire.
type Params = [String]
runGame :: Params -> IO ()
runGame _ =
do
    -- acquire randomness
g <- newStdGen
-- infR is a lazy infinite list of Ints between 0 and 100000. Using this, we
-- can pass an Int at a time into our pure stuff, letting us use randomness
-- in the functionally pure Event processing by `mod`ing each Int into
-- whatever range makes sense in the context.
infR <- return $ randomRs (0, 100000 :: Int) g
print "placeholder"
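-- Illustration added below (not part of the original module): a minimal
-- sketch of the technique described above. Pure code can take the infinite
-- list of random Ints, `mod` the head into whatever range it needs, and
-- thread the tail onward for the next draw. The name 'rollFrom' and the
-- six-sided range are purely illustrative choices.
rollFrom :: [Int] -> (Int, [Int])
rollFrom (r:rest) = (r `mod` 6 + 1, rest) -- map into 1..6, keep the rest for later draws
rollFrom [] = error "rollFrom: expected an infinite list of random Ints"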
|
trobertson/merch
|
src/Merchandise/Merchandise.hs
|
bsd-3-clause
| 765
| 0
| 10
| 189
| 115
| 69
| 46
| 14
| 1
|
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.Raw.SGIX.FragmentLighting
-- Copyright : (c) Sven Panne 2015
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
-- The <https://www.opengl.org/registry/specs/SGIX/fragment_lighting.txt SGIX_fragment_lighting> extension.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.Raw.SGIX.FragmentLighting (
-- * Enums
gl_CURRENT_RASTER_NORMAL_SGIX,
gl_FRAGMENT_COLOR_MATERIAL_FACE_SGIX,
gl_FRAGMENT_COLOR_MATERIAL_PARAMETER_SGIX,
gl_FRAGMENT_COLOR_MATERIAL_SGIX,
gl_FRAGMENT_LIGHT0_SGIX,
gl_FRAGMENT_LIGHT1_SGIX,
gl_FRAGMENT_LIGHT2_SGIX,
gl_FRAGMENT_LIGHT3_SGIX,
gl_FRAGMENT_LIGHT4_SGIX,
gl_FRAGMENT_LIGHT5_SGIX,
gl_FRAGMENT_LIGHT6_SGIX,
gl_FRAGMENT_LIGHT7_SGIX,
gl_FRAGMENT_LIGHTING_SGIX,
gl_FRAGMENT_LIGHT_MODEL_AMBIENT_SGIX,
gl_FRAGMENT_LIGHT_MODEL_LOCAL_VIEWER_SGIX,
gl_FRAGMENT_LIGHT_MODEL_NORMAL_INTERPOLATION_SGIX,
gl_FRAGMENT_LIGHT_MODEL_TWO_SIDE_SGIX,
gl_LIGHT_ENV_MODE_SGIX,
gl_MAX_ACTIVE_LIGHTS_SGIX,
gl_MAX_FRAGMENT_LIGHTS_SGIX,
-- * Functions
glFragmentColorMaterialSGIX,
glFragmentLightModelfSGIX,
glFragmentLightModelfvSGIX,
glFragmentLightModeliSGIX,
glFragmentLightModelivSGIX,
glFragmentLightfSGIX,
glFragmentLightfvSGIX,
glFragmentLightiSGIX,
glFragmentLightivSGIX,
glFragmentMaterialfSGIX,
glFragmentMaterialfvSGIX,
glFragmentMaterialiSGIX,
glFragmentMaterialivSGIX,
glGetFragmentLightfvSGIX,
glGetFragmentLightivSGIX,
glGetFragmentMaterialfvSGIX,
glGetFragmentMaterialivSGIX,
glLightEnviSGIX
) where
import Graphics.Rendering.OpenGL.Raw.Tokens
import Graphics.Rendering.OpenGL.Raw.Functions
|
phaazon/OpenGLRaw
|
src/Graphics/Rendering/OpenGL/Raw/SGIX/FragmentLighting.hs
|
bsd-3-clause
| 1,866
| 0
| 4
| 196
| 157
| 112
| 45
| 41
| 0
|
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TemplateHaskell #-}
module Main where
import Control.THEff
import Control.THEff.Reader
mkEff "CharReader" ''Reader ''Char ''NoEff
mkEff "StrReader" ''Reader ''String ''CharReader
main:: IO ()
main = putStrLn $ runCharReader 'T' $ runStrReader "est" $ do
c <- ask
s <- ask
return $ c:s
|
KolodeznyDiver/THEff
|
samples/SampleReader.hs
|
bsd-3-clause
| 464
| 0
| 9
| 116
| 113
| 57
| 56
| 14
| 1
|
module Data.Astro.MoonTest
(
tests
)
where
import Test.Framework (testGroup)
import Test.Framework.Providers.HUnit
import Test.Framework.Providers.QuickCheck2 (testProperty)
import Test.HUnit
import Test.HUnit.Approx
import Test.QuickCheck
import Data.Astro.TypesTest (testDecimalDegrees)
import Data.Astro.CoordinateTest (testEC1)
import Data.Astro.Types (GeographicCoordinates(..))
import Data.Astro.Time.JulianDate (fromYMD)
import Data.Astro.Coordinate (EquatorialCoordinates1(..))
import Data.Astro.Moon.MoonDetails (MoonDetails(..), j2010MoonDetails, MoonDistanceUnits(..))
import Data.Astro.Moon
tests = [testGroup "moonPosition1" [
testEC1 "at 2003-09-01 00:00:00 UT"
0.000001
(EC1 (-11.525750) 14.211486)
(moonPosition1 j2010MoonDetails (fromYMD 2003 9 1))
]
, testGroup "moonPosition2" [
testEC1 "at 2003-09-01 00:00:00 UT"
0.000001
(EC1 (-12.174888) 14.178731)
(moonPosition2 j2010MoonDetails (MDU 1) (GeoC 51 0) 20 (fromYMD 2003 9 1))
]
, testGroup "moonDistance" [
testMDU "at 2016-08-27 00:00:00"
0.000001
(MDU 0.953425)
(moonDistance1 j2010MoonDetails (fromYMD 2016 8 26))
]
, testGroup "moonAngularSize" [
testDecimalDegrees "at 0.953425 MDU"
0.000001
0.543409
(moonAngularSize (MDU 0.953425))
, testDecimalDegrees "at 1 MDU"
0.000001
(mdBigTheta j2010MoonDetails)
(moonAngularSize (MDU 1))
]
, testGroup "moonHorizontalParallax" [
testDecimalDegrees "at 0.953425 MDU"
0.000001
0.997142
(moonHorizontalParallax (MDU 0.953425))
, testDecimalDegrees "at 1 MDU"
0.000001
(mdPi j2010MoonDetails)
(moonHorizontalParallax (MDU 1))
]
, testGroup "moonPhase" [
testCase "at 2016-08-01 00:00:00" $ assertApproxEqual ""
0.000001
0.042498
(moonPhase j2010MoonDetails (fromYMD 2016 8 1))
, testCase "at 2016-08-21 00:00:00" $ assertApproxEqual ""
0.000001
0.911818
(moonPhase j2010MoonDetails (fromYMD 2016 8 21))
]
, testGroup "moonBrightLimbPositionAngle" [
testDecimalDegrees "at 2016-08-28 00:00:00"
0.000001
82.479138
(moonBrightLimbPositionAngle (EC1 17.386905 4.897826) (EC1 10.329324 10.342354))
]
]
testMDU msg eps (MDU e) (MDU a) = testCase msg $ assertApproxEqual "" eps e a
|
Alexander-Ignatyev/astro
|
test/Data/Astro/MoonTest.hs
|
bsd-3-clause
| 2,853
| 0
| 13
| 991
| 616
| 332
| 284
| 64
| 1
|
module Roguelike.Dice
( Dice(..)
, roll
) where
import Control.Applicative
import Control.Monad (unless, replicateM)
import Text.Read
import Data.Text (Text)
import qualified Data.Text as T
import Data.Aeson
import Control.Monad.Random
data Dice = D !Int !Int
deriving (Eq)
infix 5 `D`
instance Show Dice where
show (n `D` s) = show n ++ " `D` " ++ show s
instance Read Dice where
readPrec = prec 5 $ do
n <- readPrec
l <- mapM (const lexP) [1..3]
unless (l == [Punc "`", Ident "D", Punc "`"]) $ fail "invalid Dice"
s <- readPrec
return $ n `D` s
instance ToJSON Dice where
toJSON (n `D` s) = String $ T.pack $ show n ++ "d" ++ show s
instance FromJSON Dice where
parseJSON = withText "dice" $ \(T.breakOn "d" -> (n', s')) ->
    maybe (fail "invalid dice") return $ do
n <- readMaybe $ T.unpack n'
s <- case T.unpack s' of
"" -> Nothing
_:t -> readMaybe t
return $ n `D` s
roll :: MonadRandom m => Dice -> m Int
roll (n `D` s) = sum <$> replicateM n (getRandomR (1, s))
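-- Usage sketch (added for illustration, not part of the original module):
-- combine the 'Read' instance above with 'roll'. The helper name 'rollExpr'
-- is an assumption; e.g. rollExpr "2 `D` 6" draws two six-sided dice and
-- sums them.
rollExpr :: MonadRandom m => String -> m Int
rollExpr = roll . read -- parse "n `D` s" with the Read instance, then roll it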
|
abbradar/roguelike
|
src/Roguelike/Dice.hs
|
bsd-3-clause
| 1,030
| 0
| 15
| 285
| 458
| 239
| 219
| -1
| -1
|
module Language.Haskell.Colorize where
import qualified Language.Haskell.Lexer as L
import System.Console.ANSI
-- | The different types of tokens that we recognize.
data Token
= Comment -- ^ Comment
| Reserved -- ^ Reserved word
| ReservedOp -- ^ Reserved operator
| Var -- ^ Variables
  | VarOp       -- ^ Variable operators
| Con -- ^ Constructors
| ConOp -- ^ Constructor operators
| Special -- ^ Special syntax (e.g., parens,brackets)
  | IntLit      -- ^ Integer literals
| FloatLit -- ^ Floating point literals
| CharLit -- ^ Character literals
| StringLit -- ^ String literals
-- | The type of functions that specify how to render a value.
type Style = Token -> String -> ShowS
render :: Style -> String -> ShowS
render how prog k = foldr step k (L.lexerPass0 prog)
where
step (y,(_,x)) =
case y of
L.Varid -> how Var x
L.Conid -> how Con x
L.Varsym -> how VarOp x
L.Consym -> how ConOp x
L.Reservedid
| x == "_" -> how Var x
| otherwise -> how Reserved x
L.Reservedop -> how ReservedOp x
L.Special -> how Special x
L.IntLit -> how IntLit x
L.FloatLit -> how FloatLit x
L.CharLit -> how CharLit x
L.StringLit -> how StringLit x
L.Qvarid -> how Var x
L.Qconid -> how Con x
L.Qvarsym -> how VarOp x
L.Qconsym -> how ConOp x
L.NestedCommentStart -> how Comment x
L.NestedComment -> how Comment x
L.LiterateComment -> how Comment x
L.Commentstart -> how Comment x
L.Comment -> how Comment x
_ -> (x ++)
-- | Annotates tokens with ANSI escape sequences, suitable for a dark terminal
ansiDark :: Style
ansiDark t = case t of
Comment -> bright Cyan
Reserved -> bright Green
ReservedOp -> bright Yellow
VarOp -> bright Yellow
ConOp -> bright Yellow
IntLit -> bright Magenta
FloatLit -> bright Magenta
CharLit -> bright Magenta
StringLit -> bright Magenta
_ -> (++)
where bright x xs k = setSGRCode [ SetConsoleIntensity BoldIntensity
, SetColor Foreground Vivid x
]
++ xs ++ setSGRCode [Reset] ++ k
-- | Annotates tokens with ANSI escape sequences, suitable for a light terminal
ansiLight :: Style
ansiLight t = case t of
Comment -> dark Blue
Reserved -> dark Green
ReservedOp -> dark Red
VarOp -> dark Red
ConOp -> dark Red
IntLit -> dark Magenta
FloatLit -> dark Magenta
CharLit -> dark Magenta
StringLit -> dark Magenta
_ -> (++)
where dark x xs k = setSGRCode [ SetConsoleIntensity FaintIntensity
, SetColor Foreground Dull x
]
++ xs ++ setSGRCode [Reset] ++ k
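-- Usage sketch (added for illustration, not part of the original module):
-- 'render' builds a ShowS, so applying the result to "" yields a plain
-- String with the ANSI escapes embedded. The helper name 'colorizeDemo'
-- is an assumption.
colorizeDemo :: String -> IO ()
colorizeDemo src = putStrLn (render ansiDark src "")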
{-
-- | Annotates tokens with HTML tags.
html :: Style
html = Style
{ comment = tag "comment"
    , reserved   = tag "reserved"
    , reservedOp = tag "reservedOp"
, var = tag "var"
, varOp = tag "varOp"
, con = tag "con"
, conOp = tag "conOp"
, intLit = tag "intLit"
, floatLit = tag "floatLit"
, charLit = tag "charLit"
, stringLit = tag "stringLit"
, special = tag "special"
, prefix = showString "<html><head>"
. showString css
. showString "</head><body><pre>"
, postfix = showString "</pre></body></html>"
}
where tag x cs = "<span class='" ++ x ++ "'>"
++ concatMap esc cs ++ "</span>"
esc c = case c of
            '<' -> "&lt;"
            '>' -> "&gt;"
            '&' -> "&amp;"
_ -> [c]
css = unlines
[ "<style type='text/css' rel='stylesheet'>"
, ".comment { color: blue }"
, "</style>"
]
--------------------------------------------------------------------------------
getOptions :: IO Options
getOptions =
do (fs,non_opt,errs) <- getOpt Permute flags `fmap` getArgs
case (non_opt,errs) of
([],[]) -> return (foldr ($) defaultOptions fs)
_ -> mapM_ (hPutStrLn stderr) errs >> showUsage
-- | Print usage info and quit
showUsage :: IO a
showUsage = do hPutStrLn stderr (usageInfo "Available options:" flags)
exitFailure
flags :: [ OptDescr (Options -> Options) ]
flags = [ Option [] ["html"] (NoArg $ \o -> o { optStyle = HTML })
"Generate HTML output"
, Option [] ["ansi"] (NoArg $ \o -> o { optStyle = ANSI })
"Generate ANSI output (default)"
, Option ['h'] ["help"] (NoArg $ \o -> o { optHelp = True })
"Display this help."
]
data OptStyle = ANSI | HTML
data Options = Options
{ optStyle :: OptStyle
, optHelp :: Bool
}
defaultOptions :: Options
defaultOptions = Options
{ optStyle = ANSI
, optHelp = False
}
-}
|
yav/colorize-haskell
|
Language/Haskell/Colorize.hs
|
bsd-3-clause
| 5,375
| 0
| 13
| 2,068
| 767
| 388
| 379
| 74
| 21
|
module Main where
import Language.CIL
--import Log
main :: IO ()
main = do
--debugParseC "test.c"
--print f
parseC "test.c" >>= print
|
tomahawkins/cil
|
attic/Test.hs
|
bsd-3-clause
| 144
| 0
| 8
| 32
| 37
| 21
| 16
| 5
| 1
|
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DefaultSignatures #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE MultiWayIf #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ViewPatterns #-}
-- | The Config type.
module Stack.Types.Config
(
-- * Main configuration types and classes
-- ** HasPlatform & HasStackRoot
HasPlatform(..)
,PlatformVariant(..)
-- ** Runner
,HasRunner(..)
,Runner(..)
,ColorWhen(..)
,terminalL
,reExecL
-- ** Config & HasConfig
,Config(..)
,HasConfig(..)
,askLatestSnapshotUrl
,explicitSetupDeps
,configProjectRoot
-- ** BuildConfig & HasBuildConfig
,BuildConfig(..)
,ProjectPackage(..)
,DepPackage(..)
,ppRoot
,ppVersion
,ppComponents
,ppGPD
,stackYamlL
,projectRootL
,HasBuildConfig(..)
-- ** Storage databases
,UserStorage(..)
,ProjectStorage(..)
-- ** GHCVariant & HasGHCVariant
,GHCVariant(..)
,ghcVariantName
,ghcVariantSuffix
,parseGHCVariant
,HasGHCVariant(..)
,snapshotsDir
-- ** EnvConfig & HasEnvConfig
,EnvConfig(..)
,HasSourceMap(..)
,HasEnvConfig(..)
,getCompilerPath
-- * Details
-- ** ApplyGhcOptions
,ApplyGhcOptions(..)
-- ** CabalConfigKey
,CabalConfigKey(..)
-- ** ConfigException
,HpackExecutable(..)
,ConfigException(..)
-- ** ConfigMonoid
,ConfigMonoid(..)
,configMonoidInstallGHCName
,configMonoidSystemGHCName
,parseConfigMonoid
-- ** DumpLogs
,DumpLogs(..)
-- ** EnvSettings
,EnvSettings(..)
,minimalEnvSettings
,defaultEnvSettings
,plainEnvSettings
-- ** GlobalOpts & GlobalOptsMonoid
,GlobalOpts(..)
,GlobalOptsMonoid(..)
,StackYamlLoc(..)
,stackYamlLocL
,LockFileBehavior(..)
,readLockFileBehavior
,lockFileBehaviorL
,defaultLogLevel
-- ** Project & ProjectAndConfigMonoid
,Project(..)
,ProjectConfig(..)
,Curator(..)
,ProjectAndConfigMonoid(..)
,parseProjectAndConfigMonoid
-- ** PvpBounds
,PvpBounds(..)
,PvpBoundsType(..)
,parsePvpBounds
-- ** ColorWhen
,readColorWhen
-- ** Styles
,readStyles
-- ** SCM
,SCM(..)
-- * Paths
,bindirSuffix
,GlobalInfoSource(..)
,getProjectWorkDir
,docDirSuffix
,extraBinDirs
,hpcReportDir
,installationRootDeps
,installationRootLocal
,bindirCompilerTools
,hoogleRoot
,hoogleDatabasePath
,packageDatabaseDeps
,packageDatabaseExtra
,packageDatabaseLocal
,platformOnlyRelDir
,platformGhcRelDir
,platformGhcVerOnlyRelDir
,useShaPathOnWindows
,shaPath
,shaPathForBytes
,workDirL
-- * Command-specific types
-- ** Eval
,EvalOpts(..)
-- ** Exec
,ExecOpts(..)
,SpecialExecCmd(..)
,ExecOptsExtra(..)
-- ** Setup
,DownloadInfo(..)
,VersionedDownloadInfo(..)
,GHCDownloadInfo(..)
,SetupInfo(..)
-- ** Docker entrypoint
,DockerEntrypoint(..)
,DockerUser(..)
,module X
-- * Lens helpers
,wantedCompilerVersionL
,actualCompilerVersionL
,HasCompiler(..)
,DumpPackage(..)
,CompilerPaths(..)
,GhcPkgExe(..)
,getGhcPkgExe
,cpWhich
,ExtraDirs(..)
,buildOptsL
,globalOptsL
,buildOptsInstallExesL
,buildOptsMonoidHaddockL
,buildOptsMonoidTestsL
,buildOptsMonoidBenchmarksL
,buildOptsMonoidInstallExesL
,buildOptsHaddockL
,globalOptsBuildOptsMonoidL
,stackRootL
,cabalVersionL
,whichCompilerL
,envOverrideSettingsL
,shouldForceGhcColorFlag
,appropriateGhcColorFlag
-- * Helper logging functions
,prettyStackDevL
-- * Lens reexport
,view
,to
) where
import Control.Monad.Writer (tell)
import Crypto.Hash (hashWith, SHA1(..))
import Stack.Prelude
import Pantry.Internal.AesonExtended
(ToJSON, toJSON, FromJSON, FromJSONKey (..), parseJSON, withText, object,
(.=), (..:), (...:), (..:?), (..!=), Value(Bool),
withObjectWarnings, WarningParser, Object, jsonSubWarnings,
jsonSubWarningsT, jsonSubWarningsTT, WithJSONWarnings(..),
FromJSONKeyFunction (FromJSONKeyTextParser))
import Data.Attoparsec.Args (parseArgs, EscapingMode (Escaping))
import qualified Data.ByteArray.Encoding as Mem (convertToBase, Base(Base16))
import qualified Data.ByteString.Char8 as S8
import Data.Coerce (coerce)
import Data.List (stripPrefix)
import qualified Data.List.NonEmpty as NonEmpty
import qualified Data.Map as Map
import qualified Data.Map.Strict as M
import qualified Data.Monoid as Monoid
import Data.Monoid.Map (MonoidMap(..))
import qualified Data.Set as Set
import qualified Data.Text as T
import Data.Yaml (ParseException)
import qualified Data.Yaml as Yaml
import qualified Distribution.License as C
import Distribution.ModuleName (ModuleName)
import Distribution.PackageDescription (GenericPackageDescription)
import qualified Distribution.PackageDescription as C
import Distribution.System (Platform, Arch)
import qualified Distribution.Text
import qualified Distribution.Types.UnqualComponentName as C
import Distribution.Version (anyVersion, mkVersion', mkVersion)
import Generics.Deriving.Monoid (memptydefault, mappenddefault)
import Lens.Micro
import Options.Applicative (ReadM)
import qualified Options.Applicative as OA
import qualified Options.Applicative.Types as OA
import Pantry.Internal (Storage)
import Path
import qualified Paths_stack as Meta
import qualified RIO.List as List
import RIO.PrettyPrint (HasTerm (..), StyleDoc, prettyWarnL, prettyDebugL)
import RIO.PrettyPrint.StylesUpdate (StylesUpdate,
parseStylesUpdateFromString, HasStylesUpdate (..))
import Stack.Constants
import Stack.Types.Compiler
import Stack.Types.CompilerBuild
import Stack.Types.Docker
import Stack.Types.GhcPkgId
import Stack.Types.NamedComponent
import Stack.Types.Nix
import Stack.Types.Resolver
import Stack.Types.SourceMap
import Stack.Types.TemplateName
import Stack.Types.Version
import qualified System.FilePath as FilePath
import System.PosixCompat.Types (UserID, GroupID, FileMode)
import RIO.Process (ProcessContext, HasProcessContext (..))
import Casa.Client (CasaRepoPrefix)
-- Re-exports
import Stack.Types.Config.Build as X
-- | The base environment that almost everything in Stack runs in,
-- based off of parsing command line options in 'GlobalOpts'. Provides
-- logging and process execution.
data Runner = Runner
{ runnerGlobalOpts :: !GlobalOpts
, runnerUseColor :: !Bool
, runnerLogFunc :: !LogFunc
, runnerTermWidth :: !Int
, runnerProcessContext :: !ProcessContext
}
data ColorWhen = ColorNever | ColorAlways | ColorAuto
deriving (Eq, Show, Generic)
instance FromJSON ColorWhen where
parseJSON v = do
s <- parseJSON v
case s of
"never" -> return ColorNever
"always" -> return ColorAlways
"auto" -> return ColorAuto
_ -> fail ("Unknown color use: " <> s <> ". Expected values of " <>
"option are 'never', 'always', or 'auto'.")
-- | The top-level Stack configuration.
data Config =
Config {configWorkDir :: !(Path Rel Dir)
           -- ^ This allows overriding the .stack-work directory
,configUserConfigPath :: !(Path Abs File)
-- ^ Path to user configuration file (usually ~/.stack/config.yaml)
,configBuild :: !BuildOpts
-- ^ Build configuration
,configDocker :: !DockerOpts
-- ^ Docker configuration
,configNix :: !NixOpts
           -- ^ Execution environment (e.g. nix-shell) configuration
,configProcessContextSettings :: !(EnvSettings -> IO ProcessContext)
-- ^ Environment variables to be passed to external tools
,configLocalProgramsBase :: !(Path Abs Dir)
-- ^ Non-platform-specific path containing local installations
,configLocalPrograms :: !(Path Abs Dir)
-- ^ Path containing local installations (mainly GHC)
,configHideTHLoading :: !Bool
-- ^ Hide the Template Haskell "Loading package ..." messages from the
-- console
,configPrefixTimestamps :: !Bool
-- ^ Prefix build output with timestamps for each line.
,configPlatform :: !Platform
-- ^ The platform we're building for, used in many directory names
,configPlatformVariant :: !PlatformVariant
-- ^ Variant of the platform, also used in directory names
,configGHCVariant :: !(Maybe GHCVariant)
-- ^ The variant of GHC requested by the user.
,configGHCBuild :: !(Maybe CompilerBuild)
-- ^ Override build of the compiler distribution (e.g. standard, gmp4, tinfo6)
,configLatestSnapshot :: !Text
-- ^ URL of a JSON file providing the latest LTS and Nightly snapshots.
,configSystemGHC :: !Bool
-- ^ Should we use the system-installed GHC (on the PATH) if
-- available? Can be overridden by command line options.
,configInstallGHC :: !Bool
-- ^ Should we automatically install GHC if missing or the wrong
-- version is available? Can be overridden by command line options.
,configSkipGHCCheck :: !Bool
-- ^ Don't bother checking the GHC version or architecture.
,configSkipMsys :: !Bool
-- ^ On Windows: don't use a sandboxed MSYS
,configCompilerCheck :: !VersionCheck
-- ^ Specifies which versions of the compiler are acceptable.
,configCompilerRepository :: !CompilerRepository
-- ^ Specifies the repository containing the compiler sources
,configLocalBin :: !(Path Abs Dir)
-- ^ Directory we should install executables into
,configRequireStackVersion :: !VersionRange
-- ^ Require a version of stack within this range.
,configJobs :: !Int
-- ^ How many concurrent jobs to run, defaults to number of capabilities
,configOverrideGccPath :: !(Maybe (Path Abs File))
-- ^ Optional gcc override path
,configExtraIncludeDirs :: ![FilePath]
-- ^ --extra-include-dirs arguments
,configExtraLibDirs :: ![FilePath]
-- ^ --extra-lib-dirs arguments
,configConcurrentTests :: !Bool
-- ^ Run test suites concurrently
,configTemplateParams :: !(Map Text Text)
-- ^ Parameters for templates.
,configScmInit :: !(Maybe SCM)
-- ^ Initialize SCM (e.g. git) when creating new projects.
,configGhcOptionsByName :: !(Map PackageName [Text])
-- ^ Additional GHC options to apply to specific packages.
,configGhcOptionsByCat :: !(Map ApplyGhcOptions [Text])
-- ^ Additional GHC options to apply to categories of packages
,configCabalConfigOpts :: !(Map CabalConfigKey [Text])
-- ^ Additional options to be passed to ./Setup.hs configure
,configSetupInfoLocations :: ![String]
-- ^ URLs or paths to stack-setup.yaml files, for finding tools.
-- If none present, the default setup-info is used.
,configSetupInfoInline :: !SetupInfo
-- ^ Additional SetupInfo to use to find tools.
,configPvpBounds :: !PvpBounds
-- ^ How PVP upper bounds should be added to packages
,configModifyCodePage :: !Bool
-- ^ Force the code page to UTF-8 on Windows
,configExplicitSetupDeps :: !(Map (Maybe PackageName) Bool)
-- ^ See 'explicitSetupDeps'. 'Nothing' provides the default value.
,configRebuildGhcOptions :: !Bool
-- ^ Rebuild on GHC options changes
,configApplyGhcOptions :: !ApplyGhcOptions
           -- ^ Which packages do ghc-options on the command line apply to?
,configAllowNewer :: !Bool
-- ^ Ignore version ranges in .cabal files. Funny naming chosen to
-- match cabal.
,configDefaultTemplate :: !(Maybe TemplateName)
-- ^ The default template to use when none is specified.
-- (If Nothing, the default default is used.)
,configAllowDifferentUser :: !Bool
-- ^ Allow users other than the stack root owner to use the stack
-- installation.
,configDumpLogs :: !DumpLogs
-- ^ Dump logs of local non-dependencies when doing a build.
,configProject :: !(ProjectConfig (Project, Path Abs File))
-- ^ Project information and stack.yaml file location
,configAllowLocals :: !Bool
-- ^ Are we allowed to build local packages? The script
-- command disallows this.
,configSaveHackageCreds :: !Bool
-- ^ Should we save Hackage credentials to a file?
,configHackageBaseUrl :: !Text
-- ^ Hackage base URL used when uploading packages
,configRunner :: !Runner
,configPantryConfig :: !PantryConfig
,configStackRoot :: !(Path Abs Dir)
,configResolver :: !(Maybe AbstractResolver)
-- ^ Any resolver override from the command line
,configUserStorage :: !UserStorage
-- ^ Database connection pool for user Stack database
,configHideSourcePaths :: !Bool
-- ^ Enable GHC hiding source paths?
,configRecommendUpgrade :: !Bool
-- ^ Recommend a Stack upgrade?
,configStackDeveloperMode :: !Bool
-- ^ Turn on Stack developer mode for additional messages?
}
-- | A bit of type safety to ensure we're talking to the right database.
newtype UserStorage = UserStorage
{ unUserStorage :: Storage
}
-- | A bit of type safety to ensure we're talking to the right database.
newtype ProjectStorage = ProjectStorage
{ unProjectStorage :: Storage
}
-- | The project root directory, if in a project.
configProjectRoot :: Config -> Maybe (Path Abs Dir)
configProjectRoot c =
case configProject c of
PCProject (_, fp) -> Just $ parent fp
PCGlobalProject -> Nothing
PCNoProject _deps -> Nothing
-- | Which packages do configure opts apply to?
data CabalConfigKey
= CCKTargets -- ^ See AGOTargets
| CCKLocals -- ^ See AGOLocals
| CCKEverything -- ^ See AGOEverything
| CCKPackage !PackageName -- ^ A specific package
deriving (Show, Read, Eq, Ord)
instance FromJSON CabalConfigKey where
parseJSON = withText "CabalConfigKey" parseCabalConfigKey
instance FromJSONKey CabalConfigKey where
fromJSONKey = FromJSONKeyTextParser parseCabalConfigKey
parseCabalConfigKey :: (Monad m, MonadFail m) => Text -> m CabalConfigKey
parseCabalConfigKey "$targets" = pure CCKTargets
parseCabalConfigKey "$locals" = pure CCKLocals
parseCabalConfigKey "$everything" = pure CCKEverything
parseCabalConfigKey name =
case parsePackageName $ T.unpack name of
Nothing -> fail $ "Invalid CabalConfigKey: " ++ show name
Just x -> pure $ CCKPackage x
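-- Illustrative example (added here, not part of the original module): these
-- keys come from the @configure-options@ mapping in a configuration file,
-- e.g.
--
-- > configure-options:
-- >   $everything:
-- >     - --disable-library-stripping
-- >   my-package:
-- >     - --enable-profiling
--
-- "$targets", "$locals" and "$everything" parse to 'CCKTargets', 'CCKLocals'
-- and 'CCKEverything'; any other key is treated as a package name. The
-- package name and option values shown are hypothetical.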
-- | Which packages do ghc-options on the command line apply to?
data ApplyGhcOptions = AGOTargets -- ^ all local targets
| AGOLocals -- ^ all local packages, even non-targets
| AGOEverything -- ^ every package
deriving (Show, Read, Eq, Ord, Enum, Bounded)
instance FromJSON ApplyGhcOptions where
parseJSON = withText "ApplyGhcOptions" $ \t ->
case t of
"targets" -> return AGOTargets
"locals" -> return AGOLocals
"everything" -> return AGOEverything
_ -> fail $ "Invalid ApplyGhcOptions: " ++ show t
-- | Which build log files to dump
data DumpLogs
= DumpNoLogs -- ^ don't dump any logfiles
| DumpWarningLogs -- ^ dump logfiles containing warnings
| DumpAllLogs -- ^ dump all logfiles
deriving (Show, Read, Eq, Ord, Enum, Bounded)
instance FromJSON DumpLogs where
parseJSON (Bool True) = return DumpAllLogs
parseJSON (Bool False) = return DumpNoLogs
parseJSON v =
withText
"DumpLogs"
(\t ->
if | t == "none" -> return DumpNoLogs
| t == "warning" -> return DumpWarningLogs
| t == "all" -> return DumpAllLogs
| otherwise -> fail ("Invalid DumpLogs: " ++ show t))
v
-- | Controls which version of the environment is used
data EnvSettings = EnvSettings
{ esIncludeLocals :: !Bool
-- ^ include local project bin directory, GHC_PACKAGE_PATH, etc
, esIncludeGhcPackagePath :: !Bool
-- ^ include the GHC_PACKAGE_PATH variable
, esStackExe :: !Bool
-- ^ set the STACK_EXE variable to the current executable name
, esLocaleUtf8 :: !Bool
-- ^ set the locale to C.UTF-8
, esKeepGhcRts :: !Bool
-- ^ if True, keep GHCRTS variable in environment
}
deriving (Show, Eq, Ord)
data ExecOpts = ExecOpts
{ eoCmd :: !SpecialExecCmd
, eoArgs :: ![String]
, eoExtra :: !ExecOptsExtra
} deriving (Show)
data SpecialExecCmd
= ExecCmd String
| ExecRun
| ExecGhc
| ExecRunGhc
deriving (Show, Eq)
data ExecOptsExtra = ExecOptsExtra
{ eoEnvSettings :: !EnvSettings
, eoPackages :: ![String]
, eoRtsOptions :: ![String]
, eoCwd :: !(Maybe FilePath)
}
deriving (Show)
data EvalOpts = EvalOpts
{ evalArg :: !String
, evalExtra :: !ExecOptsExtra
} deriving (Show)
-- | Parsed global command-line options.
data GlobalOpts = GlobalOpts
{ globalReExecVersion :: !(Maybe String) -- ^ Expected re-exec in container version
, globalDockerEntrypoint :: !(Maybe DockerEntrypoint)
-- ^ Data used when stack is acting as a Docker entrypoint (internal use only)
, globalLogLevel :: !LogLevel -- ^ Log level
, globalTimeInLog :: !Bool -- ^ Whether to include timings in logs.
, globalConfigMonoid :: !ConfigMonoid -- ^ Config monoid, for passing into 'loadConfig'
, globalResolver :: !(Maybe AbstractResolver) -- ^ Resolver override
, globalCompiler :: !(Maybe WantedCompiler) -- ^ Compiler override
, globalTerminal :: !Bool -- ^ We're in a terminal?
, globalStylesUpdate :: !StylesUpdate -- ^ SGR (Ansi) codes for styles
, globalTermWidth :: !(Maybe Int) -- ^ Terminal width override
, globalStackYaml :: !StackYamlLoc -- ^ Override project stack.yaml
, globalLockFileBehavior :: !LockFileBehavior
} deriving (Show)
-- | Location for the project's stack.yaml file.
data StackYamlLoc
= SYLDefault
-- ^ Use the standard parent-directory-checking logic
| SYLOverride !(Path Abs File)
-- ^ Use a specific stack.yaml file provided
| SYLNoProject ![PackageIdentifierRevision]
-- ^ Do not load up a project, just user configuration. Include
-- the given extra dependencies with the resolver.
| SYLGlobalProject
-- ^ Do not look for a project configuration, and use the implicit global.
deriving Show
stackYamlLocL :: HasRunner env => Lens' env StackYamlLoc
stackYamlLocL = globalOptsL.lens globalStackYaml (\x y -> x { globalStackYaml = y })
-- | How to interact with lock files
data LockFileBehavior
= LFBReadWrite
-- ^ Read and write lock files
| LFBReadOnly
-- ^ Read lock files, but do not write them
| LFBIgnore
-- ^ Entirely ignore lock files
| LFBErrorOnWrite
-- ^ Error out on trying to write a lock file. This can be used to
-- ensure that lock files in a repository already ensure
-- reproducible builds.
deriving (Show, Enum, Bounded)
lockFileBehaviorL :: HasRunner env => SimpleGetter env LockFileBehavior
lockFileBehaviorL = globalOptsL.to globalLockFileBehavior
-- | Parser for 'LockFileBehavior'
readLockFileBehavior :: ReadM LockFileBehavior
readLockFileBehavior = do
s <- OA.readerAsk
case Map.lookup s m of
Just x -> pure x
Nothing -> OA.readerError $ "Invalid lock file behavior, valid options: " ++
List.intercalate ", " (Map.keys m)
where
m = Map.fromList $ map (\x -> (render x, x)) [minBound..maxBound]
render LFBReadWrite = "read-write"
render LFBReadOnly = "read-only"
render LFBIgnore = "ignore"
render LFBErrorOnWrite = "error-on-write"
-- | Project configuration information. Not every run of Stack has a
-- true local project; see constructors below.
data ProjectConfig a
= PCProject a
-- ^ Normal run: we want a project, and have one. This comes from
-- either 'SYLDefault' or 'SYLOverride'.
| PCGlobalProject
-- ^ No project was found when using 'SYLDefault'. Instead, use
-- the implicit global.
| PCNoProject ![PackageIdentifierRevision]
-- ^ Use a no project run. This comes from 'SYLNoProject'.
-- | Parsed global command-line options monoid.
data GlobalOptsMonoid = GlobalOptsMonoid
{ globalMonoidReExecVersion :: !(First String) -- ^ Expected re-exec in container version
, globalMonoidDockerEntrypoint :: !(First DockerEntrypoint)
-- ^ Data used when stack is acting as a Docker entrypoint (internal use only)
, globalMonoidLogLevel :: !(First LogLevel) -- ^ Log level
, globalMonoidTimeInLog :: !FirstTrue -- ^ Whether to include timings in logs.
, globalMonoidConfigMonoid :: !ConfigMonoid -- ^ Config monoid, for passing into 'loadConfig'
, globalMonoidResolver :: !(First (Unresolved AbstractResolver)) -- ^ Resolver override
, globalMonoidResolverRoot :: !(First FilePath) -- ^ root directory for resolver relative path
, globalMonoidCompiler :: !(First WantedCompiler) -- ^ Compiler override
, globalMonoidTerminal :: !(First Bool) -- ^ We're in a terminal?
, globalMonoidStyles :: !StylesUpdate -- ^ Stack's output styles
, globalMonoidTermWidth :: !(First Int) -- ^ Terminal width override
, globalMonoidStackYaml :: !(First FilePath) -- ^ Override project stack.yaml
, globalMonoidLockFileBehavior :: !(First LockFileBehavior) -- ^ See 'globalLockFileBehavior'
} deriving Generic
instance Semigroup GlobalOptsMonoid where
(<>) = mappenddefault
instance Monoid GlobalOptsMonoid where
mempty = memptydefault
mappend = (<>)
-- | Default logging level should be something useful but not crazy.
defaultLogLevel :: LogLevel
defaultLogLevel = LevelInfo
readColorWhen :: ReadM ColorWhen
readColorWhen = do
s <- OA.readerAsk
case s of
"never" -> return ColorNever
"always" -> return ColorAlways
"auto" -> return ColorAuto
_ -> OA.readerError "Expected values of color option are 'never', 'always', or 'auto'."
readStyles :: ReadM StylesUpdate
readStyles = parseStylesUpdateFromString <$> OA.readerAsk
-- | A superset of 'Config' adding information on how to build code. The reason
-- for this breakdown is because we will need some of the information from
-- 'Config' in order to determine the values here.
--
-- These are the components which know nothing about local configuration.
data BuildConfig = BuildConfig
{ bcConfig :: !Config
, bcSMWanted :: !SMWanted
, bcExtraPackageDBs :: ![Path Abs Dir]
-- ^ Extra package databases
, bcStackYaml :: !(Path Abs File)
-- ^ Location of the stack.yaml file.
--
-- Note: if the STACK_YAML environment variable is used, this may be
-- different from projectRootL </> "stack.yaml" if a different file
-- name is used.
, bcProjectStorage :: !ProjectStorage
-- ^ Database connection pool for project Stack database
, bcCurator :: !(Maybe Curator)
}
stackYamlL :: HasBuildConfig env => Lens' env (Path Abs File)
stackYamlL = buildConfigL.lens bcStackYaml (\x y -> x { bcStackYaml = y })
-- | Directory containing the project's stack.yaml file
projectRootL :: HasBuildConfig env => Getting r env (Path Abs Dir)
projectRootL = stackYamlL.to parent
-- | Configuration after the environment has been setup.
data EnvConfig = EnvConfig
{envConfigBuildConfig :: !BuildConfig
,envConfigBuildOptsCLI :: !BuildOptsCLI
,envConfigSourceMap :: !SourceMap
,envConfigSourceMapHash :: !SourceMapHash
,envConfigCompilerPaths :: !CompilerPaths
}
ppGPD :: MonadIO m => ProjectPackage -> m GenericPackageDescription
ppGPD = liftIO . cpGPD . ppCommon
-- | Root directory for the given 'ProjectPackage'
ppRoot :: ProjectPackage -> Path Abs Dir
ppRoot = parent . ppCabalFP
-- | All components available in the given 'ProjectPackage'
ppComponents :: MonadIO m => ProjectPackage -> m (Set NamedComponent)
ppComponents pp = do
gpd <- ppGPD pp
pure $ Set.fromList $ concat
[ maybe [] (const [CLib]) (C.condLibrary gpd)
, go CExe (fst <$> C.condExecutables gpd)
, go CTest (fst <$> C.condTestSuites gpd)
, go CBench (fst <$> C.condBenchmarks gpd)
]
where
go :: (T.Text -> NamedComponent)
-> [C.UnqualComponentName]
-> [NamedComponent]
go wrapper = map (wrapper . T.pack . C.unUnqualComponentName)
-- | Version for the given 'ProjectPackage'
ppVersion :: MonadIO m => ProjectPackage -> m Version
ppVersion = fmap gpdVersion . ppGPD
-- | A project is a collection of packages. We can have multiple stack.yaml
-- files, but only one of them may contain project information.
data Project = Project
{ projectUserMsg :: !(Maybe String)
-- ^ A warning message to display to the user when the auto generated
-- config may have issues.
, projectPackages :: ![RelFilePath]
-- ^ Packages which are actually part of the project (as opposed
-- to dependencies).
, projectDependencies :: ![RawPackageLocation]
-- ^ Dependencies defined within the stack.yaml file, to be
-- applied on top of the snapshot.
, projectFlags :: !(Map PackageName (Map FlagName Bool))
-- ^ Flags to be applied on top of the snapshot flags.
, projectResolver :: !RawSnapshotLocation
-- ^ How we resolve which @Snapshot@ to use
, projectCompiler :: !(Maybe WantedCompiler)
-- ^ Override the compiler in 'projectResolver'
, projectExtraPackageDBs :: ![FilePath]
, projectCurator :: !(Maybe Curator)
-- ^ Extra configuration intended exclusively for usage by the
-- curator tool. In other words, this is /not/ part of the
-- documented and exposed Stack API. SUBJECT TO CHANGE.
, projectDropPackages :: !(Set PackageName)
-- ^ Packages to drop from the 'projectResolver'.
}
deriving Show
instance ToJSON Project where
-- Expanding the constructor fully to ensure we don't miss any fields.
toJSON (Project userMsg packages extraDeps flags resolver mcompiler extraPackageDBs mcurator drops) = object $ concat
[ maybe [] (\cv -> ["compiler" .= cv]) mcompiler
, maybe [] (\msg -> ["user-message" .= msg]) userMsg
, if null extraPackageDBs then [] else ["extra-package-dbs" .= extraPackageDBs]
, if null extraDeps then [] else ["extra-deps" .= extraDeps]
, if Map.null flags then [] else ["flags" .= fmap toCabalStringMap (toCabalStringMap flags)]
, ["packages" .= packages]
, ["resolver" .= resolver]
, maybe [] (\c -> ["curator" .= c]) mcurator
, if Set.null drops then [] else ["drop-packages" .= Set.map CabalString drops]
]
-- | Extra configuration intended exclusively for usage by the
-- curator tool. In other words, this is /not/ part of the
-- documented and exposed Stack API. SUBJECT TO CHANGE.
data Curator = Curator
{ curatorSkipTest :: !(Set PackageName)
, curatorExpectTestFailure :: !(Set PackageName)
, curatorSkipBenchmark :: !(Set PackageName)
, curatorExpectBenchmarkFailure :: !(Set PackageName)
, curatorSkipHaddock :: !(Set PackageName)
, curatorExpectHaddockFailure :: !(Set PackageName)
}
deriving Show
instance ToJSON Curator where
toJSON c = object
[ "skip-test" .= Set.map CabalString (curatorSkipTest c)
, "expect-test-failure" .= Set.map CabalString (curatorExpectTestFailure c)
, "skip-bench" .= Set.map CabalString (curatorSkipBenchmark c)
, "expect-benchmark-failure" .= Set.map CabalString (curatorExpectTestFailure c)
, "skip-haddock" .= Set.map CabalString (curatorSkipHaddock c)
, "expect-test-failure" .= Set.map CabalString (curatorExpectHaddockFailure c)
]
instance FromJSON (WithJSONWarnings Curator) where
parseJSON = withObjectWarnings "Curator" $ \o -> Curator
<$> fmap (Set.map unCabalString) (o ..:? "skip-test" ..!= mempty)
<*> fmap (Set.map unCabalString) (o ..:? "expect-test-failure" ..!= mempty)
<*> fmap (Set.map unCabalString) (o ..:? "skip-bench" ..!= mempty)
<*> fmap (Set.map unCabalString) (o ..:? "expect-benchmark-failure" ..!= mempty)
<*> fmap (Set.map unCabalString) (o ..:? "skip-haddock" ..!= mempty)
<*> fmap (Set.map unCabalString) (o ..:? "expect-haddock-failure" ..!= mempty)
-- An uninterpreted representation of configuration options.
-- Configurations may be "cascaded" using mappend (left-biased).
data ConfigMonoid =
ConfigMonoid
{ configMonoidStackRoot :: !(First (Path Abs Dir))
-- ^ See: 'clStackRoot'
, configMonoidWorkDir :: !(First (Path Rel Dir))
-- ^ See: 'configWorkDir'.
, configMonoidBuildOpts :: !BuildOptsMonoid
-- ^ build options.
, configMonoidDockerOpts :: !DockerOptsMonoid
-- ^ Docker options.
, configMonoidNixOpts :: !NixOptsMonoid
-- ^ Options for the execution environment (nix-shell or container)
, configMonoidConnectionCount :: !(First Int)
-- ^ See: 'configConnectionCount'
, configMonoidHideTHLoading :: !FirstTrue
-- ^ See: 'configHideTHLoading'
, configMonoidPrefixTimestamps :: !(First Bool)
-- ^ See: 'configPrefixTimestamps'
, configMonoidLatestSnapshot :: !(First Text)
-- ^ See: 'configLatestSnapshot'
, configMonoidPackageIndices :: !(First [HackageSecurityConfig])
-- ^ See: @picIndices@
, configMonoidSystemGHC :: !(First Bool)
-- ^ See: 'configSystemGHC'
,configMonoidInstallGHC :: !FirstTrue
-- ^ See: 'configInstallGHC'
,configMonoidSkipGHCCheck :: !FirstFalse
-- ^ See: 'configSkipGHCCheck'
,configMonoidSkipMsys :: !FirstFalse
-- ^ See: 'configSkipMsys'
,configMonoidCompilerCheck :: !(First VersionCheck)
-- ^ See: 'configCompilerCheck'
,configMonoidCompilerRepository :: !(First CompilerRepository)
-- ^ See: 'configCompilerRepository'
,configMonoidRequireStackVersion :: !IntersectingVersionRange
-- ^ See: 'configRequireStackVersion'
,configMonoidArch :: !(First String)
-- ^ Used for overriding the platform
,configMonoidGHCVariant :: !(First GHCVariant)
-- ^ Used for overriding the platform
,configMonoidGHCBuild :: !(First CompilerBuild)
-- ^ Used for overriding the GHC build
,configMonoidJobs :: !(First Int)
-- ^ See: 'configJobs'
,configMonoidExtraIncludeDirs :: ![FilePath]
-- ^ See: 'configExtraIncludeDirs'
,configMonoidExtraLibDirs :: ![FilePath]
-- ^ See: 'configExtraLibDirs'
, configMonoidOverrideGccPath :: !(First (Path Abs File))
-- ^ Allow users to override the path to gcc
,configMonoidOverrideHpack :: !(First FilePath)
-- ^ Use Hpack executable (overrides bundled Hpack)
,configMonoidConcurrentTests :: !(First Bool)
-- ^ See: 'configConcurrentTests'
,configMonoidLocalBinPath :: !(First FilePath)
-- ^ Used to override the binary installation dir
,configMonoidTemplateParameters :: !(Map Text Text)
-- ^ Template parameters.
,configMonoidScmInit :: !(First SCM)
-- ^ Initialize SCM (e.g. git init) when making new projects?
,configMonoidGhcOptionsByName :: !(MonoidMap PackageName (Monoid.Dual [Text]))
-- ^ See 'configGhcOptionsByName'. Uses 'Monoid.Dual' so that
-- options from the configs on the right come first, so that they
-- can be overridden.
,configMonoidGhcOptionsByCat :: !(MonoidMap ApplyGhcOptions (Monoid.Dual [Text]))
-- ^ See 'configGhcOptionsAll'. Uses 'Monoid.Dual' so that options
-- from the configs on the right come first, so that they can be
-- overridden.
,configMonoidCabalConfigOpts :: !(MonoidMap CabalConfigKey (Monoid.Dual [Text]))
-- ^ See 'configCabalConfigOpts'.
,configMonoidExtraPath :: ![Path Abs Dir]
-- ^ Additional paths to search for executables in
,configMonoidSetupInfoLocations :: ![String]
-- ^ See 'configSetupInfoLocations'
,configMonoidSetupInfoInline :: !SetupInfo
-- ^ See 'configSetupInfoInline'
,configMonoidLocalProgramsBase :: !(First (Path Abs Dir))
-- ^ Override the default local programs dir, where e.g. GHC is installed.
,configMonoidPvpBounds :: !(First PvpBounds)
-- ^ See 'configPvpBounds'
,configMonoidModifyCodePage :: !FirstTrue
-- ^ See 'configModifyCodePage'
,configMonoidExplicitSetupDeps :: !(Map (Maybe PackageName) Bool)
-- ^ See 'configExplicitSetupDeps'
,configMonoidRebuildGhcOptions :: !FirstFalse
-- ^ See 'configMonoidRebuildGhcOptions'
,configMonoidApplyGhcOptions :: !(First ApplyGhcOptions)
-- ^ See 'configApplyGhcOptions'
,configMonoidAllowNewer :: !(First Bool)
-- ^ See 'configMonoidAllowNewer'
,configMonoidDefaultTemplate :: !(First TemplateName)
-- ^ The default template to use when none is specified.
-- (If Nothing, the default default is used.)
, configMonoidAllowDifferentUser :: !(First Bool)
-- ^ Allow users other than the stack root owner to use the stack
-- installation.
, configMonoidDumpLogs :: !(First DumpLogs)
-- ^ See 'configDumpLogs'
, configMonoidSaveHackageCreds :: !(First Bool)
-- ^ See 'configSaveHackageCreds'
, configMonoidHackageBaseUrl :: !(First Text)
-- ^ See 'configHackageBaseUrl'
, configMonoidColorWhen :: !(First ColorWhen)
-- ^ When to use 'ANSI' colors
, configMonoidStyles :: !StylesUpdate
, configMonoidHideSourcePaths :: !FirstTrue
-- ^ See 'configHideSourcePaths'
, configMonoidRecommendUpgrade :: !FirstTrue
-- ^ See 'configRecommendUpgrade'
, configMonoidCasaRepoPrefix :: !(First CasaRepoPrefix)
, configMonoidSnapshotLocation :: !(First Text)
-- ^ Custom location of LTS/Nightly snapshots
, configMonoidStackDeveloperMode :: !(First Bool)
-- ^ See 'configStackDeveloperMode'
}
deriving (Show, Generic)
instance Semigroup ConfigMonoid where
(<>) = mappenddefault
instance Monoid ConfigMonoid where
mempty = memptydefault
mappend = (<>)
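-- Illustration (added here, not part of the original module): the cascade is
-- left-biased because most fields are 'First'-style wrappers, so the leftmost
-- configuration that actually sets a field wins when the monoids are appended:
--
-- > First (Just 2) <> First (Just 7) == First (Just 2)
-- > First Nothing  <> First (Just 7) == First (Just 7)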
parseConfigMonoid :: Path Abs Dir -> Value -> Yaml.Parser (WithJSONWarnings ConfigMonoid)
parseConfigMonoid = withObjectWarnings "ConfigMonoid" . parseConfigMonoidObject
-- | Parse a partial configuration. Used to parse both a standalone config
-- file and a project file, so that a sub-parser is not required, which would
-- interfere with warnings for missing fields.
parseConfigMonoidObject :: Path Abs Dir -> Object -> WarningParser ConfigMonoid
parseConfigMonoidObject rootDir obj = do
-- Parsing 'stackRoot' from 'stackRoot'/config.yaml would be nonsensical
let configMonoidStackRoot = First Nothing
configMonoidWorkDir <- First <$> obj ..:? configMonoidWorkDirName
configMonoidBuildOpts <- jsonSubWarnings (obj ..:? configMonoidBuildOptsName ..!= mempty)
configMonoidDockerOpts <- jsonSubWarnings (obj ..:? configMonoidDockerOptsName ..!= mempty)
configMonoidNixOpts <- jsonSubWarnings (obj ..:? configMonoidNixOptsName ..!= mempty)
configMonoidConnectionCount <- First <$> obj ..:? configMonoidConnectionCountName
configMonoidHideTHLoading <- FirstTrue <$> obj ..:? configMonoidHideTHLoadingName
configMonoidPrefixTimestamps <- First <$> obj ..:? configMonoidPrefixTimestampsName
murls :: Maybe Value <- obj ..:? configMonoidUrlsName
configMonoidLatestSnapshot <-
case murls of
Nothing -> pure $ First Nothing
Just urls -> jsonSubWarnings $ lift $ withObjectWarnings
"urls"
(\o -> First <$> o ..:? "latest-snapshot" :: WarningParser (First Text))
urls
configMonoidPackageIndices <- First <$> jsonSubWarningsTT (obj ..:? configMonoidPackageIndicesName)
configMonoidSystemGHC <- First <$> obj ..:? configMonoidSystemGHCName
configMonoidInstallGHC <- FirstTrue <$> obj ..:? configMonoidInstallGHCName
configMonoidSkipGHCCheck <- FirstFalse <$> obj ..:? configMonoidSkipGHCCheckName
configMonoidSkipMsys <- FirstFalse <$> obj ..:? configMonoidSkipMsysName
configMonoidRequireStackVersion <- IntersectingVersionRange . unVersionRangeJSON <$> (
obj ..:? configMonoidRequireStackVersionName
..!= VersionRangeJSON anyVersion)
configMonoidArch <- First <$> obj ..:? configMonoidArchName
configMonoidGHCVariant <- First <$> obj ..:? configMonoidGHCVariantName
configMonoidGHCBuild <- First <$> obj ..:? configMonoidGHCBuildName
configMonoidJobs <- First <$> obj ..:? configMonoidJobsName
configMonoidExtraIncludeDirs <- map (toFilePath rootDir FilePath.</>) <$>
obj ..:? configMonoidExtraIncludeDirsName ..!= []
configMonoidExtraLibDirs <- map (toFilePath rootDir FilePath.</>) <$>
obj ..:? configMonoidExtraLibDirsName ..!= []
configMonoidOverrideGccPath <- First <$> obj ..:? configMonoidOverrideGccPathName
configMonoidOverrideHpack <- First <$> obj ..:? configMonoidOverrideHpackName
configMonoidConcurrentTests <- First <$> obj ..:? configMonoidConcurrentTestsName
configMonoidLocalBinPath <- First <$> obj ..:? configMonoidLocalBinPathName
templates <- obj ..:? "templates"
(configMonoidScmInit,configMonoidTemplateParameters) <-
case templates of
Nothing -> return (First Nothing,M.empty)
Just tobj -> do
scmInit <- tobj ..:? configMonoidScmInitName
params <- tobj ..:? configMonoidTemplateParametersName
return (First scmInit,fromMaybe M.empty params)
configMonoidCompilerCheck <- First <$> obj ..:? configMonoidCompilerCheckName
configMonoidCompilerRepository <- First <$> (obj ..:? configMonoidCompilerRepositoryName)
options <- Map.map unGhcOptions <$> obj ..:? configMonoidGhcOptionsName ..!= mempty
optionsEverything <-
case (Map.lookup GOKOldEverything options, Map.lookup GOKEverything options) of
(Just _, Just _) -> fail "Cannot specify both `*` and `$everything` GHC options"
(Nothing, Just x) -> return x
(Just x, Nothing) -> do
tell "The `*` ghc-options key is not recommended. Consider using $locals, or if really needed, $everything"
return x
(Nothing, Nothing) -> return []
let configMonoidGhcOptionsByCat = coerce $ Map.fromList
[ (AGOEverything, optionsEverything)
, (AGOLocals, Map.findWithDefault [] GOKLocals options)
, (AGOTargets, Map.findWithDefault [] GOKTargets options)
]
configMonoidGhcOptionsByName = coerce $ Map.fromList
[(name, opts) | (GOKPackage name, opts) <- Map.toList options]
configMonoidCabalConfigOpts' <- obj ..:? "configure-options" ..!= mempty
let configMonoidCabalConfigOpts = coerce (configMonoidCabalConfigOpts' :: Map CabalConfigKey [Text])
configMonoidExtraPath <- obj ..:? configMonoidExtraPathName ..!= []
configMonoidSetupInfoLocations <- obj ..:? configMonoidSetupInfoLocationsName ..!= []
configMonoidSetupInfoInline <- jsonSubWarningsT (obj ..:? configMonoidSetupInfoInlineName) ..!= mempty
configMonoidLocalProgramsBase <- First <$> obj ..:? configMonoidLocalProgramsBaseName
configMonoidPvpBounds <- First <$> obj ..:? configMonoidPvpBoundsName
configMonoidModifyCodePage <- FirstTrue <$> obj ..:? configMonoidModifyCodePageName
configMonoidExplicitSetupDeps <-
(obj ..:? configMonoidExplicitSetupDepsName ..!= mempty)
>>= fmap Map.fromList . mapM handleExplicitSetupDep . Map.toList
configMonoidRebuildGhcOptions <- FirstFalse <$> obj ..:? configMonoidRebuildGhcOptionsName
configMonoidApplyGhcOptions <- First <$> obj ..:? configMonoidApplyGhcOptionsName
configMonoidAllowNewer <- First <$> obj ..:? configMonoidAllowNewerName
configMonoidDefaultTemplate <- First <$> obj ..:? configMonoidDefaultTemplateName
configMonoidAllowDifferentUser <- First <$> obj ..:? configMonoidAllowDifferentUserName
configMonoidDumpLogs <- First <$> obj ..:? configMonoidDumpLogsName
configMonoidSaveHackageCreds <- First <$> obj ..:? configMonoidSaveHackageCredsName
configMonoidHackageBaseUrl <- First <$> obj ..:? configMonoidHackageBaseUrlName
configMonoidColorWhenUS <- obj ..:? configMonoidColorWhenUSName
configMonoidColorWhenGB <- obj ..:? configMonoidColorWhenGBName
let configMonoidColorWhen = First $ configMonoidColorWhenUS
<|> configMonoidColorWhenGB
configMonoidStylesUS <- obj ..:? configMonoidStylesUSName
configMonoidStylesGB <- obj ..:? configMonoidStylesGBName
let configMonoidStyles = fromMaybe mempty $ configMonoidStylesUS
<|> configMonoidStylesGB
configMonoidHideSourcePaths <- FirstTrue <$> obj ..:? configMonoidHideSourcePathsName
configMonoidRecommendUpgrade <- FirstTrue <$> obj ..:? configMonoidRecommendUpgradeName
configMonoidCasaRepoPrefix <- First <$> obj ..:? configMonoidCasaRepoPrefixName
configMonoidSnapshotLocation <- First <$> obj ..:? configMonoidSnapshotLocationName
configMonoidStackDeveloperMode <- First <$> obj ..:? configMonoidStackDeveloperModeName
return ConfigMonoid {..}
where
handleExplicitSetupDep :: (Monad m, MonadFail m) => (Text, Bool) -> m (Maybe PackageName, Bool)
handleExplicitSetupDep (name', b) = do
name <-
if name' == "*"
then return Nothing
else case parsePackageName $ T.unpack name' of
Nothing -> fail $ "Invalid package name: " ++ show name'
Just x -> return $ Just x
return (name, b)
configMonoidWorkDirName :: Text
configMonoidWorkDirName = "work-dir"
configMonoidBuildOptsName :: Text
configMonoidBuildOptsName = "build"
configMonoidDockerOptsName :: Text
configMonoidDockerOptsName = "docker"
configMonoidNixOptsName :: Text
configMonoidNixOptsName = "nix"
configMonoidConnectionCountName :: Text
configMonoidConnectionCountName = "connection-count"
configMonoidHideTHLoadingName :: Text
configMonoidHideTHLoadingName = "hide-th-loading"
configMonoidPrefixTimestampsName :: Text
configMonoidPrefixTimestampsName = "build-output-timestamps"
configMonoidUrlsName :: Text
configMonoidUrlsName = "urls"
configMonoidPackageIndicesName :: Text
configMonoidPackageIndicesName = "package-indices"
configMonoidSystemGHCName :: Text
configMonoidSystemGHCName = "system-ghc"
configMonoidInstallGHCName :: Text
configMonoidInstallGHCName = "install-ghc"
configMonoidSkipGHCCheckName :: Text
configMonoidSkipGHCCheckName = "skip-ghc-check"
configMonoidSkipMsysName :: Text
configMonoidSkipMsysName = "skip-msys"
configMonoidRequireStackVersionName :: Text
configMonoidRequireStackVersionName = "require-stack-version"
configMonoidArchName :: Text
configMonoidArchName = "arch"
configMonoidGHCVariantName :: Text
configMonoidGHCVariantName = "ghc-variant"
configMonoidGHCBuildName :: Text
configMonoidGHCBuildName = "ghc-build"
configMonoidJobsName :: Text
configMonoidJobsName = "jobs"
configMonoidExtraIncludeDirsName :: Text
configMonoidExtraIncludeDirsName = "extra-include-dirs"
configMonoidExtraLibDirsName :: Text
configMonoidExtraLibDirsName = "extra-lib-dirs"
configMonoidOverrideGccPathName :: Text
configMonoidOverrideGccPathName = "with-gcc"
configMonoidOverrideHpackName :: Text
configMonoidOverrideHpackName = "with-hpack"
configMonoidConcurrentTestsName :: Text
configMonoidConcurrentTestsName = "concurrent-tests"
configMonoidLocalBinPathName :: Text
configMonoidLocalBinPathName = "local-bin-path"
configMonoidScmInitName :: Text
configMonoidScmInitName = "scm-init"
configMonoidTemplateParametersName :: Text
configMonoidTemplateParametersName = "params"
configMonoidCompilerCheckName :: Text
configMonoidCompilerCheckName = "compiler-check"
configMonoidCompilerRepositoryName :: Text
configMonoidCompilerRepositoryName = "compiler-repository"
configMonoidGhcOptionsName :: Text
configMonoidGhcOptionsName = "ghc-options"
configMonoidExtraPathName :: Text
configMonoidExtraPathName = "extra-path"
configMonoidSetupInfoLocationsName :: Text
configMonoidSetupInfoLocationsName = "setup-info-locations"
configMonoidSetupInfoInlineName :: Text
configMonoidSetupInfoInlineName = "setup-info"
configMonoidLocalProgramsBaseName :: Text
configMonoidLocalProgramsBaseName = "local-programs-path"
configMonoidPvpBoundsName :: Text
configMonoidPvpBoundsName = "pvp-bounds"
configMonoidModifyCodePageName :: Text
configMonoidModifyCodePageName = "modify-code-page"
configMonoidExplicitSetupDepsName :: Text
configMonoidExplicitSetupDepsName = "explicit-setup-deps"
configMonoidRebuildGhcOptionsName :: Text
configMonoidRebuildGhcOptionsName = "rebuild-ghc-options"
configMonoidApplyGhcOptionsName :: Text
configMonoidApplyGhcOptionsName = "apply-ghc-options"
configMonoidAllowNewerName :: Text
configMonoidAllowNewerName = "allow-newer"
configMonoidDefaultTemplateName :: Text
configMonoidDefaultTemplateName = "default-template"
configMonoidAllowDifferentUserName :: Text
configMonoidAllowDifferentUserName = "allow-different-user"
configMonoidDumpLogsName :: Text
configMonoidDumpLogsName = "dump-logs"
configMonoidSaveHackageCredsName :: Text
configMonoidSaveHackageCredsName = "save-hackage-creds"
configMonoidHackageBaseUrlName :: Text
configMonoidHackageBaseUrlName = "hackage-base-url"
configMonoidColorWhenUSName :: Text
configMonoidColorWhenUSName = "color"
configMonoidColorWhenGBName :: Text
configMonoidColorWhenGBName = "colour"
configMonoidStylesUSName :: Text
configMonoidStylesUSName = "stack-colors"
configMonoidStylesGBName :: Text
configMonoidStylesGBName = "stack-colours"
configMonoidHideSourcePathsName :: Text
configMonoidHideSourcePathsName = "hide-source-paths"
configMonoidRecommendUpgradeName :: Text
configMonoidRecommendUpgradeName = "recommend-stack-upgrade"
configMonoidCasaRepoPrefixName :: Text
configMonoidCasaRepoPrefixName = "casa-repo-prefix"
configMonoidSnapshotLocationName :: Text
configMonoidSnapshotLocationName = "snapshot-location-base"
configMonoidStackDeveloperModeName :: Text
configMonoidStackDeveloperModeName = "stack-developer-mode"
data ConfigException
= ParseConfigFileException (Path Abs File) ParseException
| ParseCustomSnapshotException Text ParseException
| NoProjectConfigFound (Path Abs Dir) (Maybe Text)
| UnexpectedArchiveContents [Path Abs Dir] [Path Abs File]
| UnableToExtractArchive Text (Path Abs File)
| BadStackVersionException VersionRange
| NoMatchingSnapshot (NonEmpty SnapName)
| ResolverMismatch !RawSnapshotLocation String
| ResolverPartial !RawSnapshotLocation String
| NoSuchDirectory FilePath
| ParseGHCVariantException String
| BadStackRoot (Path Abs Dir)
| Won'tCreateStackRootInDirectoryOwnedByDifferentUser (Path Abs Dir) (Path Abs Dir) -- ^ @$STACK_ROOT@, parent dir
| UserDoesn'tOwnDirectory (Path Abs Dir)
| ManualGHCVariantSettingsAreIncompatibleWithSystemGHC
| NixRequiresSystemGhc
| NoResolverWhenUsingNoProject
| DuplicateLocalPackageNames ![(PackageName, [PackageLocation])]
deriving Typeable
instance Show ConfigException where
show (ParseConfigFileException configFile exception) = concat
[ "Could not parse '"
, toFilePath configFile
, "':\n"
, Yaml.prettyPrintParseException exception
, "\nSee http://docs.haskellstack.org/en/stable/yaml_configuration/"
]
show (ParseCustomSnapshotException url exception) = concat
[ "Could not parse '"
, T.unpack url
, "':\n"
, Yaml.prettyPrintParseException exception
, "\nSee https://docs.haskellstack.org/en/stable/custom_snapshot/"
]
show (NoProjectConfigFound dir mcmd) = concat
[ "Unable to find a stack.yaml file in the current directory ("
, toFilePath dir
, ") or its ancestors"
, case mcmd of
Nothing -> ""
Just cmd -> "\nRecommended action: stack " ++ T.unpack cmd
]
show (UnexpectedArchiveContents dirs files) = concat
[ "When unpacking an archive specified in your stack.yaml file, "
, "did not find expected contents. Expected: a single directory. Found: "
, show ( map (toFilePath . dirname) dirs
, map (toFilePath . filename) files
)
]
show (UnableToExtractArchive url file) = concat
[ "Archive extraction failed. Tarballs and zip archives are supported, couldn't handle the following URL, "
, T.unpack url, " downloaded to the file ", toFilePath $ filename file
]
show (BadStackVersionException requiredRange) = concat
[ "The version of stack you are using ("
, show (mkVersion' Meta.version)
, ") is outside the required\n"
,"version range specified in stack.yaml ("
, T.unpack (versionRangeText requiredRange)
, ")." ]
show (NoMatchingSnapshot names) = concat
[ "None of the following snapshots provides a compiler matching "
, "your package(s):\n"
, unlines $ map (\name -> " - " <> show name)
(NonEmpty.toList names)
, resolveOptions
]
show (ResolverMismatch resolver errDesc) = concat
[ "Resolver '"
, T.unpack $ utf8BuilderToText $ display resolver
, "' does not have a matching compiler to build some or all of your "
, "package(s).\n"
, errDesc
, resolveOptions
]
show (ResolverPartial resolver errDesc) = concat
[ "Resolver '"
, T.unpack $ utf8BuilderToText $ display resolver
, "' does not have all the packages to match your requirements.\n"
, unlines $ fmap (" " <>) (lines errDesc)
, resolveOptions
]
show (NoSuchDirectory dir) =
"No directory could be located matching the supplied path: " ++ dir
show (ParseGHCVariantException v) =
"Invalid ghc-variant value: " ++ v
show (BadStackRoot stackRoot) = concat
[ "Invalid stack root: '"
, toFilePath stackRoot
, "'. Please provide a valid absolute path."
]
show (Won'tCreateStackRootInDirectoryOwnedByDifferentUser envStackRoot parentDir) = concat
[ "Preventing creation of stack root '"
, toFilePath envStackRoot
, "'. Parent directory '"
, toFilePath parentDir
, "' is owned by someone else."
]
show (UserDoesn'tOwnDirectory dir) = concat
[ "You are not the owner of '"
, toFilePath dir
, "'. Aborting to protect file permissions."
, "\nRetry with '--"
, T.unpack configMonoidAllowDifferentUserName
, "' to disable this precaution."
]
show ManualGHCVariantSettingsAreIncompatibleWithSystemGHC = T.unpack $ T.concat
[ "stack can only control the "
, configMonoidGHCVariantName
, " of its own GHC installations. Please use '--no-"
, configMonoidSystemGHCName
, "'."
]
show NixRequiresSystemGhc = T.unpack $ T.concat
[ "stack's Nix integration is incompatible with '--no-system-ghc'. "
, "Please use '--"
, configMonoidSystemGHCName
, "' or disable the Nix integration."
]
show NoResolverWhenUsingNoProject = "When using the script command, you must provide a resolver argument"
show (DuplicateLocalPackageNames pairs) = concat
$ "The same package name is used in multiple local packages\n"
: map go pairs
where
go (name, dirs) = unlines
$ ""
: (packageNameString name ++ " used in:")
: map goLoc dirs
goLoc loc = "- " ++ show loc
instance Exception ConfigException
resolveOptions :: String
resolveOptions =
unlines [ "\nThis may be resolved by:"
, " - Using '--omit-packages' to exclude mismatching package(s)."
, " - Using '--resolver' to specify a matching snapshot/resolver"
]
-- | Get the URL to request the information on the latest snapshots
askLatestSnapshotUrl :: (MonadReader env m, HasConfig env) => m Text
askLatestSnapshotUrl = view $ configL.to configLatestSnapshot
-- | @".stack-work"@
workDirL :: HasConfig env => Lens' env (Path Rel Dir)
workDirL = configL.lens configWorkDir (\x y -> x { configWorkDir = y })
-- | Per-project work dir
getProjectWorkDir :: (HasBuildConfig env, MonadReader env m) => m (Path Abs Dir)
getProjectWorkDir = do
root <- view projectRootL
workDir <- view workDirL
return (root </> workDir)
-- | Relative directory for the platform identifier
platformOnlyRelDir
:: (MonadReader env m, HasPlatform env, MonadThrow m)
=> m (Path Rel Dir)
platformOnlyRelDir = do
platform <- view platformL
platformVariant <- view platformVariantL
parseRelDir (Distribution.Text.display platform ++ platformVariantSuffix platformVariant)
-- | Directory containing snapshots
snapshotsDir :: (MonadReader env m, HasEnvConfig env, MonadThrow m) => m (Path Abs Dir)
snapshotsDir = do
root <- view stackRootL
platform <- platformGhcRelDir
return $ root </> relDirSnapshots </> platform
-- | Installation root for dependencies
installationRootDeps :: (HasEnvConfig env) => RIO env (Path Abs Dir)
installationRootDeps = do
root <- view stackRootL
-- TODO: also useShaPathOnWindows here, once #1173 is resolved.
psc <- platformSnapAndCompilerRel
return $ root </> relDirSnapshots </> psc
-- | Installation root for locals
installationRootLocal :: (HasEnvConfig env) => RIO env (Path Abs Dir)
installationRootLocal = do
workDir <- getProjectWorkDir
psc <- useShaPathOnWindows =<< platformSnapAndCompilerRel
return $ workDir </> relDirInstall </> psc
-- | Installation root for compiler tools
bindirCompilerTools :: (MonadThrow m, MonadReader env m, HasEnvConfig env) => m (Path Abs Dir)
bindirCompilerTools = do
config <- view configL
platform <- platformGhcRelDir
compilerVersion <- view actualCompilerVersionL
compiler <- parseRelDir $ compilerVersionString compilerVersion
return $
view stackRootL config </>
relDirCompilerTools </>
platform </>
compiler </>
bindirSuffix
-- | Hoogle directory.
hoogleRoot :: (HasEnvConfig env) => RIO env (Path Abs Dir)
hoogleRoot = do
workDir <- getProjectWorkDir
psc <- useShaPathOnWindows =<< platformSnapAndCompilerRel
return $ workDir </> relDirHoogle </> psc
-- | Get the hoogle database path.
hoogleDatabasePath :: (HasEnvConfig env) => RIO env (Path Abs File)
hoogleDatabasePath = do
dir <- hoogleRoot
return (dir </> relFileDatabaseHoo)
-- | Path for platform followed by snapshot name followed by compiler
-- name.
platformSnapAndCompilerRel
:: (HasEnvConfig env)
=> RIO env (Path Rel Dir)
platformSnapAndCompilerRel = do
platform <- platformGhcRelDir
smh <- view $ envConfigL.to envConfigSourceMapHash
name <- smRelDir smh
ghc <- compilerVersionDir
useShaPathOnWindows (platform </> name </> ghc)
-- | Relative directory for the platform and GHC identifier
platformGhcRelDir
:: (MonadReader env m, HasEnvConfig env, MonadThrow m)
=> m (Path Rel Dir)
platformGhcRelDir = do
cp <- view compilerPathsL
let cbSuffix = compilerBuildSuffix $ cpBuild cp
verOnly <- platformGhcVerOnlyRelDirStr
parseRelDir (mconcat [ verOnly, cbSuffix ])
-- | Relative directory for the platform and GHC identifier without GHC bindist build
platformGhcVerOnlyRelDir
:: (MonadReader env m, HasPlatform env, HasGHCVariant env, MonadThrow m)
=> m (Path Rel Dir)
platformGhcVerOnlyRelDir =
parseRelDir =<< platformGhcVerOnlyRelDirStr
-- | Relative directory for the platform and GHC identifier without GHC bindist build
-- (before parsing into a Path)
platformGhcVerOnlyRelDirStr
:: (MonadReader env m, HasPlatform env, HasGHCVariant env)
=> m FilePath
platformGhcVerOnlyRelDirStr = do
platform <- view platformL
platformVariant <- view platformVariantL
ghcVariant <- view ghcVariantL
return $ mconcat [ Distribution.Text.display platform
, platformVariantSuffix platformVariant
, ghcVariantSuffix ghcVariant ]
-- | This is an attempt to shorten stack paths on Windows to decrease our
-- chances of hitting the 260-character path limit. The idea is to calculate
-- the SHA1 hash of the path used on other architectures, encode it with base
-- 16 and take the first 8 characters of it.
useShaPathOnWindows :: MonadThrow m => Path Rel Dir -> m (Path Rel Dir)
useShaPathOnWindows
| osIsWindows = shaPath
| otherwise = pure
shaPath :: (IsPath Rel t, MonadThrow m) => Path Rel t -> m (Path Rel t)
shaPath = shaPathForBytes . encodeUtf8 . T.pack . toFilePath
shaPathForBytes :: (IsPath Rel t, MonadThrow m) => ByteString -> m (Path Rel t)
shaPathForBytes
= parsePath . S8.unpack . S8.take 8
. Mem.convertToBase Mem.Base16 . hashWith SHA1
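-- Illustrative behaviour (directory and hash below are made up): on Windows,
-- useShaPathOnWindows replaces a relative directory such as
-- x86_64-windows/lts-18.0/ghc-8.10.4 with the first 8 hex characters of its
-- SHA1, e.g. something like 1a2b3c4d, while on other platforms it returns
-- the path unchanged.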
-- TODO: Move something like this into the path package. Consider
-- subsuming path-io's 'AnyPath'?
class IsPath b t where
parsePath :: MonadThrow m => FilePath -> m (Path b t)
instance IsPath Abs Dir where parsePath = parseAbsDir
instance IsPath Rel Dir where parsePath = parseRelDir
instance IsPath Abs File where parsePath = parseAbsFile
instance IsPath Rel File where parsePath = parseRelFile
compilerVersionDir :: (MonadThrow m, MonadReader env m, HasEnvConfig env) => m (Path Rel Dir)
compilerVersionDir = do
compilerVersion <- view actualCompilerVersionL
parseRelDir $ case compilerVersion of
ACGhc version -> versionString version
ACGhcGit {} -> compilerVersionString compilerVersion
-- | Package database for installing dependencies into
packageDatabaseDeps :: (HasEnvConfig env) => RIO env (Path Abs Dir)
packageDatabaseDeps = do
root <- installationRootDeps
return $ root </> relDirPkgdb
-- | Package database for installing local packages into
packageDatabaseLocal :: (HasEnvConfig env) => RIO env (Path Abs Dir)
packageDatabaseLocal = do
root <- installationRootLocal
return $ root </> relDirPkgdb
-- | Extra package databases
packageDatabaseExtra :: (MonadReader env m, HasEnvConfig env) => m [Path Abs Dir]
packageDatabaseExtra = view $ buildConfigL.to bcExtraPackageDBs
-- | Where do we get information on global packages for loading up a
-- 'LoadedSnapshot'?
data GlobalInfoSource
= GISSnapshotHints
-- ^ Accept the hints in the snapshot definition
| GISCompiler ActualCompiler
-- ^ Look up the actual information in the installed compiler
-- | Where HPC reports and tix files get stored.
hpcReportDir :: (HasEnvConfig env)
=> RIO env (Path Abs Dir)
hpcReportDir = do
root <- installationRootLocal
return $ root </> relDirHpc
-- | Get the extra bin directories (for the PATH). Puts more local first
--
-- Bool indicates whether or not to include the locals
extraBinDirs :: (HasEnvConfig env)
=> RIO env (Bool -> [Path Abs Dir])
extraBinDirs = do
deps <- installationRootDeps
local' <- installationRootLocal
tools <- bindirCompilerTools
return $ \locals -> if locals
then [local' </> bindirSuffix, deps </> bindirSuffix, tools]
else [deps </> bindirSuffix, tools]
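-- Usage sketch: @dirs <- extraBinDirs@, then @dirs True@ gives the local,
-- snapshot and compiler-tools bin directories in that order, while
-- @dirs False@ omits the local one.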
minimalEnvSettings :: EnvSettings
minimalEnvSettings =
EnvSettings
{ esIncludeLocals = False
, esIncludeGhcPackagePath = False
, esStackExe = False
, esLocaleUtf8 = False
, esKeepGhcRts = False
}
-- | Default @EnvSettings@ which includes locals and GHC_PACKAGE_PATH.
--
-- Note that this also passes through the GHCRTS environment variable.
-- See https://github.com/commercialhaskell/stack/issues/3444
defaultEnvSettings :: EnvSettings
defaultEnvSettings = EnvSettings
{ esIncludeLocals = True
, esIncludeGhcPackagePath = True
, esStackExe = True
, esLocaleUtf8 = False
, esKeepGhcRts = True
}
-- | Environment settings which do not embellish the environment
--
-- Note that this also passes through the GHCRTS environment variable.
-- See https://github.com/commercialhaskell/stack/issues/3444
plainEnvSettings :: EnvSettings
plainEnvSettings = EnvSettings
{ esIncludeLocals = False
, esIncludeGhcPackagePath = False
, esStackExe = False
, esLocaleUtf8 = False
, esKeepGhcRts = True
}
-- | Get the path for the given compiler ignoring any local binaries.
--
-- https://github.com/commercialhaskell/stack/issues/1052
getCompilerPath :: HasCompiler env => RIO env (Path Abs File)
getCompilerPath = view $ compilerPathsL.to cpCompiler
data ProjectAndConfigMonoid
= ProjectAndConfigMonoid !Project !ConfigMonoid
parseProjectAndConfigMonoid :: Path Abs Dir -> Value -> Yaml.Parser (WithJSONWarnings (IO ProjectAndConfigMonoid))
parseProjectAndConfigMonoid rootDir =
withObjectWarnings "ProjectAndConfigMonoid" $ \o -> do
packages <- o ..:? "packages" ..!= [RelFilePath "."]
deps <- jsonSubWarningsTT (o ..:? "extra-deps") ..!= []
flags' <- o ..:? "flags" ..!= mempty
let flags = unCabalStringMap <$> unCabalStringMap
(flags' :: Map (CabalString PackageName) (Map (CabalString FlagName) Bool))
resolver <- jsonSubWarnings $ o ...: ["snapshot", "resolver"]
mcompiler <- o ..:? "compiler"
msg <- o ..:? "user-message"
config <- parseConfigMonoidObject rootDir o
extraPackageDBs <- o ..:? "extra-package-dbs" ..!= []
mcurator <- jsonSubWarningsT (o ..:? "curator")
drops <- o ..:? "drop-packages" ..!= mempty
return $ do
deps' <- mapM (resolvePaths (Just rootDir)) deps
resolver' <- resolvePaths (Just rootDir) resolver
let project = Project
{ projectUserMsg = msg
, projectResolver = resolver'
, projectCompiler = mcompiler -- FIXME make sure resolver' isn't SLCompiler
, projectExtraPackageDBs = extraPackageDBs
, projectPackages = packages
, projectDependencies = concatMap toList (deps' :: [NonEmpty RawPackageLocation])
, projectFlags = flags
, projectCurator = mcurator
, projectDropPackages = Set.map unCabalString drops
}
pure $ ProjectAndConfigMonoid project config
-- | A source control system.
data SCM = Git
deriving (Show)
instance FromJSON SCM where
parseJSON v = do
s <- parseJSON v
case s of
"git" -> return Git
_ -> fail ("Unknown or unsupported SCM: " <> s)
instance ToJSON SCM where
toJSON Git = toJSON ("git" :: Text)
-- | A variant of the platform, used to differentiate Docker builds from host
data PlatformVariant = PlatformVariantNone
| PlatformVariant String
-- | Render a platform variant to a String suffix.
platformVariantSuffix :: PlatformVariant -> String
platformVariantSuffix PlatformVariantNone = ""
platformVariantSuffix (PlatformVariant v) = "-" ++ v
-- | Specialized variant of GHC (e.g. libgmp4 or integer-simple)
data GHCVariant
= GHCStandard -- ^ Standard bindist
| GHCIntegerSimple -- ^ Bindist that uses integer-simple
| GHCCustom String -- ^ Other bindists
deriving (Show)
instance FromJSON GHCVariant where
-- Strange structuring is to give consistent error messages
parseJSON =
withText
"GHCVariant"
(either (fail . show) return . parseGHCVariant . T.unpack)
-- | Render a GHC variant to a String.
ghcVariantName :: GHCVariant -> String
ghcVariantName GHCStandard = "standard"
ghcVariantName GHCIntegerSimple = "integersimple"
ghcVariantName (GHCCustom name) = "custom-" ++ name
-- | Render a GHC variant to a String suffix.
ghcVariantSuffix :: GHCVariant -> String
ghcVariantSuffix GHCStandard = ""
ghcVariantSuffix v = "-" ++ ghcVariantName v
-- | Parse GHC variant from a String.
parseGHCVariant :: (MonadThrow m) => String -> m GHCVariant
parseGHCVariant s =
case stripPrefix "custom-" s of
Just name -> return (GHCCustom name)
Nothing
| s == "" -> return GHCStandard
| s == "standard" -> return GHCStandard
| s == "integersimple" -> return GHCIntegerSimple
| otherwise -> return (GHCCustom s)
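-- Informal examples: parseGHCVariant "" and parseGHCVariant "standard" both
-- give GHCStandard, "integersimple" gives GHCIntegerSimple, "custom-foo"
-- gives GHCCustom "foo", and any other string s falls back to GHCCustom s;
-- ghcVariantSuffix renders these back to "", "-integersimple" and
-- "-custom-foo" respectively.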
-- | Build of the compiler distribution (e.g. standard, gmp4, tinfo6)
-- | Information for a file to download.
data DownloadInfo = DownloadInfo
{ downloadInfoUrl :: Text
-- ^ URL or absolute file path
, downloadInfoContentLength :: Maybe Int
, downloadInfoSha1 :: Maybe ByteString
, downloadInfoSha256 :: Maybe ByteString
} deriving (Show)
instance FromJSON (WithJSONWarnings DownloadInfo) where
parseJSON = withObjectWarnings "DownloadInfo" parseDownloadInfoFromObject
-- | Parse JSON in existing object for 'DownloadInfo'
parseDownloadInfoFromObject :: Object -> WarningParser DownloadInfo
parseDownloadInfoFromObject o = do
url <- o ..: "url"
contentLength <- o ..:? "content-length"
sha1TextMay <- o ..:? "sha1"
sha256TextMay <- o ..:? "sha256"
return
DownloadInfo
{ downloadInfoUrl = url
, downloadInfoContentLength = contentLength
, downloadInfoSha1 = fmap encodeUtf8 sha1TextMay
, downloadInfoSha256 = fmap encodeUtf8 sha256TextMay
}
data VersionedDownloadInfo = VersionedDownloadInfo
{ vdiVersion :: Version
, vdiDownloadInfo :: DownloadInfo
}
deriving Show
instance FromJSON (WithJSONWarnings VersionedDownloadInfo) where
parseJSON = withObjectWarnings "VersionedDownloadInfo" $ \o -> do
CabalString version <- o ..: "version"
downloadInfo <- parseDownloadInfoFromObject o
return VersionedDownloadInfo
{ vdiVersion = version
, vdiDownloadInfo = downloadInfo
}
data GHCDownloadInfo = GHCDownloadInfo
{ gdiConfigureOpts :: [Text]
, gdiConfigureEnv :: Map Text Text
, gdiDownloadInfo :: DownloadInfo
}
deriving Show
instance FromJSON (WithJSONWarnings GHCDownloadInfo) where
parseJSON = withObjectWarnings "GHCDownloadInfo" $ \o -> do
configureOpts <- o ..:? "configure-opts" ..!= mempty
configureEnv <- o ..:? "configure-env" ..!= mempty
downloadInfo <- parseDownloadInfoFromObject o
return GHCDownloadInfo
{ gdiConfigureOpts = configureOpts
, gdiConfigureEnv = configureEnv
, gdiDownloadInfo = downloadInfo
}
data SetupInfo = SetupInfo
{ siSevenzExe :: Maybe DownloadInfo
, siSevenzDll :: Maybe DownloadInfo
, siMsys2 :: Map Text VersionedDownloadInfo
, siGHCs :: Map Text (Map Version GHCDownloadInfo)
, siStack :: Map Text (Map Version DownloadInfo)
}
deriving Show
instance FromJSON (WithJSONWarnings SetupInfo) where
parseJSON = withObjectWarnings "SetupInfo" $ \o -> do
siSevenzExe <- jsonSubWarningsT (o ..:? "sevenzexe-info")
siSevenzDll <- jsonSubWarningsT (o ..:? "sevenzdll-info")
siMsys2 <- jsonSubWarningsT (o ..:? "msys2" ..!= mempty)
(fmap unCabalStringMap -> siGHCs) <- jsonSubWarningsTT (o ..:? "ghc" ..!= mempty)
(fmap unCabalStringMap -> siStack) <- jsonSubWarningsTT (o ..:? "stack" ..!= mempty)
return SetupInfo {..}
-- | For the @siGHCs@ field maps are deeply merged.
-- For all fields the values from the first @SetupInfo@ win.
instance Semigroup SetupInfo where
l <> r =
SetupInfo
{ siSevenzExe = siSevenzExe l <|> siSevenzExe r
, siSevenzDll = siSevenzDll l <|> siSevenzDll r
, siMsys2 = siMsys2 l <> siMsys2 r
, siGHCs = Map.unionWith (<>) (siGHCs l) (siGHCs r)
, siStack = Map.unionWith (<>) (siStack l) (siStack r) }
instance Monoid SetupInfo where
mempty =
SetupInfo
{ siSevenzExe = Nothing
, siSevenzDll = Nothing
, siMsys2 = Map.empty
, siGHCs = Map.empty
, siStack = Map.empty
}
mappend = (<>)
-- | How PVP bounds should be added to .cabal files
data PvpBoundsType
= PvpBoundsNone
| PvpBoundsUpper
| PvpBoundsLower
| PvpBoundsBoth
deriving (Show, Read, Eq, Typeable, Ord, Enum, Bounded)
data PvpBounds = PvpBounds
{ pbType :: !PvpBoundsType
, pbAsRevision :: !Bool
}
deriving (Show, Read, Eq, Typeable, Ord)
pvpBoundsText :: PvpBoundsType -> Text
pvpBoundsText PvpBoundsNone = "none"
pvpBoundsText PvpBoundsUpper = "upper"
pvpBoundsText PvpBoundsLower = "lower"
pvpBoundsText PvpBoundsBoth = "both"
parsePvpBounds :: Text -> Either String PvpBounds
parsePvpBounds t = maybe err Right $ do
(t', asRevision) <-
case T.break (== '-') t of
(x, "") -> Just (x, False)
(x, "-revision") -> Just (x, True)
_ -> Nothing
x <- Map.lookup t' m
Just PvpBounds
{ pbType = x
, pbAsRevision = asRevision
}
where
m = Map.fromList $ map (pvpBoundsText &&& id) [minBound..maxBound]
err = Left $ "Invalid PVP bounds: " ++ T.unpack t
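-- Informal examples: parsePvpBounds "upper" is
-- Right (PvpBounds PvpBoundsUpper False), parsePvpBounds "both-revision" is
-- Right (PvpBounds PvpBoundsBoth True), and an unknown string such as
-- "sideways" is Left "Invalid PVP bounds: sideways".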
instance ToJSON PvpBounds where
toJSON (PvpBounds typ asRevision) =
toJSON (pvpBoundsText typ <> (if asRevision then "-revision" else ""))
instance FromJSON PvpBounds where
parseJSON = withText "PvpBounds" (either fail return . parsePvpBounds)
-- | Provide an explicit list of package dependencies when running a custom Setup.hs
explicitSetupDeps :: (MonadReader env m, HasConfig env) => PackageName -> m Bool
explicitSetupDeps name = do
m <- view $ configL.to configExplicitSetupDeps
return $
Map.findWithDefault
(Map.findWithDefault False Nothing m)
(Just name)
m
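-- Behaviour sketch (package names illustrative): with a config of
-- {Nothing -> True, Just "foo" -> False}, the result for "foo" is False
-- (the exact entry wins) and for any other package it is True (the Nothing
-- entry acts as the default); with no entries at all the result is False.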
-- | Data passed into Docker container for the Docker entrypoint's use
newtype DockerEntrypoint = DockerEntrypoint
{ deUser :: Maybe DockerUser
-- ^ UID/GID/etc of host user, if we wish to perform UID/GID switch in container
} deriving (Read,Show)
-- | Docker host user info
data DockerUser = DockerUser
{ duUid :: UserID -- ^ uid
, duGid :: GroupID -- ^ gid
  , duGroups :: [GroupID] -- ^ Supplemental groups
  , duUmask :: FileMode -- ^ File creation mask
} deriving (Read,Show)
data GhcOptionKey
= GOKOldEverything
| GOKEverything
| GOKLocals
| GOKTargets
| GOKPackage !PackageName
deriving (Eq, Ord)
instance FromJSONKey GhcOptionKey where
fromJSONKey = FromJSONKeyTextParser $ \t ->
case t of
"*" -> return GOKOldEverything
"$everything" -> return GOKEverything
"$locals" -> return GOKLocals
"$targets" -> return GOKTargets
_ ->
case parsePackageName $ T.unpack t of
Nothing -> fail $ "Invalid package name: " ++ show t
Just x -> return $ GOKPackage x
fromJSONKeyList = FromJSONKeyTextParser $ \_ -> fail "GhcOptionKey.fromJSONKeyList"
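-- Keys accepted in a ghc-options map are therefore "*" (legacy spelling of
-- everything), "$everything", "$locals", "$targets", or a concrete package
-- name such as "text"; anything that does not parse as a package name is
-- rejected with an error.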
newtype GhcOptions = GhcOptions { unGhcOptions :: [Text] }
instance FromJSON GhcOptions where
parseJSON = withText "GhcOptions" $ \t ->
case parseArgs Escaping t of
Left e -> fail e
Right opts -> return $ GhcOptions $ map T.pack opts
-----------------------------------
-- Lens classes
-----------------------------------
-- | Class for environment values which have a Platform
class HasPlatform env where
platformL :: Lens' env Platform
default platformL :: HasConfig env => Lens' env Platform
platformL = configL.platformL
{-# INLINE platformL #-}
platformVariantL :: Lens' env PlatformVariant
default platformVariantL :: HasConfig env => Lens' env PlatformVariant
platformVariantL = configL.platformVariantL
{-# INLINE platformVariantL #-}
-- | Class for environment values which have a GHCVariant
class HasGHCVariant env where
ghcVariantL :: SimpleGetter env GHCVariant
default ghcVariantL :: HasConfig env => SimpleGetter env GHCVariant
ghcVariantL = configL.ghcVariantL
{-# INLINE ghcVariantL #-}
-- | Class for environment values which have a 'Runner'.
class (HasProcessContext env, HasLogFunc env) => HasRunner env where
runnerL :: Lens' env Runner
instance HasLogFunc Runner where
logFuncL = lens runnerLogFunc (\x y -> x { runnerLogFunc = y })
instance HasProcessContext Runner where
processContextL = lens runnerProcessContext (\x y -> x { runnerProcessContext = y })
instance HasRunner Runner where
runnerL = id
instance HasStylesUpdate Runner where
stylesUpdateL = globalOptsL.
lens globalStylesUpdate (\x y -> x { globalStylesUpdate = y })
instance HasTerm Runner where
useColorL = lens runnerUseColor (\x y -> x { runnerUseColor = y })
termWidthL = lens runnerTermWidth (\x y -> x { runnerTermWidth = y })
globalOptsL :: HasRunner env => Lens' env GlobalOpts
globalOptsL = runnerL.lens runnerGlobalOpts (\x y -> x { runnerGlobalOpts = y })
-- | Class for environment values that can provide a 'Config'.
class (HasPlatform env, HasGHCVariant env, HasProcessContext env, HasPantryConfig env, HasTerm env, HasRunner env) => HasConfig env where
configL :: Lens' env Config
default configL :: HasBuildConfig env => Lens' env Config
configL = buildConfigL.lens bcConfig (\x y -> x { bcConfig = y })
{-# INLINE configL #-}
class HasConfig env => HasBuildConfig env where
buildConfigL :: Lens' env BuildConfig
default buildConfigL :: HasEnvConfig env => Lens' env BuildConfig
buildConfigL = envConfigL.lens
envConfigBuildConfig
(\x y -> x { envConfigBuildConfig = y })
class (HasBuildConfig env, HasSourceMap env, HasCompiler env) => HasEnvConfig env where
envConfigL :: Lens' env EnvConfig
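-- Sketch of downstream use: an application environment that wraps an
-- 'EnvConfig' typically implements 'envConfigL' with a real lens and then
-- declares empty instances for the classes whose methods have defaults
-- (HasConfig, HasBuildConfig, HasPlatform, HasGHCVariant), in the same way
-- the EnvConfig instances below do.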
-----------------------------------
-- Lens instances
-----------------------------------
instance HasPlatform (Platform,PlatformVariant) where
platformL = _1
platformVariantL = _2
instance HasPlatform Config where
platformL = lens configPlatform (\x y -> x { configPlatform = y })
platformVariantL = lens configPlatformVariant (\x y -> x { configPlatformVariant = y })
instance HasPlatform BuildConfig
instance HasPlatform EnvConfig
instance HasGHCVariant GHCVariant where
ghcVariantL = id
{-# INLINE ghcVariantL #-}
instance HasGHCVariant Config where
ghcVariantL = to $ fromMaybe GHCStandard . configGHCVariant
instance HasGHCVariant BuildConfig
instance HasGHCVariant EnvConfig
instance HasProcessContext Config where
processContextL = runnerL.processContextL
instance HasProcessContext BuildConfig where
processContextL = configL.processContextL
instance HasProcessContext EnvConfig where
processContextL = configL.processContextL
instance HasPantryConfig Config where
pantryConfigL = lens configPantryConfig (\x y -> x { configPantryConfig = y })
instance HasPantryConfig BuildConfig where
pantryConfigL = configL.pantryConfigL
instance HasPantryConfig EnvConfig where
pantryConfigL = configL.pantryConfigL
instance HasConfig Config where
configL = id
{-# INLINE configL #-}
instance HasConfig BuildConfig where
configL = lens bcConfig (\x y -> x { bcConfig = y })
instance HasConfig EnvConfig
instance HasBuildConfig BuildConfig where
buildConfigL = id
{-# INLINE buildConfigL #-}
instance HasBuildConfig EnvConfig
instance HasCompiler EnvConfig where
compilerPathsL = to envConfigCompilerPaths
instance HasEnvConfig EnvConfig where
envConfigL = id
{-# INLINE envConfigL #-}
instance HasRunner Config where
runnerL = lens configRunner (\x y -> x { configRunner = y })
instance HasRunner BuildConfig where
runnerL = configL.runnerL
instance HasRunner EnvConfig where
runnerL = configL.runnerL
instance HasLogFunc Config where
logFuncL = runnerL.logFuncL
instance HasLogFunc BuildConfig where
logFuncL = runnerL.logFuncL
instance HasLogFunc EnvConfig where
logFuncL = runnerL.logFuncL
instance HasStylesUpdate Config where
stylesUpdateL = runnerL.stylesUpdateL
instance HasStylesUpdate BuildConfig where
stylesUpdateL = runnerL.stylesUpdateL
instance HasStylesUpdate EnvConfig where
stylesUpdateL = runnerL.stylesUpdateL
instance HasTerm Config where
useColorL = runnerL.useColorL
termWidthL = runnerL.termWidthL
instance HasTerm BuildConfig where
useColorL = runnerL.useColorL
termWidthL = runnerL.termWidthL
instance HasTerm EnvConfig where
useColorL = runnerL.useColorL
termWidthL = runnerL.termWidthL
-----------------------------------
-- Helper lenses
-----------------------------------
stackRootL :: HasConfig s => Lens' s (Path Abs Dir)
stackRootL = configL.lens configStackRoot (\x y -> x { configStackRoot = y })
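-- Usage sketch: @root <- view stackRootL@ works inside any @RIO env@ action
-- whose @env@ satisfies 'HasConfig'.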
-- | The compiler specified by the @SnapshotDef@. This may be
-- different from the actual compiler used!
wantedCompilerVersionL :: HasBuildConfig s => Getting r s WantedCompiler
wantedCompilerVersionL = buildConfigL.to (smwCompiler . bcSMWanted)
-- | Location of the ghc-pkg executable
newtype GhcPkgExe = GhcPkgExe (Path Abs File)
deriving Show
-- | Get the 'GhcPkgExe' from a 'HasCompiler' environment
getGhcPkgExe :: HasCompiler env => RIO env GhcPkgExe
getGhcPkgExe = view $ compilerPathsL.to cpPkg
-- | Dump information for a single package
data DumpPackage = DumpPackage
{ dpGhcPkgId :: !GhcPkgId
, dpPackageIdent :: !PackageIdentifier
, dpParentLibIdent :: !(Maybe PackageIdentifier)
, dpLicense :: !(Maybe C.License)
, dpLibDirs :: ![FilePath]
, dpLibraries :: ![Text]
, dpHasExposedModules :: !Bool
, dpExposedModules :: !(Set ModuleName)
, dpDepends :: ![GhcPkgId]
, dpHaddockInterfaces :: ![FilePath]
, dpHaddockHtml :: !(Maybe FilePath)
, dpIsExposed :: !Bool
}
deriving (Show, Read, Eq)
-- | Paths on the filesystem for the compiler we're using
data CompilerPaths = CompilerPaths
{ cpCompilerVersion :: !ActualCompiler
, cpArch :: !Arch
, cpBuild :: !CompilerBuild
, cpCompiler :: !(Path Abs File)
-- | ghc-pkg or equivalent
, cpPkg :: !GhcPkgExe
-- | runghc
, cpInterpreter :: !(Path Abs File)
-- | haddock, in 'IO' to allow deferring the lookup
, cpHaddock :: !(Path Abs File)
-- | Is this a Stack-sandboxed installation?
, cpSandboxed :: !Bool
, cpCabalVersion :: !Version
-- ^ This is the version of Cabal that stack will use to compile Setup.hs files
-- in the build process.
--
-- Note that this is not necessarily the same version as the one that stack
-- depends on as a library and which is displayed when running
-- @stack ls dependencies | grep Cabal@ in the stack project.
, cpGlobalDB :: !(Path Abs Dir)
-- ^ Global package database
, cpGhcInfo :: !ByteString
-- ^ Output of @ghc --info@
, cpGlobalDump :: !(Map PackageName DumpPackage)
}
deriving Show
cpWhich :: (MonadReader env m, HasCompiler env) => m WhichCompiler
cpWhich = view $ compilerPathsL.to (whichCompiler.cpCompilerVersion)
data ExtraDirs = ExtraDirs
{ edBins :: ![Path Abs Dir]
, edInclude :: ![Path Abs Dir]
, edLib :: ![Path Abs Dir]
} deriving (Show, Generic)
instance Semigroup ExtraDirs where
(<>) = mappenddefault
instance Monoid ExtraDirs where
mempty = memptydefault
mappend = (<>)
-- | An environment which ensures that the given compiler is available
-- on the PATH
class HasCompiler env where
compilerPathsL :: SimpleGetter env CompilerPaths
instance HasCompiler CompilerPaths where
compilerPathsL = id
class HasSourceMap env where
sourceMapL :: Lens' env SourceMap
instance HasSourceMap EnvConfig where
sourceMapL = lens envConfigSourceMap (\x y -> x { envConfigSourceMap = y })
-- | The version of the compiler which will actually be used. May be
-- different than that specified in the 'SnapshotDef' and returned
-- by 'wantedCompilerVersionL'.
actualCompilerVersionL :: HasSourceMap env => SimpleGetter env ActualCompiler
actualCompilerVersionL = sourceMapL.to smCompiler
buildOptsL :: HasConfig s => Lens' s BuildOpts
buildOptsL = configL.lens
configBuild
(\x y -> x { configBuild = y })
buildOptsMonoidHaddockL :: Lens' BuildOptsMonoid (Maybe Bool)
buildOptsMonoidHaddockL = lens (getFirstFalse . buildMonoidHaddock)
(\buildMonoid t -> buildMonoid {buildMonoidHaddock = FirstFalse t})
buildOptsMonoidTestsL :: Lens' BuildOptsMonoid (Maybe Bool)
buildOptsMonoidTestsL = lens (getFirstFalse . buildMonoidTests)
(\buildMonoid t -> buildMonoid {buildMonoidTests = FirstFalse t})
buildOptsMonoidBenchmarksL :: Lens' BuildOptsMonoid (Maybe Bool)
buildOptsMonoidBenchmarksL = lens (getFirstFalse . buildMonoidBenchmarks)
(\buildMonoid t -> buildMonoid {buildMonoidBenchmarks = FirstFalse t})
buildOptsMonoidInstallExesL :: Lens' BuildOptsMonoid (Maybe Bool)
buildOptsMonoidInstallExesL =
lens (getFirstFalse . buildMonoidInstallExes)
(\buildMonoid t -> buildMonoid {buildMonoidInstallExes = FirstFalse t})
buildOptsInstallExesL :: Lens' BuildOpts Bool
buildOptsInstallExesL =
lens boptsInstallExes
(\bopts t -> bopts {boptsInstallExes = t})
buildOptsHaddockL :: Lens' BuildOpts Bool
buildOptsHaddockL =
lens boptsHaddock
(\bopts t -> bopts {boptsHaddock = t})
globalOptsBuildOptsMonoidL :: Lens' GlobalOpts BuildOptsMonoid
globalOptsBuildOptsMonoidL =
lens
globalConfigMonoid
(\x y -> x { globalConfigMonoid = y })
.
lens
configMonoidBuildOpts
(\x y -> x { configMonoidBuildOpts = y })
cabalVersionL :: HasCompiler env => SimpleGetter env Version
cabalVersionL = compilerPathsL.to cpCabalVersion
whichCompilerL :: Getting r ActualCompiler WhichCompiler
whichCompilerL = to whichCompiler
envOverrideSettingsL :: HasConfig env => Lens' env (EnvSettings -> IO ProcessContext)
envOverrideSettingsL = configL.lens
configProcessContextSettings
(\x y -> x { configProcessContextSettings = y })
shouldForceGhcColorFlag :: (HasRunner env, HasEnvConfig env)
=> RIO env Bool
shouldForceGhcColorFlag = do
canDoColor <- (>= mkVersion [8, 2, 1]) . getGhcVersion
<$> view actualCompilerVersionL
shouldDoColor <- view useColorL
return $ canDoColor && shouldDoColor
appropriateGhcColorFlag :: (HasRunner env, HasEnvConfig env)
=> RIO env (Maybe String)
appropriateGhcColorFlag = f <$> shouldForceGhcColorFlag
where f True = Just ghcColorForceFlag
f False = Nothing
-- | See 'globalTerminal'
terminalL :: HasRunner env => Lens' env Bool
terminalL = globalOptsL.lens globalTerminal (\x y -> x { globalTerminal = y })
-- | See 'globalReExecVersion'
reExecL :: HasRunner env => SimpleGetter env Bool
reExecL = globalOptsL.to (isJust . globalReExecVersion)
-- | In dev mode, print as a warning, otherwise as debug
prettyStackDevL :: HasConfig env => [StyleDoc] -> RIO env ()
prettyStackDevL docs = do
config <- view configL
if configStackDeveloperMode config
then prettyWarnL docs
else prettyDebugL docs
|
juhp/stack
|
src/Stack/Types/Config.hs
|
bsd-3-clause
| 84,171
| 0
| 20
| 18,347
| 16,137
| 8,806
| 7,331
| 1,968
| 8
|
module Main where
import Lib
import Interpreter
import System.Environment
main :: IO ()
main = do
args <- getArgs
let fileName = head args
runRun $ startProgram fileName
putStrLn "Finished"
|
mohamey/reversible-interpreter
|
app/Main.hs
|
bsd-3-clause
| 202
| 0
| 10
| 42
| 65
| 32
| 33
| 10
| 1
|
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE NondecreasingIndentation #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE CPP #-}
-- | Generally useful definitions that we expect most test scripts
-- to use.
module Test.Cabal.Prelude (
module Test.Cabal.Prelude,
module Test.Cabal.Monad,
module Test.Cabal.Run,
module System.FilePath,
module Control.Monad,
module Distribution.Version,
module Distribution.Simple.Program,
) where
import Test.Cabal.Script
import Test.Cabal.Run
import Test.Cabal.Monad
import Test.Cabal.Plan
import Distribution.Compat.Time (calibrateMtimeChangeDelay)
import Distribution.Simple.Compiler (PackageDBStack, PackageDB(..))
import Distribution.Simple.Program.Types
import Distribution.Simple.Program.Db
import Distribution.Simple.Program
import Distribution.System (OS(Windows,Linux,OSX), buildOS)
import Distribution.Simple.Utils
( withFileContents, tryFindPackageDesc )
import Distribution.Simple.Configure
( getPersistBuildConfig )
import Distribution.Version
import Distribution.Package
import Distribution.Types.UnqualComponentName
import Distribution.Types.LocalBuildInfo
import Distribution.PackageDescription
import Distribution.PackageDescription.Parse
import Distribution.Compat.Stack
import Text.Regex.TDFA
import Control.Concurrent.Async
import qualified Data.Aeson as JSON
import qualified Data.ByteString.Lazy as BSL
import Control.Monad
import Control.Monad.Trans.Reader
import Control.Monad.IO.Class
import qualified Data.ByteString.Char8 as C
import Data.List
import Data.Maybe
import System.Exit
import System.FilePath
import Control.Concurrent (threadDelay)
import qualified Data.Char as Char
import System.Directory
#ifndef mingw32_HOST_OS
import Control.Monad.Catch ( bracket_ )
import System.Posix.Files ( createSymbolicLink )
#endif
------------------------------------------------------------------------
-- * Utilities
runM :: FilePath -> [String] -> TestM Result
runM path args = do
env <- getTestEnv
r <- liftIO $ run (testVerbosity env)
(Just (testCurrentDir env))
(testEnvironment env)
path
args
recordLog r
requireSuccess r
runProgramM :: Program -> [String] -> TestM Result
runProgramM prog args = do
configured_prog <- requireProgramM prog
-- TODO: Consider also using other information from
-- ConfiguredProgram, e.g., env and args
runM (programPath configured_prog) args
getLocalBuildInfoM :: TestM LocalBuildInfo
getLocalBuildInfoM = do
env <- getTestEnv
liftIO $ getPersistBuildConfig (testDistDir env)
------------------------------------------------------------------------
-- * Changing parameters
withDirectory :: FilePath -> TestM a -> TestM a
withDirectory f = withReaderT
(\env -> env { testRelativeCurrentDir = testRelativeCurrentDir env </> f })
-- We append to the environment list, as per 'getEffectiveEnvironment'
-- which prefers the latest override.
withEnv :: [(String, Maybe String)] -> TestM a -> TestM a
withEnv e = withReaderT (\env -> env { testEnvironment = testEnvironment env ++ e })
-- HACK please don't use me
withEnvFilter :: (String -> Bool) -> TestM a -> TestM a
withEnvFilter p = withReaderT (\env -> env { testEnvironment = filter (p . fst) (testEnvironment env) })
------------------------------------------------------------------------
-- * Running Setup
marked_verbose :: String
marked_verbose = "-vverbose +markoutput +nowrap"
setup :: String -> [String] -> TestM ()
setup cmd args = void (setup' cmd args)
setup' :: String -> [String] -> TestM Result
setup' cmd args = do
env <- getTestEnv
when ((cmd == "register" || cmd == "copy") && not (testHavePackageDb env)) $
error "Cannot register/copy without using 'withPackageDb'"
ghc_path <- programPathM ghcProgram
haddock_path <- programPathM haddockProgram
let args' = case cmd of
"configure" ->
-- If the package database is empty, setting --global
-- here will make us error loudly if we try to install
-- into a bad place.
[ "--global"
-- NB: technically unnecessary with Cabal, but
-- definitely needed for Setup, which doesn't
-- respect cabal.config
, "--with-ghc", ghc_path
, "--with-haddock", haddock_path
-- This avoids generating hashes in our package IDs,
-- which helps the test suite's expect tests.
, "--enable-deterministic"
-- These flags make the test suite run faster
                   -- Can't do this unless we set LD_LIBRARY_PATH correctly
-- , "--enable-executable-dynamic"
-- , "--disable-optimization"
-- Specify where we want our installed packages to go
, "--prefix=" ++ testPrefixDir env
] ++ packageDBParams (testPackageDBStack env)
++ args
_ -> args
let rel_dist_dir = definitelyMakeRelative (testCurrentDir env) (testDistDir env)
full_args = cmd : [marked_verbose, "--distdir", rel_dist_dir] ++ args'
defaultRecordMode RecordMarked $ do
recordHeader ["Setup", cmd]
if testCabalInstallAsSetup env
then runProgramM cabalProgram full_args
else do
pdfile <- liftIO $ tryFindPackageDesc (testCurrentDir env)
pdesc <- liftIO $ readGenericPackageDescription (testVerbosity env) pdfile
if buildType (packageDescription pdesc) == Just Simple
then runM (testSetupPath env) full_args
-- Run the Custom script!
else do
r <- liftIO $ runghc (testScriptEnv env)
(Just (testCurrentDir env))
(testEnvironment env)
(testCurrentDir env </> "Setup.hs")
full_args
recordLog r
requireSuccess r
-- This code is very tempting (and in principle should be quick:
-- after all we are loading the built version of Cabal), but
-- actually it costs quite a bit in wallclock time (e.g. 54sec to
-- 68sec on AllowNewer, working with un-optimized Cabal.)
{-
r <- liftIO $ runghc (testScriptEnv env)
(Just (testCurrentDir env))
(testEnvironment env)
"Setup.hs"
(cmd : ["-v", "--distdir", testDistDir env] ++ args')
-- don't forget to check results...
-}
definitelyMakeRelative :: FilePath -> FilePath -> FilePath
definitelyMakeRelative base0 path0 =
let go [] path = joinPath path
go base [] = joinPath (replicate (length base) "..")
go (x:xs) (y:ys)
| x == y = go xs ys
| otherwise = go (x:xs) [] </> go [] (y:ys)
-- NB: It's important to normalize, as otherwise if
-- we see "foo/./bar" we'll incorrectly conclude that we need
-- to go "../../.." to get out of it.
in go (splitPath (normalise base0)) (splitPath (normalise path0))
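-- Worked example: definitelyMakeRelative "/a/b" "/a/c/d" == "../c/d"; the
-- shared "/a" prefix is dropped, one ".." is emitted for the remaining "b"
-- segment of the base, and the rest of the target path is appended.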
-- | This abstracts the common pattern of configuring and then building.
setup_build :: [String] -> TestM ()
setup_build args = do
setup "configure" args
setup "build" []
return ()
-- | This abstracts the common pattern of "installing" a package.
setup_install :: [String] -> TestM ()
setup_install args = do
setup "configure" args
setup "build" []
setup "copy" []
setup "register" []
return ()
-- | This abstracts the common pattern of "installing" a package,
-- with haddock documentation.
setup_install_with_docs :: [String] -> TestM ()
setup_install_with_docs args = do
setup "configure" args
setup "build" []
setup "haddock" []
setup "copy" []
setup "register" []
return ()
packageDBParams :: PackageDBStack -> [String]
packageDBParams dbs = "--package-db=clear"
: map (("--package-db=" ++) . convert) dbs
where
convert :: PackageDB -> String
convert GlobalPackageDB = "global"
convert UserPackageDB = "user"
convert (SpecificPackageDB path) = path
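-- For instance, packageDBParams [GlobalPackageDB, SpecificPackageDB "/tmp/db"]
-- yields ["--package-db=clear", "--package-db=global", "--package-db=/tmp/db"].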
------------------------------------------------------------------------
-- * Running cabal
cabal :: String -> [String] -> TestM ()
cabal "sandbox" _ =
error "Use cabal_sandbox instead"
cabal cmd args = void (cabal' cmd args)
cabal' :: String -> [String] -> TestM Result
cabal' = cabalG' []
cabalG :: [String] -> String -> [String] -> TestM ()
cabalG global_args cmd args = void (cabalG' global_args cmd args)
cabalG' :: [String] -> String -> [String] -> TestM Result
cabalG' _ "sandbox" _ =
-- NB: We don't just auto-pass this through, because it's
-- possible that the first argument isn't the sub-sub-command.
-- So make sure the user specifies it correctly.
error "Use cabal_sandbox' instead"
cabalG' global_args cmd args = do
env <- getTestEnv
-- Freeze writes out cabal.config to source directory, this is not
-- overwritable
when (cmd `elem` ["freeze"]) requireHasSourceCopy
let extra_args
-- Sandboxes manage dist dir
| testHaveSandbox env
= install_args
| cmd `elem` ["update", "outdated", "user-config", "manpage", "freeze"]
= [ ]
-- new-build commands are affected by testCabalProjectFile
| "new-" `isPrefixOf` cmd
= [ "--builddir", testDistDir env
, "--project-file", testCabalProjectFile env
, "-j1" ]
| otherwise
= [ "--builddir", testDistDir env ] ++
install_args
install_args
| cmd == "install"
|| cmd == "build" = [ "-j1" ]
| otherwise = []
extra_global_args
| testHaveSandbox env
= [ "--sandbox-config-file", testSandboxConfigFile env ]
| otherwise
= []
cabal_args = extra_global_args
++ global_args
++ [ cmd, marked_verbose ]
++ extra_args
++ args
defaultRecordMode RecordMarked $ do
recordHeader ["cabal", cmd]
cabal_raw' cabal_args
cabal_sandbox :: String -> [String] -> TestM ()
cabal_sandbox cmd args = void $ cabal_sandbox' cmd args
cabal_sandbox' :: String -> [String] -> TestM Result
cabal_sandbox' cmd args = do
env <- getTestEnv
let cabal_args = [ "--sandbox-config-file", testSandboxConfigFile env
, "sandbox", cmd
, marked_verbose ]
++ args
defaultRecordMode RecordMarked $ do
recordHeader ["cabal", "sandbox", cmd]
cabal_raw' cabal_args
cabal_raw' :: [String] -> TestM Result
cabal_raw' cabal_args = runProgramM cabalProgram cabal_args
withSandbox :: TestM a -> TestM a
withSandbox m = do
env0 <- getTestEnv
-- void $ cabal_raw' ["sandbox", "init", "--sandbox", testSandboxDir env0]
cabal_sandbox "init" ["--sandbox", testSandboxDir env0]
withReaderT (\env -> env { testHaveSandbox = True }) m
withProjectFile :: FilePath -> TestM a -> TestM a
withProjectFile fp m =
withReaderT (\env -> env { testCabalProjectFile = fp }) m
-- | Assuming we've successfully configured a new-build project,
-- read out the plan metadata so that we can use it to do other
-- operations.
withPlan :: TestM a -> TestM a
withPlan m = do
env0 <- getTestEnv
Just plan <- JSON.decode `fmap`
liftIO (BSL.readFile (testDistDir env0 </> "cache" </> "plan.json"))
withReaderT (\env -> env { testPlan = Just plan }) m
-- | Run an executable from a package. Requires 'withPlan' to have
-- been run so that we can find the dist dir.
runPlanExe :: String {- package name -} -> String {- component name -}
-> [String] -> TestM ()
runPlanExe pkg_name cname args = void $ runPlanExe' pkg_name cname args
-- | Run an executable from a package. Requires 'withPlan' to have
-- been run so that we can find the dist dir. Also returns 'Result'.
runPlanExe' :: String {- package name -} -> String {- component name -}
-> [String] -> TestM Result
runPlanExe' pkg_name cname args = do
Just plan <- testPlan `fmap` getTestEnv
let dist_dir = planDistDir plan (mkPackageName pkg_name)
(CExeName (mkUnqualComponentName cname))
defaultRecordMode RecordAll $ do
recordHeader [pkg_name, cname]
runM (dist_dir </> "build" </> cname </> cname) args
------------------------------------------------------------------------
-- * Running ghc-pkg
withPackageDb :: TestM a -> TestM a
withPackageDb m = do
env <- getTestEnv
let db_path = testPackageDbDir env
if testHavePackageDb env
then m
else withReaderT (\nenv ->
nenv { testPackageDBStack
= testPackageDBStack env
++ [SpecificPackageDB db_path]
, testHavePackageDb = True
} )
$ do ghcPkg "init" [db_path]
m
ghcPkg :: String -> [String] -> TestM ()
ghcPkg cmd args = void (ghcPkg' cmd args)
ghcPkg' :: String -> [String] -> TestM Result
ghcPkg' cmd args = do
env <- getTestEnv
unless (testHavePackageDb env) $
error "Must initialize package database using withPackageDb"
-- NB: testDBStack already has the local database
ghcConfProg <- requireProgramM ghcProgram
let db_stack = testPackageDBStack env
extraArgs = ghcPkgPackageDBParams
(fromMaybe
(error "ghc-pkg: cannot detect version")
(programVersion ghcConfProg))
db_stack
recordHeader ["ghc-pkg", cmd]
runProgramM ghcPkgProgram (cmd : extraArgs ++ args)
ghcPkgPackageDBParams :: Version -> PackageDBStack -> [String]
ghcPkgPackageDBParams version dbs = concatMap convert dbs where
convert :: PackageDB -> [String]
  -- Ignoring global/user is dodgy but there's no good
  -- way to give ghc-pkg the correct flags in this case.
convert GlobalPackageDB = []
convert UserPackageDB = []
convert (SpecificPackageDB path)
| version >= mkVersion [7,6]
= ["--package-db=" ++ path]
| otherwise
= ["--package-conf=" ++ path]
------------------------------------------------------------------------
-- * Running other things
-- | Run an executable that was produced by cabal. The @exe_name@
-- is precisely the name of the executable section in the file.
runExe :: String -> [String] -> TestM ()
runExe exe_name args = void (runExe' exe_name args)
runExe' :: String -> [String] -> TestM Result
runExe' exe_name args = do
env <- getTestEnv
defaultRecordMode RecordAll $ do
recordHeader [exe_name]
runM (testDistDir env </> "build" </> exe_name </> exe_name) args
-- | Run an executable that was installed by cabal. The @exe_name@
-- is precisely the name of the executable.
runInstalledExe :: String -> [String] -> TestM ()
runInstalledExe exe_name args = void (runInstalledExe' exe_name args)
-- | Run an executable that was installed by cabal. Use this
-- instead of 'runInstalledExe' if you need to inspect the
-- stdout/stderr output.
runInstalledExe' :: String -> [String] -> TestM Result
runInstalledExe' exe_name args = do
env <- getTestEnv
defaultRecordMode RecordAll $ do
recordHeader [exe_name]
runM (testPrefixDir env </> "bin" </> exe_name) args
-- | Run a shell command in the current directory.
shell :: String -> [String] -> TestM Result
shell exe args = runM exe args
------------------------------------------------------------------------
-- * Repository manipulation
-- Workflows we support:
-- 1. Test comes with some packages (directories in repository) which
-- should be in the repository and available for depsolving/installing
-- into global store.
--
-- Workflows we might want to support in the future
-- * Regression tests may want to test on Hackage index. They will
-- operate deterministically as they will be pinned to a timestamp.
-- (But should we allow this? Have to download the tarballs in that
-- case. Perhaps dep solver only!)
-- * We might sdist a local package, and then upload it to the
-- repository
-- * Some of our tests involve old versions of Cabal. This might
-- be one of the rare cases where we're willing to grab the entire
-- tarball.
--
-- Properties we want to hold:
-- 1. Tests can be run offline. No dependence on hackage.haskell.org
-- beyond what we needed to actually get the build of Cabal working
-- itself
-- 2. Tests are deterministic. Updates to Hackage should not cause
-- tests to fail. (OTOH, it's good to run tests on most recent
-- Hackage index; some sort of canary test which is run nightly.
-- Point is it should NOT be tied to cabal source code.)
--
-- Technical notes:
-- * We depend on hackage-repo-tool binary. It would better if it was
-- libified into hackage-security but this has not been done yet.
--
hackageRepoTool :: String -> [String] -> TestM ()
hackageRepoTool cmd args = void $ hackageRepoTool' cmd args
hackageRepoTool' :: String -> [String] -> TestM Result
hackageRepoTool' cmd args = do
recordHeader ["hackage-repo-tool", cmd]
runProgramM hackageRepoToolProgram (cmd : args)
tar :: [String] -> TestM ()
tar args = void $ tar' args
tar' :: [String] -> TestM Result
tar' args = do
recordHeader ["tar"]
runProgramM tarProgram args
-- | Creates a tarball of a directory, such that if you
-- archive the directory "/foo/bar/baz" to "mine.tgz", @tar tf@ reports
-- @baz/file1@, @baz/file2@, etc.
archiveTo :: FilePath -> FilePath -> TestM ()
src `archiveTo` dst = do
-- TODO: Consider using the @tar@ library?
let (src_parent, src_dir) = splitFileName src
-- TODO: --format ustar, like createArchive?
tar ["-czf", dst, "-C", src_parent, src_dir]
infixr 4 `archiveTo`
-- | Given a directory (relative to the 'testCurrentDir') containing
-- a series of directories representing packages, generate an
-- external repository corresponding to all of these packages
withRepo :: FilePath -> TestM a -> TestM a
withRepo repo_dir m = do
env <- getTestEnv
-- Check if hackage-repo-tool is available, and skip if not
skipUnless =<< isAvailableProgram hackageRepoToolProgram
-- 1. Generate keys
hackageRepoTool "create-keys" ["--keys", testKeysDir env]
-- 2. Initialize repo directory
let package_dir = testRepoDir env </> "package"
liftIO $ createDirectoryIfMissing True (testRepoDir env </> "index")
liftIO $ createDirectoryIfMissing True package_dir
-- 3. Create tarballs
pkgs <- liftIO $ getDirectoryContents (testCurrentDir env </> repo_dir)
forM_ pkgs $ \pkg -> do
case pkg of
'.':_ -> return ()
_ -> testCurrentDir env </> repo_dir </> pkg
`archiveTo`
package_dir </> pkg <.> "tar.gz"
-- 4. Initialize repository
hackageRepoTool "bootstrap" ["--keys", testKeysDir env, "--repo", testRepoDir env]
-- 5. Wire it up in .cabal/config
-- TODO: libify this
let package_cache = testHomeDir env </> ".cabal" </> "packages"
liftIO $ appendFile (testUserCabalConfigFile env)
$ unlines [ "repository test-local-repo"
, " url: file:" ++ testRepoDir env
, " secure: True"
-- TODO: Hypothetically, we could stick in the
-- correct key here
, " root-keys: "
, " key-threshold: 0"
, "remote-repo-cache: " ++ package_cache ]
-- 6. Create local directories (TODO: this is a bug #4136, once you
-- fix that this can be removed)
liftIO $ createDirectoryIfMissing True (package_cache </> "test-local-repo")
-- 7. Update our local index
cabal "update" []
-- 8. Profit
withReaderT (\env' -> env' { testHaveRepo = True }) m
-- TODO: Arguably should undo everything when we're done...
------------------------------------------------------------------------
-- * Subprocess run results
requireSuccess :: Result -> TestM Result
requireSuccess r@Result { resultCommand = cmd
, resultExitCode = exitCode
, resultOutput = output } = withFrozenCallStack $ do
env <- getTestEnv
when (exitCode /= ExitSuccess && not (testShouldFail env)) $
assertFailure $ "Command " ++ cmd ++ " failed.\n" ++
"Output:\n" ++ output ++ "\n"
when (exitCode == ExitSuccess && testShouldFail env) $
assertFailure $ "Command " ++ cmd ++ " succeeded.\n" ++
"Output:\n" ++ output ++ "\n"
return r
initWorkDir :: TestM ()
initWorkDir = do
env <- getTestEnv
liftIO $ createDirectoryIfMissing True (testWorkDir env)
-- | Record a header to help identify the output to the expect
-- log. Unlike the 'recordLog', we don't record all arguments;
-- just enough to give you an idea of what the command might have
-- been. (This is because the arguments may not be deterministic,
-- so we don't want to spew them to the log.)
recordHeader :: [String] -> TestM ()
recordHeader args = do
env <- getTestEnv
let mode = testRecordMode env
str_header = "# " ++ intercalate " " args ++ "\n"
header = C.pack (testRecordNormalizer env str_header)
case mode of
DoNotRecord -> return ()
_ -> do
initWorkDir
liftIO $ putStr str_header
liftIO $ C.appendFile (testWorkDir env </> "test.log") header
liftIO $ C.appendFile (testActualFile env) header
recordLog :: Result -> TestM ()
recordLog res = do
env <- getTestEnv
let mode = testRecordMode env
initWorkDir
liftIO $ C.appendFile (testWorkDir env </> "test.log")
(C.pack $ "+ " ++ resultCommand res ++ "\n"
++ resultOutput res ++ "\n\n")
liftIO . C.appendFile (testActualFile env) . C.pack . testRecordNormalizer env $
case mode of
RecordAll -> unlines (lines (resultOutput res))
RecordMarked -> getMarkedOutput (resultOutput res)
DoNotRecord -> ""
getMarkedOutput :: String -> String -- trailing newline
getMarkedOutput out = unlines (go (lines out) False)
where
go [] _ = []
go (x:xs) True
| "-----END CABAL OUTPUT-----" `isPrefixOf` x
= go xs False
| otherwise = x : go xs True
go (x:xs) False
-- NB: Windows has extra goo at the end
| "-----BEGIN CABAL OUTPUT-----" `isPrefixOf` x
= go xs True
| otherwise = go xs False
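-- For instance, given the output
--   "noise\n-----BEGIN CABAL OUTPUT-----\nhello\n-----END CABAL OUTPUT-----\njunk"
-- only "hello\n" is kept.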
------------------------------------------------------------------------
-- * Test helpers
assertFailure :: WithCallStack (String -> m ())
assertFailure msg = withFrozenCallStack $ error msg
assertEqual :: (Eq a, Show a, MonadIO m) => WithCallStack (String -> a -> a -> m ())
assertEqual s x y =
withFrozenCallStack $
when (x /= y) $
error (s ++ ":\nExpected: " ++ show x ++ "\nActual: " ++ show y)
assertNotEqual :: (Eq a, Show a, MonadIO m) => WithCallStack (String -> a -> a -> m ())
assertNotEqual s x y =
withFrozenCallStack $
when (x == y) $
error (s ++ ":\nGot both: " ++ show x)
assertBool :: MonadIO m => WithCallStack (String -> Bool -> m ())
assertBool s x =
withFrozenCallStack $
unless x $ error s
shouldExist :: MonadIO m => WithCallStack (FilePath -> m ())
shouldExist path =
withFrozenCallStack $
liftIO $ doesFileExist path >>= assertBool (path ++ " should exist")
shouldNotExist :: MonadIO m => WithCallStack (FilePath -> m ())
shouldNotExist path =
withFrozenCallStack $
    liftIO $ doesFileExist path >>= assertBool (path ++ " should not exist") . not
assertRegex :: MonadIO m => String -> String -> Result -> m ()
assertRegex msg regex r =
withFrozenCallStack $
let out = resultOutput r
in assertBool (msg ++ ",\nactual output:\n" ++ out)
(out =~ regex)
fails :: TestM a -> TestM a
fails = withReaderT (\env -> env { testShouldFail = not (testShouldFail env) })
defaultRecordMode :: RecordMode -> TestM a -> TestM a
defaultRecordMode mode = withReaderT (\env -> env {
testRecordDefaultMode = mode
})
recordMode :: RecordMode -> TestM a -> TestM a
recordMode mode = withReaderT (\env -> env {
testRecordUserMode = Just mode
})
recordNormalizer :: (String -> String) -> TestM a -> TestM a
recordNormalizer f =
withReaderT (\env -> env { testRecordNormalizer = testRecordNormalizer env . f })
assertOutputContains :: MonadIO m => WithCallStack (String -> Result -> m ())
assertOutputContains needle result =
withFrozenCallStack $
unless (needle `isInfixOf` (concatOutput output)) $
assertFailure $ " expected: " ++ needle
where output = resultOutput result
assertOutputDoesNotContain :: MonadIO m => WithCallStack (String -> Result -> m ())
assertOutputDoesNotContain needle result =
withFrozenCallStack $
when (needle `isInfixOf` (concatOutput output)) $
assertFailure $ "unexpected: " ++ needle
where output = resultOutput result
assertFindInFile :: MonadIO m => WithCallStack (String -> FilePath -> m ())
assertFindInFile needle path =
withFrozenCallStack $
liftIO $ withFileContents path
(\contents ->
unless (needle `isInfixOf` contents)
(assertFailure ("expected: " ++ needle ++ "\n" ++
" in file: " ++ path)))
assertFileDoesContain :: MonadIO m => WithCallStack (FilePath -> String -> m ())
assertFileDoesContain path needle =
withFrozenCallStack $
liftIO $ withFileContents path
(\contents ->
unless (needle `isInfixOf` contents)
(assertFailure ("expected: " ++ needle ++ "\n" ++
" in file: " ++ path)))
assertFileDoesNotContain :: MonadIO m => WithCallStack (FilePath -> String -> m ())
assertFileDoesNotContain path needle =
withFrozenCallStack $
liftIO $ withFileContents path
(\contents ->
when (needle `isInfixOf` contents)
(assertFailure ("expected: " ++ needle ++ "\n" ++
" in file: " ++ path)))
-- | Replace line breaks with spaces, correctly handling "\r\n".
concatOutput :: String -> String
concatOutput = unwords . lines . filter ((/=) '\r')
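-- For instance, concatOutput "foo\r\nbar\nbaz" == "foo bar baz"; the '\r'
-- characters are removed before the 'lines'/'unwords' pipeline runs.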
------------------------------------------------------------------------
-- * Skipping tests
hasSharedLibraries :: TestM Bool
hasSharedLibraries = do
shared_libs_were_removed <- ghcVersionIs (>= mkVersion [7,8])
return (not (buildOS == Windows && shared_libs_were_removed))
hasProfiledLibraries :: TestM Bool
hasProfiledLibraries = do
env <- getTestEnv
ghc_path <- programPathM ghcProgram
let prof_test_hs = testWorkDir env </> "Prof.hs"
liftIO $ writeFile prof_test_hs "module Prof where"
r <- liftIO $ run (testVerbosity env) (Just (testCurrentDir env))
(testEnvironment env) ghc_path ["-prof", "-c", prof_test_hs]
return (resultExitCode r == ExitSuccess)
-- | Check if the GHC that is used for compiling package tests has
-- a shared library of the cabal library under test in its database.
--
-- An example where this is needed is if you want to dynamically link
-- detailed-0.9 test suites, since those depend on the Cabal library under test.
hasCabalShared :: TestM Bool
hasCabalShared = do
env <- getTestEnv
return (testHaveCabalShared env)
ghcVersionIs :: WithCallStack ((Version -> Bool) -> TestM Bool)
ghcVersionIs f = do
ghc_program <- requireProgramM ghcProgram
case programVersion ghc_program of
Nothing -> error $ "ghcVersionIs: no ghc version for "
++ show (programLocation ghc_program)
Just v -> return (f v)
isWindows :: TestM Bool
isWindows = return (buildOS == Windows)
isOSX :: TestM Bool
isOSX = return (buildOS == OSX)
isLinux :: TestM Bool
isLinux = return (buildOS == Linux)
hasCabalForGhc :: TestM Bool
hasCabalForGhc = do
env <- getTestEnv
ghc_program <- requireProgramM ghcProgram
(runner_ghc_program, _) <- liftIO $ requireProgram
(testVerbosity env)
ghcProgram
(runnerProgramDb (testScriptEnv env))
    -- TODO: to be more robust, what we should specifically check is
    -- that the Cabal library we want to use will be picked up by the
    -- package db stack of ghc-program
return (programPath ghc_program == programPath runner_ghc_program)
-- | If you want to use a Custom setup with new-build, it needs to
-- be 1.20 or later. Ordinarily, Cabal can go off and build a
-- sufficiently recent Cabal if necessary, but in our test suite,
-- by default, we try to avoid doing so (since that involves a
-- rather lengthy build process), instead using the boot Cabal if
-- possible. But some GHCs don't have a recent enough boot Cabal!
-- You'll want to exclude them in that case.
--
hasNewBuildCompatBootCabal :: TestM Bool
hasNewBuildCompatBootCabal = ghcVersionIs (>= mkVersion [7,9])
------------------------------------------------------------------------
-- * Broken tests
expectBroken :: Int -> TestM a -> TestM ()
expectBroken ticket m = do
env <- getTestEnv
liftIO . withAsync (runReaderT m env) $ \a -> do
r <- waitCatch a
case r of
Left e -> do
putStrLn $ "This test is known broken, see #" ++ show ticket ++ ":"
print e
runReaderT expectedBroken env
Right _ -> do
runReaderT unexpectedSuccess env
expectBrokenIf :: Bool -> Int -> TestM a -> TestM ()
expectBrokenIf False _ m = void $ m
expectBrokenIf True ticket m = expectBroken ticket m
expectBrokenUnless :: Bool -> Int -> TestM a -> TestM ()
expectBrokenUnless b = expectBrokenIf (not b)
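-- A minimal usage sketch (illustrative only; the ticket number 9999 is a
-- placeholder, not a real issue): a step wrapped in 'expectBroken' reports a
-- failure as known-broken and a pass as an unexpected success.
_exampleBrokenStep :: TestM ()
_exampleBrokenStep = expectBroken 9999 $ ghc ["--version"]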
------------------------------------------------------------------------
-- * Miscellaneous
git :: String -> [String] -> TestM ()
git cmd args = void $ git' cmd args
git' :: String -> [String] -> TestM Result
git' cmd args = do
recordHeader ["git", cmd]
runProgramM gitProgram (cmd : args)
gcc :: [String] -> TestM ()
gcc args = void $ gcc' args
gcc' :: [String] -> TestM Result
gcc' args = do
recordHeader ["gcc"]
runProgramM gccProgram args
ghc :: [String] -> TestM ()
ghc args = void $ ghc' args
ghc' :: [String] -> TestM Result
ghc' args = do
recordHeader ["ghc"]
runProgramM ghcProgram args
-- | If a test needs to modify or write out source files, it's
-- necessary to make a hermetic copy of the source files to operate
-- on. This function arranges for this to be done.
--
-- This requires the test repository to be a Git checkout, because
-- we use the Git metadata to figure out what files to copy into the
-- hermetic copy.
withSourceCopy :: TestM a -> TestM a
withSourceCopy m = do
env <- getTestEnv
let cwd = testCurrentDir env
dest = testSourceCopyDir env
r <- git' "ls-files" ["--cached", "--modified"]
forM_ (lines (resultOutput r)) $ \f -> do
unless (isTestFile f) $ do
liftIO $ createDirectoryIfMissing True (takeDirectory (dest </> f))
liftIO $ copyFile (cwd </> f) (dest </> f)
withReaderT (\nenv -> nenv { testHaveSourceCopy = True }) m
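-- A minimal usage sketch (illustrative only; "Foo.hs" is a placeholder file
-- name): tests that rewrite source files must run inside 'withSourceCopy',
-- since 'writeSourceFile' below refuses to run without it.
_exampleEditSource :: TestM ()
_exampleEditSource = withSourceCopy $
    writeSourceFile "Foo.hs" "module Foo where\nfoo :: Int\nfoo = 42\n"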
-- | Look up the 'InstalledPackageId' of a package name.
getIPID :: String -> TestM String
getIPID pn = do
r <- ghcPkg' "field" ["--global", pn, "id"]
-- Don't choke on warnings from ghc-pkg
case mapMaybe (stripPrefix "id: ") (lines (resultOutput r)) of
[x] -> return (takeWhile (not . Char.isSpace) x)
_ -> error $ "could not determine id of " ++ pn
-- | Delay a sufficient period of time to permit file timestamp
-- to be updated.
delay :: TestM ()
delay = do
env <- getTestEnv
is_old_ghc <- ghcVersionIs (< mkVersion [7,7])
-- For old versions of GHC, we only had second-level precision,
-- so we need to sleep a full second. Newer versions use
-- millisecond level precision, so we only have to wait
-- the granularity of the underlying filesystem.
-- TODO: cite commit when GHC got better precision; this
-- version bound was empirically generated.
liftIO . threadDelay $
if is_old_ghc
then 1000000
else fromMaybe
(error "Delay must be enclosed by withDelay")
(testMtimeChangeDelay env)
-- | Calibrate file modification time delay, if not
-- already determined.
withDelay :: TestM a -> TestM a
withDelay m = do
env <- getTestEnv
case testMtimeChangeDelay env of
Nothing -> do
-- Figure out how long we need to delay for recompilation tests
(_, mtimeChange) <- liftIO $ calibrateMtimeChangeDelay
withReaderT (\nenv -> nenv { testMtimeChangeDelay = Just mtimeChange }) m
Just _ -> m
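-- A minimal usage sketch (illustrative only): recompilation tests wrap the
-- whole test in 'withDelay' once and call 'delay' before each modification
-- whose timestamp must be observed as newer than the previous build.
_exampleRecompileStep :: TestM ()
_exampleRecompileStep = withDelay $ withSourceCopy $ do
    delay
    writeSourceFile "Foo.hs" "module Foo where\nfoo = 1\n"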
-- | Create a symlink for the duration of the provided action. If the symlink
-- already exists, it is deleted. Does not work on Windows.
withSymlink :: FilePath -> FilePath -> TestM a -> TestM a
#ifdef mingw32_HOST_OS
withSymlink _oldpath _newpath _act =
error "PackageTests.PackageTester.withSymlink: does not work on Windows!"
#else
withSymlink oldpath newpath0 act = do
env <- getTestEnv
let newpath = testCurrentDir env </> newpath0
symlinkExists <- liftIO $ doesFileExist newpath
when symlinkExists $ liftIO $ removeFile newpath
bracket_ (liftIO $ createSymbolicLink oldpath newpath)
(liftIO $ removeFile newpath) act
#endif
writeSourceFile :: FilePath -> String -> TestM ()
writeSourceFile fp s = do
requireHasSourceCopy
cwd <- fmap testCurrentDir getTestEnv
liftIO $ writeFile (cwd </> fp) s
copySourceFileTo :: FilePath -> FilePath -> TestM ()
copySourceFileTo src dest = do
requireHasSourceCopy
cwd <- fmap testCurrentDir getTestEnv
liftIO $ copyFile (cwd </> src) (cwd </> dest)
requireHasSourceCopy :: TestM ()
requireHasSourceCopy = do
env <- getTestEnv
unless (testHaveSourceCopy env) $ do
error "This operation requires a source copy; use withSourceCopy and 'git add' all test files"
-- NB: Keep this synchronized with partitionTests
isTestFile :: FilePath -> Bool
isTestFile f =
case takeExtensions f of
".test.hs" -> True
".multitest.hs" -> True
_ -> False
|
mydaum/cabal
|
cabal-testsuite/Test/Cabal/Prelude.hs
|
bsd-3-clause
| 34,806
| 0
| 22
| 8,876
| 7,754
| 3,957
| 3,797
| 606
| 4
|
type Birds = Int
type Pole = (Birds, Birds)
x -: f = f x
landLeft :: Birds -> Pole -> Maybe Pole
landLeft n (left, right)
| abs ((left + n) - right) < 4 = Just (left + n, right)
| otherwise = Nothing
landRight :: Birds -> Pole -> Maybe Pole
landRight n (left, right)
| abs (left - (right + n)) < 4 = Just (left, right + n)
| otherwise = Nothing
banana :: Pole -> Maybe Pole
banana _ = Nothing
routine :: Maybe Pole
routine = do
start <- return (0,0)
first <- landLeft 2 start
second <- landRight 2 first
landLeft 1 second
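-- An extra walkthrough (not in the original file): the same kind of routine
-- written with (>>=) directly, plus a run that short-circuits to Nothing
-- because of 'banana'.
routine' :: Maybe Pole
routine' = return (0,0) >>= landLeft 2 >>= landRight 2 >>= landLeft 1
-- routine' == Just (3,2)
slip :: Maybe Pole
slip = return (0,0) >>= landLeft 2 >>= banana >>= landRight 2
-- slip == Nothing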
|
ku00/h-book
|
src/TightropeWalking.hs
|
bsd-3-clause
| 597
| 2
| 13
| 186
| 278
| 139
| 139
| 19
| 1
|
{-# LINE 1 "Data.Functor.Classes.hs" #-}
{-# LANGUAGE Safe #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Functor.Classes
-- Copyright : (c) Ross Paterson 2013
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : libraries@haskell.org
-- Stability : experimental
-- Portability : portable
--
-- Liftings of the Prelude classes 'Eq', 'Ord', 'Read' and 'Show' to
-- unary and binary type constructors.
--
-- These classes are needed to express the constraints on arguments of
-- transformers in portable Haskell. Thus for a new transformer @T@,
-- one might write instances like
--
-- > instance (Eq1 f) => Eq1 (T f) where ...
-- > instance (Ord1 f) => Ord1 (T f) where ...
-- > instance (Read1 f) => Read1 (T f) where ...
-- > instance (Show1 f) => Show1 (T f) where ...
--
-- If these instances can be defined, defining instances of the base
-- classes is mechanical:
--
-- > instance (Eq1 f, Eq a) => Eq (T f a) where (==) = eq1
-- > instance (Ord1 f, Ord a) => Ord (T f a) where compare = compare1
-- > instance (Read1 f, Read a) => Read (T f a) where readsPrec = readsPrec1
-- > instance (Show1 f, Show a) => Show (T f a) where showsPrec = showsPrec1
--
-- @since 4.9.0.0
-----------------------------------------------------------------------------
module Data.Functor.Classes (
-- * Liftings of Prelude classes
-- ** For unary constructors
Eq1(..), eq1,
Ord1(..), compare1,
Read1(..), readsPrec1,
Show1(..), showsPrec1,
-- ** For binary constructors
Eq2(..), eq2,
Ord2(..), compare2,
Read2(..), readsPrec2,
Show2(..), showsPrec2,
-- * Helper functions
-- $example
readsData,
readsUnaryWith,
readsBinaryWith,
showsUnaryWith,
showsBinaryWith,
-- ** Obsolete helpers
readsUnary,
readsUnary1,
readsBinary1,
showsUnary,
showsUnary1,
showsBinary1,
) where
import Control.Applicative (Const(Const))
import Data.Functor.Identity (Identity(Identity))
import Data.Proxy (Proxy(Proxy))
import Data.Monoid (mappend)
import Text.Show (showListWith)
-- | Lifting of the 'Eq' class to unary type constructors.
class Eq1 f where
-- | Lift an equality test through the type constructor.
--
-- The function will usually be applied to an equality function,
-- but the more general type ensures that the implementation uses
-- it to compare elements of the first container with elements of
-- the second.
liftEq :: (a -> b -> Bool) -> f a -> f b -> Bool
-- | Lift the standard @('==')@ function through the type constructor.
eq1 :: (Eq1 f, Eq a) => f a -> f a -> Bool
eq1 = liftEq (==)
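-- A small illustration (not part of the original module; it uses the Maybe
-- instance defined further below): the comparison function may relate two
-- *different* element types, which is what the general type of 'liftEq'
-- buys over a plain 'Eq' constraint.
_liftEqExample :: Bool
_liftEqExample = liftEq (\s n -> length s == n) (Just "abc") (Just (3 :: Int))
-- evaluates to True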
-- | Lifting of the 'Ord' class to unary type constructors.
class (Eq1 f) => Ord1 f where
-- | Lift a 'compare' function through the type constructor.
--
-- The function will usually be applied to a comparison function,
-- but the more general type ensures that the implementation uses
-- it to compare elements of the first container with elements of
-- the second.
liftCompare :: (a -> b -> Ordering) -> f a -> f b -> Ordering
-- | Lift the standard 'compare' function through the type constructor.
compare1 :: (Ord1 f, Ord a) => f a -> f a -> Ordering
compare1 = liftCompare compare
-- | Lifting of the 'Read' class to unary type constructors.
class Read1 f where
-- | 'readsPrec' function for an application of the type constructor
-- based on 'readsPrec' and 'readList' functions for the argument type.
liftReadsPrec :: (Int -> ReadS a) -> ReadS [a] -> Int -> ReadS (f a)
-- | 'readList' function for an application of the type constructor
-- based on 'readsPrec' and 'readList' functions for the argument type.
-- The default implementation using standard list syntax is correct
-- for most types.
liftReadList :: (Int -> ReadS a) -> ReadS [a] -> ReadS [f a]
liftReadList rp rl = readListWith (liftReadsPrec rp rl 0)
-- | Read a list (using square brackets and commas), given a function
-- for reading elements.
readListWith :: ReadS a -> ReadS [a]
readListWith rp =
readParen False (\r -> [pr | ("[",s) <- lex r, pr <- readl s])
where
readl s = [([],t) | ("]",t) <- lex s] ++
[(x:xs,u) | (x,t) <- rp s, (xs,u) <- readl' t]
readl' s = [([],t) | ("]",t) <- lex s] ++
[(x:xs,v) | (",",t) <- lex s, (x,u) <- rp t, (xs,v) <- readl' u]
-- | Lift the standard 'readsPrec' and 'readList' functions through the
-- type constructor.
readsPrec1 :: (Read1 f, Read a) => Int -> ReadS (f a)
readsPrec1 = liftReadsPrec readsPrec readList
-- | Lifting of the 'Show' class to unary type constructors.
class Show1 f where
-- | 'showsPrec' function for an application of the type constructor
-- based on 'showsPrec' and 'showList' functions for the argument type.
liftShowsPrec :: (Int -> a -> ShowS) -> ([a] -> ShowS) ->
Int -> f a -> ShowS
-- | 'showList' function for an application of the type constructor
-- based on 'showsPrec' and 'showList' functions for the argument type.
-- The default implementation using standard list syntax is correct
-- for most types.
liftShowList :: (Int -> a -> ShowS) -> ([a] -> ShowS) ->
[f a] -> ShowS
liftShowList sp sl = showListWith (liftShowsPrec sp sl 0)
-- | Lift the standard 'showsPrec' and 'showList' functions through the
-- type constructor.
showsPrec1 :: (Show1 f, Show a) => Int -> f a -> ShowS
showsPrec1 = liftShowsPrec showsPrec showList
-- | Lifting of the 'Eq' class to binary type constructors.
class Eq2 f where
-- | Lift equality tests through the type constructor.
--
-- The function will usually be applied to equality functions,
-- but the more general type ensures that the implementation uses
-- them to compare elements of the first container with elements of
-- the second.
liftEq2 :: (a -> b -> Bool) -> (c -> d -> Bool) -> f a c -> f b d -> Bool
-- | Lift the standard @('==')@ function through the type constructor.
eq2 :: (Eq2 f, Eq a, Eq b) => f a b -> f a b -> Bool
eq2 = liftEq2 (==) (==)
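-- A small illustration (not part of the original module; it uses the (,)
-- instance defined further below): the two lifted comparisons act on the two
-- type arguments of the pair independently, and again may relate different
-- types on each side.
_liftEq2Example :: Bool
_liftEq2Example =
    liftEq2 (==) (\s n -> length s == n) (1 :: Int, "ab") (1 :: Int, 2 :: Int)
-- evaluates to True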
-- | Lifting of the 'Ord' class to binary type constructors.
class (Eq2 f) => Ord2 f where
-- | Lift 'compare' functions through the type constructor.
--
-- The function will usually be applied to comparison functions,
-- but the more general type ensures that the implementation uses
-- them to compare elements of the first container with elements of
-- the second.
liftCompare2 :: (a -> b -> Ordering) -> (c -> d -> Ordering) ->
f a c -> f b d -> Ordering
-- | Lift the standard 'compare' function through the type constructor.
compare2 :: (Ord2 f, Ord a, Ord b) => f a b -> f a b -> Ordering
compare2 = liftCompare2 compare compare
-- | Lifting of the 'Read' class to binary type constructors.
class Read2 f where
-- | 'readsPrec' function for an application of the type constructor
-- based on 'readsPrec' and 'readList' functions for the argument types.
liftReadsPrec2 :: (Int -> ReadS a) -> ReadS [a] ->
(Int -> ReadS b) -> ReadS [b] -> Int -> ReadS (f a b)
-- | 'readList' function for an application of the type constructor
-- based on 'readsPrec' and 'readList' functions for the argument types.
-- The default implementation using standard list syntax is correct
-- for most types.
liftReadList2 :: (Int -> ReadS a) -> ReadS [a] ->
(Int -> ReadS b) -> ReadS [b] -> ReadS [f a b]
liftReadList2 rp1 rl1 rp2 rl2 =
readListWith (liftReadsPrec2 rp1 rl1 rp2 rl2 0)
-- | Lift the standard 'readsPrec' function through the type constructor.
readsPrec2 :: (Read2 f, Read a, Read b) => Int -> ReadS (f a b)
readsPrec2 = liftReadsPrec2 readsPrec readList readsPrec readList
-- | Lifting of the 'Show' class to binary type constructors.
class Show2 f where
-- | 'showsPrec' function for an application of the type constructor
-- based on 'showsPrec' and 'showList' functions for the argument types.
liftShowsPrec2 :: (Int -> a -> ShowS) -> ([a] -> ShowS) ->
(Int -> b -> ShowS) -> ([b] -> ShowS) -> Int -> f a b -> ShowS
-- | 'showList' function for an application of the type constructor
-- based on 'showsPrec' and 'showList' functions for the argument types.
-- The default implementation using standard list syntax is correct
-- for most types.
liftShowList2 :: (Int -> a -> ShowS) -> ([a] -> ShowS) ->
(Int -> b -> ShowS) -> ([b] -> ShowS) -> [f a b] -> ShowS
liftShowList2 sp1 sl1 sp2 sl2 =
showListWith (liftShowsPrec2 sp1 sl1 sp2 sl2 0)
-- | Lift the standard 'showsPrec' function through the type constructor.
showsPrec2 :: (Show2 f, Show a, Show b) => Int -> f a b -> ShowS
showsPrec2 = liftShowsPrec2 showsPrec showList showsPrec showList
-- Instances for Prelude type constructors
instance Eq1 Maybe where
liftEq _ Nothing Nothing = True
liftEq _ Nothing (Just _) = False
liftEq _ (Just _) Nothing = False
liftEq eq (Just x) (Just y) = eq x y
instance Ord1 Maybe where
liftCompare _ Nothing Nothing = EQ
liftCompare _ Nothing (Just _) = LT
liftCompare _ (Just _) Nothing = GT
liftCompare comp (Just x) (Just y) = comp x y
instance Read1 Maybe where
liftReadsPrec rp _ d =
readParen False (\ r -> [(Nothing,s) | ("Nothing",s) <- lex r])
`mappend`
readsData (readsUnaryWith rp "Just" Just) d
instance Show1 Maybe where
liftShowsPrec _ _ _ Nothing = showString "Nothing"
liftShowsPrec sp _ d (Just x) = showsUnaryWith sp "Just" d x
instance Eq1 [] where
liftEq _ [] [] = True
liftEq _ [] (_:_) = False
liftEq _ (_:_) [] = False
liftEq eq (x:xs) (y:ys) = eq x y && liftEq eq xs ys
instance Ord1 [] where
liftCompare _ [] [] = EQ
liftCompare _ [] (_:_) = LT
liftCompare _ (_:_) [] = GT
liftCompare comp (x:xs) (y:ys) = comp x y `mappend` liftCompare comp xs ys
instance Read1 [] where
liftReadsPrec _ rl _ = rl
instance Show1 [] where
liftShowsPrec _ sl _ = sl
instance Eq2 (,) where
liftEq2 e1 e2 (x1, y1) (x2, y2) = e1 x1 x2 && e2 y1 y2
instance Ord2 (,) where
liftCompare2 comp1 comp2 (x1, y1) (x2, y2) =
comp1 x1 x2 `mappend` comp2 y1 y2
instance Read2 (,) where
liftReadsPrec2 rp1 _ rp2 _ _ = readParen False $ \ r ->
[((x,y), w) | ("(",s) <- lex r,
(x,t) <- rp1 0 s,
(",",u) <- lex t,
(y,v) <- rp2 0 u,
(")",w) <- lex v]
instance Show2 (,) where
liftShowsPrec2 sp1 _ sp2 _ _ (x, y) =
showChar '(' . sp1 0 x . showChar ',' . sp2 0 y . showChar ')'
instance (Eq a) => Eq1 ((,) a) where
liftEq = liftEq2 (==)
instance (Ord a) => Ord1 ((,) a) where
liftCompare = liftCompare2 compare
instance (Read a) => Read1 ((,) a) where
liftReadsPrec = liftReadsPrec2 readsPrec readList
instance (Show a) => Show1 ((,) a) where
liftShowsPrec = liftShowsPrec2 showsPrec showList
instance Eq2 Either where
liftEq2 e1 _ (Left x) (Left y) = e1 x y
liftEq2 _ _ (Left _) (Right _) = False
liftEq2 _ _ (Right _) (Left _) = False
liftEq2 _ e2 (Right x) (Right y) = e2 x y
instance Ord2 Either where
liftCompare2 comp1 _ (Left x) (Left y) = comp1 x y
liftCompare2 _ _ (Left _) (Right _) = LT
liftCompare2 _ _ (Right _) (Left _) = GT
liftCompare2 _ comp2 (Right x) (Right y) = comp2 x y
instance Read2 Either where
liftReadsPrec2 rp1 _ rp2 _ = readsData $
readsUnaryWith rp1 "Left" Left `mappend`
readsUnaryWith rp2 "Right" Right
instance Show2 Either where
liftShowsPrec2 sp1 _ _ _ d (Left x) = showsUnaryWith sp1 "Left" d x
liftShowsPrec2 _ _ sp2 _ d (Right x) = showsUnaryWith sp2 "Right" d x
instance (Eq a) => Eq1 (Either a) where
liftEq = liftEq2 (==)
instance (Ord a) => Ord1 (Either a) where
liftCompare = liftCompare2 compare
instance (Read a) => Read1 (Either a) where
liftReadsPrec = liftReadsPrec2 readsPrec readList
instance (Show a) => Show1 (Either a) where
liftShowsPrec = liftShowsPrec2 showsPrec showList
-- Instances for other functors defined in the base package
instance Eq1 Identity where
liftEq eq (Identity x) (Identity y) = eq x y
instance Ord1 Identity where
liftCompare comp (Identity x) (Identity y) = comp x y
instance Read1 Identity where
liftReadsPrec rp _ = readsData $
readsUnaryWith rp "Identity" Identity
instance Show1 Identity where
liftShowsPrec sp _ d (Identity x) = showsUnaryWith sp "Identity" d x
instance Eq2 Const where
liftEq2 eq _ (Const x) (Const y) = eq x y
instance Ord2 Const where
liftCompare2 comp _ (Const x) (Const y) = comp x y
instance Read2 Const where
liftReadsPrec2 rp _ _ _ = readsData $
readsUnaryWith rp "Const" Const
instance Show2 Const where
liftShowsPrec2 sp _ _ _ d (Const x) = showsUnaryWith sp "Const" d x
instance (Eq a) => Eq1 (Const a) where
liftEq = liftEq2 (==)
instance (Ord a) => Ord1 (Const a) where
liftCompare = liftCompare2 compare
instance (Read a) => Read1 (Const a) where
liftReadsPrec = liftReadsPrec2 readsPrec readList
instance (Show a) => Show1 (Const a) where
liftShowsPrec = liftShowsPrec2 showsPrec showList
-- Proxy unfortunately imports this module, hence these instances are placed
-- here.
-- | @since 4.9.0.0
instance Eq1 Proxy where
liftEq _ _ _ = True
-- | @since 4.9.0.0
instance Ord1 Proxy where
liftCompare _ _ _ = EQ
-- | @since 4.9.0.0
instance Show1 Proxy where
liftShowsPrec _ _ _ _ = showString "Proxy"
-- | @since 4.9.0.0
instance Read1 Proxy where
liftReadsPrec _ _ d =
readParen (d > 10) (\r -> [(Proxy, s) | ("Proxy",s) <- lex r ])
-- Building blocks
-- | @'readsData' p d@ is a parser for datatypes where each alternative
-- begins with a data constructor. It parses the constructor and
-- passes it to @p@. Parsers for various constructors can be constructed
-- with 'readsUnary', 'readsUnary1' and 'readsBinary1', and combined with
-- @mappend@ from the @Monoid@ class.
readsData :: (String -> ReadS a) -> Int -> ReadS a
readsData reader d =
readParen (d > 10) $ \ r -> [res | (kw,s) <- lex r, res <- reader kw s]
-- | @'readsUnaryWith' rp n c n'@ matches the name of a unary data constructor
-- and then parses its argument using @rp@.
readsUnaryWith :: (Int -> ReadS a) -> String -> (a -> t) -> String -> ReadS t
readsUnaryWith rp name cons kw s =
[(cons x,t) | kw == name, (x,t) <- rp 11 s]
-- | @'readsBinaryWith' rp1 rp2 n c n'@ matches the name of a binary
-- data constructor and then parses its arguments using @rp1@ and @rp2@
-- respectively.
readsBinaryWith :: (Int -> ReadS a) -> (Int -> ReadS b) ->
String -> (a -> b -> t) -> String -> ReadS t
readsBinaryWith rp1 rp2 name cons kw s =
[(cons x y,u) | kw == name, (x,t) <- rp1 11 s, (y,u) <- rp2 11 t]
-- | @'showsUnaryWith' sp n d x@ produces the string representation of a
-- unary data constructor with name @n@ and argument @x@, in precedence
-- context @d@.
showsUnaryWith :: (Int -> a -> ShowS) -> String -> Int -> a -> ShowS
showsUnaryWith sp name d x = showParen (d > 10) $
showString name . showChar ' ' . sp 11 x
-- | @'showsBinaryWith' sp1 sp2 n d x y@ produces the string
-- representation of a binary data constructor with name @n@ and arguments
-- @x@ and @y@, in precedence context @d@.
showsBinaryWith :: (Int -> a -> ShowS) -> (Int -> b -> ShowS) ->
String -> Int -> a -> b -> ShowS
showsBinaryWith sp1 sp2 name d x y = showParen (d > 10) $
showString name . showChar ' ' . sp1 11 x . showChar ' ' . sp2 11 y
-- Obsolete building blocks
-- | @'readsUnary' n c n'@ matches the name of a unary data constructor
-- and then parses its argument using 'readsPrec'.
{-# DEPRECATED readsUnary "Use readsUnaryWith to define liftReadsPrec" #-}
readsUnary :: (Read a) => String -> (a -> t) -> String -> ReadS t
readsUnary name cons kw s =
[(cons x,t) | kw == name, (x,t) <- readsPrec 11 s]
-- | @'readsUnary1' n c n'@ matches the name of a unary data constructor
-- and then parses its argument using 'readsPrec1'.
{-# DEPRECATED readsUnary1 "Use readsUnaryWith to define liftReadsPrec" #-}
readsUnary1 :: (Read1 f, Read a) => String -> (f a -> t) -> String -> ReadS t
readsUnary1 name cons kw s =
[(cons x,t) | kw == name, (x,t) <- readsPrec1 11 s]
-- | @'readsBinary1' n c n'@ matches the name of a binary data constructor
-- and then parses its arguments using 'readsPrec1'.
{-# DEPRECATED readsBinary1 "Use readsBinaryWith to define liftReadsPrec" #-}
readsBinary1 :: (Read1 f, Read1 g, Read a) =>
String -> (f a -> g a -> t) -> String -> ReadS t
readsBinary1 name cons kw s =
[(cons x y,u) | kw == name,
(x,t) <- readsPrec1 11 s, (y,u) <- readsPrec1 11 t]
-- | @'showsUnary' n d x@ produces the string representation of a unary data
-- constructor with name @n@ and argument @x@, in precedence context @d@.
{-# DEPRECATED showsUnary "Use showsUnaryWith to define liftShowsPrec" #-}
showsUnary :: (Show a) => String -> Int -> a -> ShowS
showsUnary name d x = showParen (d > 10) $
showString name . showChar ' ' . showsPrec 11 x
-- | @'showsUnary1' n d x@ produces the string representation of a unary data
-- constructor with name @n@ and argument @x@, in precedence context @d@.
{-# DEPRECATED showsUnary1 "Use showsUnaryWith to define liftShowsPrec" #-}
showsUnary1 :: (Show1 f, Show a) => String -> Int -> f a -> ShowS
showsUnary1 name d x = showParen (d > 10) $
showString name . showChar ' ' . showsPrec1 11 x
-- | @'showsBinary1' n d x y@ produces the string representation of a binary
-- data constructor with name @n@ and arguments @x@ and @y@, in precedence
-- context @d@.
{-# DEPRECATED showsBinary1 "Use showsBinaryWith to define liftShowsPrec" #-}
showsBinary1 :: (Show1 f, Show1 g, Show a) =>
String -> Int -> f a -> g a -> ShowS
showsBinary1 name d x y = showParen (d > 10) $
showString name . showChar ' ' . showsPrec1 11 x .
showChar ' ' . showsPrec1 11 y
{- $example
These functions can be used to assemble 'Read' and 'Show' instances for
new algebraic types. For example, given the definition
> data T f a = Zero a | One (f a) | Two a (f a)
a standard 'Read1' instance may be defined as
> instance (Read1 f) => Read1 (T f) where
> liftReadsPrec rp rl = readsData $
> readsUnaryWith rp "Zero" Zero `mappend`
> readsUnaryWith (liftReadsPrec rp rl) "One" One `mappend`
> readsBinaryWith rp (liftReadsPrec rp rl) "Two" Two
and the corresponding 'Show1' instance as
> instance (Show1 f) => Show1 (T f) where
> liftShowsPrec sp _ d (Zero x) =
> showsUnaryWith sp "Zero" d x
> liftShowsPrec sp sl d (One x) =
> showsUnaryWith (liftShowsPrec sp sl) "One" d x
> liftShowsPrec sp sl d (Two x y) =
> showsBinaryWith sp (liftShowsPrec sp sl) "Two" d x y
-}
|
phischu/fragnix
|
builtins/base/Data.Functor.Classes.hs
|
bsd-3-clause
| 18,946
| 0
| 14
| 4,380
| 4,992
| 2,652
| 2,340
| 242
| 1
|
-- |
-- Examples in various locations...
--
-- Some random text. Some random text. Some random text. Some random text.
-- Some random text. Some random text. Some random text. Some random text.
-- Some random text.
--
-- >>> let x = 10
--
-- Some random text. Some random text. Some random text. Some random text.
-- Some random text. Some random text. Some random text. Some random text.
-- Some random text.
--
--
-- >>> baz
-- "foobar"
module Foo (
-- | Some documentation not attached to a particular Haskell entity
--
-- >>> test 10
-- *** Exception: Prelude.undefined
test,
-- |
-- >>> fib 10
-- 55
fib,
-- |
-- >>> bar
-- "bar"
bar
) where
-- | My test
--
-- >>> test 20
-- *** Exception: Prelude.undefined
test :: Integer -> Integer
test = undefined
-- | Note that examples for 'fib' include the two examples below
-- and the one example with ^ syntax after 'fib'
--
-- >>> foo
-- "foo"
{- |
Example:
>>> fib 10
55
-}
-- | Calculate Fibonacci number of given `n`.
fib :: Integer -- ^ given `n`
--
-- >>> fib 10
-- 55
-> Integer -- ^ Fibonacci of given `n`
--
-- >>> baz
-- "foobar"
fib 0 = 0
fib 1 = 1
fib n = fib (n - 1) + fib (n - 2)
-- ^ Example:
--
-- >>> fib 5
-- 5
foo = "foo"
bar = "bar"
baz = foo ++ bar
|
ekmett/doctest
|
test/integration/testCommentLocation/Foo.hs
|
mit
| 1,395
| 0
| 8
| 435
| 154
| 107
| 47
| 14
| 1
|
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Haddock.Doc ( module Documentation.Haddock.Doc
, docCodeBlock
, combineDocumentation
) where
import Data.Maybe
import Data.Monoid
import Documentation.Haddock.Doc
import Haddock.Types
combineDocumentation :: Documentation name -> Maybe (Doc name)
combineDocumentation (Documentation Nothing Nothing) = Nothing
combineDocumentation (Documentation mDoc mWarning) =
Just (fromMaybe mempty mWarning <> fromMaybe mempty mDoc)
-- Drop trailing whitespace from @..@ code blocks. Otherwise this:
--
-- -- @
-- -- foo
-- -- @
--
-- turns into (DocCodeBlock "\nfoo\n ") which when rendered in HTML
-- gives an extra vertical space after the code block. The single space
-- on the final line seems to trigger the extra vertical space.
--
docCodeBlock :: DocH mod id -> DocH mod id
docCodeBlock (DocString s)
= DocString (reverse $ dropWhile (`elem` " \t") $ reverse s)
docCodeBlock (DocAppend l r)
= DocAppend l (docCodeBlock r)
docCodeBlock d = d
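-- A minimal usage sketch (not part of the original module; the name is just
-- illustrative): only the final 'DocString' of the block loses its trailing
-- spaces and tabs, earlier parts are left untouched.
_docCodeBlockExample :: DocH mod id
_docCodeBlockExample =
  docCodeBlock (DocAppend (DocString "foo\n") (DocString "bar  "))
-- equivalent to: DocAppend (DocString "foo\n") (DocString "bar")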
|
jgm/haddock
|
src/Haddock/Doc.hs
|
bsd-2-clause
| 1,061
| 0
| 10
| 223
| 220
| 120
| 100
| 18
| 1
|
-- | The orgs API as described on <http://developer.github.com/v3/orgs/>.
module Github.Organizations (
publicOrganizationsFor
,publicOrganizationsFor'
,publicOrganization
,publicOrganization'
,module Github.Data
) where
import Github.Data
import Github.Private
-- | The public organizations for a user, given the user's login, with authorization
--
-- > publicOrganizationsFor' (Just ("github-username", "github-password")) "mike-burns"
publicOrganizationsFor' :: Maybe GithubAuth -> String -> IO (Either Error [SimpleOrganization])
publicOrganizationsFor' auth userName = githubGet' auth ["users", userName, "orgs"]
-- | The public organizations for a user, given the user's login.
--
-- > publicOrganizationsFor "mike-burns"
publicOrganizationsFor :: String -> IO (Either Error [SimpleOrganization])
publicOrganizationsFor = publicOrganizationsFor' Nothing
-- | Details on a public organization. Takes the organization's login.
--
-- > publicOrganization' (Just ("github-username", "github-password")) "thoughtbot"
publicOrganization' :: Maybe GithubAuth -> String -> IO (Either Error Organization)
publicOrganization' auth organizationName = githubGet' auth ["orgs", organizationName]
-- | Details on a public organization. Takes the organization's login.
--
-- > publicOrganization "thoughtbot"
publicOrganization :: String -> IO (Either Error Organization)
publicOrganization = publicOrganization' Nothing
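-- A minimal usage sketch (illustrative only, not part of this module; it
-- assumes the 'Show' instance of 'Error' from "Github.Data"):
_printPublicOrgCount :: String -> IO ()
_printPublicOrgCount user = do
  result <- publicOrganizationsFor user
  case result of
    Left err -> putStrLn ("github error: " ++ show err)
    Right orgs -> putStrLn (user ++ " is in " ++ show (length orgs) ++ " public organizations")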
|
fernandocastor/github
|
Github/Organizations.hs
|
bsd-3-clause
| 1,418
| 0
| 10
| 167
| 210
| 118
| 92
| 16
| 1
|
{-# OPTIONS -fno-warn-tabs #-}
-- The above warning supression flag is a temporary kludge.
-- While working on this module you are encouraged to remove it and
-- detab the module (please do the detabbing in a separate patch). See
-- http://hackage.haskell.org/trac/ghc/wiki/Commentary/CodingStyle#TabsvsSpaces
-- for details
-- | An architecture independent description of a register's class.
module RegClass
( RegClass (..) )
where
import Outputable
import Unique
-- | The class of a register.
-- Used in the register allocator.
-- We treat all registers in a class as being interchangeable.
--
data RegClass
= RcInteger
| RcFloat
| RcDouble
| RcDoubleSSE -- x86 only: the SSE regs are a separate class
deriving Eq
instance Uniquable RegClass where
getUnique RcInteger = mkRegClassUnique 0
getUnique RcFloat = mkRegClassUnique 1
getUnique RcDouble = mkRegClassUnique 2
getUnique RcDoubleSSE = mkRegClassUnique 3
instance Outputable RegClass where
ppr RcInteger = Outputable.text "I"
ppr RcFloat = Outputable.text "F"
ppr RcDouble = Outputable.text "D"
ppr RcDoubleSSE = Outputable.text "S"
|
nomeata/ghc
|
compiler/nativeGen/RegClass.hs
|
bsd-3-clause
| 1,146
| 24
| 7
| 217
| 186
| 101
| 85
| 21
| 0
|
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="id-ID">
<title>Aturan Pindai Aktif - Beta | Eksistensi ZAP</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Isi</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Indeks</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Telusuri</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorit</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
thc202/zap-extensions
|
addOns/ascanrulesBeta/src/main/javahelp/org/zaproxy/zap/extension/ascanrulesBeta/resources/help_id_ID/helpset_id_ID.hs
|
apache-2.0
| 984
| 78
| 67
| 162
| 420
| 212
| 208
| -1
| -1
|
module Stack.Options.ResolverParser where
import Data.Monoid.Extra
import qualified Data.Text as T
import Options.Applicative
import Options.Applicative.Types (readerAsk)
import Stack.Options.Utils
import Stack.Types.Compiler
import Stack.Types.Resolver
-- | Parser for the resolver
abstractResolverOptsParser :: Bool -> Parser AbstractResolver
abstractResolverOptsParser hide =
option readAbstractResolver
(long "resolver" <>
metavar "RESOLVER" <>
help "Override resolver in project file" <>
hideMods hide)
compilerOptsParser :: Bool -> Parser CompilerVersion
compilerOptsParser hide =
option readCompilerVersion
(long "compiler" <>
metavar "COMPILER" <>
help "Use the specified compiler" <>
hideMods hide)
readCompilerVersion :: ReadM CompilerVersion
readCompilerVersion = do
s <- readerAsk
case parseCompilerVersion (T.pack s) of
Nothing -> readerError $ "Failed to parse compiler: " ++ s
Just x -> return x
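-- A minimal usage sketch (illustrative only, not part of stack): the parsers
-- above are plain optparse-applicative 'Parser's, so they can be run on
-- their own, e.g. for a quick manual check of "--resolver" handling.
_parseResolverFromArgs :: IO AbstractResolver
_parseResolverFromArgs =
    execParser (info (abstractResolverOptsParser False <**> helper) fullDesc)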
|
mrkkrp/stack
|
src/Stack/Options/ResolverParser.hs
|
bsd-3-clause
| 1,109
| 0
| 11
| 307
| 224
| 116
| 108
| 28
| 2
|
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="it-IT">
    <title>Active Scan Rules - Alpha | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Ricerca</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
0xkasun/security-tools
|
src/org/zaproxy/zap/extension/wavsepRpt/resources/help_it_IT/helpset_it_IT.hs
|
apache-2.0
| 988
| 80
| 67
| 163
| 422
| 213
| 209
| -1
| -1
|
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE AllowAmbiguousTypes #-}
-- The type of 'empty' is indeed ambiguous
module T2715 where
import Data.Kind (Type)
data Interval v where
Intv :: (Ord v, Enum v) => (v,v) -> Interval v
type family Domain (d :: Type -> Type) :: Type -> Type
type instance Domain Interval = Interval
type family Value (d :: Type -> Type) :: Type
class IDomain d where
empty :: (Ord (Value d), Enum (Value d)) => Domain d (Value d)
class (IDomain d1) -- (IDomain d1, IDomain d2, Value d1 ~ Value d2)
=> IIDomain (d1 :: Type -> Type) (d2 :: Type -> Type ) where
equals :: Domain d1 (Value d1) -> Domain d2 (Value d2) -> Bool
instance Ord (Value Interval)
=> IDomain Interval where
empty = Intv (toEnum 1, toEnum 0)
instance Ord (Value Interval)
=> IIDomain Interval Interval where
equals (Intv ix) (Intv iy) = ix == iy
|
sdiehl/ghc
|
testsuite/tests/indexed-types/should_compile/T2715.hs
|
bsd-3-clause
| 1,021
| 0
| 11
| 223
| 329
| 180
| 149
| -1
| -1
|
{-# LANGUAGE
GADTs
, FlexibleContexts
, RankNTypes
, ScopedTypeVariables
, QuantifiedConstraints #-}
module T2893a where
import Control.Monad.ST
import Data.Array.ST
sortM
:: forall a s.
(Ord a, MArray (STUArray s) a (ST s))
=> [a]
-> ST s [a]
sortM xs = do
arr <- newListArray (1, length xs) xs
:: ST s (STUArray s Int a)
-- do some in-place sorting here
getElems arr
sortP_3
:: (Ord a, forall s. MArray (STUArray s) a (ST s))
=> [a] -> [a]
sortP_3 xs = runST (sortM xs)
|
sdiehl/ghc
|
testsuite/tests/quantified-constraints/T2893a.hs
|
bsd-3-clause
| 520
| 0
| 10
| 134
| 196
| 105
| 91
| -1
| -1
|
{-# LANGUAGE OverloadedStrings #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.Program.Ar
-- Copyright : Duncan Coutts 2009
--
-- Maintainer : cabal-devel@haskell.org
-- Portability : portable
--
-- This module provides a library interface to the @ar@ program.
module Distribution.Simple.Program.Ar (
createArLibArchive,
multiStageProgramInvocation
) where
import Control.Monad (unless)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as BS8
import Data.Char (isSpace)
import Distribution.Compat.CopyFile (filesEqual)
import Distribution.Simple.LocalBuildInfo (LocalBuildInfo(..))
import Distribution.Simple.Program
( arProgram, requireProgram )
import Distribution.Simple.Program.Run
( programInvocation, multiStageProgramInvocation
, runProgramInvocation )
import Distribution.Simple.Utils
( dieWithLocation, withTempDirectory )
import Distribution.System
( Arch(..), OS(..), Platform(..) )
import Distribution.Verbosity
( Verbosity, deafening, verbose )
import System.Directory (doesFileExist, renameFile)
import System.FilePath ((</>), splitFileName)
import System.IO
( Handle, IOMode(ReadWriteMode), SeekMode(AbsoluteSeek)
, hFileSize, hSeek, withBinaryFile )
-- | Call @ar@ to create a library archive from a bunch of object files.
--
createArLibArchive :: Verbosity -> LocalBuildInfo
-> FilePath -> [FilePath] -> IO ()
createArLibArchive verbosity lbi targetPath files = do
(ar, _) <- requireProgram verbosity arProgram progConf
let (targetDir, targetName) = splitFileName targetPath
withTempDirectory verbosity targetDir "objs" $ \ tmpDir -> do
let tmpPath = tmpDir </> targetName
-- The args to use with "ar" are actually rather subtle and system-dependent.
-- In particular we have the following issues:
--
-- -- On OS X, "ar q" does not make an archive index. Archives with no
-- index cannot be used.
--
    -- -- GNU "ar r" will not let us add duplicate objects; only "ar q" lets us
    --    do that. We have duplicates because modules like "A.M" and "B.M"
    --    both make an object file "M.o", and ar does not consider the directory.
--
-- Our solution is to use "ar r" in the simple case when one call is enough.
-- When we need to call ar multiple times we use "ar q" and for the last
-- call on OSX we use "ar qs" so that it'll make the index.
let simpleArgs = case hostOS of
OSX -> ["-r", "-s"]
_ -> ["-r"]
initialArgs = ["-q"]
finalArgs = case hostOS of
OSX -> ["-q", "-s"]
_ -> ["-q"]
extraArgs = verbosityOpts verbosity ++ [tmpPath]
simple = programInvocation ar (simpleArgs ++ extraArgs)
initial = programInvocation ar (initialArgs ++ extraArgs)
middle = initial
final = programInvocation ar (finalArgs ++ extraArgs)
sequence_
[ runProgramInvocation verbosity inv
| inv <- multiStageProgramInvocation
simple (initial, middle, final) files ]
unless (hostArch == Arm -- See #1537
|| hostOS == AIX) $ -- AIX uses its own "ar" format variant
wipeMetadata tmpPath
equal <- filesEqual tmpPath targetPath
unless equal $ renameFile tmpPath targetPath
where
progConf = withPrograms lbi
Platform hostArch hostOS = hostPlatform lbi
verbosityOpts v | v >= deafening = ["-v"]
| v >= verbose = []
| otherwise = ["-c"]
-- | @ar@ by default includes various metadata for each object file in their
-- respective headers, so the output can differ for the same inputs, making
-- it difficult to avoid re-linking. GNU @ar@(1) has a deterministic mode
-- (@-D@) flag that always writes zero for the mtime, UID and GID, and 0644
-- for the file mode. However detecting whether @-D@ is supported seems
-- rather harder than just re-implementing this feature.
wipeMetadata :: FilePath -> IO ()
wipeMetadata path = do
-- Check for existence first (ReadWriteMode would create one otherwise)
exists <- doesFileExist path
unless exists $ wipeError "Temporary file disappeared"
withBinaryFile path ReadWriteMode $ \ h -> hFileSize h >>= wipeArchive h
where
wipeError msg = dieWithLocation path Nothing $
"Distribution.Simple.Program.Ar.wipeMetadata: " ++ msg
archLF = "!<arch>\x0a" -- global magic, 8 bytes
x60LF = "\x60\x0a" -- header magic, 2 bytes
metadata = BS.concat
[ "0 " -- mtime, 12 bytes
, "0 " -- UID, 6 bytes
, "0 " -- GID, 6 bytes
, "0644 " -- mode, 8 bytes
]
headerSize :: Int
headerSize = 60
-- http://en.wikipedia.org/wiki/Ar_(Unix)#File_format_details
wipeArchive :: Handle -> Integer -> IO ()
wipeArchive h archiveSize = do
global <- BS.hGet h (BS.length archLF)
unless (global == archLF) $ wipeError "Bad global header"
wipeHeader (toInteger $ BS.length archLF)
where
wipeHeader :: Integer -> IO ()
wipeHeader offset = case compare offset archiveSize of
EQ -> return ()
GT -> wipeError (atOffset "Archive truncated")
LT -> do
header <- BS.hGet h headerSize
unless (BS.length header == headerSize) $
wipeError (atOffset "Short header")
let magic = BS.drop 58 header
unless (magic == x60LF) . wipeError . atOffset $
"Bad magic " ++ show magic ++ " in header"
let name = BS.take 16 header
let size = BS.take 10 $ BS.drop 48 header
objSize <- case reads (BS8.unpack size) of
[(n, s)] | all isSpace s -> return n
_ -> wipeError (atOffset "Bad file size in header")
let replacement = BS.concat [ name, metadata, size, magic ]
unless (BS.length replacement == headerSize) $
wipeError (atOffset "Something has gone terribly wrong")
hSeek h AbsoluteSeek offset
BS.hPut h replacement
let nextHeader = offset + toInteger headerSize +
-- Odd objects are padded with an extra '\x0a'
if odd objSize then objSize + 1 else objSize
hSeek h AbsoluteSeek nextHeader
wipeHeader nextHeader
where
atOffset msg = msg ++ " at offset " ++ show offset
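-- For reference, the 60-byte header layout rewritten above (byte offsets as
-- in the Wikipedia page linked earlier); only the metadata fields in the
-- middle are zeroed, while name, size and magic are written back unchanged:
--   0..15  file name    16..27 mtime    28..33 UID
--   34..39 GID          40..47 mode     48..57 size
--   58..59 header magic "\x60\x0a"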
|
tolysz/prepare-ghcjs
|
spec-lts8/cabal/Cabal/Distribution/Simple/Program/Ar.hs
|
bsd-3-clause
| 6,640
| 0
| 21
| 1,867
| 1,337
| 707
| 630
| -1
| -1
|
{-# LANGUAGE TypeFamilies, PolyKinds, DataKinds, GADTs #-}
module T14045 where
import Data.Kind
data family Sing (a :: k)
data instance Sing :: Bool -> Type where
SFalse :: Sing False
STrue :: Sing True
|
ezyang/ghc
|
testsuite/tests/indexed-types/should_compile/T14045.hs
|
bsd-3-clause
| 211
| 0
| 6
| 42
| 50
| 31
| 19
| 7
| 0
|
{-# LANGUAGE BangPatterns, CPP #-}
-----------------------------------------------------------------------------
--
-- Code generation for ticky-ticky profiling
--
-- (c) The University of Glasgow 2004-2006
--
-----------------------------------------------------------------------------
{- OVERVIEW: ticky ticky profiling
Please see
http://ghc.haskell.org/trac/ghc/wiki/Debugging/TickyTicky and also
edit it and the rest of this comment to keep them up-to-date if you
change ticky-ticky. Thanks!
*** All allocation ticky numbers are in bytes. ***
Some of the relevant source files:
***not necessarily an exhaustive list***
* some codeGen/ modules import this one
* this module imports cmm/CLabel.hs to manage labels
* cmm/CmmParse.y expands some macros using generators defined in
this module
* includes/stg/Ticky.h declares all of the global counters
* includes/rts/Ticky.h declares the C data type for an
STG-declaration's counters
* some macros defined in includes/Cmm.h (and used within the RTS's
CMM code) update the global ticky counters
* at the end of execution rts/Ticky.c generates the final report
+RTS -r<report-file> -RTS
The rts/Ticky.c function that generates the report includes an
STG-declaration's ticky counters if
* that declaration was entered, or
* it was allocated (if -ticky-allocd)
On either of those events, the counter is "registered" by adding it to
a linked list; cf the CMM generated by registerTickyCtr.
Ticky-ticky profiling has evolved over many years. Many of the
counters from its most sophisticated days are no longer
active/accurate. As the RTS has changed, sometimes the ticky code for
relevant counters was not accordingly updated. Unfortunately, neither
were the comments.
As of March 2013, there still exist deprecated code and comments in
the code generator as well as the RTS because:
* I don't know what is out-of-date versus merely commented out for
momentary convenience, and
* someone else might know how to repair it!
-}
module StgCmmTicky (
withNewTickyCounterFun,
withNewTickyCounterLNE,
withNewTickyCounterThunk,
withNewTickyCounterStdThunk,
tickyDynAlloc,
tickyAllocHeap,
tickyAllocPrim,
tickyAllocThunk,
tickyAllocPAP,
tickyHeapCheck,
tickyStackCheck,
tickyUnknownCall, tickyDirectCall,
tickyPushUpdateFrame,
tickyUpdateFrameOmitted,
tickyEnterDynCon,
tickyEnterStaticCon,
tickyEnterViaNode,
tickyEnterFun,
tickyEnterThunk, tickyEnterStdThunk, -- dynamic non-value
-- thunks only
tickyEnterLNE,
tickyUpdateBhCaf,
tickyBlackHole,
tickyUnboxedTupleReturn, tickyVectoredReturn,
tickyReturnOldCon, tickyReturnNewCon,
tickyKnownCallTooFewArgs, tickyKnownCallExact, tickyKnownCallExtraArgs,
tickySlowCall, tickySlowCallPat,
) where
#include "HsVersions.h"
import StgCmmArgRep ( slowCallPattern , toArgRep , argRepString )
import StgCmmEnv ( NonVoid, unsafe_stripNV )
import StgCmmClosure
import StgCmmUtils
import StgCmmMonad
import StgSyn
import CmmExpr
import MkGraph
import CmmUtils
import CLabel
import SMRep
import Module
import Name
import Id
import BasicTypes
import FastString
import Outputable
import DynFlags
-- Turgid imports for showTypeCategory
import PrelNames
import TcType
import Type
import TyCon
import Data.Maybe
import qualified Data.Char
import Control.Monad ( unless, when )
-----------------------------------------------------------------------------
--
-- Ticky-ticky profiling
--
-----------------------------------------------------------------------------
data TickyClosureType = TickyFun | TickyThunk | TickyLNE
withNewTickyCounterFun, withNewTickyCounterLNE :: Name -> [NonVoid Id] -> FCode a -> FCode a
withNewTickyCounterFun = withNewTickyCounter TickyFun
withNewTickyCounterLNE nm args code = do
b <- tickyLNEIsOn
if not b then code else withNewTickyCounter TickyLNE nm args code
withNewTickyCounterThunk,withNewTickyCounterStdThunk ::
Bool -> Name -> FCode a -> FCode a
withNewTickyCounterThunk isStatic name code = do
b <- tickyDynThunkIsOn
if isStatic || not b -- ignore static thunks
then code
else withNewTickyCounter TickyThunk name [] code
withNewTickyCounterStdThunk = withNewTickyCounterThunk
-- args does not include the void arguments
withNewTickyCounter :: TickyClosureType -> Name -> [NonVoid Id] -> FCode a -> FCode a
withNewTickyCounter cloType name args m = do
lbl <- emitTickyCounter cloType name args
setTickyCtrLabel lbl m
emitTickyCounter :: TickyClosureType -> Name -> [NonVoid Id] -> FCode CLabel
emitTickyCounter cloType name args
= let ctr_lbl = mkRednCountsLabel name in
(>> return ctr_lbl) $
ifTicky $ do
{ dflags <- getDynFlags
; parent <- getTickyCtrLabel
; mod_name <- getModuleName
-- When printing the name of a thing in a ticky file, we
-- want to give the module name even for *local* things. We
        -- print just "x (M)" rather than "M.x" to distinguish them
-- from the global kind.
; let ppr_for_ticky_name :: SDoc
ppr_for_ticky_name =
let n = ppr name
p = case hasHaskellName parent of
-- NB the default "top" ticky ctr does not
-- have a Haskell name
Just pname -> text "in" <+> ppr (nameUnique pname)
_ -> empty
in (<+> p) $ if isInternalName name
then let s = n <+> (parens (ppr mod_name))
in case cloType of
TickyFun -> s
TickyThunk -> s <+> parens (text "thk")
TickyLNE -> s <+> parens (text "LNE")
else case cloType of
TickyFun -> n
TickyThunk -> n <+> parens (text "thk")
TickyLNE -> panic "emitTickyCounter: how is this an external LNE?"
; fun_descr_lit <- newStringCLit $ showSDocDebug dflags ppr_for_ticky_name
; arg_descr_lit <- newStringCLit $ map (showTypeCategory . idType . unsafe_stripNV) args
; emitDataLits ctr_lbl
-- Must match layout of includes/rts/Ticky.h's StgEntCounter
--
-- krc: note that all the fields are I32 now; some were I16
-- before, but the code generator wasn't handling that
-- properly and it led to chaos, panic and disorder.
[ mkIntCLit dflags 0, -- registered?
mkIntCLit dflags (length args), -- Arity
mkIntCLit dflags 0, -- Heap allocated for this thing
fun_descr_lit,
arg_descr_lit,
zeroCLit dflags, -- Entries into this thing
zeroCLit dflags, -- Heap allocated by this thing
zeroCLit dflags -- Link to next StgEntCounter
]
}
-- -----------------------------------------------------------------------------
-- Ticky stack frames
tickyPushUpdateFrame, tickyUpdateFrameOmitted :: FCode ()
tickyPushUpdateFrame = ifTicky $ bumpTickyCounter (fsLit "UPDF_PUSHED_ctr")
tickyUpdateFrameOmitted = ifTicky $ bumpTickyCounter (fsLit "UPDF_OMITTED_ctr")
-- -----------------------------------------------------------------------------
-- Ticky entries
-- NB the name-specific entries are only available for names that have
-- dedicated Cmm code. As far as I know, this just rules out
-- constructor thunks. For them, there is no CMM code block to put the
-- bump of name-specific ticky counter into. On the other hand, we can
-- still track their allocation.
tickyEnterDynCon, tickyEnterStaticCon, tickyEnterViaNode :: FCode ()
tickyEnterDynCon = ifTicky $ bumpTickyCounter (fsLit "ENT_DYN_CON_ctr")
tickyEnterStaticCon = ifTicky $ bumpTickyCounter (fsLit "ENT_STATIC_CON_ctr")
tickyEnterViaNode = ifTicky $ bumpTickyCounter (fsLit "ENT_VIA_NODE_ctr")
tickyEnterThunk :: ClosureInfo -> FCode ()
tickyEnterThunk cl_info
= ifTicky $ do
{ bumpTickyCounter ctr
; unless static $ do
ticky_ctr_lbl <- getTickyCtrLabel
registerTickyCtrAtEntryDyn ticky_ctr_lbl
bumpTickyEntryCount ticky_ctr_lbl }
where
updatable = closureSingleEntry cl_info
static = isStaticClosure cl_info
ctr | static = if updatable then fsLit "ENT_STATIC_THK_SINGLE_ctr"
else fsLit "ENT_STATIC_THK_MANY_ctr"
| otherwise = if updatable then fsLit "ENT_DYN_THK_SINGLE_ctr"
else fsLit "ENT_DYN_THK_MANY_ctr"
tickyEnterStdThunk :: ClosureInfo -> FCode ()
tickyEnterStdThunk = tickyEnterThunk
tickyBlackHole :: Bool{-updatable-} -> FCode ()
tickyBlackHole updatable
= ifTicky (bumpTickyCounter ctr)
where
ctr | updatable = (fsLit "UPD_BH_SINGLE_ENTRY_ctr")
| otherwise = (fsLit "UPD_BH_UPDATABLE_ctr")
tickyUpdateBhCaf :: ClosureInfo -> FCode ()
tickyUpdateBhCaf cl_info
= ifTicky (bumpTickyCounter ctr)
where
ctr | closureUpdReqd cl_info = (fsLit "UPD_CAF_BH_SINGLE_ENTRY_ctr")
| otherwise = (fsLit "UPD_CAF_BH_UPDATABLE_ctr")
tickyEnterFun :: ClosureInfo -> FCode ()
tickyEnterFun cl_info = ifTicky $ do
ctr_lbl <- getTickyCtrLabel
if isStaticClosure cl_info
then do bumpTickyCounter (fsLit "ENT_STATIC_FUN_DIRECT_ctr")
registerTickyCtr ctr_lbl
else do bumpTickyCounter (fsLit "ENT_DYN_FUN_DIRECT_ctr")
registerTickyCtrAtEntryDyn ctr_lbl
bumpTickyEntryCount ctr_lbl
tickyEnterLNE :: FCode ()
tickyEnterLNE = ifTicky $ do
bumpTickyCounter (fsLit "ENT_LNE_ctr")
ifTickyLNE $ do
ctr_lbl <- getTickyCtrLabel
registerTickyCtr ctr_lbl
bumpTickyEntryCount ctr_lbl
-- needn't register a counter upon entry if
--
-- 1) it's for a dynamic closure, and
--
-- 2) -ticky-allocd is on
--
-- since the counter was registered already upon being alloc'd
registerTickyCtrAtEntryDyn :: CLabel -> FCode ()
registerTickyCtrAtEntryDyn ctr_lbl = do
already_registered <- tickyAllocdIsOn
when (not already_registered) $ registerTickyCtr ctr_lbl
registerTickyCtr :: CLabel -> FCode ()
-- Register a ticky counter
-- if ( ! f_ct.registeredp ) {
-- f_ct.link = ticky_entry_ctrs; /* hook this one onto the front of the list */
-- ticky_entry_ctrs = & (f_ct); /* mark it as "registered" */
-- f_ct.registeredp = 1 }
registerTickyCtr ctr_lbl = do
dflags <- getDynFlags
let
-- krc: code generator doesn't handle Not, so we test for Eq 0 instead
test = CmmMachOp (MO_Eq (wordWidth dflags))
[CmmLoad (CmmLit (cmmLabelOffB ctr_lbl
(oFFSET_StgEntCounter_registeredp dflags))) (bWord dflags),
zeroExpr dflags]
register_stmts
= [ mkStore (CmmLit (cmmLabelOffB ctr_lbl (oFFSET_StgEntCounter_link dflags)))
(CmmLoad ticky_entry_ctrs (bWord dflags))
, mkStore ticky_entry_ctrs (mkLblExpr ctr_lbl)
, mkStore (CmmLit (cmmLabelOffB ctr_lbl
(oFFSET_StgEntCounter_registeredp dflags)))
(mkIntExpr dflags 1) ]
ticky_entry_ctrs = mkLblExpr (mkCmmDataLabel rtsPackageKey (fsLit "ticky_entry_ctrs"))
emit =<< mkCmmIfThen test (catAGraphs register_stmts)
tickyReturnOldCon, tickyReturnNewCon :: RepArity -> FCode ()
tickyReturnOldCon arity
= ifTicky $ do { bumpTickyCounter (fsLit "RET_OLD_ctr")
; bumpHistogram (fsLit "RET_OLD_hst") arity }
tickyReturnNewCon arity
= ifTicky $ do { bumpTickyCounter (fsLit "RET_NEW_ctr")
; bumpHistogram (fsLit "RET_NEW_hst") arity }
tickyUnboxedTupleReturn :: RepArity -> FCode ()
tickyUnboxedTupleReturn arity
= ifTicky $ do { bumpTickyCounter (fsLit "RET_UNBOXED_TUP_ctr")
; bumpHistogram (fsLit "RET_UNBOXED_TUP_hst") arity }
tickyVectoredReturn :: Int -> FCode ()
tickyVectoredReturn family_size
= ifTicky $ do { bumpTickyCounter (fsLit "VEC_RETURN_ctr")
; bumpHistogram (fsLit "RET_VEC_RETURN_hst") family_size }
-- -----------------------------------------------------------------------------
-- Ticky calls
-- Ticks at a *call site*:
tickyDirectCall :: RepArity -> [StgArg] -> FCode ()
tickyDirectCall arity args
| arity == length args = tickyKnownCallExact
| otherwise = do tickyKnownCallExtraArgs
tickySlowCallPat (map argPrimRep (drop arity args))
tickyKnownCallTooFewArgs :: FCode ()
tickyKnownCallTooFewArgs = ifTicky $ bumpTickyCounter (fsLit "KNOWN_CALL_TOO_FEW_ARGS_ctr")
tickyKnownCallExact :: FCode ()
tickyKnownCallExact = ifTicky $ bumpTickyCounter (fsLit "KNOWN_CALL_ctr")
tickyKnownCallExtraArgs :: FCode ()
tickyKnownCallExtraArgs = ifTicky $ bumpTickyCounter (fsLit "KNOWN_CALL_EXTRA_ARGS_ctr")
tickyUnknownCall :: FCode ()
tickyUnknownCall = ifTicky $ bumpTickyCounter (fsLit "UNKNOWN_CALL_ctr")
-- Tick for the call pattern at slow call site (i.e. in addition to
-- tickyUnknownCall, tickyKnownCallExtraArgs, etc.)
tickySlowCall :: LambdaFormInfo -> [StgArg] -> FCode ()
tickySlowCall _ [] = return ()
tickySlowCall lf_info args = do
-- see Note [Ticky for slow calls]
if isKnownFun lf_info
then tickyKnownCallTooFewArgs
else tickyUnknownCall
tickySlowCallPat (map argPrimRep args)
tickySlowCallPat :: [PrimRep] -> FCode ()
tickySlowCallPat args = ifTicky $
let argReps = map toArgRep args
(_, n_matched) = slowCallPattern argReps
in if n_matched > 0 && n_matched == length args
then bumpTickyLbl $ mkRtsSlowFastTickyCtrLabel $ concatMap (map Data.Char.toLower . argRepString) argReps
else bumpTickyCounter $ fsLit "VERY_SLOW_CALL_ctr"
{-
Note [Ticky for slow calls]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Terminology is unfortunately a bit mixed up for these calls. codeGen
uses "slow call" to refer to unknown calls and under-saturated known
calls.
Nowadays, though (ie as of the eval/apply paper), the significantly
slower calls are actually just a subset of these: the ones with no
built-in argument pattern (cf StgCmmArgRep.slowCallPattern)
So for ticky profiling, we split slow calls into
"SLOW_CALL_fast_<pattern>_ctr" (those matching a built-in pattern) and
VERY_SLOW_CALL_ctr (those without a built-in pattern; these are very
bad for both space and time).
-}
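-- A worked example of the split (illustrative; the concrete counter names
-- follow the scheme described in the note): a slow call whose arguments are
-- two pointers matches the built-in "pp" pattern, so 'tickySlowCallPat'
-- bumps SLOW_CALL_fast_pp_ctr, whereas a call shape with no built-in pattern
-- bumps VERY_SLOW_CALL_ctr.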
-- -----------------------------------------------------------------------------
-- Ticky allocation
tickyDynAlloc :: Maybe Id -> SMRep -> LambdaFormInfo -> FCode ()
-- Called when doing a dynamic heap allocation; the LambdaFormInfo
-- is used to distinguish between closure types
--
-- TODO what else to count while we're here?
tickyDynAlloc mb_id rep lf = ifTicky $ getDynFlags >>= \dflags ->
let bytes = wORD_SIZE dflags * heapClosureSizeW dflags rep
countGlobal tot ctr = do
bumpTickyCounterBy tot bytes
bumpTickyCounter ctr
countSpecific = ifTickyAllocd $ case mb_id of
Nothing -> return ()
Just id -> do
let ctr_lbl = mkRednCountsLabel (idName id)
registerTickyCtr ctr_lbl
bumpTickyAllocd ctr_lbl bytes
-- TODO are we still tracking "good stuff" (_gds) versus
-- administrative (_adm) versus slop (_slp)? I'm going with all _gds
      -- for now, since I currently know neither whether we do nor how to
      -- distinguish. NSF Mar 2013
in case () of
_ | isConRep rep ->
ifTickyDynThunk countSpecific >>
countGlobal (fsLit "ALLOC_CON_gds") (fsLit "ALLOC_CON_ctr")
| isThunkRep rep ->
ifTickyDynThunk countSpecific >>
if lfUpdatable lf
then countGlobal (fsLit "ALLOC_THK_gds") (fsLit "ALLOC_UP_THK_ctr")
else countGlobal (fsLit "ALLOC_THK_gds") (fsLit "ALLOC_SE_THK_ctr")
| isFunRep rep ->
countSpecific >>
countGlobal (fsLit "ALLOC_FUN_gds") (fsLit "ALLOC_FUN_ctr")
| otherwise -> panic "How is this heap object not a con, thunk, or fun?"
tickyAllocHeap ::
Bool -> -- is this a genuine allocation? As opposed to
-- StgCmmLayout.adjustHpBackwards
VirtualHpOffset -> FCode ()
-- Called when doing a heap check [TICK_ALLOC_HEAP]
-- Must be lazy in the amount of allocation!
tickyAllocHeap genuine hp
= ifTicky $
do { dflags <- getDynFlags
; ticky_ctr <- getTickyCtrLabel
; emit $ catAGraphs $
-- only test hp from within the emit so that the monadic
-- computation itself is not strict in hp (cf knot in
-- StgCmmMonad.getHeapUsage)
if hp == 0 then []
else let !bytes = wORD_SIZE dflags * hp in [
-- Bump the allocation total in the closure's StgEntCounter
addToMem (rEP_StgEntCounter_allocs dflags)
(CmmLit (cmmLabelOffB ticky_ctr (oFFSET_StgEntCounter_allocs dflags)))
bytes,
-- Bump the global allocation total ALLOC_HEAP_tot
addToMemLbl (cLong dflags)
(mkCmmDataLabel rtsPackageKey (fsLit "ALLOC_HEAP_tot"))
bytes,
-- Bump the global allocation counter ALLOC_HEAP_ctr
if not genuine then mkNop
else addToMemLbl (cLong dflags)
(mkCmmDataLabel rtsPackageKey (fsLit "ALLOC_HEAP_ctr"))
1
]}
--------------------------------------------------------------------------------
-- these three are only called from CmmParse.y (ie ultimately from the RTS)
-- the units are bytes
tickyAllocPrim :: CmmExpr -- ^ size of the full header, in bytes
-> CmmExpr -- ^ size of the payload, in bytes
-> CmmExpr -> FCode ()
tickyAllocPrim _hdr _goods _slop = ifTicky $ do
bumpTickyCounter (fsLit "ALLOC_PRIM_ctr")
bumpTickyCounterByE (fsLit "ALLOC_PRIM_adm") _hdr
bumpTickyCounterByE (fsLit "ALLOC_PRIM_gds") _goods
bumpTickyCounterByE (fsLit "ALLOC_PRIM_slp") _slop
tickyAllocThunk :: CmmExpr -> CmmExpr -> FCode ()
tickyAllocThunk _goods _slop = ifTicky $ do
-- TODO is it ever called with a Single-Entry thunk?
bumpTickyCounter (fsLit "ALLOC_UP_THK_ctr")
bumpTickyCounterByE (fsLit "ALLOC_THK_gds") _goods
bumpTickyCounterByE (fsLit "ALLOC_THK_slp") _slop
tickyAllocPAP :: CmmExpr -> CmmExpr -> FCode ()
tickyAllocPAP _goods _slop = ifTicky $ do
bumpTickyCounter (fsLit "ALLOC_PAP_ctr")
bumpTickyCounterByE (fsLit "ALLOC_PAP_gds") _goods
bumpTickyCounterByE (fsLit "ALLOC_PAP_slp") _slop
tickyHeapCheck :: FCode ()
tickyHeapCheck = ifTicky $ bumpTickyCounter (fsLit "HEAP_CHK_ctr")
tickyStackCheck :: FCode ()
tickyStackCheck = ifTicky $ bumpTickyCounter (fsLit "STK_CHK_ctr")
-- -----------------------------------------------------------------------------
-- Ticky utils
ifTicky :: FCode () -> FCode ()
ifTicky code =
getDynFlags >>= \dflags -> when (gopt Opt_Ticky dflags) code
tickyAllocdIsOn :: FCode Bool
tickyAllocdIsOn = gopt Opt_Ticky_Allocd `fmap` getDynFlags
tickyLNEIsOn :: FCode Bool
tickyLNEIsOn = gopt Opt_Ticky_LNE `fmap` getDynFlags
tickyDynThunkIsOn :: FCode Bool
tickyDynThunkIsOn = gopt Opt_Ticky_Dyn_Thunk `fmap` getDynFlags
ifTickyAllocd :: FCode () -> FCode ()
ifTickyAllocd code = tickyAllocdIsOn >>= \b -> when b code
ifTickyLNE :: FCode () -> FCode ()
ifTickyLNE code = tickyLNEIsOn >>= \b -> when b code
ifTickyDynThunk :: FCode () -> FCode ()
ifTickyDynThunk code = tickyDynThunkIsOn >>= \b -> when b code
bumpTickyCounter :: FastString -> FCode ()
bumpTickyCounter lbl = bumpTickyLbl (mkCmmDataLabel rtsPackageKey lbl)
bumpTickyCounterBy :: FastString -> Int -> FCode ()
bumpTickyCounterBy lbl = bumpTickyLblBy (mkCmmDataLabel rtsPackageKey lbl)
bumpTickyCounterByE :: FastString -> CmmExpr -> FCode ()
bumpTickyCounterByE lbl = bumpTickyLblByE (mkCmmDataLabel rtsPackageKey lbl)
bumpTickyEntryCount :: CLabel -> FCode ()
bumpTickyEntryCount lbl = do
dflags <- getDynFlags
bumpTickyLit (cmmLabelOffB lbl (oFFSET_StgEntCounter_entry_count dflags))
bumpTickyAllocd :: CLabel -> Int -> FCode ()
bumpTickyAllocd lbl bytes = do
dflags <- getDynFlags
bumpTickyLitBy (cmmLabelOffB lbl (oFFSET_StgEntCounter_allocd dflags)) bytes
bumpTickyLbl :: CLabel -> FCode ()
bumpTickyLbl lhs = bumpTickyLitBy (cmmLabelOffB lhs 0) 1
bumpTickyLblBy :: CLabel -> Int -> FCode ()
bumpTickyLblBy lhs = bumpTickyLitBy (cmmLabelOffB lhs 0)
bumpTickyLblByE :: CLabel -> CmmExpr -> FCode ()
bumpTickyLblByE lhs = bumpTickyLitByE (cmmLabelOffB lhs 0)
bumpTickyLit :: CmmLit -> FCode ()
bumpTickyLit lhs = bumpTickyLitBy lhs 1
bumpTickyLitBy :: CmmLit -> Int -> FCode ()
bumpTickyLitBy lhs n = do
dflags <- getDynFlags
emit (addToMem (bWord dflags) (CmmLit lhs) n)
bumpTickyLitByE :: CmmLit -> CmmExpr -> FCode ()
bumpTickyLitByE lhs e = do
dflags <- getDynFlags
emit (addToMemE (bWord dflags) (CmmLit lhs) e)
bumpHistogram :: FastString -> Int -> FCode ()
bumpHistogram _lbl _n
-- = bumpHistogramE lbl (CmmLit (CmmInt (fromIntegral n) cLongWidth))
= return () -- TEMP SPJ Apr 07
-- six years passed - still temp? JS Aug 2013
{-
bumpHistogramE :: LitString -> CmmExpr -> FCode ()
bumpHistogramE lbl n
= do t <- newTemp cLong
emitAssign (CmmLocal t) n
emit (mkCmmIfThen (CmmMachOp (MO_U_Le cLongWidth) [CmmReg (CmmLocal t), eight])
(mkAssign (CmmLocal t) eight))
emit (addToMem cLong
(cmmIndexExpr cLongWidth
(CmmLit (CmmLabel (mkRtsDataLabel lbl)))
(CmmReg (CmmLocal t)))
1)
where
eight = CmmLit (CmmInt 8 cLongWidth)
-}
------------------------------------------------------------------
-- Showing the "type category" for ticky-ticky profiling
showTypeCategory :: Type -> Char
{-
+ dictionary
> function
{C,I,F,D,W} char, int, float, double, word
{c,i,f,d,w} unboxed ditto
T tuple
P other primitive type
p unboxed ditto
L list
E enumeration type
S other single-constructor type
M other multi-constructor data-con type
. other type
- reserved for others to mark as "uninteresting"
Accurate as of Mar 2013, but I eliminated the Array category instead
of updating it, for simplicity. It's in P/p, I think --NSF
-}
showTypeCategory ty
| isDictTy ty = '+'
| otherwise = case tcSplitTyConApp_maybe ty of
Nothing -> '.'
Just (tycon, _) ->
(if isUnLiftedTyCon tycon then Data.Char.toLower else \x -> x) $
let anyOf us = getUnique tycon `elem` us in
case () of
_ | anyOf [funTyConKey] -> '>'
| anyOf [charPrimTyConKey, charTyConKey] -> 'C'
| anyOf [doublePrimTyConKey, doubleTyConKey] -> 'D'
| anyOf [floatPrimTyConKey, floatTyConKey] -> 'F'
| anyOf [intPrimTyConKey, int32PrimTyConKey, int64PrimTyConKey,
intTyConKey, int8TyConKey, int16TyConKey, int32TyConKey, int64TyConKey
] -> 'I'
| anyOf [wordPrimTyConKey, word32PrimTyConKey, word64PrimTyConKey, wordTyConKey,
word8TyConKey, word16TyConKey, word32TyConKey, word64TyConKey
] -> 'W'
| anyOf [listTyConKey] -> 'L'
| isTupleTyCon tycon -> 'T'
| isPrimTyCon tycon -> 'P'
| isEnumerationTyCon tycon -> 'E'
| isJust (tyConSingleDataCon_maybe tycon) -> 'S'
| otherwise -> 'M' -- oh, well...
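-- A couple of illustrative results (hypothetical; assumes the wired-in
-- types intTy and mkListTy from TysWiredIn are in scope):
--   showTypeCategory intTy            == 'I'
--   showTypeCategory (mkListTy intTy) == 'L'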
|
urbanslug/ghc
|
compiler/codeGen/StgCmmTicky.hs
|
bsd-3-clause
| 23,790
| 58
| 25
| 5,662
| 4,236
| 2,164
| 2,072
| 358
| 7
|
{-# LANGUAGE MultiParamTypeClasses, FlexibleContexts #-}
-- !!! Scopes in kind checking
-- Exposes a bizarre bug in 4.08.1
-- TestSh.hs:6:
-- `Shape' is not in scope
-- When checking kinds in `HasConfigValue Shape nodeTypeParms'
-- In the class declaration for `HasShape'
module ShouldCompile where
data Shape value = Box | Circle
class HasConfigValue Shape nodeTypeParms => HasShape nodeTypeParms where {}
class HasConfigValue option configuration where
($$$) :: option value -> configuration value -> configuration value
|
olsner/ghc
|
testsuite/tests/typecheck/should_compile/tc108.hs
|
bsd-3-clause
| 551
| 0
| 9
| 103
| 77
| 44
| 33
| -1
| -1
|
{-# LANGUAGE GADTs #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TemplateHaskell #-}
module Main (main) where
import Language.Haskell.TH
infixr 7 :***:
data GADT a where
Prefix :: Int -> Int -> GADT Int
(:***:) :: Int -> Int -> GADT Int
$(do gadtName <- newName "GADT2"
prefixName <- newName "Prefix2"
infixName <- newName ":****:"
a <- newName "a"
return [ DataD [] gadtName [KindedTV a StarT] Nothing
[ GadtC [prefixName]
[ (Bang NoSourceUnpackedness NoSourceStrictness,ConT ''Int)
, (Bang NoSourceUnpackedness NoSourceStrictness,ConT ''Int)
] (AppT (ConT gadtName) (ConT ''Int))
, GadtC [infixName]
[ (Bang NoSourceUnpackedness NoSourceStrictness,ConT ''Int)
, (Bang NoSourceUnpackedness NoSourceStrictness,ConT ''Int)
] (AppT (ConT gadtName) (ConT ''Int))
] []
, InfixD (Fixity 7 InfixR) infixName
])
$(return [])
deriving instance Show (GADT2 a)
main :: IO ()
main = do
-- Verify that infix GADT constructors reify correctly
putStrLn $(reify ''GADT >>= stringE . pprint)
putStrLn $(reify '(:***:) >>= stringE . pprint)
-- Verify that reifyFixity returns something with (:***:)
-- (but not with Prefix, since it has no fixity declaration)
putStrLn $(reifyFixity 'Prefix >>= stringE . show)
putStrLn $(reifyFixity '(:***:) >>= stringE . show)
-- Verify that spliced-in GADT infix constructors are actually infix
print (1 :****: 4)
|
ezyang/ghc
|
testsuite/tests/th/T11345.hs
|
bsd-3-clause
| 1,599
| 0
| 18
| 427
| 456
| 232
| 224
| 34
| 1
|
{-# LANGUAGE ForeignFunctionInterface #-}
module B028.F where
foreign export ccall f :: Int -> Int
f :: Int -> Int
f n = n + 1
|
urbanslug/ghc
|
testsuite/tests/driver/B028/F.hs
|
bsd-3-clause
| 127
| 0
| 6
| 26
| 42
| 24
| 18
| 5
| 1
|
module Shaker.ModuleDataTest
where
import Control.Monad.Reader
import Data.List
import Data.Maybe
import Shaker.CommonTest
import Shaker.ModuleData
import Shaker.Type
import System.FilePath
import Test.HUnit
getAllModuleData :: IO [ModuleData]
getAllModuleData = testShakerInput >>= runReaderT parseAllModuleData
testReadWriteModuleData :: Assertion
testReadWriteModuleData = do
shIn <- testShakerInput
moduleData <- getTestModuleData "ModuleDataTest.hs"
runReaderT (writeModuleData moduleData) shIn
let testFile = testDirectory </> "ModuleDataTest.hs"
parsedModule <- runReaderT (parseModuleDataIfExist testFile) shIn
  Just moduleData == parsedModule @? "Module data should be equal, got " ++ show moduleData ++ " and " ++ show parsedModule
testGroupModuleData :: Assertion
testGroupModuleData = do
shIn <- testShakerInput
parsedMod <- fmap groupByValidTargets (runReaderT parseAllModuleData shIn)
let res = filter ( any ( \ md -> "/noHsSource.hs" `isSuffixOf` moduleDataFileName md) ) parsedMod
length res == 1 @? show res
testNubModuleData :: Assertion
testNubModuleData = do
parsedMod <- fmap nub getAllModuleData
let res = filter ( \ md -> "/CabalTest.hs" `isSuffixOf` moduleDataFileName md) parsedMod
length res == 1 @? show res
testGetNonMain :: Assertion
testGetNonMain = do
shIn <- testShakerInput
cpIn <- runReaderT getNonMainCompileInput shIn
let filtered = filter (isSuffixOf "CabalTest.hs" ) (compileInputTargetFiles cpIn )
length filtered ==1 @? show filtered
testModuleDataFileName :: Assertion
testModuleDataFileName = do
modData <- getTestModuleData "ModuleDataTest.hs"
"ModuleDataTest.hs" `isSuffixOf` moduleDataFileName modData @? show modData
testModuleHasMain :: Assertion
testModuleHasMain = do
parsedMod <- testShakerInput >>= runReaderT (parseModuleData "prog/Shaker.hs") >>= return . fromJust
moduleDataHasMain parsedMod @? "Should have main, got " ++ show parsedMod
testModuleDataHasTests :: Assertion
testModuleDataHasTests = do
modData <- getTestModuleData "HsHelperTest.hs"
regexModData <- getTestModuleData "RegexTest.hs"
hsModuleDataHasTest modData @? show modData
(not.null) (moduleDataProperties modData) @? show modData
(not.null) (moduleDataAssertions modData) @? show modData
(not.null) (moduleDataProperties regexModData) @? show regexModData
|
bonnefoa/Shaker
|
testsuite/tests/Shaker/ModuleDataTest.hs
|
isc
| 2,351
| 0
| 16
| 332
| 616
| 296
| 320
| 52
| 1
|
import Control.Monad (liftM)
import Data.List (isPrefixOf)
import System.Environment (getArgs)
newtype CmdLineParserT a = CmdLineParser ([String] -> (a, [String]))
runCmdLineParser :: CmdLineParserT a -> [String] -> (a, [String])
runCmdLineParser (CmdLineParser p) = p
instance Monad CmdLineParserT where
return v = CmdLineParser (\xs -> (v, xs))
p >>= f = CmdLineParser (\xs -> let (v, xs') = runCmdLineParser p xs
in runCmdLineParser (f v) xs')
data ParameterT = TrailingNewline | Help | Version | Unrecognized String
deriving Show
data ConfigT = Config { trailingNewline :: Bool
, backslashEscapes :: Bool
, displayHelp :: Bool
, displayVersion :: Bool
}
defaultConfig :: ConfigT
defaultConfig = Config { trailingNewline = True
, backslashEscapes = False
, displayHelp = False
, displayVersion = False
}
cmdLinePeek :: CmdLineParserT (Maybe String)
cmdLinePeek = CmdLineParser peek
where peek [] = (Nothing, [])
        peek l@(x:_) = (Just x, l)
cmdLineItem :: CmdLineParserT String
cmdLineItem = CmdLineParser (\(x:xs) -> (x, xs))
cmdLineParser :: CmdLineParserT (Maybe ParameterT)
cmdLineParser = do
a <- cmdLinePeek
case a of
"-n" -> ret TrailingNewline
"-h" -> ret Help
"--help" -> ret Help
"-V" -> ret Version
"--version" -> ret Version
_ -> return (if "-" `isPrefixOf` a
then Just $ Unrecognized a
else Nothing)
where ret v = cmdLineItem >> return (Just v)
main :: IO ()
main = do args <- getArgs
print $ runCmdLineParser cmdLineParser args
-- getArgs >>= putStrLn . unwords
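-- Illustrative run (hypothetical shell session):
-- $ runhaskell Echo.hs -n foo
-- (Just TrailingNewline,["foo"])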
|
fredmorcos/attic
|
projects/hscoreutils/Echo.hs
|
isc
| 1,912
| 1
| 14
| 665
| 556
| 301
| 255
| 43
| 7
|
module KMC.Syntax.Char where
import Data.Char (toLower, toUpper)
import Text.Parsec.Prim (try, (<?>), (<|>))
import Text.ParserCombinators.Parsec.Char (char)
import KMC.Syntax.ParserTypes
-- | Match the lowercase or uppercase form of 'c'
caseInsensitiveChar :: Char -> Parser Char
caseInsensitiveChar c = char (toLower c) <|> char (toUpper c)
-- | Match an any-cased version of a string.
caseInsensitiveString :: String -> Parser String
caseInsensitiveString s = try (mapM caseInsensitiveChar s) <?> "\"" ++ s ++ "\""
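-- Illustrative behaviour (hypothetical GHCi session; assumes 'Parser' is an
-- ordinary Parsec string parser, so parseTest applies):
-- >>> parseTest (caseInsensitiveString "select") "SeLeCt"
-- "SeLeCt"
-- Note that the result is the sequence of characters actually consumed,
-- not a case-normalised copy of the pattern.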
|
diku-kmc/regexps-syntax
|
KMC/Syntax/Char.hs
|
mit
| 606
| 0
| 10
| 159
| 146
| 83
| 63
| 9
| 1
|
module Minimax where
|
mrkgnao/tictactoe-minimax
|
Minimax.hs
|
mit
| 23
| 0
| 2
| 5
| 4
| 3
| 1
| 1
| 0
|
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeSynonymInstances #-}
module IHaskell.Display.Widgets.Float.BoundedFloatRange.FloatRangeSlider (
-- * The FloatRangeSlider
-- Widget
FloatRangeSlider,
-- * Constructor
mkFloatRangeSlider) where
-- To keep `cabal repl` happy when running from the ihaskell repo
import Prelude
import Data.Aeson
import qualified Data.HashMap.Strict as HM
import Data.IORef (newIORef)
import qualified Data.Scientific as Sci
import Data.Text (Text)
import qualified Data.Vector as V
import Data.Vinyl (Rec(..), (<+>))
import IHaskell.Display
import IHaskell.Eval.Widgets
import IHaskell.IPython.Message.UUID as U
import IHaskell.Display.Widgets.Types
import IHaskell.Display.Widgets.Common
-- | 'FloatRangeSlider' represents an FloatRangeSlider widget from IPython.html.widgets.
type FloatRangeSlider = IPythonWidget FloatRangeSliderType
-- | Create a new widget
mkFloatRangeSlider :: IO FloatRangeSlider
mkFloatRangeSlider = do
-- Default properties, with a random uuid
uuid <- U.random
let boundedFloatAttrs = defaultBoundedFloatRangeWidget "FloatSliderView" "FloatSliderModel"
sliderAttrs = (Orientation =:: HorizontalOrientation)
:& (ShowRange =:: True)
:& (ReadOut =:: True)
:& (SliderColor =:: "")
:& RNil
widgetState = WidgetState $ boundedFloatAttrs <+> sliderAttrs
stateIO <- newIORef widgetState
let widget = IPythonWidget uuid stateIO
-- Open a comm for this widget, and store it in the kernel state
widgetSendOpen widget $ toJSON widgetState
-- Return the widget
return widget
instance IHaskellDisplay FloatRangeSlider where
display b = do
widgetSendView b
return $ Display []
instance IHaskellWidget FloatRangeSlider where
getCommUUID = uuid
comm widget (Object dict1) _ = do
let key1 = "sync_data" :: Text
key2 = "value" :: Text
Just (Object dict2) = HM.lookup key1 dict1
Just (Array values) = HM.lookup key2 dict2
[x, y] = map (\(Number x) -> Sci.toRealFloat x) $ V.toList values
setField' widget FloatPairValue (x, y)
triggerChange widget
|
sumitsahrawat/IHaskell
|
ihaskell-display/ihaskell-widgets/src/IHaskell/Display/Widgets/Float/BoundedFloatRange/FloatRangeSlider.hs
|
mit
| 2,367
| 0
| 16
| 579
| 481
| 266
| 215
| 49
| 1
|
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE CPP #-}
-- most of the codes in this file are directly copied from JuicyPixel
module BCM.Visualize.Internal where
#if !MIN_VERSION_base(4,8,0)
import Foreign.ForeignPtr.Safe( ForeignPtr, castForeignPtr )
#else
import Foreign.ForeignPtr( ForeignPtr, castForeignPtr )
#endif
import Foreign.Storable( Storable, sizeOf )
import Data.Word
import qualified Data.ByteString as B
import Data.Vector.Storable (Vector, unsafeToForeignPtr)
import qualified Data.ByteString.Internal as S
import qualified Data.Vector.Generic as G
import Data.Colour
import Data.Colour.SRGB
import Data.Conduit.Zlib as Z
import Data.Conduit
import qualified Data.Conduit.List as CL
import BCM.Visualize.Internal.Types
preparePngHeader :: Int -> Int -> PngImageType -> Word8 -> PngIHdr
preparePngHeader w h imgType depth = PngIHdr
{ width = fromIntegral w
, height = fromIntegral h
, bitDepth = depth
, colourType = imgType
, compressionMethod = 0
, filterMethod = 0
, interlaceMethod = PngNoInterlace
}
prepareIDatChunk :: B.ByteString -> PngRawChunk
prepareIDatChunk imgData = PngRawChunk
{ chunkLength = fromIntegral $ B.length imgData
, chunkType = iDATSignature
, chunkCRC = pngComputeCrc [iDATSignature, imgData]
, chunkData = imgData
}
endChunk :: PngRawChunk
endChunk = PngRawChunk { chunkLength = 0
, chunkType = iENDSignature
, chunkCRC = pngComputeCrc [iENDSignature]
, chunkData = B.empty
}
preparePalette :: Palette -> PngRawChunk
preparePalette pal = PngRawChunk
{ chunkLength = fromIntegral $ G.length pal
, chunkType = pLTESignature
, chunkCRC = pngComputeCrc [pLTESignature, binaryData]
, chunkData = binaryData
}
where binaryData = B.concat [toByteString pal]
toByteString :: forall a. (Storable a) => Vector a -> B.ByteString
toByteString vec = S.PS (castForeignPtr ptr) offset (len * size)
where (ptr, offset, len) = unsafeToForeignPtr vec
size = sizeOf (undefined :: a)
{-# INLINE toByteString #-}
coloursToPalette :: [Colour Double] -> Palette
coloursToPalette = G.fromList . concatMap f
where
f c = let RGB r g b = toSRGB24 c
in [r,g,b]
{-# INLINE coloursToPalette #-}
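-- Illustrative (hypothetical GHCi session; assumes Palette is a storable
-- vector of Word8, as its use in preparePalette suggests):
-- >>> coloursToPalette [sRGB24 255 0 0, sRGB24 0 255 0]
-- [255,0,0,0,255,0]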
toPngData :: Conduit [Word8] IO B.ByteString
toPngData = CL.map (B.pack . (0:)) $= Z.compress 5 Z.defaultWindowBits
{-# INLINE toPngData #-}
|
kaizhang/BCMtools
|
src/BCM/Visualize/Internal.hs
|
mit
| 2,536
| 0
| 11
| 570
| 615
| 360
| 255
| 57
| 1
|
{-# LANGUAGE DeriveGeneric #-}
module D20.Internal.Damage where
import GHC.Generics
data ElementalDamageType
= Fire
| Ice
| Electricity
deriving (Show,Generic)
data PhysicalDamageType
= Slashing
| Bludgeoning
| Piercing
deriving (Show,Generic)
data DamageType
= Physical PhysicalDamageType
| Elemental ElementalDamageType
| Poison
deriving (Show,Generic)
data Damage =
Damage {getValue :: Int
,getType :: DamageType}
deriving (Show,Generic)
data Critical =
Critical {getCriticalRange :: (Int,Int)
,getCriticalMultiplier :: Int}
deriving (Show,Generic)
|
elkorn/d20
|
src/D20/Internal/Damage.hs
|
mit
| 611
| 0
| 9
| 124
| 159
| 95
| 64
| 26
| 0
|
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE RecordWildCards #-}
module Main where
import Control.Monad (when)
import Control.Parallel.Strategies (withStrategy, parBuffer, rseq)
import Data.Bson (Document, ObjectId, (=:), (!?))
import Data.GeoIP2 (GeoDB, GeoResult(..), openGeoDB, findGeoData)
import Data.IP (IP)
import Data.Maybe (catMaybes, listToMaybe)
import Data.Text (Text)
import Database.MongoDB ( Query(..), UpdateOption(..), Action, WriteResult, AccessMode(..)
, connect, access, find, updateMany, rest, host, close, select )
import System.Environment (getArgs, getProgName)
import System.Exit (exitFailure)
data GeoIPData = GeoIPData { docId :: ObjectId
, stateCode :: Maybe Text
, countryCode :: Maybe Text
} deriving Show
main :: IO ()
main = do
args <- getArgs
when (length args /= 3) $ usage >> exitFailure
conn <- connect . host $ args !! 0
let run = access conn UnconfirmedWrites "shrunk"
visits <- run getVisits
geodb <- openGeoDB $ args !! 1
let bufSz = read $ args !! 2
writeResult <- run . updateDocuments $ getAllGeoIPData bufSz geodb visits
print writeResult
close conn
usage :: IO ()
usage = do
progName <- getProgName
putStrLn $ "usage: " ++ progName ++ " [db host] [geodb path] [parBuffer buf size]"
getVisits :: Action IO [Document]
getVisits = find (select [] "visits") {project = ["_id" =: 1, "source_ip" =: 1]} >>= rest
getGeoIPData :: GeoDB -> Document -> Maybe GeoIPData
getGeoIPData db doc = do
docId <- doc !? "_id"
sourceIp <- doc !? "source_ip"
geoResult <- either (const Nothing) Just $ findGeoData db "en" (read sourceIp :: IP)
return GeoIPData { docId
, stateCode = fst <$> listToMaybe (geoSubdivisions geoResult)
, countryCode = geoCountryISO geoResult
}
getAllGeoIPData :: Int -> GeoDB -> [Document] -> [GeoIPData]
getAllGeoIPData bufSz db = catMaybes . withStrategy (parBuffer bufSz rseq) . map (getGeoIPData db)
updateDocuments :: [GeoIPData] -> Action IO WriteResult
updateDocuments = updateMany "visits" . map mkUpdate
where mkUpdate GeoIPData { .. } = ( ["_id" =: docId]
, ["$set" =: [ "state_code" =: stateCode
, "country_code" =: countryCode
]
]
, [Upsert]
)
|
oss/shrunk
|
backend/scripts/add-geoip/src/Main.hs
|
mit
| 2,507
| 0
| 13
| 776
| 752
| 405
| 347
| -1
| -1
|
-- Maximum subarray sum
-- http://www.codewars.com/kata/54521e9ec8e60bc4de000d6c/
module MaxSequence where
import Data.List (inits)
maxSequence :: [Int] -> Int
maxSequence [] = 0
maxSequence (x:xs) = if m < 0 then 0 else m
where m = max (maximum . map ((+x) . sum) . inits $ xs) (maxSequence xs)
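-- For example (hypothetical GHCi session):
-- >>> maxSequence [-2, 1, -3, 4, -1, 2, 1, -5, 4]
-- 6
-- >>> maxSequence [-1, -2, -3]
-- 0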
|
gafiatulin/codewars
|
src/5 kyu/MaxSequence.hs
|
mit
| 303
| 0
| 15
| 55
| 112
| 63
| 49
| 6
| 2
|
{-# LANGUAGE TypeSynonymInstances, GADTs, KindSignatures, DataKinds, TypeFamilies #-}
{-# LANGUAGE TemplateHaskell #-}
module Singletons where
import Data.Singletons.Prelude
import Data.Singletons.TH
import Data.Type.Equality
instance TestEquality SBool where
testEquality STrue STrue = Just Refl
testEquality SFalse SFalse = Just Refl
testEquality _ _ = Nothing
data Nat = Z | S Nat
deriving (Show, Eq, Ord)
genSingletons [''Nat]
singDecideInstances [''Nat]
|
vladfi1/hs-misc
|
TestSingletons.hs
|
mit
| 475
| 0
| 7
| 73
| 117
| 63
| 54
| 14
| 0
|
module SchemeSpec.ParserSpec where
import Test.Hspec
import Data.Symbol
import Scheme.Parser
import Scheme.Types
import SpecUtils
spec :: Spec
spec = do
describe "parseScheme" $ do
it "parses integers" $ do
parseScheme "42" `shouldBe` Right (Val 42)
it "parses names" $ do
parseScheme "x" `shouldBe` Right (Var $ intern "x")
it "parses lambda sexprs" $ do
parseScheme "(lambda (x) x)" `shouldBe` Right (Lam (intern "x") (Var $ intern "x"))
it "parses application sexprs" $ do
parseScheme "(x x)" `shouldBe` Right (App (Var $ intern "x") (Var $ intern "x"))
it "parses complicated sexprs" $ do
(parseScheme "(((lambda (x) (lambda (v) x)) 42) 7)" `shouldBe`) $ Right
(App (App (Lam (intern "x") (Lam (intern "v") (Var $ intern "x")))
(Val 42))
(Val 7))
(parseScheme "(((lambda (x) (lambda (f) (f x))) 42) (lambda (y) y))" `shouldBe`) $ Right
(App (App (Lam (intern "x")
(Lam (intern "f")
(App (Var $ intern "f") (Var $ intern "x"))))
(Val 42))
(Lam (intern "y") (Var $ intern "y")))
it "errors on invalid input" $ do
parseScheme "" `shouldSatisfy` isError
parseScheme "(x)" `shouldSatisfy` isError
parseScheme "(x x x)" `shouldSatisfy` isError
|
mjdwitt/a-scheme
|
test/SchemeSpec/ParserSpec.hs
|
mit
| 1,350
| 0
| 27
| 388
| 479
| 236
| 243
| 32
| 1
|
-- Copyright (c) 2012 Yohsuke YUKISHITA, https://github.com/ykst
--
-- Permission is hereby granted, free of charge, to any person obtaining
-- a copy of this software and associated documentation files (the
-- "Software"), to deal in the Software without restriction, including
-- without limitation the rights to use, copy, modify, merge, publish,
-- distribute, sublicense, and/or sell copies of the Software, and to
-- permit persons to whom the Software is furnished to do so, subject to
-- the following conditions:
--
-- The above copyright notice and this permission notice shall be
-- included in all copies or substantial portions of the Software.
--
-- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-- EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-- MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-- NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
-- LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-- OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
-- WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
module AST where
import Text.Parsec (SourcePos)
import Data.Tree (Tree(..),drawTree)
data AST =
List [AST]
|Assign SourcePos ScopeVar AST
|VarDef SourcePos String AST
|Cond SourcePos AST AST AST
|Loop SourcePos AST AST
|Literal FVal
|Call SourcePos ScopeVar [AST]
|Construct SourcePos ScopeVar [AST]
|Dot SourcePos AST AST
|Fetch SourcePos ScopeVar
|Return AST
|Nil deriving (Eq)
data FVal =
VInt {intVal :: Integer}
|VString {stringVal :: String}
|VBool {boolVal :: Bool}
|VFunc {fargs :: [String], fBody :: AST, hId :: Maybe Int}
|VObj {scopeVal :: Int}
|VBot deriving (Eq)
data ScopeVar = Local String | Outside String | Intrinsic String deriving (Eq,Show)
data FType = TInt | TString | TBool | TAny | TObj | TBot deriving (Eq,Show)
viewType :: FVal -> FType
viewType v = case v of
VInt _ -> TInt
VString _ -> TString
VBool _ -> TBool
VObj _ -> TObj
_ -> TBot
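-- e.g. viewType (VInt 3) == TInt, while closures fall through to the
-- catch-all case: viewType (VFunc [] Nil Nothing) == TBot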
instance Show AST where
show = drawTree . toNode
toNode :: AST -> Tree String
toNode dt = case dt of
List asts -> Node "list" $ map toNode asts
Assign _ s ast -> Node ("assign " ++ show s) [toNode ast]
VarDef _ s ast -> Node ("var " ++ s ) [toNode ast]
Cond _ c t f -> Node "cond" $ map toNode [c,t,f]
Loop _ c b -> Node "loop" [toNode b]
Call _ n args -> Node ("call " ++ show n) $ map toNode args
Literal (VFunc args b _) -> Node ("funcliteral " ++ show args) [toNode b]
Literal v -> Node ("literal " ++ show v) []
Fetch _ s -> Node ("fetch " ++ show s) []
Return ast -> Node "return" [toNode ast]
Nil -> Node "nil" []
instance Show FVal where
show v = case v of
VInt i -> show i
VString str -> str
VBool b
|b -> "true"
|otherwise -> "false"
VFunc _ _ v -> "function at " ++ (case v of
Just i -> show i
Nothing -> "?")
VObj i -> "object at " ++ show i
VBot -> "()"
|
ykst/funlang
|
src/AST.hs
|
mit
| 3,068
| 0
| 14
| 726
| 852
| 448
| 404
| 59
| 11
|
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.SVGStringList
(js_clear, clear, js_initialize, initialize, js_getItem, getItem,
js_insertItemBefore, insertItemBefore, js_replaceItem, replaceItem,
js_removeItem, removeItem, js_appendItem, appendItem,
js_getNumberOfItems, getNumberOfItems, SVGStringList,
castToSVGStringList, gTypeSVGStringList)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSVal(..), JSString)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSVal(..), FromJSVal(..))
import GHCJS.Marshal.Pure (PToJSVal(..), PFromJSVal(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.JSFFI.Generated.Enums
foreign import javascript unsafe "$1[\"clear\"]()" js_clear ::
SVGStringList -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGStringList.clear Mozilla SVGStringList.clear documentation>
clear :: (MonadIO m) => SVGStringList -> m ()
clear self = liftIO (js_clear (self))
foreign import javascript unsafe "$1[\"initialize\"]($2)"
js_initialize :: SVGStringList -> JSString -> IO JSString
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGStringList.initialize Mozilla SVGStringList.initialize documentation>
initialize ::
(MonadIO m, ToJSString item, FromJSString result) =>
SVGStringList -> item -> m result
initialize self item
= liftIO
(fromJSString <$> (js_initialize (self) (toJSString item)))
foreign import javascript unsafe "$1[\"getItem\"]($2)" js_getItem
:: SVGStringList -> Word -> IO JSString
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGStringList.getItem Mozilla SVGStringList.getItem documentation>
getItem ::
(MonadIO m, FromJSString result) =>
SVGStringList -> Word -> m result
getItem self index
= liftIO (fromJSString <$> (js_getItem (self) index))
foreign import javascript unsafe "$1[\"insertItemBefore\"]($2, $3)"
js_insertItemBefore ::
SVGStringList -> JSString -> Word -> IO JSString
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGStringList.insertItemBefore Mozilla SVGStringList.insertItemBefore documentation>
insertItemBefore ::
(MonadIO m, ToJSString item, FromJSString result) =>
SVGStringList -> item -> Word -> m result
insertItemBefore self item index
= liftIO
(fromJSString <$>
(js_insertItemBefore (self) (toJSString item) index))
foreign import javascript unsafe "$1[\"replaceItem\"]($2, $3)"
js_replaceItem :: SVGStringList -> JSString -> Word -> IO JSString
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGStringList.replaceItem Mozilla SVGStringList.replaceItem documentation>
replaceItem ::
(MonadIO m, ToJSString item, FromJSString result) =>
SVGStringList -> item -> Word -> m result
replaceItem self item index
= liftIO
(fromJSString <$> (js_replaceItem (self) (toJSString item) index))
foreign import javascript unsafe "$1[\"removeItem\"]($2)"
js_removeItem :: SVGStringList -> Word -> IO JSString
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGStringList.removeItem Mozilla SVGStringList.removeItem documentation>
removeItem ::
(MonadIO m, FromJSString result) =>
SVGStringList -> Word -> m result
removeItem self index
= liftIO (fromJSString <$> (js_removeItem (self) index))
foreign import javascript unsafe "$1[\"appendItem\"]($2)"
js_appendItem :: SVGStringList -> JSString -> IO JSString
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGStringList.appendItem Mozilla SVGStringList.appendItem documentation>
appendItem ::
(MonadIO m, ToJSString item, FromJSString result) =>
SVGStringList -> item -> m result
appendItem self item
= liftIO
(fromJSString <$> (js_appendItem (self) (toJSString item)))
foreign import javascript unsafe "$1[\"numberOfItems\"]"
js_getNumberOfItems :: SVGStringList -> IO Word
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGStringList.numberOfItems Mozilla SVGStringList.numberOfItems documentation>
getNumberOfItems :: (MonadIO m) => SVGStringList -> m Word
getNumberOfItems self = liftIO (js_getNumberOfItems (self))
|
manyoo/ghcjs-dom
|
ghcjs-dom-jsffi/src/GHCJS/DOM/JSFFI/Generated/SVGStringList.hs
|
mit
| 4,788
| 64
| 11
| 784
| 1,068
| 601
| 467
| 77
| 1
|
module Data.Trie where
-- Trie module. Partly taken from http://www.haskell.org/haskellwiki/Haskell_Quiz/Word_Search/Solution_Sjanssen
import qualified Data.Map as Map
import Control.Monad
data Trie = Trie Bool (Map.Map Char Trie) deriving (Show)
-- | A blank Trie
empty :: Trie
empty = Trie False Map.empty
-- | Insert a new string into the trie.
insert :: String -> Trie -> Trie
insert [] (Trie _ m) = Trie True m
insert (x:xs) (Trie b m) = Trie b $ Map.alter (maybe (Just $ fromString xs) (Just . insert xs)) x m
fromString :: String -> Trie
fromString = foldr (\x xs -> Trie False (Map.singleton x xs)) (Trie True Map.empty)
-- | Take a list of String and compress it into a Trie
fromList :: [String] -> Trie
fromList = foldr insert empty
-- | Take a trie and expand it into the strings that it represents
toList :: Trie -> [String]
toList (Trie b m) =
if b then "":expand
else expand
where expand = [ char:word | (char, trie) <- Map.toList m,
word <- toList trie ]
-- | Takes a trie and a prefix and returns the sub-trie
-- of words with that prefix
lookupPrefix :: (MonadPlus m) => String -> Trie -> m Trie
lookupPrefix [] trie = return trie
lookupPrefix (x:xs) (Trie _ m) = liftMaybe (Map.lookup x m) >>= lookupPrefix xs
liftMaybe :: MonadPlus m => Maybe a -> m a
liftMaybe Nothing = mzero
liftMaybe (Just x) = return x
-- | Finds the longest certain path down the trie starting at the root
-- Invariant Assumption: All paths have at least one 'true' node below them
forcedNext :: Trie -> String
forcedNext (Trie _ m) =
if length ls == 1 then
let (char, trie) = head ls in
char:forcedNext trie
else []
where ls = Map.toList m
-- | Helper function, finds all the suffixes of a given prefix
possibleSuffixes :: String -> Trie -> [String]
possibleSuffixes prefix fulltrie =
lookupPrefix prefix fulltrie >>= toList
-- | Helper function, finds the longest certain path down the trie starting at a given word
certainSuffix :: String -> Trie -> String
certainSuffix prefix fulltrie =
lookupPrefix prefix fulltrie >>= forcedNext
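-- A small usage sketch (illustrative only, hypothetical GHCi session):
-- >>> let t = fromList ["car", "cart", "cat"]
-- >>> toList t
-- ["car","cart","cat"]
-- >>> possibleSuffixes "ca" t
-- ["r","rt","t"]
-- >>> certainSuffix "car" t
-- "t"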
|
codemac/yi-editor
|
src/Data/Trie.hs
|
gpl-2.0
| 2,133
| 0
| 11
| 462
| 623
| 325
| 298
| 38
| 2
|
module Tree.Tree where
data Tree a = Leaf a | Node (Tree a) a (Tree a) deriving Show
t :: Tree Int
t = Node (Node (Leaf 1) 2 (Leaf 3)) 4 (Node (Leaf 5) 6 (Leaf 7))
occurs :: Eq a => Tree a -> a -> Bool
occurs (Leaf n) m = n == m
occurs (Node l n r) m = (n == m) ||
(occurs l m) ||
(occurs r m)
-- *Ch10> map (occurs t) [0..8]
-- [False,True,True,True,True,True,True,True,False]
flatten :: Tree a -> [a]
flatten (Leaf n) = [n]
flatten (Node l n r) = flatten l ++ [n] ++ flatten r
-- for search trees (left subtrees hold smaller values, right subtrees larger values)
-- so we can rewrite occurs to be faster
occursST :: (Eq a, Ord a) => Tree a -> a -> Bool
occursST (Leaf n) m = m == n
occursST (Node l n r) m | m == n = True
| m < n = occursST l m
| otherwise = occursST r m
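-- A couple more illustrative evaluations, in the same style as above:
-- *Ch10> flatten t
-- [1,2,3,4,5,6,7]
-- *Ch10> occursST t 5
-- True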
|
ardumont/haskell-lab
|
src/Tree/Tree.hs
|
gpl-2.0
| 893
| 0
| 9
| 298
| 382
| 194
| 188
| 17
| 1
|
{-# language TemplateHaskell #-}
{-# language DisambiguateRecordFields #-}
module Up.Store where
import Up.Type
import Autolib.ToDoc
import Autolib.FiniteMap () -- for instances
import qualified Data.Map as M
import qualified Control.Monad.State.Strict as S
data Value = ValUnit
| ValInt Integer
| ValClosure { link :: Int
, body :: Exp
}
data Frame = Frame { number :: Int
, dynamic_link :: Int
, static_link :: Int
, values :: M.Map Name Value
}
data Store = Store { step :: Int
, max_steps :: Int
, store :: M.Map Int Frame
}
derives [makeToDoc] [''Value, ''Frame, ''Store ]
blank :: Int -> Store
blank r = Store
{ step = 0
, max_steps = r
, store = M.empty
}
tick :: Monad m => S.StateT Store m ()
tick = S.modify $ \ s -> s { step = succ $ step s }
-- | allocate new empty frame, return its address
frame :: Monad m
=> Int -> Int
-> S.StateT Store m Int
frame dyn stat = do
s <- S.get
let n = succ $ M.size $ store s
let f = Frame { number = n
, values = M.empty
, dynamic_link = dyn
, static_link = stat
}
S.put $ s { store = M.insert n f $ store s }
return n
|
marcellussiegburg/autotool
|
collection/src/Up/Store.hs
|
gpl-2.0
| 1,423
| 0
| 12
| 576
| 412
| 232
| 180
| 39
| 1
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module Main where
import Types
import qualified Data.ByteString.Lazy as L
import Data.Text
import Data.Text.Encoding
import Data.Text.IO (putStrLn)
import Network.HTTP.Conduit (httpLbs, parseUrl,
responseBody, urlEncodedBody,
withManager)
import Network.OAuth.OAuth2
import Network.OAuth.OAuth2.HttpClient
import Network.HTTP.Types (hContentType)
import Data.Aeson
import qualified Data.ByteString.Char8 as BS
import qualified Prelude as P (id)
import System.Environment (getArgs)
import Prelude hiding (concat, putStrLn)
import qualified Text.HTML.DOM as DOM
import Text.XML
import Text.XML.Cursor
import Data.Time
import Data.Time.Calendar
import Data.Time.Calendar.WeekDate
main :: IO ()
main = do
args <- getArgs
let group = case args of
             [a] -> a
_ -> "3538"
(ifmoCal, token) <- doGetInfo
let (Just calendarId) = Types.id ifmoCal
schedule <- getSchedule group
date <- getCurrentTime >>= return . utctDay
let (year, month, day') = toGregorian date
let (curWeek, day) = (\(_, a, c) -> (a, day' - (c - 1))) $ toWeekDate date
mapM_ (\(a, b) -> mapM (\el -> toCalendar token calendarId year (toInteger month) (toInteger day) (toInteger curWeek) a el) b) schedule
print "Success!"
toCalendar token calendarId year month day curWeek dayOfTheWeek a = authPostBSwithBody token (BS.pack ("https://www.googleapis.com/calendar/v3/calendars/" ++ unpack calendarId ++ "/events")) [] $ getRequest year (toInteger month) (toInteger day) (toInteger curWeek) (toInteger dayOfTheWeek) (startTime a) (endTime a) (week a) (place a) (title a) (teacher a)
createEvent token = authPostJSONWithBody token "https://www.googleapis.com/calendar/v3/calendars" [] (BS.pack "{\n\"summary\": \"ifmoSchedule\"\n}")
getRequest year month day' curWeek dayOfTheWeek startT endT interval' location name orginizer = result
where day = show $ ((+) dayOfTheWeek) $ if (((interval') == ("\1095\1077\1090")) && (even curWeek)) || (((interval') == ("\1085\1077\1095")) && (not $ even curWeek)) then day' else if (interval') == "\160" then day' else day' + 7
interval = show $ if interval' == "\160" then 1 else 2
result = encodeUtf8 ( pack "{\n \"end\": {\n \"dateTime\": \"" `append` (pack $ show year) `append` pack "-" `append` (pack $ show month) `append` pack "-" `append` pack day `append` pack "T" `append` endT `append` pack ":00\",\n \"timeZone\": \"Europe/Moscow\"\n },\n \"start\": {\n \"dateTime\": \""`append` (pack $ show year) `append` pack "-" `append` (pack $ show month) `append` pack "-" `append` pack day `append` pack "T" `append` startT `append` pack ":00\",\n \"timeZone\": \"Europe/Moscow\"\n },\n \"recurrence\": [\n \"RRULE:FREQ=WEEKLY;INTERVAL=" `append` pack interval `append` pack ";WKST=SU;UNTIL=20140627T000000Z\"\n ],\n \"location\": \"" `append` location `append` pack "\",\n \"organizer\": {\n \"displayName\": \"" `append` orginizer `append` pack "\"\n },\n \"summary\": \"" `append` name `append` pack "\"\n}" )
getSchedule group = do
initReq <- parseUrl "http://www.ifmo.ru/module/isu_schedule.php"
let req' = initReq
let req = (flip urlEncodedBody) req' $
[ ("group", BS.pack group),
("week", "0") ]
response <- withManager $ httpLbs req
let page = responseBody response
cursor <- cursorFor page
let res = Prelude.filter (/= pack ": лек+пр.") $ cursor $// findNodes &// content
return $ toSchedule res []
doGetInfo = do
BS.putStrLn $ authorizationUrl googleKey `appendQueryParam` (googleScopeCalendar)
putStrLn "visit the url and paste code here: "
code <- fmap BS.pack getLine
(Right token) <- fetchAccessToken googleKey code
(Right calendars) <- (getCalendarLists token :: IO (OAuth2Result CalendarsList))
let cals = Prelude.filter (\a -> summary a == "ifmoSchedule") $ items calendars
ifmoCal <- if Prelude.null cals then (createCalendar token :: IO (OAuth2Result CalendarList)) >>= (\(Right a) -> return a) else return $ Prelude.head cals
return (ifmoCal, token)
createCalendar token = authPostJSONWithBody token "https://www.googleapis.com/calendar/v3/calendars" [] (BS.pack "{\n\"summary\": \"ifmoSchedule\"\n}")
validateToken token = authGetBS token "https://www.googleapis.com/oauth2/v1/tokeninfo"
googleScopeCalendar :: QueryParams
googleScopeCalendar = [("scope", "https://www.googleapis.com/auth/calendar")]
googleAccessOffline :: QueryParams
googleAccessOffline = [("access_type", "offline")
,("approval_prompt", "force")]
getCalendarLists :: FromJSON a => AccessToken -> IO (OAuth2Result a)
getCalendarLists token = authGetJSON token $ "https://www.googleapis.com/calendar/v3/users/me/calendarList"
lesson a b c d n x = Lesson {startTime = a, endTime = b, week = c, place = d, title = n, teacher = x}
toDayNumber a = case a of
"Понедельник" -> 0
"Вторник" -> 1
"Среда" -> 2
"Четверг" -> 3
"Пятница" -> 4
"Суббота" -> 5
toSchedule :: [Text] -> [(Int, [Lesson])] -> [(Int, [Lesson])]
toSchedule (a:b:c:d:n:x:ss:xs) result = if a `elem` Prelude.map pack ["Понедельник", "Вторник", "Среда", "Четверг", "Пятница", "Суббота"]
then toSchedule xs (result ++ [(toDayNumber a, [lesson b c d n x ss])])
else toSchedule (ss:xs) result'
where (curDay, lessons) = Prelude.last result
result' = Prelude.init result ++ [(curDay, lessons ++ [lesson a b c d n x])]
toSchedule (a:b:c:d:n:x:[]) result = result'
where (curDay, lessons) = Prelude.last result
result' = Prelude.init result ++ [(curDay, lessons ++ [lesson a b c d n x])]
toSchedule [] _ = error "toSchedule: unexpected empty input"
findNodes :: Cursor -> [Cursor]
findNodes = element "tr" &/ checkElement (\a -> let l = elementNodes a in
let ffst = Prelude.head l in
case ffst of
NodeElement a' -> elementName a' /= "strong"
NodeContent _ -> True
_ -> False)
cursorFor :: Monad m => L.ByteString -> m Cursor
cursorFor page = do
return $ fromDocument $ DOM.parseLBS page
|
jagajaga/ifmo2gcal
|
src/Main.hs
|
gpl-2.0
| 6,795
| 0
| 35
| 1,713
| 2,100
| 1,121
| 979
| 102
| 6
|
{- Copyright (c) 2007 John Goerzen <jgoerzen@complete.org>
Please see the COPYRIGHT file -}
module Buttons where
import Graphics.UI.Gtk
import Graphics.UI.Gtk.Glade
data Buttons =
Buttons {newbt :: Button,
renamebt :: Button,
removebt :: Button,
connectbt :: Button,
disconnectbt :: Button,
aboutbt :: Button,
closebt :: Button}
initButtons xml window = do
newB <- get "newmacrobt"
renameB <- get "renamebt"
removeB <- get "removebt"
connectB <- get "connectbt"
disconnectB <- get "disconnectbt"
aboutB <- get "aboutbt"
closeB <- get "closebt"
onClicked closeB (widgetDestroy window)
let buttons = Buttons {newbt = newB, renamebt = renameB,
removebt = removeB, connectbt = connectB,
disconnectbt = disconnectB, aboutbt = aboutB, closebt = closeB}
disablePerMacro buttons
return buttons
where get = xmlGetWidget xml castToButton
disablePerMacro buttons =
mapM_ ((flip widgetSetSensitivity) False) (macroButtons buttons)
enablePerMacro buttons =
mapM_ ((flip widgetSetSensitivity) True) (macroButtons buttons)
macroButtons buttons =
[renamebt buttons, removebt buttons, connectbt buttons,
disconnectbt buttons]
|
jgoerzen/gmacro
|
Buttons.hs
|
gpl-2.0
| 1,289
| 0
| 11
| 326
| 334
| 174
| 160
| 33
| 1
|
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedLists #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UnicodeSyntax #-}
{-# LANGUAGE DeriveGeneric #-}
-- | Neuron module encapsulates behavior of a 'Neuron'
--
-- Some considerations for event driven simulation of SNN
--
-- * Given current 'state' of 'Neuron', it should be possible to predict the time at
-- which it will generate a spike (if any)
--
-- * For a synapse model with dynamics it is possible that the neuron fires in the
-- future, so such synapses should be part of the Neuron data type.
--
module Simulation.HSimSNN.Neuron where
import Control.DeepSeq
import qualified Data.Vector.Generic.Mutable as VM
import Data.Vector.Unboxed (Unbox)
import qualified Data.Vector.Unboxed as V
import Data.Vector.Unboxed.Deriving
import GHC.Generics (Generic)
import qualified Simulation.HSimSNN.Spikes as SPK
-- | Data container for synaptic information related to a connection
data SynType = Exec deriving (Show,Read,Enum,Generic)
instance NFData SynType
data SynInfo = SynInfo
{ weight :: {-# UNPACK #-} !Double
, syntype :: !SynType
} deriving (Show, Generic)
instance NFData SynInfo
derivingUnbox "SynInfo"
[t| SynInfo → (Double, Int) |]
[| \ (SynInfo weight syntype) → (weight, (fromEnum syntype)) |]
[| \ (weight, syntype) → (SynInfo weight (toEnum syntype)) |]
-- | Neuron threshold
threshold :: Double
threshold = 1.0
-- | Neuron is defined by its state and the time at which its state was last evaluated
-- The state of a neuron is defined as a list of doubles
-- Try unboxed Vector for better performance
data Neuron = Neuron
{ state :: !(V.Vector Double)
, tLastUpdate :: {-# UNPACK #-} !Double
} deriving (Generic)
instance NFData Neuron
-- | String representation for Neuron
instance Show Neuron where
show (Neuron st tl) = "Neuron (" ++ (show $ (V.toList) st) ++ " @ " ++ (show tl) ++ ")"
-- | Initializes a neuron with a given state at time 0
initNeuron :: [Double] -> Neuron
initNeuron st = Neuron (V.fromList st) 0
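-- Illustrative (hypothetical GHCi session):
-- >>> initNeuron [0.0, 0.0]
-- Neuron ([0.0,0.0] @ 0.0)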
-- | Returns the membrane potential of a neuron
-- Maybe use Non-empty Vector?
vmem:: Neuron -> Double
vmem neuron = (V.head.state) neuron -- For now the state of a neuron is the first state variable
-- The below block of functions all effect the dynamics of the neuron
-- | Checks if the membrane potential of a neuron is above threshold value
-- use a reader monad for global values?
aboveThreshold:: Neuron -> Bool
aboveThreshold neuron = vmem neuron > threshold
{-# INLINE aboveThreshold #-}
-- | Check for threshold and reset neuron
-- Should be called with the simulation time and only when the neuron spikes
-- Perhaps this should be an internal/hidden function ?
-- Hardcasting threshold to 1.0 TODO: should parametrize somehow
-- See above use reader monad for parameters
resetNeuron:: Neuron -> Double -> Neuron
resetNeuron neuron t
| tLastUpdate neuron > t = error $
(show t)
++ "Neuron has already been updated to the future"
++ (show $ tLastUpdate neuron) -- for debugging
| otherwise = Neuron newstate t
where
-- Rewrite this without lists
y = V.tail $ state neuron -- time of last spike
-- neuron dynamics
newstate = [0,t] V.++ y
{-# INLINE resetNeuron #-}
-- | Evaluate the next possible spike time of a neuron given its state at time t
--
-- This function is essentially what defines the dynamics of the neuron. (not really.. it depends on the dynamics though)
-- Currently the neuron receives a constant input current
-- Ideally this should be something users can define and pass at the top level
nextSpikeTime:: Neuron -> SPK.NextSpikeTime
nextSpikeTime neuron
| aboveThreshold neuron = SPK.At $ tLastUpdate neuron
| otherwise = SPK.Never
-- -- |otherwise = SPK.At $(threshold-vmem neuron) + tlastupdate neuron
{-# INLINE nextSpikeTime #-}
-- | Evaluate state of neuron at time t
-- Ideally used at the arrival of a spike or when the neuron spikes (when an
-- event occurred)
evaluateNeuronStateAtt:: Neuron -> Double -> Neuron
evaluateNeuronStateAtt neuron t
| t > (tLastUpdate neuron) = Neuron newstate t
| t == (tLastUpdate neuron) = neuron -- The neuron has already been updated
| otherwise = error $ (show t)
++ "Neuron has already been updated to the future"
++ (show $ tLastUpdate neuron) -- for debugging
where
taum = 10.0
decayfact = exp (((tLastUpdate neuron)-t)/taum) -- decay factor
-- Rewrite this without lists
newstate =
V.modify
(\v ->
VM.modify v (* decayfact) 0)
(state neuron)
{-# INLINE evaluateNeuronStateAtt #-}
-- | Apply a presynaptic spike to a neuron at time t
applySynapticSpikeToNeuron :: SynInfo -> Double -> Neuron -> Neuron
applySynapticSpikeToNeuron (SynInfo w _) spktm neuron
| isRefractoryAtt neuron spktm = Neuron curstate spktm
| otherwise = Neuron newstate spktm
where
Neuron curstate _ = evaluateNeuronStateAtt neuron spktm
newstate = V.modify (\v -> VM.modify v (+w) 0) curstate
{-# INLINE applySynapticSpikeToNeuron #-}
-- | Check if a neuron is still refractory
isRefractoryAtt:: Neuron -> Double -> Bool
isRefractoryAtt (Neuron oldstate tlastupdate) t
| (t-tlastupdate) > tref = False -- Neuron has not been modified within refractory time window
| (t-(oldstate V.! 1)) > tref = False -- last spike time was before refractory time window
| otherwise = True
where
tref = 0.5
{-# INLINE isRefractoryAtt #-}
|
sheiksadique/HSimSNN
|
src/Simulation/HSimSNN/Neuron.hs
|
gpl-2.0
| 6,206
| 0
| 14
| 1,721
| 973
| 540
| 433
| 91
| 1
|
module Browser (openBrowser) where
import System.Process
--import Types
import Utils
-- url is of form: "file:///C:/Users/mcarter/AppData/Local/MarkCarter/sifi/sifi.htm"
rawUrl = do
url1 <- outFile "sifi.htm"
return ("file:///" ++ url1)
openLinuxBrowser = do
url <- rawUrl
--ret <- rawSystem "firefox" [url]
ret <- spawnProcess "firefox" [url]
return ()
openWinBrowser = do
url1 <- rawUrl -- outFile "sifi.htm"
let url2 = map (\c -> if c == '\\' then '/' else c) url1
--let url3 = "file:///" ++ url2
--print url2
ret <- rawSystem "explorer" [url2]
return ()
openBrowser :: IO ()
openBrowser = if isLinux then openLinuxBrowser else openWinBrowser
|
blippy/sifi
|
src/Browser.hs
|
gpl-3.0
| 686
| 0
| 14
| 136
| 178
| 93
| 85
| 17
| 2
|
{-# LANGUAGE TemplateHaskell #-}
module Math.Structure.Instances.TH.Additive
where
import Control.Applicative ( (<$>) )
import Prelude hiding ( (+), (-), negate, subtract )
import qualified Prelude as P
import Language.Haskell.TH
import Math.Structure.Additive
import Math.Structure.Utility.TH
-- | Make an abelian monoid instance for n, assuming Num n
mkAbelianMonoidInstanceFromNum :: CxtQ -> TypeQ -> DecsQ
mkAbelianMonoidInstanceFromNum cxt t = sequence
[ mkInstanceWith cxt t [t|AdditiveMagma|]
[ mkDecl '(+) [| (P.+) |] ]
, mkInstance cxt t [t|Abelian|]
, mkInstance cxt t [t|AdditiveSemigroup|]
, mkInstanceWith cxt t [t|AdditiveMonoid|]
[ mkDecl 'zero [| 0 |] ]
, mkInstanceWith cxt t [t|DecidableZero|]
[ mkDecl 'isZero [| (==0) |] ]
]
-- | Make an abelian group instance for n, assuming Num n
mkAbelianGroupInstanceFromNum :: CxtQ -> TypeQ -> DecsQ
mkAbelianGroupInstanceFromNum cxt t = concat <$> sequence
[ mkAbelianMonoidInstanceFromNum cxt t
, sequence
[ mkInstanceWith cxt t [t|AdditiveGroup|]
[ mkDecl '(-) [| (P.-) |]
, mkDecl 'negate [| P.negate |]
, mkDecl 'subtract [| P.subtract |]
]
]
]
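-- Illustrative use from a client module (hypothetical; 'cxt' is
-- Language.Haskell.TH's context builder):
--
-- $(mkAbelianGroupInstanceFromNum (cxt []) [t|Int|])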
|
martinra/algebraic-structures
|
src/Math/Structure/Instances/TH/Additive.hs
|
gpl-3.0
| 1,185
| 0
| 12
| 243
| 309
| 197
| 112
| 26
| 1
|
module OpenGL.IOS.Types
(
GLvoid,
GLchar,
GLenum,
GLboolean,
GLbitfield,
GLbyte,
GLshort,
GLint,
GLsizei,
GLubyte,
GLushort,
GLuint,
GLfloat,
GLclampf,
GLfixed,
GLclampx,
GLintptr,
GLsizeiptr,
) where
import Foreign.C.Types
type GLvoid =
()
type GLchar =
CChar
type GLenum =
CUInt
type GLboolean =
CUChar
type GLbitfield =
CUInt
type GLbyte =
CSChar
type GLshort =
CShort
type GLint =
CInt
type GLsizei =
CInt
type GLubyte =
CUChar
type GLushort =
CUShort
type GLuint =
CUInt
type GLfloat =
CFloat
type GLclampf =
CFloat
type GLfixed =
CInt
type GLclampx =
CInt
type GLintptr =
CLong
type GLsizeiptr =
CLong
|
karamellpelle/grid
|
designer/source/OpenGL/IOS/Types.hs
|
gpl-3.0
| 780
| 0
| 5
| 269
| 180
| 120
| 60
| 57
| 0
|
<?xml version='1.0' encoding='ISO-8859-1' ?>
<!DOCTYPE helpset
PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN"
"http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0">
<title>CHRIS/Proba Cloud Screening Tools Help</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view mergetype="javax.help.UniteAppendMerge">
<name>TOC</name>
<label>Contents</label>
<type>javax.help.TOCView</type>
<data>toc.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">JavaHelpSearch</data>
</view>
</helpset>
|
bcdev/chris-box
|
chris-cloud-screening/src/main/resources/doc/help/cloudscreening.hs
|
gpl-3.0
| 794
| 54
| 45
| 167
| 291
| 147
| 144
| -1
| -1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.ProximityBeacon.Beacons.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Searches the beacon registry for beacons that match the given search
-- criteria. Only those beacons that the client has permission to list will
-- be returned. Authenticate using an [OAuth access
-- token](https:\/\/developers.google.com\/identity\/protocols\/OAuth2)
-- from a signed-in user with **viewer**, **Is owner** or **Can edit**
-- permissions in the Google Developers Console project.
--
-- /See:/ <https://developers.google.com/beacons/proximity/ Google Proximity Beacon API Reference> for @proximitybeacon.beacons.list@.
module Network.Google.Resource.ProximityBeacon.Beacons.List
(
-- * REST Resource
BeaconsListResource
-- * Creating a Request
, beaconsList
, BeaconsList
-- * Request Lenses
, blXgafv
, blUploadProtocol
, blPp
, blAccessToken
, blUploadType
, blQ
, blBearerToken
, blPageToken
, blProjectId
, blPageSize
, blCallback
) where
import Network.Google.Prelude
import Network.Google.ProximityBeacon.Types
-- | A resource alias for @proximitybeacon.beacons.list@ method which the
-- 'BeaconsList' request conforms to.
type BeaconsListResource =
"v1beta1" :>
"beacons" :>
QueryParam "$.xgafv" Text :>
QueryParam "upload_protocol" Text :>
QueryParam "pp" Bool :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "q" Text :>
QueryParam "bearer_token" Text :>
QueryParam "pageToken" Text :>
QueryParam "projectId" Text :>
QueryParam "pageSize" (Textual Int32) :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] ListBeaconsResponse
-- | Searches the beacon registry for beacons that match the given search
-- criteria. Only those beacons that the client has permission to list will
-- be returned. Authenticate using an [OAuth access
-- token](https:\/\/developers.google.com\/identity\/protocols\/OAuth2)
-- from a signed-in user with **viewer**, **Is owner** or **Can edit**
-- permissions in the Google Developers Console project.
--
-- /See:/ 'beaconsList' smart constructor.
data BeaconsList = BeaconsList'
{ _blXgafv :: !(Maybe Text)
, _blUploadProtocol :: !(Maybe Text)
, _blPp :: !Bool
, _blAccessToken :: !(Maybe Text)
, _blUploadType :: !(Maybe Text)
, _blQ :: !(Maybe Text)
, _blBearerToken :: !(Maybe Text)
, _blPageToken :: !(Maybe Text)
, _blProjectId :: !(Maybe Text)
, _blPageSize :: !(Maybe (Textual Int32))
, _blCallback :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'BeaconsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'blXgafv'
--
-- * 'blUploadProtocol'
--
-- * 'blPp'
--
-- * 'blAccessToken'
--
-- * 'blUploadType'
--
-- * 'blQ'
--
-- * 'blBearerToken'
--
-- * 'blPageToken'
--
-- * 'blProjectId'
--
-- * 'blPageSize'
--
-- * 'blCallback'
beaconsList
:: BeaconsList
beaconsList =
BeaconsList'
{ _blXgafv = Nothing
, _blUploadProtocol = Nothing
, _blPp = True
, _blAccessToken = Nothing
, _blUploadType = Nothing
, _blQ = Nothing
, _blBearerToken = Nothing
, _blPageToken = Nothing
, _blProjectId = Nothing
, _blPageSize = Nothing
, _blCallback = Nothing
}
-- | V1 error format.
blXgafv :: Lens' BeaconsList (Maybe Text)
blXgafv = lens _blXgafv (\ s a -> s{_blXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
blUploadProtocol :: Lens' BeaconsList (Maybe Text)
blUploadProtocol
= lens _blUploadProtocol
(\ s a -> s{_blUploadProtocol = a})
-- | Pretty-print response.
blPp :: Lens' BeaconsList Bool
blPp = lens _blPp (\ s a -> s{_blPp = a})
-- | OAuth access token.
blAccessToken :: Lens' BeaconsList (Maybe Text)
blAccessToken
= lens _blAccessToken
(\ s a -> s{_blAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
blUploadType :: Lens' BeaconsList (Maybe Text)
blUploadType
= lens _blUploadType (\ s a -> s{_blUploadType = a})
-- | Filter query string that supports the following field filters: *
-- \`description:\"\"\` For example: \`description:\"Room 3\"\` Returns
-- beacons whose description matches tokens in the string \"Room 3\" (not
-- necessarily that exact string). The string must be double-quoted. *
-- \`status:\` For example: \`status:active\` Returns beacons whose status
-- matches the given value. Values must be one of the Beacon.Status enum
-- values (case insensitive). Accepts multiple filters which will be
-- combined with OR logic. * \`stability:\` For example:
-- \`stability:mobile\` Returns beacons whose expected stability matches
-- the given value. Values must be one of the Beacon.Stability enum values
-- (case insensitive). Accepts multiple filters which will be combined with
-- OR logic. * \`place_id:\"\"\` For example:
-- \`place_id:\"ChIJVSZzVR8FdkgRXGmmm6SslKw=\"\` Returns beacons explicitly
-- registered at the given place, expressed as a Place ID obtained from
-- [Google Places API](\/places\/place-id). Does not match places inside
-- the given place. Does not consider the beacon\'s actual location (which
-- may be different from its registered place). Accepts multiple filters
-- that will be combined with OR logic. The place ID must be double-quoted.
-- * \`registration_time[|=]\` For example:
-- \`registration_time>=1433116800\` Returns beacons whose registration
-- time matches the given filter. Supports the operators: , =. Timestamp
-- must be expressed as an integer number of seconds since midnight January
-- 1, 1970 UTC. Accepts at most two filters that will be combined with AND
-- logic, to support \"between\" semantics. If more than two are supplied,
-- the latter ones are ignored. * \`lat: lng: radius:\` For example:
-- \`lat:51.1232343 lng:-1.093852 radius:1000\` Returns beacons whose
-- registered location is within the given circle. When any of these fields
-- are given, all are required. Latitude and longitude must be decimal
-- degrees between -90.0 and 90.0 and between -180.0 and 180.0
-- respectively. Radius must be an integer number of meters between 10 and
-- 1,000,000 (1000 km). * \`property:\"=\"\` For example:
-- \`property:\"battery-type=CR2032\"\` Returns beacons which have a
-- property of the given name and value. Supports multiple filters which
-- will be combined with OR logic. The entire name=value string must be
-- double-quoted as one string. * \`attachment_type:\"\"\` For example:
-- \`attachment_type:\"my-namespace\/my-type\"\` Returns beacons having at
-- least one attachment of the given namespaced type. Supports \"any within
-- this namespace\" via the partial wildcard syntax: \"my-namespace\/*\".
-- Supports multiple filters which will be combined with OR logic. The
-- string must be double-quoted. Multiple filters on the same field are
-- combined with OR logic (except registration_time which is combined with
-- AND logic). Multiple filters on different fields are combined with AND
-- logic. Filters should be separated by spaces. As with any HTTP query
-- string parameter, the whole filter expression must be URL-encoded.
-- Example REST request: \`GET
-- \/v1beta1\/beacons?q=status:active%20lat:51.123%20lng:-1.095%20radius:1000\`
blQ :: Lens' BeaconsList (Maybe Text)
blQ = lens _blQ (\ s a -> s{_blQ = a})
-- | OAuth bearer token.
blBearerToken :: Lens' BeaconsList (Maybe Text)
blBearerToken
= lens _blBearerToken
(\ s a -> s{_blBearerToken = a})
-- | A pagination token obtained from a previous request to list beacons.
blPageToken :: Lens' BeaconsList (Maybe Text)
blPageToken
= lens _blPageToken (\ s a -> s{_blPageToken = a})
-- | The project id to list beacons under. If not present then the project
-- credential that made the request is used as the project. Optional.
blProjectId :: Lens' BeaconsList (Maybe Text)
blProjectId
= lens _blProjectId (\ s a -> s{_blProjectId = a})
-- | The maximum number of records to return for this request, up to a
-- server-defined upper limit.
blPageSize :: Lens' BeaconsList (Maybe Int32)
blPageSize
= lens _blPageSize (\ s a -> s{_blPageSize = a}) .
mapping _Coerce
-- | JSONP
blCallback :: Lens' BeaconsList (Maybe Text)
blCallback
= lens _blCallback (\ s a -> s{_blCallback = a})
instance GoogleRequest BeaconsList where
type Rs BeaconsList = ListBeaconsResponse
type Scopes BeaconsList =
'["https://www.googleapis.com/auth/userlocation.beacon.registry"]
requestClient BeaconsList'{..}
= go _blXgafv _blUploadProtocol (Just _blPp)
_blAccessToken
_blUploadType
_blQ
_blBearerToken
_blPageToken
_blProjectId
_blPageSize
_blCallback
(Just AltJSON)
proximityBeaconService
where go
= buildClient (Proxy :: Proxy BeaconsListResource)
mempty
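-- A minimal usage sketch (added for illustration; not part of the generated
-- module): a request that searches for active beacons near a point, using the
-- filter syntax documented on 'blQ'.  It assumes the module's 'beaconsList'
-- smart constructor and the lens operators (&) and (?~) are in scope.
--
-- > nearbyActive :: BeaconsList
-- > nearbyActive =
-- >   beaconsList & blQ ?~ "status:active lat:51.123 lng:-1.095 radius:1000"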
|
rueshyna/gogol
|
gogol-proximitybeacon/gen/Network/Google/Resource/ProximityBeacon/Beacons/List.hs
|
mpl-2.0
| 10,122
| 0
| 21
| 2,228
| 1,172
| 697
| 475
| 149
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.YouTube.Captions.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves a list of resources, possibly filtered.
--
-- /See:/ <https://developers.google.com/youtube/ YouTube Data API v3 Reference> for @youtube.captions.list@.
module Network.Google.Resource.YouTube.Captions.List
(
-- * REST Resource
CaptionsListResource
-- * Creating a Request
, captionsList
, CaptionsList
-- * Request Lenses
, clOnBehalfOf
, clXgafv
, clPart
, clUploadProtocol
, clAccessToken
, clUploadType
, clOnBehalfOfContentOwner
, clVideoId
, clId
, clCallback
) where
import Network.Google.Prelude
import Network.Google.YouTube.Types
-- | A resource alias for @youtube.captions.list@ method which the
-- 'CaptionsList' request conforms to.
type CaptionsListResource =
"youtube" :>
"v3" :>
"captions" :>
QueryParams "part" Text :>
QueryParam "videoId" Text :>
QueryParam "onBehalfOf" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "onBehalfOfContentOwner" Text :>
QueryParams "id" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] CaptionListResponse
-- | Retrieves a list of resources, possibly filtered.
--
-- /See:/ 'captionsList' smart constructor.
data CaptionsList =
CaptionsList'
{ _clOnBehalfOf :: !(Maybe Text)
, _clXgafv :: !(Maybe Xgafv)
, _clPart :: ![Text]
, _clUploadProtocol :: !(Maybe Text)
, _clAccessToken :: !(Maybe Text)
, _clUploadType :: !(Maybe Text)
, _clOnBehalfOfContentOwner :: !(Maybe Text)
, _clVideoId :: !Text
, _clId :: !(Maybe [Text])
, _clCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'CaptionsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'clOnBehalfOf'
--
-- * 'clXgafv'
--
-- * 'clPart'
--
-- * 'clUploadProtocol'
--
-- * 'clAccessToken'
--
-- * 'clUploadType'
--
-- * 'clOnBehalfOfContentOwner'
--
-- * 'clVideoId'
--
-- * 'clId'
--
-- * 'clCallback'
captionsList
:: [Text] -- ^ 'clPart'
-> Text -- ^ 'clVideoId'
-> CaptionsList
captionsList pClPart_ pClVideoId_ =
CaptionsList'
{ _clOnBehalfOf = Nothing
, _clXgafv = Nothing
, _clPart = _Coerce # pClPart_
, _clUploadProtocol = Nothing
, _clAccessToken = Nothing
, _clUploadType = Nothing
, _clOnBehalfOfContentOwner = Nothing
, _clVideoId = pClVideoId_
, _clId = Nothing
, _clCallback = Nothing
}
-- | ID of the Google+ Page for the channel that the request is on behalf of.
clOnBehalfOf :: Lens' CaptionsList (Maybe Text)
clOnBehalfOf
= lens _clOnBehalfOf (\ s a -> s{_clOnBehalfOf = a})
-- | V1 error format.
clXgafv :: Lens' CaptionsList (Maybe Xgafv)
clXgafv = lens _clXgafv (\ s a -> s{_clXgafv = a})
-- | The *part* parameter specifies a comma-separated list of one or more
-- caption resource parts that the API response will include. The part
-- names that you can include in the parameter value are id and snippet.
clPart :: Lens' CaptionsList [Text]
clPart
= lens _clPart (\ s a -> s{_clPart = a}) . _Coerce
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
clUploadProtocol :: Lens' CaptionsList (Maybe Text)
clUploadProtocol
= lens _clUploadProtocol
(\ s a -> s{_clUploadProtocol = a})
-- | OAuth access token.
clAccessToken :: Lens' CaptionsList (Maybe Text)
clAccessToken
= lens _clAccessToken
(\ s a -> s{_clAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
clUploadType :: Lens' CaptionsList (Maybe Text)
clUploadType
= lens _clUploadType (\ s a -> s{_clUploadType = a})
-- | *Note:* This parameter is intended exclusively for YouTube content
-- partners. The *onBehalfOfContentOwner* parameter indicates that the
-- request\'s authorization credentials identify a YouTube CMS user who is
-- acting on behalf of the content owner specified in the parameter value.
-- This parameter is intended for YouTube content partners that own and
-- manage many different YouTube channels. It allows content owners to
-- authenticate once and get access to all their video and channel data,
-- without having to provide authentication credentials for each individual
-- channel. The actual CMS account that the user authenticates with must be
-- linked to the specified YouTube content owner.
clOnBehalfOfContentOwner :: Lens' CaptionsList (Maybe Text)
clOnBehalfOfContentOwner
= lens _clOnBehalfOfContentOwner
(\ s a -> s{_clOnBehalfOfContentOwner = a})
-- | Returns the captions for the specified video.
clVideoId :: Lens' CaptionsList Text
clVideoId
= lens _clVideoId (\ s a -> s{_clVideoId = a})
-- | Returns the captions with the given IDs for Stubby or Apiary.
clId :: Lens' CaptionsList [Text]
clId
= lens _clId (\ s a -> s{_clId = a}) . _Default .
_Coerce
-- | JSONP
clCallback :: Lens' CaptionsList (Maybe Text)
clCallback
= lens _clCallback (\ s a -> s{_clCallback = a})
instance GoogleRequest CaptionsList where
type Rs CaptionsList = CaptionListResponse
type Scopes CaptionsList =
'["https://www.googleapis.com/auth/youtube.force-ssl",
"https://www.googleapis.com/auth/youtubepartner"]
requestClient CaptionsList'{..}
= go _clPart (Just _clVideoId) _clOnBehalfOf _clXgafv
_clUploadProtocol
_clAccessToken
_clUploadType
_clOnBehalfOfContentOwner
(_clId ^. _Default)
_clCallback
(Just AltJSON)
youTubeService
where go
= buildClient (Proxy :: Proxy CaptionsListResource)
mempty
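-- | A minimal usage sketch (added for illustration; not part of the generated
-- module): request the id and snippet parts of the caption tracks of one
-- video.  The video id here is a placeholder, not a real one.
exampleCaptionsList :: CaptionsList
exampleCaptionsList = captionsList ["id", "snippet"] "VIDEO_ID"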
|
brendanhay/gogol
|
gogol-youtube/gen/Network/Google/Resource/YouTube/Captions/List.hs
|
mpl-2.0
| 6,844
| 0
| 21
| 1,670
| 1,069
| 622
| 447
| 145
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.EC2.DescribeImportSnapshotTasks
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Describes your import snapshot tasks.
--
-- <http://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-DescribeImportSnapshotTasks.html>
module Network.AWS.EC2.DescribeImportSnapshotTasks
(
-- * Request
DescribeImportSnapshotTasks
-- ** Request constructor
, describeImportSnapshotTasks
-- ** Request lenses
, distDryRun
, distFilters
, distImportTaskIds
, distMaxResults
, distNextToken
-- * Response
, DescribeImportSnapshotTasksResponse
-- ** Response constructor
, describeImportSnapshotTasksResponse
-- ** Response lenses
, distrImportSnapshotTasks
, distrNextToken
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.EC2.Types
import qualified GHC.Exts
data DescribeImportSnapshotTasks = DescribeImportSnapshotTasks
{ _distDryRun :: Maybe Bool
, _distFilters :: List "Filter" Filter
, _distImportTaskIds :: List "ImportTaskId" Text
, _distMaxResults :: Maybe Int
, _distNextToken :: Maybe Text
} deriving (Eq, Read, Show)
-- | 'DescribeImportSnapshotTasks' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'distDryRun' @::@ 'Maybe' 'Bool'
--
-- * 'distFilters' @::@ ['Filter']
--
-- * 'distImportTaskIds' @::@ ['Text']
--
-- * 'distMaxResults' @::@ 'Maybe' 'Int'
--
-- * 'distNextToken' @::@ 'Maybe' 'Text'
--
describeImportSnapshotTasks :: DescribeImportSnapshotTasks
describeImportSnapshotTasks = DescribeImportSnapshotTasks
{ _distDryRun = Nothing
, _distImportTaskIds = mempty
, _distNextToken = Nothing
, _distMaxResults = Nothing
, _distFilters = mempty
}
-- | Checks whether you have the required permissions for the action, without
-- actually making the request, and provides an error response. If you have the
-- required permissions, the error response is 'DryRunOperation'. Otherwise, it is 'UnauthorizedOperation'.
distDryRun :: Lens' DescribeImportSnapshotTasks (Maybe Bool)
distDryRun = lens _distDryRun (\s a -> s { _distDryRun = a })
-- | One or more filters.
distFilters :: Lens' DescribeImportSnapshotTasks [Filter]
distFilters = lens _distFilters (\s a -> s { _distFilters = a }) . _List
-- | A list of import snapshot task IDs.
distImportTaskIds :: Lens' DescribeImportSnapshotTasks [Text]
distImportTaskIds =
lens _distImportTaskIds (\s a -> s { _distImportTaskIds = a })
. _List
-- | The maximum number of results to return in a single request.
distMaxResults :: Lens' DescribeImportSnapshotTasks (Maybe Int)
distMaxResults = lens _distMaxResults (\s a -> s { _distMaxResults = a })
-- | A token that indicates the next page of results.
distNextToken :: Lens' DescribeImportSnapshotTasks (Maybe Text)
distNextToken = lens _distNextToken (\s a -> s { _distNextToken = a })
data DescribeImportSnapshotTasksResponse = DescribeImportSnapshotTasksResponse
{ _distrImportSnapshotTasks :: List "item" ImportSnapshotTask
, _distrNextToken :: Maybe Text
} deriving (Eq, Read, Show)
-- | 'DescribeImportSnapshotTasksResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'distrImportSnapshotTasks' @::@ ['ImportSnapshotTask']
--
-- * 'distrNextToken' @::@ 'Maybe' 'Text'
--
describeImportSnapshotTasksResponse :: DescribeImportSnapshotTasksResponse
describeImportSnapshotTasksResponse = DescribeImportSnapshotTasksResponse
{ _distrImportSnapshotTasks = mempty
, _distrNextToken = Nothing
}
-- | A list of zero or more import snapshot tasks that are currently active or
-- were completed or canceled in the previous 7 days.
distrImportSnapshotTasks :: Lens' DescribeImportSnapshotTasksResponse [ImportSnapshotTask]
distrImportSnapshotTasks =
lens _distrImportSnapshotTasks
(\s a -> s { _distrImportSnapshotTasks = a })
. _List
-- | The token to use to get the next page of results. This value is 'null' when
-- there are no more results to return.
distrNextToken :: Lens' DescribeImportSnapshotTasksResponse (Maybe Text)
distrNextToken = lens _distrNextToken (\s a -> s { _distrNextToken = a })
instance ToPath DescribeImportSnapshotTasks where
toPath = const "/"
instance ToQuery DescribeImportSnapshotTasks where
toQuery DescribeImportSnapshotTasks{..} = mconcat
[ "DryRun" =? _distDryRun
, "Filters" `toQueryList` _distFilters
, "ImportTaskId" `toQueryList` _distImportTaskIds
, "MaxResults" =? _distMaxResults
, "NextToken" =? _distNextToken
]
instance ToHeaders DescribeImportSnapshotTasks
instance AWSRequest DescribeImportSnapshotTasks where
type Sv DescribeImportSnapshotTasks = EC2
type Rs DescribeImportSnapshotTasks = DescribeImportSnapshotTasksResponse
request = post "DescribeImportSnapshotTasks"
response = xmlResponse
instance FromXML DescribeImportSnapshotTasksResponse where
parseXML x = DescribeImportSnapshotTasksResponse
<$> x .@? "importSnapshotTaskSet" .!@ mempty
<*> x .@? "nextToken"
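-- A minimal usage sketch (added for illustration; not part of the generated
-- module): request the first page of at most 25 import snapshot tasks.  It
-- assumes the lens operators (&) and (?~) are in scope (e.g. from
-- Control.Lens).
--
-- > firstPage :: DescribeImportSnapshotTasks
-- > firstPage = describeImportSnapshotTasks & distMaxResults ?~ 25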
|
romanb/amazonka
|
amazonka-ec2/gen/Network/AWS/EC2/DescribeImportSnapshotTasks.hs
|
mpl-2.0
| 6,171
| 0
| 10
| 1,244
| 788
| 473
| 315
| 87
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Webmasters.Sites.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists the user\'s Search Console sites.
--
-- /See:/ <https://developers.google.com/webmaster-tools/ Search Console API Reference> for @webmasters.sites.list@.
module Network.Google.Resource.Webmasters.Sites.List
(
-- * REST Resource
SitesListResource
-- * Creating a Request
, sitesList
, SitesList
) where
import Network.Google.Prelude
import Network.Google.WebmasterTools.Types
-- | A resource alias for @webmasters.sites.list@ method which the
-- 'SitesList' request conforms to.
type SitesListResource =
"webmasters" :>
"v3" :>
"sites" :>
QueryParam "alt" AltJSON :>
Get '[JSON] SitesListResponse
-- | Lists the user\'s Search Console sites.
--
-- /See:/ 'sitesList' smart constructor.
data SitesList =
SitesList'
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'SitesList' with the minimum fields required to make a request.
--
sitesList
:: SitesList
sitesList = SitesList'
instance GoogleRequest SitesList where
type Rs SitesList = SitesListResponse
type Scopes SitesList =
'["https://www.googleapis.com/auth/webmasters",
"https://www.googleapis.com/auth/webmasters.readonly"]
requestClient SitesList'{}
= go (Just AltJSON) webmasterToolsService
where go
= buildClient (Proxy :: Proxy SitesListResource)
mempty
|
brendanhay/gogol
|
gogol-webmaster-tools/gen/Network/Google/Resource/Webmasters/Sites/List.hs
|
mpl-2.0
| 2,213
| 0
| 11
| 494
| 221
| 138
| 83
| 41
| 1
|
-- Sample Test...
|
seckcoder/lang-learn
|
haskell/algo/tests/sample.hs
|
unlicense
| 21
| 0
| 2
| 4
| 3
| 2
| 1
| 1
| 0
|
data Suit = Spades | Hearts deriving (Show)
data Rank = Ten | Jack | Queen | King | Ace deriving (Show)
type Card = (Rank, Suit)
type Hand = [Card]
value :: Rank -> Integer
value Ten = 1
value Jack = 2
value Queen = 3
value King = 4
value Ace = 5
cardValue :: Card -> Integer
cardValue (rank, suit) = value rank
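-- A small illustrative addition (not in the original file): the value of a
-- whole hand is the sum of its card values.
handValue :: Hand -> Integer
handValue hand = sum (map cardValue hand)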
|
fbartnitzek/notes
|
7_languages/Haskell/cards.hs
|
apache-2.0
| 313
| 0
| 6
| 68
| 140
| 79
| 61
| 12
| 1
|
iAmPicky = [x | x <- [10..20], x /= 13, x /= 15, x /= 19]
crossAllThisShit = [x*y | x <- [1..30], y <- [1..20], x*y > 20]
nouns = ["Sapo", "Papa", "Rei"]
adjetives = ["Bobao", "Sabichao", "Preguicoso"]
justSomeFun = [noun ++ " " ++ adjetive | noun <- nouns, adjetive <- adjetives]
answer = 4 * (let x = 9 in x + 1) + 2
letPower = let square x = x * x in (square 2, square 3, square 4)
-- flip div creates a function that takes two parameters, flips them, and divides. Long live currying
maluquice = zipWith (flip div) [2,2..] [10,8,6,4,2]
-- Curried + map
functionApplicationTrick = map ($ 3) [(4+), (10*), (^2), sqrt]
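-- Illustrative addition (not in the original file): the same halving as
-- maluquice, written as a partially applied section instead of flip div.
halveAll = map (`div` 2) [10,8,6,4,2]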
|
WillGluck/HaskellPlayground
|
definitionsPlayground.hs
|
apache-2.0
| 615
| 0
| 11
| 128
| 307
| 172
| 135
| 9
| 1
|
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.THash
-- Copyright : (C) 2006 Edward Kmett
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : Edward Kmett <ekmett@gmail.com>
-- Stability : experimental
-- Portability : non-portable (requires STM)
--
-- A simple "STM" based transactional linear hash table based on Witold 1980.
-- This wraps a "Data.THash.THT" in an simple container. It may be
-- more appropriate to use the underlying "THT" structure directly if you are
-- nesting these. The performance hit hasn't yet been measured.
----------------------------------------------------------------------------
module Data.THash (
THash,
new, -- (k -> Int) -> STM (THash k v)
newH, -- Hashable k => STM (THash k v)
fromList,-- Eq k => (k -> Int) -> [(k,v)] -> STM (THash k v)
insert, -- Eq k => THash k v -> k -> v -> STM (Bool)
update, -- Eq k => THash k v -> k -> v -> STM ()
modify, -- Eq k => THash k v -> k -> (Maybe v -> v) -> STM ()
delete, -- Eq k => THash k v -> k -> STM (Bool)
lookup, -- Eq k => THash k v -> k -> STM (Maybe v)
mapH, -- ((k,v) -> r) -> THash k v -> STM [r]
each, -- THash k v -> STM [(k,v)]
keys, -- THash k v -> STM [k]
values, -- THash k v -> STM [v]
hashInt, -- Int -> Int
get,
-- hashString -- Int -> Int
) where
import qualified Data.THash.THT as THT hiding(THT)
import Data.Hashable
import Data.THash.THT (THT)
import Prelude hiding (lookup)
import Control.Monad (liftM)
import Control.Concurrent.STM
import Control.Concurrent.STM.TVar
import Data.Bits
-- | A hash with keys of type k to values of type v
newtype THash k v = THash (TVar (THT k v))
{-# INLINE make #-}
make :: THT k v -> STM (THash k v)
make t = do x <- newTVar t; return $ THash x
{-# INLINE get #-}
get :: THash k v -> STM (THT k v)
get (THash h) = readTVar h
{-# INLINE set #-}
set :: THash k v -> THT k v -> STM ()
set (THash h) = writeTVar h
{-# INLINE setif #-}
setif :: THash k v -> (THT k v,Bool) -> STM (Bool)
setif (THash h) (t,b)
| b == True
= writeTVar h t >> return True
| otherwise
= return False
{-# INLINE new #-}
-- | Build an empty hash table
new :: (k -> Int) -> STM (THash k v)
new hash = make =<< THT.new hash
{-# INLINE newH #-}
-- | Build an empty hash table using the default hash function for the key type.
newH :: Hashable k => STM (THash k v)
newH = new hash
{-# INLINE fromList #-}
-- | Build a hash table from a list of @(key,value)@ pairs
fromList :: Eq k => (k -> Int) -> [(k,v)] -> STM (THash k v)
fromList hash list = make =<< THT.fromList hash list
{-# INLINE insert #-}
-- | Insert a value into the hash table. If a value with the key is present
-- then nothing is changed and 'False' is returned.
insert :: Eq k => THash k v -> k -> v -> STM (Bool)
insert hash key value = do x <- get hash; y <- THT.insert x key value; setif hash y
{-# INLINE update #-}
-- | Insert a value into the hash table, replacing any value with the same key that is present.
update :: Eq k => THash k v -> k -> v -> STM ()
update hash key value = do x <- get hash; y <- THT.update x key value; set hash y
{-# INLINE modify #-}
-- | Update a value in the hash table using the supplied function.
modify :: Eq k => THash k v -> k -> (Maybe v -> v) -> STM ()
modify hash key f = do x <- get hash; y <- THT.modify x key f ; set hash y
{-# INLINE delete #-}
-- | Remove a value from the hash table. Returns 'True' to indicate success.
delete :: Eq k => THash k v -> k -> STM (Bool)
delete hash key = do x <- get hash; y <- THT.delete x key; setif hash y
{-# INLINE lookup #-}
-- | Lookup a value in the hash table.
lookup :: Eq k => THash k v -> k -> STM (Maybe v)
lookup hash key = do x <- get hash; THT.lookup x key
{-# INLINE mapH #-}
-- | Map a function over all @(key,value)@ functions in the hash table.
mapH :: ((k,v) -> r) -> THash k v -> STM [r]
mapH f hash = do x <- get hash; THT.mapH f x
{-# INLINE each #-}
-- | @each = mapH id@ and returns all @(key,value)@ pairs in the hash.
each :: THash k v -> STM [(k,v)]
each = mapH id
{-# INLINE keys #-}
-- | @each = mapH fst@ and returns all keys in the hash.
keys :: THash k v -> STM [k]
keys = mapH fst
{-# INLINE values #-}
-- | @each = mapH snd@ and returns all values present in the hash.
values :: THash k v -> STM [v]
values = mapH snd
{-# INLINE hashInt #-}
-- | Thomas Wang's 32 bit mix function; more effective than a prime modulus for
-- declustering a linear hash, but not good against an adversary, since it's easily
-- reversed.
hashInt :: Int -> Int
hashInt = ap xor (`shiftR` 16)
. ap (+) (complement . (`shiftL` 11))
. ap xor (`shiftR` 6)
. ap (+) (`shiftL` 3)
. ap xor (`shiftR` 10)
. ap (+) (complement . (`shiftL` 15))
where ap x y z = x z $ y z
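-- | A minimal usage sketch (added for illustration; not part of the original
-- module): build a table keyed by 'Int' using 'hashInt', insert two entries
-- and look one of them up, all inside a single transaction.
exampleTHash :: STM (Maybe String)
exampleTHash = do
    h <- new hashInt
    _ <- insert h (1 :: Int) "one"
    _ <- insert h 2 "two"
    lookup h 2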
|
ekmett/stm-hash
|
src/Control/Concurrent/STM/THash.hs
|
bsd-2-clause
| 5,153
| 0
| 13
| 1,398
| 1,284
| 691
| 593
| 87
| 1
|
-- http://www.codewars.com/kata/534eb5ad704a49dcfa000ba6
module Hanoi where
hanoi :: Int -> Int
hanoi n = 2^n - 1
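-- Illustrative addition (not in the original file): the same count written as
-- the textbook recurrence (move n-1 discs, move the largest, move n-1 again),
-- which collapses to the closed form 2^n - 1 used above.
hanoiRec :: Int -> Int
hanoiRec 0 = 0
hanoiRec n = 2 * hanoiRec (n - 1) + 1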
|
Bodigrim/katas
|
src/haskell/6-Hanoi-record.hs
|
bsd-2-clause
| 114
| 0
| 6
| 16
| 30
| 17
| 13
| 3
| 1
|
{-# LANGUAGE BangPatterns #-}
-- |
-- Module : Crypto.Number.Polynomial
-- License : BSD-style
-- Maintainer : Vincent Hanquez <vincent@snarc.org>
-- Stability : experimental
-- Portability : Good
module Crypto.Number.Polynomial
( Monomial(..)
-- * polynomial operations
, Polynomial
, toList
, fromList
, addPoly
, subPoly
, mulPoly
, squarePoly
, expPoly
, divPoly
, negPoly
) where
import Data.List (intercalate, sort)
import Data.Vector ((!), Vector)
import qualified Data.Vector as V
import Control.Arrow (first)
data Monomial = Monomial {-# UNPACK #-} !Int !Integer
deriving (Eq)
data Polynomial = Polynomial (Vector Monomial)
deriving (Eq)
instance Ord Monomial where
compare (Monomial w1 v1) (Monomial w2 v2) =
case compare w1 w2 of
EQ -> compare v1 v2
r -> r
instance Show Monomial where
show (Monomial w v) = show v ++ "x^" ++ show w
instance Show Polynomial where
show (Polynomial p) = intercalate "+" $ map show $ V.toList p
toList :: Polynomial -> [Monomial]
toList (Polynomial p) = V.toList p
fromList :: [Monomial] -> Polynomial
fromList = Polynomial . V.fromList . reverse . sort . filterZero
where
filterZero = filter (\(Monomial _ v) -> v /= 0)
getWeight :: Polynomial -> Int -> Maybe Integer
getWeight (Polynomial p) n = look 0
where
plen = V.length p
look !i
| i >= plen = Nothing
| otherwise =
let (Monomial w v) = p ! i in
case compare w n of
LT -> Nothing
EQ -> Just v
GT -> look (i+1)
mergePoly :: (Integer -> Integer -> Integer) -> Polynomial -> Polynomial -> Polynomial
mergePoly f (Polynomial p1) (Polynomial p2) = fromList $ loop 0 0
where
l1 = V.length p1
l2 = V.length p2
loop !i1 !i2
| i1 == l1 && i2 == l2 = []
| i1 == l1 = (p2 ! i2) : loop i1 (i2+1)
| i2 == l2 = (p1 ! i1) : loop (i1+1) i2
| otherwise =
let (coef, i1inc, i2inc) = addCoef (p1 ! i1) (p2 ! i2) in
coef : loop (i1+i1inc) (i2+i2inc)
addCoef m1@(Monomial w1 v1) (Monomial w2 v2) =
case compare w1 w2 of
LT -> (Monomial w2 (f 0 v2), 0, 1)
EQ -> (Monomial w1 (f v1 v2), 1, 1)
GT -> (m1, 1, 0)
addPoly :: Polynomial -> Polynomial -> Polynomial
addPoly = mergePoly (+)
subPoly :: Polynomial -> Polynomial -> Polynomial
subPoly = mergePoly (-)
negPoly :: Polynomial -> Polynomial
negPoly (Polynomial p) = Polynomial $ V.map negateMonomial p
where negateMonomial (Monomial w v) = Monomial w (-v)
mulPoly :: Polynomial -> Polynomial -> Polynomial
mulPoly p1@(Polynomial v1) p2@(Polynomial v2) =
fromList $ filter (\(Monomial _ v) -> v /= 0) $ map (\i -> Monomial i (c i)) $ reverse [0..(m+n)]
where
(Monomial m _) = v1 ! 0
(Monomial n _) = v2 ! 0
c r = foldl (\acc i -> (b $ r-i) * (a $ i) + acc) 0 [0..r]
where
a = maybe 0 id . getWeight p1
b = maybe 0 id . getWeight p2
squarePoly :: Polynomial -> Polynomial
squarePoly p = p `mulPoly` p
expPoly :: Polynomial -> Integer -> Polynomial
expPoly p e = loop p e
  where
    -- exponentiation by squaring
    loop _ 0 = fromList [Monomial 0 1]
    loop t n
        | even n    = loop (squarePoly t) (n `div` 2)
        | otherwise = t `mulPoly` loop (squarePoly t) (n `div` 2)
divPoly :: Polynomial -> Polynomial -> (Polynomial, Polynomial)
divPoly p1 p2@(Polynomial pp2) = first fromList $ divLoop p1
where divLoop d1@(Polynomial pp1)
| V.null pp1 = ([], d1)
| otherwise =
let (Monomial w1 v1) = pp1 ! 0 in
let (Monomial w2 v2) = pp2 ! 0 in
let w = w1 - w2 in
let (v,r) = v1 `divMod` v2 in
if w >= 0 && r == 0
then
let mono = (Monomial w v) in
let remain = d1 `subPoly` (p2 `mulPoly` (fromList [mono])) in
let (l, finalRem) = divLoop remain in
(mono : l, finalRem)
else
([], d1)
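-- | A minimal usage sketch (added for illustration; not part of the original
-- module): (x^2 + 1) * (x + 1) built from monomials with 'fromList'.
examplePoly :: Polynomial
examplePoly = mulPoly (fromList [Monomial 2 1, Monomial 0 1])
                      (fromList [Monomial 1 1, Monomial 0 1])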
|
vincenthz/hs-crypto-numbers
|
Crypto/Number/Polynomial.hs
|
bsd-2-clause
| 4,229
| 0
| 28
| 1,515
| 1,622
| 841
| 781
| 100
| 3
|
module Control.Monad.TuneSpec where
import SpecHelper
import qualified Data.Map as M
import System.Random
spec :: Spec
spec = do
describe "choiceMaps" $ do
it "creates an environment with one root choice" $
(M.size $ addChoiceRoot M.empty "a" [1]) `shouldBe` 1
it "creates an environment with two root choices" $
(M.size $ addChoiceRoot (addChoiceRoot M.empty "b" [1,2]) "a" [1]) `shouldBe` 2
it "creates an environment with one root choice and one dependent choice" $
let b = TunerChoice "b" [1, 2] Nothing
bmap = M.singleton "b" b
in (M.size $ addChoiceDepends bmap "a" [1] b) `shouldBe` 2
describe "tunerStates" $ do
it "creates a tuner state with a singleton environment" $ do
r <- getStdGen
let m = M.singleton "a" $ TunerChoice "a" [1,2] Nothing
(M.size $ env $ makeTunerState m r) `shouldBe` 1
|
vollmerm/monad-tune
|
test/Control/Monad/TuneSpec.hs
|
bsd-2-clause
| 887
| 0
| 18
| 218
| 291
| 150
| 141
| 20
| 1
|
{-# OPTIONS_GHC -Wall #-}
module Content
( pWord8
, pWord16
, pWord32
, pPC
) where
import qualified Data.Word as W
import qualified Koshucode.Baala.Core as K
pWord8 :: (K.CContent c) => W.Word8 -> c
pWord8 i = K.pDecFromInt i' where
i' = fromIntegral i :: Int
pWord16 :: (K.CContent c) => W.Word16 -> c
pWord16 i = K.pDecFromInt i' where
i' = fromIntegral i :: Int
pWord32 :: (K.CContent c) => W.Word32 -> c
pWord32 i = K.pDecFromInt i' where
i' = fromIntegral i :: Int
pPC :: (K.CDec c) => Int -> c
pPC = K.pDecFromInt
|
seinokatsuhiro/koshu-java-tool
|
Content.hs
|
bsd-3-clause
| 564
| 0
| 7
| 140
| 207
| 116
| 91
| 19
| 1
|
{-|
Module : Cmd.Build
Description : CLI for the `pp build` command
Copyright : (c) 2017 Patrick Champion
License : see LICENSE file
Maintainer : chlablak@gmail.com
Stability : provisional
Portability : portable
-}
module Cmd.Build
( commandArgs
, dispatch
) where
import Args
import qualified Cmd.Ebnf
import qualified Cmd.Lalr
import Control.Monad (unless, when)
import Data.Semigroup ((<>))
import qualified Log
import Options.Applicative
import qualified Project
import qualified Work
-- |Command arguments
commandArgs :: Parser CommandArgs
commandArgs = BuildCmd <$> buildArgs
where
buildArgs = BuildArgs
<$> switch ( long "no-template"
<> help "Disable templates compilation" )
<*> switch ( long "no-test"
<> help "Disable tests execution" )
<*> strOption ( long "test-with"
<> short 't'
<> metavar "FILENAME"
<> value ""
<> help "Test the grammar on a source file" )
<*> switch ( long "ast"
<> help "Print the parsed AST (with --test-with)" )
<*> strOption ( long "ast-to-html"
<> metavar "FILENAME"
<> value ""
<> help "Output the parsed AST to HTML list" )
-- |Command dispatch
dispatch :: Args -> Log.Logger
dispatch (Args cargs0 (BuildCmd args)) = do
Log.pushTag "build"
-- Parse pp.yaml
p <- Project.get
case p of
Project.NoProject -> Log.err "no project in current directory"
Project.MalformedProject err -> Log.err $ "malformed project: " ++ err
_ -> do
Log.info $ "build project: " ++ Project.projectName p
let cargs = mergeCArgs cargs0 p
let file = head $ Project.projectGrammars p
when (useWork cargs)
Work.initialize
-- EBNF checks
Log.info "EBNF checks:"
let ebnf = EbnfArgs file False False False True False False False
Cmd.Ebnf.dispatch $ Args cargs $ EbnfCmd ebnf
checkOk <- Log.ok
when checkOk $ do
-- LALR generation
Log.info "LALR generation:"
let lalr = LalrArgs file False (-1) False (buildTestWith args) "" False (buildShowAst args) (buildAstHtml args) False
Cmd.Lalr.dispatch $ Args cargs $ LalrCmd lalr
genOk <- Log.ok
when genOk $ do
-- Templates compilation
unless (disableTemplate args) $ do
Log.info "Templates compilation:"
Log.autoFlush False
mapM_ (buildTemplate cargs lalr) $ Project.projectTemplates p
Log.autoFlush True
-- Tests
unless (disableTest args) $ do
Log.info "Tests execution:"
Log.autoFlush False
mapM_ (buildTest cargs lalr) $ Project.projectTests p
Log.autoFlush True
-- End
Log.popTag
return ()
-- |Compute the correct common args
mergeCArgs :: CommonArgs -> Project.Project -> CommonArgs
mergeCArgs (CommonArgs l s _ p) pr =
CommonArgs l s (Project.projectUseWork pr) p
-- |Build template
buildTemplate :: CommonArgs -> LalrArgs -> Project.ProjectTemplate -> Log.Logger
buildTemplate cargs (LalrArgs l1 l2 l3 l4 l5 _ l7 l8 l9 l10) t = do
Log.pushTag "template"
Log.info $ Project.templateFile t ++ " > " ++ Project.templateDst t
Log.flushAll
let args = LalrArgs l1 l2 l3 l4 l5 (Project.templateFile t) l7 l8 l9 l10
Cmd.Lalr.dispatch $ Args cargs $ LalrCmd args
Log.flushOutToFile $ Project.templateDst t
Log.popTag
-- |Build test
buildTest :: CommonArgs -> LalrArgs -> Project.ProjectTest -> Log.Logger
buildTest cargs (LalrArgs l1 l2 l3 l4 _ l6 l7 _ l9 l10) t = do
Log.pushTag "test"
Log.info $ Project.testFile t ++ if Project.testAstDst t /= ""
then " > " ++ Project.testAstDst t
else ""
Log.flushAll
let args = LalrArgs l1 l2 l3 l4 (Project.testFile t) l6 l7 (Project.testAstDst t /= "") l9 l10
Cmd.Lalr.dispatch $ Args cargs $ LalrCmd args
if Project.testAstDst t /= "" then
Log.flushOutToFile $ Project.testAstDst t
else
Log.flushOutOnly
Log.popTag
|
chlablak/platinum-parsing
|
cli/Cmd/Build.hs
|
bsd-3-clause
| 4,107
| 0
| 24
| 1,134
| 1,167
| 556
| 611
| 90
| 3
|
{-# LANGUAGE CPP #-}
#if MIN_VERSION_base(4,5,0)
import Data.Bits ((.&.), popCount)
import Data.Word (Word)
#else
import Data.Bits ((.&.))
#endif
import Data.IntSet
import Data.List (nub,sort)
import qualified Data.List as List
import Data.Monoid (mempty)
import qualified Data.Set as Set
import Prelude hiding (lookup, null, map, filter, foldr, foldl)
import Test.Framework
import Test.Framework.Providers.HUnit
import Test.Framework.Providers.QuickCheck2
import Test.HUnit hiding (Test, Testable)
import Test.QuickCheck hiding ((.&.))
main :: IO ()
main = defaultMain [ testCase "lookupLT" test_lookupLT
, testCase "lookupGT" test_lookupGT
, testCase "lookupLE" test_lookupLE
, testCase "lookupGE" test_lookupGE
, testCase "split" test_split
, testProperty "prop_Single" prop_Single
, testProperty "prop_Member" prop_Member
, testProperty "prop_NotMember" prop_NotMember
, testProperty "prop_LookupLT" prop_LookupLT
, testProperty "prop_LookupGT" prop_LookupGT
, testProperty "prop_LookupLE" prop_LookupLE
, testProperty "prop_LookupGE" prop_LookupGE
, testProperty "prop_InsertDelete" prop_InsertDelete
, testProperty "prop_MemberFromList" prop_MemberFromList
, testProperty "prop_UnionInsert" prop_UnionInsert
, testProperty "prop_UnionAssoc" prop_UnionAssoc
, testProperty "prop_UnionComm" prop_UnionComm
, testProperty "prop_Diff" prop_Diff
, testProperty "prop_Int" prop_Int
, testProperty "prop_Ordered" prop_Ordered
, testProperty "prop_List" prop_List
, testProperty "prop_DescList" prop_DescList
, testProperty "prop_AscDescList" prop_AscDescList
, testProperty "prop_fromList" prop_fromList
, testProperty "prop_MaskPow2" prop_MaskPow2
, testProperty "prop_Prefix" prop_Prefix
, testProperty "prop_LeftRight" prop_LeftRight
, testProperty "prop_isProperSubsetOf" prop_isProperSubsetOf
, testProperty "prop_isProperSubsetOf2" prop_isProperSubsetOf2
, testProperty "prop_isSubsetOf" prop_isSubsetOf
, testProperty "prop_isSubsetOf2" prop_isSubsetOf2
, testProperty "prop_size" prop_size
, testProperty "prop_findMax" prop_findMax
, testProperty "prop_findMin" prop_findMin
, testProperty "prop_ord" prop_ord
, testProperty "prop_readShow" prop_readShow
, testProperty "prop_foldR" prop_foldR
, testProperty "prop_foldR'" prop_foldR'
, testProperty "prop_foldL" prop_foldL
, testProperty "prop_foldL'" prop_foldL'
, testProperty "prop_map" prop_map
, testProperty "prop_maxView" prop_maxView
, testProperty "prop_minView" prop_minView
, testProperty "prop_split" prop_split
, testProperty "prop_splitMember" prop_splitMember
, testProperty "prop_partition" prop_partition
, testProperty "prop_filter" prop_filter
#if MIN_VERSION_base(4,5,0)
, testProperty "prop_bitcount" prop_bitcount
#endif
]
----------------------------------------------------------------
-- Unit tests
----------------------------------------------------------------
test_lookupLT :: Assertion
test_lookupLT = do
lookupLT 3 (fromList [3, 5]) @?= Nothing
lookupLT 5 (fromList [3, 5]) @?= Just 3
test_lookupGT :: Assertion
test_lookupGT = do
lookupGT 4 (fromList [3, 5]) @?= Just 5
lookupGT 5 (fromList [3, 5]) @?= Nothing
test_lookupLE :: Assertion
test_lookupLE = do
lookupLE 2 (fromList [3, 5]) @?= Nothing
lookupLE 4 (fromList [3, 5]) @?= Just 3
lookupLE 5 (fromList [3, 5]) @?= Just 5
test_lookupGE :: Assertion
test_lookupGE = do
lookupGE 3 (fromList [3, 5]) @?= Just 3
lookupGE 4 (fromList [3, 5]) @?= Just 5
lookupGE 6 (fromList [3, 5]) @?= Nothing
test_split :: Assertion
test_split = do
split 3 (fromList [1..5]) @?= (fromList [1,2], fromList [4,5])
{--------------------------------------------------------------------
Arbitrary, reasonably balanced trees
--------------------------------------------------------------------}
instance Arbitrary IntSet where
arbitrary = do{ xs <- arbitrary
; return (fromList xs)
}
{--------------------------------------------------------------------
Single, Member, Insert, Delete, Member, FromList
--------------------------------------------------------------------}
prop_Single :: Int -> Bool
prop_Single x
= (insert x empty == singleton x)
prop_Member :: [Int] -> Int -> Bool
prop_Member xs n =
let m = fromList xs
in all (\k -> k `member` m == (k `elem` xs)) (n : xs)
prop_NotMember :: [Int] -> Int -> Bool
prop_NotMember xs n =
let m = fromList xs
in all (\k -> k `notMember` m == (k `notElem` xs)) (n : xs)
test_LookupSomething :: (Int -> IntSet -> Maybe Int) -> (Int -> Int -> Bool) -> [Int] -> Bool
test_LookupSomething lookup' cmp xs =
let odd_sorted_xs = filter_odd $ nub $ sort xs
t = fromList odd_sorted_xs
test x = case List.filter (`cmp` x) odd_sorted_xs of
[] -> lookup' x t == Nothing
cs | 0 `cmp` 1 -> lookup' x t == Just (last cs) -- we want largest such element
| otherwise -> lookup' x t == Just (head cs) -- we want smallest such element
in all test xs
where filter_odd [] = []
filter_odd [_] = []
filter_odd (_ : o : xs) = o : filter_odd xs
prop_LookupLT :: [Int] -> Bool
prop_LookupLT = test_LookupSomething lookupLT (<)
prop_LookupGT :: [Int] -> Bool
prop_LookupGT = test_LookupSomething lookupGT (>)
prop_LookupLE :: [Int] -> Bool
prop_LookupLE = test_LookupSomething lookupLE (<=)
prop_LookupGE :: [Int] -> Bool
prop_LookupGE = test_LookupSomething lookupGE (>=)
prop_InsertDelete :: Int -> IntSet -> Property
prop_InsertDelete k t
= not (member k t) ==> delete k (insert k t) == t
prop_MemberFromList :: [Int] -> Bool
prop_MemberFromList xs
= all (`member` t) abs_xs && all ((`notMember` t) . negate) abs_xs
where abs_xs = [abs x | x <- xs, x /= 0]
t = fromList abs_xs
{--------------------------------------------------------------------
Union
--------------------------------------------------------------------}
prop_UnionInsert :: Int -> IntSet -> Bool
prop_UnionInsert x t
= union t (singleton x) == insert x t
prop_UnionAssoc :: IntSet -> IntSet -> IntSet -> Bool
prop_UnionAssoc t1 t2 t3
= union t1 (union t2 t3) == union (union t1 t2) t3
prop_UnionComm :: IntSet -> IntSet -> Bool
prop_UnionComm t1 t2
= (union t1 t2 == union t2 t1)
prop_Diff :: [Int] -> [Int] -> Bool
prop_Diff xs ys
= toAscList (difference (fromList xs) (fromList ys))
== List.sort ((List.\\) (nub xs) (nub ys))
prop_Int :: [Int] -> [Int] -> Bool
prop_Int xs ys
= toAscList (intersection (fromList xs) (fromList ys))
== List.sort (nub ((List.intersect) (xs) (ys)))
{--------------------------------------------------------------------
Lists
--------------------------------------------------------------------}
prop_Ordered :: Property
prop_Ordered
= forAll (choose (5,100)) $ \n ->
let xs = concat [[i-n,i-n]|i<-[0..2*n :: Int]]
in fromAscList xs == fromList xs
prop_List :: [Int] -> Bool
prop_List xs
= (sort (nub xs) == toAscList (fromList xs))
prop_DescList :: [Int] -> Bool
prop_DescList xs = (reverse (sort (nub xs)) == toDescList (fromList xs))
prop_AscDescList :: [Int] -> Bool
prop_AscDescList xs = toAscList s == reverse (toDescList s)
where s = fromList xs
prop_fromList :: [Int] -> Bool
prop_fromList xs
= case fromList xs of
t -> t == fromAscList sort_xs &&
t == fromDistinctAscList nub_sort_xs &&
t == List.foldr insert empty xs
where sort_xs = sort xs
nub_sort_xs = List.map List.head $ List.group sort_xs
{--------------------------------------------------------------------
Bin invariants
--------------------------------------------------------------------}
powersOf2 :: IntSet
powersOf2 = fromList [2^i | i <- [0..63]]
-- Check the invariant that the mask is a power of 2.
prop_MaskPow2 :: IntSet -> Bool
prop_MaskPow2 (Bin _ msk left right) = member msk powersOf2 && prop_MaskPow2 left && prop_MaskPow2 right
prop_MaskPow2 _ = True
-- Check that the prefix satisfies its invariant.
prop_Prefix :: IntSet -> Bool
prop_Prefix s@(Bin prefix msk left right) = all (\elem -> match elem prefix msk) (toList s) && prop_Prefix left && prop_Prefix right
prop_Prefix _ = True
-- Check that the left elements don't have the mask bit set, and the right
-- ones do.
prop_LeftRight :: IntSet -> Bool
prop_LeftRight (Bin _ msk left right) = and [x .&. msk == 0 | x <- toList left] && and [x .&. msk == msk | x <- toList right]
prop_LeftRight _ = True
{--------------------------------------------------------------------
IntSet operations are like Set operations
--------------------------------------------------------------------}
toSet :: IntSet -> Set.Set Int
toSet = Set.fromList . toList
-- Check that IntSet.isProperSubsetOf is the same as Set.isProperSubsetOf.
prop_isProperSubsetOf :: IntSet -> IntSet -> Bool
prop_isProperSubsetOf a b = isProperSubsetOf a b == Set.isProperSubsetOf (toSet a) (toSet b)
-- In the above test, isProperSubsetOf almost always returns False (since a
-- random set is almost never a subset of another random set). So this second
-- test checks the True case.
prop_isProperSubsetOf2 :: IntSet -> IntSet -> Bool
prop_isProperSubsetOf2 a b = isProperSubsetOf a c == (a /= c) where
c = union a b
prop_isSubsetOf :: IntSet -> IntSet -> Bool
prop_isSubsetOf a b = isSubsetOf a b == Set.isSubsetOf (toSet a) (toSet b)
prop_isSubsetOf2 :: IntSet -> IntSet -> Bool
prop_isSubsetOf2 a b = isSubsetOf a (union a b)
prop_size :: IntSet -> Bool
prop_size s = size s == List.length (toList s)
prop_findMax :: IntSet -> Property
prop_findMax s = not (null s) ==> findMax s == maximum (toList s)
prop_findMin :: IntSet -> Property
prop_findMin s = not (null s) ==> findMin s == minimum (toList s)
prop_ord :: IntSet -> IntSet -> Bool
prop_ord s1 s2 = s1 `compare` s2 == toList s1 `compare` toList s2
prop_readShow :: IntSet -> Bool
prop_readShow s = s == read (show s)
prop_foldR :: IntSet -> Bool
prop_foldR s = foldr (:) [] s == toList s
prop_foldR' :: IntSet -> Bool
prop_foldR' s = foldr' (:) [] s == toList s
prop_foldL :: IntSet -> Bool
prop_foldL s = foldl (flip (:)) [] s == List.foldl (flip (:)) [] (toList s)
prop_foldL' :: IntSet -> Bool
prop_foldL' s = foldl' (flip (:)) [] s == List.foldl' (flip (:)) [] (toList s)
prop_map :: IntSet -> Bool
prop_map s = map id s == s
prop_maxView :: IntSet -> Bool
prop_maxView s = case maxView s of
Nothing -> null s
Just (m,s') -> m == maximum (toList s) && s == insert m s' && m `notMember` s'
prop_minView :: IntSet -> Bool
prop_minView s = case minView s of
Nothing -> null s
Just (m,s') -> m == minimum (toList s) && s == insert m s' && m `notMember` s'
prop_split :: IntSet -> Int -> Bool
prop_split s i = case split i s of
(s1,s2) -> all (<i) (toList s1) && all (>i) (toList s2) && i `delete` s == union s1 s2
prop_splitMember :: IntSet -> Int -> Bool
prop_splitMember s i = case splitMember i s of
(s1,t,s2) -> all (<i) (toList s1) && all (>i) (toList s2) && t == i `member` s && i `delete` s == union s1 s2
prop_partition :: IntSet -> Int -> Bool
prop_partition s i = case partition odd s of
(s1,s2) -> all odd (toList s1) && all even (toList s2) && s == s1 `union` s2
prop_filter :: IntSet -> Int -> Bool
prop_filter s i = partition odd s == (filter odd s, filter even s)
#if MIN_VERSION_base(4,5,0)
prop_bitcount :: Int -> Word -> Bool
prop_bitcount a w = bitcount_orig a w == bitcount_new a w
where
bitcount_orig a0 x0 = go a0 x0
where go a 0 = a
go a x = go (a + 1) (x .&. (x-1))
bitcount_new a x = a + popCount x
#endif
|
ekmett/containers
|
tests/intset-properties.hs
|
bsd-3-clause
| 12,424
| 7
| 17
| 2,940
| 3,806
| 1,972
| 1,834
| 222
| 4
|
module AppData where
#include "Utils.cpp"
import qualified Graphics.UI.GLFW as GLFW
import qualified Data.IORef as R
import qualified Data.List as L
import Data.Maybe (fromMaybe)
import qualified Control.Monad.State as ST
import qualified Gamgine.Utils as GU
import qualified GameData.Data as GD
import qualified GameData.Level as LV
import qualified GameData.Entity as E
import qualified Defaults as DF
import qualified Gamgine.State.RenderState as RS
import qualified Gamgine.State.State as S
import qualified Gamgine.State.StateTree as SS
import Gamgine.State.StateTree (enterWhen, leaveWhen, adjacents, StateTree(..), StateTransition(..))
import Gamgine.State.StateTreeZipper as SZ
import qualified Gamgine.State.KeyInfo as KI
import qualified Gamgine.State.MouseInfo as MI
import qualified Gamgine.State.InputInfo as II
import Gamgine.State.InputInfo (Modifier(..), InputState(..))
import qualified States.GameRunning as GR
import qualified States.EditModeRunning as EM
import qualified States.MovingEntity as ME
import qualified States.CreatingPlatform as CP
import qualified States.ResizingPlatform as RP
import qualified States.DefiningAnimation as DA
import qualified States.IntroRunning as IR
IMPORT_LENS_AS_LE
data AppData = AppData {
window :: GLFW.Window,
windowSize :: (Int, Int),
frustumSize :: (Double, Double),
orthoScale :: Double,
renderRessources :: RS.Ressources,
levelsLoadedFrom :: FilePath,
saveLevelsTo :: FilePath,
appMode :: AppMode,
gameData :: GD.Data,
stateTree :: SZ.Zipper GD.Data
}
LENS(window)
LENS(windowSize)
LENS(frustumSize)
LENS(orthoScale)
LENS(renderRessources)
LENS(levelsLoadedFrom)
LENS(saveLevelsTo)
LENS(appMode)
LENS(gameData)
LENS(stateTree)
newAppData :: GLFW.Window -> GD.Data -> FilePath -> FilePath -> AppMode -> AppData
newAppData win gameData levelsLoadedFrom saveLevelsTo appMode = AppData {
window = win,
windowSize = (0,0),
frustumSize = (0,0),
orthoScale = DF.orthoScale,
renderRessources = RS.emptyRessources,
levelsLoadedFrom = levelsLoadedFrom,
saveLevelsTo = saveLevelsTo,
appMode = appMode,
gameData = gameData,
stateTree = SZ.zipper $
if appMode == EditMode
then SS.root EM.mkEditModeRunningState
[Branch {state = ME.mkMovingEntityState,
enterWhen = ByMouseWithMod GLFW.MouseButton'1 Pressed Ctrl,
leaveWhen = ByMouse GLFW.MouseButton'1 Released,
adjacents = []},
Branch {state = RP.mkResizingPlatformState,
enterWhen = ByMouseWithMod GLFW.MouseButton'1 Pressed Shift,
leaveWhen = ByMouse GLFW.MouseButton'1 Released,
adjacents = []},
Branch {state = CP.mkCreatingPlatformState,
enterWhen = ByMouse GLFW.MouseButton'1 Pressed,
leaveWhen = ByMouse GLFW.MouseButton'1 Released,
adjacents = []},
Branch {state = DA.mkDefiningAnimationState,
enterWhen = ByKey (GLFW.Key'U) Pressed,
leaveWhen = ByKey (GLFW.Key'U) Pressed,
adjacents = []}]
else SS.root IR.mkIntroRunningState
[Branch {state = GR.mkGameRunningState,
enterWhen = ByKey (GLFW.Key'Space) Pressed,
leaveWhen = NoTransition,
adjacents = []}]
}
data AppMode = GameMode | EditMode deriving Eq
type AppDataRef = R.IORef AppData
type AppST = ST.StateT AppDataRef IO
runAppST :: AppST a -> AppDataRef -> IO (a, AppDataRef)
runAppST = ST.runStateT
update :: AppData -> AppData
update app = applyToState f app
where
f = S.update $ LE.getL currentStateL app
render :: Double -> AppData -> IO AppData
render nextFrameFraction app = applyToStateIO f app
where
f = (S.render $ LE.getL currentStateL app) rstate
rstate = RS.RenderState nextFrameFraction (renderRessources app) (frustumSize app)
handleKeyEvent :: KI.KeyInfo -> AppData -> AppData
handleKeyEvent ki app =
let (gdata', stree') = SZ.handleKeyEvent ki (gameData app) (stateTree app)
in app {gameData = gdata', stateTree = stree'}
handleMouseEvent :: MI.MouseInfo -> AppData -> AppData
handleMouseEvent mi app =
let (gdata', stree') = SZ.handleMouseEvent mi (gameData app) (stateTree app)
in app {gameData = gdata', stateTree = stree'}
handleMouseMoved :: II.MousePos -> AppData -> AppData
handleMouseMoved mp app = applyToState f app
where
f = (S.mouseMoved $ LE.getL currentStateL app) mp
applyToState :: (GD.Data -> (GD.Data, S.State GD.Data)) -> AppData -> AppData
applyToState f app = LE.setL currentStateL state' $ app {gameData = gdata'}
where
(gdata', state') = f $ gameData app
applyToStateIO :: (GD.Data -> IO (GD.Data, S.State GD.Data)) -> AppData -> IO AppData
applyToStateIO f app = do
(gdata', state') <- f $ gameData app
return (LE.setL currentStateL state' $ app {gameData = gdata'})
currentStateL = LE.lens getCurrentState setCurrentState
where
getCurrentState = state . SZ.current . stateTree
where
state (SS.Branch s _ _ _) = s
setCurrentState state = LE.modL stateTreeL $ SZ.replace state
currentLevelL = GD.currentLevelL . gameDataL
activeLayerL = LV.activeLayerL . currentLevelL
inactiveLayers = LV.inactiveLayers . LE.getL currentLevelL
|
dan-t/layers
|
src/AppData.hs
|
bsd-3-clause
| 5,671
| 0
| 15
| 1,449
| 1,552
| 887
| 665
| -1
| -1
|
module SeqTests where
import Prelude hiding (lookup, filter)
import Control.Monad.State
import Data.Hashable
import Data.LinkedHashMap.Seq
import qualified Data.LinkedHashMap.Seq as LHM
import qualified Data.HashMap.Strict as M
test0 = fromList [(1 :: Int,"A"), (5, "B"), (7, "C"), (-6, "D")]
test1 = fromList [(1 :: Int,"A"), (5, "B"), (7, "C"), (-6, "D"), (1 :: Int,"AAAAA")]
test2 = fromList [(1 :: Int,"A"), (5, "B"), (7, "C"), (-6, "D"), (1,"AA"), (5,"BB"), (7,"CC"),(7,"CCC")]
test3 = fromList [(1 :: Int,"A"), (1, "B"), (1, "C"), (1, "D")]
y0@(LinkedHashMap mm0 _s0 _n0) = test0
y1@(LinkedHashMap mm1 s1 n1) = insert 3 "ZZ" test0
y2@(LinkedHashMap mm2 s2 n2) = insert (-10) "AA" $ insert 3 "ZZ" test0
y3@(LinkedHashMap mm3 s3 n3) = delete 5 $ insert (-10) "AA" $ insert 3 "ZZ" test0
y4@(LinkedHashMap mm4 s4 n4) = delete 3 $ delete 5 $ insert (-10) "AA" $ insert 3 "ZZ" test0
y5@(LinkedHashMap mm5 s5 n5) = insert 3 "ZZ" test0
y6@(LinkedHashMap mm6 s6 n6) = insert 3 "AA" y5
y7@(LinkedHashMap mm7 s7 n7) = delete 3 $ insert 3 "AA" $ insert 3 "ZZ" test0
y8@(LinkedHashMap mm8 s8 n8) = delete 5 $ insert (-10) "AA" $ insert 3 "ZZ" test0
y9@(LinkedHashMap mm9 s9 n9) = delete 3 y8
z0@(LinkedHashMap zm0 zs0 zn0) = test1
z1@(LinkedHashMap zm1 zs1 zn1) = test2
z2@(LinkedHashMap zm2 zs2 zn2) = test3
z3@(LinkedHashMap zm3 zs3 zn3) = delete 7 $ test0
z4@(LinkedHashMap zm4 zs4 zn4) = delete 1 $ z3
z5@(LinkedHashMap zm5 zs5 zn5) = delete 5 $ z4
test4 = fromList ([] :: [(Int, String)])
y3'@(LinkedHashMap mm3' s3' n3') = pack y3
z6 = insertWith (++) 5 "ZZZ_" test0
z7 = insertWith (++) 11 "ZZZ_" test0
z8 = adjust (\v0 -> v0 ++ "_adjusted") 5 test0
z9 = adjust (\v0 -> v0 ++ "_adjusted") 123 test0
t0 = fromList [(1 :: Int,"A"), (5, "B"), (7, "C"), (-6, "D")]
t1 = fromList [(2 :: Int,"2A"), (3, "2B"), (7, "2C"), (-6, "2D")]
t2 = fromList [(0 :: Int,"3A"), (5, "3B"), (17, "3C"), (-6, "3D")]
u1 = union t0 t1
u2 = union t1 t2
u3 = unionWith (++) t0 t1
u4 = unions [t0, t1, t2]
m1 = mapWithKey (\k v1 -> v1 ++ show k) t0
hm0 = M.fromList [(1 :: Int,"A"), (5, "B"), (7, "C"), (-6, "D")]
printItem :: Show a => a -> String -> IO String
printItem k v = do
putStrLn $ (show k) ++ "->" ++ v
return $ v ++ "_processed"
joinPrev :: Int -> String -> State String String
joinPrev _ v = do
prev <- get
put v
return $ v ++ ":" ++ prev
a0 = traverseWithKey printItem test0
a1 = m where (m, _) = runState (traverseWithKey joinPrev test0) "0"
d0 = difference test1 (delete 1 $ delete 7 $ test0)
i0 = intersection test1 (delete 1 $ delete 7 $ test0)
ik0 = intersectionWith (\v1 v2 -> v1 ++ v2) test1 (delete 1 $ delete 7 $ test0)
f0 = LHM.foldr (++) "" test1
f1 = LHM.foldr (++) "" y3
f3 = LHM.foldl' (++) "" test1
f4 = LHM.foldl' (++) "" y3
fk0 = foldlWithKey' (\a k v -> a ++ (show k) ++ "=" ++ v ++ ",") "" test1
fk1 = foldrWithKey (\k v a -> a ++ (show k) ++ "=" ++ v ++ ",") "" test1
ff0 = filter (\v -> v == "B" || v == "C") test0
ff1 = filterWithKey (\k v -> v == "B" || k == -6) test0
fff0 = fromListWith (\v1 v2 -> v2 ++ v1) [(1 :: Int,"A"), (5, "B"), (7, "C"), (-6, "D"), (1 :: Int,"ZZZ")]
|
abasko/linkedhashmap
|
tests/SeqTests.hs
|
bsd-3-clause
| 3,117
| 0
| 13
| 625
| 1,739
| 963
| 776
| 65
| 1
|
{-# LANGUAGE FlexibleInstances #-}
module MyCloud.DB where
import Control.Monad.IO.Peel
import Control.Monad.Reader
import qualified Database.HDBC as H
import Database.HDBC hiding (catchSql, handleSql, throwSqlError)
import Database.HDBC.PostgreSQL
import MyCloud.Internal.Types
withConnection :: (Connection -> IO a) -> MyCloud a
withConnection go = MyCloud $ do
Config { postgresqlConnection = conStr } <- ask
liftIO $ withPostgreSQL conStr go
--------------------------------------------------------------------------------
---- SQL helpers
updateSql :: String -> [SqlValue] -> MyCloud Integer
updateSql s v = withConnection $ \c -> do
i <- run c s v
commit c
return i
updateSql_ :: String -> [SqlValue] -> MyCloud ()
updateSql_ s v = updateSql s v >> return ()
querySql :: String -> [SqlValue] -> MyCloud [[SqlValue]]
querySql s v = withConnection $ \c -> quickQuery' c s v
--------------------------------------------------------------------------------
-- Exceptions
catchSql :: MonadPeelIO m => m a -> (SqlError -> m a) -> m a
catchSql m h = do
k <- peelIO
join . liftIO $ H.catchSql (k m) (\e -> k (h e))
handleSql :: MonadPeelIO m => (SqlError -> m a) -> m a -> m a
handleSql = flip catchSql
throwSqlError :: MonadIO m => SqlError -> m a
throwSqlError e = do
liftIO $ H.throwSqlError e
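-- A minimal usage sketch (added for illustration; not part of the original
-- module): run an action and fall back to a default value whenever the
-- database raises an 'SqlError'.
orElseOnSqlError :: MonadPeelIO m => m a -> a -> m a
orElseOnSqlError act def = catchSql act (\_ -> return def)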
--------------------------------------------------------------------------------
-- SQL select
{-
-- | Minimal complete definition: `select` and `convertFromSql`. Example
-- implementation:
--
-- > instance Select Foo where
-- > select = withSelectStr "SELECT foo FROM foos"
-- > convertFromSql = convertSqlToFoo
--
-- Example usage:
--
-- > getFooById :: MonadIO m => Id -> m Foo
-- > getFooById id = select "WHERE id = ?" [toSql id]
class Select res where
convertFromSql :: [[SqlValue]] -> res
select :: String -> [SqlValue] -> MyCloud res
fullSelect :: String -> [SqlValue] -> MyCloud res
withSelectStr :: String -- ^ "SELECT .. FROM .."
-> String -- ^ "WHERE .." / "JOIN .." etc
-> [SqlValue]
-> MyCloud res
-- default implementations
fullSelect s v = querySql s v >>= return . convertFromSql
withSelectStr s1 s2 v = fullSelect (s1++" "++s2) v
--------------------------------------------------------------------------------
-- SQL queries
instance Select [Event] where
select = withSelectStr "SELECT (time, path, event) FROM recent_events"
convertFromSql = map conv1
where
conv1 [t,p,e]
| Right t' <- safeFromSql t
, Right p' <- safeFromSql p
, Right e' <- safeFromSql e
= Event t' p' (toEnum e')
conv1 v
= error $ "Cannot convert " ++ show v
-}
recentEvents :: SessionID -> MyCloud [Event]
recentEvents (SessionID sid) = toEvents `fmap`
querySql "SELECT time, path, event \
\ FROM recent_events_by_session_id \
\ WHERE session_id = ?"
[ toSql sid ]
where
toEvents = map conv1
conv1 [t,p,e]
| Right t' <- safeFromSql t
, Right p' <- safeFromSql p
, Right e' <- safeFromSql e
= Event t' p' (toEnum e')
conv1 v
= error $ "Cannot convert " ++ show v
|
mcmaniac/mycloud
|
src/MyCloud/DB.hs
|
bsd-3-clause
| 3,232
| 0
| 13
| 752
| 612
| 311
| 301
| 42
| 2
|
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecursiveDo #-}
{-# LANGUAGE RankNTypes #-}
module Page where
import qualified Data.Aeson as A
import Data.Bool
import Control.Monad.IO.Class (liftIO)
import qualified Data.ByteString.Base64 as B64
import qualified Data.ByteString as BS
import Data.Default
import Data.Foldable
import qualified Data.Map as Map
import Data.Monoid
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.Text.Lazy as TL
import Data.Time
--import Lucid
--import qualified Lucid as L
--import qualified Lucid.Svg as LSvg
--import Lucid.Svg (Svg(..), renderText)
import CollabTypes
import Utils
import Network.HTTP.Base (urlEncode, urlDecode)
import Figure
import Reflex
import Primitives
import Shadow
import Menus
import Reflex.Dom
pageWidget :: MonadWidget t m => UTCTime -> m ()
pageWidget t0 = mdo
pb <- getPostBuild
let modelUrls = "/model" <$ (leftmost [pb
, () <$ piUpdates
, () <$ memberUpdates
, () <$ projectUpdates
])
tickTimes <- fmap _tickInfo_lastUTC <$> tickLossy 0.1 t0
modelEvents <- fmapMaybe id <$> getAndDecode modelUrls
focusEvents <- fmap (fmapMaybe id) $ fmap (updated . nubDyn ) $ combineDyn
(\m opt -> _modelFocus m >>= flip Map.lookup (piAngles m opt))
model dynFigOpts
focusTimesAngles <- performEvent (fmap (\a -> do
t <- liftIO getCurrentTime
return (t,a)) focusEvents)
let aux (t,a) p = MotionPlan t 1 (rEnd p) a
plan0 = MotionPlan (UTCTime (fromGregorian 2015 1 1) 0) 0 0 0
dynMotionPlan <- foldDyn aux plan0 focusTimesAngles
let modelEventCommands = fmap (const) modelEvents -- TODO right, given foldr?
lastMouseMove <- holdDyn (0,0) moves
let timedPlans = attach (current dynMotionPlan) tickTimes
angEvents = (fmapMaybe id) $ ffor timedPlans (uncurry (flip runMotionPlan))
figOptEvents = ffor angEvents $ \ang -> defFigOpts {thrustThetaOffset = ang}
--dynFigOpts <- holdDyn defFigOpts figOptEvents
dynFigOpts <- holdDyn defFigOpts (ffor tickTimes $ \t -> defFigOpts {thrustThetaOffset = realToFrac (diffUTCTime t t0)})
display dynFigOpts
-- dynFigOpts <- forDyn lastMouseMove $ \(x,y) ->
-- defFigOpts { thrustRadiusMin = 255
-- , thrustRadiusMax = fromIntegral y
-- , thrustThetaOffset = fromIntegral x / 100}
--dynMotionPlan <- foldDyn ($) Nothing (leftmost [])
--dynFigOpts <- foldDyn (\)
model <- foldDyn id -- (flip (foldr ($)))
(Model [] [] [] Nothing)
(leftmost [modelEventCommands, svgEvents])
piUpdates <- visToggleBox (text "PI") (newPIBox model)
el "br" (return ())
memberUpdates <- visToggleBox (text "M") (newMemberBox model)
el "br" (return ())
projectUpdates <- visToggleBox (text "Proj") (newProjectBox model)
-- menuEvents <- menusWidget pictureEvents
-- infoWidget menuEvents
(fig,svgEvents) <- elAttr' "div" ("class" =: "main-figure") $ do
svgEvents <- svgTag (floor svgWidth) (floor svgHeight) $ do
bkgnd
svgElAttr "g" ("transform" =: "translate(400 400)") $
modelSvg model dynFigOpts
--display model
return svgEvents
let moves = domEvent Mousemove fig
return ()
svgHeight, svgWidth :: Double
svgHeight = 800
svgWidth = 800
bkgnd :: MonadWidget t m => m ()
bkgnd = do
svgEl "defs" $ do
svgElAttr "radialGradient" ("id" =: st "bkgndGradient"
<> "cx" =: "0.6" <> "cy" =: "0.6"
<> "r" =: "0.4") $ do
svgElAttr "stop" ("offset" =: st "0%"
<> "stop-color" =: "#1b5354") $ return ()
svgElAttr "stop" ("offset" =: st "100%"
<> "stop-color" =: "#0f2d2d") $ return ()
svgElAttr "rect" ("x" =: "0" -- pxf (svgWidth / (-2))
<> "y" =: "0" -- pxf (svgHeight / (-2))
<> "width" =: pxf svgWidth
<> "height" =: pxf svgHeight
<> "fill" =: "url(#bkgndGradient)") $ return ()
data MotionPlan = MotionPlan
{ tStart :: UTCTime
, duration :: Double
, rStart :: Double
, rEnd :: Double
}
runMotionPlan :: UTCTime -> MotionPlan -> Maybe Double
runMotionPlan t mp =
let x = realToFrac (diffUTCTime t (tStart mp))
y = (rStart mp - rEnd mp) / (duration mp) * x
in bool Nothing (Just y) (x < duration mp)
|
imalsogreg/collabplot
|
client/src/Page.hs
|
bsd-3-clause
| 4,727
| 0
| 20
| 1,336
| 1,284
| 662
| 622
| 98
| 1
|
{-# LANGUAGE ForeignFunctionInterface, CPP #-}
-----------------------------------------------------------------------------
-- |
-- Licence : BSD-style (see LICENSE)
--
-- Provides convenient functions for accessing the CLR, including: loading
-- the CLR into the process, releasing .NET object references, and obtaining
-- dynamically-generated stub functions for calling into .NET from Haskell.
--
-----------------------------------------------------------------------------
module Foreign.Salsa.CLR (
withCLR,
startCLR, stopCLR,
ObjectId,
releaseObject,
getMethodStub,
getFieldGetStub,
getFieldSetStub,
getDelegateConstructorStub,
boxString, boxInt32, boxBoolean,
SalsaString, withSalsaString, peekSalsaString
) where
import Data.Int
import System.IO.Unsafe ( unsafePerformIO )
import Foreign hiding ( new, newForeignPtr, unsafePerformIO )
import Foreign.C.String
#if (MONO)
import Foreign.Salsa.Mono.CLRHost
#else
import Foreign.Salsa.Win.CLRHost
#endif
-- | Identifies a foreign (.NET) object instance
type ObjectId = Int32
-- | Starts the .NET execution engine before executing the given IO action, and
-- finally stopping the execution engine. This can only be performed once
-- in a process.
withCLR :: IO a -> IO a
withCLR action = do
startCLR
r <- action
stopCLR
return r
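-- A minimal sketch (not part of the original module) of the intended
-- bracketing pattern: obtain a method stub while the CLR is running. The
-- class and method names, and the Haskell type given to the stub, are
-- assumptions for illustration only.
_withCLRExample :: IO ()
_withCLRExample = withCLR $ do
    _stub <- getMethodStub "System.Console" "WriteLine" "System.String"
                 :: IO (FunPtr (SalsaString -> IO ()))
    return ()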
startCLR :: IO ()
startCLR = do
startCLR'
-- Allow .NET to call into Haskell and free unused function pointer wrappers
setFreeHaskellFunPtr
stopCLR :: IO ()
stopCLR = do
-- saveDynamicAssembly -- (for debugging)
-- Prevent .NET finalizers from calling into Haskell (and causing access violations)
clearFreeHaskellFunPtr
stopCLR'
-- | @'unsafeGetPointerToMethod' m@ returns a function pointer to the method @m@
-- as implemented in the Salsa .NET driver assembly (Salsa.dll). It is safe only
-- if the type of the resulting function pointer matches that of the method given.
unsafeGetPointerToMethod :: String -> IO (FunPtr a)
unsafeGetPointerToMethod methodName = do
result <- withSalsaString methodName $ \methodName' -> getPointerToMethodRaw methodName'
if result == nullFunPtr
then error $ "Unable to execute Salsa.dll method '" ++ methodName ++ "'."
else return result
{-# NOINLINE getPointerToMethodRaw #-}
getPointerToMethodRaw :: GetPointerToMethodDelegate a
getPointerToMethodRaw = makeGetPointerToMethodDelegate $ unsafePerformIO $ loadDriverAndBoot
type GetPointerToMethodDelegate a = SalsaString -> IO (FunPtr a)
foreign import ccall "dynamic" makeGetPointerToMethodDelegate :: FunPtr (GetPointerToMethodDelegate a) ->
GetPointerToMethodDelegate a
-- | Releases the .NET object indicated by the given object id.
{-# NOINLINE releaseObject #-}
releaseObject :: ObjectId -> IO ()
releaseObject = makeReleaseObjectDelegate $ unsafePerformIO $ unsafeGetPointerToMethod "ReleaseObject"
type ReleaseObjectDelegate = ObjectId -> IO ()
foreign import ccall "dynamic" makeReleaseObjectDelegate :: FunPtr ReleaseObjectDelegate -> ReleaseObjectDelegate
-- | Passes a function pointer to the 'freeHaskellFunPtr' function into .NET so
-- that Haskell FunPtr's can be freed from .NET code.
setFreeHaskellFunPtr :: IO ()
setFreeHaskellFunPtr = do
funPtr <- wrapFreeHaskellFunPtr freeHaskellFunPtr
setFreeHaskellFunPtrRaw funPtr
-- Note: since the function passed into .NET may be used by .NET at any
-- point until the engine is shutdown, and the engine is only loaded
-- once per process, we don't need to free it.
-- | Clears the 'freeHaskellFunPtr' pointer on the .NET side to prevent finalizers from
-- calling into Haskell (and causing access violations).
clearFreeHaskellFunPtr :: IO ()
clearFreeHaskellFunPtr = setFreeHaskellFunPtrRaw nullFunPtr
{-# NOINLINE setFreeHaskellFunPtrRaw #-}
setFreeHaskellFunPtrRaw :: (FunPtr (FunPtr a -> IO ()) -> IO ())
setFreeHaskellFunPtrRaw = makeSetFreeHaskellFunPtrDelegate $ unsafePerformIO $
unsafeGetPointerToMethod "SetFreeHaskellFunPtr"
foreign import ccall "dynamic" makeSetFreeHaskellFunPtrDelegate ::
FunPtr (FunPtr (FunPtr a -> IO ()) -> IO ()) -> (FunPtr (FunPtr a -> IO ()) -> IO ())
foreign import ccall "wrapper" wrapFreeHaskellFunPtr ::
(FunPtr a -> IO ()) -> IO (FunPtr (FunPtr a -> IO ()))
-- | 'saveDynamicAssembly' saves the assembly containing the dynamically-generated
-- wrapper stubs to disk (for debugging purposes).
{-# NOINLINE saveDynamicAssembly #-}
saveDynamicAssembly :: IO ()
saveDynamicAssembly = makeSaveDynamicAssemblyDelegate $ unsafePerformIO $ unsafeGetPointerToMethod "SaveDynamicAssembly"
type SaveDynamicAssemblyDelegate = IO ()
foreign import ccall "dynamic" makeSaveDynamicAssemblyDelegate :: FunPtr SaveDynamicAssemblyDelegate -> SaveDynamicAssemblyDelegate
-- | @'getMethodStub' c m s@ returns a function pointer to a function that, when
-- called, invokes the method with name @m@ and signature @s@ in class @c@.
--
-- @s@ should be a semi-colon delimited list of parameter types indicating the
-- desired overload of the given method.
getMethodStub :: String -> String -> String -> IO (FunPtr f)
getMethodStub className methodName parameterTypeNames = do
withSalsaString className $ \className' ->
withSalsaString methodName $ \methodName' ->
withSalsaString parameterTypeNames $ \parameterTypeNames' ->
return $ getMethodStubRaw className' methodName' parameterTypeNames'
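-- A minimal sketch (not part of the original module) illustrating the
-- semicolon-delimited overload string: this requests the two-argument
-- 'Substring' overload of 'System.String'. The Haskell type given to the
-- stub is an assumption about the generated calling convention.
_substringStubExample :: IO (FunPtr (ObjectId -> Int32 -> Int32 -> IO ObjectId))
_substringStubExample =
    getMethodStub "System.String" "Substring" "System.Int32;System.Int32"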
{-# NOINLINE getMethodStubRaw #-}
getMethodStubRaw :: GetMethodStubDelegate a
getMethodStubRaw = makeGetMethodStubDelegate $ unsafePerformIO $ unsafeGetPointerToMethod "GetMethodStub"
type GetMethodStubDelegate a = SalsaString -> SalsaString -> SalsaString -> FunPtr a
foreign import ccall "dynamic" makeGetMethodStubDelegate :: FunPtr (GetMethodStubDelegate a) ->
(GetMethodStubDelegate a)
-- | @'getFieldGetStub' c f@ returns a function pointer to a function that, when
-- called, gets the value of the field @f@ in class @c@.
getFieldGetStub :: String -> String -> IO (FunPtr f)
getFieldGetStub className fieldName = do
withSalsaString className $ \className' ->
withSalsaString fieldName $ \fieldName' ->
return $ getFieldGetStubRaw className' fieldName'
{-# NOINLINE getFieldGetStubRaw #-}
getFieldGetStubRaw :: GetFieldGetStubDelegate a
getFieldGetStubRaw = makeGetFieldGetStubDelegate $ unsafePerformIO $ unsafeGetPointerToMethod "GetFieldGetStub"
type GetFieldGetStubDelegate a = SalsaString -> SalsaString -> FunPtr a
foreign import ccall "dynamic" makeGetFieldGetStubDelegate :: FunPtr (GetFieldGetStubDelegate a) ->
(GetFieldGetStubDelegate a)
-- | @'getFieldSetStub' c f@ returns a function pointer to a function that, when
-- called, sets the value of the field @f@ in class @c@ to the given value.
getFieldSetStub :: String -> String -> IO (FunPtr f)
getFieldSetStub className fieldName = do
withSalsaString className $ \className' ->
withSalsaString fieldName $ \fieldName' ->
return $ getFieldSetStubRaw className' fieldName'
{-# NOINLINE getFieldSetStubRaw #-}
getFieldSetStubRaw :: GetFieldSetStubDelegate a
getFieldSetStubRaw = makeGetFieldSetStubDelegate $ unsafePerformIO $ unsafeGetPointerToMethod "GetFieldSetStub"
type GetFieldSetStubDelegate a = SalsaString -> SalsaString -> FunPtr a
foreign import ccall "dynamic" makeGetFieldSetStubDelegate :: FunPtr (GetFieldSetStubDelegate a) ->
(GetFieldSetStubDelegate a)
-- | @'getDelegateConstructorStub' dt wrapper@ returns an action that, given a
-- function, will return a reference to a .NET delegate object that calls the
-- provided function. The delegate constructed will be of the type @dt@.
-- The function @wrapper@ will be called in order to wrap the given function
-- as a function pointer for passing into .NET.
getDelegateConstructorStub :: String -> (f -> IO (FunPtr f)) -> IO (f -> IO ObjectId)
getDelegateConstructorStub delegateTypeName wrapper = do
-- Obtain a function pointer to a function that, when called with a
-- function pointer compatible with the given wrapper function, returns
-- a reference to a .NET delegate object that calls the function.
delegateConstructor <- withSalsaString delegateTypeName $
\delegateTypeName' -> getDelegateConstructorStubRaw delegateTypeName'
-- Returns a function that accepts a function, 'f' implementing the
-- delegate, converts 'f' to a function pointer, and then wraps it
-- up as a .NET delegate.
return $ \f -> do
fFunPtr <- wrapper f
(makeDelegateConstructor delegateConstructor) fFunPtr
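-- A minimal sketch (not part of the original module) of turning a Haskell
-- action into a .NET delegate. The delegate type name and the mapping of a
-- parameterless delegate to @IO ()@ are assumptions for illustration only.
foreign import ccall "wrapper" _wrapAction :: IO () -> IO (FunPtr (IO ()))
_delegateExample :: IO ObjectId
_delegateExample = do
    construct <- getDelegateConstructorStub "System.Action" _wrapAction
    construct (putStrLn "called back from .NET")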
{-# NOINLINE getDelegateConstructorStubRaw #-}
getDelegateConstructorStubRaw :: GetDelegateConstructorStubDelegate a
getDelegateConstructorStubRaw = makeGetDelegateConstructorStubDelegate $ unsafePerformIO $ unsafeGetPointerToMethod "GetDelegateConstructorStub"
type GetDelegateConstructorStubDelegate a = SalsaString -> IO (FunPtr (FunPtr a -> IO ObjectId))
foreign import ccall "dynamic" makeGetDelegateConstructorStubDelegate :: FunPtr (GetDelegateConstructorStubDelegate a) ->
(GetDelegateConstructorStubDelegate a)
type DelegateConstructor a = FunPtr a -> IO ObjectId
foreign import ccall "dynamic" makeDelegateConstructor :: FunPtr (DelegateConstructor a) -> (DelegateConstructor a)
--
-- Boxing support
--
-- | @'getBoxStub' t@ returns a function pointer to a function that, when
-- called, returns a boxed object reference to the given type.
getBoxStub :: String -> IO (FunPtr f)
getBoxStub typeName = do
withSalsaString typeName $ \typeName' -> return $ getBoxStubRaw typeName'
{-# NOINLINE getBoxStubRaw #-}
getBoxStubRaw :: GetBoxStubDelegate a
getBoxStubRaw = makeGetBoxStubDelegate $ unsafePerformIO $ unsafeGetPointerToMethod "GetBoxStub"
type GetBoxStubDelegate a = SalsaString -> FunPtr a
foreign import ccall "dynamic" makeGetBoxStubDelegate :: FunPtr (GetBoxStubDelegate a) -> GetBoxStubDelegate a
boxString :: String -> IO ObjectId
boxString s = withSalsaString s $ \s' -> boxStringStub s'
type BoxStringStub = SalsaString -> IO ObjectId
foreign import ccall "dynamic" makeBoxStringStub :: FunPtr BoxStringStub -> BoxStringStub
{-# NOINLINE boxStringStub #-}
boxStringStub :: BoxStringStub
boxStringStub = makeBoxStringStub $ unsafePerformIO $ getBoxStub "System.String"
boxInt32 :: Int32 -> IO ObjectId
boxInt32 = boxInt32Stub
type BoxInt32Stub = Int32 -> IO ObjectId
foreign import ccall "dynamic" makeBoxInt32Stub :: FunPtr BoxInt32Stub -> BoxInt32Stub
{-# NOINLINE boxInt32Stub #-}
boxInt32Stub :: BoxInt32Stub
boxInt32Stub = makeBoxInt32Stub $ unsafePerformIO $ getBoxStub "System.Int32"
boxBoolean :: Bool -> ObjectId
boxBoolean True = boxedTrue
boxBoolean False = boxedFalse
{-# NOINLINE boxedTrue #-}
boxedTrue :: ObjectId
boxedTrue = unsafePerformIO $ boxBooleanStub True
{-# NOINLINE boxedFalse #-}
boxedFalse :: ObjectId
boxedFalse = unsafePerformIO $ boxBooleanStub False
type BoxBooleanStub = Bool -> IO ObjectId
foreign import ccall "dynamic" makeBoxBooleanStub :: FunPtr BoxBooleanStub -> BoxBooleanStub
{-# NOINLINE boxBooleanStub #-}
boxBooleanStub :: BoxBooleanStub
boxBooleanStub = makeBoxBooleanStub $ unsafePerformIO $ getBoxStub "System.Boolean"
-- vim:set ts=4 sw=4 expandtab:
|
tim-m89/Salsa
|
Foreign/Salsa/CLR.hs
|
bsd-3-clause
| 11,279
| 0
| 14
| 1,814
| 1,827
| 967
| 860
| 151
| 2
|
{-# LANGUAGE
MultiParamTypeClasses
, DeriveFunctor
, GeneralizedNewtypeDeriving
#-}
module System.Heap.Write where
import Prelude hiding (read)
import Control.Applicative
import Control.Monad.Error
import Control.Monad.Reader
import Control.Monad.State
import Data.Binary (Binary, encode)
import System.Heap.Error
import System.Heap.Pointer
import System.IO
import qualified Data.ByteString.Lazy as Lazy
import qualified System.Heap.Read as Read
import qualified System.Heap.Alloc as Alloc
newtype Heap a = Heap { run :: Alloc.Heap a }
deriving
( Functor
, Applicative
, Monad
, MonadIO
, MonadState Alloc.Map
, MonadReader Handle
, MonadError HeapError
)
runAlloc :: Alloc.Heap a -> Heap a
runAlloc = Heap
runRead :: Read.Heap a -> Heap a
runRead = runAlloc . Alloc.read
allocate :: Size -> Heap (Offset, Size)
allocate = runAlloc . Alloc.allocate
read :: Binary a => Pointer a -> Heap a
read = runRead . Read.read
writeBlock :: Offset -> Size -> Lazy.ByteString -> Heap ()
writeBlock o s bs =
do h <- ask
liftIO $
do hSeek h AbsoluteSeek (fromIntegral o)
Lazy.hPut h (encode (s, bs))
write :: Binary a => a -> Heap (Pointer a)
write a =
do let bs = encode a
(o, s) <- allocate (fromIntegral (Lazy.length bs))
writeBlock o s bs
return (Ptr o)
writeAllocationMap :: Heap ()
writeAllocationMap =
get >>= write >>= writeBlock 0x18 0x08 . encode
readAllocationMap :: Heap ()
readAllocationMap = read (Ptr 0x18) >>= read >>= put
writeRoot :: Binary a => a -> Heap ()
writeRoot = write >=> writeBlock 0x30 0x08 . encode
readRoot :: Binary a => Heap a
readRoot = read (Ptr 0x30) >>= read
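-- A minimal sketch (not part of the original module): a write/read round
-- trip inside the 'Heap' monad. The payload value is arbitrary.
roundTripExample :: Heap Bool
roundTripExample = do
    ptr <- write ("payload" :: String, 42 :: Int)
    val <- read ptr
    return (val == ("payload", 42))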
|
sebastiaanvisser/diskheap
|
src/System/Heap/Write.hs
|
bsd-3-clause
| 1,679
| 0
| 13
| 354
| 582
| 307
| 275
| -1
| -1
|
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree. An additional grant
-- of patent rights can be found in the PATENTS file in the same directory.
module Duckling.Numeral.KO.Tests
( tests ) where
import Prelude
import Data.String
import Test.Tasty
import Duckling.Dimensions.Types
import Duckling.Numeral.KO.Corpus
import Duckling.Testing.Asserts
tests :: TestTree
tests = testGroup "KO Tests"
[ makeCorpusTest [This Numeral] corpus
]
|
rfranek/duckling
|
tests/Duckling/Numeral/KO/Tests.hs
|
bsd-3-clause
| 600
| 0
| 9
| 96
| 80
| 51
| 29
| 11
| 1
|
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
module Juno.Types.Log
( LogEntry(..), leTerm, leCommand, leHash
, Log(..), lEntries
, LEWire(..), encodeLEWire, decodeLEWire, decodeLEWire', toSeqLogEntry
, lookupEntry
, lastEntry
, takeEntries
, getEntriesAfter
, logInfoForNextIndex
, lastLogTerm
, lastLogHash
, entryCount
, maxIndex
, appendLogEntry
, addLogEntriesAt
) where
import Control.Parallel.Strategies
import Control.Lens hiding (Index, (|>))
import Codec.Digest.SHA
import qualified Control.Lens as Lens
import Data.Sequence (Seq, (|>))
import qualified Data.Sequence as Seq
import Data.ByteString (ByteString)
import Data.Serialize
import Data.Foldable
import Data.Thyme.Time.Core ()
import GHC.Generics
import Juno.Types.Base
import Juno.Types.Config
import Juno.Types.Message.Signed
import Juno.Types.Message.CMD
data LogEntry = LogEntry
{ _leTerm :: !Term
, _leCommand :: !Command
, _leHash :: !ByteString
}
deriving (Show, Eq, Generic)
makeLenses ''LogEntry
newtype Log a = Log { _lEntries :: Seq a }
deriving (Eq,Show,Ord,Generic,Monoid,Functor,Foldable,Traversable,Applicative,Monad,NFData)
makeLenses ''Log
instance (t ~ Log a) => Rewrapped (Log a) t
instance Wrapped (Log a) where
type Unwrapped (Log a) = Seq a
_Wrapped' = iso _lEntries Log
instance Cons (Log a) (Log a) a a where
_Cons = _Wrapped . _Cons . mapping _Unwrapped
instance Snoc (Log a) (Log a) a a where
_Snoc = _Wrapped . _Snoc . firsting _Unwrapped
type instance IxValue (Log a) = a
type instance Lens.Index (Log a) = LogIndex
instance Ixed (Log a) where ix i = lEntries.ix (fromIntegral i)
data LEWire = LEWire (Term, SignedRPC, ByteString)
deriving (Show, Generic)
instance Serialize LEWire
decodeLEWire' :: Maybe ReceivedAt -> KeySet -> LEWire -> Either String LogEntry
decodeLEWire' !ts !ks (LEWire !(t,cmd,hsh)) = case fromWire ts ks cmd of
Left !err -> Left $!err
Right !cmd' -> Right $! LogEntry t cmd' hsh
{-# INLINE decodeLEWire' #-}
-- TODO: check if `toSeqLogEntry ele = Seq.fromList <$> sequence ele` is fusable?
toSeqLogEntry :: [Either String LogEntry] -> Either String (Seq LogEntry)
toSeqLogEntry !ele = go ele mempty
where
go [] s = Right $! s
go (Right le:les) s = go les (s |> le)
go (Left err:_) _ = Left $! err
{-# INLINE toSeqLogEntry #-}
decodeLEWire :: Maybe ReceivedAt -> KeySet -> [LEWire] -> Either String (Seq LogEntry)
decodeLEWire !ts !ks !les = go les Seq.empty
where
go [] s = Right $! s
go (LEWire !(t,cmd,hsh):ls) v = case fromWire ts ks cmd of
Left err -> Left $! err
Right cmd' -> go ls (v |> LogEntry t cmd' hsh)
{-# INLINE decodeLEWire #-}
encodeLEWire :: NodeID -> PublicKey -> PrivateKey -> Seq LogEntry -> [LEWire]
encodeLEWire nid pubKey privKey les =
(\LogEntry{..} -> LEWire (_leTerm, toWire nid pubKey privKey _leCommand, _leHash)) <$> toList les
{-# INLINE encodeLEWire #-}
-- | Get last entry.
lastEntry :: Log a -> Maybe a
lastEntry (_ :> e) = Just e
lastEntry _ = Nothing
-- | Get largest index in ledger.
maxIndex :: Log a -> LogIndex
maxIndex = subtract 1 . entryCount
-- | Get count of entries in ledger.
entryCount :: Log a -> LogIndex
entryCount = fromIntegral . Seq.length . view lEntries
-- | Safe index
lookupEntry :: LogIndex -> Log LogEntry -> Maybe LogEntry
lookupEntry i = firstOf (ix i)
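-- A minimal sketch (not part of the original module): the last entry of a
-- non-empty ledger sits at 'maxIndex', so this lookup agrees with 'lastEntry'.
_lookupLast :: Log LogEntry -> Maybe LogEntry
_lookupLast es = lookupEntry (maxIndex es) es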
-- | take operation
takeEntries :: LogIndex -> Log a -> Seq a
takeEntries t = Seq.take (fromIntegral t) . _lEntries
-- | called by leaders sending appendEntries.
-- given a replica's nextIndex, get the index and term to send as
-- prevLog(Index/Term)
logInfoForNextIndex :: Maybe LogIndex -> Log LogEntry -> (LogIndex,Term)
logInfoForNextIndex mni es =
case mni of
Just ni -> let pli = ni - 1 in
case lookupEntry pli es of
Just LogEntry{..} -> (pli, _leTerm)
-- this shouldn't happen, because nextIndex - 1 should always be at
-- most our last entry
Nothing -> (startIndex, startTerm)
Nothing -> (startIndex, startTerm)
-- | Latest hash or empty
lastLogHash :: Log LogEntry -> ByteString
lastLogHash = maybe mempty _leHash . lastEntry
-- | Latest term on log or 'startTerm'
lastLogTerm :: Log LogEntry -> Term
lastLogTerm = maybe startTerm _leTerm . lastEntry
-- | get entries after index to beginning, with limit, for AppendEntries message.
-- TODO make monadic to get 8000 limit from config.
getEntriesAfter :: LogIndex -> Log a -> Seq a
getEntriesAfter pli = Seq.take 8000 . Seq.drop (fromIntegral $ pli + 1) . _lEntries
-- TODO: This uses the old decode encode trick and should be changed...
hashLogEntry :: Maybe LogEntry -> LogEntry -> LogEntry
hashLogEntry (Just LogEntry{ _leHash = prevHash }) le@LogEntry{..} =
le { _leHash = hash SHA256 (encode $ LEWire (_leTerm, getCmdSignedRPC le, prevHash))}
hashLogEntry Nothing le@LogEntry{..} =
le { _leHash = hash SHA256 (encode $ LEWire (_leTerm, getCmdSignedRPC le, mempty))}
getCmdSignedRPC :: LogEntry -> SignedRPC
getCmdSignedRPC LogEntry{ _leCommand = Command{ _cmdProvenance = ReceivedMsg{ _pDig = dig, _pOrig = bdy }}} =
SignedRPC dig bdy
getCmdSignedRPC LogEntry{ _leCommand = Command{ _cmdProvenance = NewMsg }} =
error "Invariant Failure: for a command to be in a log entry, it needs to have been received!"
-- | Recursively hash entries from index to tail.
updateLogHashesFromIndex :: LogIndex -> Log LogEntry -> Log LogEntry
updateLogHashesFromIndex i es =
case lookupEntry i es of
Just _ -> updateLogHashesFromIndex (succ i) $
over lEntries (Seq.adjust (hashLogEntry (lookupEntry (i - 1) es)) (fromIntegral i)) es
Nothing -> es
-- | Append/hash a single entry
appendLogEntry :: LogEntry -> Log LogEntry -> Log LogEntry
appendLogEntry le es =
case lastEntry es of
Just ple -> over lEntries (Seq.|> hashLogEntry (Just ple) le) es
_ -> Log $ Seq.singleton (hashLogEntry Nothing le)
-- | Add/hash entries at specified index.
addLogEntriesAt :: LogIndex -> Seq LogEntry -> Log LogEntry -> Log LogEntry
addLogEntriesAt pli newEs = updateLogHashesFromIndex (pli + 1) .
over lEntries ((Seq.>< newEs) . Seq.take (fromIntegral pli + 1))
|
buckie/juno
|
src/Juno/Types/Log.hs
|
bsd-3-clause
| 6,454
| 0
| 17
| 1,231
| 1,984
| 1,046
| 938
| 139
| 3
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module UI where
import Prelude hiding (log)
import Control.Applicative hiding (empty)
import Control.Monad
import Control.Monad.Trans
import System.IO
import Control.Monad (void)
import Data.Monoid ((<>))
import qualified Graphics.Vty as V
import Data.Text ( Text
, pack
)
import Data.Text.IO as TIO
import Data.Text.Zipper
import Lens.Micro
import Lens.Micro.TH
--import qualified Brick.Types as T
import qualified Brick.Main as M
import qualified Brick.Widgets.Center as C
import qualified Brick.Widgets.Border as B
import qualified Brick.Widgets.Edit as E
import qualified Brick.Types as T
import Brick.AttrMap
( attrMap
)
import Brick.Widgets.Core
(
viewport
, txt
, vBox
, hBox
, visible
, hLimit
)
import Data.Attoparsec.Text (parseOnly)
import MudIO
import InputParse
-- very simple UI layout
-- a top viewport for output and
-- a one row editor for input
data Name = Output -- Viewport to display the output
          | Input  -- Edit (just line) to input commands
          deriving (Ord, Show, Eq)
data UIState =
UIState { _cli :: E.Editor Text Name -- Editor widget to input commands
, _output :: Text -- Output received from the host
, _history :: [Text] -- History of commands inputed by user
, _cmd :: Text -- Current command, possibly incomplete
, _handle :: Handle -- handle to the server socket
, _log :: Handle -- handle to the log file
}
makeLenses ''UIState
drawUi :: UIState -> [T.Widget Name]
drawUi st = [ui]
where
ui = C.center $ B.border $ -- hLimit 80 $ -- $ vLimit 24 $
vBox [ top , B.hBorder , bottom ]
top = viewport Output T.Vertical $ txt $ st^.output
bottom = E.renderEditor True $ st^.cli --(E.editorText Input (txt . last) (Just 1) (st^.cmd))
outputScroll :: M.ViewportScroll Name
outputScroll = M.viewportScroll Output
appEvent :: UIState -> T.BrickEvent Name CustomEvent -> T.EventM Name (T.Next UIState)
appEvent st ev =
case ev of
T.VtyEvent (V.EvKey V.KEnter []) -> do
let input = head $ E.getEditContents (st^.cli)
let parsed = parseOnly parseInput $ input
case parsed of
Left _ -> do -- | input is incorrect
liftIO $ TIO.hPutStrLn (st^.handle) input
M.vScrollToEnd outputScroll
M.continue (st & output %~ (<> input <> "\n") & cli %~ E.applyEdit clearZipper)
Right inputVals -> do -- |inputvals has type [InputVal] as defined in InputParse.hs
forM_ inputVals runInput
M.vScrollToEnd outputScroll
M.continue ( st & output %~ ( <> (pack . show $ inputVals)) & cli %~ E.applyEdit clearZipper)
where
runInput input =
case input of
Cmd current ->
liftIO $ TIO.hPutStrLn (st^.handle) $ current
Script script ->
liftIO $ TIO.hPutStrLn (st^.handle) $ script
NumberedCmd (times, current) ->
liftIO $ TIO.hPutStrLn (st^.handle) $ foldl (<>) (pack "") $ replicate times $ current <> "\n"
NumberedScript (times, script) ->
liftIO $ TIO.hPutStrLn (st^.handle) $ foldl (<>) (pack "") $ replicate times $ script <> "\n"
T.VtyEvent (V.EvKey V.KEsc []) -- Esc pressed, quit the program
-> M.halt st
T.VtyEvent (V.EvKey (V.KFun 12) []) -> do -- F12 pressed, write to log file
liftIO $ TIO.hPutStrLn (st^.log) $ (st^.output)
M.continue (st & output .~ "")
T.VtyEvent x -- Let the default editor event handler take care of this
-> T.handleEventLensed st cli E.handleEditorEvent x >>= M.continue
T.AppEvent (ServerOutput t) -- To handle custome evenets; i.e. when outpus is received from server
-- This is a tricky function since it does several things at once;
                                    -- It updates the UIState with the output sent through the BChannel
-- and then scrolls the viewport before the application continues
-> M.vScrollToEnd outputScroll >> M.continue (st & output %~ ( <> t))
_ -> M.continue st
app :: M.App UIState CustomEvent Name
app =
M.App { M.appDraw = drawUi
, M.appStartEvent = return
, M.appHandleEvent = appEvent
, M.appAttrMap = const $ attrMap V.defAttr []
, M.appChooseCursor = M.showFirstCursor
}
|
xfchen/Monud
|
src/UI.hs
|
bsd-3-clause
| 4,769
| 0
| 25
| 1,491
| 1,193
| 651
| 542
| 97
| 10
|
{-# LANGUAGE OverloadedStrings #-}
module Network.Wai.Middleware.Headers
( cors
, addHeaders
) where
import Network.Wai
import Data.ByteString
import Network.HTTP.Types (Header)
cors :: Middleware
cors = addHeaders [("Access-Control-Allow-Origin", "*")]
addHeaders :: [Header] -> Middleware
addHeaders hs app env = do
res <- app env
return $ case res of
ResponseFile s rhs f mfp -> ResponseFile s (fix rhs) f mfp
ResponseBuilder s rhs b -> ResponseBuilder s (fix rhs) b
ResponseSource s rhs src -> ResponseSource s (fix rhs) src
where fix rhs = rhs ++ hs
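-- A minimal sketch (not part of the original module): stacking the CORS
-- middleware and an extra header onto an application. The header name and
-- value are placeholders.
_exampleMiddleware :: Middleware
_exampleMiddleware = cors . addHeaders [("X-Served-By", "example")]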
|
seanhess/wai-middleware-headers
|
Network/Wai/Middleware/Headers.hs
|
bsd-3-clause
| 604
| 0
| 13
| 135
| 201
| 105
| 96
| 17
| 3
|
{-# LANGUAGE MultiParamTypeClasses, FunctionalDependencies #-}
module Control.Concurrent.STM.TChan.Class
where
import qualified Control.Concurrent.STM.TChan as C
import Control.Concurrent.STM
-- | A class capturing Chan operations in STM.
class SplitTChan i o | i -> o, o -> i where
-- | Write a value to the in chan.
writeTChan :: i a -> a -> STM ()
-- | Read the next value from the out chan.
readTChan :: o a -> STM a
-- | Get the next value from the @TChan@ without removing it,
-- retrying if the channel is empty.
peekTChan :: o a -> STM a
-- | A version of 'peekTChan' which does not retry. Instead it
-- returns @Nothing@ if no value is available.
tryPeekTChan :: o a -> STM (Maybe a)
-- | A version of 'readTChan' which does not retry. Instead it
-- returns @Nothing@ if no value is available.
tryReadTChan :: o a -> STM (Maybe a)
-- |Returns 'True' if the supplied 'TChan' is empty.
isEmptyTChan :: o a -> STM Bool
-- | A class for 'SplitTChan' types that can be instantiated without programmer
-- input. /e.g./ the standard haskell @TChan@ is a member of this class, however
-- a bounded chan type that took an @Int@ to define the buffer size would not.
class (SplitTChan i o)=> NewSplitTChan i o where
newSplitTChan :: STM (i a, o a)
-- instances --
-- one-bounded chan:
instance SplitTChan TMVar TMVar where
readTChan = takeTMVar
writeTChan = putTMVar
peekTChan = readTMVar
tryReadTChan = tryTakeTMVar
tryPeekTChan = tryReadTMVar
isEmptyTChan = isEmptyTMVar
instance SplitTChan C.TChan C.TChan where
writeTChan = C.writeTChan
readTChan = C.readTChan
peekTChan = C.peekTChan
tryReadTChan = C.tryReadTChan
tryPeekTChan = C.tryPeekTChan
isEmptyTChan = C.isEmptyTChan
instance NewSplitTChan TMVar TMVar where
newSplitTChan = do v <- newEmptyTMVar
return (v,v)
instance NewSplitTChan TChan TChan where
newSplitTChan = do v <- C.newTChan
return (v,v)
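-- A minimal usage sketch (illustrative only). Client code would import this
-- module and hide the clashing names re-exported by "Control.Concurrent.STM",
-- since the class methods intentionally mirror them:
--
-- > roundTrip :: STM Int
-- > roundTrip = do
-- >     (i, o) <- newSplitTChan
-- >     writeTChan i (42 :: Int)
-- >     readTChan (o :: TChan Int)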
|
jberryman/chan-split
|
Control/Concurrent/STM/TChan/Class.hs
|
bsd-3-clause
| 2,044
| 0
| 10
| 491
| 379
| 209
| 170
| 33
| 0
|
module Proper.Clause(
Atom, atom, negation, lit, nLit, literal,
Clause, clause, concatClause,
assignTruthVal) where
import Data.Set as S
import Proper.Utils
data Atom a =
Lit a |
NLit a
deriving (Eq, Ord, Show)
assignTruthVal :: Atom l -> Bool
assignTruthVal (Lit _) = True
assignTruthVal (NLit _) = False
negation :: Atom a -> Atom a
negation (Lit n) = NLit n
negation (NLit n) = Lit n
literal :: Atom a -> Atom a
literal (Lit n) = Lit n
literal (NLit n) = Lit n
atom :: Atom a -> a
atom (Lit n) = n
atom (NLit n) = n
lit :: a -> Atom a
lit name = Lit name
nLit :: a -> Atom a
nLit name = NLit name
type Clause c = Set (Atom c)
concatClause :: (Ord c) => Clause c -> Clause c -> Clause c
concatClause c1 c2 = S.union c1 c2
clause :: (Ord a) => [Atom a] -> Clause a
clause atoms = S.fromList atoms
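-- A minimal sketch (not part of the original module): the clause (p OR NOT q)
-- built from literals.
_exampleClause :: Clause String
_exampleClause = clause [lit "p", nLit "q"]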
|
dillonhuff/Proper
|
src/Proper/Clause.hs
|
bsd-3-clause
| 762
| 0
| 8
| 171
| 372
| 192
| 180
| 28
| 1
|
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveTraversable #-}
module ZM.Type.List
( List(..)
)
where
import Control.DeepSeq
import Flat
import Data.Model
-- |A list
data List a = Nil
| Cons a (List a)
deriving (Eq, Ord, Show, NFData, Generic, Functor, Foldable, Traversable, Flat)
instance Model a => Model (List a)
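-- A minimal sketch (not part of the original module): building a 'List'
-- from an ordinary Haskell list.
_fromHaskellList :: [a] -> List a
_fromHaskellList = foldr Cons Nil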
|
tittoassini/typed
|
src/ZM/Type/List.hs
|
bsd-3-clause
| 481
| 0
| 8
| 128
| 115
| 66
| 49
| 14
| 0
|
--
-- Pre-processing helpers for the AST to extract all of the channels associated
-- with a process.
--
-- (c) 2014 Galois, Inc.
--
module Tower.AADL.AST.Common
( ThdIds
, getTxThds
, getRxThds
, filterEndpoints
, threadsChannels
, extractTypes
, allConnections
, connectedThreadsSize
, emptyConnections
, towerTime
) where
import Prelude ()
import Prelude.Compat hiding (id)
import Tower.AADL.AST
import qualified Ivory.Language.Syntax.Type as I
import qualified Data.Map.Strict as M
import qualified Data.Set as S
import Data.List (foldl')
--------------------------------------------------------------------------------
type ThreadChans = (LocalId, ChanLabel)
-- Used in system composition: the sending and receiving threads connected on a channel.
data ThdIds = ThdIds
{ chanTxThds :: S.Set ThreadChans
, chanRxThds :: S.Set ThreadChans
} deriving (Show, Eq)
instance Monoid ThdIds where
mempty = ThdIds mempty mempty
c0 `mappend` c1 =
ThdIds (chanTxThds c0 `mappend` chanTxThds c1)
(chanRxThds c0 `mappend` chanRxThds c1)
getTxThds :: ThdIds -> [ThreadChans]
getTxThds = S.toList . chanTxThds
getRxThds :: ThdIds -> [ThreadChans]
getRxThds = S.toList . chanRxThds
-- A mapping from channels to the sending and receiving threads on the channel.
type Connections = M.Map ChanId ThdIds
-- Interface below hides the data structure.
allConnections :: Connections -> [ThdIds]
allConnections = M.elems
connectedThreadsSize :: ThdIds -> Int
connectedThreadsSize thds =
S.size (chanTxThds thds) * S.size (chanRxThds thds)
emptyConnections :: Connections -> Bool
emptyConnections = M.null
-- | Remove connections that don't have both endpoints.
filterEndpoints :: Connections -> Connections
filterEndpoints = M.filter go
where
go c = not (S.null (chanTxThds c) || S.null (chanRxThds c))
-- Given a list of pairs of AADL threads and local variables, create their
-- connections.
threadsChannels :: [(Thread, LocalId)] -> Connections
threadsChannels ls = foldl' go M.empty ls
where
go :: Connections -> (Thread, LocalId) -> Connections
go cs (th, id) =
(M.unionWith mappend) (threadChannels th id) cs
threadChannels :: Thread -> LocalId -> Connections
threadChannels th id = foldl' go M.empty (getThreadEndpoints th)
where
go :: Connections -> Endpoint -> Connections
go cs = insertConnectionId id cs
data Endpoint =
InputEp Input
-- | SignalEp SignalInfo
| OutputEp Output
deriving (Show, Eq)
endPointId :: Endpoint -> ChanId
endPointId ep = case ep of
InputEp rx -> inputId rx
OutputEp tx -> outputId tx
-- SignalEp s -> SignalChanId $ fromIntegral (signalInfoNumber s)
newChan :: LocalId -> Endpoint -> ThdIds
newChan l ep =
case ep of
InputEp c -> ThdIds S.empty (S.singleton (l, inputLabel c))
OutputEp c -> ThdIds (S.singleton (l, outputLabel c)) S.empty
-- TODO JED: This is probably wrong
-- SignalEp c -> ThdIds S.empty (S.singleton (l, signalInfoName c))
-- Add the id to the connections map, creating a new channel if needed.
insertConnectionId :: LocalId -> Connections -> Endpoint -> Connections
insertConnectionId l cs ep =
M.insertWith mappend (endPointId ep) (newChan l ep) cs
getThreadEndpoints :: Thread -> [Endpoint]
getThreadEndpoints t =
concatMap go (threadFeatures t)
where
go f =
case f of
InputFeature rx -> [InputEp rx]
OutputFeature tx -> [OutputEp tx]
SignalFeature _ -> []
-- Extract a unique instance of the channel types defined in the system.
extractTypes :: System -> [I.Type]
extractTypes sys = S.toList $ S.map getTy (S.fromList fs)
where
fs :: [Feature]
fs = concatMap threadFeatures
$ concatMap processComponents
$ systemComponents sys
getTy f = case f of
InputFeature rx -> inputType rx
OutputFeature tx -> outputType tx
-- TODO JED: Wouldn't this be so much nicer if the Time module told us what
-- type to put here?
SignalFeature _ -> towerTime
towerTime :: I.Type
towerTime = I.TyInt I.Int64
|
GaloisInc/tower
|
tower-aadl/src/Tower/AADL/AST/Common.hs
|
bsd-3-clause
| 4,064
| 0
| 12
| 833
| 1,040
| 560
| 480
| 88
| 3
|
module Data.Accessor.Monad.Trans.StrictState where
import qualified Data.Accessor.Basic as Accessor
import qualified Control.Monad.Trans.State.Strict as State
import qualified Control.Monad.Trans.Class as Trans
import Control.Monad.Trans.State.Strict (State, runState, StateT(runStateT), )
set :: Monad m => Accessor.T r a -> a -> StateT r m ()
set f x = State.modify (Accessor.set f x)
get :: Monad m => Accessor.T r a -> StateT r m a
get f = State.gets (Accessor.get f)
modify :: Monad m => Accessor.T r a -> (a -> a) -> StateT r m ()
modify f g = State.modify (Accessor.modify f g)
getAndModify :: Monad m => Accessor.T r a -> (a -> a) -> StateT r m a
getAndModify f g =
do x <- get f
modify f g
return x
infix 1 %=, %:
(%=) :: Monad m => Accessor.T r a -> a -> StateT r m ()
(%=) = set
(%:) :: Monad m => Accessor.T r a -> (a -> a) -> StateT r m ()
(%:) = modify
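-- A minimal sketch (not part of the original module), assuming
-- 'Accessor.fromSetGet' from Data.Accessor.Basic is available: an accessor
-- for the first component of a pair, driven through the wrappers above.
exampleFirst :: Accessor.T (Int, Bool) Int
exampleFirst = Accessor.fromSetGet (\a (_, b) -> (a, b)) fst
exampleStep :: Monad m => StateT (Int, Bool) m Int
exampleStep = do
    exampleFirst %: (+ 1)
    get exampleFirst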
lift :: Monad m => Accessor.T r s -> State s a -> StateT r m a
lift f m =
do s0 <- get f
let (a,s1) = runState m s0
set f s1
return a
liftT :: (Monad m) =>
Accessor.T r s -> StateT s m a -> StateT r m a
liftT f m =
do s0 <- get f
(a,s1) <- Trans.lift $ runStateT m s0
set f s1
return a
|
christiaanb/clash
|
clash/Data/Accessor/Monad/Trans/StrictState.hs
|
bsd-3-clause
| 1,225
| 0
| 10
| 312
| 618
| 314
| 304
| 34
| 1
|
{-# LANGUAGE RecordWildCards #-}
module ProtoMonad where
import ProtoTypes
import ProtoTypes(Channel)
import VChanUtil
import Prelude hiding (lookup)
import Data.Map hiding (foldl)
import qualified Control.Monad.Trans.Reader as T
import Control.Monad.Error --import Control.Monad.Except
import qualified Control.Monad.Trans.Error as ET
import Control.Monad
type Proto = T.ReaderT ProtoEnv (ErrorT String IO)
runProto :: (Proto a) -> ProtoEnv -> IO (Either String a)
runProto proto env = ET.runErrorT $ T.runReaderT proto env
runWithLinks :: [(Int, Int)] -> (Proto a) -> Proto a
runWithLinks links proto = T.local (linkEnv links) proto
data ProtoEnv = ProtoEnv {
me :: EntityId,
myPriKey :: PrivateKey,
entities :: Map EntityId EntityInfo,
publicKeys :: Map EntityId PublicKey,
--privateKeys :: Map Int PrivateKey,
packScheme :: Int,
encScheme :: Int,
signScheme :: Int,
protoId :: Int
}
protoIs :: Proto Int
protoIs = do
id <- T.asks protoId
return id
getEntityChannel :: EntityId -> Proto Channel
getEntityChannel id = do
eInfo <- getEntityInfo id
return $ chan eInfo
getEntityInfo :: EntityId -> Proto EntityInfo
getEntityInfo i = do
infos <- T.asks entities
let maybeInfo = lookup i infos
case maybeInfo of
Nothing -> throwError ("No known EntityInfo for Entity with id: "
++ (show i) )
Just info -> return info
getEntityPubKey :: EntityId -> Proto PublicKey
getEntityPubKey i = do
pubKeys <- T.asks publicKeys
let maybePubKey = lookup i pubKeys
case maybePubKey of
Nothing -> throwError ("No known PublicKey for Entity with id: "
++ (show i) )
Just pubKey -> return pubKey
linkEnv :: [(Int, Int)] -> ProtoEnv -> {-Proto-} ProtoEnv
linkEnv links oldEnv= let
--oldEnv <- T.ask
newEnv = foldl linkEnv' oldEnv links in
newEnv
--return newEnv
linkEnv' :: ProtoEnv -> (Int, Int) -> {-Proto-} ProtoEnv
linkEnv' ProtoEnv{..} (targetId, currentId) =
let
maybeInfo = lookup currentId entities
eInfo = case maybeInfo of
Nothing -> error $ "No entity with id: " ++ (show currentId) ++ " defined in current environment"
Just e -> e{-<- case maybeInfo of
Nothing -> throwError $ "No entity with id: " ++ (show currentId) ++ " defined in current environment"
Just e -> return e -}
maybePubKey = lookup currentId publicKeys
pubKey = case maybePubKey of
Nothing -> error $ "No pubKey for entity: " ++ (show currentId) ++ "defined in current environment"
Just p -> p
newEntities = insert targetId eInfo entities
newPubKeys = insert targetId pubKey publicKeys in
ProtoEnv{entities = newEntities, publicKeys = newPubKeys, ..}
|
armoredsoftware/protocol
|
tpm/mainline/protoMonad/ProtoMonad.hs
|
bsd-3-clause
| 2,724
| 0
| 15
| 604
| 759
| 400
| 359
| 67
| 3
|
{-# LANGUAGE RecordWildCards #-}
module Main where
import Control.Monad (when)
import Data.Char (toLower)
import Data.Default.Class (Default (..))
import Data.Maybe (fromJust)
import System.Console.GetOpt (getOpt, usageInfo, ArgOrder (..), OptDescr (..), ArgDescr (..))
import System.Directory (getTemporaryDirectory, getCurrentDirectory, setCurrentDirectory, makeAbsolute, copyFile)
import System.Environment (getArgs)
import System.Exit (exitFailure, exitSuccess)
import qualified System.IO.UTF8 as IO
import System.IO (hClose)
import System.IO.Temp (openTempFile)
import System.Process (callProcess)
import Text.Parakeet
data ExtraOptions = ExtraOptions {
outputPath :: Maybe FilePath
, showHelp :: Bool
, showTeXTemplate :: Bool
, showHTMLTemplate :: Bool
}
firstM :: Monad m => (a -> m b) -> (a, c) -> m (b, c)
firstM f (a, c) = f a >>= \b -> return (b, c)
instance Default Options where
def = Options { inputFileJ = ([], [])
, inputFileR = ([], [])
, templateFile = Nothing
, furigana = InHiragana
, noMeta = False
, keepLV = False
}
instance Default ExtraOptions where
def = ExtraOptions { outputPath = Nothing
, showHelp = False
, showTeXTemplate = False
, showHTMLTemplate = False
}
isEmptyFile :: File -> Bool
isEmptyFile (f, _) = null f
initFile :: FilePath -> IO File
initFile f = IO.readFile f >>= \c -> return (f, c)
bindInputFileJ a = firstM $ \o -> do
f <- initFile a
return o { inputFileJ = f }
bindInputFileR a = firstM $ \o -> do
f <- initFile a
return o { inputFileR = f }
bindTemplateFile a = firstM $ \o -> do
f <- initFile a
return o { templateFile = Just f }
bindOutputPath a (o, eo) = return (o, eo { outputPath = Just a })
bindFurigana a = firstM $ \o -> do
f <- format
return $ o { furigana = f }
where format = case map toLower a of
"hiragana" -> return InHiragana
"katakana" -> return InKatakana
_ -> die "Bad furigana format."
setNoMeta = firstM $ \o -> return o { noMeta = True }
setKeepLV = firstM $ \o -> return o { keepLV = True }
setShowTeXTemplate (o, eo) = return (o, eo { showTeXTemplate = True })
setShowHTMLTemplate (o, eo) = return (o, eo { showHTMLTemplate = True })
setShowHelp (o, eo) = return (o, eo { showHelp = True })
options :: [OptDescr ((Options, ExtraOptions) -> IO (Options, ExtraOptions))]
options = [ Option ['j'] ["japanese"] (ReqArg bindInputFileJ "FILE") "Japanese input file"
, Option ['r'] ["romaji"] (ReqArg bindInputFileR "FILE") "Romaji input file"
, Option ['t'] ["template"] (ReqArg bindTemplateFile "FILE") "Template file"
, Option ['o'] ["output"] (ReqArg bindOutputPath "FILE") "Output file path"
, Option [ ] ["template-tex"] (NoArg setShowTeXTemplate ) "Show TeX template"
, Option [ ] ["template-html"] (NoArg setShowHTMLTemplate ) "Show HTML template"
, Option [ ] ["furigana"] (ReqArg bindFurigana "FORMAT") "Furigana format: hiragana, katakana"
, Option [ ] ["no-meta"] (NoArg setNoMeta ) "Ignore title and author metadata"
, Option [ ] ["keep-lv"] (NoArg setKeepLV ) "Keep long vowel macron in output"
, Option ['h'] ["help"] (NoArg setShowHelp ) "Show help"
]
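-- A hypothetical invocation sketch (assuming the executable is installed as
-- "parakeet"; file names are placeholders):
--
-- > parakeet -j lyrics.j -r lyrics.r --furigana katakana -o lyrics.pdf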
die :: String -> IO a
die e = do
putStrLn $ e ++ "\n" ++ usageInfo "Usage: " options
exitFailure
checkFile :: Options -> IO Options
checkFile opts = do
let (jName, rName) = (fst $ inputFileJ opts, fst $ inputFileR opts)
when (null jName || null rName) $ die "Missing inputs."
return opts
runOpts :: [String] -> IO (Options, ExtraOptions)
runOpts argv = case getOpt Permute options argv of
(a, _, []) -> foldl (>>=) (return (def, def)) a
(_, _, err) -> die $ concat err
xelatex :: FilePath -> String -> IO ()
xelatex filePath buffer = do
tmpDir <- getTemporaryDirectory
curDir <- getCurrentDirectory
(tmp, h) <- openTempFile tmpDir "parakeet.tex"
hClose h
setCurrentDirectory tmpDir
IO.writeFile tmp buffer
callProcess "xelatex" [tmp]
setCurrentDirectory curDir
let sourcePath = take (length tmp - 3) tmp ++ "pdf"
targetPath <- makeAbsolute filePath
putStrLn $ "Copying " ++ sourcePath ++ " to " ++ targetPath ++ "."
copyFile sourcePath targetPath
main :: IO ()
main = do
(opts, ExtraOptions {..}) <- runOpts =<< getArgs
when showHelp $ putStrLn (usageInfo "Usage: " options) >> exitSuccess
when showTeXTemplate $ putStr templateTeX >> exitSuccess
when showHTMLTemplate $ putStr templateHTML >> exitSuccess
checkFile opts
let ext = map toLower . extName <$> outputPath
let (format, io) = case ext of
Nothing -> (TeXFormat, putStr)
Just "pdf" -> (TeXFormat, xelatex (fromJust outputPath))
Just "htm" -> (HTMLFormat, IO.writeFile (fromJust outputPath))
Just "html" -> (HTMLFormat, IO.writeFile (fromJust outputPath))
_ -> (TeXFormat, IO.writeFile (fromJust outputPath))
case parakeet opts format of
Left err -> putStrLn (show err)
Right r -> io r
where
extName = reverse . takeWhile ((/=) '.') . reverse
|
foreverbell/parakeet
|
exe/Main.hs
|
mit
| 5,578
| 0
| 16
| 1,605
| 1,863
| 984
| 879
| 116
| 6
|
{-|
Module : Scheduling
Description : Provides algorithms to apply second-order rules.
-}
module Rewriting.DPO.TypedGraphRule.Scheduling
( asLongAsPossible
, oneStep
, specific
) where
import Data.Maybe (fromMaybe, isNothing)
import Abstract.Rewriting.DPO (MorphismsConfig,findApplicableMatches,rewrite)
import Category.TypedGraphRule (RuleMorphism)
import Rewriting.DPO.TypedGraph (TypedGraphRule)
import Rewriting.DPO.TypedGraphRule (SndOrderRule)
-- "second-order rule" is synonymous with "2-rule".
-- "first-order rule" is synonymous with "rule".
type NamedRule a b = (String,TypedGraphRule a b)
type Named2Rule a b = (String,SndOrderRule a b)
-- | Apply "AsLongAsPossible" to any second-order rule.
-- It is limited to `n` rewritings.
asLongAsPossible :: MorphismsConfig (RuleMorphism a b) -> [Named2Rule a b] -> [NamedRule a b] -> Int -> (String,[NamedRule a b])
asLongAsPossible c s f n = asLongAsPossible_ c s f ("limit of rewritings: " ++ show n) n
-- Each iteration applies a rewrite, removes the old rule, and puts the new rule at the end of the rules list.
asLongAsPossible_ :: MorphismsConfig (RuleMorphism a b) -> [Named2Rule a b] -> [NamedRule a b] -> String -> Int -> (String,[NamedRule a b])
asLongAsPossible_ _ _ fstRules log 0 = (log ++ "\n limit of rewritings exceeded.",fstRules)
asLongAsPossible_ conf sndRules fstRules log n =
if isNothing matches
then (log++"\nno more matches.",fstRules)
else asLongAsPossible_ conf sndRules newFstOrder newLog (n-1)
where
newLog = log ++ "\n " ++ r1Name ++ " --" ++ fst r2 ++ "--> " ++ newName ++ "."
matches = getOneMatch conf sndRules [] fstRules
(Just (m,r2,r1Name,oldRules)) = matches
newName = r1Name ++ "_" ++ fst r2
newRule = rewrite m (snd r2)
newFstOrder = oldRules ++ [(newName,newRule)]
-- Returns just one match (if it exists) between [2-rules] and [rules].
-- Also, returns the list of rules without the matched one.
getOneMatch :: MorphismsConfig (RuleMorphism a b) -> [Named2Rule a b] -> [NamedRule a b] -> [NamedRule a b] -> Maybe (RuleMorphism a b, Named2Rule a b, String, [NamedRule a b])
getOneMatch _ _ _ [] = Nothing
getOneMatch conf sndRules rs1 (r:rs2) =
let matches = getOneMatch_ conf r sndRules
(Just (m,r2,r1Name)) = matches
in if isNothing matches then getOneMatch conf sndRules (r:rs1) rs2 else Just (m,r2,r1Name,rs1 ++ rs2)
getOneMatch_ :: MorphismsConfig (RuleMorphism a b) -> NamedRule a b -> [Named2Rule a b] -> Maybe (RuleMorphism a b, Named2Rule a b, String)
getOneMatch_ _ _ [] = Nothing
getOneMatch_ conf rule (r2:r2s) =
let matches = findApplicableMatches conf (snd r2) (snd rule)
in if Prelude.null matches then getOneMatch_ conf rule r2s else Just (head matches, r2, fst rule)
-- | Apply "oneStep".
-- All matches from 2-rules to rules are applied once.
oneStep :: MorphismsConfig (RuleMorphism a b) -> [Named2Rule a b] -> [NamedRule a b] -> (String,[NamedRule a b])
oneStep conf sndRules fstRules = (logs,rules)
where
info = map (oneStep_ conf sndRules) fstRules
logs = concatMap fst info
rules = concatMap snd info
oneStep_ :: MorphismsConfig (RuleMorphism a b) -> [Named2Rule a b] -> NamedRule a b -> (String,[NamedRule a b])
oneStep_ conf sndRules rule = (logs,rules)
where
info = map (applyAllMatches conf rule) sndRules
logs = concatMap fst info
rules = concatMap snd info
applyAllMatches :: MorphismsConfig (RuleMorphism a b) -> NamedRule a b -> Named2Rule a b -> (String,[NamedRule a b])
applyAllMatches conf r1 r2 = (log,namedRules)
where
matches = findApplicableMatches conf (snd r2) (snd r1)
newRules = map (`rewrite` snd r2) matches
newRulesId = zip newRules ([0..]::[Int])
namedRules = map (\(r,id) -> (fst r1 ++"_"++ fst r2 ++"_"++show id,r)) newRulesId
log = concatMap (\(name,_) -> "\n "++ fst r1 ++" --"++ fst r2 ++"--> "++name++".") namedRules
-- | Apply "Specific" from a 2-rule to a rule.
-- Receives a 2-rule name and a rule name, and rewrites all matches between them.
specific :: MorphismsConfig (RuleMorphism a b) -> [Named2Rule a b] -> [NamedRule a b] -> String -> String -> (String,[NamedRule a b])
specific conf sndRules fstRules name2Rule nameRule = applyAllMatches conf (nameRule,rule1) (name2Rule,rule2)
where
rule1 = getRule fstRules nameRule
rule2 = getRule sndRules name2Rule
getRule :: [(String, a)] -> String -> a
getRule rules name = fromMaybe (error ("specific: "++name++" rule not found.")) (lookup name rules)
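-- A hypothetical usage sketch (the names below are placeholders, not part of
-- this module): apply every 2-rule to every rule as long as possible, bounded
-- by 50 rewritings, collecting a log of the derivation.
--
-- > (logMsg, evolvedRules) = asLongAsPossible morphConf named2Rules namedRules 50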
|
rodrigo-machado/verigraph
|
src/library/Rewriting/DPO/TypedGraphRule/Scheduling.hs
|
gpl-3.0
| 4,551
| 0
| 16
| 887
| 1,538
| 817
| 721
| 59
| 2
|