code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
--------------------------------------------------------------------
-- |
-- Module : Codec.MIME.Base64
-- Copyright : (c) 2006-2009, Galois, Inc.
-- License : BSD3
--
-- Maintainer: Sigbjorn Finne <sigbjorn.finne@gmail.com>
-- Stability : provisional
-- Portability: portable
--
--
-- Base64 decoding and encoding routines, multiple entry
-- points for either depending on use and level of control
-- wanted over the encoded output (and its input form on the
-- decoding side.)
--
--------------------------------------------------------------------
module Codec.MIME.Base64
( encodeRaw -- :: Bool -> String -> [Word8]
, encodeRawString -- :: Bool -> String -> String
, encodeRawPrim -- :: Bool -> Char -> Char -> [Word8] -> String
, formatOutput -- :: Int -> Maybe String -> String -> String
, decode -- :: String -> [Word8]
, decodeToString -- :: String -> String
, decodePrim -- :: Char -> Char -> String -> [Word8]
) where
import Data.Bits
import Data.Char
import Data.Word
import Data.Maybe
-- | Base64-encode a 'String' whose characters are octet values,
-- emitting trailing @=@ padding when the 'Bool' is 'True'.
encodeRawString :: Bool -> String -> String
encodeRawString trail = encodeRaw trail . map (fromIntegral . ord)
-- | @formatOutput n mbLT str@ formats @str@, splitting it
-- into lines of length @n@. The optional value lets you control what
-- line terminator sequence to use; the default is CRLF (as per MIME.)
--
-- Calls 'error' when @n@ is not positive (a zero length would never
-- make progress).
formatOutput :: Int -> Maybe String -> String -> String
formatOutput n mbTerm str
 | n <= 0 = error ("Codec.MIME.Base64.formatOutput: non-positive line length " ++ show n)
 | otherwise = chop n str
 where
  crlf :: String
  crlf = fromMaybe "\r\n" mbTerm
  -- Split into @i@-sized pieces joined by the terminator; the final
  -- piece carries no trailing terminator.
  chop _ "" = ""
  chop i xs =
    case splitAt i xs of
      (as,"") -> as
      (as,bs) -> as ++ crlf ++ chop i bs
-- | Base64-encode a sequence of octets using the standard alphabet
-- (@+@ and @/@ for indices 62 and 63).
encodeRaw :: Bool -> [Word8] -> String
encodeRaw trail = encodeRawPrim trail '+' '/'
-- | @encodeRawPrim@ lets you control which characters stand for the
-- two non-alphanumeric alphabet positions, indices 62 and 63.
-- (The base64url variation of RFC 4648 uses @-@ and @_@, for
-- instance.)  No support for mapping these to multiple characters in
-- the output though.
encodeRawPrim :: Bool -> Char -> Char -> [Word8] -> String
encodeRawPrim trail ch62 ch63 = encoder
 where
  -- Maps a 6-bit group to its output character.
  toChar = fromB64 ch62 ch63
  -- Append the padding string only when a trailer was requested.
  pad body padding
   | trail = body ++ padding
   | otherwise = body
  -- Consume three octets at a time; a short final group is padded
  -- with zero bits and (optionally) '=' characters.
  encoder [] = []
  encoder [x] = pad (take 2 (encode3 toChar x 0 0 "")) "=="
  encoder [x,y] = pad (take 3 (encode3 toChar x y 0 "")) "="
  encoder (x:y:z:ws) = encode3 toChar x y z (encoder ws)
-- | Encode three octets as four base64 characters, prepending them to
-- the accumulator @rs@.  @f@ maps each 6-bit group to its alphabet
-- character.
encode3 :: (Word8 -> Char) -> Word8 -> Word8 -> Word8 -> String -> String
encode3 f a b c rs = f g1 : f g2 : f g3 : f g4 : rs
 where
  -- The three octets packed big-endian into the low 24 bits.
  w24 :: Word32
  w24 = fromIntegral a `shiftL` 16
      + fromIntegral b `shiftL` 8
      + fromIntegral c
  g1 = low6 (w24 `shiftR` 18)
  g2 = low6 (w24 `shiftR` 12)
  g3 = low6 (w24 `shiftR` 6)
  g4 = low6 w24
-- | Decode a base64 string, returning the octets as a 'String' of
-- characters with the corresponding code points.
decodeToString :: String -> String
decodeToString = map (chr . fromIntegral) . decode

-- | Decode a base64 string that uses the standard alphabet
-- (@+@ and @/@ for indices 62 and 63).
decode :: String -> [Word8]
decode = decodePrim '+' '/'
-- | Decode a base64 string using the given characters for alphabet
-- indices 62 and 63.  Characters outside the alphabet are skipped;
-- decoding stops at the first @=@ (padding) character.
decodePrim :: Char -> Char -> String -> [Word8]
decodePrim ch62 ch63 =
    decoder . mapMaybe (toB64 ch62 ch63) . takeWhile (/= '=')
 where
  -- Four 6-bit groups become three octets; shorter final groups
  -- contribute proportionally fewer octets.
  decoder :: [Word8] -> [Word8]
  decoder [] = []
  decoder [x] = take 1 (decode4 x 0 0 0 [])
  decoder [x,y] = take 1 (decode4 x y 0 0 []) -- upper 4 bits of second val are known to be 0.
  decoder [x,y,z] = take 2 (decode4 x y z 0 [])
  decoder (x:y:z:w:rest) = decode4 x y z w (decoder rest)
-- | Combine four 6-bit groups into three octets, prepending them to
-- the accumulator @rs@.
decode4 :: Word8 -> Word8 -> Word8 -> Word8 -> [Word8] -> [Word8]
decode4 a b c d rs = b1 : b2 : b3 : rs
 where
  -- The four groups packed big-endian into the low 24 bits.
  w24 :: Word32
  w24 = fromIntegral a `shiftL` 18 .|.
        fromIntegral b `shiftL` 12 .|.
        fromIntegral c `shiftL` 6 .|.
        fromIntegral d
  b1 = lowByte (w24 `shiftR` 16)
  b2 = lowByte (w24 `shiftR` 8)
  b3 = lowByte w24
-- | Map a base64 alphabet character to its 6-bit value; @a@ and @b@
-- are the characters used for indices 62 and 63 respectively.
-- Returns 'Nothing' for characters outside the alphabet.
toB64 :: Char -> Char -> Char -> Maybe Word8
toB64 a b ch
  | within 'A' 'Z' = offset 0 'A'
  | within 'a' 'z' = offset 26 'a'
  | within '0' '9' = offset 52 '0'
  | ch == a = Just 62
  | ch == b = Just 63
  | otherwise = Nothing
 where
  within lo hi = lo <= ch && ch <= hi
  offset base lo = Just (base + fromIntegral (ord ch - ord lo))
-- | Map a 6-bit value (0..63) to its base64 alphabet character, with
-- @ch62@ and @ch63@ standing in for indices 62 and 63.  Calls 'error'
-- on values outside the 0..63 range.
fromB64 :: Char -> Char -> Word8 -> Char
fromB64 ch62 ch63 x
  | x <= 25 = offset 'A' 0
  | x <= 51 = offset 'a' 26
  | x <= 61 = offset '0' 52
  | x == 62 = ch62
  | x == 63 = ch63
  | otherwise = error ("fromB64: index out of range " ++ show x)
 where
  offset c base = chr (ord c + fromIntegral x - base)
-- | The low 6 bits of a word, as a 'Word8'.
low6 :: Word32 -> Word8
low6 = fromIntegral . (.&. 0x3f)

-- | The low 8 bits of a word, as a 'Word8'.
lowByte :: Word32 -> Word8
lowByte = fromIntegral . (.&. 0xff)
| GaloisInc/mime | Codec/MIME/Base64.hs | bsd-3-clause | 4,735 | 0 | 14 | 1,202 | 1,656 | 857 | 799 | 98 | 4 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE ParallelListComp #-}
{-# OPTIONS_GHC -funbox-strict-fields #-}
-- | The adaptive radix tree contains four different types of internal
-- nodes, depending on how many children they store:
--
-- * 1–4 children are stored as an array of up to 4 keys and a
-- corresponding array of up to 4 values
--
-- * 5–16 children are stored as a sorted array of up to 16 keys
-- and their corresponding values
--
-- * 17–48 children are stored as a 256-element byte array which
-- stores indices into an array of up to 48 children
--
-- * 49–256 children are stored directly in a 256 element array
--
-- This module contains code for working with all four of these as a
-- single 'Children' type. Each kind of node also stores the number of
-- children it currently contains which is used to grow or shrink
-- nodes when appropriate.
module Data.ART.Children where
import Control.Applicative ((<$>))
import Control.DeepSeq (NFData (..), deepseq)
import Control.Monad (guard, join)
import Data.Array (Array)
import Data.Array.IArray ((!), (//))
import qualified Data.Array.IArray as Array
import Data.Array.Unboxed (UArray)
import qualified Data.Array.Unboxed as UArray
import Data.Function (on)
import Data.Word (Word8)
import GHC.Generics (Generic)
import Data.ART.Key (Chunk)
import qualified Data.ART.Internal.Array as Array
import Data.ART.Internal.SortingNetwork (sort4)
-- TODO: Make the order of arguments in this API consistent!
-- | The number of children a node currently holds.  A 'Word8' is
-- sufficient here; assumes a full N256 is never counted past 255 --
-- TODO confirm how a 256th child is represented.
type Size = Word8
-- TODO: Figure out how to use unboxed vectors here!
-- | Stores elements using the appropriate inner node type.
data Children a =
  -- | Stores 1–4 children as arrays of up to 4 keys and values.
  N4 !Size !(UArray Chunk Chunk) !(Array Chunk a)
  -- | Stores 5–16 children as arrays of up to 16 keys and values
  -- (kept sorted, so lookups can use binary search).
  | N16 !Size !(UArray Chunk Chunk) !(Array Chunk a)
  -- | Stores 17–48 children as a chunk-indexed 256-element array of
  -- keys into an array of 48 children.
  | N48 !Size !(UArray Chunk Chunk) !(Array Chunk a)
  -- | Stores 49–256 children in a chunk-indexed array of 256 elements.
  | N256 !Size !(Array Chunk (Maybe a))
  deriving (Show, Eq)
-- TODO: Figure out how to do this while preserving lazy values!
-- Smart constructors that force the values array before building the
-- node, taking care of strictness.
n4, n16, n48 :: Size -> Array.Keys -> Array.Values a -> Children a
n4  sz ks vs = vs `Array.seqValues` N4  sz ks vs
n16 sz ks vs = vs `Array.seqValues` N16 sz ks vs
n48 sz ks vs = vs `Array.seqValues` N48 sz ks vs

-- Smart constructor for the largest node kind; no extra forcing is
-- applied here.
n256 :: Size -> Array.Values (Maybe a) -> Children a
n256 sz vs = N256 sz vs
-- | Fully evaluating a node means deep-forcing its stored values
-- (keys are unboxed and already strict).
instance NFData a => NFData (Children a) where
  rnf node = case node of
    N4 _ _ vs   -> vs `deepseq` ()
    N16 _ _ vs  -> vs `deepseq` ()
    N48 _ _ vs  -> vs `deepseq` ()
    N256 _ vs   -> vs `deepseq` ()
-- | How many children the current node stores.
size :: Children a -> Size
size node = case node of
  N4 n _ _  -> n
  N16 n _ _ -> n
  N48 n _ _ -> n
  N256 n _  -> n
-- | Gets the element for the given byte if it exists. Returns
-- 'Nothing' otherwise.  In an N48 node a slot value of 48 or more
-- marks an absent child.
get :: Children a -> Chunk -> Maybe a
get (N4 _ keys values) chunk = (values !) <$> Array.findIndex chunk keys
get (N16 _ keys values) chunk = (values !) <$> Array.binarySearch chunk keys
get (N48 _ keys children) chunk
  | slot >= 48 = Nothing
  | otherwise  = Just $! children ! slot
  where slot = keys ! chunk
get (N256 _ children) chunk = children ! chunk
{-# INLINE get #-}
-- | Given that the given key already has a value, update that value
-- with given function. If the value is not in the node, the node is
-- returned unchanged.
update :: (a -> a) -> Chunk -> Children a -> Children a
update f chunk (N4 size keys values) = n4 size keys newValues
  where newValues =
          {-# SCC "update.newValues.N4" #-}
          case Array.findIndex chunk keys of
            Just i  -> values // [(i, f $! values ! i)]
            Nothing -> values
update f chunk (N16 size keys values) = n16 size keys newValues
  where newValues =
          {-# SCC "update.newValues.N16" #-}
          case Array.binarySearch chunk keys of
            Just i  -> values // [(i, f $! values ! i)]
            Nothing -> values
-- An N48 slot value >= 48 marks an absent child (the same convention
-- 'get' uses).  The previous guard here tested @keys ! chunk < 0@,
-- which can never hold for an unsigned 'Chunk', so an absent key
-- would index the values array with an out-of-range slot.
update f chunk (N48 size keys values) = n48 size keys newValues
  where newValues | keys ! chunk >= 48 = values
                  | otherwise =
                      {-# SCC "update.newValues.N48" #-}
                      values // [(keys ! chunk, f $! values ! (keys ! chunk))]
update f chunk (N256 size values) = n256 size newValues
  where newValues | Just old <- values ! chunk =
                      {-# SCC "update.newValues.N256" #-}
                      values // [(chunk, Just $! f old)]
                  | otherwise = values
-- | Insert the given value at the given key. If the key already has a
-- value, the given function is used to combine the old value and the
-- new value.
--
-- The first clause handles the combine case; when its guard fails,
-- matching falls through to the insertion clauses, which grow the
-- node (N4 -> N16 -> N48 -> N256) whenever it is already full.
insertWith :: (a -> a -> a) -> Chunk -> a -> Children a -> Children a
insertWith f !chunk !value children
  | Just _ <- get children chunk = update (f value) chunk children
-- A full N4 grows into an N16; the keys are sorted first since N16
-- lookups use binary search.
insertWith _ !chunk !value (N4 4 keys values) = n16 5 keys' values'
  where (keys', values') = uncurry (Array.insert chunk value) $ sort4 keys values
insertWith _ !chunk !value (N4 n keys values) = n4 (n + 1) keys' values'
  where (keys', values') = (Array.consKeys chunk keys, Array.consValues value values)
-- A full N16 grows into an N48 with a 256-entry slot table.
insertWith _ !chunk !value (N16 16 keys values) = n48 17 keys' values'
  where keys' = Array.expandToByteKeyArray chunk keys
        values' = Array.consValues value values
insertWith _ !chunk !value (N16 n keys values) = n16 (n + 1) keys' values'
  where (keys', values') = Array.insert chunk value keys values
-- TODO: The bug here was caused because chunks and the indicies
-- internal to an N48 have the same type. I should probably wrap one
-- of them.
-- A full N48 grows into an N256 holding children directly.
insertWith _ !chunk !value (N48 48 keys values) =
  {-# SCC "update.insertWith.48to256" #-}
  n256 49 newValues
  where newValues = Array.expandKeysToValues keys values // [(chunk, Just $! value)]
-- Otherwise record the new child's slot (the old size @n@) and append
-- the value.
insertWith _ !chunk !value (N48 n keys values) = n48 (n + 1) keys' values'
  where keys' = keys // [(chunk, n)]
        values' = Array.snocValues values value
insertWith f !chunk !value (N256 n values) =
  {-# SCC "update.insertWith.N256" #-}
  n256 (n + 1) newValues
  where newValues = values // [(chunk, Just $! value)]
-- | Insert a value, replacing any existing value at the same key.
insert :: Chunk -> a -> Children a -> Children a
insert = insertWith const

-- A utility function you probably shouldn't use in real code! (Yet?)
-- Later bindings for the same key win, matching left-to-right insert
-- order.
fromList :: [(Chunk, a)] -> Children a
fromList = foldl addOne (N4 0 Array.empty Array.empty)
  where addOne acc (k, v) = insert k v acc

-- | Create a Node4 with the two given elements and everything else
-- empty.
pair :: Chunk -> a -> Chunk -> a -> Children a
pair chunk1 v1 chunk2 v2 =
    N4 2 (Array.listArray (0, 1) [chunk1, chunk2])
         (Array.listArray (0, 1) [v1, v2])
-- -- TODO: Organize tests!
-- -- test_pairN4 = pairN4 k1 (Leaf k1 "abc") k2 (Leaf k2 "abc") == result
-- -- where k1 = Byte.pack ([1..6] ++ [10])
-- -- k2 = Byte.pack ([1..6] ++ [14])
-- -- result = Node 6 "\SOH\STX\ETX\EOT\ENQ\ACK" 2 (N4 (Node4 (fromList [10,14]) (fromList [Leaf "\SOH\STX\ETX\EOT\ENQ\ACK\n" "abc",Leaf "\SOH\STX\ETX\EOT\ENQ\ACK\SO" "abc"])))
| TikhonJelvis/adaptive-radix-trees | src/Data/ART/Children.hs | bsd-3-clause | 7,842 | 0 | 14 | 2,066 | 1,974 | 1,048 | 926 | -1 | -1 |
module Fun.Direct where
import qualified Fun.Direct.Config as D
import qualified Fun.Quiz.Type2 as T2
import qualified Fun.Quiz.Type as T
import Fun.Type
import Fun.Table
import Fun.Examples
import Fun.Check
import Fun.Create
import qualified RAM.Builtin
import Inter.Types
import Inter.Quiz
import Challenger.Partial
import Autolib.Informed
import Data.Array
import Autolib.Reporter
import Autolib.ToDoc
import Condition
-- | Higher scores are better for this task type.
instance OrderScore D.Primrec_2D where
    scoringOrder _ = Increasing
-- | Task wiring: description, starting example, cheap (partial) and
-- full (total) correctness checks.  The user-facing strings are
-- German by design.
instance Partial D.Primrec_2D D.Config Fun where
    -- describe the task to the student
    describe p i =
        vcat [ text "Konstruieren Sie eine zweistellige primitiv rekursive Funktion"
             , text "mit dieser Wertetabelle:"
             , nest 4 $ toDoc $ D.table i
             , if null $ D.properties i
               then empty
               else text "und diesen Eigenschaften:"
                    </> ( vcat $ map explain $ D.properties i )
             ]
    -- initial example placed in the editor
    initial p i = Fun.Examples.plus
    -- partially correct: check required properties and arity only
    partial p i b = do
        investigate ( D.properties i ) b
        check_arity 2 b
    -- totally correct: compare the submission's value table with the
    -- required one
    total p i b = do
        inform $ text "Die Wertetabelle Ihrer Funktion ist:"
        mytafel <- D.mktafel2 $ D.table i
        let (dl,ur) = bounds $ unTafel2 mytafel
        let t :: Tafel2
            t = tabulate2 b ur
        inform $ nest 4 $ prettyTafel2 t
        -- compute the entries where the two tables differ
        let diffs = do
              xy <- indices $ unTafel2 mytafel
              let l = unTafel2 ( mytafel ) ! xy
                  r = unTafel2 ( t ) ! xy
              guard $ l /= r
              return ( xy, l, r )
        -- if there are differences, report (up to three of) them
        when ( not $ null diffs ) $ do
            inform $ text "Die Tabellen stimmen wenigstens hier nicht überein:"
            reject $ nest 4 $ vcat $ take 3 $ do
                ( xy, l, r ) <- diffs
                return $ hsep
                    [ text "Argumente:", toDoc xy
                    , text "vorgebener Wert:", toDoc l
                    , text "Ihr Wert:", toDoc r
                    ]
        -- very nice: a correct solution
        inform $ text "Die Tabellen stimmen überein."
-- | Fixed task instance built from the example configuration.
make_fixed :: Make
make_fixed = direct D.Primrec_2D D.example
-- | Quiz generator: build a nontrivial function/table pair of the
-- requested size and package it as a task configuration.
instance Generator D.Primrec_2D T2.Param ( Fun, D.Config ) where
    generator _ par key = do
        ( f, t ) <- nontrivial
            $ T.Param { T.expression_size = T2.expression_size par
                      , T.table_size = T2.table_size par
                      }
        return ( f
               , D.Config { D.table = D.mkmatrix t
                          , D.properties = T2.properties par
                          }
               )
-- | The student sees only the configuration, not the generated
-- solution function.
instance Project D.Primrec_2D ( Fun, D.Config ) D.Config where
    project _ ( f, c ) = c
-- | Quiz (randomised) task instance.
make_quiz :: Make
make_quiz = quiz D.Primrec_2D T2.example
| florianpilz/autotool | src/Fun/Direct.hs | gpl-2.0 | 2,762 | 53 | 14 | 888 | 494 | 317 | 177 | -1 | -1 |
{-
Copyright 2013 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
{-# Language GeneralizedNewtypeDeriving #-}
module Plush.Run.Expansion.Types (
Expansion,
runExpansion,
evalExpansion,
withExpansion,
withExpansion',
noteError,
noteShellError,
noteExitCode,
)
where
import Control.Applicative (Applicative)
import Control.Monad.Exception (MonadException(..))
import Control.Monad.Trans.Class (lift, MonadTrans)
import qualified Control.Monad.Trans.State as ST
import Plush.Run.Posix
import Plush.Run.Posix.Return
import Plush.Run.ShellExec
import Plush.Run.Types
-- | Expansion is defined in the spec in a way that is both functional and
-- stateful! It always produces an expansion, but in some cases it may shell
-- error. Further, the last exit status from the expansion is retained because
-- executing a command with no command name (just variable assignments) uses it.
--
-- Expansions are therefore defined in a small 'Expansion' monad. See
-- 'evalExpansion', 'withExpansion', and 'runExpansion' for ways to run them.
newtype ExpansionT m a = ExpansionT (ST.StateT ExpansionStatus m a)
    deriving (Functor, Applicative, Monad, MonadTrans, MonadException)
-- | An expansion computation over the shell execution monad.
type Expansion m = ExpansionT (ShellExec m)
-- | The state of an expansion operation: the first shell error seen
-- (if any) and the exit status of the last executed command.
data ExpansionStatus =
    ExpansionStatus { exError :: ErrorCode
                    , exLastExitCode :: ExitCode
                    }
-- | Starting state: no error recorded, last exit status is success.
expansionStartStatus :: ExpansionStatus
expansionStartStatus = ExpansionStatus { exError = ErrorCode ExitSuccess,
                                         exLastExitCode = ExitSuccess }
-- | Make note in an expansion of an exit status from an executed command. These
-- are from command expansion. The result of an expansion includes the last such
-- exit status.
noteExitCode :: (Monad m) => ExitCode -> Expansion m ()
noteExitCode ec = ExpansionT $
    ST.modify $ \s -> s { exLastExitCode = ec }
-- | Make note of a shell error during expansion. Only the first such error
-- (whose value really is an error) is retained. Note that expansion continues
-- even when there is such an error.
-- NOTE(review): relies on 'ifError' keeping an already-recorded error
-- in preference to the new one -- confirm against Plush.Run.Posix.Return.
noteError :: (Monad m) => ErrorCode -> Expansion m ()
noteError ec = ExpansionT $
    ST.modify $ \s -> s { exError = ifError id ec $ exError s }
-- | This is a convenience function that calls 'shellError' and then notes it.
noteShellError :: (PosixLike m) => Int -> String -> Expansion m ()
noteShellError e s = lift (shellError e s) >>= noteError
-- | This is the most generalized way to run an expansion. The result is either
-- a shell error, or a pair of the expansion result, and the last exit status
-- from any command expansions.
runExpansion :: (Monad m) =>
    ExpansionT m a -> m (Either ErrorCode (a, ExitCode))
runExpansion (ExpansionT st) = do
    (a, ex) <- ST.runStateT st expansionStartStatus
    return $ ifError Left (Right (a, exLastExitCode ex)) $ exError ex
-- | Perform an expansion, ignoring any shell errors (though their messages
-- will have been printed to stderr) and ignoring the last exit code if any.
evalExpansion :: (Monad m) => ExpansionT m a -> m a
evalExpansion (ExpansionT st) = ST.evalStateT st expansionStartStatus
-- | This is the common way to run an expansion. If the result is an error
-- then the action isn't run, and the error is returned instead.
withExpansion :: (Monad m) =>
    Expansion m a -> (a -> ShellExec m ShellStatus)
    -> ShellExec m ShellStatus
withExpansion eAct shAct = withExpansion' eAct (shAct . fst)
-- | Same as 'withExpansion', but provides the last exit code as well.
withExpansion' :: (Monad m) =>
    Expansion m a -> ((a, ExitCode) -> ShellExec m ShellStatus)
    -> ShellExec m ShellStatus
withExpansion' eAct shAct = runExpansion eAct >>= either returnError shAct
| mzero/plush | src/Plush/Run/Expansion/Types.hs | apache-2.0 | 4,287 | 0 | 13 | 828 | 715 | 401 | 314 | 50 | 1 |
import Test.Framework (defaultMain)
import qualified Tests.Distribution as Distribution
import qualified Tests.Function as Function
import qualified Tests.KDE as KDE
import qualified Tests.Matrix as Matrix
import qualified Tests.NonParametric as NonParametric
import qualified Tests.Transform as Transform
import qualified Tests.Correlation as Correlation
-- | Run every test group under a single test-framework driver.
main :: IO ()
main = defaultMain allTests
  where
    allTests =
      [ Distribution.tests
      , Function.tests
      , KDE.tests
      , Matrix.tests
      , NonParametric.tests
      , Transform.tests
      , Correlation.tests
      ]
| fpco/statistics | tests/tests.hs | bsd-2-clause | 650 | 0 | 7 | 194 | 120 | 78 | 42 | 16 | 1 |
{-# OPTIONS -fglasgow-exts #-}
--
-- polymorphic eval!
--
module Main where
import Poly
import System.Eval.Haskell
-- | Compile a polymorphic equality function at run time (via the
-- hs-plugins evaluator) and apply it at two different types; prints
-- nothing when the fragment fails to compile.
main = do
    m_f <- eval "Fn (\\x y -> x == y)" ["Poly"]
    case m_f of
      Just (Fn eq) -> do
        putStrLn (show (eq True True))
        putStrLn (show (eq 1 2))
      Nothing -> return ()
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.BuildTargets
-- Copyright : (c) Duncan Coutts 2012
-- License : BSD-like
--
-- Maintainer : duncan@community.haskell.org
--
-- Handling for user-specified build targets
-----------------------------------------------------------------------------
module Distribution.Simple.BuildTarget (
-- * Build targets
BuildTarget(..),
readBuildTargets,
-- * Parsing user build targets
UserBuildTarget,
readUserBuildTargets,
UserBuildTargetProblem(..),
reportUserBuildTargetProblems,
-- * Resolving build targets
resolveBuildTargets,
BuildTargetProblem(..),
reportBuildTargetProblems,
) where
import Distribution.Package
( Package(..), PackageId, packageName )
import Distribution.PackageDescription
( PackageDescription
, Executable(..)
, TestSuite(..), TestSuiteInterface(..), testModules
, Benchmark(..), BenchmarkInterface(..), benchmarkModules
, BuildInfo(..), libModules, exeModules )
import Distribution.ModuleName
( ModuleName, toFilePath )
import Distribution.Simple.LocalBuildInfo
( Component(..), ComponentName(..)
, pkgComponents, componentName, componentBuildInfo )
import Distribution.Text
( display )
import Distribution.Simple.Utils
( die, lowercase, equating )
import Data.List
( nub, stripPrefix, sortBy, groupBy, partition, intercalate )
import Data.Ord
import Data.Maybe
( listToMaybe, catMaybes )
import Data.Either
( partitionEithers )
import qualified Data.Map as Map
import Control.Monad
#if __GLASGOW_HASKELL__ < 710
import Control.Applicative (Applicative(..))
#endif
import Control.Applicative (Alternative(..))
import qualified Distribution.Compat.ReadP as Parse
import Distribution.Compat.ReadP
( (+++), (<++) )
import Data.Char
( isSpace, isAlphaNum )
import System.FilePath as FilePath
( dropExtension, normalise, splitDirectories, joinPath, splitPath
, hasTrailingPathSeparator )
import System.Directory
( doesFileExist, doesDirectoryExist )
-- ------------------------------------------------------------
-- * User build targets
-- ------------------------------------------------------------
-- | Various ways that a user may specify a build target.
--
data UserBuildTarget =
     -- | A target specified by a single name. This could be a component
     -- module or file.
     --
     -- > cabal build foo
     -- > cabal build Data.Foo
     -- > cabal build Data/Foo.hs Data/Foo.hsc
     --
     UserBuildTargetSingle String
     -- | A target specified by a qualifier and name. This could be a component
     -- name qualified by the component namespace kind, or a module or file
     -- qualified by the component name.
     --
     -- > cabal build lib:foo exe:foo
     -- > cabal build foo:Data.Foo
     -- > cabal build foo:Data/Foo.hs
     --
   | UserBuildTargetDouble String String
     -- | A fully qualified target, either a module or file qualified by a
     -- component name with the component namespace kind.
     --
     -- > cabal build lib:foo:Data/Foo.hs exe:foo:Data/Foo.hs
     -- > cabal build lib:foo:Data.Foo exe:foo:Data.Foo
     --
   | UserBuildTargetTriple String String String
  deriving (Show, Eq, Ord)
-- ------------------------------------------------------------
-- * Resolved build targets
-- ------------------------------------------------------------
-- | A fully resolved build target.
--
data BuildTarget =
     -- | A specific component
     --
     BuildTargetComponent ComponentName
     -- | A specific module within a specific component.
     --
   | BuildTargetModule ComponentName ModuleName
     -- | A specific file within a specific component.
     --
   | BuildTargetFile ComponentName FilePath
  deriving (Show,Eq)
-- ------------------------------------------------------------
-- * Do everything
-- ------------------------------------------------------------
-- | Parse the user's target strings and resolve them against the
-- package.  Problems at either stage are reported (and 'die' is
-- called) before the resolved targets are returned.
readBuildTargets :: PackageDescription -> [String] -> IO [BuildTarget]
readBuildTargets pkg targetStrs = do
    reportUserBuildTargetProblems parseProblems
    annotated <- mapM checkTargetExistsAsFile parsedTargets
    case resolveBuildTargets pkg annotated of
      (resolveProblems, btargets) -> do
        reportBuildTargetProblems resolveProblems
        return btargets
  where
    (parseProblems, parsedTargets) = readUserBuildTargets targetStrs
-- | Pair a user build target with a 'Bool' saying whether the file
-- part of the target exists on disk; the resolver uses this to decide
-- whether the string should be treated as a file target.
checkTargetExistsAsFile :: UserBuildTarget -> IO (UserBuildTarget, Bool)
checkTargetExistsAsFile t = do
    fexists <- existsAsFile (fileComponentOfTarget t)
    return (t, fexists)
  where
    -- True when the file itself exists; a path with a trailing
    -- separator only checks its directory, and a multi-component path
    -- that is not a file falls back to checking its first directory
    -- component.
    existsAsFile f = do
      exists <- doesFileExist f
      case splitPath f of
        (d:_) | hasTrailingPathSeparator d -> doesDirectoryExist d
        (d:_:_) | not exists -> doesDirectoryExist d
        _ -> return exists
    -- The file part is the last (or only) field of the target.
    fileComponentOfTarget (UserBuildTargetSingle s1) = s1
    fileComponentOfTarget (UserBuildTargetDouble _ s2) = s2
    fileComponentOfTarget (UserBuildTargetTriple _ _ s3) = s3
-- ------------------------------------------------------------
-- * Parsing user targets
-- ------------------------------------------------------------
-- | Parse each target string, separating failures from successes.
readUserBuildTargets :: [String] -> ([UserBuildTargetProblem]
                                    ,[UserBuildTarget])
readUserBuildTargets strs = partitionEithers (map readUserBuildTarget strs)

-- | Parse one user-supplied target string into its one-, two- or
-- three-part form.  Strings that do not parse yield a problem value.
readUserBuildTarget :: String -> Either UserBuildTargetProblem
                                 UserBuildTarget
readUserBuildTarget targetstr =
    case readPToMaybe parseTargetApprox targetstr of
      Nothing  -> Left (UserBuildTargetUnrecognised targetstr)
      Just tgt -> Right tgt
  where
    parseTargetApprox :: Parse.ReadP r UserBuildTarget
    parseTargetApprox = single +++ double +++ triple

    single = do
      a <- tokenQ
      return (UserBuildTargetSingle a)

    double = do
      a <- token
      _ <- Parse.char ':'
      b <- tokenQ
      return (UserBuildTargetDouble a b)

    triple = do
      a <- token
      _ <- Parse.char ':'
      b <- token
      _ <- Parse.char ':'
      c <- tokenQ
      return (UserBuildTargetTriple a b c)

    -- A token runs to the next ':' or whitespace; the final field may
    -- also be a Haskell-style quoted string.
    token  = Parse.munch1 (\x -> not (isSpace x) && x /= ':')
    tokenQ = parseHaskellString <++ token

    parseHaskellString :: Parse.ReadP r String
    parseHaskellString = Parse.readS_to_P reads

-- | Run a parser, succeeding only when it consumes the whole input
-- (modulo trailing whitespace).
readPToMaybe :: Parse.ReadP a a -> String -> Maybe a
readPToMaybe p str =
    listToMaybe [ r | (r, rest) <- Parse.readP_to_S p str
                    , all isSpace rest ]
-- | A user target string that could not be parsed at all.
data UserBuildTargetProblem
   = UserBuildTargetUnrecognised String
  deriving Show

-- | Report unrecognised user build targets, dying with a help text
-- listing example target forms when there are any.
reportUserBuildTargetProblems :: [UserBuildTargetProblem] -> IO ()
reportUserBuildTargetProblems problems =
    case unrecognised of
      [] -> return ()
      names ->
        die $ unlines
              [ "Unrecognised build target '" ++ name ++ "'."
              | name <- names ]
           ++ "Examples:\n"
           ++ " - build foo -- component name "
           ++ "(library, executable, test-suite or benchmark)\n"
           ++ " - build Data.Foo -- module name\n"
           ++ " - build Data/Foo.hsc -- file name\n"
           ++ " - build lib:foo exe:foo -- component qualified by kind\n"
           ++ " - build foo:Data.Foo -- module qualified by component\n"
           ++ " - build foo:Data/Foo.hsc -- file qualified by component"
  where
    unrecognised = [ str | UserBuildTargetUnrecognised str <- problems ]
-- | Render a user build target back to its colon-separated string
-- form.
showUserBuildTarget :: UserBuildTarget -> String
showUserBuildTarget t = intercalate ":" (parts t)
  where
    parts (UserBuildTargetSingle s1)       = [s1]
    parts (UserBuildTargetDouble s1 s2)    = [s1, s2]
    parts (UserBuildTargetTriple s1 s2 s3) = [s1, s2, s3]
-- ------------------------------------------------------------
-- * Resolving user targets to build targets
-- ------------------------------------------------------------
{-
stargets =
[ BuildTargetComponent (CExeName "foo")
, BuildTargetModule (CExeName "foo") (mkMn "Foo")
, BuildTargetModule (CExeName "tst") (mkMn "Foo")
]
where
mkMn :: String -> ModuleName
mkMn = fromJust . simpleParse
ex_pkgid :: PackageIdentifier
Just ex_pkgid = simpleParse "thelib"
-}
-- | Given a bunch of user-specified targets, try to resolve what it is they
-- refer to.  Each target resolves independently; failures are
-- collected separately from successes.
resolveBuildTargets :: PackageDescription
                    -> [(UserBuildTarget, Bool)]
                    -> ([BuildTargetProblem], [BuildTarget])
resolveBuildTargets pkg targets =
    partitionEithers [ resolveBuildTarget pkg utarget fexists
                     | (utarget, fexists) <- targets ]
-- | Resolve one user target (with its exists-as-file flag) to a build
-- target, or classify why it failed.  The 'Match' machinery
-- ('findMatch', 'Unambiguous', etc.) is defined later in this module.
resolveBuildTarget :: PackageDescription -> UserBuildTarget -> Bool
                   -> Either BuildTargetProblem BuildTarget
resolveBuildTarget pkg userTarget fexists =
    case findMatch (matchBuildTarget pkg userTarget fexists) of
      Unambiguous target -> Right target
      Ambiguous targets -> Left (BuildTargetAmbigious userTarget targets')
                    where targets' = disambiguateBuildTargets
                                       (packageId pkg) userTarget
                                       targets
      None errs -> Left (classifyMatchErrors errs)
  where
    -- Prefer "expected X, got Y" over "no such thing" errors; the
    -- @got:_@ pattern is safe because the guard ensures 'expected'
    -- is non-empty.
    classifyMatchErrors errs
      | not (null expected) = let (things, got:_) = unzip expected in
                              BuildTargetExpected userTarget things got
      | not (null nosuch) = BuildTargetNoSuch userTarget nosuch
      | otherwise = error $ "resolveBuildTarget: internal error in matching"
      where
        expected = [ (thing, got) | MatchErrorExpected thing got <- errs ]
        nosuch = [ (thing, got) | MatchErrorNoSuch thing got <- errs ]
-- | Ways that target resolution can fail.
data BuildTargetProblem
   = BuildTargetExpected UserBuildTarget [String] String
     -- ^ [expected thing] (actually got)
   | BuildTargetNoSuch UserBuildTarget [(String, String)]
     -- ^ [(no such thing, actually got)]
     -- NOTE: the misspelt constructor name below is kept for
     -- compatibility with existing callers.
   | BuildTargetAmbigious UserBuildTarget [(UserBuildTarget, BuildTarget)]
  deriving Show
-- | Re-render ambiguous targets with progressively more
-- qualification (single -> double -> triple) until each rendering is
-- unique.
disambiguateBuildTargets :: PackageId -> UserBuildTarget -> [BuildTarget]
                         -> [(UserBuildTarget, BuildTarget)]
disambiguateBuildTargets pkgid original =
    disambiguate (userTargetQualLevel original)
  where
    -- NOTE(review): assumes fully-qualified (QL3) renderings are
    -- always unique, since 'succ QL3' would fail -- confirm.
    disambiguate ql ts
        | null amb = unamb
        | otherwise = unamb ++ disambiguate (succ ql) amb
      where
        (amb, unamb) = step ql ts
    userTargetQualLevel (UserBuildTargetSingle _ ) = QL1
    userTargetQualLevel (UserBuildTargetDouble _ _ ) = QL2
    userTargetQualLevel (UserBuildTargetTriple _ _ _) = QL3
    -- Render each target at the given level, then split the groups
    -- that still collide (ambiguous) from the unique ones.
    step :: QualLevel -> [BuildTarget]
         -> ([BuildTarget], [(UserBuildTarget, BuildTarget)])
    step ql = (\(amb, unamb) -> (map snd $ concat amb, concat unamb))
            . partition (\g -> length g > 1)
            . groupBy (equating fst)
            . sortBy (comparing fst)
            . map (\t -> (renderBuildTarget ql t pkgid, t))
-- | How many colon-separated parts a rendered target string carries.
data QualLevel = QL1 | QL2 | QL3
  deriving (Enum, Show)
-- | Render a resolved target back to user-target syntax at the given
-- qualification level.
renderBuildTarget :: QualLevel -> BuildTarget -> PackageId -> UserBuildTarget
renderBuildTarget ql target pkgid =
    case ql of
      QL1 -> UserBuildTargetSingle s1 where s1 = single target
      QL2 -> UserBuildTargetDouble s1 s2 where (s1, s2) = double target
      QL3 -> UserBuildTargetTriple s1 s2 s3 where (s1, s2, s3) = triple target
  where
    single (BuildTargetComponent cn ) = dispCName cn
    single (BuildTargetModule _ m) = display m
    single (BuildTargetFile _ f) = f
    double (BuildTargetComponent cn ) = (dispKind cn, dispCName cn)
    double (BuildTargetModule cn m) = (dispCName cn, display m)
    double (BuildTargetFile cn f) = (dispCName cn, f)
    -- Components never need triple qualification, so this case is
    -- expected to be unreachable.
    triple (BuildTargetComponent _ ) = error "triple BuildTargetComponent"
    triple (BuildTargetModule cn m) = (dispKind cn, dispCName cn, display m)
    triple (BuildTargetFile cn f) = (dispKind cn, dispCName cn, f)
    dispCName = componentStringName pkgid
    dispKind = showComponentKindShort . componentKind
-- | Report resolution problems (wrong form, unknown, or ambiguous
-- targets), calling 'die' with an explanatory message for each class
-- of problem encountered.
reportBuildTargetProblems :: [BuildTargetProblem] -> IO ()
reportBuildTargetProblems problems = do
    -- Targets that parsed but named an unexpected kind of thing.
    case [ (t, e, g) | BuildTargetExpected t e g <- problems ] of
      [] -> return ()
      targets ->
        die $ unlines
          [ "Unrecognised build target '" ++ showUserBuildTarget target
            ++ "'.\n"
            ++ "Expected a " ++ intercalate " or " expected
            ++ ", rather than '" ++ got ++ "'."
          | (target, expected, got) <- targets ]
    -- Targets naming components/modules/files that do not exist.
    case [ (t, e) | BuildTargetNoSuch t e <- problems ] of
      [] -> return ()
      targets ->
        die $ unlines
          [ "Unknown build target '" ++ showUserBuildTarget target
            ++ "'.\nThere is no "
            ++ intercalate " or " [ mungeThing thing ++ " '" ++ got ++ "'"
            | (thing, got) <- nosuch ] ++ "."
          | (target, nosuch) <- targets ]
      where
        mungeThing "file" = "file target"
        mungeThing thing = thing
    -- Targets that could refer to more than one thing.
    case [ (t, ts) | BuildTargetAmbigious t ts <- problems ] of
      [] -> return ()
      targets ->
        die $ unlines
          [ "Ambiguous build target '" ++ showUserBuildTarget target
            ++ "'. It could be:\n "
            ++ unlines [ " "++ showUserBuildTarget ut ++
                         " (" ++ showBuildTargetKind bt ++ ")"
                       | (ut, bt) <- amb ]
          | (target, amb) <- targets ]
  where
    showBuildTargetKind (BuildTargetComponent _ ) = "component"
    showBuildTargetKind (BuildTargetModule _ _) = "module"
    showBuildTargetKind (BuildTargetFile _ _) = "file"
----------------------------------
-- Top level BuildTarget matcher
--
-- | Match a user target against the package's components, modules and
-- files, dispatching on how many parts the target string had.
matchBuildTarget :: PackageDescription
                 -> UserBuildTarget -> Bool -> Match BuildTarget
matchBuildTarget pkg = \utarget fexists ->
    case utarget of
      UserBuildTargetSingle str1 ->
        matchBuildTarget1 cinfo str1 fexists
      UserBuildTargetDouble str1 str2 ->
        matchBuildTarget2 cinfo str1 str2 fexists
      UserBuildTargetTriple str1 str2 str3 ->
        matchBuildTarget3 cinfo str1 str2 str3 fexists
  where
    cinfo = pkgComponentInfo pkg
-- In all three arities below, component matches shadow module
-- matches, which shadow file matches ('matchPlusShadowing' is
-- defined later in this module -- confirm its exact precedence
-- semantics there).
matchBuildTarget1 :: [ComponentInfo] -> String -> Bool -> Match BuildTarget
matchBuildTarget1 cinfo str1 fexists =
        matchComponent1 cinfo str1
 `matchPlusShadowing` matchModule1 cinfo str1
 `matchPlusShadowing` matchFile1 cinfo str1 fexists
matchBuildTarget2 :: [ComponentInfo] -> String -> String -> Bool
                  -> Match BuildTarget
matchBuildTarget2 cinfo str1 str2 fexists =
        matchComponent2 cinfo str1 str2
 `matchPlusShadowing` matchModule2 cinfo str1 str2
 `matchPlusShadowing` matchFile2 cinfo str1 str2 fexists
matchBuildTarget3 :: [ComponentInfo] -> String -> String -> String -> Bool
                  -> Match BuildTarget
matchBuildTarget3 cinfo str1 str2 str3 fexists =
        matchModule3 cinfo str1 str2 str3
 `matchPlusShadowing` matchFile3 cinfo str1 str2 str3 fexists
-- | The per-component information needed for build-target matching.
data ComponentInfo = ComponentInfo {
       cinfoName    :: ComponentName,        -- ^ structured component name
       cinfoStrName :: ComponentStringName,  -- ^ the name as users write it
       cinfoSrcDirs :: [FilePath],           -- ^ the component's hs-source-dirs
       cinfoModules :: [ModuleName],
       cinfoHsFiles :: [FilePath], -- ^ other hs files (like main.hs)
       cinfoCFiles  :: [FilePath],
       cinfoJsFiles :: [FilePath]
     }

-- | A component name in the textual form used in build-target strings.
type ComponentStringName = String
-- | Collect the matching-relevant information for every component of the
-- package.
pkgComponentInfo :: PackageDescription -> [ComponentInfo]
pkgComponentInfo pkg = map infoFor (pkgComponents pkg)
  where
    infoFor comp =
        ComponentInfo {
          cinfoName    = componentName comp,
          cinfoStrName = componentStringName pkg (componentName comp),
          cinfoSrcDirs = hsSourceDirs bi,
          cinfoModules = componentModules comp,
          cinfoHsFiles = componentHsFiles comp,
          cinfoCFiles  = cSources bi,
          cinfoJsFiles = jsSources bi
        }
      where
        bi = componentBuildInfo comp
-- | The string users write to refer to a component: the package name for
-- the library, otherwise the component's own name.
componentStringName :: Package pkg => pkg -> ComponentName -> ComponentStringName
componentStringName pkg cname = case cname of
    CLibName        -> display (packageName pkg)
    CExeName   name -> name
    CTestName  name -> name
    CBenchName name -> name

-- | All modules belonging to a component.
componentModules :: Component -> [ModuleName]
componentModules comp = case comp of
    CLib   lib   -> libModules lib
    CExe   exe   -> exeModules exe
    CTest  test  -> testModules test
    CBench bench -> benchmarkModules bench

-- | Haskell source files of a component that are not modules: the
-- @main-is@ file of an executable-style component, when there is one.
componentHsFiles :: Component -> [FilePath]
componentHsFiles comp = case comp of
    CExe exe -> [modulePath exe]
    CTest  TestSuite { testInterface      = TestSuiteExeV10 _ mainfile } -> [mainfile]
    CBench Benchmark { benchmarkInterface = BenchmarkExeV10 _ mainfile } -> [mainfile]
    _ -> []
{-
ex_cs :: [ComponentInfo]
ex_cs =
[ (mkC (CExeName "foo") ["src1", "src1/src2"] ["Foo", "Src2.Bar", "Bar"])
, (mkC (CExeName "tst") ["src1", "test"] ["Foo"])
]
where
mkC n ds ms = ComponentInfo n (componentStringName pkgid n) ds (map mkMn ms)
mkMn :: String -> ModuleName
mkMn = fromJust . simpleParse
pkgid :: PackageIdentifier
Just pkgid = simpleParse "thelib"
-}
------------------------------
-- Matching component kinds
--
-- | The four kinds of component a build target can select.
data ComponentKind = LibKind | ExeKind | TestKind | BenchKind
  deriving (Eq, Ord, Show)

-- | The kind of a structured component name.
componentKind :: ComponentName -> ComponentKind
componentKind CLibName = LibKind
componentKind (CExeName _) = ExeKind
componentKind (CTestName _) = TestKind
componentKind (CBenchName _) = BenchKind

-- | The kind of a component, via its 'ComponentInfo'.
cinfoKind :: ComponentInfo -> ComponentKind
cinfoKind = componentKind . cinfoName

-- | Recognise the strings users may write for each component kind.
-- A recognised kind raises match confidence; anything else reports an
-- \"expected component kind\" error.
matchComponentKind :: String -> Match ComponentKind
matchComponentKind s
  | s `elem` ["lib", "library"]            = increaseConfidence >> return LibKind
  | s `elem` ["exe", "executable"]         = increaseConfidence >> return ExeKind
  | s `elem` ["tst", "test", "test-suite"] = increaseConfidence
                                             >> return TestKind
  | s `elem` ["bench", "benchmark"]        = increaseConfidence
                                             >> return BenchKind
  | otherwise                              = matchErrorExpected
                                             "component kind" s
-- | The long, user-facing name of a component kind.
showComponentKind :: ComponentKind -> String
showComponentKind kind = case kind of
    LibKind   -> "library"
    ExeKind   -> "executable"
    TestKind  -> "test-suite"
    BenchKind -> "benchmark"

-- | The abbreviated name of a component kind.
showComponentKindShort :: ComponentKind -> String
showComponentKindShort kind = case kind of
    LibKind   -> "lib"
    ExeKind   -> "exe"
    TestKind  -> "test"
    BenchKind -> "bench"
------------------------------
-- Matching component targets
--
-- | Match a bare component name, e.g. @foo@.
matchComponent1 :: [ComponentInfo] -> String -> Match BuildTarget
matchComponent1 cs cname = do
    guardComponentName cname
    info <- matchComponentName cs cname
    return (BuildTargetComponent (cinfoName info))

-- | Match @kind:component@, e.g. @exe:foo@.
matchComponent2 :: [ComponentInfo] -> String -> String -> Match BuildTarget
matchComponent2 cs kindStr cname = do
    ckind <- matchComponentKind kindStr
    guardComponentName cname
    info <- matchComponentKindAndName cs ckind cname
    return (BuildTargetComponent (cinfoName info))
-- utils:
-- | Accept a non-empty string consisting only of characters that may
-- appear in a component name; otherwise report an
-- \"expected component name\" error.
guardComponentName :: String -> Match ()
guardComponentName s
  | not (null s) && all okChar s = increaseConfidence
  | otherwise                    = matchErrorExpected "component name" s
  where
    okChar ch = isAlphaNum ch || ch `elem` "._-'"
-- | Look up a component by its user-written name, case-insensitively
-- (via 'caseFold'), reporting \"no such component\" when nothing matches.
matchComponentName :: [ComponentInfo] -> String -> Match ComponentInfo
matchComponentName cs str =
    orNoSuchThing "component" str
  $ increaseConfidenceFor
  $ matchInexactly caseFold
      [ (cinfoStrName c, c) | c <- cs ]
      str

-- | As 'matchComponentName', but additionally require the component to
-- be of the given kind; the error message names that kind.
matchComponentKindAndName :: [ComponentInfo] -> ComponentKind -> String
                          -> Match ComponentInfo
matchComponentKindAndName cs ckind str =
    orNoSuchThing (showComponentKind ckind ++ " component") str
  $ increaseConfidenceFor
  $ matchInexactly (\(ck, cn) -> (ck, caseFold cn))
      [ ((cinfoKind c, cinfoStrName c), c) | c <- cs ]
      (ckind, str)


------------------------------
-- Matching module targets
--

-- | Match a bare module name against the modules of every component,
-- collapsing duplicate error messages from the per-component attempts.
matchModule1 :: [ComponentInfo] -> String -> Match BuildTarget
matchModule1 cs = \str1 -> do
    guardModuleName str1
    nubMatchErrors $ do
      c <- tryEach cs
      let ms = cinfoModules c
      m <- matchModuleName ms str1
      return (BuildTargetModule (cinfoName c) m)

-- | Match @component:module@.
matchModule2 :: [ComponentInfo] -> String -> String -> Match BuildTarget
matchModule2 cs = \str1 str2 -> do
    guardComponentName str1
    guardModuleName str2
    c <- matchComponentName cs str1
    let ms = cinfoModules c
    m <- matchModuleName ms str2
    return (BuildTargetModule (cinfoName c) m)

-- | Match @kind:component:module@.
matchModule3 :: [ComponentInfo] -> String -> String -> String
             -> Match BuildTarget
matchModule3 cs str1 str2 str3 = do
    ckind <- matchComponentKind str1
    guardComponentName str2
    c <- matchComponentKindAndName cs ckind str2
    guardModuleName str3
    let ms = cinfoModules c
    m <- matchModuleName ms str3
    return (BuildTargetModule (cinfoName c) m)

-- utils:

-- | Accept a non-empty string of valid module-name characters; otherwise
-- report an \"expected module name\" error.
guardModuleName :: String -> Match ()
guardModuleName s
  | all validModuleChar s
    && not (null s)       = increaseConfidence
  | otherwise             = matchErrorExpected "module name" s
  where
    validModuleChar c = isAlphaNum c || c == '.' || c == '_' || c == '\''

-- | Look up a module among the candidates by its (case-folded) display
-- name, reporting \"no such module\" on failure.
matchModuleName :: [ModuleName] -> String -> Match ModuleName
matchModuleName ms str =
    orNoSuchThing "module" str
  $ increaseConfidenceFor
  $ matchInexactly caseFold
      [ (display m, m)
      | m <- ms ]
      str
------------------------------
-- Matching file targets
--
-- | Match a bare file path against the files of every component,
-- collapsing duplicate error messages from the per-component attempts.
matchFile1 :: [ComponentInfo] -> String -> Bool -> Match BuildTarget
matchFile1 cs str1 exists =
    nubMatchErrors $ do
      c <- tryEach cs
      filepath <- matchComponentFile c str1 exists
      return (BuildTargetFile (cinfoName c) filepath)

-- | Match @component:file@.
matchFile2 :: [ComponentInfo] -> String -> String -> Bool -> Match BuildTarget
matchFile2 cs str1 str2 exists = do
    guardComponentName str1
    c <- matchComponentName cs str1
    filepath <- matchComponentFile c str2 exists
    return (BuildTargetFile (cinfoName c) filepath)

-- | Match @kind:component:file@.
matchFile3 :: [ComponentInfo] -> String -> String -> String -> Bool
           -> Match BuildTarget
matchFile3 cs str1 str2 str3 exists = do
    ckind <- matchComponentKind str1
    guardComponentName str2
    c <- matchComponentKindAndName cs ckind str2
    filepath <- matchComponentFile c str3 exists
    return (BuildTargetFile (cinfoName c) filepath)

-- | Match a file path against one component's modules and extra source
-- files, both relative to the component's source dirs (\"rooted\") and
-- relative to the package root (\"unrooted\").  Rooted matches shadow
-- unrooted ones; 'matchFileExists' contributes only a (high-confidence)
-- error, used when the file exists on disk but belongs to no component.
matchComponentFile :: ComponentInfo -> String -> Bool -> Match FilePath
matchComponentFile c str fexists =
    expecting "file" str $
      matchPlus
        (matchFileExists str fexists)
        (matchPlusShadowing
          (msum [ matchModuleFileRooted dirs ms str
                , matchOtherFileRooted dirs hsFiles str ])
          (msum [ matchModuleFileUnrooted ms str
                , matchOtherFileUnrooted hsFiles str
                , matchOtherFileUnrooted cFiles str
                , matchOtherFileUnrooted jsFiles str ]))
  where
    dirs    = cinfoSrcDirs c
    ms      = cinfoModules c
    hsFiles = cinfoHsFiles c
    cFiles  = cinfoCFiles c
    jsFiles = cinfoJsFiles c

-- utils

-- | Never succeeds; but when the file does exist on disk this produces a
-- \"no such file\" error with raised confidence, so that error tends to
-- be reported in preference to the generic \"expected file\" message.
matchFileExists :: FilePath -> Bool -> Match a
matchFileExists _     False = mzero
matchFileExists fname True  = do increaseConfidence
                                 matchErrorNoSuch "file" fname
-- | Match a path (relative to the package root) against the candidate
-- modules by file stem.
matchModuleFileUnrooted :: [ModuleName] -> String -> Match FilePath
matchModuleFileUnrooted ms str = do
    let filepath = normalise str
    _ <- matchModuleFileStem ms filepath
    return filepath

-- | As 'matchModuleFileUnrooted', but first strip one of the given
-- source-dir prefixes.  Duplicate results (the same path found under
-- several dirs) are collapsed with 'nubMatches'.
matchModuleFileRooted :: [FilePath] -> [ModuleName] -> String -> Match FilePath
matchModuleFileRooted dirs ms str = nubMatches $ do
    let filepath = normalise str
    filepath' <- matchDirectoryPrefix dirs filepath
    _ <- matchModuleFileStem ms filepath'
    return filepath

-- | Match a file path, minus its extension, case-insensitively against
-- the candidate modules' 'toFilePath' renderings.
matchModuleFileStem :: [ModuleName] -> FilePath -> Match ModuleName
matchModuleFileStem ms =
      increaseConfidenceFor
    . matchInexactly caseFold
        [ (toFilePath m, m) | m <- ms ]
    . dropExtension

-- | Match one of the known non-module files, after stripping one of the
-- given source-dir prefixes.
matchOtherFileRooted :: [FilePath] -> [FilePath] -> FilePath -> Match FilePath
matchOtherFileRooted dirs fs str = do
    let filepath = normalise str
    filepath' <- matchDirectoryPrefix dirs filepath
    _ <- matchFile fs filepath'
    return filepath

-- | Match one of the known non-module files, relative to the package root.
matchOtherFileUnrooted :: [FilePath] -> FilePath -> Match FilePath
matchOtherFileUnrooted fs str = do
    let filepath = normalise str
    _ <- matchFile fs filepath
    return filepath

-- | Case-insensitive lookup of a path among the given file paths.
matchFile :: [FilePath] -> FilePath -> Match FilePath
matchFile fs = increaseConfidenceFor
             . matchInexactly caseFold [ (f, f) | f <- fs ]
-- | Strip one of the given directory prefixes from a (normalised) path.
-- Every prefix that applies yields an exact match; if none apply there is
-- no match.
matchDirectoryPrefix :: [FilePath] -> FilePath -> Match FilePath
matchDirectoryPrefix dirs filepath =
    exactMatches (catMaybes stripped)
  where
    stripped = [ stripDir (normalise dir) | dir <- dirs ]

    stripDir :: FilePath -> Maybe FilePath
    stripDir dir =
      fmap joinPath (stripPrefix (splitDirectories dir) (splitDirectories filepath))
------------------------------
-- Matching monad
--
-- | A matcher embodies a way to match some input as being some recognised
-- value. In particular it deals with multiple and ambiguous matches.
--
-- There are various matcher primitives ('matchExactly', 'matchInexactly'),
-- ways to combine matchers ('matchPlus', 'matchPlusShadowing') and finally
-- we can run a matcher against an input using 'findMatch'.
--
-- | The result of running a matcher: either no match together with the
-- accumulated error messages, or a non-empty collection of exact or
-- inexact matches.
data Match a = NoMatch      Confidence [MatchError]
             | ExactMatch   Confidence [a]
             | InexactMatch Confidence [a]
  deriving Show

-- | A measure of how far matching proceeded; when two failures are
-- combined, the errors with the higher confidence win.
type Confidence = Int

-- | The ways a match can fail: the input did not have the expected form,
-- or it named a thing that does not exist.  (Both carry the kind of thing
-- and the offending input.)
data MatchError = MatchErrorExpected String String
                | MatchErrorNoSuch   String String
  deriving (Show, Eq)

instance Alternative Match where
  empty = mzero
  (<|>) = mplus

instance MonadPlus Match where
  mzero = matchZero
  mplus = matchPlus

-- | The matcher that never matches.
matchZero :: Match a
matchZero = NoMatch 0 []
-- | Combine two matchers. Exact matches are used over inexact matches,
-- but if we have multiple exact (or multiple inexact) matches then we
-- collect all of them as ambiguous matches.
--
-- The cases are ordered so that an exact result always beats an inexact
-- one, and any success beats a failure; two results of the same shape are
-- merged, keeping the larger confidence.
matchPlus :: Match a -> Match a -> Match a
matchPlus   (ExactMatch   d1 xs)   (ExactMatch   d2 xs') =
    ExactMatch (max d1 d2) (xs ++ xs')
matchPlus a@(ExactMatch   _  _ )   (InexactMatch _  _  ) = a
matchPlus a@(ExactMatch   _  _ )   (NoMatch      _  _  ) = a
matchPlus   (InexactMatch _  _ ) b@(ExactMatch   _  _  ) = b
matchPlus   (InexactMatch d1 xs)   (InexactMatch d2 xs') =
    InexactMatch (max d1 d2) (xs ++ xs')
matchPlus a@(InexactMatch _  _ )   (NoMatch      _  _  ) = a
matchPlus   (NoMatch      _  _ ) b@(ExactMatch   _  _  ) = b
matchPlus   (NoMatch      _  _ ) b@(InexactMatch _  _  ) = b
-- Two failures: the higher-confidence errors win; at equal confidence
-- the error lists are concatenated.
matchPlus a@(NoMatch      d1 ms) b@(NoMatch      d2 ms')
  | d1 >  d2  = a
  | d1 <  d2  = b
  | otherwise = NoMatch d1 (ms ++ ms')

-- | Combine two matchers. This is similar to 'matchPlus' with the
-- difference that an exact match from the left matcher shadows any exact
-- match on the right. Inexact matches are still collected however.
matchPlusShadowing :: Match a -> Match a -> Match a
matchPlusShadowing a@(ExactMatch _ _) (ExactMatch _ _) = a
matchPlusShadowing a b                                 = matchPlus a b
instance Functor Match where
  fmap _ (NoMatch      d ms) = NoMatch d ms
  fmap f (ExactMatch   d xs) = ExactMatch   d (fmap f xs)
  fmap f (InexactMatch d xs) = InexactMatch d (fmap f xs)

instance Applicative Match where
  pure  = return
  (<*>) = ap

-- Note: bind adds the confidence of the first stage to that of the
-- second ('addDepth'), and binding on an inexact match keeps the overall
-- result inexact ('forceInexact').
instance Monad Match where
  return a = ExactMatch 0 [a]

  NoMatch      d ms >>= _ = NoMatch d ms
  ExactMatch   d xs >>= f = addDepth d
                            $ foldr matchPlus matchZero (map f xs)
  InexactMatch d xs >>= f = addDepth d . forceInexact
                            $ foldr matchPlus matchZero (map f xs)
-- | Add the given amount to the confidence of a match result.
addDepth :: Confidence -> Match a -> Match a
addDepth extra match = case match of
    NoMatch      d msgs -> NoMatch      (extra + d) msgs
    ExactMatch   d xs   -> ExactMatch   (extra + d) xs
    InexactMatch d xs   -> InexactMatch (extra + d) xs

-- | Downgrade an exact match to an inexact one; other results unchanged.
forceInexact :: Match a -> Match a
forceInexact match = case match of
    ExactMatch d ys -> InexactMatch d ys
    other           -> other
------------------------------
-- Various match primitives
--
-- | Primitive failing matchers: the input did not have the expected form,
-- or it named a thing that does not exist.
matchErrorExpected, matchErrorNoSuch :: String -> String -> Match a
matchErrorExpected thing got = NoMatch 0 [MatchErrorExpected thing got]
matchErrorNoSuch   thing got = NoMatch 0 [MatchErrorNoSuch   thing got]

-- | Replace a zero-confidence failure with an \"expected thing\" error;
-- any other result passes through unchanged.
expecting :: String -> String -> Match a -> Match a
expecting thing got (NoMatch 0 _) = matchErrorExpected thing got
expecting _     _   m             = m

-- | Replace a zero-confidence failure with a \"no such thing\" error;
-- any other result passes through unchanged.
orNoSuchThing :: String -> String -> Match a -> Match a
orNoSuchThing thing got (NoMatch 0 _) = matchErrorNoSuch thing got
orNoSuchThing _     _   m             = m

-- | A trivial exact match that bumps the confidence by one.
increaseConfidence :: Match ()
increaseConfidence = ExactMatch 1 [()]

-- | Run a matcher and add one to the confidence of its result.
increaseConfidenceFor :: Match a -> Match a
increaseConfidenceFor m = m >>= \r -> increaseConfidence >> return r

-- | Remove duplicate match results.
nubMatches :: Eq a => Match a -> Match a
nubMatches (NoMatch      d msgs) = NoMatch      d msgs
nubMatches (ExactMatch   d xs)   = ExactMatch   d (nub xs)
nubMatches (InexactMatch d xs)   = InexactMatch d (nub xs)

-- | Remove duplicate error messages from a failed match.
nubMatchErrors :: Match a -> Match a
nubMatchErrors (NoMatch      d msgs) = NoMatch      d (nub msgs)
nubMatchErrors (ExactMatch   d xs)   = ExactMatch   d xs
nubMatchErrors (InexactMatch d xs)   = InexactMatch d xs

-- | Lift a list of matches to an exact (resp. inexact) match; the empty
-- list is no match.
exactMatches, inexactMatches :: [a] -> Match a

exactMatches [] = matchZero
exactMatches xs = ExactMatch 0 xs

inexactMatches [] = matchZero
inexactMatches xs = InexactMatch 0 xs

-- | Try each of the candidates in turn (all as exact matches).
tryEach :: [a] -> Match a
tryEach = exactMatches
------------------------------
-- Top level match runner
--
-- | Given a matcher and a key to look up, use the matcher to find all the
-- possible matches. There may be 'None', a single 'Unambiguous' match or
-- you may have an 'Ambiguous' match with several possibilities.
--
findMatch :: Eq b => Match b -> MaybeAmbigious b
findMatch match =
    case match of
      NoMatch      _ msgs -> None (nub msgs)
      ExactMatch   _ xs   -> checkAmbigious xs
      InexactMatch _ xs   -> checkAmbigious xs
  where
    -- collapse duplicates before deciding whether the match is unique
    checkAmbigious xs = case nub xs of
                          [x] -> Unambiguous x
                          xs' -> Ambiguous xs'

-- | The overall outcome of matching: no match (with the collected
-- errors), exactly one match, or several ambiguous candidates.
data MaybeAmbigious a = None [MatchError] | Unambiguous a | Ambiguous [a]
  deriving Show
------------------------------
-- Basic matchers
--
{-
-- | A primitive matcher that looks up a value in a finite 'Map'. The
-- value must match exactly.
--
matchExactly :: forall a b. Ord a => [(a, b)] -> (a -> Match b)
matchExactly xs =
\x -> case Map.lookup x m of
Nothing -> matchZero
Just ys -> ExactMatch 0 ys
where
m :: Ord a => Map a [b]
m = Map.fromListWith (++) [ (k,[x]) | (k,x) <- xs ]
-}
-- | A primitive matcher that looks up a value in a finite 'Map'. It checks
-- for an exact or inexact match. We get an inexact match if the match
-- is not exact, but the canonical forms match. It takes a canonicalisation
-- function for this purpose.
--
-- So for example if we used string case fold as the canonicalisation
-- function, then we would get case insensitive matching (but it will still
-- report an exact match when the case matches too).
--
matchInexactly :: (Ord a, Ord a') =>
                  (a -> a') ->
                  [(a, b)] -> (a -> Match b)
matchInexactly cannonicalise xs =
    \x -> case Map.lookup x m of
            Just ys -> exactMatches ys
            Nothing -> case Map.lookup (cannonicalise x) m' of
                         Just ys -> inexactMatches ys
                         Nothing -> matchZero
  where
    -- the map of exact keys to groups of values
    m  = Map.fromListWith (++) [ (k,[x]) | (k,x) <- xs ]

    -- the map of canonicalised keys to groups of inexact matches
    m' = Map.mapKeysWith (++) cannonicalise m
------------------------------
-- Utils
--
-- | Canonicalisation used throughout for case-insensitive matching.
-- Delegates to 'lowercase' — presumably simple per-character lowering;
-- confirm against its definition if full Unicode case folding matters.
caseFold :: String -> String
caseFold = lowercase
| DavidAlphaFox/ghc | libraries/Cabal/Cabal/Distribution/Simple/BuildTarget.hs | bsd-3-clause | 32,737 | 0 | 22 | 8,559 | 7,907 | 4,074 | 3,833 | 589 | 9 |
-- DataKinds is needed for deriveAll0 calls on GHC 8
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
module DataFamilies.Types where
import Prelude ()
import Prelude.Compat
import Generics.Deriving.TH (deriveAll0)
import Types (ApproxEq(..))
-- | A data family with nullary-constructor instances for 'Int' and 'Char'.
data family Nullary a
data instance Nullary Int  = C1 | C2 | C3 deriving (Eq, Show)
data instance Nullary Char = C4 deriving (Eq, Show)

-- | A data family whose instance covers the common shapes of Haskell
-- datatypes: nullary, unary, product, record and list-field constructors.
data family SomeType a b c
data instance SomeType c () a = Nullary
                              | Unary Int
                              | Product String (Maybe Char) a
                              | Record { testOne   :: Double
                                       , testTwo   :: Maybe Bool
                                       , testThree :: Maybe a
                                       }
                              | List [a]
  deriving (Eq, Show)

-- | A newtype data-family instance; its 'Eq' goes through 'ApproxEq'.
data family Approx a
newtype instance Approx a = Approx { fromApprox :: a }
    deriving (Show, ApproxEq, Num)

instance (ApproxEq a) => Eq (Approx a) where
    Approx a == Approx b = a =~ b

-- | A GADT data-family instance with a single, index-refining constructor.
data family GADT a

data instance GADT a where
    GADT :: { gadt :: String } -> GADT String

deriving instance Eq (GADT a)
deriving instance Show (GADT a)

-- We use generic-deriving to be able to derive Generic instances for
-- data families on GHC 7.4.
$(deriveAll0 'C1)
$(deriveAll0 'C4)
$(deriveAll0 'Approx)
$(deriveAll0 'Nullary)
| tolysz/prepare-ghcjs | spec-lts8/aeson/tests/DataFamilies/Types.hs | bsd-3-clause | 1,601 | 0 | 9 | 500 | 386 | 220 | 166 | 39 | 0 |
module Main where
import Test.DocTest
-- | Run doctest over @Data.IP@ and @Data.IP.RouteTable@ with
-- @OverloadedStrings@ enabled.
main :: IO ()
main = doctest ["-XOverloadedStrings", "Data/IP.hs", "Data/IP/RouteTable.hs"]
| DanielG/iproute | test/doctests.hs | bsd-3-clause | 132 | 0 | 6 | 17 | 36 | 21 | 15 | 4 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE OverloadedStrings #-}
-- TODO(DanBurton): remove the following once the module is done.
{-# OPTIONS_GHC -fno-warn-name-shadowing -fno-warn-unused-imports #-}
module Stack.Iface where
import Data.Map (Map)
import Data.ByteString(ByteString)
import Distribution.ModuleName (ModuleName)
import Control.Monad.IO.Class
import Control.Monad.Reader
import Control.Monad.Catch
import Control.Monad.Logger
import Path
import Path.IO (fileExists)
import qualified Data.ByteString.Char8 as S8
import qualified Data.Map as Map
import qualified Data.Text as Text
import qualified Data.Text.Encoding as Text
import Data.Maybe
import Data.Monoid
import Data.Foldable (foldMap)
import Distribution.PackageDescription
import qualified Distribution.ModuleName as ModuleName
import System.Process (readProcess)
import System.FilePath (dropExtension, addExtension)
import Stack.Build.Source
import Stack.Build.Types
import Stack.Constants
import Stack.Package
import Stack.Types
--type M m env = (MonadLogger m, MonadIO m, MonadCatch m, MonadReader env m, HasEnvConfig env)
-- | The modules whose compiled interface files should be scanned for a
-- given target.
data TargetModules = TargetModules
    { targetIsExecutable :: Bool
      -- ^ Implies "Main" as a target module if True.
      -- benchmark and test targets are also executable.
    , targetExposedModules :: [ModuleName]
    , targetOtherModules :: [ModuleName]
    }
    deriving (Show)
-- | A way to obtain the (textual) contents of a @.hi@ interface file,
-- e.g. by running @ghc --show-iface@.
type ShowIface = Path Abs File -> IO ByteString

-- All of the compiled modules for a given target
-- can be found in a single directory tree.

-- | Collect the files recorded via Template Haskell's @addDependentFile@
-- in the interface files of the given target's modules.  Modules whose
-- relative path cannot be parsed, and interface files that do not exist,
-- are silently skipped.
detectFiles :: ShowIface -> Path Abs Dir -- place to find .hi files
            -> TargetModules -> IO [FilePath]
detectFiles showIface hiDir targetModules = do
    let targetFilesRel :: [FilePath] -- (Relative) FilePath representation of modules.
        targetFilesRel
          -- BUG FIX: previously the two '<>' operands were parsed as part
          -- of the 'else' branch (if-then-else extends to the end of the
          -- expression), so for executable targets only "Main" was
          -- considered and all exposed/other modules were dropped.  The
          -- parentheses restore the intent of *adding* "Main" for
          -- executables.
          = (if targetIsExecutable targetModules
               then ["Main"]
               else [])
            <> map ModuleName.toFilePath (targetExposedModules targetModules)
            <> map ModuleName.toFilePath (targetOtherModules targetModules)
    let targetHiFilesAbs :: [Path Abs File]
        targetHiFilesAbs = concatMap toHi targetFilesRel
          where
            toHi :: FilePath -> [Path Abs File]
            toHi fp = case pathHiExtMay of
                Just pathHiExt -> [hiDir </> pathHiExt]
                Nothing        -> []  -- unparsable relative path; skip (warn?)
              where
                pathHiExtMay
                  = parseRelFile
                  $ addExtension fp "hi"
    depFiles <- fmap concat $ forM targetHiFilesAbs $ \file -> do
        exists <- fileExists file
        if exists
            then do
                iface <- showIface file
                return $ findDepFiles iface
            else return []  -- interface file not built; skip (warn?)
    return depFiles
-- | Extract the file names recorded by Template Haskell's
-- @addDependentFile@ from the textual rendering of an interface file:
-- every line of the form @addDependentFile "<path>"@ yields @<path>@.
findDepFiles :: ByteString -> [FilePath]
findDepFiles bs =
    [ Text.unpack path
    | line <- Text.lines (Text.decodeUtf8 bs)
    , Just path <- [extract line]
    ]
  where
    extract = Text.stripPrefix (Text.pack "addDependentFile \"")
          >=> Text.stripSuffix (Text.pack "\"")
-- Map from Target to TargetModules
-- | Each target of the package paired with the modules to scan for it.
targetModules :: PackageDescription -> Map Target TargetModules
targetModules pDesc
    = foldMap libraryTargetModules    (library     pDesc)
   <> foldMap executableTargetModules (executables pDesc)
   <> foldMap testSuiteTargetModules  (testSuites  pDesc)
   <> foldMap benchmarkTargetModules  (benchmarks  pDesc)

-- | The library target: exposed plus other modules, not executable.
libraryTargetModules :: Library -> Map Target TargetModules
libraryTargetModules lib = Map.singleton TargetLibrary $
    TargetModules
        { targetIsExecutable   = False
        , targetExposedModules = exposedModules lib
        , targetOtherModules   = otherModules (libBuildInfo lib)
        }

-- | An executable target: implicitly @Main@, plus its other modules.
executableTargetModules :: Executable -> Map Target TargetModules
executableTargetModules exe = Map.singleton (TargetExecutable (exeName exe)) $
    TargetModules
        { targetIsExecutable   = True
        , targetExposedModules = []
        , targetOtherModules   = otherModules (buildInfo exe)
        }

-- | A test-suite target, treated like an executable.
testSuiteTargetModules :: TestSuite -> Map Target TargetModules
testSuiteTargetModules test = Map.singleton (TargetExecutable (testName test)) $
    TargetModules
        { targetIsExecutable   = True
        , targetExposedModules = []
        , targetOtherModules   = otherModules (testBuildInfo test)
        }

-- | A benchmark target, treated like an executable.
benchmarkTargetModules :: Benchmark -> Map Target TargetModules
benchmarkTargetModules bench = Map.singleton (TargetExecutable (benchmarkName bench)) $
    TargetModules
        { targetIsExecutable   = True
        , targetExposedModules = []
        , targetOtherModules   = otherModules (benchmarkBuildInfo bench)
        }
-- | Everything needed to reconstruct the directory in which a target's
-- compiled interface files live.
data CompilationContext = CompilationContext
    { ccPackageName     :: String
    , ccPackageVersion  :: Version
    , ccProjectRoot     :: Path Abs Dir
    , ccGhcVersion      :: Version
    , ccArch            :: String  -- ^ e.g. @"x86_64-linux"@
    , ccSnapshot        :: String  -- ^ e.g. @"lts-2.13"@
    , ccCabalLibVersion :: Version
    }
-- | The directory holding the @.hi@ files of the given target.
--
-- For the library this is under @.stack-work/install/…@; for an
-- executable-style target it is the @<name>-tmp@ build directory under
-- @.stack-work/dist/…@.  Fails in 'MonadThrow' if any constructed path
-- component cannot be parsed.
targetHiDir :: MonadThrow m => CompilationContext -> Target -> m (Path Abs Dir)
targetHiDir cc TargetLibrary = do
    let showGhcVer = versionString (ccGhcVersion cc)
    let showArch = ccArch cc
    let showPackageAndVersion = ccPackageName cc <> "-" <> versionString (ccPackageVersion cc)
    arch <- parseRelDir (ccArch cc)
    snapshot <- parseRelDir (ccSnapshot cc)
    ghcVer <- parseRelDir showGhcVer
    archGhc <- parseRelDir (showArch <> "-ghc-" <> showGhcVer)
    packageAndVersion <- parseRelDir showPackageAndVersion
    return $ ccProjectRoot cc </> $(mkRelDir ".stack-work/install")
        </> arch </> snapshot </> ghcVer
        </> $(mkRelDir "lib") </> archGhc </> packageAndVersion
targetHiDir cc (TargetExecutable exeName) = do
    let showCabalVersion = versionString (ccCabalLibVersion cc)
    arch <- parseRelDir (ccArch cc)
    cabalWithVer <- parseRelDir ("Cabal-" <> showCabalVersion)
    exe <- parseRelDir exeName
    exeTmp <- parseRelDir (exeName <> "-tmp")
    return $ ccProjectRoot cc </> $(mkRelDir ".stack-work/dist")
        </> arch </> cabalWithVer
        </> $(mkRelDir "build") </> exe </> exeTmp
-- | A buildable thing within the package: the library, or a named
-- executable-style component (executable, test-suite or benchmark).
data Target
    = TargetLibrary
    | TargetExecutable String
    deriving (Eq, Ord, Show)
-- | Ad-hoc manual test driver: run the dependent-file detection against a
-- hard-coded local project using @ghc --show-iface@.  (Developer scratch
-- code — the absolute paths only exist on the original author's machine.)
sampleRun :: IO ()
sampleRun = do
    let showIface arg = do
            str <- readProcess "ghc" ["--show-iface", toFilePath arg] ""
            return $ S8.pack str
    --let hiDir =
    --      -- $(mkAbsDir "/home/dan/dep-file-test/.stack-work/install/x86_64-linux/lts-2.13/7.8.4/lib/x86_64-linux-ghc-7.8.4/dep-file-test-0.1.0.0")
    --      $(mkAbsDir "/home/dan/dep-file-test/.stack-work/dist/x86_64-linux/Cabal-1.18.1.5/build/dep-file-test/dep-file-test-tmp")
    sampleProjectRoot <- parseAbsDir "/home/dan/dep-file-test"
    let ctx = CompilationContext
            { ccPackageName     = "dep-file-test"
            , ccPackageVersion  = $(mkVersion "0.1.0.0")
            , ccProjectRoot     = sampleProjectRoot
            , ccGhcVersion      = $(mkVersion "7.8.4")
            , ccArch            = "x86_64-linux"
            , ccSnapshot        = "lts-2.13"
            , ccCabalLibVersion = $(mkVersion "1.18.1.5")
            }
    hiDir <- targetHiDir ctx (TargetExecutable "dep-file-test")
    let targetModules = TargetModules
            { targetIsExecutable   = True
            , targetExposedModules = []
            , targetOtherModules   = []
            }
    files <- detectFiles showIface hiDir targetModules
    mapM_ print files
--iface :: M m env => m ()
--iface = do
-- let print' :: (Show a, MonadIO m) => a -> m ()
-- print' = liftIO . print
-- localInstallRoot <- installationRootLocal
-- print' localInstallRoot
-- dist <- distRelativeDir
-- print' dist
-- (lps, _, _) <- loadLocals defaultBuildOpts Map.empty
-- forM_ lps $ \lp -> do
-- print' $ packageName $ lpPackage lp
-- return ()
| wskplho/stack | src/Stack/Iface.hs | bsd-3-clause | 7,791 | 0 | 16 | 1,824 | 1,674 | 881 | 793 | 155 | 4 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE RankNTypes #-}
-- | Convert a stream of blaze-builder @Builder@s into a stream of @ByteString@s.
--
-- Adapted from blaze-builder-enumerator, written by myself and Simon Meier.
--
-- Note: if you have blaze-builder >= 0.4, 'newBlazeRecv' just calls
-- 'Data.Streaming.ByteString.Builder.newByteStringBuilderRecv'
-- Note that the functions here can work in any monad built on top of @IO@ or
-- @ST@.
module Data.Streaming.Blaze
( BlazeRecv
, BlazePopper
, BlazeFinish
, newBlazeRecv
-- * Buffers
, Buffer
-- ** Status information
, freeSize
, sliceSize
, bufferSize
-- ** Creation and modification
, allocBuffer
, reuseBuffer
, nextSlice
-- ** Conversion to bytestings
, unsafeFreezeBuffer
, unsafeFreezeNonEmptyBuffer
-- * Buffer allocation strategies
, BufferAllocStrategy
, allNewBuffersStrategy
, reuseBufferStrategy
, defaultStrategy
) where
import Blaze.ByteString.Builder
import qualified Data.ByteString as S
#if MIN_VERSION_blaze_builder(0,4,0)
import Data.Streaming.ByteString.Builder
-- | With blaze-builder >= 0.4 the implementation is shared with
-- "Data.Streaming.ByteString.Builder".
newBlazeRecv :: BufferAllocStrategy -> IO (BlazeRecv, BlazeFinish)
newBlazeRecv = newByteStringBuilderRecv
{-# INLINE newBlazeRecv #-}
#else /* !MIN_VERSION_blaze_builder(0,4,0) */
import Blaze.ByteString.Builder.Internal hiding (insertByteString)
import Blaze.ByteString.Builder.Internal.Types hiding (insertByteString)
import Blaze.ByteString.Builder.Internal.Buffer (execBuildStep)
import Data.IORef
import Data.Streaming.ByteString.Builder.Buffer
-- | Allocate the pair of: a function that accepts 'Builder's, and a
-- finishing action that yields whatever is still buffered at the end of
-- the stream.
--
-- The current buffer and the current build step live in 'IORef's; the
-- popper returned for each pushed 'Builder' yields completed chunks one
-- 'S.ByteString' at a time, and an empty 'S.ByteString' once that
-- builder is exhausted.
newBlazeRecv :: BufferAllocStrategy -> IO (BlazeRecv, BlazeFinish)
newBlazeRecv (ioBufInit, nextBuf) = do
    refBuf <- newIORef ioBufInit
    return (push refBuf, finish refBuf)
  where
    -- Freeze whatever remains in the current buffer, if non-empty.
    finish refBuf = do
        ioBuf <- readIORef refBuf
        buf <- ioBuf
        return $ unsafeFreezeNonEmptyBuffer buf

    -- Feed one Builder; the returned popper streams out its chunks.
    push refBuf builder = do
        refStep <- newIORef $ Left $ unBuilder builder (buildStep finalStep)
        return $ popper refBuf refStep
      where
        finalStep !(BufRange pf _) = return $ Done pf ()

        popper refBuf refStep = do
            ioBuf <- readIORef refBuf
            ebStep <- readIORef refStep
            case ebStep of
                -- Left: a build step still to run against the buffer.
                Left bStep -> do
                    !buf <- ioBuf
                    signal <- execBuildStep bStep buf
                    case signal of
                        -- Builder finished: record the new end of slice and
                        -- signal exhaustion with an empty chunk.
                        Done op' _ -> do
                            writeIORef refBuf $ return $ updateEndOfSlice buf op'
                            return S.empty
                        -- Buffer ran out of room: emit the filled part (if
                        -- any) and continue with a fresh buffer.
                        BufferFull minSize op' bStep' -> do
                            let buf' = updateEndOfSlice buf op'
                                {-# INLINE cont #-}
                                cont mbs = do
                                    -- sequencing the computation of the next buffer
                                    -- construction here ensures that the reference to the
                                    -- foreign pointer `fp` is lost as soon as possible.
                                    ioBuf' <- nextBuf minSize buf'
                                    writeIORef refBuf ioBuf'
                                    writeIORef refStep $ Left bStep'
                                    case mbs of
                                        Just bs | not $ S.null bs -> return bs
                                        _ -> popper refBuf refStep
                            cont $ unsafeFreezeNonEmptyBuffer buf'
                        -- A ByteString is inserted directly: first flush the
                        -- buffered bytes (if any), then yield the inserted
                        -- string on the next pop (stored as a Right action).
                        InsertByteString op' bs bStep' -> do
                            let buf' = updateEndOfSlice buf op'
                            let yieldBS = do
                                    nextBuf 1 buf' >>= writeIORef refBuf
                                    writeIORef refStep $ Left bStep'
                                    if S.null bs
                                        then popper refBuf refStep
                                        else return bs
                            case unsafeFreezeNonEmptyBuffer buf' of
                                Nothing -> yieldBS
                                Just bs' -> do
                                    writeIORef refStep $ Right yieldBS
                                    return bs'
                -- Right: a pending action stored by the InsertByteString case.
                Right action -> action
{-
helper :: (MonadBase base m, PrimMonad base, Monad (t m), MonadTrans t)
=> t m (Maybe (Flush Builder))
-> (Flush S.ByteString -> t m ())
-> BufferAllocStrategy
-> t m ()
helper await' yield' (ioBufInit, nextBuf) =
loop ioBufInit
where
loop ioBuf = do
await' >>= maybe (close ioBuf) (cont' ioBuf)
cont' ioBuf Flush = push ioBuf flush $ \ioBuf' -> yield' Flush >> loop ioBuf'
cont' ioBuf (Chunk builder) = push ioBuf builder loop
close ioBuf = do
buf <- lift $ unsafeLiftIO $ ioBuf
maybe (return ()) (yield' . Chunk) (unsafeFreezeNonEmptyBuffer buf)
-}
#endif /* !MIN_VERSION_blaze_builder(0,4,0) */
-- | Provides a series of @ByteString@s until empty, at which point it provides
-- an empty @ByteString@.
--
-- Since 0.1.2
type BlazePopper = IO S.ByteString

-- | Accept a 'Builder', returning a popper for its output chunks.
type BlazeRecv = Builder -> IO BlazePopper

-- | Yield any bytes still buffered at end of stream ('Nothing' if none).
type BlazeFinish = IO (Maybe S.ByteString)
| phadej/streaming-commons | Data/Streaming/Blaze.hs | mit | 5,129 | 0 | 8 | 1,767 | 170 | 115 | 55 | 80 | 7 |
module TRS.CL where
-- $Id$
import TRS.System
import TRS.Symbol
-- | A term rewriting system over explicit application @a@ containing the
-- single reduction rule of the S combinator:
-- @S x y z -> x z (y z)@ (parsed from its string form).
cls :: System Symbol
cls = [ read "a (a (a (S, x), y), z) -> a (a (x, z), a (y, z))" ]
| Erdwolf/autotool-bonn | src/TRS/CL.hs | gpl-2.0 | 157 | 0 | 6 | 39 | 36 | 21 | 15 | 5 | 1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="sk-SK">
<title>Automation Framework</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | kingthorin/zap-extensions | addOns/automation/src/main/javahelp/org/zaproxy/addon/automation/resources/help_sk_SK/helpset_sk_SK.hs | apache-2.0 | 965 | 77 | 66 | 156 | 407 | 206 | 201 | -1 | -1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="ro-RO">
<title>DOM XSS Active Scan Rule | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | thc202/zap-extensions | addOns/domxss/src/main/javahelp/org/zaproxy/zap/extension/domxss/resources/help_ro_RO/helpset_ro_RO.hs | apache-2.0 | 985 | 78 | 66 | 162 | 419 | 212 | 207 | -1 | -1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="ko-KR">
<title>Linux WebDrivers</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | thc202/zap-extensions | addOns/webdrivers/webdriverlinux/src/main/javahelp/org/zaproxy/zap/extension/webdriverlinux/resources/help_ko_KR/helpset_ko_KR.hs | apache-2.0 | 961 | 77 | 66 | 156 | 407 | 206 | 201 | -1 | -1 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE NoImplicitPrelude #-}
-----------------------------------------------------------------------------
-- |
-- Module : Text.ParserCombinators.ReadPrec
-- Copyright : (c) The University of Glasgow 2002
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : libraries@haskell.org
-- Stability : provisional
-- Portability : non-portable (uses Text.ParserCombinators.ReadP)
--
-- This library defines parser combinators for precedence parsing.
-----------------------------------------------------------------------------
module Text.ParserCombinators.ReadPrec
(
ReadPrec,
-- * Precedences
Prec,
minPrec,
-- * Precedence operations
lift,
prec,
step,
reset,
-- * Other operations
-- | All are based directly on their similarly-named 'ReadP' counterparts.
get,
look,
(+++),
(<++),
pfail,
choice,
-- * Converters
readPrec_to_P,
readP_to_Prec,
readPrec_to_S,
readS_to_Prec,
)
where
import Text.ParserCombinators.ReadP
( ReadP
, ReadS
, readP_to_S
, readS_to_P
)
import qualified Text.ParserCombinators.ReadP as ReadP
( get
, look
, (+++), (<++)
, pfail
)
import GHC.Num( Num(..) )
import GHC.Base
import qualified Control.Monad.Fail as MonadFail
-- ---------------------------------------------------------------------------
-- The readPrec type
-- | A parser for values of type @a@, indexed by the current
-- precedence context ('Prec').  Wraps a 'ReadP' parser that is
-- handed the precedence it runs at.
newtype ReadPrec a = P (Prec -> ReadP a)

-- Functor, Monad, MonadPlus
instance Functor ReadPrec where
  -- map over the result of the underlying ReadP, same precedence
  fmap h (P f) = P (\n -> fmap h (f n))

instance Applicative ReadPrec where
  -- a pure value ignores the precedence context entirely
  pure x = P (\_ -> pure x)
  (<*>) = ap

instance Monad ReadPrec where
  -- NOTE: 'fail' as a Monad method is the pre-MonadFail base style;
  -- kept for compatibility with the GHC version this file targets.
  fail s = P (\_ -> fail s)
  -- both sides of (>>=) run in the same precedence context n
  P f >>= k = P (\n -> do a <- f n; let P f' = k a in f' n)

instance MonadFail.MonadFail ReadPrec where
  fail s = P (\_ -> MonadFail.fail s)

instance MonadPlus ReadPrec

instance Alternative ReadPrec where
  empty = pfail
  (<|>) = (+++)

-- precedences

-- | Precedence levels are plain 'Int's; see 'step' (increments the
-- context) and 'prec' (gates a parser on the context).
type Prec = Int

-- | The lowest precedence, used as the initial context (see 'reset').
minPrec :: Prec
minPrec = 0
-- ---------------------------------------------------------------------------
-- Operations over ReadPrec
-- | Lift a precedence-insensitive 'ReadP' to a 'ReadPrec': the
-- resulting parser ignores whatever context it runs in.
lift :: ReadP a -> ReadPrec a
lift p = P (const p)

-- | Increases the precedence context by one.
step :: ReadPrec a -> ReadPrec a
step (P p) = P (\ctx -> p (ctx + 1))

-- | Resets the precedence context to zero ('minPrec').
reset :: ReadPrec a -> ReadPrec a
reset (P p) = P (const (p minPrec))

-- | @(prec n p)@ checks whether the precedence context is
-- less than or equal to @n@, and
--
-- * if not, fails
--
-- * if so, parses @p@ in context @n@.
prec :: Prec -> ReadPrec a -> ReadPrec a
prec n (P p) = P run
  where
    run ctx
      | ctx <= n  = p n
      | otherwise = ReadP.pfail
-- ---------------------------------------------------------------------------
-- Derived operations
get :: ReadPrec Char
-- ^ Consumes and returns the next character.
--   Fails if there is no input left.
--   (Precedence-insensitive: a plain lift of 'ReadP.get'.)
get = lift ReadP.get

look :: ReadPrec String
-- ^ Look-ahead: returns the part of the input that is left, without
--   consuming it.
look = lift ReadP.look

(+++) :: ReadPrec a -> ReadPrec a -> ReadPrec a
-- ^ Symmetric choice.  Both alternatives run in the same precedence
--   context.
P f1 +++ P f2 = P (\n -> f1 n ReadP.+++ f2 n)

(<++) :: ReadPrec a -> ReadPrec a -> ReadPrec a
-- ^ Local, exclusive, left-biased choice: If left parser
--   locally produces any result at all, then right parser is
--   not used.
P f1 <++ P f2 = P (\n -> f1 n ReadP.<++ f2 n)

pfail :: ReadPrec a
-- ^ Always fails.
pfail = lift ReadP.pfail
-- | Combines all parsers in the specified list with symmetric
-- choice; an empty list is equivalent to 'pfail'.
choice :: [ReadPrec a] -> ReadPrec a
choice = foldr (+++) pfail
-- ---------------------------------------------------------------------------
-- Converting between ReadPrec and Read
-- | Expose the underlying precedence-indexed 'ReadP' parser.
readPrec_to_P :: ReadPrec a -> (Int -> ReadP a)
readPrec_to_P (P p) = p

-- | Wrap a precedence-indexed 'ReadP' parser as a 'ReadPrec'.
readP_to_Prec :: (Int -> ReadP a) -> ReadPrec a
readP_to_Prec = P

-- | Run a 'ReadPrec' at the given precedence, yielding a 'ReadS'.
readPrec_to_S :: ReadPrec a -> (Int -> ReadS a)
readPrec_to_S (P p) n = readP_to_S (p n)

-- | Build a 'ReadPrec' from a precedence-indexed 'ReadS'.
readS_to_Prec :: (Int -> ReadS a) -> ReadPrec a
readS_to_Prec f = P (readS_to_P . f)
| oldmanmike/ghc | libraries/base/Text/ParserCombinators/ReadPrec.hs | bsd-3-clause | 4,058 | 0 | 15 | 806 | 1,035 | 568 | 467 | 80 | 2 |
-- Regression test exercising numeric type-class defaulting: both
-- bindings deliberately carry no type signatures, so 'x' and 'y'
-- are resolved by the default rules.  NOTE(review): do not add
-- signatures here -- that would change what the test exercises.
x = 2^62
y = pi
main :: IO ()
main = do print x
          print y
-----------------------------------------------------------------------------
-- |
-- Module : Text.Parsec.Perm
-- Copyright : (c) Daan Leijen 1999-2001, (c) Paolo Martini 2007
-- License : BSD-style (see the file libraries/parsec/LICENSE)
--
-- Maintainer : derek.a.elkins@gmail.com
-- Stability : provisional
-- Portability : non-portable (uses existentially quantified data constructors)
--
-- This module implements permutation parsers. The algorithm used
-- is fairly complex since we push the type system to its limits :-)
-- The algorithm is described in:
--
-- /Parsing Permutation Phrases,/
-- by Arthur Baars, Andres Loh and Doaitse Swierstra.
-- Published as a functional pearl at the Haskell Workshop 2001.
--
-----------------------------------------------------------------------------
{-# LANGUAGE ExistentialQuantification #-}
module Text.Parsec.Perm
( PermParser
, StreamPermParser -- abstract
, permute
, (<||>), (<$$>)
, (<|?>), (<$?>)
) where
import Text.Parsec
import Control.Monad.Identity
infixl 1 <||>, <|?>
infixl 2 <$$>, <$?>
{---------------------------------------------------------------
test -- parse a permutation of
* an optional string of 'a's
* a required 'b'
* an optional 'c'
---------------------------------------------------------------}
{-
test input
= parse (do{ x <- ptest; eof; return x }) "" input
ptest :: Parser (String,Char,Char)
ptest
= permute $
(,,) <$?> ("",many1 (char 'a'))
<||> char 'b'
<|?> ('_',char 'c')
-}
{---------------------------------------------------------------
Building a permutation parser
---------------------------------------------------------------}
-- | The expression @perm \<||> p@ adds parser @p@ to the permutation
-- parser @perm@. The parser @p@ is not allowed to accept empty input -
-- use the optional combinator ('<|?>') instead. Returns a
-- new permutation parser that includes @p@.
-- | @perm \<||> p@ extends the permutation parser @perm@ with the
-- mandatory component @p@ (use ('<|?>') for optional components).
-- Just a synonym for 'add'.
(<||>) :: (Stream s Identity tok) => StreamPermParser s st (a -> b) -> Parsec s st a -> StreamPermParser s st b
(<||>) = add
-- | The expression @f \<$$> p@ creates a fresh permutation parser
-- consisting of parser @p@. The final result of the permutation
-- parser is the function @f@ applied to the return value of @p@. The
-- parser @p@ is not allowed to accept empty input - use the optional
-- combinator ('<$?>') instead.
--
-- If the function @f@ takes more than one parameter, the type variable
-- @b@ is instantiated to a functional type which combines nicely with
-- the adds parser @p@ to the ('<||>') combinator. This
-- results in stylized code where a permutation parser starts with a
-- combining function @f@ followed by the parsers. The function @f@
-- gets its parameters in the order in which the parsers are specified,
-- but actual input can be in any order.
-- | @f \<$$> p@ starts a fresh permutation parser with mandatory
-- component @p@; @f@ combines the component results in declaration
-- order.  Equivalent to @newperm f \<||> p@.
(<$$>) :: (Stream s Identity tok) => (a -> b) -> Parsec s st a -> StreamPermParser s st b
f <$$> p = newperm f <||> p
-- | The expression @perm \<|?> (x,p)@ adds parser @p@ to the
-- permutation parser @perm@. The parser @p@ is optional - if it can
-- not be applied, the default value @x@ will be used instead. Returns
-- a new permutation parser that includes the optional parser @p@.
-- | @perm \<|?> (x,p)@ extends @perm@ with the optional component
-- @p@; when @p@ cannot be applied the default @x@ is used instead.
-- Just a curried synonym for 'addopt'.
(<|?>) :: (Stream s Identity tok) => StreamPermParser s st (a -> b) -> (a, Parsec s st a) -> StreamPermParser s st b
perm <|?> (x, p) = addopt perm x p
-- | The expression @f \<$?> (x,p)@ creates a fresh permutation parser
-- consisting of parser @p@. The final result of the permutation
-- parser is the function @f@ applied to the return value of @p@. The
-- parser @p@ is optional - if it can not be applied, the default value
-- @x@ will be used instead.
-- | @f \<$?> (x,p)@ starts a fresh permutation parser with the
-- optional component @p@ (default @x@); @f@ combines the results.
(<$?>) :: (Stream s Identity tok) => (a -> b) -> (a, Parsec s st a) -> StreamPermParser s st b
f <$?> (x, p) = newperm f <|?> (x, p)
{---------------------------------------------------------------
The permutation tree
---------------------------------------------------------------}
-- | Provided for backwards compatibility. The tok type is ignored.
type PermParser tok st a = StreamPermParser String st a
-- | The type @StreamPermParser s st a@ denotes a permutation parser that,
-- when converted by the 'permute' function, parses
-- @s@ streams with user state @st@ and returns a value of
-- type @a@ on success.
--
-- Normally, a permutation parser is first built with special operators
-- like ('<||>') and then transformed into a normal parser
-- using 'permute'.
-- | A permutation tree: an optional default result (present only
-- while every component added so far is optional) plus one branch
-- per component that could be parsed first.
data StreamPermParser s st a = Perm (Maybe a) [StreamBranch s st a]
-- type Branch st a = StreamBranch String st a
-- | One branch: a parser for some component type @b@ (existentially
-- hidden) paired with the rest of the permutation, which still
-- expects that @b@.
data StreamBranch s st a = forall b. Branch (StreamPermParser s st (b -> a)) (Parsec s st b)
-- | The parser @permute perm@ parses a permutation of parser described
-- by @perm@. For example, suppose we want to parse a permutation of:
-- an optional string of @a@'s, the character @b@ and an optional @c@.
-- This can be described by:
--
-- > test = permute (tuple <$?> ("",many1 (char 'a'))
-- > <||> char 'b'
-- > <|?> ('_',char 'c'))
-- > where
-- > tuple a b c = (a,b,c)
-- transform a permutation tree into a normal parser
-- | Convert a permutation tree into an ordinary parser: try each
-- branch in turn, and fall back on the default result (if the tree
-- has one) when no branch applies.
permute :: (Stream s Identity tok) => StreamPermParser s st a -> Parsec s st a
permute (Perm def branches) =
    choice (map run branches ++ defaults)
  where
    -- succeed with the default value when every component is optional
    defaults = maybe [] (\d -> [return d]) def
    -- parse one component, then the remainder of the permutation
    run (Branch rest p) = do
      v <- p
      f <- permute rest
      return (f v)
-- build permutation trees
-- | Build a permutation tree holding only the combining function
-- and no branches yet.
newperm :: (Stream s Identity tok) => (a -> b) -> StreamPermParser s st (a -> b)
newperm f
    = Perm (Just f) []

-- | Add a mandatory component @p@.  The resulting tree has no
-- default (@Perm Nothing@): because @p@ is required, the whole
-- permutation can no longer succeed without consuming input.
add :: (Stream s Identity tok) => StreamPermParser s st (a -> b) -> Parsec s st a -> StreamPermParser s st b
add perm@(Perm _mf fs) p
    = Perm Nothing (first:map insert fs)
    where
      -- the branch that parses p first, then the old tree
      first = Branch perm p
      -- push p into every existing branch; 'flip' swaps the argument
      -- order of the combining function so that results are applied
      -- in declaration order even when input order differs
      insert (Branch perm' p')
        = Branch (add (mapPerms flip perm') p) p'

-- | Add an optional component @p@ with default value @x@.  The
-- tree's default (if any) is extended by applying it to @x@.
addopt :: (Stream s Identity tok) => StreamPermParser s st (a -> b) -> a -> Parsec s st a -> StreamPermParser s st b
addopt perm@(Perm mf fs) x p
    = Perm (fmap ($ x) mf) (first:map insert fs)
    where
      first = Branch perm p
      -- same flip trick as in 'add', threading the default through
      insert (Branch perm' p')
        = Branch (addopt (mapPerms flip perm') x p) p'

-- | Map a function over the final result of every alternative in
-- the tree: both the default and all branches.
mapPerms :: (Stream s Identity tok) => (a -> b) -> StreamPermParser s st a -> StreamPermParser s st b
mapPerms f (Perm x xs)
    = Perm (fmap f x) (map mapBranch xs)
    where
      mapBranch (Branch perm p)
        = Branch (mapPerms (f.) perm) p
| maurer/15-411-Haskell-Base-Code | src/Text/Parsec/Perm.hs | bsd-3-clause | 6,575 | 0 | 11 | 1,413 | 1,181 | 659 | 522 | -1 | -1 |
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Actions.WithAll
-- License : BSD3-style (see LICENSE)
-- Stability : unstable
-- Portability : unportable
--
-- Provides functions for performing a given action on all windows of
-- the current workspace.
-----------------------------------------------------------------------------
module XMonad.Actions.WithAll (
-- * Usage
-- $usage
sinkAll, withAll,
withAll', killAll) where
import Data.Foldable hiding (foldr)
import XMonad
import XMonad.StackSet
-- $usage
--
-- You can use this module with the following in your @~\/.xmonad\/xmonad.hs@:
--
-- > import XMonad.Actions.WithAll
--
-- then add a keybinding; for example:
--
-- , ((modm .|. shiftMask, xK_t), sinkAll)
--
-- For detailed instructions on editing your key bindings, see
-- "XMonad.Doc.Extending#Editing_key_bindings".
-- | Un-float all floating windows on the current workspace,
-- by applying 'sink' to every window via 'withAll''.
sinkAll :: X ()
sinkAll = withAll' sink
-- | Apply a function to all windows on the current workspace.
-- | Apply a pure 'WindowSet' transformation to every window on the
-- current workspace, folding it over all of the workspace's windows.
withAll' :: (Window -> WindowSet -> WindowSet) -> X ()
withAll' f = windows go
  where
    go ws = foldr f ws (integrate' (stack (workspace (current ws))))
-- | Execute an 'X' action for each window on the current workspace.
-- | Execute an 'X' action for each window on the current workspace.
withAll :: (Window -> X ()) -> X()
withAll f = withWindowSet run
  where
    run ws = forM_ (integrate' (stack (workspace (current ws)))) f
-- | Kill all the windows on the current workspace, by running
-- 'killWindow' on each one via 'withAll'.
killAll :: X()
killAll = withAll killWindow
{-# LANGUAGE StandaloneDeriving, DeriveGeneric #-}
module SizedSeq
( SizedSeq(..)
, emptySS
, addToSS
, addListToSS
, ssElts
, sizeSS
) where
import Control.DeepSeq
import Data.Binary
import Data.List
import GHC.Generics
-- | A list that tracks its own length.  Elements are stored in
-- reverse insertion order so 'addToSS' is O(1); 'ssElts' reverses
-- them back.  The length field is strict and unpacked.
data SizedSeq a = SizedSeq {-# UNPACK #-} !Word [a]
  deriving (Generic, Show)

instance Functor SizedSeq where
  -- mapping preserves both the count and the internal ordering
  fmap f (SizedSeq sz l) = SizedSeq sz (fmap f l)

instance Foldable SizedSeq where
  -- fold in insertion order ('ssElts' reverses the internal list)
  foldr f c ss = foldr f c (ssElts ss)

instance Traversable SizedSeq where
  -- traverse in insertion order, then restore the reversed layout
  traverse f (SizedSeq sz l) = SizedSeq sz . reverse <$> traverse f (reverse l)

-- serialisation derived generically from the structure
instance Binary a => Binary (SizedSeq a)

instance NFData a => NFData (SizedSeq a) where
  -- count is already strict; only the spine/elements need forcing
  rnf (SizedSeq _ xs) = rnf xs
-- | The sequence with no elements.
emptySS :: SizedSeq a
emptySS = SizedSeq 0 []

-- | Add one element at the end of the sequence.  O(1): the element
-- is consed onto the reversed internal list.
addToSS :: SizedSeq a -> a -> SizedSeq a
addToSS (SizedSeq count rear) el = SizedSeq (count + 1) (el : rear)

-- | Append a whole list, preserving its order.
addListToSS :: SizedSeq a -> [a] -> SizedSeq a
addListToSS (SizedSeq count rear) els =
  SizedSeq (count + genericLength els) (reverse els ++ rear)

-- | The elements, in insertion order.
ssElts :: SizedSeq a -> [a]
ssElts (SizedSeq _ rear) = reverse rear

-- | Number of elements; O(1) thanks to the cached count.
sizeSS :: SizedSeq a -> Word
sizeSS (SizedSeq count _) = count
| ezyang/ghc | libraries/ghci/SizedSeq.hs | bsd-3-clause | 1,116 | 0 | 9 | 226 | 460 | 235 | 225 | 34 | 1 |
module Handler (
HandlerFunc
) where
import Network.Socket
import qualified Data.ByteString.Lazy as L (ByteString, pack, unpack, take, toChunks, fromChunks)
import Data.Word
-- | Callback run for a connection: the connected socket, the peer
-- address, and a lazy ByteString payload.  NOTE(review): payload
-- semantics inferred from the types only; confirm against the
-- server loop that invokes this.
type HandlerFunc = Socket -> SockAddr -> L.ByteString -> IO ()
| stnma7e/scim_serv | src/Handler.hs | mit | 240 | 2 | 9 | 35 | 75 | 46 | 29 | 6 | 0 |
module Wobsurv.Logging where
import BasePrelude hiding (log)
import qualified Wobsurv.Interaction
import qualified System.Locale as Locale
import qualified Data.Time as Time
import qualified Wobsurv.Util.HTTP.Renderer as HTTP.Renderer
import qualified Wobsurv.Util.HTTP.Model as HTTP.Model
import qualified Wobsurv.Util.HTTP.URLEncoding as HTTP.URLEncoding
import qualified Data.ByteString as ByteString
import qualified Data.ByteString.Builder as ByteString.Builder
import qualified Data.ByteString.Lazy.Char8 as ByteString.Lazy.Char8
import qualified Data.Text.Lazy.IO as Text.Lazy.IO
import qualified Data.Text.Lazy.Encoding as Text.Lazy.Encoding
import qualified Data.Text.Lazy.Builder as Text.Lazy.Builder
-- | Write one interaction summary to stdout, prefixed with the
-- current zoned time (format @%F %X %Z@).  When the request part is
-- present, the response status is followed by the request method and
-- the URL-decoded relative URI; otherwise only the status is shown.
log :: Wobsurv.Interaction.Summary -> IO ()
log (request, status) =
  Text.Lazy.IO.putStrLn =<< do
    time <- Time.formatTime Locale.defaultTimeLocale "%F %X %Z" <$> Time.getZonedTime
    return $ Text.Lazy.Builder.toLazyText $
      case request of
        Just (method, uri) ->
          Text.Lazy.Builder.fromString time <>
          Text.Lazy.Builder.fromString ": " <>
          (liftBSB $ HTTP.Renderer.status status) <>
          Text.Lazy.Builder.fromString " <-- " <>
          (liftBSB $ HTTP.Renderer.method method) <>
          Text.Lazy.Builder.singleton ' ' <>
          (Text.Lazy.Builder.fromText $ HTTP.URLEncoding.toText $ HTTP.Renderer.toByteString $ HTTP.Renderer.relativeURI uri)
        Nothing ->
          Text.Lazy.Builder.fromString time <>
          Text.Lazy.Builder.fromString ": " <>
          (liftBSB $ HTTP.Renderer.status status)
  where
    -- bridge a bytestring Builder into a text Builder, decoding as
    -- Latin-1 (HTTP header bytes are treated as single-byte chars)
    liftBSB =
      Text.Lazy.Builder.fromLazyText . Text.Lazy.Encoding.decodeLatin1 . ByteString.Builder.toLazyByteString
-- | Produce a wrapper around 'log' that serialises concurrent calls
-- through an 'MVar' lock, so interleaved output lines cannot mix.
newSynchronizedLogger :: IO (Wobsurv.Interaction.Summary -> IO ())
newSynchronizedLogger = do
  lock <- newMVar ()
  return (\summary -> withMVar lock (\_ -> log summary))
| nikita-volkov/wobsurv | library/Wobsurv/Logging.hs | mit | 1,986 | 0 | 20 | 431 | 481 | 287 | 194 | 39 | 2 |
-------------------------------------------------------------------------
-- Copyright (c) 2007-2011, 2012 ETH Zurich.
-- All rights reserved.
--
-- This file is distributed under the terms in the attached LICENSE file.
-- If you do not find this file, copies can be found by writing to:
-- ETH Zurich D-INFK CAB F.78, Universitaetstr 6, CH-8092 Zurich.
-- Attn: Systems Group.
--
-- Basic Hake rule definitions and combinators
--
--------------------------------------------------------------------------
module RuleDefs where
import Data.List (intersect, isSuffixOf, union, (\\), nub, sortBy, elemIndex)
import Path
import qualified X86_64
import qualified K1om
import qualified X86_32
import qualified SCC
import qualified ARMv5
import qualified ARM11MP
import qualified XScale
import qualified ARMv7
import qualified ARMv7_M
import HakeTypes
import qualified Args
import qualified Config
import Debug.Trace
-- enable debug spew
-- should we move this to Config.hs? -AB
debugFlag = False
--
-- Is a token to be displayed in a rule?
--
-- | Should this token appear in the emitted rule text?  Dependency
-- and target bookkeeping tokens are filtered out; everything else
-- is rendered.
inRule :: RuleToken -> Bool
inRule tok = case tok of
  Dep _ _ _    -> False
  PreDep _ _ _ -> False
  Target _ _   -> False
  _            -> True
--
-- Look for a set of files: this is called using the "find" combinator
--
-- | Basenames of files in @af@ that live in the same directory as
-- @tf@ and end with the given suffix.
withSuffix :: [String] -> String -> String -> [String]
withSuffix af tf suffix =
    [ basename f | f <- af, f `isInSameDirAs` tf, suffix `isSuffixOf` f ]

-- | As 'withSuffix', but collecting matches for several suffixes.
withSuffices :: [String] -> String -> [String] -> [String]
withSuffices af tf = concatMap (withSuffix af tf)
--
-- Find files with a given suffix in a given dir
--
-- | Files with the given suffix in directory @dir@, resolved
-- relative to the target file @tf@ when @dir@ is not absolute.
-- NOTE(review): 'head dir' and 'head tf' are partial -- an empty
-- @dir@ or @tf@ crashes here; callers appear to always pass
-- non-empty paths.  TODO confirm.
inDir :: [String] -> String -> String -> String -> [String]
inDir af tf dir suffix =
    -- Dummy is here so that we can find files in the same dir :-/
    let subdir = (if head dir == '/' then absdir else reldir) ./. "dummy"
        absdir = if head tf == '/' then dir else '.':dir
        reldir = (dirname tf) ./. dir
        files = withSuffix af subdir suffix
    in
        [ dir ./. f | f <- files ]

-- | All C (.c) sources in @dir@.
cInDir :: [String] -> String -> String -> [String]
cInDir af tf dir = inDir af tf dir ".c"

-- | All C++ sources (.cpp and .cc) in @dir@.
cxxInDir :: [String] -> String -> String -> [String]
cxxInDir af tf dir = (inDir af tf dir ".cpp") ++ (inDir af tf dir ".cc")

-- | All assembler (.S) sources in @dir@.
sInDir :: [String] -> String -> String -> [String]
sInDir af tf dir = inDir af tf dir ".S"
-------------------------------------------------------------------------
--
-- Architecture specific definitions
--
-------------------------------------------------------------------------
options :: String -> Options
options "x86_64" = X86_64.options
options "k1om" = K1om.options
options "x86_32" = X86_32.options
options "scc" = SCC.options
options "armv5" = ARMv5.options
options "arm11mp" = ARM11MP.options
options "xscale" = XScale.options
options "armv7" = ARMv7.options
options "armv7-m" = ARMv7_M.options
kernelCFlags "x86_64" = X86_64.kernelCFlags
kernelCFlags "k1om" = K1om.kernelCFlags
kernelCFlags "x86_32" = X86_32.kernelCFlags
kernelCFlags "scc" = SCC.kernelCFlags
kernelCFlags "armv5" = ARMv5.kernelCFlags
kernelCFlags "arm11mp" = ARM11MP.kernelCFlags
kernelCFlags "xscale" = XScale.kernelCFlags
kernelCFlags "armv7" = ARMv7.kernelCFlags
kernelCFlags "armv7-m" = ARMv7_M.kernelCFlags
kernelLdFlags "x86_64" = X86_64.kernelLdFlags
kernelLdFlags "k1om" = K1om.kernelLdFlags
kernelLdFlags "x86_32" = X86_32.kernelLdFlags
kernelLdFlags "scc" = SCC.kernelLdFlags
kernelLdFlags "armv5" = ARMv5.kernelLdFlags
kernelLdFlags "arm11mp" = ARM11MP.kernelLdFlags
kernelLdFlags "xscale" = XScale.kernelLdFlags
kernelLdFlags "armv7" = ARMv7.kernelLdFlags
kernelLdFlags "armv7-m" = ARMv7_M.kernelLdFlags
archFamily :: String -> String
archFamily arch = optArchFamily (options arch)
-------------------------------------------------------------------------
--
-- Options for compiling the kernel, which is special
--
-------------------------------------------------------------------------
kernelIncludes arch = [ NoDep BuildTree arch f | f <- [
"/include" ]]
++
[ NoDep SrcTree "src" f | f <- [
"/kernel/include/arch" ./. arch,
"/kernel/include/arch" ./. archFamily arch,
"/kernel/include",
"/include",
"/include/arch" ./. archFamily arch,
Config.libcInc,
"/include/c",
"/include/target" ./. archFamily arch]]
kernelOptions arch = Options {
optArch = arch,
optArchFamily = archFamily arch,
optFlags = kernelCFlags arch,
optCxxFlags = [],
optDefines = (optDefines (options arch)) ++ [ Str "-DIN_KERNEL",
Str ("-DCONFIG_SCHEDULER_" ++ (show Config.scheduler)),
Str ("-DCONFIG_TIMESLICE=" ++ (show Config.timeslice)) ],
optIncludes = kernelIncludes arch,
optDependencies =
[ Dep InstallTree arch "/include/errors/errno.h",
Dep InstallTree arch "/include/barrelfish_kpi/capbits.h",
Dep InstallTree arch "/include/asmoffsets.h",
Dep InstallTree arch "/include/trace_definitions/trace_defs.h" ],
optLdFlags = kernelLdFlags arch,
optLdCxxFlags = [],
optLibs = [],
optCxxLibs = [],
optSuffix = [],
optInterconnectDrivers = [],
optFlounderBackends = [],
extraFlags = [],
extraCxxFlags = [],
extraDefines = [],
extraIncludes = [],
extraDependencies = [],
extraLdFlags = []
}
-------------------------------------------------------------------------
--
-- IMPORTANT: This section contains extraction of functions from the
-- relevant architecture module. The names and types should be
-- exactly the same as in the architecture.hs file. This section
-- should not contain any logic; ony architecture extraction.
--
--------------------------------------------------------------------------
--
-- First, the default C compiler for an architecture
--
cCompiler :: Options -> String -> String -> String -> [ RuleToken ]
cCompiler opts phase src obj
| optArch opts == "x86_64" = X86_64.cCompiler opts phase src obj
| optArch opts == "k1om" = K1om.cCompiler opts phase src obj
| optArch opts == "x86_32" = X86_32.cCompiler opts phase src obj
| optArch opts == "scc" = SCC.cCompiler opts phase src obj
| optArch opts == "armv5" = ARMv5.cCompiler opts phase src obj
| optArch opts == "arm11mp" = ARM11MP.cCompiler opts phase src obj
| optArch opts == "xscale" = XScale.cCompiler opts phase src obj
| optArch opts == "armv7" = ARMv7.cCompiler opts phase src obj
| optArch opts == "armv7-m" = ARMv7_M.cCompiler opts phase src obj
| otherwise = [ ErrorMsg ("no C compiler for " ++ (optArch opts)) ]
-- | Run the C preprocessor for an architecture.
-- NOTE(review): stub -- no architecture is dispatched here (unlike
-- 'cCompiler' above), so every call yields an error token.
cPreprocessor :: Options -> String -> String -> String -> [ RuleToken ]
cPreprocessor opts phase src obj
    | otherwise = [ ErrorMsg ("no C preprocessor for " ++ (optArch opts)) ]
--
-- C++ compiler, where supported
--
cxxCompiler :: Options -> String -> String -> String -> [ RuleToken ]
cxxCompiler opts phase src obj
| optArch opts == "x86_64" = X86_64.cxxCompiler opts phase src obj
| optArch opts == "k1om" = K1om.cxxCompiler opts phase src obj
| otherwise = [ ErrorMsg ("no C++ compiler for " ++ (optArch opts)) ]
--
-- makeDepend step; note that obj can be whatever the intended output is
--
makeDepend :: Options -> String -> String -> String -> String -> [ RuleToken ]
makeDepend opts phase src obj depfile
| optArch opts == "x86_64" =
X86_64.makeDepend opts phase src obj depfile
| optArch opts == "k1om" =
K1om.makeDepend opts phase src obj depfile
| optArch opts == "x86_32" =
X86_32.makeDepend opts phase src obj depfile
| optArch opts == "scc" =
SCC.makeDepend opts phase src obj depfile
| optArch opts == "armv5" =
ARMv5.makeDepend opts phase src obj depfile
| optArch opts == "arm11mp" =
ARM11MP.makeDepend opts phase src obj depfile
| optArch opts == "xscale" =
XScale.makeDepend opts phase src obj depfile
| optArch opts == "armv7" =
ARMv7.makeDepend opts phase src obj depfile
| optArch opts == "armv7-m" =
ARMv7_M.makeDepend opts phase src obj depfile
| otherwise = [ ErrorMsg ("no dependency generator for " ++ (optArch opts)) ]
makeCxxDepend :: Options -> String -> String -> String -> String -> [ RuleToken ]
makeCxxDepend opts phase src obj depfile
| optArch opts == "x86_64" =
X86_64.makeCxxDepend opts phase src obj depfile
| optArch opts == "k1om" =
K1om.makeCxxDepend opts phase src obj depfile
| otherwise = [ ErrorMsg ("no C++ dependency generator for " ++ (optArch opts)) ]
cToAssembler :: Options -> String -> String -> String -> String -> [ RuleToken ]
cToAssembler opts phase src afile objdepfile
| optArch opts == "x86_64" = X86_64.cToAssembler opts phase src afile objdepfile
| optArch opts == "k1om" = K1om.cToAssembler opts phase src afile objdepfile
| optArch opts == "x86_32" = X86_32.cToAssembler opts phase src afile objdepfile
| optArch opts == "scc" = SCC.cToAssembler opts phase src afile objdepfile
| optArch opts == "armv5" = ARMv5.cToAssembler opts phase src afile objdepfile
| optArch opts == "arm11mp" = ARM11MP.cToAssembler opts phase src afile objdepfile
| optArch opts == "xscale" = XScale.cToAssembler opts phase src afile objdepfile
| optArch opts == "armv7" = ARMv7.cToAssembler opts phase src afile objdepfile
| optArch opts == "armv7-m" = ARMv7_M.cToAssembler opts phase src afile objdepfile
| otherwise = [ ErrorMsg ("no C compiler for " ++ (optArch opts)) ]
--
-- Assemble an assembly language file
--
assembler :: Options -> String -> String -> [ RuleToken ]
assembler opts src obj
| optArch opts == "x86_64" = X86_64.assembler opts src obj
| optArch opts == "k1om" = K1om.assembler opts src obj
| optArch opts == "x86_32" = X86_32.assembler opts src obj
| optArch opts == "scc" = SCC.assembler opts src obj
| optArch opts == "armv5" = ARMv5.assembler opts src obj
| optArch opts == "arm11mp" = ARM11MP.assembler opts src obj
| optArch opts == "xscale" = XScale.assembler opts src obj
| optArch opts == "armv7" = ARMv7.assembler opts src obj
| optArch opts == "armv7-m" = ARMv7_M.assembler opts src obj
| otherwise = [ ErrorMsg ("no assembler for " ++ (optArch opts)) ]
archive :: Options -> [String] -> [String] -> String -> String -> [ RuleToken ]
archive opts objs libs name libname
| optArch opts == "x86_64" = X86_64.archive opts objs libs name libname
| optArch opts == "k1om" = K1om.archive opts objs libs name libname
| optArch opts == "x86_32" = X86_32.archive opts objs libs name libname
| optArch opts == "scc" = SCC.archive opts objs libs name libname
| optArch opts == "armv5" = ARMv5.archive opts objs libs name libname
| optArch opts == "arm11mp" = ARM11MP.archive opts objs libs name libname
| optArch opts == "xscale" = XScale.archive opts objs libs name libname
| optArch opts == "armv7" = ARMv7.archive opts objs libs name libname
| optArch opts == "armv7-m" = ARMv7_M.archive opts objs libs name libname
| otherwise = [ ErrorMsg ("Can't build a library for " ++ (optArch opts)) ]
linker :: Options -> [String] -> [String] -> String -> [RuleToken]
linker opts objs libs bin
| optArch opts == "x86_64" = X86_64.linker opts objs libs bin
| optArch opts == "k1om" = K1om.linker opts objs libs bin
| optArch opts == "x86_32" = X86_32.linker opts objs libs bin
| optArch opts == "scc" = SCC.linker opts objs libs bin
| optArch opts == "armv5" = ARMv5.linker opts objs libs bin
| optArch opts == "arm11mp" = ARM11MP.linker opts objs libs bin
| optArch opts == "xscale" = XScale.linker opts objs libs bin
| optArch opts == "armv7" = ARMv7.linker opts objs libs bin
| optArch opts == "armv7-m" = ARMv7_M.linker opts objs libs bin
| otherwise = [ ErrorMsg ("Can't link executables for " ++ (optArch opts)) ]
cxxlinker :: Options -> [String] -> [String] -> String -> [RuleToken]
cxxlinker opts objs libs bin
| optArch opts == "x86_64" = X86_64.cxxlinker opts objs libs bin
| optArch opts == "k1om" = K1om.cxxlinker opts objs libs bin
| otherwise = [ ErrorMsg ("Can't link C++ executables for " ++ (optArch opts)) ]
--
-- The C compiler for compiling things on the host
--
nativeCCompiler :: String
nativeCCompiler = "$(CC)"
-------------------------------------------------------------------------
--
-- Functions to create useful filenames
--
-- | Name of the dependency file generated alongside an object file:
-- the object path with @.depend@ appended.
dependFilePath :: String -> String
dependFilePath = (++ ".depend")
-- | Object-file path for a source file, placed under the options'
-- build-suffix directory.
objectFilePath :: Options -> String -> String
objectFilePath opts src = (optSuffix opts) ./. ((removeSuffix src) ++ ".o")

-- | Object-file path for a generated source; no suffix directory.
-- ('opts' is unused but kept for signature symmetry with the above.)
generatedObjectFilePath :: Options -> String -> String
generatedObjectFilePath opts src = (removeSuffix src) ++ ".o"

-- | Preprocessed (.i) output path for a source file.
preprocessedFilePath :: Options -> String -> String
preprocessedFilePath opts src = (optSuffix opts) ./. ((removeSuffix src) ++ ".i")

-- Standard convention is that human generated assembler is .S, machine generated is .s
-- | Generated-assembler (.s) output path for a source file.
assemblerFilePath :: Options -> String -> String
assemblerFilePath opts src = (optSuffix opts) ./. ((removeSuffix src) ++ ".s")
-------------------------------------------------------------------------
--
-- Functions with logic to start doing things
--
--
-- Create C file dependencies
--
-- Since this is where we know what the depfile is called it is here that we also
-- decide to include it. This stops many different places below trying to
-- guess what the depfile is called
--
makeDependArchSub :: Options -> String -> String -> String -> String -> [ RuleToken ]
makeDependArchSub opts phase src objfile depfile =
[ Str ("@echo Generating $@"), NL ] ++
makeDepend opts phase src objfile depfile
makeDependArch :: Options -> String -> String -> String -> String -> HRule
makeDependArch opts phase src objfile depfile =
Rules [ Rule (makeDependArchSub opts phase src objfile depfile),
Include (Out (optArch opts) depfile)
]
-- Make depend for a standard object file
makeDependObj :: Options -> String -> String -> HRule
makeDependObj opts phase src =
let objfile = (objectFilePath opts src)
in
makeDependArch opts phase src objfile (dependFilePath objfile)
-- Make depend for a C++ object file
makeDependCxxArchSub :: Options -> String -> String -> String -> String -> [ RuleToken ]
makeDependCxxArchSub opts phase src objfile depfile =
[ Str ("@echo Generating $@"), NL ] ++
makeCxxDepend opts phase src objfile depfile
makeDependCxxArch :: Options -> String -> String -> String -> String -> HRule
makeDependCxxArch opts phase src objfile depfile =
Rules [ Rule (makeDependCxxArchSub opts phase src objfile depfile),
Include (Out (optArch opts) depfile)
]
makeDependCxxObj :: Options -> String -> String -> HRule
makeDependCxxObj opts phase src =
let objfile = (objectFilePath opts src)
in
makeDependCxxArch opts phase src objfile (dependFilePath objfile)
-- Make depend for an assembler output
makeDependAssembler :: Options -> String -> String -> HRule
makeDependAssembler opts phase src =
let objfile = (assemblerFilePath opts src)
in
makeDependArch opts phase src objfile (dependFilePath objfile)
--
-- Compile a C program to assembler
--
makecToAssembler :: Options -> String -> String -> String -> [ RuleToken ]
makecToAssembler opts phase src obj =
cToAssembler opts phase src (assemblerFilePath opts src) (dependFilePath obj)
--
-- Assemble an assembly language file
--
assemble :: Options -> String -> [ RuleToken ]
assemble opts src =
assembler opts src (objectFilePath opts src)
--
-- Create a library from a set of object files
--
archiveLibrary :: Options -> String -> [String] -> [String] -> [ RuleToken ]
archiveLibrary opts name objs libs =
archive opts objs libs name (libraryPath name)
--
-- Link an executable
--
linkExecutable :: Options -> [String] -> [String] -> String -> [RuleToken]
linkExecutable opts objs libs bin =
linker opts objs libs (applicationPath bin)
--
-- Link a C++ executable
--
linkCxxExecutable :: Options -> [String] -> [String] -> String -> [RuleToken]
linkCxxExecutable opts objs libs bin =
cxxlinker opts objs libs (applicationPath bin)
-------------------------------------------------------------------------
-------------------------------------------------------------------------
--
-- Hake macros (hacros?): each of these evaluates to HRule, i.e. a
-- list of templates for Makefile rules
--
-------------------------------------------------------------------------
--
-- Compile a C file for a particular architecture
-- We include cToAssembler to permit humans to type "make foo/bar.s"
--
compileCFile :: Options -> String -> HRule
compileCFile opts src =
Rules [ Rule (cCompiler opts "src" src (objectFilePath opts src)),
Rule (makecToAssembler opts "src" src (objectFilePath opts src)),
makeDependObj opts "src" src
]
--
-- Compile a C++ file for a particular architecture
--
compileCxxFile :: Options -> String -> HRule
compileCxxFile opts src =
Rules [ Rule (cxxCompiler opts "src" src (objectFilePath opts src)),
makeDependCxxObj opts "src" src
]
--
-- Compile a *generated* C file for a particular architecture.
-- Generated sources live in the build tree, so we clear the option
-- suffix before computing paths.
--
compileGeneratedCFile :: Options -> String -> HRule
compileGeneratedCFile opts src =
    Rules [ Rule (cCompiler opts' arch src (objectFilePath opts' src)),
            Rule (makecToAssembler opts' arch src (objectFilePath opts' src)),
            makeDependObj opts' arch src
          ]
    where
        opts' = opts { optSuffix = "" }
        arch  = optArch opts'
-- As above, for a generated C++ file (no assembler rule).
compileGeneratedCxxFile :: Options -> String -> HRule
compileGeneratedCxxFile opts src =
    Rules [ Rule (cxxCompiler opts' arch src (objectFilePath opts' src)),
            makeDependCxxObj opts' arch src
          ]
    where
        opts' = opts { optSuffix = "" }
        arch  = optArch opts'
-- Lift the single-file compilers over lists of sources.
compileCFiles :: Options -> [String] -> HRule
compileCFiles opts = Rules . map (compileCFile opts)
compileCxxFiles :: Options -> [String] -> HRule
compileCxxFiles opts = Rules . map (compileCxxFile opts)
compileGeneratedCFiles :: Options -> [String] -> HRule
compileGeneratedCFiles opts = Rules . map (compileGeneratedCFile opts)
compileGeneratedCxxFiles :: Options -> [String] -> HRule
compileGeneratedCxxFiles opts = Rules . map (compileGeneratedCxxFile opts)
--
-- Add a set of C (or whatever) dependences on a *generated* file.
-- Somewhere else this file has to be defined as a target, of
-- course...
--
-- | Make both @obj@ and its .depend file depend on the generated @file@.
-- NOTE(review): the source-name argument @s@ is unused here; the object
-- path arrives pre-computed as @obj@ — confirm this is intentional.
extraCDependencyForObj :: Options -> String -> String -> String -> [RuleToken]
extraCDependencyForObj opts file s obj =
    let arch = optArch opts
    in
      [ Target arch (dependFilePath obj),
        Target arch obj,
        Dep BuildTree arch file
      ]
-- | Dependency of one source file's object on a generated @file@.
extraCDependency :: Options -> String -> String -> HRule
extraCDependency opts file s = Rule (extraCDependencyForObj opts file s obj)
    where obj = objectFilePath opts s
-- | 'extraCDependency' over a list of source files.
extraCDependencies :: Options -> String -> [String] -> HRule
extraCDependencies opts file srcs =
    Rules [ extraCDependency opts file s | s <- srcs ]
-- | As 'extraCDependency', but for a *generated* source (suffix cleared).
extraGeneratedCDependency :: Options -> String -> String -> HRule
extraGeneratedCDependency opts file s =
    extraCDependency (opts { optSuffix = "" }) file s
--
-- Copy include files to the appropriate directory
--
-- | Copy a header into the per-arch include tree.
-- NOTE(review): the second rule makes /include/errors/errno.h a target
-- depending on the copied header — confirm why errno.h specifically.
includeFile :: Options -> String -> HRule
includeFile opts hdr =
    Rules [ (Rule [ Str "cp", In SrcTree "src" hdr, Out (optArch opts) hdr ]),
            (Rule [ PreDep BuildTree (optArch opts) hdr,
                    Target (optArch opts) "/include/errors/errno.h" ]
            )
          ]
--
-- Build a Mackerel header file from a definition.
--
-- Locations of the mackerel tool, of device definition files, and of
-- the generated device headers.
mackerelProgLoc = In InstallTree "tools" "/bin/mackerel"
mackerelDevFileLoc d = In SrcTree "src" ("/devices" ./. (d ++ ".dev"))
mackerelDevHdrPath d = "/include/dev/" ./. (d ++ "_dev.h")
-- | Mackerel using the shift-driver backend.
mackerel2 :: Options -> String -> HRule
mackerel2 opts dev = mackerel_generic opts dev "shift-driver"
-- | Mackerel using the (default) bitfield-driver backend.
mackerel :: Options -> String -> HRule
mackerel opts dev = mackerel_generic opts dev "bitfield-driver"
-- | Run mackerel on @dev@ with the given backend @flag@, producing the
-- device header for the current architecture.
mackerel_generic :: Options -> String -> String -> HRule
mackerel_generic opts dev flag =
    let
        arch = optArch opts
    in
      Rule [ mackerelProgLoc,
             Str ("--" ++ flag),
             Str "-c", mackerelDevFileLoc dev,
             Str "-o", Out arch (mackerelDevHdrPath dev)
           ]
-- | Make each source in @srcs@ depend on the generated header for @d@.
mackerelDependencies :: Options -> String -> [String] -> HRule
mackerelDependencies opts d srcs =
    extraCDependencies opts (mackerelDevHdrPath d) srcs
--
-- Basic Flounder definitions: where things are
--
flounderProgLoc = In InstallTree "tools" "/bin/flounder"
flounderIfFileLoc ifn = In SrcTree "src" ("/if" ./. (ifn ++ ".if"))
-- new-style stubs: path for generic header
flounderIfDefsPath ifn = "/include/if" ./. (ifn ++ "_defs.h")
-- new-style stubs: path for specific backend header
flounderIfDrvDefsPath ifn drv = "/include/if" ./. (ifn ++ "_" ++ drv ++ "_defs.h")
-- new-style stubs: generated C code (for all default enabled backends)
flounderBindingPath opts ifn =
    (optSuffix opts) ./. (ifn ++ "_flounder_bindings.c")
-- new-style stubs: generated C code (for extra backends enabled by the user)
flounderExtraBindingPath opts ifn =
    (optSuffix opts) ./. (ifn ++ "_flounder_extra_bindings.c")
flounderTHCHdrPath ifn = "/include/if" ./. (ifn ++ "_thc.h")
flounderTHCStubPath opts ifn =
    (optSuffix opts) ./. (ifn ++ "_thc.c")
-- Standard install locations for application binaries, static
-- libraries, and the kernel image.
applicationPath name = "/sbin" ./. name
libraryPath libname = "/lib" ./. ("lib" ++ libname ++ ".a")
kernelPath = "/sbin/cpu"
-- construct include arguments to flounder for common types
-- these are:
-- 1. platform-specific types (if/platform/foo.if)
-- 2. architecture-specific types (if/arch/foo.if)
-- 3. generic types (if/types.if)
flounderIncludes :: Options -> [RuleToken]
flounderIncludes opts
    = concat [ [Str "-i", flounderIfFileLoc ifn]
               | ifn <- [ "platform" ./. (optArch opts), -- XXX: optPlatform
                          "arch" ./. (optArch opts),
                          "types" ] ]
-- | Invoke flounder with the standard include set plus extra arguments.
flounderRule :: Options -> [RuleToken] -> HRule
flounderRule opts args
    = Rule $ [ flounderProgLoc ] ++ (flounderIncludes opts) ++ args
--
-- Build new-style Flounder header files from a definition
-- (generic header, plus one per backend)
--
flounderGenDefs :: Options -> String -> HRule
flounderGenDefs opts ifn =
    Rules $ flounderRule opts [
        Str "--generic-header", flounderIfFileLoc ifn,
        Out (optArch opts) (flounderIfDefsPath ifn)
    ] : [ flounderRule opts [
            Str $ "--" ++ drv ++ "-header", flounderIfFileLoc ifn,
            Out (optArch opts) (flounderIfDrvDefsPath ifn drv)]
          | drv <- Args.allFlounderBackends ]
--
-- Build a new Flounder binding file from a definition.
-- This builds the binding for all enabled backends
--
flounderBinding :: Options -> String -> [String] -> HRule
flounderBinding opts ifn =
    flounderBindingHelper opts ifn backends (flounderBindingPath opts ifn)
    where
        backends = "generic" : (optFlounderBackends opts)
-- as above, but for a specific set of user-specified backends
flounderExtraBinding :: Options -> String -> [String] -> [String] -> HRule
flounderExtraBinding opts ifn backends =
    flounderBindingHelper opts ifn backends (flounderExtraBindingPath opts ifn)
-- | Shared worker: run flounder with per-backend stub flags, compile the
-- generated C file, and record header dependencies for @srcs@.
flounderBindingHelper :: Options -> String -> [String] -> String -> [String] -> HRule
flounderBindingHelper opts ifn backends cfile srcs = Rules $
    [ flounderRule opts $ args ++ [flounderIfFileLoc ifn, Out arch cfile ],
      compileGeneratedCFile opts cfile,
      flounderDefsDepend opts ifn allbackends srcs]
    ++ [extraGeneratedCDependency opts (flounderIfDrvDefsPath ifn d) cfile
        | d <- allbackends]
    where
        arch = optArch opts
        archfam = optArchFamily opts
        args = [Str "-a", Str archfam] ++ [Str $ "--" ++ d ++ "-stub" | d <- backends]
        -- NOTE(review): with default fixities this parses as
        -- (backends `union` optFlounderBackends opts) \\ ["generic"]
        -- — confirm that grouping is the intended one.
        allbackends = backends `union` optFlounderBackends opts \\ ["generic"]
--
-- Build a Flounder THC header file from a definition.
--
flounderTHCFile :: Options -> String -> HRule
flounderTHCFile opts ifn =
    flounderRule opts [
        Str "--thc-header", flounderIfFileLoc ifn,
        Out (optArch opts) (flounderTHCHdrPath ifn)
    ]
--
-- Build a Flounder THC stubs file from a definition.
--
-- | Generate the THC stub C file, compile it, and make both it and
-- @srcs@ depend on the THC header.
flounderTHCStub :: Options -> String -> [String] -> HRule
flounderTHCStub opts ifn srcs =
    let cfile = flounderTHCStubPath opts ifn
        hfile = flounderTHCHdrPath ifn
        arch = optArch opts
    in
      Rules [ flounderRule opts [
                 Str "--thc-stubs", flounderIfFileLoc ifn,
                 Out arch cfile
              ],
              compileGeneratedCFile opts cfile,
              extraCDependencies opts hfile srcs,
              extraGeneratedCDependency opts hfile cfile
            ]
--
-- Create a dependency on a Flounder header file for a set of files,
-- but don't actually build either stub (useful for libraries)
--
flounderDefsDepend :: Options -> String -> [String] -> [String] -> HRule
flounderDefsDepend opts ifn backends srcs = Rules $
    (extraCDependencies opts (flounderIfDefsPath ifn) srcs) :
    [extraCDependencies opts (flounderIfDrvDefsPath ifn drv) srcs
     | drv <- backends, drv /= "generic" ]
--
-- Emit all the Flounder-related rules/dependencies for a given target
--
flounderRules :: Options -> Args.Args -> [String] -> [HRule]
flounderRules opts args csrcs =
    ([ flounderBinding opts f csrcs | f <- Args.flounderBindings args ]
     ++
     [ flounderExtraBinding opts f backends csrcs
     | (f, backends) <- Args.flounderExtraBindings args ]
     ++
     [ flounderTHCStub opts f csrcs | f <- Args.flounderTHCStubs args ]
     ++
     -- Flounder extra defs (header files) also depend on the base
     -- Flounder headers for the same interface
     [ flounderDefsDepend opts f baseBackends csrcs | f <- allIf ]
     ++
     -- Extra defs only for non-base backends (those were already emitted above)
     [ flounderDefsDepend opts f (backends \\ baseBackends) csrcs
     | (f, backends) <- Args.flounderExtraDefs args ]
    )
    where
        -- base backends enabled by default
        baseBackends = optFlounderBackends opts
        -- all interfaces mentioned in flounderDefs or ExtraDefs
        allIf = nub $ Args.flounderDefs args ++ [f | (f,_) <- Args.flounderExtraDefs args]
--
-- Build a Fugu library
--
-- | Run fugu on @file@.fugu, producing an error-code header and a C
-- file, then compile the generated C.
fuguFile :: Options -> String -> HRule
fuguFile opts file =
    let arch = optArch opts
        cfile = file ++ ".c"
        hfile = "/include/errors/" ++ file ++ ".h"
    in
      Rules [ Rule [In InstallTree "tools" "/bin/fugu",
                    In SrcTree "src" (file++".fugu"),
                    Out arch hfile,
                    Out arch cfile ],
              compileGeneratedCFile opts cfile
            ]
--
-- Build a Pleco library
--
-- | Run pleco on @file@.pleco, producing trace-definition header, JSON
-- and C outputs, then compile the generated C.
plecoFile :: Options -> String -> HRule
plecoFile opts file =
    let arch = optArch opts
        cfile = file ++ ".c"
        hfile = "/include/trace_definitions/" ++ file ++ ".h"
        jsonfile = "/trace_definitions/" ++ file ++ ".json"
    in
      Rules [ Rule [In InstallTree "tools" "/bin/pleco",
                    In SrcTree "src" (file++".pleco"),
                    Out arch hfile,
                    Out arch jsonfile,
                    Out arch cfile ],
              compileGeneratedCFile opts cfile
            ]
--
-- Build a Hamlet file
--
-- | Run hamlet on @file@.hl.  Only user_cap_predicates.c is compiled
-- here; cap_predicates.c is emitted but not compiled in this rule —
-- presumably it is consumed by the kernel build (confirm).
hamletFile :: Options -> String -> HRule
hamletFile opts file =
    let arch = optArch opts
        hfile = "/include/barrelfish_kpi/capbits.h"
        cfile = "cap_predicates.c"
        usercfile = "user_cap_predicates.c"
        ofile = "user_cap_predicates.o"
        nfile = "cap_predicates"
        afile = "/lib/libcap_predicates.a"
    in
      Rules [ Rule [In InstallTree "tools" "/bin/hamlet",
                    In SrcTree "src" (file++".hl"),
                    Out arch hfile,
                    Out arch cfile,
                    Out arch usercfile ],
              compileGeneratedCFile opts usercfile,
              Rule (archive opts [ ofile ] [] nfile afile)
            ]
--
-- Link a set of object files and libraries together
--
link :: Options -> [String] -> [ String ] -> String -> HRule
link opts objs libs bin =
    Rule (linkExecutable opts objs libs bin)
--
-- Link a set of C++ object files and libraries together
--
linkCxx :: Options -> [String] -> [ String ] -> String -> HRule
linkCxx opts objs libs bin =
    Rule (linkCxxExecutable opts objs libs bin)
--
-- Link a CPU driver. This is where it gets distinctly architecture-specific.
--
-- Dispatch on the target architecture; note the ARMv7 variants receive
-- the bare name while the others get the /sbin-relative path.
linkKernel :: Options -> String -> [String] -> [String] -> HRule
linkKernel opts name objs libs =
    case optArch opts of
        "x86_64"  -> X86_64.linkKernel opts objs libPaths sbinName
        "k1om"    -> K1om.linkKernel opts objs libPaths sbinName
        "x86_32"  -> X86_32.linkKernel opts objs libPaths sbinName
        "scc"     -> SCC.linkKernel opts objs libPaths sbinName
        "armv5"   -> ARMv5.linkKernel opts objs libPaths sbinName
        "arm11mp" -> ARM11MP.linkKernel opts objs libPaths sbinName
        "xscale"  -> XScale.linkKernel opts objs libPaths sbinName
        "armv7"   -> ARMv7.linkKernel opts objs libPaths name
        "armv7-m" -> ARMv7_M.linkKernel opts objs libPaths name
        arch      -> Rule [ Str ("Error: Can't link kernel for '" ++ arch ++ "'") ]
    where
        libPaths = [ libraryPath l | l <- libs ]
        sbinName = "/sbin" ./. name
--
-- Copy a file from one place to another
--
copy :: Options -> String -> String -> HRule
copy opts src dest =
    Rule [ Str "cp", In BuildTree (optArch opts) src, Out (optArch opts) dest ]
--
-- Assemble a list of S files for a particular architecture
--
-- | Assemble one .S file and generate its dependency file.
assembleSFile :: Options -> String -> HRule
assembleSFile opts src =
    Rules [ Rule (assemble opts src),
            makeDependObj opts "src" src
          ]
assembleSFiles :: Options -> [String] -> HRule
assembleSFiles opts srcs = Rules [ assembleSFile opts s | s <- srcs ]
--
-- Archive a bunch of objects into a library
--
staticLibrary :: Options -> String -> [String] -> [String] -> HRule
staticLibrary opts libpath objs libs =
    Rule (archiveLibrary opts libpath objs libs)
--
-- Compile a Haskell binary (for the host architecture)
--
compileHaskell prog main deps = compileHaskellWithLibs prog main deps []
-- | Build @prog@ with ghc --make from @main@, with extra source dirs
-- @dirs@ on the search path and @deps@ as rebuild dependencies.
compileHaskellWithLibs prog main deps dirs =
    let
        -- marker ensuring the tools install tree exists before linking
        tools_dir = (Dep InstallTree "tools" "/tools/.marker")
    in
      Rule ([ NStr "ghc -i",
              NoDep SrcTree "src" ".",
              Str "-odir ", NoDep BuildTree "tools" ".",
              Str "-hidir ", NoDep BuildTree "tools" ".",
              Str "-rtsopts=all",
              Str "--make ",
              In SrcTree "src" main,
              Str "-o ",
              Out "tools" ("/bin" ./. prog),
              Str "$(LDFLAGS)" ]
            ++ concat [[ NStr "-i", NoDep SrcTree "src" d] | d <- dirs]
            ++ [ (Dep SrcTree "src" dep) | dep <- deps ]
            ++ [ tools_dir ])
--
-- Compile (and link) a C binary (for the host architecture)
--
compileNativeC :: String -> [String] -> [String] -> [String] -> HRule
compileNativeC prog cfiles cflags ldflags =
    Rule ([ Str nativeCCompiler,
            Str "-o",
            Out "tools" ("/bin" ./. prog),
            Str "$(CFLAGS)",
            Str "$(LDFLAGS)" ]
          ++ [ (Str flag) | flag <- cflags ]
          ++ [ (Str flag) | flag <- ldflags ]
          ++ [ (In SrcTree "src" dep) | dep <- cfiles ])
--
-- Build a Technical Note
--
buildTechNote :: String -> String -> Bool -> Bool -> [String] -> HRule
buildTechNote input output bib glo figs =
    buildTechNoteWithDeps input output bib glo figs []
-- | Run pdflatex (via run-pdflatex.sh) over @input@, producing @output@
-- in the docs tree; @bib@/@glo@ toggle bibliography/glossary passes,
-- @figs@ are PDF figures, @deps@ are extra rule dependencies.
buildTechNoteWithDeps :: String -> String -> Bool -> Bool -> [String] -> [RuleToken] -> HRule
buildTechNoteWithDeps input output bib glo figs deps =
    let
        working_dir = NoDep BuildTree "tools" "/tmp/"
        style_files = [ "bfish-logo.pdf", "bftn.sty", "defs.bib", "barrelfish.bib" ]
    in
      Rule ( [ Dep SrcTree "src" (f ++ ".pdf") | f <- figs]
             ++
             [ Dep SrcTree "src" ("/doc/style" ./. f) | f <- style_files ]
             ++
             [ Str "mkdir", Str "-p", working_dir, NL ]
             ++
             deps
             ++
             [ In SrcTree "src" "/tools/run-pdflatex.sh",
               Str "--input-tex", In SrcTree "src" input,
               Str "--working-dir", working_dir,
               Str "--output-pdf", Out "docs" ("/" ++ output),
               Str "--texinput", NoDep SrcTree "src" "/doc/style",
               Str "--bibinput", NoDep SrcTree "src" "/doc/style"
             ]
             ++ (if bib then [ Str "--has-bib" ] else [])
             ++ (if glo then [ Str "--has-glo" ] else [])
           )
---------------------------------------------------------------------
--
-- Transformations on file names
--
----------------------------------------------------------------------
-- | Every object file a target will link: objects for its hand-written
-- C/C++/assembly sources plus objects for all generated sources
-- (flounder bindings, THC stubs, and explicitly listed generated files).
allObjectPaths :: Options -> Args.Args -> [String]
allObjectPaths opts args =
    [objectFilePath opts g
         | g <- (Args.cFiles args)++(Args.cxxFiles args)++(Args.assemblyFiles args)]
    ++
    [generatedObjectFilePath opts g
         | g <- [ flounderBindingPath opts f
                      | f <- (Args.flounderBindings args)]
                ++
                [ flounderExtraBindingPath opts f
                      | (f, _) <- (Args.flounderExtraBindings args)]
                ++
                [ flounderTHCStubPath opts f
                      | f <- (Args.flounderTHCStubs args)]
                ++
                (Args.generatedCFiles args) ++ (Args.generatedCxxFiles args)
    ]
-- | Install-tree paths of all libraries a target links against.
allLibraryPaths :: Args.Args -> [String]
allLibraryPaths args =
    [ libraryPath l | l <- Args.addLibraries args ]
---------------------------------------------------------------------
--
-- Very large-scale macros
--
----------------------------------------------------------------------
--
-- Build an application binary
--
application :: Args.Args
application = Args.defaultArgs { Args.buildFunction = applicationBuildFn }
-- | Build rules for an application, one set per target architecture.
-- The first guarded clause only fires when debugFlag is set: trace
-- prints the args and returns False, so the clause never matches and
-- evaluation falls through to the real definition.
applicationBuildFn :: [String] -> String -> Args.Args -> HRule
applicationBuildFn af tf args
    | debugFlag && trace (Args.showArgs (tf ++ " Application ") args) False
        = undefined
applicationBuildFn af tf args =
    Rules [ appBuildArch af tf args arch | arch <- Args.architectures args ]
-- | Derive per-application compile options for one architecture from
-- the arch defaults: add/remove includes and flags as requested by the
-- Hakefile and isolate object files with a per-target suffix.
appGetOptionsForArch arch args =
    (options arch) { extraIncludes =
                         [ NoDep SrcTree "src" a | a <- Args.addIncludes args]
                         ++
                         [ NoDep BuildTree arch a | a <- Args.addGeneratedIncludes args],
                     optIncludes = (optIncludes $ options arch) \\
                         [ NoDep SrcTree "src" i | i <- Args.omitIncludes args ],
                     optFlags = (optFlags $ options arch) \\
                         [ Str f | f <- Args.omitCFlags args ],
                     optCxxFlags = (optCxxFlags $ options arch) \\
                         [ Str f | f <- Args.omitCxxFlags args ],
                     optSuffix = "_for_app_" ++ Args.target args,
                     extraFlags = Args.addCFlags args,
                     extraCxxFlags = Args.addCxxFlags args,
                     extraLdFlags = [ Str f | f <- Args.addLinkFlags args ],
                     extraDependencies =
                         [Dep BuildTree arch s | s <- Args.addGeneratedDependencies args]
                   }
-- | Generate all build rules for one application on one architecture:
-- flounder stubs, mackerel header dependencies, compilation of every
-- C/C++/assembly source (hand-written and generated), and the final
-- link step.
appBuildArch af tf args arch =
    let -- Fiddle the options
        opts = appGetOptionsForArch arch args
        csrcs = Args.cFiles args
        cxxsrcs = Args.cxxFiles args
        gencsrc = Args.generatedCFiles args
        gencxxsrc = Args.generatedCxxFiles args
        appname = Args.target args
        -- XXX: Not sure if this is correct. Currently assuming that if the app
        -- contains C++ files, we have to use the C++ linker.
        -- (null avoids the needless Eq constraint of `== []`.)
        mylink = if null cxxsrcs then link else linkCxx
    in
      Rules ( flounderRules opts args csrcs
              ++
              [ mackerelDependencies opts m csrcs | m <- Args.mackerelDevices args ]
              ++
              [ compileCFiles opts csrcs,
                compileCxxFiles opts cxxsrcs,
                compileGeneratedCFiles opts gencsrc,
                compileGeneratedCxxFiles opts gencxxsrc,
                assembleSFiles opts (Args.assemblyFiles args),
                mylink opts (allObjectPaths opts args) (allLibraryPaths args) appname
              ]
            )
--
-- Build an Arrakis application binary
--
arrakisapplication :: Args.Args
arrakisapplication = Args.defaultArgs { Args.buildFunction = arrakisApplicationBuildFn }
-- | As 'applicationBuildFn' but for Arrakis applications; the debug
-- clause never matches (trace returns False) and only prints the args.
arrakisApplicationBuildFn :: [String] -> String -> Args.Args -> HRule
arrakisApplicationBuildFn af tf args
    | debugFlag && trace (Args.showArgs (tf ++ " Arrakis Application ") args) False
        = undefined
arrakisApplicationBuildFn af tf args =
    Rules [ arrakisAppBuildArch af tf args arch | arch <- Args.architectures args ]
-- | Per-arch options for an Arrakis app: adds -DARRAKIS and swaps
-- libbarrelfish for libarrakis in the default library set.
arrakisAppGetOptionsForArch arch args =
    (options arch) { extraIncludes =
                         [ NoDep SrcTree "src" a | a <- Args.addIncludes args],
                     optIncludes = (optIncludes $ options arch) \\
                         [ NoDep SrcTree "src" i | i <- Args.omitIncludes args ],
                     optFlags = ((optFlags $ options arch) ++ [ Str "-DARRAKIS" ]) \\
                         [ Str f | f <- Args.omitCFlags args ],
                     optCxxFlags = (optCxxFlags $ options arch) \\
                         [ Str f | f <- Args.omitCxxFlags args ],
                     optSuffix = "_for_app_" ++ Args.target args,
                     optLibs = [ In InstallTree arch "/lib/libarrakis.a" ] ++
                         ((optLibs $ options arch) \\
                          [ In InstallTree arch "/lib/libbarrelfish.a" ]),
                     extraFlags = Args.addCFlags args,
                     extraCxxFlags = Args.addCxxFlags args,
                     extraLdFlags = [ Str f | f <- Args.addLinkFlags args ],
                     extraDependencies =
                         [Dep BuildTree arch s | s <- Args.addGeneratedDependencies args]
                   }
-- | Generate all build rules for one Arrakis application on one
-- architecture; mirrors 'appBuildArch' with Arrakis-specific options.
arrakisAppBuildArch af tf args arch =
    let -- Fiddle the options
        opts = arrakisAppGetOptionsForArch arch args
        csrcs = Args.cFiles args
        cxxsrcs = Args.cxxFiles args
        gencsrc = Args.generatedCFiles args
        gencxxsrc = Args.generatedCxxFiles args
        appname = Args.target args
        -- XXX: Not sure if this is correct. Currently assuming that if the app
        -- contains C++ files, we have to use the C++ linker.
        -- (null avoids the needless Eq constraint of `== []`.)
        mylink = if null cxxsrcs then link else linkCxx
    in
      Rules ( flounderRules opts args csrcs
              ++
              [ mackerelDependencies opts m csrcs | m <- Args.mackerelDevices args ]
              ++
              [ compileCFiles opts csrcs,
                compileCxxFiles opts cxxsrcs,
                compileGeneratedCFiles opts gencsrc,
                compileGeneratedCxxFiles opts gencxxsrc,
                assembleSFiles opts (Args.assemblyFiles args),
                mylink opts (allObjectPaths opts args) (allLibraryPaths args) appname
              ]
            )
--
-- Build a static library
--
library :: Args.Args
library = Args.defaultArgs { Args.buildFunction = libraryBuildFn }
-- | Build rules for a static library, one set per target architecture;
-- the debug clause never matches (trace returns False).
libraryBuildFn :: [String] -> String -> Args.Args -> HRule
libraryBuildFn af tf args | debugFlag && trace (Args.showArgs (tf ++ " Library ") args) False = undefined
libraryBuildFn af tf args =
    Rules [ libBuildArch af tf args arch | arch <- Args.architectures args ]
-- | Per-arch options for a library; objects get a per-library suffix.
libGetOptionsForArch arch args =
    (options arch) { extraIncludes =
                         [ NoDep SrcTree "src" a | a <- Args.addIncludes args],
                     optIncludes = (optIncludes $ options arch) \\
                         [ NoDep SrcTree "src" i | i <- Args.omitIncludes args ],
                     optFlags = (optFlags $ options arch) \\
                         [ Str f | f <- Args.omitCFlags args ],
                     optCxxFlags = (optCxxFlags $ options arch) \\
                         [ Str f | f <- Args.omitCxxFlags args ],
                     optSuffix = "_for_lib_" ++ Args.target args,
                     extraFlags = Args.addCFlags args,
                     extraCxxFlags = Args.addCxxFlags args,
                     extraDependencies =
                         [Dep BuildTree arch s | s <- Args.addGeneratedDependencies args]
                   }
-- | Rules to build one library on one architecture: stubs, compilation
-- of all sources, then archive everything into the static library.
libBuildArch af tf args arch =
    let -- Fiddle the options
        opts = libGetOptionsForArch arch args
        csrcs = Args.cFiles args
        cxxsrcs = Args.cxxFiles args
        gencsrc = Args.generatedCFiles args
        gencxxsrc = Args.generatedCxxFiles args
    in
      Rules ( flounderRules opts args csrcs
              ++
              [ mackerelDependencies opts m csrcs | m <- Args.mackerelDevices args ]
              ++
              [ compileCFiles opts csrcs,
                compileCxxFiles opts cxxsrcs,
                compileGeneratedCFiles opts gencsrc,
                compileGeneratedCxxFiles opts gencxxsrc,
                assembleSFiles opts (Args.assemblyFiles args),
                staticLibrary opts (Args.target args) (allObjectPaths opts args) (allLibraryPaths args)
              ]
            )
--
-- Library dependencies
--
-- The following code is under heavy construction, and also somewhat ugly
-- | A dependency is either a single library name or a nested group.
data LibDepTree = LibDep String | LibDeps [LibDepTree] deriving (Show,Eq)
-- manually add dependencies for now (it would be better if each library
-- defined each own dependencies locally, but that does not seem to be an
-- easy thing to do currently
libposixcompat_deps = LibDeps [ LibDep "posixcompat",
                                libvfs_deps_all, LibDep "term_server" ]
liblwip_deps = LibDeps $ [ LibDep x | x <- deps ]
    where deps = ["lwip" ,"contmng" ,"net_if_raw" ,"timer" ,"hashtable"]
libnetQmng_deps = LibDeps $ [ LibDep x | x <- deps ]
    where deps = ["net_queue_manager", "contmng" ,"procon" , "net_if_raw", "bfdmuxvm"]
libnfs_deps = LibDeps $ [ LibDep "nfs", liblwip_deps]
libssh_deps = LibDeps [ libposixcompat_deps, libopenbsdcompat_deps,
                        LibDep "zlib", LibDep "crypto", LibDep "ssh" ]
libopenbsdcompat_deps = LibDeps [ libposixcompat_deps, LibDep "crypto",
                                  LibDep "openbsdcompat" ]
-- we need to make vfs more modular to make this actually useful
data VFSModules = VFS_RamFS | VFS_NFS | VFS_BlockdevFS | VFS_FAT
-- | Expand a list of VFS backend modules into library dependencies.
-- The base "vfs" library is always appended once the list is exhausted;
-- RamFS and FAT contribute no extra libraries of their own.
vfsdeps :: [VFSModules] -> [LibDepTree]
vfsdeps [] = [LibDep "vfs"]
vfsdeps (VFS_RamFS:xs)      = vfsdeps xs
vfsdeps (VFS_NFS:xs)        = libnfs_deps : vfsdeps xs
vfsdeps (VFS_BlockdevFS:xs) = LibDep "ahci" : vfsdeps xs
vfsdeps (VFS_FAT:xs)        = vfsdeps xs
-- Pre-built dependency sets for the common VFS configurations.
libvfs_deps_all = LibDeps $ vfsdeps [VFS_NFS, VFS_RamFS, VFS_BlockdevFS,
                                     VFS_FAT]
libvfs_deps_nonfs = LibDeps $ vfsdeps [VFS_RamFS, VFS_BlockdevFS, VFS_FAT]
libvfs_deps_nfs = LibDeps $ vfsdeps [VFS_NFS]
libvfs_deps_ramfs = LibDeps $ vfsdeps [VFS_RamFS]
libvfs_deps_blockdevfs = LibDeps $ vfsdeps [VFS_BlockdevFS]
libvfs_deps_fat = LibDeps $ vfsdeps [VFS_FAT, VFS_BlockdevFS]
-- flatten the dependency tree
-- | Flatten to the leaf 'LibDep's, preserving left-to-right order;
-- duplicates are kept (callers run 'nub' afterwards).
flat :: [LibDepTree] -> [LibDepTree]
flat [] = []
flat (LibDep l : xs)  = LibDep l : flat xs
flat (LibDeps t : xs) = flat t ++ flat xs
-- Map a library name onto its (possibly aggregate) dependency tree;
-- names without a registered dependency set become plain leaves.
str2dep :: String -> LibDepTree
str2dep str =
    case lookup str known of
      Just tree -> tree
      Nothing   -> LibDep str
    where known = [ ("vfs",           libvfs_deps_all)
                  , ("vfs_nonfs",     libvfs_deps_nonfs)
                  , ("posixcompat",   libposixcompat_deps)
                  , ("lwip",          liblwip_deps)
                  , ("netQmng",       libnetQmng_deps)
                  , ("ssh",           libssh_deps)
                  , ("openbsdcompat", libopenbsdcompat_deps)
                  ]
-- get library dependencies
--    we need a specific order for the .a, so we define a total order
-- | Expand, flatten, deduplicate and sort library names into link
-- order.  NOTE(review): xcmp compares 'elemIndex' results, and Nothing
-- sorts before Just, so libraries absent from xord come first — confirm
-- that is intended.  xcmp only matches LibDep, which is safe because
-- 'flat' produces leaves only.
libDeps :: [String] -> [String]
libDeps xs = [x | (LibDep x) <- (sortBy xcmp) . nub . flat $ map str2dep xs ]
    where xord = [ "ssh"
                 , "openbsdcompat"
                 , "crypto"
                 , "zlib"
                 , "posixcompat"
                 , "term_server"
                 , "vfs"
                 , "ahci"
                 , "nfs"
                 , "net_queue_manager"
                 , "bfdmuxvm"
                 , "lwip"
                 , "arranet"
                 , "e1000n"
                 , "e10k"
                 , "e10k_vf"
                 , "contmng"
                 , "procon"
                 , "net_if_raw"
                 , "vfsfd"
                 , "timer"
                 , "hashtable"]
          xcmp (LibDep a) (LibDep b) = compare (elemIndex a xord) (elemIndex b xord)
--
-- Build a CPU driver
--
cpuDriver :: Args.Args
cpuDriver = Args.defaultArgs { Args.buildFunction = cpuDriverBuildFn,
                               Args.target = "cpu" }
-- CPU drivers are built differently
-- | Stub: contributes no generic rules here; the arch-specific kernel
-- link presumably happens elsewhere (see 'linkKernel') — confirm.
cpuDriverBuildFn :: [String] -> String -> Args.Args -> HRule
cpuDriverBuildFn af tf args = Rules []
| utsav2601/cmpe295A | hake/RuleDefs.hs | mit | 46,102 | 0 | 18 | 12,273 | 12,314 | 6,349 | 5,965 | 756 | 3 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TemplateHaskell #-}
module Application.TopicTypes where
import Database.Persist.TH
import Data.Aeson
import GHC.Generics
-- | The kind of a conference topic.  Persisted to the database via
-- 'derivePersistField' (which serialises using Show/Read) and exposed
-- over JSON via the Generic-derived Aeson instances.
data TopicType = Discussion | Presentation | Workshop deriving(Show, Read, Eq, Generic)
derivePersistField "TopicType"
instance ToJSON TopicType
instance FromJSON TopicType
| kRITZCREEK/FROST-Backend | src/Application/TopicTypes.hs | mit | 375 | 0 | 6 | 74 | 76 | 42 | 34 | 10 | 0 |
module Patterns where
import Prelude hiding (fst,null,head)
-- PATTERN MATCHING
-- Project the first component of a pair, ignoring the second entirely.
fst :: (t, t1) -> t
fst (x, _) = x
-- | True iff the list is empty.  Wildcard patterns replace the unused
-- x/xs bindings (avoids -Wunused-matches warnings); any cons cell
-- matches the second equation.
null :: [t] -> Bool
null [] = True
null (_:_) = False
-- Head with a descriptive error on the empty list.  The two equations
-- below are separated only by comments, which Haskell permits.
head (x:xs) = x -- note! that we take the front of the cons
-- leave it blank
-- ghci> head "hello"
-- 'h'
-- ghci> head [1]
-- 1
-- ghci> head []
-- *** Exception: 2014-0214case.hs:13:1-15: Non-exhaustive patterns in function head
-- or we could try and give a message
-- ghci> :t head
-- head :: [[Char]] -> [Char]
-- head [] = "CRASH"  -- without the error handler we end up Type restricted
-- ghci> head []
-- "CRASH"
-- ghci> head [1]
--
-- <interactive>:3:7:
--     No instance for (Num [Char]) arising from the literal `1'
--     Possible fix: add an instance declaration for (Num [Char])
--     In the expression: 1
--     In the first argument of `head', namely `[1]'
--     In the expression: head [1]
-- head [] = 0
-- could then try this definition
-- head [] = []
-- ghci> head []
-- []
-- the above works but it isn't in keeping with purity in head's original definition
-- Lastly we could define an error exception
head [] = error "No option for empty lists"
-- ghci> head []
-- *** Exception: No option for empty lists
-- head and tail are fragile.
-- adding the type class post hoc is better than leaving it blank
-- here's a design pattern where:
-- if x then y else ourFunction that needs a front end filter. Which could also be accomplished with pattern matching
-- double :: Num a => [a] -> [a]
-- double nums =
-- if null nums
-- then []
-- else (2 * (head nums)) : (double (tail nums))
-- ghci> double [4,8..23]
-- [8,16,24,32,40]
-----------------------------------
-- GOOD PATTERNS ARE MORE ROBUST --
-----------------------------------
-- Double every element of a numeric list.  (The explicit signature is
-- what GHC infers for the recursive version; it also sidesteps the
-- monomorphism restriction for this point-free form.)
double :: Num a => [a] -> [a]
double = map (2 *)
-- ghci> double [4,8..23]
-- [8,16,24,32,40]
| HaskellForCats/HaskellForCats | MenaBeginning/Ch008/2014-0214patterns.hs | mit | 1,968 | 0 | 7 | 434 | 206 | 136 | 70 | 11 | 1 |
module Parse where
import qualified Data.ByteString.Lazy.Char8 as L8
import qualified Data.ByteString.Lazy as L
import Data.Char
import Data.Int
import Data.Word (Word8)
import Control.Applicative
-- data Greymap = Greymap {
-- greyWidth :: Int
-- , greyHeight :: Int
-- , greyMax :: Int
-- , greyData :: L.ByteString
-- } deriving (Eq)
-- instance Show Greymap where
-- show (Greymap w h m _) = "Greymap " ++ show w ++ "x" ++ show h ++
-- " " ++ show m
-- parseP5 :: L.ByteString -> Maybe (Greymap, L.ByteString)
-- matchHeader :: L.ByteString -> L.ByteString -> Maybe L.ByteString
-- -- "nat" here is short for "natural number"
-- getNat :: L.ByteString -> Maybe (Int, L.ByteString)
-- getBytes :: Int -> L.ByteString
-- -> Maybe (L.ByteString, L.ByteString)
-- parseP5 s =
-- case matchHeader (L8.pack "P5") s of
-- Nothing -> Nothing
-- Just s1 ->
-- case getNat s1 of
-- Nothing -> Nothing
-- Just (width, s2) ->
-- case getNat (L8.dropWhile isSpace s2) of
-- Nothing -> Nothing
-- Just (height, s3) ->
-- case getNat (L8.dropWhile isSpace s3) of
-- Nothing -> Nothing
-- Just (maxGrey, s4)
-- | maxGrey > 255 -> Nothing
-- | otherwise ->
-- case getBytes 1 s4 of
-- Nothing -> Nothing
-- Just (_, s5) ->
-- case getBytes (width * height) s5 of
-- Nothing -> Nothing
-- Just (bitmap, s6) ->
-- Just (Greymap width height maxGrey bitmap, s6)
-- matchHeader prefix str
-- | prefix `L8.isPrefixOf` str
-- = Just (L8.dropWhile isSpace (L.drop (L.length prefix) str))
-- | otherwise
-- = Nothing
-- -- L.ByteString -> Maybe (Int, L.ByteString)
-- getNat s = case L8.readInt s of
-- Nothing -> Nothing
-- Just (num,rest)
-- | num <= 0 -> Nothing
-- | otherwise -> Just (fromIntegral num, rest)
-- -- Int -> L.ByteString -> Maybe (L.ByteString, L.ByteString)
-- getBytes n str = let count = fromIntegral n
-- both@(prefix,_) = L.splitAt count str
-- in if L.length prefix < count
-- then Nothing
-- else Just both
-- (>>?) :: Maybe a -> (a -> Maybe b) -> Maybe b
-- Nothing >>? _ = Nothing
-- Just v >>? f = f v
---
-- | Parser state: the remaining input plus the current byte offset,
-- which is used for error reporting in 'bail'.
data ParseState = ParseState {
      string :: L.ByteString
    , offset :: Int64           -- imported from Data.Int
    } deriving (Show)
-- Placeholder illustrating the shape of a stateful parse step.
simpleParse :: ParseState -> (a, ParseState)
simpleParse = undefined
-- | A parser is a state transformer that either fails with a message
-- or yields a result together with the updated state.
newtype Parse a = Parse {
      runParse :: ParseState -> Either String (a, ParseState)
    }
-- Lift a pure value into a parser that leaves the state untouched.
identity :: a -> Parse a
identity x = Parse (\st -> Right (x, st))
-- Run a parser over an entire ByteString starting at offset 0,
-- keeping only the result and discarding the final state.
parse :: Parse a -> L.ByteString -> Either String a
parse parser input = fst <$> runParse parser (ParseState input 0)
-- Replace the offset recorded in a parse state, leaving input as-is.
modifyOffset :: ParseState -> Int64 -> ParseState
modifyOffset st newOff = st { offset = newOff }
-- | Consume one byte, advancing the offset by one; fails with
-- "no more input" at end of input.
parseByte :: Parse Word8
parseByte =
    getState ==> \initState ->
    case L.uncons (string initState) of
      Nothing ->
          bail "no more input"
      Just (byte,remainder) ->
          putState newState ==> \_ ->
          identity byte
        where newState = initState { string = remainder,
                                     offset = newOffset }
              newOffset = offset initState + 1
-- | Parser yielding the current parse state.
getState :: Parse ParseState
getState = Parse (\s -> Right (s, s))
-- | Parser replacing the parse state, yielding ().
putState :: ParseState -> Parse ()
putState s = Parse (\_ -> Right ((), s))
-- | Abort with an error message prefixed by the current byte offset.
bail :: String -> Parse a
bail err = Parse $ \s -> Left $
           "byte offset " ++ show (offset s) ++ ": " ++ err
-- | Sequence two parsers, feeding the first result to the second;
-- failures short-circuit.  (The Parse analogue of >>=.)
(==>) :: Parse a -> (a -> Parse b) -> Parse b
firstParser ==> secondParser  =  Parse chainedParser
    where chainedParser initState   =
              case runParse firstParser initState of
                Left errMessage ->
                    Left errMessage
                Right (firstResult, newState) ->
                    runParse (secondParser firstResult) newState
-- Mapping a pure function over a parser's eventual result.
instance Functor Parse where
    fmap f parser = parser ==> \result ->
                    identity (f result)
-- | Convert a byte to its Char (assumes ASCII/Latin-1 range).
w2c :: Word8 -> Char
w2c = chr . fromIntegral
-- import Control.Applicative
-- | Consume one byte and present it as a Char.
parseChar :: Parse Char
parseChar = w2c <$> parseByte
-- | Look at the next byte without consuming it (Nothing at EOF).
peekByte :: Parse (Maybe Word8)
peekByte = (fmap fst . L.uncons . string) <$> getState
-- | Look at the next byte as a Char without consuming it.
peekChar :: Parse (Maybe Char)
peekChar = fmap w2c <$> peekByte
-- | Consume bytes while the predicate holds; stops (without failing)
-- at the first non-matching byte or at end of input.
parseWhile :: (Word8 -> Bool) -> Parse [Word8]
parseWhile p = (fmap p <$> peekByte) ==> \mp ->
               if mp == Just True
               then parseByte ==> \b ->
                    (b:) <$> parseWhile p
               else identity []
-- Equivalent to 'parseWhile', written without Functor combinators.
parseWhileVerbose p =
    peekByte ==> \mc ->
    case mc of
      Nothing -> identity []
      Just c | p c ->
                 parseByte ==> \b ->
                 parseWhileVerbose p ==> \bs ->
                 identity (b:bs)
             | otherwise ->
                 identity []
-- parseRawPGM =
--    parseWhileWith w2c notWhite ==> \header -> skipSpaces ==>&
--    assert (header == "P5") "invalid raw header" ==>&
--    parseNat ==> \width -> skipSpaces ==>&
--    parseNat ==> \height -> skipSpaces ==>&
--    parseNat ==> \maxGrey ->
--    parseByte ==>&
--    parseBytes (width * height) ==> \bitmap ->
--    identity (Greymap width height maxGrey bitmap)
--  where notWhite = (`notElem` " \r\n\t")
-- | Consume input while @p@ holds of the f-transformed byte.
parseWhileWith :: (Word8 -> a) -> (a -> Bool) -> Parse [a]
parseWhileWith f p = fmap f <$> parseWhile (p . f)
-- | Parse a natural number; fails on no digits, or when the digit
-- string overflows Int and wraps to a negative value.
parseNat :: Parse Int
parseNat = parseWhileWith w2c isDigit ==> \digits ->
           if null digits
           then bail "no more input"
           else let n = read digits
                in if n < 0
                   then bail "integer overflow"
                   else identity n
-- | Sequence two parsers, discarding the first result.
(==>&) :: Parse a -> Parse b -> Parse b
p ==>& f = p ==> \_ -> f
-- | Skip over (and discard) any run of whitespace.
skipSpaces :: Parse ()
skipSpaces = parseWhileWith w2c isSpace ==>& identity ()
-- | Fail with @err@ unless the condition holds.
assert :: Bool -> String -> Parse ()
assert True  _    = identity ()
assert False err  = bail err
-- | Consume exactly @n@ bytes, failing with "end of input" if fewer
-- remain.
parseBytes :: Int -> Parse L.ByteString
parseBytes n =
    getState ==> \st ->
    let n' = fromIntegral n
        (h, t) = L.splitAt n' (string st)
        st' = st { offset = offset st + L.length h, string = t }
    in putState st' ==>&
       assert (L.length h == n') "end of input" ==>&
       identity h
| lpenz/realworldhaskell-exercises | ch12/Parse.hs | mit | 6,649 | 0 | 18 | 2,207 | 1,394 | 754 | 640 | 105 | 3 |
module Test.Hspec.Core.ConfigSpec (spec) where
import Prelude ()
import Helper
import System.Directory
import System.FilePath
import Test.Hspec.Core.Config
-- | Each example runs in a fresh temp directory with HOME stubbed, so
-- config-file lookups never touch the real environment.
spec :: Spec
spec = around_ inTempDirectory $ around_ (withEnvironment [("HOME", "foo")]) $ do
  describe "readConfig" $ do
    it "recognizes options from HSPEC_OPTIONS" $ do
      withEnvironment [("HSPEC_OPTIONS", "--color")] $ do
        configColorMode <$> readConfig defaultConfig [] `shouldReturn` ColorAlways
    it "recognizes options from HSPEC_*" $ do
      withEnvironment [("HSPEC_COLOR", "yes")] $ do
        configColorMode <$> readConfig defaultConfig [] `shouldReturn` ColorAlways
  describe "readConfigFiles" $ do
    it "reads .hspec" $ do
      dir <- getCurrentDirectory
      let name = dir </> ".hspec"
      writeFile name "--diff"
      readConfigFiles `shouldReturn` [(name, ["--diff"])]
    it "reads ~/.hspec" $ do
      let name = "my-home/.hspec"
      createDirectory "my-home"
      writeFile name "--diff"
      withEnvironment [("HOME", "my-home")] $ do
        readConfigFiles `shouldReturn` [(name, ["--diff"])]
    context "without $HOME" $ do
      it "returns empty list" $ do
        readConfigFiles `shouldReturn` []
    context "without current directory" $ do
      it "returns empty list" $ do
        dir <- getCurrentDirectory
        removeDirectory dir
        readConfigFiles `shouldReturn` []
| hspec/hspec | hspec-core/test/Test/Hspec/Core/ConfigSpec.hs | mit | 1,458 | 0 | 20 | 368 | 403 | 201 | 202 | 35 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module AIChallenger
( startJudge
, Config(..)
, getConfigFromCommandlineFlags
) where
import Data.Function
import Data.Monoid
import Data.String
import qualified Data.Text as T
import qualified Network.Wai.Handler.Warp as Warp
import Network.Wai.Metrics
import qualified System.Remote.Monitoring as EKG
import Path
import AIChallenger.Config
import AIChallenger.StateVar
import AIChallenger.Types
import AIChallenger.WebApp
-- | Boot the judge: start an EKG monitoring server, register one bot
-- per configured executable, and serve the web dashboard with Warp.
startJudge :: Game game => game -> Config -> IO ()
startJudge game config = do
    -- The EKG metrics server is fixed to localhost:7999.
    metricStore <- EKG.serverMetricStore <$> EKG.forkServer "127.0.0.1" 7999
    webMetrics <- registerWaiMetrics metricStore
    stateVar <- mkStateVar
    -- One Bot per executable from the config, named after its file name.
    let bots =
            [ Bot (T.pack (toFilePath (filename exe))) (ExecutableBot exe)
            | exe <- cfgBotExecutables config
            ]
    mapM_ (\bot -> addBot bot stateVar) bots
    let settings = Warp.defaultSettings
            & Warp.setPort (cfgPort config)
            & Warp.setHost (fromString (cfgAddress config))
    putStrLn ("Dashboard is at http://" <> cfgAddress config
        <> ":" <> show (cfgPort config))
    putStrLn "EKG is at http://127.0.0.1:7999"
    Warp.runSettings settings (webApp game stateVar (Just webMetrics))
| ethercrow/ai-challenger | src/AIChallenger.hs | mit | 1,303 | 0 | 17 | 268 | 351 | 183 | 168 | 34 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE NoImplicitPrelude #-}
-- | Utilities for dealing with Aeson version update
module Data.Aeson.KeyHelper
( module KeyMap
, toKey
, toText
) where
import Prelude (id)
import qualified Data.Text as Text
#if MIN_VERSION_aeson (2,0,0)
import qualified Data.Aeson.Key as Key
import Data.Aeson.KeyMap as KeyMap hiding (map)
-- | Convert strict 'Text' to an aeson-2 'Key.Key'.
toKey :: Text.Text -> Key.Key
toKey = Key.fromText
-- | Convert an aeson-2 'Key.Key' back to strict 'Text'.
toText :: Key.Key -> Text.Text
toText = Key.toText
#else
import Data.HashMap.Strict as KeyMap hiding (map)
-- | Pre-aeson-2: map keys are already 'Text', so this is the identity.
toKey :: Text.Text -> Text.Text
toKey = id
-- | Pre-aeson-2: map keys are already 'Text', so this is the identity.
toText :: Text.Text -> Text.Text
toText = id
#endif
| snoyberg/keter | Data/Aeson/KeyHelper.hs | mit | 689 | 0 | 6 | 176 | 104 | 69 | 35 | 13 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE DeriveDataTypeable #-}
module Control.Schule.Typ where
import Control.Types ( UNr, Name )
import Autolib.Reader
import Autolib.ToDoc
import Autolib.Multilingual (Language(..))
import Data.Typeable
-- | This should be exactly what is also stored in the DB table.
data Schule =
    Schule { unr :: UNr
           , name :: Name
           , mail_suffix :: Name
           -- ^ Students are only accepted
           -- if their email address ends with this suffix
           , use_shibboleth :: Bool
           , preferred_language :: Language
           }
    deriving ( Typeable )
derives [makeReader, makeToDoc] [''Schule]
| marcellussiegburg/autotool | db/src/Control/Schule/Typ.hs | gpl-2.0 | 660 | 2 | 8 | 179 | 121 | 76 | 45 | 16 | 0 |
module KDtree where
---------------------------------------------------------
import Control.Applicative
import Data.Bits
import qualified Data.Foldable as F
import qualified Data.List as L
import Vec3D
---------------------------------------------------------
-- | A 3-D k-d tree: each node stores an object with its position, the
-- axis this node splits on, and the two subtrees.  NOTE the field
-- order: 'kdRight' is the THIRD constructor argument and 'kdLeft' the
-- fourth -- the positional patterns further below depend on this.
data KDtree a = Node
    { kdObject :: (a, Vec3D)
    , kdAxis   :: Axis
    , kdRight  :: KDtree a
    , kdLeft   :: KDtree a
    }
    | Leaf
    deriving (Show)
-- | Fold over the stored objects: right subtree first, then this
-- node's object, then the left subtree (positions are dropped).
instance F.Foldable KDtree where
    foldr _ z Leaf = z
    foldr f z (Node (x, _) _ rt lt) =
        F.foldr f (f x (F.foldr f z rt)) lt
-- | Render a tree as a nested multi-line string for debugging.  The
-- pattern binds the third field (kdRight) to @r@ and the fourth
-- (kdLeft) to @l@, so the labels match the record fields here.
prettyPrint :: (Show a) => KDtree a -> String
prettyPrint Leaf = "Leaf\n"
prettyPrint (Node obj axis r l) = "Node {\n\t" ++ "Object: " ++ (show obj) ++ "\n\tAxis: " ++ (show axis) ++
                                  "\nLeft -\n" ++ (prettyPrint l) ++ "Right -\n" ++ (prettyPrint r) ++ "\n}\n"
---------------------------------------------------------
-- | The three coordinate axes a node can split on.
data Axis = X | Y | Z deriving (Show, Eq, Enum, Bounded)
-- | Advance to the next axis in the X -> Y -> Z -> X cycle.
next :: Axis -> Axis
next a = case a of
    Z -> X
    _ -> succ a
-- | Projection extracting the coordinate for the given axis.
coord :: Axis -> (Vec3D -> Float)
coord axis = case axis of
    X -> vX
    Y -> vY
    Z -> vZ
---------------------------------------------------------
-- | Build a balanced tree from (object, position) pairs, starting the
-- axis cycle at X.
fromList :: [(a, Vec3D)] -> KDtree a
fromList xs = fromList' xs X
-- | Worker for 'fromList': sort by the current axis, place the median
-- at this node and recurse on the halves with the next axis.  The
-- @m:r@ pattern is safe because the empty list is handled above and
-- @splitAt (length `div` 2)@ always leaves the second half non-empty.
fromList' :: [(a, Vec3D)] -> Axis -> KDtree a
fromList' [] _    = Leaf
fromList' xs axis = Node
    { kdObject = m
    , kdAxis   = axis
    , kdRight  = fromList' r $ next axis
    , kdLeft   = fromList' l $ next axis
    }
    where sorted   = L.sortBy (\(_, a) (_, b) -> let c = coord axis in compare (c a) (c b)) xs
          (l, m:r) = L.splitAt (length sorted `div` 2) sorted
-- | Flatten the tree back into a list of objects (positions dropped).
toList :: KDtree a -> [a]
toList = F.foldr (:) []
-- | The object stored nearest to the query position, if the tree is
-- non-empty.
nearestNeighbor :: KDtree a -> Vec3D -> Maybe a
nearestNeighbor tree pos = fst <$> nearestNeighbor' tree pos
-- | Worker returning the nearest object together with its SQUARED
-- distance to the query point.
nearestNeighbor' :: KDtree a -> Vec3D -> Maybe (a, Float)
nearestNeighbor' Leaf _ = Nothing
nearestNeighbor' (Node (obj, pos) axis l r) pt =
    case candidate of
        -- Only cross the splitting plane when the best squared distance
        -- so far still reaches past the plane.
        Just (_, sd) -> if sd > (offset * offset)
                           then cmpPts candidate $ nearestNeighbor' other pt
                           else candidate
        -- Unreachable in practice: 'candidate' is built from a Just.
        Nothing -> Nothing
    where offset = coord axis pt - coord axis pos
          -- NOTE(review): the pattern above binds the constructor's
          -- third field (kdRight) to 'l' and the fourth (kdLeft) to
          -- 'r', so @offset > 0@ descends kdLeft first.  Results stay
          -- correct because of the plane check above, but the descent
          -- order looks inverted -- confirm intent.
          (sub, other) = if offset > 0 then (r, l) else (l, r)
          sqDist = vSqLen $ vSub pos pt
          candidate = cmpPts (Just (obj, sqDist)) (nearestNeighbor' sub pt)
          -- Keep whichever pair is closer; the first argument wins ties.
          cmpPts (Just a) (Just b) = if snd a > snd b then Just b else Just a
          cmpPts (Just a) _ = Just a
          cmpPts _ (Just b) = Just b
          cmpPts _ _ = Nothing
-- | Up to @num@ stored objects ordered by increasing distance from the
-- query position.
kNearestNeighbors :: KDtree a -> Vec3D -> Int -> [a]
kNearestNeighbors tree pos num = map fst $ kNearestNeighbors' tree pos num
-- | Worker for 'kNearestNeighbors': up to @num@ (object, squared
-- distance) pairs ordered by increasing squared distance from @pt@.
kNearestNeighbors' :: KDtree a -> Vec3D -> Int -> [(a, Float)]
kNearestNeighbors' Leaf _ _ = []
kNearestNeighbors' (Node (obj, pos) axis l r) pt num
    -- Asking for no neighbors yields the empty list.  Without this
    -- guard 'newList' is empty and the 'last' below crashes, because
    -- the first guard always forces 'largest'.
    | num <= 0 = []
    -- Only cross the splitting plane when the worst candidate kept so
    -- far is further away than the plane itself.
    | largest > (offset * offset) = take num $ foldl (flip $ L.insertBy cmpPts) newList $ kNearestNeighbors' other pt num
    | otherwise = newList
    where offset = coord axis pt - coord axis pos
          -- NOTE(review): the pattern above binds kdRight to 'l' and
          -- kdLeft to 'r', so @offset > 0@ descends kdLeft first; the
          -- plane test keeps results correct, but confirm the intended
          -- descent order.
          (sub, other) = if offset > 0 then (r, l) else (l, r)
          sqDist = vSqLen $ vSub pos pt
          newList = take num $ L.insertBy cmpPts (obj, sqDist) $ kNearestNeighbors' sub pt num
          largest = snd $ last newList
          cmpPts a b = snd a `compare` snd b
| Chase-C/KDtree | src/KDtree.hs | gpl-2.0 | 3,603 | 0 | 14 | 1,180 | 1,329 | 698 | 631 | 75 | 8 |
{-# LANGUAGE OverloadedStrings, ScopedTypeVariables #-}
import Types
import Rss
import Db
import Config
import Utils
import Data.Monoid
import Control.Concurrent (threadDelay)
import Control.Concurrent.Async
import Control.Concurrent.MVar
import Control.Monad.Reader
import Control.Monad.IO.Class
import Control.Exception
import System.Exit
import Web.Scotty
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as M
import qualified Data.Text as T
import qualified Data.Text.Lazy as TL
import Text.XML.Stream.Parse (XmlException)
import Network.HTTP.Conduit (HttpException)
import System.Clock
import Data.Time.Clock
import Data.Time.Calendar
import System.Directory (createDirectoryIfMissing, removeDirectoryRecursive)
import System.FilePath ((</>))
import System.Environment.XDG.BaseDir (getUserDataDir)
type ErrorChannels = Map T.Text (Either T.Text Int)
-- | Start the proxy: launch the background updater thread and serve
-- one RSS route per configured channel via scotty.
main :: IO ()
main = do
  location <- getUserDataDir "html-rss-proxy"
  createDirectoryIfMissing True location
  errorChannels <- newMVar M.empty
  -- The updater loops forever; we deliberately do not wait on it.
  void $ async (updateChannels location errorChannels)
  scotty port $
    forM_ channelList $ \(path, name, _) ->
      get (literal path) (getRss location errorChannels name)
-- | Serve one channel as RSS.  A channel currently marked as failed
-- (Left in the error map) raises an error response; otherwise the
-- last stored copy is rendered from the database.
getRss :: FilePath -> MVar ErrorChannels -> T.Text -> ActionM ()
getRss path errorChannels name = do
  setHeader "Content-Type" "application/rss+xml; charset=UTF-8"
  currentErrorChannels <- liftIO (readMVar errorChannels)
  case M.lookup name currentErrorChannels of
    Just (Left err) -> raise ("Error in channel " <> TL.fromStrict name <> TL.fromStrict err)
    _ -> do
      channel <- liftIO $ getChannelFromDb path name
      case channel of
        Nothing -> raise ("Empty channel " <> TL.fromStrict name)
        Just c -> raw (renderChannelToRSS c)
-- | Periodically refresh every channel on a fixed monotonic schedule.
-- Failures are counted per channel; after 'retryCount' consecutive
-- failures the channel is marked failed (Left) so 'getRss' reports it.
-- An unexpected exception type terminates the whole process.
updateChannels :: FilePath -> MVar ErrorChannels -> IO ()
updateChannels path errorChannels = getCurrentMonotonicTime >>= go
  where
    getCurrentMonotonicTime = toNanoSecs <$> getTime Monotonic
    -- One sweep over all channels, then sleep until the next slot.
    go prevTime = do
      forM_ channelList $ \(_, name, getChannel) ->
        updateChannel name getChannel
      currentTime <- getCurrentMonotonicTime
      let nextTime = prevTime + updateInterval
          waitTime = nextTime - currentTime
          waitTimeUs = fromIntegral $ waitTime `div` 1000
      -- Wait until the next step if we're not late
      when (waitTimeUs > 0) $
        liftIO $ threadDelay waitTimeUs
      go nextTime
    -- Fetch a channel; on success store it with the current wall-clock
    -- date and clear its error entry, otherwise record the failure.
    updateChannel name getChannel = flip catches (handlers name) $ do
      channel <- getChannel
      if null (channelArticles channel) then
        storeException name "Empty channel"
      else
        modifyMVar_ errorChannels $ \currentErrorChannels -> do
          (UTCTime nowDay nowTime) <- getCurrentTime
          let (year, month, day) = toGregorian nowDay
              -- picoseconds -> whole seconds since midnight
              seconds = fromInteger (diffTimeToPicoseconds nowTime `div` 1000000000000)
              (hours, seconds') = divMod seconds (60*60)
              (minutes, seconds'') = divMod seconds' 60
              date = Date (fromInteger year) month day hours minutes seconds''
          updateChannelInDb path name date channel
          return (M.delete name currentErrorChannels)
    -- Known fetch/parse exception types are recorded; anything else
    -- is treated as a bug and aborts the process.
    handlers name = [ Handler (\(e :: IOException) -> storeException name (T.pack (show e)))
                    , Handler (\(e :: HttpException) -> storeException name (T.pack (show e)))
                    , Handler (\(e :: XmlException) -> storeException name (T.pack (show e)))
                    , Handler (\(e :: ParsingException) -> storeException name (T.pack (show e)))
                    , Handler (\(e :: SomeException) -> putStrLn ("Exception while updating " ++ T.unpack name ++ ": " ++ show e) >> exitFailure)
                    ]
    -- Bump the per-channel failure counter; flip to a sticky Left once
    -- 'retryCount' is reached.
    storeException name exception =
      modifyMVar_ errorChannels $ \currentErrorChannels -> do
        let newErrorChannels =
              case M.lookup name currentErrorChannels of
                Just (Left _) -> M.insert name (Left exception) currentErrorChannels
                Just (Right count) -> if count >= retryCount then
                                        M.insert name (Left exception) currentErrorChannels
                                      else
                                        M.insert name (Right (succ count)) currentErrorChannels
                Nothing -> M.insert name (Right 1) currentErrorChannels
        return newErrorChannels
| sdroege/html-rss-proxy | src/Main.hs | gpl-3.0 | 4,596 | 0 | 22 | 1,300 | 1,269 | 652 | 617 | 89 | 5 |
{-# LANGUAGE OverloadedStrings #-}
module Reffit.PaperRoll where
import Reffit.Types
import Reffit.AcidTypes
import Reffit.Document
import Reffit.Scores
import Reffit.Sort
import Reffit.Search
import Reffit.FieldTag
import Control.Applicative
import qualified Data.List as L
import Data.Maybe (listToMaybe)
import Data.Map.Syntax
import Snap.Snaplet (Handler)
import Snap.Core
import Snap.Snaplet.AcidState (query)
import Snap.Snaplet.Heist
import Application
import Heist
import qualified Heist.Interpreted as I
import qualified Data.Text as T
import Data.Text.Encoding (decodeUtf8)
import qualified Data.Map as Map
import qualified Data.ByteString.Char8 as BS
import Control.Monad (join)
import Control.Monad.Trans (liftIO)
import Data.Time
-- | Translate HTTP query parameters into a presentation strategy: a
-- "q" parameter wins and becomes a search; otherwise the optional
-- "sortBy" parameter picks the sort order and either "filterTag"
-- values or checked "filterTag.*" checkboxes pick the tag filter.
paramsToStrategy :: FieldTags -> Map.Map BS.ByteString [BS.ByteString]
-> PresentationStrategy
paramsToStrategy tags params = case Map.lookup "q" params of
  Just (searchTerms:_) -> SearchBy . decodeUtf8 $ searchTerms
  Just [] -> FiltSort New []
  Nothing ->
    let sortCrit = maybe New id $ do
          sortStrs <- Map.lookup "sortBy" params
          sortStr <- listToMaybe sortStrs :: Maybe BS.ByteString
          readSort sortStr
        filtTags = case Map.lookup "filterTag" params of
          -- not-logged-in-case with tag specified
          Just fts -> [t | t <- map (fromFullName . decodeUtf8) $ fts
                      , tagPathIsElem t tags]
          Nothing ->
            -- Logged-in case: collect "filterTag.<name>" keys whose
            -- checkbox value is exactly ["on"].
            [fromFullName . snd . T.breakOnEnd "filterTag."
             . decodeUtf8 . fst $ kv
            | kv <- Map.toList params
            , T.isPrefixOf "filterTag." (decodeUtf8 . fst $ kv)
            , Map.lookup (fst kv) params == Just ("on":[])]
    in FiltSort sortCrit filtTags
-- | Splices for the paper roll, capped at the first 100 documents.
allPaperRollSplices :: [Document] -> Splices (SnapletISplice App)
allPaperRollSplices docs = do
  "paper_roll_papers" ## (renderPaperRollPapers (take 100 docs))
-- | Render each document with its own set of splices.
renderPaperRollPapers :: [Document] -> SnapletISplice App
renderPaperRollPapers = I.mapSplices $ I.runChildrenWith . splicesFromDocument
-- | Per-document splices: id, title, authors, external link, the three
-- score dimensions (novelty, rigor, coolness) and the field tags.
splicesFromDocument :: Document -> Splices (SnapletISplice App)
splicesFromDocument doc = do
  let (novScore, rigScore, coolScore) = documentDimScores doc
  "idNum" ## I.textSplice (T.pack . show $ docId doc)
  "paper_title" ## I.textSplice (docTitle doc)
  "paper_authors" ## I.textSplice (T.intercalate ", " $ docAuthors doc)
  "paper_external_link" ## I.textSplice (docLink doc)
  "noveltyScore" ## I.textSplice (T.pack $ show (novScore))
  "rigorScore" ## I.textSplice (T.pack $ show (rigScore))
  "coolnessScore" ## I.textSplice (T.pack $ show (coolScore))
  (allDFieldTags $ docFieldTags doc)
-- | Splices for a document's field tags, labelled by the last path
-- component of each tag.
allDFieldTags :: [TagPath] -> Splices (SnapletISplice App)
allDFieldTags tags = "fieldTags" ## renderDFieldTags fLabels
  where
    -- NOTE(review): 'last' is partial; an empty TagPath in the list
    -- would crash here -- confirm TagPaths are always non-empty.
    fLabels = map last tags
-- | Render each tag label with its own splice.
renderDFieldTags :: [T.Text] -> SnapletISplice App
renderDFieldTags = I.mapSplices $ I.runChildrenWith . splicesFromDTag
-- | The single splice exposing one tag label.
splicesFromDTag :: Monad n => T.Text -> Splices (I.Splice n)
splicesFromDTag t = do
  "fieldTag" ## I.textSplice t
| imalsogreg/reffit | src/Reffit/PaperRoll.hs | gpl-3.0 | 3,112 | 0 | 21 | 651 | 946 | 488 | 458 | 71 | 4 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Datastore.Projects.Commit
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Commits a transaction, optionally creating, deleting or modifying some
-- entities.
--
-- /See:/ <https://cloud.google.com/datastore/ Google Cloud Datastore API Reference> for @datastore.projects.commit@.
module Network.Google.Resource.Datastore.Projects.Commit
(
-- * REST Resource
ProjectsCommitResource
-- * Creating a Request
, projectsCommit
, ProjectsCommit
-- * Request Lenses
, pcXgafv
, pcUploadProtocol
, pcPp
, pcAccessToken
, pcUploadType
, pcPayload
, pcBearerToken
, pcProjectId
, pcCallback
) where
import Network.Google.Datastore.Types
import Network.Google.Prelude
-- | A resource alias for @datastore.projects.commit@ method which the
-- 'ProjectsCommit' request conforms to.
-- The servant-style route: POST /v1/projects/{projectId}:commit with
-- the standard Google API query parameters, a JSON 'CommitRequest'
-- body and a JSON 'CommitResponse' result.
type ProjectsCommitResource =
     "v1" :>
       "projects" :>
         CaptureMode "projectId" "commit" Text :>
           QueryParam "$.xgafv" Xgafv :>
             QueryParam "upload_protocol" Text :>
               QueryParam "pp" Bool :>
                 QueryParam "access_token" Text :>
                   QueryParam "uploadType" Text :>
                     QueryParam "bearer_token" Text :>
                       QueryParam "callback" Text :>
                         QueryParam "alt" AltJSON :>
                           ReqBody '[JSON] CommitRequest :>
                             Post '[JSON] CommitResponse
-- | Commits a transaction, optionally creating, deleting or modifying some
-- entities.
--
-- /See:/ 'projectsCommit' smart constructor.
-- Record of all wire-level request parameters; constructed via the
-- 'projectsCommit' smart constructor and accessed via the lenses below.
data ProjectsCommit = ProjectsCommit'
    { _pcXgafv :: !(Maybe Xgafv)
    , _pcUploadProtocol :: !(Maybe Text)
    , _pcPp :: !Bool
    , _pcAccessToken :: !(Maybe Text)
    , _pcUploadType :: !(Maybe Text)
    , _pcPayload :: !CommitRequest
    , _pcBearerToken :: !(Maybe Text)
    , _pcProjectId :: !Text
    , _pcCallback :: !(Maybe Text)
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ProjectsCommit' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pcXgafv'
--
-- * 'pcUploadProtocol'
--
-- * 'pcPp'
--
-- * 'pcAccessToken'
--
-- * 'pcUploadType'
--
-- * 'pcPayload'
--
-- * 'pcBearerToken'
--
-- * 'pcProjectId'
--
-- * 'pcCallback'
projectsCommit
    :: CommitRequest -- ^ 'pcPayload'
    -> Text -- ^ 'pcProjectId'
    -> ProjectsCommit
projectsCommit pPcPayload_ pPcProjectId_ =
    ProjectsCommit'
    { _pcXgafv = Nothing
    , _pcUploadProtocol = Nothing
    , _pcPp = True -- pretty-printing of the response defaults to on
    , _pcAccessToken = Nothing
    , _pcUploadType = Nothing
    , _pcPayload = pPcPayload_
    , _pcBearerToken = Nothing
    , _pcProjectId = pPcProjectId_
    , _pcCallback = Nothing
    }
-- Lenses over the request record, one per field.
-- | V1 error format.
pcXgafv :: Lens' ProjectsCommit (Maybe Xgafv)
pcXgafv = lens _pcXgafv (\ s a -> s{_pcXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
pcUploadProtocol :: Lens' ProjectsCommit (Maybe Text)
pcUploadProtocol
  = lens _pcUploadProtocol
      (\ s a -> s{_pcUploadProtocol = a})
-- | Pretty-print response.
pcPp :: Lens' ProjectsCommit Bool
pcPp = lens _pcPp (\ s a -> s{_pcPp = a})
-- | OAuth access token.
pcAccessToken :: Lens' ProjectsCommit (Maybe Text)
pcAccessToken
  = lens _pcAccessToken
      (\ s a -> s{_pcAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
pcUploadType :: Lens' ProjectsCommit (Maybe Text)
pcUploadType
  = lens _pcUploadType (\ s a -> s{_pcUploadType = a})
-- | Multipart request metadata.
pcPayload :: Lens' ProjectsCommit CommitRequest
pcPayload
  = lens _pcPayload (\ s a -> s{_pcPayload = a})
-- | OAuth bearer token.
pcBearerToken :: Lens' ProjectsCommit (Maybe Text)
pcBearerToken
  = lens _pcBearerToken
      (\ s a -> s{_pcBearerToken = a})
-- | The ID of the project against which to make the request.
pcProjectId :: Lens' ProjectsCommit Text
pcProjectId
  = lens _pcProjectId (\ s a -> s{_pcProjectId = a})
-- | JSONP
pcCallback :: Lens' ProjectsCommit (Maybe Text)
pcCallback
  = lens _pcCallback (\ s a -> s{_pcCallback = a})
-- Wires the record fields, in route order, into the servant client
-- derived from 'ProjectsCommitResource'.
instance GoogleRequest ProjectsCommit where
        type Rs ProjectsCommit = CommitResponse
        type Scopes ProjectsCommit =
             '["https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/datastore"]
        requestClient ProjectsCommit'{..}
          = go _pcProjectId _pcXgafv _pcUploadProtocol
              (Just _pcPp)
              _pcAccessToken
              _pcUploadType
              _pcBearerToken
              _pcCallback
              (Just AltJSON)
              _pcPayload
              datastoreService
          where go
                  = buildClient (Proxy :: Proxy ProjectsCommitResource)
                      mempty
| rueshyna/gogol | gogol-datastore/gen/Network/Google/Resource/Datastore/Projects/Commit.hs | mpl-2.0 | 5,619 | 0 | 19 | 1,442 | 939 | 545 | 394 | 132 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
module Ch23Spec where
import Ch23_System_F_Universals
import Protolude
import Test.Hspec
-- | Runs the four chapter-23 example checks under one describe block.
spec :: Spec
spec = describe "ch23" $ do
  idT
  doubleT
  selfAppT
  quadrupleT
| haroldcarr/learn-haskell-coq-ml-etc | foundations/book/2002-TAPL-2005-ATAPL/tapl/test/Ch23Spec.hs | unlicense | 274 | 0 | 7 | 72 | 46 | 25 | 21 | 12 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
module Language.K3.Transform.Hints where
import Control.DeepSeq
import Data.Binary
import Data.Serialize
import Data.Typeable
import GHC.Generics (Generic)
import qualified Data.Set as S
import Language.K3.Core.Common
data OptHint
    -- | The sets of identifiers which can be bound by reference, by copy without writeback, and by
    -- copy with writeback.
    = BindHint (S.Set Identifier, S.Set Identifier, S.Set Identifier)
    -- | Whether or not the argument in an application can be passed in as-is (True), or with a
    -- move.
    | PassHint Bool
    -- | Whether or not a function's argument is read-only in its implementation.
    | FuncHint Bool
    -- | Partitioning of a function's closure capture into whether it wants the closure to be
    -- referenced, moved or copied.
    | CaptHint (S.Set Identifier, S.Set Identifier, S.Set Identifier)
    -- | Whether or not a function's return value must be manually moved.
    | ReturnMoveHint Bool
    -- | NOTE(review): undocumented in the original -- confirm semantics.
    | GlobalHint
  deriving (Eq, Ord, Read, Show, Typeable, Generic)
-- Serialization instances rely entirely on the Generic derivation.
instance NFData OptHint
instance Binary OptHint
instance Serialize OptHint
| DaMSL/K3 | src/Language/K3/Transform/Hints.hs | apache-2.0 | 1,188 | 0 | 9 | 239 | 191 | 111 | 80 | 21 | 0 |
{- |
Module : Text.Tabl.Environment
Description : Table environments
Copyright : (c) 2016-2020 Daniel Lovasko
License : BSD2
Maintainer : Daniel Lovasko <daniel.lovasko@gmail.com>
Stability : stable
Portability : portable
Definition of various environments for table rendering.
-}
module Text.Tabl.Environment
( Environment(..)
) where
-- | Output environment that declares the way that the table will be
-- rendered.
-- New output formats are added as further constructors here.
data Environment
  = EnvAscii -- ^ ASCII-art suitable for the command-line
  | EnvLatex -- ^ LaTeX source code
  deriving (Show)
| lovasko/tabl | src/Text/Tabl/Environment.hs | bsd-2-clause | 565 | 0 | 6 | 102 | 38 | 26 | 12 | 6 | 0 |
module Data.SimpleN3.Command where
import Data.SimpleN3.ProgType
import Data.SimpleN3.Job
-- | Dispatch on the parsed command-line mode and run the job.
-- NOTE(review): only the 'Test' constructor is matched; if 'Simplen3'
-- has other constructors this function is partial -- confirm.
commandLineProcess :: Simplen3 -> IO ()
commandLineProcess Test = do
  putStrLn "test called"
  startJob
| wavewave/simplen3 | lib/Data/SimpleN3/Command.hs | bsd-2-clause | 198 | 0 | 7 | 28 | 50 | 27 | 23 | 7 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE NoImplicitPrelude #-}
module HERMIT.Plugin.Display
( showDisplay
, printDisplay
, ps_putStr
, ps_putStrLn
) where
import Control.Monad.Reader
import Control.Monad.State
import Data.Maybe (fromMaybe)
import HERMIT.Kernel (queryK, CommitMsg(..))
import HERMIT.Kure
import HERMIT.Plugin.Types
import HERMIT.PrettyPrinter.Common
import Prelude.Compat
import System.IO
-- | Pretty-print the current AST through the kernel, optionally
-- narrowed to the given path, and return the rendered document.
showDisplay :: Maybe PathH -> PluginM DocH
showDisplay window = do
    k <- asks pr_kernel
    st <- get
    let ast = ps_cursor st
        ppOpts = pOptions $ ps_pretty st
    d <- queryK k (extractT $ pathT (fromMaybe mempty window) $ liftPrettyH ppOpts $ pLCoreTC $ ps_pretty st)
                  Never (mkKernelEnv st) ast
    return $ snd d -- discard new AST, assuming pretty printer won't create one
-- TODO: rm
-- | Render the current focus to the given handle (stdout by default)
-- using the renderer stored in the plugin state.
printDisplay :: Maybe Handle -> Maybe PathH -> PluginM ()
printDisplay mbh window = do
    doc <- showDisplay window
    st <- get
    let ppOpts = pOptions $ ps_pretty st
        h = fromMaybe stdout mbh
    liftIO $ ps_render st h ppOpts $ Right $ doc
-- TODO: rm
-- | Write a plain string to stdout through the plugin state's renderer.
ps_putStr :: (MonadIO m, MonadState PluginState m) => String -> m ()
ps_putStr str = do
    st <- get
    liftIO $ ps_render st stdout (pOptions $ ps_pretty st) (Left str)
-- TODO: rm
-- | Like 'ps_putStr' but appends a trailing newline.
ps_putStrLn :: (MonadIO m, MonadState PluginState m) => String -> m ()
ps_putStrLn str = ps_putStr (str ++ "\n")
| beni55/hermit | src/HERMIT/Plugin/Display.hs | bsd-2-clause | 1,416 | 0 | 16 | 313 | 448 | 230 | 218 | 38 | 1 |
{-# LANGUAGE TemplateHaskell, FunctionalDependencies #-}
{-| Implementation of the Ganeti Disk config object.
-}
{-
Copyright (C) 2014 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.Objects.Disk where
import Prelude ()
import Ganeti.Prelude
import qualified Data.ByteString.UTF8 as UTF8
import Data.Char (isAsciiLower, isAsciiUpper, isDigit)
import Data.List (isPrefixOf, isInfixOf)
import Language.Haskell.TH.Syntax
import Text.JSON (showJSON, readJSON, JSValue(..))
import qualified Text.JSON as J
import Ganeti.JSON (Container, fromObj)
import Ganeti.THH
import Ganeti.THH.Field
import Ganeti.Types
import Ganeti.Utils.Validate
-- | Constant for the dev_type key entry in the disk config.
devType :: String
devType = "dev_type"
-- (serialized alongside the logical id; see 'lidEncodeType' below)
-- | The disk parameters type.
type DiskParams = Container JSValue
-- | An alias for DRBD secrets
type DRBDSecret = String
-- Represents a group name and a volume name.
--
-- From @man lvm@:
--
-- The following characters are valid for VG and LV names: a-z A-Z 0-9 + _ . -
--
-- VG and LV names cannot begin with a hyphen. There are also various reserved
-- names that are used internally by lvm that can not be used as LV or VG names.
-- A VG cannot be called anything that exists in /dev/ at the time of
-- creation, nor can it be called '.' or '..'. A LV cannot be called '.' '..'
-- 'snapshot' or 'pvmove'. The LV name may also not contain the strings '_mlog'
-- or '_mimage'
data LogicalVolume = LogicalVolume { lvGroup :: String
                                   , lvVolume :: String
                                   }
  deriving (Eq, Ord)
-- Shown as "group/volume", matching LVM's own path style.
instance Show LogicalVolume where
  showsPrec _ (LogicalVolume g v) =
    showString g . showString "/" . showString v
-- | Check the constraints for a VG/LV names (except the \@\/dev\/\@ check).
instance Validatable LogicalVolume where
validate (LogicalVolume g v) = do
let vgn = "Volume group name"
-- Group name checks
nonEmpty vgn g
validChars vgn g
notStartsDash vgn g
notIn vgn g [".", ".."]
-- Volume name checks
let lvn = "Volume name"
nonEmpty lvn v
validChars lvn v
notStartsDash lvn v
notIn lvn v [".", "..", "snapshot", "pvmove"]
reportIf ("_mlog" `isInfixOf` v) $ lvn ++ " must not contain '_mlog'."
reportIf ("_mimage" `isInfixOf` v) $ lvn ++ "must not contain '_mimage'."
where
nonEmpty prefix x = reportIf (null x) $ prefix ++ " must be non-empty"
notIn prefix x =
mapM_ (\y -> reportIf (x == y)
$ prefix ++ " must not be '" ++ y ++ "'")
notStartsDash prefix x = reportIf ("-" `isPrefixOf` x)
$ prefix ++ " must not start with '-'"
validChars prefix x =
reportIf (not . all validChar $ x)
$ prefix ++ " must consist only of [a-z][A-Z][0-9][+_.-]"
validChar c = isAsciiLower c || isAsciiUpper c || isDigit c
|| (c `elem` "+_.-")
instance J.JSON LogicalVolume where
  showJSON = J.showJSON . show
  -- Accepts "group/volume": the break is on the FIRST '/', and the
  -- reassembled value is re-validated before being returned.
  readJSON (J.JSString s) | (g, _ : l) <- break (== '/') (J.fromJSString s) =
    either fail return . evalValidate . validate' $ LogicalVolume g l
  readJSON v = fail $ "Invalid JSON value " ++ show v
                      ++ " for a logical volume"
-- | The disk configuration type. This includes the disk type itself,
-- for a more complete consistency. Note that since in the Python
-- code-base there's no authoritative place where we document the
-- logical id, this is probably a good reference point. There is a bijective
-- correspondence between the 'DiskLogicalId' constructors and 'DiskTemplate'.
-- Constructors map 1:1 onto 'DiskTemplate' values; see 'lidDiskType'.
data DiskLogicalId
  = LIDPlain LogicalVolume -- ^ Volume group, logical volume
  | LIDDrbd8 String String Int Int Int (Private DRBDSecret)
  -- ^ NodeA, NodeB, Port, MinorA, MinorB, Secret
  | LIDFile FileDriver String -- ^ Driver, path
  | LIDSharedFile FileDriver String -- ^ Driver, path
  | LIDGluster FileDriver String -- ^ Driver, path
  | LIDBlockDev BlockDriver String -- ^ Driver, path (must be under /dev)
  | LIDRados String String -- ^ Unused, path
  | LIDExt String String -- ^ ExtProvider, unique name
    deriving (Show, Eq)
-- | Mapping from a logical id to a disk type.
lidDiskType :: DiskLogicalId -> DiskTemplate
-- Total: one equation per 'DiskLogicalId' constructor; DTDiskless has
-- no logical id and therefore never appears here.
lidDiskType (LIDPlain {}) = DTPlain
lidDiskType (LIDDrbd8 {}) = DTDrbd8
lidDiskType (LIDFile {}) = DTFile
lidDiskType (LIDSharedFile {}) = DTSharedFile
lidDiskType (LIDGluster {}) = DTGluster
lidDiskType (LIDBlockDev {}) = DTBlock
lidDiskType (LIDRados {}) = DTRbd
lidDiskType (LIDExt {}) = DTExt
-- | Builds the extra disk_type field for a given logical id.
-- The single ("dev_type", template) discriminator pair merged into the
-- serialized disk object next to the positional logical id.
lidEncodeType :: DiskLogicalId -> [(String, JSValue)]
lidEncodeType v = [(devType, showJSON . lidDiskType $ v)]
-- | Returns the storage path or the unique name for a given logical id if
-- present
getStorageId :: DiskLogicalId -> Maybe String
getStorageId dlid =
  case dlid of
    LIDPlain lv -> Just $ lvGroup lv ++ "/" ++ lvVolume lv
    -- DRBD has no single backing path, hence no storage id.
    LIDDrbd8 {} -> Nothing
    LIDFile _ path -> Just path
    LIDSharedFile _ path -> Just path
    LIDGluster _ path -> Just path
    LIDBlockDev _ path -> Just path
    LIDRados _ path -> Just path
    LIDExt _ uniqueName -> Just uniqueName
-- | Returns the provider for ExtStorage and Nothing otherwise
-- | Returns the provider for ExtStorage and Nothing otherwise
getExtProvider :: DiskLogicalId -> Maybe String
getExtProvider dlid =
  case dlid of
    LIDExt provider _ -> Just provider
    _                 -> Nothing
-- | Custom encoder for DiskLogicalId (logical id only).
-- Each variant serializes to a positional JSON array; the "dev_type"
-- discriminator is added separately by 'lidEncodeType'.
encodeDLId :: DiskLogicalId -> JSValue
encodeDLId (LIDPlain (LogicalVolume vg lv)) =
  JSArray [showJSON vg, showJSON lv]
encodeDLId (LIDDrbd8 nodeA nodeB port minorA minorB key) =
  JSArray [ showJSON nodeA, showJSON nodeB, showJSON port
          , showJSON minorA, showJSON minorB, showJSON key ]
encodeDLId (LIDRados pool name) = JSArray [showJSON pool, showJSON name]
encodeDLId (LIDFile driver name) = JSArray [showJSON driver, showJSON name]
encodeDLId (LIDSharedFile driver name) =
  JSArray [showJSON driver, showJSON name]
encodeDLId (LIDGluster driver name) = JSArray [showJSON driver, showJSON name]
encodeDLId (LIDBlockDev driver name) = JSArray [showJSON driver, showJSON name]
encodeDLId (LIDExt extprovider name) =
  JSArray [showJSON extprovider, showJSON name]
-- | Custom encoder for DiskLogicalId, composing both the logical id
-- and the extra disk_type field.
-- | Custom encoder for DiskLogicalId, composing both the logical id
-- and the extra disk_type field.
encodeFullDLId :: DiskLogicalId -> (JSValue, [(String, JSValue)])
encodeFullDLId v = (encodeDLId v, lidEncodeType v)
-- | Custom decoder for DiskLogicalId. This is manual for now, since
-- we don't have yet automation for separate-key style fields.
-- | Custom decoder for 'DiskLogicalId': dispatch on the separate
-- "dev_type" field of the disk object, then decode the positional
-- array produced by 'encodeDLId'.
decodeDLId :: [(String, JSValue)] -> JSValue -> J.Result DiskLogicalId
decodeDLId obj lid = do
  dtype <- fromObj obj devType
  case dtype of
    DTDrbd8 ->
      case lid of
        JSArray [nA, nB, p, mA, mB, k] ->
          LIDDrbd8
            <$> readJSON nA
            <*> readJSON nB
            <*> readJSON p
            <*> readJSON mA
            <*> readJSON mB
            <*> readJSON k
        _ -> fail "Can't read logical_id for DRBD8 type"
    DTPlain ->
      case lid of
        JSArray [vg, lv] -> LIDPlain <$>
          (LogicalVolume <$> readJSON vg <*> readJSON lv)
        _ -> fail "Can't read logical_id for plain type"
    DTFile ->
      case lid of
        JSArray [driver, path] ->
          LIDFile
            <$> readJSON driver
            <*> readJSON path
        _ -> fail "Can't read logical_id for file type"
    DTSharedFile ->
      case lid of
        JSArray [driver, path] ->
          LIDSharedFile
            <$> readJSON driver
            <*> readJSON path
        _ -> fail "Can't read logical_id for shared file type"
    DTGluster ->
      case lid of
        JSArray [driver, path] ->
          LIDGluster
            <$> readJSON driver
            <*> readJSON path
        -- Fixed: this message used to say "shared file type".
        _ -> fail "Can't read logical_id for gluster type"
    DTBlock ->
      case lid of
        JSArray [driver, path] ->
          LIDBlockDev
            <$> readJSON driver
            <*> readJSON path
        _ -> fail "Can't read logical_id for blockdev type"
    DTRbd ->
      case lid of
        JSArray [driver, path] ->
          LIDRados
            <$> readJSON driver
            <*> readJSON path
        -- Fixed: this message used to misspell the template as "rdb".
        _ -> fail "Can't read logical_id for rbd type"
    DTExt ->
      case lid of
        JSArray [extprovider, name] ->
          LIDExt
            <$> readJSON extprovider
            <*> readJSON name
        _ -> fail "Can't read logical_id for extstorage type"
    -- Diskless disks carry no logical id at all.
    DTDiskless ->
      fail "Retrieved 'diskless' disk."
-- | Disk data structure.
--
-- Generated via Template Haskell; the "logical_id" field is
-- (de)serialized together with the separate "dev_type" key through
-- 'decodeDLId' / 'encodeFullDLId'.
$(buildObjectWithForthcoming "Disk" "disk" $
  [ customField 'decodeDLId 'encodeFullDLId ["dev_type"] $
      simpleField "logical_id" [t| DiskLogicalId |]
  , defaultField [| [] |]
      $ simpleField "children" (return . AppT ListT . ConT $ mkName "Disk")
  , defaultField [| [] |] $ simpleField "nodes" [t| [String] |]
  , defaultField [| "" |] $ simpleField "iv_name" [t| String |]
  , simpleField "size" [t| Int |]
  , defaultField [| DiskRdWr |] $ simpleField "mode" [t| DiskMode |]
  , optionalField $ simpleField "name" [t| String |]
  , optionalField $ simpleField "spindles" [t| Int |]
  , optionalField $ simpleField "params" [t| DiskParams |]
  ]
  ++ uuidFields
  ++ serialFields
  ++ timeStampFields)
-- Boilerplate instances delegating to the TH-generated accessors.
instance TimeStampObject Disk where
  cTimeOf = diskCtime
  mTimeOf = diskMtime
instance UuidObject Disk where
  uuidOf = UTF8.toString . diskUuid
instance SerialNoObject Disk where
  serialOf = diskSerial
instance ForthcomingObject Disk where
  isForthcoming = diskForthcoming
-- | Determines whether a disk or one of his children has the given logical id
-- (determined by the volume group name and by the logical volume name).
-- This can be true only for DRBD or LVM disks.
includesLogicalId :: LogicalVolume -> Disk -> Bool
includesLogicalId lv disk =
  -- 'diskLogicalId' is Maybe-valued (presumably because of forthcoming
  -- disks built by the TH generator above -- confirm).
  case diskLogicalId disk of
    Just (LIDPlain lv') -> lv' == lv
    -- DRBD disks delegate the check to their backing children.
    Just (LIDDrbd8 {}) ->
      any (includesLogicalId lv) $ diskChildren disk
    _ -> False
| leshchevds/ganeti | src/Ganeti/Objects/Disk.hs | bsd-2-clause | 11,305 | 0 | 19 | 2,699 | 2,391 | 1,255 | 1,136 | 202 | 17 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
module Text.Syntax.Parser.Attoparsec.ByteString (
runAsAttoparsec', runAsAttoparsec,
runAsAttoparsecChar8', runAsAttoparsecChar8
) where
import Text.Syntax.Parser.Instances ()
import Text.Syntax.Poly
((<||>), TryAlternative (try, (<|>)), Syntax(..),
RunAsParser)
import Data.Attoparsec.Types (Parser, IResult (..))
import Data.ByteString (ByteString, empty)
import qualified Data.ByteString.Lazy as L (ByteString)
import qualified Data.Attoparsec.ByteString as A (anyWord8, try, parse)
import qualified Data.Attoparsec.ByteString.Char8 as A (anyChar)
import qualified Data.Attoparsec.ByteString.Lazy as L
import Data.Word (Word8)
-- | 'try' is attoparsec's backtracking combinator; '<|>' is defined so
-- the left branch is attempted with backtracking before the right one.
instance TryAlternative (Parser ByteString) where
  try = A.try
  p <|> q = try p <||> q

-- | A strict 'ByteString' parser consumes 'Word8' tokens one at a time.
instance Syntax Word8 (Parser ByteString) where
  token = A.anyWord8
-- | Interpret a final attoparsec result.  A 'Partial' continuation is fed
-- an empty chunk to force completion; failures yield the context stack
-- together with the error message.
runResult :: IResult ByteString a -> Either ([String], String) a
runResult result =
  case result of
    Done _ value        -> Right value
    Partial cont        -> runResult (cont empty)
    Fail _ contexts msg -> Left (contexts, msg)
-- | Run a syntax as a parser over a strict 'ByteString' of 'Word8' tokens.
runAsAttoparsec' :: RunAsParser Word8 ByteString a ([String], String)
runAsAttoparsec' parser input = runResult (A.parse parser input)

-- | Run a syntax as a parser over a lazy 'ByteString' of 'Word8' tokens.
runAsAttoparsec :: RunAsParser Word8 L.ByteString a ([String], String)
runAsAttoparsec parser input =
  case L.parse parser input of
    L.Done _ value        -> Right value
    L.Fail _ contexts msg -> Left (contexts, msg)
-- | Char8 view of a strict 'ByteString' parser: each token is a single
-- byte decoded as an 8-bit character.
instance Syntax Char (Parser ByteString) where
  token = A.anyChar
-- | Run a syntax over a strict 'ByteString', reading Char8 tokens.
runAsAttoparsecChar8' :: RunAsParser Char ByteString a ([String], String)
runAsAttoparsecChar8' parser input = runResult (A.parse parser input)

-- | Run a syntax over a lazy 'ByteString', reading Char8 tokens.
runAsAttoparsecChar8 :: RunAsParser Char L.ByteString a ([String], String)
runAsAttoparsecChar8 parser input =
  case L.parse parser input of
    L.Done _ value        -> Right value
    L.Fail _ contexts msg -> Left (contexts, msg)
| khibino/haskell-invertible-syntax-attoparsec | Text/Syntax/Parser/Attoparsec/ByteString.hs | bsd-3-clause | 1,863 | 0 | 10 | 327 | 608 | 340 | 268 | 42 | 3 |
------------------------------------------------------------------------------
-- |
-- Module : GGTD.Tickler
-- Copyright : (C) 2016 Samuli Thomasson
-- License : %% (see the file LICENSE)
-- Maintainer : Samuli Thomasson <samuli.thomasson@paivola.fi>
-- Stability : experimental
-- Portability : non-portable
--
-- Adding a tickler to a node allows for the node to change whenever the
-- tickler activates.
------------------------------------------------------------------------------
module GGTD.Tickler where
import GGTD.Base
import GGTD.DB (runHandler)
import GGTD.DB.Update
import Control.Concurrent
import Control.Monad
import Control.Monad.IO.Class
import Control.Lens
import qualified Data.Map as Map
import Data.Maybe (mapMaybe)
import Data.Graph.Inductive.Graph
import Data.Time.LocalTime
import Data.Time.Calendar
import Data.Time.Calendar.OrdinalDate (toOrdinalDate, mondayStartWeek)
import Data.Time.Calendar.MonthDay (dayOfYearToMonthAndDay)
import Data.Time.Calendar.WeekDate (toWeekDate, fromWeekDate)
-- * Types

type Year = Int
type Month = Int
type Week = Int
type DayOfWeek = Int

-- | When a tickler fires.  Serialised with 'Show'/'Read' into a node
-- flag; see 'attachTickler'.
data Tickler = TDayOfWeek DayOfWeek -- ^ Every given day of week
             | TMonth Month         -- ^ Beginning of the month
             | TMonthly             -- ^ Beginning of any month
             | TYear Year           -- ^ Beginning of a year
             | TDay Day             -- ^ A specific day
             deriving (Show, Read, Eq)

-- | What actions a tickler may do.
data TicklerAction = TSetFlag Flag (Maybe String)
                   deriving (Show, Read, Eq)
-- * Worker

-- | Fork a background thread that wakes up hourly, and whenever the
-- calendar day has advanced past the last tickled day runs all ticklers
-- for the days elapsed since then.
forkTicklerWorker :: IO ThreadId
forkTicklerWorker = forkIO $ forever goTickle
  where
    goTickle = do
        prev <- runHandler $ use ticklerLast
        current <- getZonedTime <&> localDay . zonedTimeToLocalTime
        if prev < current
            -- runTicklers is inclusive on both ends, hence addDays 1.
            then runHandler $ runTicklers (addDays 1 prev) current
            else threadDelay duration
    duration = 1 * 60 * 60 * 1000000 -- one hour
-- * Handlers

-- | Attaches a tickler to a node.
--
-- Ticklers are persisted in the node's 'Ticklers' flag as a 'show'n
-- list of @(Tickler, TicklerAction)@ pairs, appending to any existing
-- list.  NOTE(review): 'read' on the stored string is partial; this
-- assumes the flag is only ever written by this module.
attachTickler :: Tickler -> TicklerAction -> Node -> Handler ()
attachTickler tickler action node =
    overNode node $ _3.flags %~ Map.alter ins Ticklers
  where
    ins :: Maybe String -> Maybe String
    ins (Just str) = Just . show . (++ [(tickler, action)]) $ read str
    ins Nothing    = Just (show [(tickler, action)])
-- | Remove ALL assigned ticklers from a node, by deleting its
-- 'Ticklers' flag outright.
removeTicklers :: Node -> Handler ()
removeTicklers node = overNode node (_3 . flags %~ Map.delete Ticklers)
-- | All nodes that have ticklers attached, paired with their
-- deserialised tickler lists.
--
-- Nodes without a 'Ticklers' flag are skipped via 'mapMaybe'.  The flag
-- contents are 'read' back; see 'attachTickler' for the format.
listTicklers
    :: Handler [ ( LNode Thingy
                 , [(Tickler, TicklerAction)] ) ]
listTicklers = use gr <&> mapMaybe go . labNodes
  where
    go ln@(_, th) = do
        ts <- read <$> Map.lookup Ticklers (_flags th)
        return (ln, ts)
-- | Run every tickler that triggers within the given inclusive day
-- range, then record the end day as the last tickled day.
runTicklers
    :: Day -- ^ Start day, inclusive
    -> Day -- ^ End day, inclusive
    -> Handler ()
runTicklers start end = do
    listTicklers >>= mapM_ go
    ticklerLast .= end
  where
    go ((n, _), ts) = mapM_ (runTicklerAction n . snd) $ filter (matches . fst) ts

    -- Does a given tickler fire somewhere within [start .. end]?
    matches (TDayOfWeek day) = day `elem` triggeredWeekDays
    matches (TMonth month)   = month `elem` triggeredMonths
    matches (TYear year)     = fromIntegral year `elem` triggeredYears
    matches (TDay day)       = start <= day && day <= end
    matches TMonthly         = not $ null triggeredMonths

    -- Week days covered by the range; 7+ days covers all of them.
    triggeredWeekDays = map (snd . mondayStartWeek) $ take 7 [start .. end]
    triggeredMonths = take 12 $ map snd triggeredMonthsYears
    -- A year triggers when its January 1st falls inside the range.
    triggeredYears = [ year | (year, 1) <- triggeredMonthsYears ]
    -- (year, month) pairs whose first day falls inside the range.
    triggeredMonthsYears =
        [ (year, month)
        | day <- [start .. end]
        , let (year, yearDay) = toOrdinalDate day
        , (month, 1) <- [dayOfYearToMonthAndDay (isLeapYear year) yearDay]
        ]
-- | Apply a tickler's action to a node.
runTicklerAction :: Node -> TicklerAction -> Handler ()
runTicklerAction node (TSetFlag flag mcontent) =
    overNode node (_3 . flags %~ setFlag)
  where
    -- Replace the flag unconditionally: insert the new content, or
    -- remove the flag entirely when no content is supplied.
    setFlag = case mcontent of
        Just content -> Map.insert flag content
        Nothing      -> Map.delete flag
-- * Time utilities

-- | Set the time to Monday 0:00 of the running week.
toStartOfWeek :: LocalTime -> LocalTime
toStartOfWeek time =
    LocalTime { localDay = fromWeekDate year week 1
              , localTimeOfDay = midnight }
  where
    (year, week, _) = toWeekDate (localDay time)
-- | Set the time to 0:00 on the first day of the running month.
toStartOfMonth :: LocalTime -> LocalTime
toStartOfMonth time =
    LocalTime { localDay = fromGregorian year month 1
              , localTimeOfDay = midnight }
  where
    (year, month, _) = toGregorian (localDay time)
-- | Shift a timestamp by whole weeks, keeping the time of day.
addWeeks :: Integer -> LocalTime -> LocalTime
addWeeks n time = time { localDay = addDays (n * 7) (localDay time) }
-- | Shift a timestamp by whole months, keeping the time of day.
-- Clips to the end of the month if necessary.
addMonths :: Integer -> LocalTime -> LocalTime
addMonths count time =
    time { localDay = addGregorianMonthsClip count (localDay time) }
-- | The calendar month (1-12) of a timestamp.
getMonth :: LocalTime -> Month
getMonth time = month
  where
    (_, month, _) = toGregorian (localDay time)
-- | The current wall-clock time in the local time zone.
getLocalTime :: MonadIO m => m LocalTime
getLocalTime = liftIO (fmap zonedTimeToLocalTime getZonedTime)
| SimSaladin/ggtd | src/GGTD/Tickler.hs | bsd-3-clause | 4,929 | 0 | 13 | 1,127 | 1,341 | 730 | 611 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Main where
import Language.RM.TypeLevel
import Data.Proxy
import GHC.TypeLits
-- |Initialises R1 to 5, then raises 2 to the power of the value of R1,
-- leaving the result (32) in R0. Uses R2 as a scratch register,
-- thus the machine is initialised with 3 registers
--
-- The whole computation happens during type checking: 'Run' reduces the
-- instruction list to a @'Halted st regs@ state, and the equality
-- constraint binds the first register (R0) to @r@ so that 'natVal' can
-- reify it at the term level.
pow2 :: ('Halted a (r ': rs) ~
        Run
        '[
            -- Instr                 | label index
            -- set R1 to 5
            Inc (R 1) (L 1)          -- 0
          , Inc (R 1) (L 2)          -- 1
          , Inc (R 1) (L 3)          -- 2
          , Inc (R 1) (L 4)          -- 3
          , Inc (R 1) (L 5)          -- 4
            -- set R0 to 1
          , Inc (R 0) (L 6)          -- 5
            -- R0 = 2^R1
          , Dec (R 1) (L 7) (L 12)   -- 6
            -- R2 = R0
          , Dec (R 0) (L 8) (L 9)    -- 7
          , Inc (R 2) (L 7)          -- 8
            -- R0 = 2*R2
          , Dec (R 2) (L 10) (L 6)   -- 9
          , Inc (R 0) (L 11)         -- 10
          , Inc (R 0) (L 9)          -- 11
          , Halt                     -- 12
          ]) => Proxy r
pow2 = Proxy
-- | The value computed by the type-level machine, reified via 'natVal'.
result :: Integer
result = natVal pow2

-- | Print the machine's result.
main :: IO ()
main = putStrLn ("The result of running the machine: " ++ show result)
| kcsongor/register-machine-type | examples/Example.hs | bsd-3-clause | 1,462 | 0 | 13 | 709 | 400 | 218 | 182 | 30 | 1 |
{-# LANGUAGE MultiParamTypeClasses, FlexibleInstances #-}
{-|
Translate a @ClassDecl@ (see "AST") to its @CCode@ (see "CCode.Main")
equivalent.
-}
module CodeGen.ClassDecl () where
import CodeGen.Typeclasses
import CodeGen.CCodeNames
import CodeGen.MethodDecl ()
import CodeGen.ClassTable
import CodeGen.Type
import CodeGen.Trace
import CodeGen.GC
import CodeGen.DTrace
import CCode.Main
import CCode.PrettyCCode ()
import Data.List
import Control.Arrow
import qualified AST.AST as A
import qualified AST.Util as Util
import qualified Identifiers as ID
import qualified Types as Ty
-- | Class translation dispatches on the class's capability kind: active
-- and shared classes are actor-backed, passive ones are plain data.
instance Translatable A.ClassDecl (ProgramTable -> CCode FIN) where
  translate cdecl table
      | A.isActive cdecl = translateActiveClass cdecl table
      | A.isShared cdecl = translateSharedClass cdecl table
      | otherwise = translatePassiveClass cdecl table

-- | Translate each method of a class, concatenating the generated code.
methodImpls cdecl table = concatMap methodImpl
  where
    methodImpl mdecl = translate mdecl cdecl table
-- | Translates an active class into its C representation. Note
-- that there are additional declarations in the file generated by
-- "CodeGen.Header"
--
-- Emits, in order: the vtable selector, the object struct, the
-- runtime-type initialiser, the GC trace function, the constructor, the
-- method bodies, the message dispatcher, and the @pony_type_t@ record.
translateActiveClass cdecl@(A.Class{A.cname, A.cfields, A.cmethods}) table =
  Program $ Concat $
    (LocalInclude "header.h") :
    [traitMethodSelector table cdecl] ++
    [typeStructDecl cdecl] ++
    [runtimeTypeInitFunDecl cdecl] ++
    [tracefunDecl cdecl] ++
    [constructorImpl Active cname] ++
    methodImpls cdecl table cmethods ++
    [dispatchFunDecl cdecl] ++
    [runtimeTypeDecl cname]
-- | The C struct holding an active object's state: the actor header
-- first, then one @pony_type_t*@ per class type parameter, then one
-- member per field.
typeStructDecl :: A.ClassDecl -> CCode Toplevel
typeStructDecl cdecl@(A.Class{A.cname, A.cfields, A.cmethods}) =
  let typeParams = Ty.getTypeParameters cname in
  StructDecl (AsType $ classTypeName cname) $
    ((encoreActorT, Var "_enc__actor") :
     (map (\ty -> (Ptr ponyTypeT, AsLval $ typeVarRefName ty)) typeParams ++
      zip
        (map (translate . A.ftype) cfields)
        (map (AsLval . fieldName . A.fname) cfields)))
-- | The message dispatch function of an active class: a switch on the
-- message id that unpacks the message struct into local variables,
-- traces the arguments for the GC, and calls the matching method
-- implementation.
dispatchFunDecl :: A.ClassDecl -> CCode Toplevel
dispatchFunDecl cdecl@(A.Class{A.cname, A.cfields, A.cmethods}) =
  (Function (Static void) (classDispatchName cname)
     ([(Ptr (Ptr encoreCtxT), encoreCtxVar),
       (Ptr ponyActorT, Var "_a"),
       (Ptr ponyMsgT, Var "_m")])
     (Seq [Assign (Decl (Ptr . AsType $ classTypeName cname, thisVar))
                  (Cast (Ptr . AsType $ classTypeName cname) (Var "_a")),
           Seq $ map assignTypeVar classTypeVars,
           (Switch (Var "_m" `Arrow` Nam "id")
              (
               (if (A.isMainClass cdecl)
                then ponyMainClause :
                     methodClauses (filter ((/= ID.Name "main") . A.methodName) cmethods)
                else methodClauses $ cmethods
               ))
              (Statement $ Call (Nam "printf") [String "error, got invalid id: %zd", AsExpr $ (Var "_m") `Arrow` (Nam "id")]))]))
  where
    classTypeVars = Ty.getTypeParameters cname
    -- Bind each class type parameter from the object's stored runtime types.
    assignTypeVar t =
      Assign (Decl (Ptr ponyTypeT, AsLval $ typeVarRefName t))
             (Arrow thisName (typeVarRefName t))
    -- The main actor additionally handles the runtime's MAIN message,
    -- which carries argc/argv.
    ponyMainClause =
      (Nam "_ENC__MSG_MAIN",
       Seq $ [Assign (Decl (Ptr ponyMainMsgT, Var "msg")) (Cast (Ptr ponyMainMsgT) (Var "_m")),
              Statement $ Call ((methodImplName cname (ID.Name "main")))
                               [AsExpr encoreCtxVar,
                                (Cast (translate cname) (Var "_a")),
                                AsExpr nullVar,
                                Call (Nam "_init_argv")
                                     [AsExpr encoreCtxVar,
                                      AsExpr $ (Var "msg") `Arrow` (Nam "argc"),
                                      AsExpr $ (Var "msg") `Arrow` (Nam "argv")]]])
    -- Each method gets a future-returning clause; non-stream methods
    -- also get a one-way (fire-and-forget) clause.
    methodClauses = concatMap methodClause
    methodClause m = (mthdDispatchClause m mArgs) :
                     if not (A.isStreamMethod m)
                     then [oneWaySendDispatchClause m mArgs]
                     else []
      where
        mArgs = (A.methodName &&& A.methodParams) m

    -- explode _enc__Foo_bar_msg_t struct into variable names
    methodUnpackArguments :: A.MethodDecl -> CCode Ty -> [CCode Stat]
    methodUnpackArguments mdecl msgTypeName =
      map unpackMethodTypeParam (A.methodTypeParams mdecl) ++
      zipWith unpack (A.methodParams mdecl) [1..]
      where
        unpackMethodTypeParam :: Ty.Type -> CCode Stat
        unpackMethodTypeParam ty =
          (Assign (Decl (Ptr ponyTypeT, AsLval $ typeVarRefName ty))
                  ((Cast (msgTypeName) (Var "_m")) `Arrow` (typeVarRefName ty)))
        -- Message struct fields are positional: f1, f2, ...
        unpack :: A.ParamDecl -> Int -> CCode Stat
        unpack A.Param{A.pname, A.ptype} n =
          (Assign (Decl (translate ptype, (AsLval . argName $ pname)))
                  ((Cast (msgTypeName) (Var "_m")) `Arrow` (Nam $ "f"++show n)))

    includeCtx xs = Deref encoreCtxVar : xs

    -- Clause for the future-returning send of a method.
    mthdDispatchClause mdecl (mName, mParams)
      | A.isStreamMethod mdecl =
          (futMsgId cname mName,
           Seq $ unpackFuture : arguments' ++
                 gcReceive ++ [streamMethodCall])
      | otherwise =
          (futMsgId cname mName,
           Seq $ unpackFuture : arguments' ++
                 gcReceive ++ [pMethodDecl, methodCall])
      where
        (pMethodArrName, pMethodDecl) = arrMethodTypeVars mdecl
        arguments' = arguments mdecl (futMsgTypeName cname mName)
        gcReceive =
          gcRecv mParams
                 (Statement $ Call ponyTraceObject
                                   (includeCtx
                                      [futVar,
                                       futureTypeRecName `Dot` Nam "trace"]))
        streamMethodCall =
          Statement $ Call (methodImplName cname mName)
                           (encoreCtxVar :
                            thisVar :
                            nullVar :
                            futVar :
                            map (AsLval . argName . A.pname) mParams)
        -- Methods containing a forward-expression delegate fulfilment to
        -- the forwarding variant; otherwise fulfil the future here.
        methodCall =
          Statement $
            if null $ Util.filter A.isForward (A.mbody mdecl)
            then Call futureFulfil
                      [AsExpr encoreCtxVar,
                       AsExpr $ futVar,
                       asEncoreArgT (translate $ A.methodType mdecl)
                                    (Call (methodImplName cname mName)
                                          (encoreCtxVar : thisVar :
                                           pMethodArrName :
                                           map (AsLval . argName . A.pname) mParams))]
            else forwardMethodCall mName pMethodArrName mParams futVar

    forwardMethodCall = \mName pMethodArrName mParams lastArg ->
      Call (forwardingMethodImplName cname mName)
           (encoreCtxVar : thisVar :
            pMethodArrName :
            map (AsLval . argName . A.pname) mParams ++
            [lastArg])

    arguments mdecl ptr = methodUnpackArguments mdecl (Ptr . AsType $ ptr)

    -- Clause for the one-way (no future) send of a method.
    oneWaySendDispatchClause mdecl (mName, mParams) =
      let ptr = oneWayMsgTypeName cname mName
      in (oneWayMsgId cname mName,
          Seq $ arguments mdecl ptr ++
                gcReceive ++ [pMethodDecl, methodCall])
      where
        (pMethodArrName, pMethodDecl) = arrMethodTypeVars mdecl
        gcReceive = gcRecv mParams
                           (Comm "Not tracing the future in a oneWay send")
        methodCall =
          Statement $
            if null $ Util.filter A.isForward (A.mbody mdecl)
            then Call (methodImplName cname mName)
                      (encoreCtxVar : thisVar : pMethodArrName :
                       map (AsLval . argName . A.pname) mParams)
            else forwardMethodCall mName pMethodArrName mParams nullVar

    unpackFuture =
      let
        lval = Decl (future, futVar)
        rval = (Cast (Ptr $ encMsgT) (Var "_m")) `Arrow` (Nam "_fut")
      in
        Assign lval rval

    -- Pack the method's type-parameter representations into a local C
    -- array so the implementation receives them as one argument.
    arrMethodTypeVars mdecl =
      let arrName = "methodTypeVars"
          arr = map (AsExpr . AsLval . typeVarRefName) (A.methodTypeParams mdecl) :: [CCode Expr]
      in (Var arrName, Assign
                         (Decl (Ptr ponyTypeT, Var $ arrName ++ "[]"))
                         (Record arr))
-- | The three capability kinds the constructor generator distinguishes.
data Activity = Active | Shared | Passive

-- | Generate the C constructor for a class: allocate (or spawn) the
-- object, decorate it according to its activity kind, and return it.
constructorImpl :: Activity -> Ty.Type -> CCode Toplevel
constructorImpl act cname =
  let
    retType = translate cname
    fName = constructorImplName cname
    args = [(Ptr (Ptr encoreCtxT), encoreCtxVar),
            (Ptr (Ptr ponyTypeT), encoreRuntimeType)]
    fBody = Seq $
      assignThis :
      decorateThis act ++
      [ret thisVar]
  in
    Function retType fName args fBody
  where
    classType = AsType $ classTypeName cname
    thisType = Ptr classType
    cast = Cast thisType
    declThis = Decl (thisType, thisVar)
    runtimeType = Amp $ runtimeTypeName cname
    create = createCall act
    assignThis = Assign declThis $ cast create
    ret = Return
    -- Active/shared objects are created through the runtime; passive
    -- ones are plain allocations.
    createCall :: Activity -> CCode Expr
    createCall Active =
      Call encoreCreateName [AsExpr $ Deref encoreCtxVar, runtimeType]
    createCall Shared =
      Call encoreCreateName [AsExpr $ Deref encoreCtxVar, runtimeType]
    createCall Passive =
      Call encoreAllocName [AsExpr $ Deref encoreCtxVar, Sizeof classType]
    -- Passive objects store their runtime type in a field.
    decorateThis :: Activity -> [CCode Stat]
    decorateThis Passive = [Assign (thisVar `Arrow` selfTypeField) runtimeType]
    decorateThis _ = []
-- | Translate a shared class.  The generated pieces match the active
-- case; only the constructor's 'Activity' differs.
translateSharedClass cdecl@(A.Class{A.cname, A.cfields, A.cmethods}) table =
  Program $ Concat $
    (LocalInclude "header.h") :
    [traitMethodSelector table cdecl] ++
    [typeStructDecl cdecl] ++
    [runtimeTypeInitFunDecl cdecl] ++
    [tracefunDecl cdecl] ++
    [constructorImpl Shared cname] ++
    methodImpls cdecl table cmethods ++
    [dispatchFunDecl cdecl] ++
    [runtimeTypeDecl cname]
-- | Translates a passive class into its C representation. Note
-- that there are additional declarations (including the data
-- struct for instance variables) in the file generated by
-- "CodeGen.Header"
translatePassiveClass cdecl@(A.Class{A.cname, A.cfields, A.cmethods}) table =
  Program $ Concat $
    (LocalInclude "header.h") :
    [traitMethodSelector table cdecl] ++
    [runtimeTypeInitFunDecl cdecl] ++
    [tracefunDecl cdecl] ++
    [constructorImpl Passive cname] ++
    methodImpls cdecl table cmethods ++
    -- [dispatchfunDecl] ++
    [runtimePassiveTypeDecl cname]
  where
    -- Currently unused (see the commented-out list entry above).
    dispatchfunDecl =
      Function (Static void) (classDispatchName cname)
        ([(Ptr (Ptr encoreCtxT), encoreCtxVar),
          (Ptr ponyActorT, Var "_a"),
          (Ptr ponyMsgT, Var "_m")])
        (Comm "Stub! Might be used when we have dynamic dispatch on passive classes")
-- | Per-class vtable function: maps a (trait) message id to the address
-- of this class's implementation of that method.
traitMethodSelector :: ProgramTable -> A.ClassDecl -> CCode Toplevel
traitMethodSelector table A.Class{A.cname, A.ccomposition} =
  let
    retType = Static (Ptr void)
    fname = traitMethodSelectorName
    args = [(Typ "int" , Var "id")]
    cond = Var "id"
    traitTypes = A.typesFromTraitComposition ccomposition
    traitMethods = map (`lookupMethods` table) traitTypes
    cases = concat $ zipWith (traitCase cname) traitTypes traitMethods
    err = String "error, got invalid id: %d"
    defaultCase = Statement $ Call (Nam "printf") [err, AsExpr $ Var "id"]
    switch = Switch cond cases defaultCase
    body = Seq [ switch, Return Null ]
  in
    Function retType fname args body
  where
    -- One case per trait method; active traits additionally get the
    -- future-returning and one-way variants of every method.
    traitCase :: Ty.Type -> Ty.Type -> [A.FunctionHeader] ->
                 [(CCode Name, CCode Stat)]
    traitCase cname tname tmethods =
      let
        methodNames = map A.hname tmethods
        caseNames = map (msgId tname) methodNames
        caseStmts = map (Return . methodImplName cname) methodNames
      in zip caseNames caseStmts ++
         if Ty.isActiveSingleType tname then
           let
             futCaseNames = map (futMsgId tname) methodNames
             futCaseStmts =
               map (Return . callMethodFutureName cname) methodNames
             oneWayCaseNames = map (oneWayMsgId tname) methodNames
             oneWayCaseStmts =
               map (Return . methodImplOneWayName cname) methodNames
           in
             zip futCaseNames futCaseStmts ++
             zip oneWayCaseNames oneWayCaseStmts
         else
           []
-- | Runtime-type initialiser: stores each class type-parameter's
-- @pony_type_t*@, passed varargs-style, into the object.
runtimeTypeInitFunDecl :: A.ClassDecl -> CCode Toplevel
runtimeTypeInitFunDecl A.Class{A.cname, A.cfields, A.cmethods} =
  Function void (runtimeTypeInitFnName cname)
    [(Ptr . AsType $ classTypeName cname, thisVar), (Embed "...", Embed "")]
    (Seq $
      (Statement $ Decl (Typ "va_list", Var "params")) :
      (Statement $ Call (Nam "va_start") [Var "params", thisVar]) :
      map initRuntimeType typeParams ++
      [Statement $ Call (Nam "va_end") [Var "params"]])
  where
    typeParams = Ty.getTypeParameters cname
    initRuntimeType ty =
      Assign (thisVar `Arrow` typeVarRefName ty)
             (Call (Nam "va_arg") [Var "params", Var "pony_type_t *"])
-- | GC trace function for a class.  A user-defined method named
-- @<ClassId>_trace@ takes precedence; otherwise a default is generated
-- that traces every field according to its type.
tracefunDecl :: A.ClassDecl -> CCode Toplevel
tracefunDecl A.Class{A.cname, A.cfields, A.cmethods} =
  case find ((== Ty.getId cname ++ "_trace") . show . A.methodName) cmethods of
    Just mdecl@(A.Method{A.mbody}) ->
      Function void (classTraceFnName cname)
        [(Ptr encoreCtxT, encoreCtxVar), (Ptr void, Var "p")]
        (Statement $ Call (methodImplName cname (A.methodName mdecl))
                          [Amp encoreCtxVar, AsExpr $ Var "p", AsExpr nullVar])
    Nothing ->
      Function void (classTraceFnName cname)
        [(Ptr encoreCtxT, ctxArg),
         (Ptr void, Var "p")]
        (Seq $
          (Assign (Decl (Ptr (Ptr encoreCtxT), encoreCtxVar)) (Amp ctxArg)):
          (Assign (Decl (Ptr . AsType $ classTypeName cname, thisVar))
                  (Var "p")) :
          runtimeTypeAssignment ++
          map traceField cfields)
  where
    ctxArg = Var "_ctx_arg"
    runtimeTypeAssignment = map extractTypeVariable typeParams
    extractTypeVariable t =
      if Ty.isTypeVar t then
        Assign (Decl (Ptr ponyTypeT, AsLval $ typeVarRefName t))
               (Arrow thisName (typeVarRefName t))
      else error "Expected type variable but found concrete type"
    typeParams = Ty.getTypeParameters cname
    -- Load the field into a local, then trace it according to its type.
    traceField A.Field {A.ftype, A.fname} =
      let var = Var . show $ fieldName fname
          field = thisVar `Arrow` fieldName fname
          fieldAssign = Assign (Decl (translate ftype, var)) field
      in Seq [fieldAssign, traceVariable ftype var]
-- | The @pony_type_t@ record for an active/shared class, including the
-- message dispatch entry.
runtimeTypeDecl cname =
  AssignTL
    (Decl (Typ "pony_type_t", AsLval $ runtimeTypeName cname)) $
      DesignatedInitializer $ [ (Nam "id", AsExpr . AsLval $ classId cname)
                              , (Nam "size", Call (Nam "sizeof") [AsLval $ classTypeName cname])
                              , (Nam "trace", AsExpr . AsLval $ (classTraceFnName cname))
                              , (Nam "dispatch", AsExpr . AsLval $ (classDispatchName cname))
                              , (Nam "vtable", AsExpr . AsLval $ traitMethodSelectorName)
                              ]

-- | The @pony_type_t@ record for a passive class; same as
-- 'runtimeTypeDecl' except there is no dispatch entry.
runtimePassiveTypeDecl cname =
  AssignTL
    (Decl (Typ "pony_type_t", AsLval $ runtimeTypeName cname)) $
      DesignatedInitializer $ [ (Nam "id", AsExpr . AsLval $ classId cname)
                              , (Nam "size", Call (Nam "sizeof") [AsLval $ classTypeName cname])
                              , (Nam "trace", AsExpr . AsLval $ (classTraceFnName cname))
                              , (Nam "vtable", AsExpr . AsLval $ traitMethodSelectorName)
                              ]
| Paow/encore | src/back/CodeGen/ClassDecl.hs | bsd-3-clause | 16,150 | 0 | 23 | 5,325 | 4,567 | 2,363 | 2,204 | -1 | -1 |
module GEC.Datagram
( P.ContextIn, P.ContextOut, P.TagSize(..)
, mkContextIn, mkContextOut
, P.inflationOut, P.inflationIn
, encode, decode
) where
import qualified GEC.Datagram.Pure as P
import Data.ByteString (ByteString)
-- | Build an inbound context; 'error's when the pure layer rejects the
-- parameters.
mkContextIn :: P.TagSize -> ByteString -> P.ContextIn
mkContextIn tagSize material =
    run "Could not construct input context" (P.mkContextIn tagSize material)

-- | Build an outbound context; 'error's when the pure layer rejects the
-- parameters.
mkContextOut :: P.TagSize -> ByteString -> P.ContextOut
mkContextOut tagSize material =
    run "Could not construct output context" (P.mkContextOut tagSize material)

-- | Encode a message, returning the advanced context; 'error's when the
-- pure layer rejects the input.
encode :: P.ContextOut -> ByteString -> (P.ContextOut, ByteString)
encode ctx message = run "Could not encode message" (P.encode ctx message)

-- | Decode a message, returning the advanced context; 'error's when the
-- pure layer rejects the input.
decode :: P.ContextIn -> ByteString -> (P.ContextIn, ByteString)
decode ctx message = run "Could not decode message" (P.decode ctx message)
-- | Unwrap a pure-layer result, failing loudly with a @GEC:@-prefixed
-- message when the operation was rejected.
run :: String -> Maybe a -> a
run msg result = case result of
    Just value -> value
    Nothing    -> error ("GEC: " ++ msg)
| GaloisInc/gec | src/GEC/Datagram.hs | bsd-3-clause | 843 | 0 | 8 | 150 | 288 | 156 | 132 | 17 | 1 |
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
module Data.FAlgebra.Tree.Zipper
( module Data.FAlgebra.Base
, module Data.FAlgebra.Tree
, TreeZipStepF(..)
, TreeZipSteps(..)
, TreeDirection(..)
, TreeZip(..)
, _here
, directions
, root
, sibling
, up
, value
, setValue
, zip
, rotate
, insertHere
) where
import Prelude hiding (zip)
import Data.FAlgebra.Base
import Data.FAlgebra.Tree hiding (left, right)
-- |Unfixed version of the list of steps up the tree
-- LBranch means that the current tree is a left child
-- RBranch means that the current tree is a right child
--
-- Each step stores the parent's value @a@ and the sibling subtree @t@;
-- @b@ is the recursive "rest of the path" position.
data TreeZipStepF a t b = Root
                        | LBranch a t b
                        | RBranch a t b
    deriving (Eq, Show, Functor)

-- | The full path from a focused subtree back up to the root.
type TreeZipSteps a t = Fix (TreeZipStepF a t)

-- | Which child a subtree is relative to its parent.
data TreeDirection = L | R
    deriving (Eq, Show, Ord)
-- Fixed points support the algebra/coalgebra of their base functor.
instance (f ~ TreeZipStepF a t) => FAlgebra f (TreeZipSteps a t) where
    alg = algNat

instance (f ~ TreeZipStepF a t) => FCoalgebra f (TreeZipSteps a t) where
    coalg = coalgNat

-- | A zipper: the focused subtree plus the path back to the root.
data TreeZip a t = TreeZip t (TreeZipSteps a t)
    deriving (Eq, Show)
-- |Extract the value at a zipper's current location (if any)
value :: FCoalgebra (TreeF a) t => TreeZip a t -> Maybe a
value (TreeZip focus _) =
    case coalg focus of
        Branch a _ _ -> Just a
        Empty        -> Nothing
-- | Replace the value at the current location; an empty subtree is
-- returned unchanged.
setValue :: forall a t. (FCoalgebra (TreeF a) t, FAlgebra (TreeF a) t) => a -> TreeZip a t -> TreeZip a t
setValue v z@(TreeZip t p) =
    case coalg t :: TreeF a t of
        Branch _ l r -> TreeZip (alg (Branch v l r)) p
        Empty        -> z
-- |Move up in the tree, staying still if at the root.
-- left and right come from the coalgebra instance for TreeZip a t.
up :: FAlgebra (TreeF a) t => TreeZip a t -> TreeZip a t
up z@(TreeZip focus path) =
    case coalg path of
        Root                -> z
        LBranch a sib path' -> TreeZip (branch a focus sib) path'
        RBranch a sib path' -> TreeZip (branch a sib focus) path'
-- Zippers have a natural F-Coalgebra structure
-- This allows us to use `left` and `right` for zippers as well.
--
-- Descending into a child extends the path with a step remembering the
-- parent's value and the sibling subtree.
instance (a ~ a', FCoalgebra (TreeF a) t) => FCoalgebra (TreeF a') (TreeZip a t) where
    coalg (TreeZip t p) = case coalg t of
        Empty -> Empty
        Branch a b1 b2 -> Branch a
            (TreeZip b1 (alg $ LBranch a b2 p))
            (TreeZip b2 (alg $ RBranch a b1 p))
-- |Lens for the current subtree
_here :: Functor f => (t -> f t) -> TreeZip a t -> f (TreeZip a t)
_here f (TreeZip t p) = (\t' -> TreeZip t' p) <$> f t
{-# INLINE _here #-}
-- |Get a zipper for the root of a tree
root :: t -> TreeZip a t
root tree = TreeZip tree (alg Root)

-- |Zip up a zipper into a tree
zip :: FAlgebra (TreeF a) t => TreeZip a t -> t
zip z@(TreeZip focus path)
    | atRoot    = focus
    | otherwise = zip (up z)
  where
    atRoot = case coalg path of
        Root -> True
        _    -> False
-- |Move to the current node's sibling. Stays still at the root.
sibling :: (FAlgebra (TreeF a) t, FCoalgebra (TreeF a) t) => TreeZip a t -> TreeZip a t
sibling z@(TreeZip focus path) =
    case coalg path of
        Root                -> z
        LBranch a sib path' -> TreeZip sib (alg $ RBranch a focus path')
        RBranch a sib path' -> TreeZip sib (alg $ LBranch a focus path')
-- |Get the list of directions on the path up to the root.
-- The head of the list corresponds to the current node's relation to its parent,
-- so we're a left child if the head is L, for example.
directions :: forall a t. TreeZip a t -> [TreeDirection]
directions (TreeZip _ path) = go path
  where
    go :: TreeZipSteps a t -> [TreeDirection]
    go steps = case (coalg steps :: TreeZipStepF a t (TreeZipSteps a t)) of
        Root             -> []
        LBranch _ _ rest -> L : go rest
        RBranch _ _ rest -> R : go rest
-- |Rotate the current node to its parents position.
-- Does nothing if at the root of the tree.
--
-- The focus stays on the same node (now one level higher); the displaced
-- parent adopts the focus's inner subtree.  An empty focus cannot be
-- rotated and is returned unchanged.
rotate :: forall a t. (FAlgebra (TreeF a) t, FCoalgebra (TreeF a) t) => TreeZip a t -> TreeZip a t
rotate z@(TreeZip t p) = case coalg p of
    Root -> z
    LBranch a' t' p' -> case (coalg t :: TreeF a t) of
        Empty -> z
        Branch a l r -> TreeZip (branch a l (branch a' r t')) p'
    RBranch a' t' p' -> case (coalg t :: TreeF a t) of
        Empty -> z
        Branch a l r -> TreeZip (branch a (branch a' t' l) r) p'
-- |Insert a tree into the current zipper location if it's empty.
insertHere :: forall a t. (FAlgebra (TreeF a) t, FCoalgebra (TreeF a) t) => a -> TreeZip a t -> TreeZip a t
insertHere x z@(TreeZip t p) =
    case (coalg t :: TreeF a t) of
        Empty -> TreeZip (leaf x) p
        _     -> z
| bhamrick/fixalgs | Data/FAlgebra/Tree/Zipper.hs | bsd-3-clause | 4,652 | 0 | 15 | 1,250 | 1,674 | 863 | 811 | 94 | 5 |
{-# LANGUAGE RecordWildCards #-}
-- | A persistent version of the Ghci session, encoding lots of semantics on top.
-- Not suitable for calling multithreaded.
module Session(
Session, enableEval, withSession,
sessionStart, sessionReload,
sessionExecAsync,
) where
import Language.Haskell.Ghcid
import Language.Haskell.Ghcid.Escape
import Language.Haskell.Ghcid.Util
import Language.Haskell.Ghcid.Types
import Data.IORef
import System.Console.ANSI
import System.Time.Extra
import System.Process
import System.FilePath
import Control.Exception.Extra
import Control.Concurrent.Extra
import Control.Monad.Extra
import Data.Maybe
import Data.List.Extra
import Control.Applicative
import Prelude
import System.IO.Extra
-- | All mutable state of one persistent Ghci session.  The 'IORef's
-- hold the process handle and last-known results; 'running' is a 'Var'
-- so the async-exec machinery can atomically claim/release it.
data Session = Session
    {ghci :: IORef (Maybe Ghci) -- ^ The Ghci session, or Nothing if there is none
    ,command :: IORef (Maybe (String, [String])) -- ^ The last command passed to sessionStart, setup operations
    ,warnings :: IORef [Load] -- ^ The warnings from the last load
    ,curdir :: IORef FilePath -- ^ The current working directory
    ,running :: Var Bool -- ^ Am I actively running an async command
    ,withThread :: ThreadId -- ^ Thread that called withSession
    ,allowEval :: Bool -- ^ Is the allow-eval flag set?
    }
-- | Enable evaluation of embedded @-- $>@ commands for this session
-- (see 'performEvals').
enableEval :: Session -> Session
enableEval s = s { allowEval = True }

-- | Trace a marker on the shutdown path.  Disabled (@when False@) in
-- normal builds; flip the constant to debug shutdown ordering.
-- Explicit signature added: top-level bindings should not rely on
-- inferred types (-Wmissing-signatures).
debugShutdown :: Show a => a -> IO ()
debugShutdown x = when False $ print ("DEBUG SHUTDOWN", x)
-- | The function 'withSession' expects to be run on the main thread,
-- but the inner function will not. This ensures Ctrl-C is handled
-- properly and any spawned Ghci processes will be aborted.
withSession :: (Session -> IO a) -> IO a
withSession f = do
    ghci <- newIORef Nothing
    command <- newIORef Nothing
    warnings <- newIORef []
    curdir <- newIORef "."
    running <- newVar False
    debugShutdown "Starting session"
    withThread <- myThreadId
    let allowEval = False
    f Session{..} `finally` do
        debugShutdown "Start finally"
        -- Stop any async command, then take down the ghci process (if any).
        modifyVar_ running $ const $ pure False
        whenJustM (readIORef ghci) $ \v -> do
            writeIORef ghci Nothing
            debugShutdown "Calling kill"
            kill v
        debugShutdown "Finish finally"
-- | Kill. Wait just long enough to ensure you've done the job, but not to see the results.
--
-- Tries a polite @:quit@ (bounded by a 5s timeout), then terminates the
-- OS process regardless; all failures are swallowed with 'ignored'.
kill :: Ghci -> IO ()
kill ghci = ignored $ do
    timeout 5 $ do
        debugShutdown "Before quit"
        ignored $ quit ghci
        debugShutdown "After quit"
    debugShutdown "Before terminateProcess"
    ignored $ terminateProcess $ process ghci
    debugShutdown "After terminateProcess"
    -- Ctrl-C after a tests keeps the cursor hidden,
    -- `setSGR []`didn't seem to be enough
    -- See: https://github.com/ndmitchell/ghcid/issues/254
    showCursor
-- | The distinct source files mentioned by load messages, excluding
-- configuration files.
loadedModules :: [Load] -> [FilePath]
loadedModules ls = nubOrd [loadFile l | l <- ls, not (isLoadConfig l)]
-- | Prefix every load's file path with the given directory.
qualify :: FilePath -> [Load] -> [Load]
qualify dir = map (\l -> l{loadFile = dir </> loadFile l})
-- | Spawn a new Ghci process at a given command line. Returns the load messages, plus
-- the list of files that were observed (both those loaded and those that failed to load).
sessionStart :: Session -> String -> [String] -> IO ([Load], [FilePath])
sessionStart Session{..} cmd setup = do
    modifyVar_ running $ const $ pure False
    writeIORef command $ Just (cmd, setup)
    -- cleanup any old instances
    whenJustM (readIORef ghci) $ \v -> do
        writeIORef ghci Nothing
        void $ forkIO $ kill v
    -- start the new
    outStrLn $ "Loading " ++ cmd ++ " ..."
    (v, messages) <- mask $ \unmask -> do
        -- masked so the IORef is written before an async exception can
        -- strike, guaranteeing withSession's finally can clean up.
        (v, messages) <- unmask $ startGhci cmd Nothing $ const outStrLn
        writeIORef ghci $ Just v
        pure (v, messages)
    -- do whatever preparation was requested
    exec v $ unlines setup
    -- deal with current directory
    (dir, _) <- showPaths v
    writeIORef curdir dir
    messages <- pure $ qualify dir messages
    let loaded = loadedModules messages
    evals <- performEvals v allowEval loaded
    -- install a handler
    forkIO $ do
        code <- waitForProcess $ process v
        whenJustM (readIORef ghci) $ \ghci ->
            when (ghci == v) $ do
                sleep 0.3 -- give anyone reading from the stream a chance to throw first
                throwTo withThread $ ErrorCall $ "Command \"" ++ cmd ++ "\" exited unexpectedly with " ++ show code
    -- handle what the process returned
    messages <- pure $ mapMaybe tidyMessage messages
    writeIORef warnings $ getWarnings messages
    pure (messages ++ evals, loaded)
-- | Keep only messages that are warnings.
getWarnings :: [Load] -> [Load]
getWarnings = filter isWarning
  where
    isWarning Message{loadSeverity=sev} = sev == Warning
    isWarning _ = False
-- | Call 'sessionStart' at the previous command.
--
-- Precondition: 'sessionStart' ran before, so 'command' is @Just@; the
-- pattern match here is partial by design.
sessionRestart :: Session -> IO ([Load], [FilePath])
sessionRestart session@Session{..} = do
    Just (cmd, setup) <- readIORef command
    sessionStart session cmd setup
-- | If evaluation is enabled, run the embedded @-- $>@ commands of the
-- given files, wrapping each command's output as an 'Eval' load message.
performEvals :: Ghci -> Bool -> [FilePath] -> IO [Load]
performEvals _ False _ = pure []
performEvals ghci True reloaded = do
    cmds <- mapM getCommands reloaded
    fmap join $ forM cmds $ \(file, cmds') ->
        forM cmds' $ \(num, cmd) -> do
            ref <- newIORef []
            -- accumulate output lines in reverse, flip once at the end
            execStream ghci cmd $ \_ resp -> modifyIORef ref (resp :)
            resp <- unlines . reverse <$> readIORef ref
            pure $ Eval $ EvalResult file (num, 1) cmd resp
-- | Read a source file and extract its embedded eval commands, paired
-- with their starting line numbers.
getCommands :: FilePath -> IO (FilePath, [(Int, String)])
getCommands file = do
    contents <- readFileUTF8' file
    pure (file, splitCommands (zipFrom 1 (lines contents)))
-- | Group numbered source lines into eval commands: consecutive
-- single-line @-- $> @ commands merge into one (joined with spaces), and
-- a @{- $>@ ... @<$ -}@ region becomes one GHCi multiline command.  Each
-- command keeps the line number of its first line.
splitCommands :: [(Int, String)] -> [(Int, String)]
splitCommands [] = []
splitCommands ((num, line) : ls)
    | isCommand line =
        let (cmds, xs) = span (isCommand . snd) ls
        in (num, unwords $ fmap (drop $ length commandPrefix) $ line : fmap snd cmds) : splitCommands xs
    | isMultilineCommandPrefix line =
        -- drop1 skips the closing "<$ -}" marker itself
        let (cmds, xs) = break (isMultilineCommandSuffix . snd) ls
        in (num, unlines (wrapGhciMultiline (fmap snd cmds))) : splitCommands (drop1 xs)
    | otherwise = splitCommands ls
-- | Is this a single-line eval command?
isCommand :: String -> Bool
isCommand line = commandPrefix `isPrefixOf` line

-- | Marker introducing a single-line eval command.
commandPrefix :: String
commandPrefix = "-- $> "

-- | Does this line open a multiline eval command?
isMultilineCommandPrefix :: String -> Bool
isMultilineCommandPrefix line = line == multilineCommandPrefix

-- | Marker opening a multiline eval command.
multilineCommandPrefix :: String
multilineCommandPrefix = "{- $>"

-- | Does this line close a multiline eval command?
isMultilineCommandSuffix :: String -> Bool
isMultilineCommandSuffix line = line == multilineCommandSuffix

-- | Marker closing a multiline eval command.
multilineCommandSuffix :: String
multilineCommandSuffix = "<$ -}"

-- | Wrap statements in GHCi's @:{@ ... @:}@ multiline block syntax.
wrapGhciMultiline :: [String] -> [String]
wrapGhciMultiline xs = ":{" : xs ++ [":}"]
-- | Reload, returning the same information as 'sessionStart'. In particular, any
-- information that GHCi doesn't repeat (warnings from loaded modules) will be
-- added back in.
sessionReload :: Session -> IO ([Load], [FilePath], [FilePath])
sessionReload session@Session{..} = do
    -- kill anything async, set stuck if you didn't succeed
    old <- modifyVar running $ \b -> pure (False, b)
    stuck <- if not old then pure False else do
        Just ghci <- readIORef ghci
        fmap isNothing $ timeout 5 $ interrupt ghci
    if stuck
        -- a wedged ghci cannot be interrupted: restart from scratch
        then (\(messages,loaded) -> (messages,loaded,loaded)) <$> sessionRestart session
        else do
            -- actually reload
            Just ghci <- readIORef ghci
            dir <- readIORef curdir
            messages <- mapMaybe tidyMessage . qualify dir <$> reload ghci
            loaded <- map ((dir </>) . snd) <$> showModules ghci
            let reloaded = loadedModules messages
            warn <- readIORef warnings
            evals <- performEvals ghci allowEval reloaded
            -- only keep old warnings from files that are still loaded, but did not reload
            let validWarn w = loadFile w `elem` loaded && loadFile w `notElem` reloaded
            -- newest warnings always go first, so the file you hit save on most recently has warnings first
            messages <- pure $ messages ++ filter validWarn warn
            writeIORef warnings $ getWarnings messages
            pure (messages ++ evals, nubOrd (loaded ++ reloaded), reloaded)
-- | Run an exec operation asynchronously. Should not be a @:reload@ or similar.
-- Will be automatically aborted if it takes too long. Only fires done if not aborted.
-- Argument to done is the final stderr line.
sessionExecAsync :: Session -> String -> (String -> IO ()) -> IO ()
sessionExecAsync Session{..} cmd done = do
Just ghci <- readIORef ghci
stderr <- newIORef ""
modifyVar_ running $ const $ pure True
caller <- myThreadId
void $ flip forkFinally (either (throwTo caller) (const $ pure ())) $ do
execStream ghci cmd $ \strm msg ->
when (msg /= "*** Exception: ExitSuccess") $ do
when (strm == Stderr) $ writeIORef stderr msg
outStrLn msg
old <- modifyVar running $ \b -> pure (False, b)
-- don't fire Done if someone interrupted us
stderr <- readIORef stderr
when old $ done stderr
-- | Ignore entirely pointless messages and remove unnecessary lines.
tidyMessage :: Load -> Maybe Load
tidyMessage Message{loadSeverity=Warning, loadMessage=[_,x]}
| unescape x == " -O conflicts with --interactive; -O ignored." = Nothing
tidyMessage m@Message{..}
= Just m{loadMessage = filter (\x -> not $ any (`isPrefixOf` unescape x) bad) loadMessage}
where bad = [" except perhaps to import instances from"
," To import instances alone, use: import "]
tidyMessage x = Just x
| ndmitchell/ghcid | src/Session.hs | bsd-3-clause | 9,509 | 0 | 20 | 2,250 | 2,622 | 1,326 | 1,296 | 178 | 3 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE ScopedTypeVariables #-}
-- | This module provide an IO-based API. The /ks/ executable provides
-- some keystore management functions that can be used from the shell
-- and "Data.KeyStore.KeyStore" provides the underlying functional model.
module Data.KeyStore.IO
( readSettings
, CtxParams(..)
, IC(..)
, module Data.KeyStore.Types
, module Data.KeyStore.KS.KS
, keyStoreBytes
, keyStoreFromBytes
, settingsFromBytes
, defaultSettingsFilePath
, settingsFilePath
, defaultKeyStoreFilePath
, defaultCtxParams
, instanceCtx
, instanceCtx_
, newKeyStore
, store
, listSettings
, settings
, updateSettings
, listTriggers
, triggers
, addTrigger
, addTrigger'
, rmvTrigger
, createRSAKeyPair
, createKey
, adjustKey
, rememberKey
, rememberKey_
, secureKey
, loadKey
, showIdentity
, showComment
, showDate
, showHash
, showHashComment
, showHashSalt
, showPublic
, showSecret
, keys
, list
, keyInfo
, deleteKeys
, encrypt
, encrypt_
, encrypt__
, decrypt
, decrypt_
, decrypt__
, sign
, sign_
, verify
, verify_
, run
, getKeystore
, getState
, getCtxState
, putCtxState
) where
import Data.KeyStore.IO.IC
import Data.KeyStore.KS
import Data.KeyStore.KS.KS
import Data.KeyStore.Types
import Data.API.Types
import Data.IORef
import Data.Aeson
import qualified Data.Text as T
import qualified Data.ByteString.Char8 as B
import qualified Data.ByteString.Lazy.Char8 as LBS
import qualified Data.ByteString.Base64 as B64
import qualified Data.Map as Map
import Data.Time
import Text.Printf
import qualified Control.Exception as X
import qualified Control.Lens as L
import Control.Monad
import System.IO
#if MIN_VERSION_time(1,5,0)
#else
import System.Locale (defaultTimeLocale)
#endif
-- | Generate a new keystore located in the given file with the given global
-- settings.  Refuses to overwrite an existing (readable) keystore file.
newKeyStore :: FilePath -> Settings -> IO ()
newKeyStore str_fp stgs = do
    probe <- X.try (B.readFile str_fp) :: IO (Either X.SomeException B.ByteString)
    case probe of
      Left _ -> return ()                        -- absent/unreadable: OK to create
      Right _ -> errorIO "keystore file exists"  -- never clobber an existing store
    g <- newGenerator
    let st = State
              { st_keystore = emptyKeyStore $ defaultConfiguration stgs
              , st_cprng = g
              }
    LBS.writeFile str_fp $ keyStoreBytes $ st_keystore st
-- | Given 'CtxParams' describing the location of the keystore, etc., generate
-- an IC for use in the keystore access functions below, with the context
-- cached in an IORef between calls.
instanceCtx :: CtxParams -> IO IC
instanceCtx cp = do
    ctx_st <- getCtxState $ instanceCtx_ cp
    ref <- newIORef ctx_st
    return $ IC cp $ Just ref

-- | Generate an IC that caches no state between calls.
instanceCtx_ :: CtxParams -> IC
instanceCtx_ cp = IC cp Nothing

-- | The filepath of the loaded store.
store :: IC -> IO FilePath
store ic = run ic storeKS
-- | Print the keystore's JSON settings on stdout.
listSettings :: IC -> IO ()
listSettings ic = do
    stgs <- settings ic
    LBS.putStrLn $ encode $ _Settings stgs

-- | Return the settings associated with the keystore.
settings :: IC -> IO Settings
settings ic = run ic $ fmap _cfg_settings getConfig

-- | Replace the global settings of a keystore with those read from a JSON file.
updateSettings :: IC -> FilePath -> IO ()
updateSettings ic fp = do
    bs <- LBS.readFile fp
    stgs <- e2io $ settingsFromBytes bs
    run ic $ modConfig $ L.set cfg_settings stgs
-- | Print one line per trigger on stdout: identifier, pattern and the JSON
-- settings the trigger establishes.
listTriggers :: IC -> IO ()
listTriggers ic = do
    ts <- triggers ic
    putStr $ unlines $ map describe ts
  where
    describe trg = printf "%-12s : %12s => %s"
        (T.unpack $ _TriggerID $ _trg_id trg)
        (_pat_string $ _trg_pattern trg)
        (LBS.unpack $ encode $ Object $ _Settings $ _trg_settings trg)

-- | Return the triggers set up on the keystore.
triggers :: IC -> IO [Trigger]
triggers ic = run ic $ fmap (Map.elems . _cfg_triggers) getConfig
-- | Variant of 'addTrigger'' that reads the settings from a JSON file.
addTrigger :: IC -> TriggerID -> Pattern -> FilePath -> IO ()
addTrigger ic tid pat fp = do
    bs <- LBS.readFile fp
    stgs <- e2io $ settingsFromBytes bs
    addTrigger' ic tid pat stgs

-- | Set up a named trigger on a keystore that will fire when a key matches the
-- given pattern, establishing the settings.
addTrigger' :: IC -> TriggerID -> Pattern -> Settings -> IO ()
addTrigger' ic tid pat stgs =
    run ic $ modConfig $ L.over cfg_triggers $ Map.insert tid trg
  where
    trg = Trigger tid pat stgs

-- | Remove the named trigger from the keystore.
rmvTrigger :: IC -> TriggerID -> IO ()
rmvTrigger ic tid = run ic $ modConfig $ L.over cfg_triggers $ Map.delete tid
-- | Create an RSA key pair, encoding the private key in the named Safeguards.
createRSAKeyPair :: IC -> Name -> Comment -> Identity -> [Safeguard] -> IO ()
createRSAKeyPair ic nm cmt ide = run ic . createRSAKeyPairKS nm cmt ide

-- | Create a symmetric key, possibly auto-loaded from an environment variable.
createKey :: IC
          -> Name
          -> Comment
          -> Identity
          -> Maybe EnvVar
          -> Maybe B.ByteString
          -> IO ()
createKey ic nm cmt ide mb_ev mb_bs =
    run ic $ createKeyKS nm cmt ide mb_ev mb_ct
  where
    mb_ct = fmap (ClearText . Binary) mb_bs

-- | Adjust a named key.
adjustKey :: IC -> Name -> (Key->Key) -> IO ()
adjustKey ic nm = run ic . adjustKeyKS nm

-- | Load a named key's secret text from the named file.
rememberKey :: IC -> Name -> FilePath -> IO ()
rememberKey ic nm fp = do
    bs <- B.readFile fp
    rememberKey_ ic nm bs

-- | Load the named key from the given bytes.
rememberKey_ :: IC -> Name -> B.ByteString -> IO ()
rememberKey_ ic nm = run ic . rememberKeyKS nm . ClearText . Binary

-- | Encrypt and store the key with the named safeguard.
secureKey :: IC -> Name -> Safeguard -> IO ()
secureKey ic nm nms = run ic $ secureKeyKS nm nms

-- | Try and retrieve the secret text for a given key.
loadKey :: IC -> Name -> IO Key
loadKey ic nm = run ic (loadKeyKS nm)
-- | Return the identity of a key.  (In each of these show* functions the
-- Bool selects base64 "ASCII armoured" output -- see 'show_it''.)
showIdentity :: IC -> Bool -> Name -> IO B.ByteString
showIdentity ic = show_it' ic "identity" (Just . _key_identity) (B.pack . T.unpack . _Identity)
-- | Return the comment associated with a key.
showComment :: IC -> Bool -> Name -> IO B.ByteString
showComment ic = show_it' ic "comment" (Just . _key_comment) (B.pack . T.unpack . _Comment )
-- | Return the creation UTC of a key, formatted as @%F-%TZ@ (date, then time).
showDate :: IC -> Bool -> Name -> IO B.ByteString
showDate ic = show_it' ic "date" (Just . _key_created_at) (B.pack . formatTime defaultTimeLocale fmt)
  where
    fmt = "%F-%TZ"
-- | Return the hash of a key.
showHash :: IC -> Bool -> Name -> IO B.ByteString
showHash ic = show_it ic "hash" (fmap _hash_hash . _key_hash) _HashData
-- | Return the hash comment of a key.
showHashComment :: IC -> Bool -> Name -> IO B.ByteString
showHashComment ic = show_it' ic "hash" _key_hash cmt
  where
    cmt = B.pack . T.unpack . _Comment . _hashd_comment . _hash_description
-- | Return the hash salt of a key.
showHashSalt :: IC -> Bool -> Name -> IO B.ByteString
showHashSalt ic = show_it ic "hash" (fmap (_hashd_salt . _hash_description) . _key_hash) _Salt
-- | (For public key pairs only) return the public key.
showPublic :: IC -> Bool -> Name -> IO B.ByteString
showPublic ic = show_it ic "public" (fmap encodePublicKeyDER . _key_public) _ClearText
-- | Return the secret text of a key (will be the private key for a public key pair).
showSecret :: IC -> Bool -> Name -> IO B.ByteString
showSecret ic = show_it ic "secret" _key_clear_text _ClearText
-- | Like 'show_it'' but for projections yielding a 'Binary', which is
-- unwrapped to a strict 'B.ByteString' before display.
show_it :: IC
        -> String
        -> (Key->Maybe a)
        -> (a->Binary)
        -> Bool
        -> Name
        -> IO B.ByteString
show_it ic lbl prj_1 prj_2 = show_it' ic lbl prj_1 (\x -> _Binary (prj_2 x))
-- | Load the named key and display one of its fields: prj_1 selects the
-- (optional) field, prj_2 renders it, and when the Bool is set the output
-- is base64 ("ASCII armoured").  Fails via 'errorIO' when the field is
-- absent from the key.
show_it' :: IC
         -> String
         -> (Key->Maybe a)
         -> (a->B.ByteString)
         -> Bool
         -> Name
         -> IO B.ByteString
show_it' ic lbl prj_1 prj_2 aa nm = do
    key <- loadKey ic nm
    maybe missing (return . armour . prj_2) (prj_1 key)
  where
    missing = errorIO $ printf "%s: %s not present" (_name nm) lbl
    armour = if aa then B64.encode else id
-- | List a summary of all of the keys on stdout.
list :: IC -> IO ()
list ic = run ic listKS

-- | Summarize a single key on stdout.
keyInfo :: IC -> Name -> IO ()
keyInfo ic nm = run ic (keyInfoKS nm)

-- | Return all of the keys in the keystore.
keys :: IC -> IO [Key]
keys ic = do
    ks <- getKeystore ic
    return $ Map.elems $ _ks_keymap ks

-- | Delete a list of keys from the keystore.
deleteKeys :: IC -> [Name] -> IO ()
deleteKeys ic = run ic . deleteKeysKS
-- | Encrypt a file with a named key, writing the result to the second path.
encrypt :: IC -> Name -> FilePath -> FilePath -> IO ()
encrypt ic nm s_fp d_fp =
    B.readFile s_fp >>= encrypt_ ic nm >>= B.writeFile d_fp

-- | Encrypt a 'B.ByteString' with a named key.
encrypt_ :: IC -> Name -> B.ByteString -> IO B.ByteString
encrypt_ ic nm bs = do
    pkt <- run ic $ encryptWithRSAKeyKS nm $ ClearText $ Binary bs
    return $ _Binary $ _EncryptionPacket pkt

-- | Encrypt a 'B.ByteString' with a named key to produce a 'RSASecretData'.
encrypt__ :: IC -> Name -> B.ByteString -> IO RSASecretData
encrypt__ ic nm bs = run ic $ encryptWithRSAKeyKS_ nm $ ClearText $ Binary bs

-- | Decrypt a file with the named key (whose secret text must be accessible),
-- writing the result to the second path.
decrypt :: IC -> FilePath -> FilePath -> IO ()
decrypt ic s_fp d_fp =
    B.readFile s_fp >>= decrypt_ ic >>= B.writeFile d_fp

-- | Decrypt a 'B.ByteString' with the named key
-- (whose secret text must be accessible).
decrypt_ :: IC -> B.ByteString -> IO B.ByteString
decrypt_ ic bs = do
    ct <- run ic $ decryptWithRSAKeyKS $ EncryptionPacket $ Binary bs
    return $ _Binary $ _ClearText ct

-- | Decrypt a 'B.ByteString' from a 'RSASecretData' with the named key
-- (whose secret text must be accessible).
decrypt__ :: IC -> Name -> RSASecretData -> IO B.ByteString
decrypt__ ic nm rsd = do
    ct <- run ic $ decryptWithRSAKeyKS_ nm rsd
    return $ _Binary $ _ClearText ct

-- | Sign a file with the named key (whose secret text must be accessible),
-- writing a detached signature to the second path.
sign :: IC -> Name -> FilePath -> FilePath -> IO ()
sign ic nm s_fp d_fp =
    B.readFile s_fp >>= sign_ ic nm >>= B.writeFile d_fp

-- | Sign a 'B.ByteString' with the named key (whose secret text must be
-- accessible) to produce a detached signature.
sign_ :: IC -> Name -> B.ByteString -> IO B.ByteString
sign_ ic nm m_bs = do
    pkt <- run ic $ signWithRSAKeyKS nm $ ClearText $ Binary m_bs
    return $ _Binary $ _SignaturePacket pkt
-- | Verify a detached signature for a file against the named public key,
-- reporting on stderr when the signature does not match.
verify :: IC -> FilePath -> FilePath -> IO Bool
verify ic m_fp s_fp = do
    msg <- B.readFile m_fp
    sig <- B.readFile s_fp
    ok <- verify_ ic msg sig
    unless ok $ report "signature does not match the data"
    return ok

-- | Verify a detached signature for a 'B.ByteString' against the named
-- public key.
verify_ :: IC -> B.ByteString -> B.ByteString -> IO Bool
verify_ ic m_bs s_bs =
    run ic $ verifyWithRSAKeyKS (ClearText $ Binary m_bs)
                                (SignaturePacket $ Binary s_bs)
-- | Run a KS function in an IO context, dealing with keystore updates, output,
-- debug logging and errors.
run :: IC -> KS a -> IO a
run ic p =
 do (ctx,st0) <- getCtxState ic
    -- fold any environment-supplied key values into the keystore first
    st1 <- scan_env ctx st0
    let msg = "[Keystore: " ++ ctx_store ctx ++"]\n"
        (e,st2,les) = run_ ctx st1 $ debugLog msg >> p
    -- convert a KS-level error into an IO exception
    r <- e2io e
    mapM_ (logit ctx) les
    -- run the key-backup pass before the state is written back
    st' <- backup_env ctx st2
    putCtxState ic ctx st'
    return r
-- | Scan the process environment for key values ('scanEnv') and fold them
-- into the keystore held in the state, logging whatever the scan reports.
scan_env :: Ctx -> State -> IO State
scan_env ctx st0 =
 do (ks,les) <- scanEnv ks0
    mapM_ (logit ctx) les
    return st0 { st_keystore = ks }
  where
    ks0 = st_keystore st0
-- | Run the pure backup pass ('backupKeysKS') over the state, logging its
-- output and converting any error into an IO exception.
backup_env :: Ctx -> State -> IO State
backup_env ctx st0 =
 do mapM_ (logit ctx) les'
    e2io e
    return st'
  where
    (e,st',les') = run_ ctx st0 backupKeysKS
-- | The keystore held by the context.
getKeystore :: IC -> IO KeyStore
getKeystore ic = st_keystore <$> getState ic

-- | The interpreter state held by the context.
getState :: IC -> IO State
getState ic = snd <$> getCtxState ic

-- | Fetch the context and state: from the cache when the IC carries one,
-- otherwise determined afresh from the context parameters.
getCtxState :: IC -> IO (Ctx,State)
getCtxState IC{..} = maybe (determineCtx ic_ctx_params) readIORef ic_cache

-- | Write back the context and state, updating the cache (when present)
-- and, unless the store was opened read-only, saving the keystore to disk.
putCtxState :: IC -> Ctx -> State -> IO ()
putCtxState IC{..} ctx st = do
    case ic_cache of
      Nothing -> return ()
      Just rf -> writeIORef rf (ctx,st)
    unless (maybe False id $ cp_readonly ic_ctx_params) $
      LBS.writeFile (ctx_store ctx) $ keyStoreBytes $ st_keystore st

-- | Write a message to stderr.
report :: String -> IO ()
report msg = hPutStrLn stderr msg
| cdornan/keystore | src/Data/KeyStore/IO.hs | bsd-3-clause | 13,304 | 0 | 14 | 3,394 | 3,641 | 1,849 | 1,792 | 276 | 3 |
----------------------------------------------------------------------------
-- |
-- Module : Language.Core.Interpreter.Libraries.GHC.Show
-- Copyright : (c) Carlos López-Camey, University of Freiburg
-- License : BSD-3
--
-- Maintainer : c.lopez@kmels.net
-- Stability : stable
--
--
-- Defines functions defined in GHC.Num
-----------------------------------------------------------------------------
module Language.Core.Interpreter.Libraries.GHC.Show(all) where
import DART.CmdLine
import Language.Core.Core
import Language.Core.Interpreter(evalId)
import Language.Core.Interpreter.Apply
import Language.Core.Interpreter.Util(return')
import Language.Core.Interpreter.Libraries.Monomophy(monomophy_1, monomophy_2,mkMonomophier)
import Language.Core.Interpreter.Structures
import Language.Core.Interpreter.Util(showValue)
import Prelude hiding (all)
-- | Every binding this module contributes to the interpreter environment:
-- the show function itself plus a monomorphiser for GHC's Show Int dictionary.
all :: [(Id, Either Thunk Value)]
all = [ show'
      , mkMonomophier "base:GHC.Show.$fShowInt"
      ]
-- | The function that converts a showable to a string: a monomorphised
-- unary function rendering its argument with 'showValue' and wrapping the
-- result as a 'String' value.
show' :: (Id, Either Thunk Value)
show' = (showId, Right $ Fun (monomophy_1 "(show)" showVal) "polymorphic(show)")
  where
    -- renamed from `id` to avoid shadowing Prelude.id
    showId = "base:GHC.Show.show"
    showVal :: Value -> IM Value
    showVal v = showValue v >>= return . String
| kmels/dart-haskell | src/Language/Core/Interpreter/Libraries/GHC/Show.hs | bsd-3-clause | 1,279 | 0 | 9 | 184 | 231 | 145 | 86 | 18 | 1 |
module GUI.Timeline.HEC (
renderHEC,
renderInstantHEC,
) where
import GUI.Timeline.Render.Constants
import Events.EventTree
import Events.EventDuration
import GUI.Types
import GUI.Timeline.CairoDrawing
import GUI.ViewerColours
import Graphics.Rendering.Cairo
import qualified GHC.RTS.Events as GHC
import GHC.RTS.Events hiding (Event, GCWork, GCIdle)
import qualified Data.IntMap as IM
import Data.Maybe
import Control.Monad
-- | Render one HEC's timeline: duration bars first, then -- when zoomed in
-- far enough (scaleValue below 'detailThreshold') -- the instantaneous
-- events on top.
renderHEC :: ViewParameters -> Timestamp -> Timestamp
          -> IM.IntMap String -> (DurationTree,EventTree)
          -> Render ()
renderHEC params@ViewParameters{..} start end perfNames (dtree,etree) = do
  renderDurations params start end dtree
  when (scaleValue < detailThreshold) $
    case etree of
      EventTree ltime etime tree -> do
        renderEvents params ltime etime start end (fromIntegral detail)
          perfNames tree
        return ()
-- | Render only the instantaneous events of a HEC, using a fixed detail
-- width of 1 regardless of the zoom level.
renderInstantHEC :: ViewParameters -> Timestamp -> Timestamp
                 -> IM.IntMap String -> EventTree
                 -> Render ()
renderInstantHEC params@ViewParameters{..} start end
                 perfNames (EventTree ltime etime tree) = do
  let instantDetail = 1
  renderEvents params ltime etime start end instantDetail perfNames tree
  return ()
-- | Zoom threshold on scaleValue below which individual events are drawn.
detailThreshold :: Double
detailThreshold = 3
-------------------------------------------------------------------------------
-- draws the trace for a single HEC
renderDurations :: ViewParameters
                -> Timestamp -> Timestamp -> DurationTree
                -> Render ()
renderDurations _ _ _ DurationTreeEmpty = return ()
-- a leaf: draw it when it overlaps the viewport
renderDurations params@ViewParameters{..} startPos endPos (DurationTreeLeaf e)
  | inView startPos endPos e = drawDuration params e
  | otherwise = return ()
renderDurations params@ViewParameters{..} !startPos !endPos
        (DurationSplit s splitTime e lhs rhs runAv gcAv)
  | startPos < splitTime && endPos >= splitTime &&
          (fromIntegral (e - s) / scaleValue) <= fromIntegral detail
  = -- View spans both left and right sub-tree, and the node is narrower
    -- than the detail threshold: draw the precomputed run/GC averages
    -- instead of recursing.
    drawAverageDuration params s e runAv gcAv
  | otherwise
  = -- recurse into whichever sub-trees intersect the viewport
    do when (startPos < splitTime) $
         renderDurations params startPos endPos lhs
       when (endPos >= splitTime) $
         renderDurations params startPos endPos rhs
-------------------------------------------------------------------------------
-- | Draw the instantaneous events of one tree node that fall inside the
-- viewport.  Returns True iff something was actually drawn, which lets
-- callers skip work that would overdraw the same pixels.
renderEvents :: ViewParameters
             -> Timestamp -- start time of this tree node
             -> Timestamp -- end time of this tree node
             -> Timestamp -> Timestamp -> Double
             -> IM.IntMap String -> EventNode
             -> Render Bool
renderEvents params@ViewParameters{..} !_s !_e !startPos !endPos ewidth
    perfNames (EventTreeLeaf es)
  = let within = [ e | e <- es, let t = time e, t >= startPos && t < endPos ]
        -- try candidates in order until one of them reports it was drawn
        untilTrue _ [] = return False
        untilTrue f (x : xs) = do
          b <- f x
          if b then return b else untilTrue f xs
    in untilTrue (drawEvent params ewidth perfNames) within
renderEvents params@ViewParameters{..} !_s !_e !startPos !endPos ewidth
    perfNames (EventTreeOne ev)
  | t >= startPos && t < endPos = drawEvent params ewidth perfNames ev
  | otherwise = return False
  where t = time ev
renderEvents params@ViewParameters{..} !s !e !startPos !endPos ewidth
    perfNames (EventSplit splitTime lhs rhs)
  | startPos < splitTime && endPos >= splitTime &&
    (fromIntegral (e - s) / scaleValue) <= ewidth
  -- node narrower than one event width: one drawn event suffices, so only
  -- try the right half when the left half drew nothing
  = do drawnLhs <-
         renderEvents params s splitTime startPos endPos ewidth perfNames lhs
       if not drawnLhs
         then
           renderEvents params splitTime e startPos endPos ewidth perfNames rhs
         else return True
  | otherwise
  = do drawnLhs <-
         if startPos < splitTime
           then
             renderEvents params s splitTime startPos endPos ewidth perfNames lhs
           else return False
       drawnRhs <-
         if endPos >= splitTime
           then
             renderEvents params splitTime e startPos endPos ewidth perfNames rhs
           else return False
       return $ drawnLhs || drawnRhs
-------------------------------------------------------------------------------
-- | An event is in view iff its interval overlaps the viewport: it must
-- start no later than the view's end and end after the view's start.
inView :: Timestamp -> Timestamp -> EventDuration -> Bool
inView viewStart viewEnd event = eStart <= viewEnd && eEnd > viewStart
  where
    eStart = startTimeOf event
    eEnd = endTimeOf event
-------------------------------------------------------------------------------
-- | Zoomed-out summary of a span: a full-height bar when there was any
-- mutator activity (runAv > 0), plus a half-height bar underneath whose
-- alpha encodes the fraction of the span spent in GC.
drawAverageDuration :: ViewParameters
                    -> Timestamp -> Timestamp -> Timestamp -> Timestamp
                    -> Render ()
drawAverageDuration ViewParameters{..} startTime endTime runAv gcAv = do
  setSourceRGBAhex (if not bwMode then runningColour else black) 1.0
  when (runAv > 0) $
    draw_rectangle startTime hecBarOff -- x, y
                   (endTime - startTime) -- w
                   hecBarHeight
  setSourceRGBAhex black 1.0
  --move_to (oxs + startTime, 0)
  --relMoveTo (4/scaleValue) 13
  --unscaledText scaleValue (show nrEvents)
  -- GC bar: the colour's alpha is the GC time ratio
  setSourceRGBAhex (if not bwMode then gcColour else black) gcRatio
  draw_rectangle startTime -- x
                 (hecBarOff+hecBarHeight) -- y
                 (endTime - startTime) -- w
                 (hecBarHeight `div` 2) -- h
  where
    duration = endTime - startTime
    -- runRatio :: Double
    -- runRatio = (fromIntegral runAv) / (fromIntegral duration)
    gcRatio :: Double
    gcRatio = (fromIntegral gcAv) / (fromIntegral duration)
-------------------------------------------------------------------------------
-- | Draw text at the current point with the transform reset to the
-- identity, so the text renders at device resolution regardless of zoom.
unscaledText :: String -> Render ()
unscaledText text = do
  saved <- getMatrix
  identityMatrix
  showText text
  setMatrix saved

-------------------------------------------------------------------------------
-- | Measure text extents with the transform reset to the identity, giving
-- device units.  The scale argument is unused but kept for its callers.
textWidth :: Double -> String -> Render TextExtents
textWidth _scaleValue text = do
  saved <- getMatrix
  identityMatrix
  extents <- textExtents text
  setMatrix saved
  return extents
-------------------------------------------------------------------------------
-- | Draw one duration bar.  Thread runs get a full-height bar, a thread-id
-- label when it fits, and (labels mode) the stop reason; GC phases and TX
-- spans get half-height bars beneath the HEC bar.
drawDuration :: ViewParameters -> EventDuration -> Render ()
drawDuration ViewParameters{..} (ThreadRun t s startTime endTime) = do
  setSourceRGBAhex (if not bwMode then runningColour else black) 1.0
  setLineWidth (1/scaleValue)
  draw_rectangle_opt False
                     startTime -- x
                     hecBarOff -- y
                     (endTime - startTime) -- w
                     hecBarHeight -- h
  -- Optionally label the bar with the threadID if there is room
  tExtent <- textWidth scaleValue tStr
  let tw = textExtentsWidth tExtent
      th = textExtentsHeight tExtent
  when (tw + 6 < fromIntegral rectWidth) $ do
    setSourceRGBAhex labelTextColour 1.0
    move_to (fromIntegral startTime + truncate (4*scaleValue),
             hecBarOff + (hecBarHeight + round th) `quot` 2)
    unscaledText tStr
  -- Optionally write the reason for the thread being stopped
  -- depending on the zoom value
  labelAt labelsMode endTime $
    show t ++ " " ++ showThreadStopStatus s
  where
    rectWidth = truncate (fromIntegral (endTime - startTime) / scaleValue) -- as pixels
    tStr = show t
drawDuration ViewParameters{..} (GCStart startTime endTime)
  = gcBar (if bwMode then black else gcStartColour) startTime endTime
drawDuration ViewParameters{..} (GCWork startTime endTime)
  = gcBar (if bwMode then black else gcWorkColour) startTime endTime
drawDuration ViewParameters{..} (GCIdle startTime endTime)
  = gcBar (if bwMode then black else gcIdleColour) startTime endTime
drawDuration ViewParameters{..} (GCEnd startTime endTime)
  = gcBar (if bwMode then black else gcEndColour) startTime endTime
drawDuration ViewParameters{..} (TXStart startTime endTime)
  = txBar (if bwMode then black else txStartColour) startTime endTime
-- | Draw a half-height bar directly beneath the HEC bar marking a
-- transaction interval.
txBar :: Color -> Timestamp -> Timestamp -> Render ()
txBar col !t0 !t1 = do
  setSourceRGBAhex col 1.0
  draw_rectangle_opt False t0 (hecBarOff + hecBarHeight) (t1 - t0)
                     (hecBarHeight `div` 2)

-- | Draw a half-height bar directly beneath the HEC bar marking a GC
-- interval; same geometry as 'txBar'.
gcBar :: Color -> Timestamp -> Timestamp -> Render ()
gcBar col !t0 !t1 = do
  setSourceRGBAhex col 1.0
  draw_rectangle_opt False t0 (hecBarOff + hecBarHeight) (t1 - t0)
                     (hecBarHeight `div` 2)
-- | When labels are enabled, write @str@ rotated 45 degrees below the HEC
-- bar at time @t@; otherwise do nothing.
labelAt :: Bool -> Timestamp -> String -> Render ()
labelAt False _ _ = return ()
labelAt True t str = do
  setSourceRGB 0.0 0.0 0.0
  move_to (t, hecBarOff+hecBarHeight+12)
  save
  identityMatrix
  rotate (pi/4)
  showText str
  restore
-- | Dispatch from event type to marker colour and draw it.  Returns True
-- iff the event produced any drawing; thread run/stop/GC-start and name
-- metadata events return False so a later candidate can be drawn instead.
drawEvent :: ViewParameters -> Double -> IM.IntMap String -> GHC.Event
          -> Render Bool
drawEvent params@ViewParameters{..} ewidth perfNames event =
  let renderI = renderInstantEvent params perfNames event ewidth
  in case spec event of
    CreateThread{} -> renderI createThreadColour
    RequestSeqGC{} -> renderI seqGCReqColour
    RequestParGC{} -> renderI parGCReqColour
    MigrateThread{} -> renderI migrateThreadColour
    WakeupThread{} -> renderI threadWakeupColour
    Shutdown{} -> renderI shutdownColour
    SparkCreate{} -> renderI createdConvertedColour
    SparkDud{} -> renderI fizzledDudsColour
    SparkOverflow{} -> renderI overflowedColour
    SparkRun{} -> renderI createdConvertedColour
    SparkSteal{} -> renderI createdConvertedColour
    SparkFizzle{} -> renderI fizzledDudsColour
    SparkGC{} -> renderI gcColour
    --BEGIN STM
    StartTX{} -> renderI seqGCReqColour
    StartTXWInfo{} -> renderI seqGCReqColour
    CommitTX{} -> renderI userMessageColour
    EagerPartialAbort{} -> renderI fizzledDudsColour
    CommitTimePartialAbort{} -> renderI fizzledDudsColour
    EagerFullAbort{} -> renderI shutdownColour
    CommitTimeFullAbort{} -> renderI shutdownColour
    FastForward{} -> renderI bookmarkColour
    BeginCommit{} -> renderI parGCReqColour
    TSExtension{} -> renderI seqGCReqColour
    --END STM
    RememberObj{} -> renderI parGCReqColour
    MajorGC{} -> renderI shutdownColour
    GlobalGC{} -> renderI threadWakeupColour
    UserMessage{} -> renderI userMessageColour
    PerfCounter{} -> renderI createdConvertedColour
    PerfTracepoint{} -> renderI shutdownColour
    PerfName{} -> return False
    RunThread{} -> return False
    StopThread{} -> return False
    StartGC{} -> return False
    _ -> return False
-- | Draw a single vertical tick at the event's timestamp and, when labels
-- are on, a textual label.  PerfCounter/PerfTracepoint labels are resolved
-- through the perfNames map; anything else falls back to 'showEventInfo'.
renderInstantEvent :: ViewParameters -> IM.IntMap String -> GHC.Event
                   -> Double -> Color
                   -> Render Bool
renderInstantEvent ViewParameters{..} perfNames event ewidth color = do
  setSourceRGBAhex color 1.0
  setLineWidth (ewidth * scaleValue)
  let t = time event
  -- the tick extends 4 units above and below the HEC bar
  draw_line (t, hecBarOff-4) (t, hecBarOff+hecBarHeight+4)
  let numToLabel PerfCounter{perfNum, period} | period == 0 =
        IM.lookup (fromIntegral perfNum) perfNames
      numToLabel PerfCounter{perfNum, period} =
        -- a non-zero period means the counter fired (period + 1) times
        fmap (++ " <" ++ show (period + 1) ++ " times>") $
        IM.lookup (fromIntegral perfNum) perfNames
      numToLabel PerfTracepoint{perfNum} =
        fmap ("tracepoint: " ++) $ IM.lookup (fromIntegral perfNum) perfNames
      numToLabel _ = Nothing
      showLabel espec = fromMaybe (showEventInfo espec) (numToLabel espec)
  labelAt labelsMode t $ showLabel (spec event)
  return True
-------------------------------------------------------------------------------
| ml9951/ThreadScope | GUI/Timeline/HEC.hs | bsd-3-clause | 12,060 | 0 | 17 | 3,033 | 3,214 | 1,581 | 1,633 | -1 | -1 |
module Day25 where
import Data.Char
import Data.Maybe
import Safe
-- | The infinite stream of codes: starts at the seed 20151125, and each
-- code is the previous one times 252533, reduced modulo 33554393.
codeSequence :: [Int]
codeSequence = iterate step 20151125
  where
    step code = (code * 252533) `rem` 33554393
-- | Map a 1-based (row, column) position in the diagonally-filled code
-- table to its 1-based fill index.  The cell lies on diagonal
-- d = row + col - 1; the earlier diagonals hold d*(d-1)/2 cells and the
-- cell is the col-th entry on its own diagonal.  This closed form replaces
-- the original O(row + col) lazy-list walk (and its partial (!!)) with
-- O(1) arithmetic; results are identical for all row, col >= 1.
getIndex :: (Int, Int) -> Int
getIndex (row, col) = d * (d - 1) `div` 2 + col
  where
    d = row + col - 1
-- | Pull the row and column numbers out of the puzzle's prose input by
-- stripping punctuation and reading every word that parses as a number.
-- NOTE(review): the [row, col] pattern is partial -- it assumes the input
-- contains exactly two readable numbers.
parseCoordinates :: String -> (Int, Int)
parseCoordinates input = (row, col)
  where
    [row, col] = catMaybes $ map readMay $ words cleaned
    cleaned = filter (not . isPunctuation) input
stripped = filter (not . isPunctuation) s | patrickherrmann/advent | src/Day25.hs | bsd-3-clause | 548 | 0 | 11 | 115 | 227 | 129 | 98 | 15 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE TupleSections #-}
import AuthAPI (app, authSettings, LoginForm(..), homePage, loginPage, Account(..))
import Control.Monad.IO.Class (liftIO)
import Crypto.Random (drgNew)
import Data.Default (def)
import Data.Int (Int64)
import Data.Maybe (fromMaybe)
import Data.Time.Clock (UTCTime(..))
import Network.HTTP.Media.RenderHeader (renderHeader)
import Network.HTTP.Types (Header, methodGet, methodPost, hContentType, hCookie)
import Network.Wai.Test (SResponse(..))
import Prelude ()
import Prelude.Compat
import Servant (FormUrlEncoded, contentType)
import Servant (Proxy(..))
import Servant.Server.Experimental.Auth.Cookie
import Test.Hspec (Spec, hspec, describe, context, it, shouldBe)
import Test.Hspec.Wai (WaiSession, WaiExpectation, shouldRespondWith, with, request, get)
import Text.Blaze (Markup)
import Text.Blaze.Renderer.Utf8 (renderMarkup)
import Web.Cookie (parseCookies)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as BSL
import qualified Data.ByteString.Lazy.Char8 as BSLC8
#if MIN_VERSION_hspec_wai (0,7,0)
import Test.Hspec.Wai.Matcher (bodyEquals, ResponseMatcher(..), MatchBody(..))
#else
import Test.Hspec.Wai (matchBody)
#endif
#if MIN_VERSION_servant (0,9,0)
import Web.FormUrlEncoded (ToForm, toForm, urlEncodeForm)
#else
import Servant (ToFormUrlEncoded, mimeRender)
#endif
#if MIN_VERSION_servant (0,9,1) && MIN_VERSION_directory (1,2,5)
import FileKeySet (mkFileKeySet, FileKSParams(..), mkFileKey)
import Control.Arrow ((***))
import Control.Monad (void, when)
import Data.Monoid ((<>))
import Control.Exception.Base (bracket)
import Network.HTTP.Types (urlEncode)
import Test.Hspec (shouldSatisfy)
import System.Directory (removeDirectoryRecursive, doesDirectoryExist)
import qualified Data.ByteString.Char8 as BSC8
import qualified Text.Blaze.Html5 as H
import qualified Text.Blaze.Html5.Attributes as A
#endif
-- | Everything a spec needs: an entropy source, the cookie settings, the
-- server key set (existentially quantified over its concrete type) and an
-- action that rotates in a fresh key.
data SpecState where
  SpecState :: (ServerKeySet k) =>
    { ssRandomSource :: RandomSource
    , ssAuthSettings :: AuthCookieSettings
    , ssServerKeySet :: k
    , ssGenerateKey :: IO ()
    } -> SpecState
main :: IO ()
main = do
  rs <- mkRandomSource drgNew 1000
  -- the basic specs run against a fixed persistent key; key generation
  -- is a no-op there
  return SpecState
    { ssRandomSource = rs
    , ssAuthSettings = authSettings
    , ssServerKeySet = mkPersistentServerKey "0123456789abcdef"
    , ssGenerateKey = return ()
    } >>= hspec . basicSpec
#if MIN_VERSION_servant (0,9,1) && MIN_VERSION_directory (1,2,5)
  -- the renewal specs need a rotatable, file-backed key set; the key
  -- directory is created fresh and removed afterwards via bracket
  let rmDir name = doesDirectoryExist name
        >>= \exists -> when exists $ removeDirectoryRecursive name
  bracket
    (do
      let keySetDir = "./test-key-set"
      rmDir keySetDir
      return FileKSParams
        { fkspMaxKeys = 3
        , fkspKeySize = 16
        , fkspPath = keySetDir
        } >>= \fksp -> (fksp,) <$> mkFileKeySet fksp)
    (rmDir . fkspPath . fst)
    (\(fksp, ks) -> hspec . renewalSpec $ SpecState
      { ssRandomSource = rs
      , ssAuthSettings = authSettings
      , ssServerKeySet = ks
      , ssGenerateKey = mkFileKey fksp
      })
#endif
-- | Specs for the core auth flow against a fixed persistent server key:
-- page rendering, login outcomes, cookie expiration flags, and acceptance/
-- rejection of cookies on the private page.
basicSpec :: SpecState -> Spec
basicSpec ss@(SpecState {..}) = describe "basic functionality" $ with
  (return $ app ssAuthSettings ssGenerateKey ssRandomSource ssServerKeySet) $ do
  let form = LoginForm {
          lfUsername = "mr_foo"
        , lfPassword = "password1"
        , lfRemember = False
        , lfRenew = False
        }
  context "home page" $ do
    it "responds successfully" $ do
      get "/" `shouldRespondWithMarkup` homePage
  context "login page" $ do
    it "responds successfully" $ do
      get "/login" `shouldRespondWithMarkup` (loginPage True)
    it "shows message on incorrect login" $ do
      (login form { lfPassword = "wrong" }) `shouldRespondWithMarkup` (loginPage False)
    -- a cookie is a session cookie iff it carries no Max-Age/Expires flag
    let hasExpirationFlags
          = not . null
          . filter ((`elem` ["Max-Age", "Expires"]) . fst)
          . parseCookies
    it "responds with session cookies if 'Remember me' is not set" $ do
      (login form { lfRemember = False }
        >>= return . hasExpirationFlags . getCookieValue)
        >>= liftIO . (`shouldBe` False)
    it "responds with normal cookies if `Remember me` is set" $ do
      (login form { lfRemember = True }
        >>= return . hasExpirationFlags . getCookieValue)
        >>= liftIO . (`shouldBe` True)
  context "private page" $ do
    it "rejects requests without cookies" $ do
      get "/private" `shouldRespondWith` 403 { matchBody = matchBody' "No cookies" }
    it "accepts requests with proper cookies" $ do
      (login form
        >>= return . getCookieValue
        >>= getPrivate) `shouldRespondWith` 200
    -- forging a cookie with the same key set must also be accepted
    it "accepts requests with proper cookies (sanity check)" $ do
      (login form
        >>= liftIO . forgeCookies ss authSettings ssServerKeySet
        >>= getPrivate) `shouldRespondWith` 200
    it "rejects requests with incorrect MAC" $ do
      -- forge with a different key so the MAC no longer matches
      let newServerKeySet = mkPersistentServerKey "0000000000000000"
      (login form
        >>= liftIO . forgeCookies ss authSettings newServerKeySet
        >>= getPrivate) `shouldRespondWithException` (IncorrectMAC "")
    it "rejects requests with expired cookies" $ do
      -- zero max-age makes the forged cookie expired on arrival
      let newAuthSettings = authSettings { acsMaxAge = 0 }
      let t = UTCTime (toEnum 0) 0
      (login form
        >>= liftIO . forgeCookies ss newAuthSettings ssServerKeySet
        >>= getPrivate) `shouldRespondWithException` (CookieExpired t t)
    let hasSetCookieHeader
          = maybe False (const True)
          . lookup "set-cookie"
          . simpleHeaders
    it "doesn't renew cookies when renew flag is not set" $ do
      (login (form { lfRemember = True, lfRenew = False })
        >>= return . getCookieValue
        >>= getPrivate
        >>= return . hasSetCookieHeader)
        >>= liftIO . (`shouldBe` False)
#if MIN_VERSION_servant(0,9,1)
    it "does renew cookies when renew flag is set" $ do
      (login form { lfRemember = True, lfRenew = True }
        >>= return . getCookieValue
        >>= getPrivate
        >>= return . hasSetCookieHeader)
        >>= liftIO . (`shouldBe` True)
#endif
#if MIN_VERSION_servant (0,9,1) && MIN_VERSION_directory (1,2,5)
-- | Specs for the key-rotation ("renewal") functionality: the server key
-- set can grow and shrink at runtime, and cookies signed with an old (but
-- still present) key are accepted and renewed.
renewalSpec :: SpecState -> Spec
renewalSpec (SpecState {..}) = describe "renewal functionality" $ with
  (return $ app ssAuthSettings ssGenerateKey ssRandomSource ssServerKeySet) $ do
  context "keys" $ do
    it "automatically creates a key" $ do
      keys <- extractKeys
      liftIO $ keys `shouldSatisfy` ((== 1) . length)
    it "adds new key" $ do
      keys <- extractKeys
      addKey
      keys' <- extractKeys
      -- new keys are prepended, so the old list is the tail of the new one
      liftIO $ keys `shouldBe` (tail keys')
    it "removes a key" $ do
      keys <- extractKeys
      remKey $ last keys
      keys' <- extractKeys
      liftIO $ keys' `shouldBe` (init keys)
  context "cookies" $ do
    let form = LoginForm {
          lfUsername = "mr_foo"
        , lfPassword = "password1"
        , lfRemember = False
        , lfRenew    = False
        }
    it "rejects requests with deleted keys" $ do
      cookieValue <- getCookieValue <$> login form
      getPrivate cookieValue `shouldRespondWith` 200
      key <- head <$> extractKeys
      addKey >> remKey key
      getPrivate cookieValue `shouldRespondWith` 403
    it "accepts requests with old key and renews cookie" $ do
      cookieValue <- getCookieValue <$> login form
      getPrivate cookieValue `shouldRespondWith` 200
      key <- head <$> extractKeys
      addKey
      -- the response signed with the (now old) key carries a renewed cookie
      newCookieValue <- getCookieValue <$> getPrivate cookieValue
      remKey key
      getPrivate newCookieValue `shouldRespondWith` 200
    it "does not renew cookies for the newest key" $ do
      cookieValue <- getCookieValue <$> login form
      _ <- getPrivate cookieValue `shouldRespondWith` 200
      r <- getPrivate cookieValue
      liftIO $ (lookup "set-cookie" $ simpleHeaders r) `shouldBe` Nothing
#endif
#if MIN_VERSION_hspec_wai (0,7,0)
-- | Exact-body matcher for 'shouldRespondWith' (hspec-wai >= 0.7 API).
matchBody' :: BSL.ByteString -> MatchBody
matchBody' = bodyEquals
#else
-- | Exact-body matcher for 'shouldRespondWith' (pre-0.7 hspec-wai API).
matchBody' :: BSL.ByteString -> Maybe BSL.ByteString
matchBody' = Just
#endif
#if MIN_VERSION_servant (0,9,0)
-- | Encode a value as an URL-encoded form body (servant >= 0.9 'ToForm').
encode :: ToForm a => a -> BSL.ByteString
encode = urlEncodeForm . toForm
#else
-- | Encode a value as an URL-encoded form body (older servant API).
encode :: ToFormUrlEncoded a => a -> BSL.ByteString
encode = mimeRender (Proxy :: Proxy FormUrlEncoded)
#endif
-- | Truncate the body of a response to at most the given number of bytes,
-- leaving headers and status untouched.
shrinkBody :: Int64 -> SResponse -> SResponse
shrinkBody n resp =
  resp { simpleBody = BSL.take n (simpleBody resp) }
-- | Assert that a request is answered with HTTP 403 and a body that starts
-- with the name of the given exception's constructor.  Only the constructor
-- name is compared: the response body is truncated to its length first, so
-- the exception's arguments are ignored.
shouldRespondWithException :: WaiSession SResponse -> AuthCookieException -> WaiExpectation
shouldRespondWithException req ex = do
  -- 'show' of a constructor application always begins with the (non-empty)
  -- constructor name, so taking the prefix up to the first space is total,
  -- unlike the previous partial 'head . words'.
  let exception = BSLC8.pack . takeWhile (/= ' ') . show $ ex
  (shrinkBody (BSLC8.length exception) <$> req) `shouldRespondWith` 403 {
      matchBody = matchBody' exception
    }
-- | Assert that a request succeeds (HTTP 200) and that its body is exactly
-- the rendered markup.
shouldRespondWithMarkup :: WaiSession SResponse -> Markup -> WaiExpectation
shouldRespondWithMarkup req markup =
  req `shouldRespondWith` 200 { matchBody = matchBody' (renderMarkup markup) }
-- | @Content-Type@ header used when submitting URL-encoded forms.
formContentType :: Header
formContentType =
  (hContentType, renderHeader (contentType (Proxy :: Proxy FormUrlEncoded)))
-- | POST the given login form to @/login@ and return the raw response.
login :: LoginForm -> WaiSession SResponse
login theForm =
  request methodPost "/login" [formContentType] (encode theForm)
-- | GET the protected page, presenting the given raw cookie header value.
getPrivate :: BS.ByteString -> WaiSession SResponse
getPrivate cookie =
  request methodGet "/private" [(hCookie, cookie)] ""
-- | Decrypt the session payload out of a response's @Set-Cookie@ header.
-- Calls 'error' when the response carries no session cookies (acceptable
-- in tests; would be a bug in production code).
extractSession :: SpecState -> SResponse -> IO (ExtendedPayloadWrapper Account)
extractSession SpecState {..} SResponse {..} = maybe
  (error "cookies aren't available")
  (decryptSession ssAuthSettings ssServerKeySet)
  (parseSessionResponse ssAuthSettings simpleHeaders)
-- | Re-encrypt the session extracted from a response using (possibly
-- different) auth settings and key set, yielding a forged cookie header
-- value.  Used to simulate tampered / expired / differently-keyed cookies.
forgeCookies :: (ServerKeySet k)
  => SpecState
  -> AuthCookieSettings
  -> k
  -> SResponse
  -> IO BS.ByteString
forgeCookies ss newAuthSettings newServerKeySet r = do
  session <- extractSession ss r
  renderSession newAuthSettings (ssRandomSource ss) newServerKeySet def (epwSession session) ()
#if MIN_VERSION_servant (0,9,1) && MIN_VERSION_directory (1,2,5)
-- | Scrape the server keys out of the HTML returned by @/keys@.
--
-- The open/close tags surrounding each key are recovered by rendering a
-- probe element around a sentinel character ('#') and splitting on it, so
-- the scraper stays in sync with the actual markup produced by the app.
extractKeys :: WaiSession [BS.ByteString]
extractKeys = (extractKeys' . BSL.toStrict . simpleBody) <$> get "/keys" where
  del = '#'
  (openTag, closeTag) = (id *** BS.drop 1) $ BSC8.span (/= del) $
    BSL.toStrict . renderMarkup $
    H.span H.! A.class_ "key" $ H.toHtml [del]
  shrinkBy prefix = BS.drop . BS.length $ prefix
  -- repeatedly find openTag..closeTag spans and collect their contents
  extractKeys' body = let
    body' = snd $ BS.breakSubstring openTag body
    (key, rest) = shrinkBy openTag *** shrinkBy closeTag $
      BS.breakSubstring closeTag body'
    in if BS.null body'
       then []
       else key:(extractKeys' rest)
-- | Ask the server to generate and add a fresh key.
addKey :: WaiSession ()
addKey = void (get "/keys/add")
-- | Ask the server to remove the given key (URL-encoded into the path).
remKey :: BS.ByteString -> WaiSession ()
remKey k = void . get $ "/keys/rem/" <> urlEncode True k
#endif
-- | Extract the raw @Set-Cookie@ header value from a response; calls
-- 'error' when no such header is present (fine for tests).
getCookieValue :: SResponse -> BS.ByteString
getCookieValue resp =
  case lookup "set-cookie" (simpleHeaders resp) of
    Just v  -> v
    Nothing -> error "cookies aren't available"
| zohl/servant-auth-cookie | example/Test.hs | bsd-3-clause | 10,965 | 28 | 28 | 2,360 | 2,906 | 1,549 | 1,357 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module Geekingfrog.Views.Errors where
import Data.Text as T
import Text.Blaze.Html5 as H
import Text.Blaze.Html5.Attributes as A
import Geekingfrog.Views.Partials (pageHead, navHeader, pageFooter)
-- | Full error page: standard head/nav/footer around a hero title and an
-- ASCII frog speaking the given message.
genericError :: Text -> Text -> H.Markup
genericError title msg = docTypeHtml $ do
  H.head $ pageHead Nothing
  body ! class_ "home" $ do
    navHeader Nothing
    section ! class_ "hero" $
      H.div ! class_ "container hero-container" $
        h1 ! class_ "main-title main-title__huge" $ text title
    section ! class_ "container content" $
      -- fixed-width <pre> so the ASCII art keeps its alignment
      pre ! class_ "center" ! A.style "width: 50ch;" $ text (frogWithText msg)
    pageFooter
notFound = genericError "Oh noooooes!" "Couldn't find what you were looking for."
-- | Put the given message into an ASCII speech bubble drawn above the frog.
--
-- The parameter was previously named @text@, shadowing the 'text' combinator
-- imported from "Text.Blaze.Html5"; it is renamed to remove the shadowing
-- warning and the ambiguity for readers.  Behaviour is unchanged.
frogWithText :: Text -> Text
frogWithText msg = let
  -- +2 accounts for the padding spaces inside the "| msg |" bubble
  len = T.length msg + 2
  line = append " " $ append (T.replicate len "-") "\n"
  wrappedText = T.concat ["| ", msg, " |\n"]
  in T.concat [line, wrappedText, line, frog]
-- | The ASCII-art frog mascot shown on error pages.
-- NOTE: the string-gap backslashes and the inner spacing are significant;
-- do not re-indent or trim this literal.
frog :: Text
frog = "\
\              \\ .--._.--.            \n\
\               \\ ( x x )             \n\
\               /  .   .  \\           \n\
\               `._______.'.          \n\
\               /(         )\\         \n\
\             _/  \\  \\   /  /  \\_     \n\
\          ~   `  \\  \\ /  /  '   ~.   \n\
\         {    -.   \\  V  /   .-    } \n\
\       _ _`.    \\  |  |  |  /    .'_ _ \n\
\       >_       _} |  |  | {_       _< \n\
\        /. - ~ ,_-'  .^.  `-_, ~ - .\\ \n\
\          '-'|/   \\|`-`  "
| geekingfrog/geekingfrog.com | src/Geekingfrog/Views/Errors.hs | bsd-3-clause | 1,487 | 0 | 17 | 503 | 317 | 164 | 153 | 26 | 1 |
module Vandelay.DSL.Estimates.ParserT
( readEstimates
) where
import Control.Monad.Error.Class
import qualified Control.Monad.State as S
import qualified Data.Map.Lazy as HM
import Text.Parsec hiding (many, optional, (<|>))
import Vandelay.DSL.Core hiding (try)
type EstParser m = ParsecT String () m
-- | Read and parse an estimates file.  The contents are parsed with
-- 'estimatesP'; a parse failure is re-thrown in the 'MonadError' layer.
readEstimates ∷ (MonadError ErrorMsg m, MonadIO m)
              ⇒ String         -- ^ File name
              → m EstimatesHM
readEstimates path = do
  contents <- safeReadFileWithError path "Estimates file"
  result   <- runParserT (estimatesP path) () ("Estimates file: " ++ path) (unpack contents)
  either (throwError . tshow) return result
-- Parser
-- | Parser for a whole estimates file: a tab-separated header of model
-- names followed by data rows, each row carrying one cell per model.
estimatesP ∷ (MonadError ErrorMsg m, MonadIO m)
           ⇒ String  -- ^ Filename, used as the key of the resulting map
           → EstParser m EstimatesHM
estimatesP fileName = do
  modelNames <- map pack <$> header
  rows       <- many (rowOfLength (length modelNames) <* eol)
  return (singletonMap fileName (formCoeffs modelNames rows))
-- Header
-- | Header row: tab-separated model names; the first column (the row-label
-- column) is discarded.  'sepBy' with an always-succeeding cell parser
-- yields at least one element, so 'unsafeTail' cannot fail here.
header ∷ (MonadError ErrorMsg m, MonadIO m) ⇒ EstParser m [String]
header = unsafeTail <$> sepBy (many (noneOf "\n\r\t") ) tab <* eol
-- Data row
-- | A data row: a coefficient command followed by exactly @i@
-- tab-separated data cells.
rowOfLength ∷ (MonadError ErrorMsg m, MonadIO m) ⇒ Int → EstParser m (CoefCmd, [DataItem])
rowOfLength i = (,) <$> (coefcmd <* tab) <*> sepByN i cell tab
-- | A single data cell: a number (with optional significance stars), an
-- empty/whitespace cell, or arbitrary text, tried in that order.
cell ∷ (MonadError ErrorMsg m, MonadIO m) ⇒ EstParser m DataItem
cell = try numberCell <|> emptyCell <|> textCell
-- Coefficient commands
-- | First column of a row: blank means "append to the last coefficient",
-- anything else starts a new coefficient with that name.
coefcmd ∷ (MonadError ErrorMsg m, MonadIO m) ⇒ EstParser m CoefCmd
coefcmd = try adddata
      <|> newcoef
-- | Whitespace-only label column ⇒ 'AddData' (stops before tab/eol).
adddata ∷ (MonadError ErrorMsg m, MonadIO m) ⇒ EstParser m CoefCmd
adddata = manyTill space (lookAhead (tab <|> (eol >> return ' '))) >> return AddData
-- | Non-blank label column ⇒ start a new coefficient named by the label.
newcoef ∷ (MonadError ErrorMsg m, MonadIO m) ⇒ EstParser m CoefCmd
newcoef = NewCoef <$> many (noneOf "\t\n\r")
-- DataItems
-- | Fallback cell parser: any run of non-separator characters as text.
textCell ∷ (MonadError ErrorMsg m, MonadIO m) ⇒ EstParser m DataItem
textCell = StrData . pack <$> many (noneOf "\t\n\r")
-- | Whitespace-only cell ⇒ 'BlankData' (stops before tab/eol).
emptyCell ∷ (MonadError ErrorMsg m, MonadIO m) ⇒ EstParser m DataItem
emptyCell = manyTill space (lookAhead (tab <|> (eol >> return ' '))) >> return BlankData
-- | Numeric cell: a double followed by optional significance stars.
numberCell ∷ (MonadError ErrorMsg m, MonadIO m) ⇒ EstParser m DataItem
numberCell = ValData <$> double <*> sigStars
-- | Count of trailing significance stars ('*') after a number.
sigStars ∷ (MonadError ErrorMsg m, MonadIO m) ⇒ EstParser m Int
sigStars = length <$> many (char '*')
-- Parser tools
-- | @sepByN n p sep@ parses exactly @n@ occurrences of @p@ separated by
-- @sep@ (no trailing separator).
--
-- For @n <= 0@ the empty list is returned without consuming input; the
-- previous version had no base case below 1 and recursed forever in that
-- situation (reachable when a file's header declares zero model columns).
sepByN ∷ (MonadError ErrorMsg m, MonadIO m)
       ⇒ Int
       → EstParser m a
       → EstParser m sep
       → EstParser m [a]
sepByN n p sep
  | n <= 0    = pure []
  | n == 1    = (:[]) <$> p
  | otherwise = (:) <$> (p <* sep) <*> sepByN (n-1) p sep
-- Make coefficients from the individual rows
-- | Fold the parsed rows into one coefficient map per model, then combine
-- them into a single map keyed by model name.  A 'State' of the most
-- recently started coefficient name threads through the rows so that
-- 'AddData' rows know which coefficient to extend.
formCoeffs ∷ [ModelName] → [(CoefCmd,[DataItem])] → ModelHM
formCoeffs mns cmds =
    unions $ zipWith (\mn chm → singletonMap mn chm) mns coefMaps
  where
    coefMaps ∷ [CoefHM]
    coefMaps = S.evalState coefMaps' Nothing
    coefMaps' ∷ S.State (Maybe Text) [CoefHM]
    coefMaps' = foldlM processCmd baseCoefMaps cmds
    baseCoefMaps ∷ [CoefHM]
    baseCoefMaps = mns $> emptyCoefmap
    emptyCoefmap ∷ CoefHM
    emptyCoefmap = mempty
    processCmd
      ∷ [CoefHM] -- ^ Coefficient Cell map
      → (CoefCmd, [DataItem]) -- ^ Construction command
      → S.State (Maybe Text) [CoefHM] -- ^ (State: Last Coefficient) Updated data
    processCmd chms (NewCoef cname, ds) = do
      S.put (Just $ pack cname)
      return $ zipWith (\chm d → insertMap (pack cname) [d] chm) chms ds
    processCmd chms (AddData, ds) = do
      S.get >>= \case
        -- AddData before any NewCoef: silently ignored
        Nothing → return chms -- error "processCmd with no last coefficient"
        Just lcn → return $ zipWith (\chm d → insertWith (flip (<>)) lcn [d] chm) chms ds -- Flip (<>) required to preserve order
-- Estimate formation command
-- | How a data row contributes to the coefficient maps.
data CoefCmd = NewCoef String  -- ^ Start a new coefficient with this name
             | AddData         -- ^ Append data to the most recent coefficient
             deriving Show
| tumarkin/vandelay | src/Vandelay/DSL/Estimates/ParserT.hs | bsd-3-clause | 3,999 | 0 | 17 | 945 | 1,345 | 710 | 635 | -1 | -1 |
{-# LANGUAGE AllowAmbiguousTypes #-}
{-# LANGUAGE UndecidableInstances #-}
module BitRecordsSpec (spec) where
import Data.Bits
import qualified Data.ByteString.Lazy as B
import Data.Kind.Extra
import Data.Proxy
import Data.Type.BitRecords
import Data.Type.Equality ()
import GHC.TypeLits
import Test.Hspec
import Test.QuickCheck (property)
import Test.TypeSpecCrazy
import Text.Printf
import Util
import Prelude hiding (id, (.))
-- | Basic specs: record sizes for Maybe/list/Bool-style fields (checked at
-- the type level via TypeSpecCrazy), plus runtime printing via
-- 'bitBuffer64Printer' and 'showRecord'.
basicsSpec :: Spec
basicsSpec = do
  describe "Maybe, Lists, Either, Bool, 'True, 'False, FlagJust, FlagNothing" $ do
    let checkFlagJust ::
          "Type Level Bool, 'True, 'False, FlagJust, FlagNothing Accessors"
          ################################################################## "The record size works"
          ~~~~~~~~~~~~~~~~~~~~~~~~ 1 `ShouldBe` FieldWidth (FlagJust 'Nothing)
          -* 1 `ShouldBe` FieldWidth (FlagJust ('Just "Blah"))
          -* 1 `ShouldBe` FieldWidth (FlagNothing ('Just "Blah"))
          -* 0 `ShouldBe` SizeInBits (From (BitRecordOfList (Konst 'BitRecordMember) '[]))
          -* 10 `ShouldBe` SizeInBits (From (BitRecordOfList (Konst 'BitRecordMember) '[Field 10]))
          -* 20 `ShouldBe` SizeInBits (From (BitRecordOfList (Konst 'BitRecordMember) '[Field 10, Field 10]))
          -* 1 `ShouldBe` SizeInBits ('BitRecordMember Flag)
          -* 1 `ShouldBe` FieldWidth (Flag := 'True)
          -* 1 `ShouldBe` SizeInBits ('BitRecordMember (Flag := 'False))
        checkFlagJust = Valid
    runIO $ print checkFlagJust
  describe "bitBuffer64Printer" $ do
    describe "Just x" $
      it "writes x" $
        bitBuffer64Printer (Proxy :: Proxy (OptionalRecord ('Just ('BitRecordMember (Flag := 'True))))) `shouldBe` "<< 80 >>"
    describe "Nothing" $
      it "writes nothing" $
        bitBuffer64Printer (Proxy :: Proxy (OptionalRecord ('Nothing))) `shouldBe` "<< >>"
    describe "'[]" $
      it "writes nothing" $
        bitBuffer64Printer (Proxy :: Proxy (BitRecordOfList (Konst 'BitRecordMember) ('[]))) `shouldBe` "<< >>"
    describe "'[x1, x2]" $
      it "writes x1 then x2" $
        bitBuffer64Printer (Proxy :: Proxy (BitRecordOfList (Konst 'BitRecordMember) ('[FieldU8 := 1, FieldU8 := 2]))) `shouldBe` "<< 01 02 >>"
    describe "'True" $
      it "writes a single bit with a 1" $
        bitBuffer64Printer (Proxy :: Proxy (RecordField (Flag := 'True))) `shouldBe` "<< 80 >>"
    describe "'False" $
      it "writes a single bit with a 0" $
        bitBuffer64Printer (Proxy :: Proxy (RecordField (Flag := 'False))) `shouldBe` "<< 00 >>"
    describe "@: labelled fields" $ do
      it "writes them ..." $
        let fld = Proxy @(From (RecordField ("foo" @: FlagJust 'Nothing)))
        in bitBuffer64Printer fld `shouldBe` "<< 00 >>"
    describe "FlagJust" $ do
      it "writes a single bit '1' for a 'Just ...' parameter" $
        bitBuffer64Printer (Proxy :: Proxy (RecordField (FlagJust ('Just "test")))) `shouldBe` "<< 80 >>"
      it "writes a single bit '0' for a 'Nothing' parameter" $
        bitBuffer64Printer (Proxy :: Proxy (RecordField (FlagJust 'Nothing))) `shouldBe` "<< 00 >>"
    describe "FlagNothing" $ do
      it "writes a single bit '0' for a 'Just ...' parameter" $
        bitBuffer64Printer (Proxy :: Proxy (RecordField (FlagNothing ('Just "test")))) `shouldBe` "<< 00 >>"
      it "writes a single bit '1' for a 'Nothing' parameter" $
        bitBuffer64Printer (Proxy :: Proxy (RecordField (FlagNothing 'Nothing))) `shouldBe` "<< 80 >>"
  describe "showRecord" $ do
    describe "Maybe" $ do
      it "prints 'Just x'" $
        showRecord (Proxy :: Proxy (From (OptionalRecord ('Just ('BitRecordMember FieldU32))))) `shouldBe` "U32"
      it "prints nothing for 'Nothing'" $
        showRecord (Proxy :: Proxy (From (OptionalRecord 'Nothing))) `shouldBe` "\n"
    describe "List" $ do
      it "prints '[ FieldI32 ]'" $
        showRecord (Proxy :: Proxy (From (BitRecordOfList (Konst 'BitRecordMember) '[FieldI32])))
        `shouldBe` "I32"
      it "prints '[Flag := True, Flag := False]'" $
        showRecord (Proxy :: Proxy (From (BitRecordOfList (Konst 'BitRecordMember) '[Flag := 'True, Flag := 'False])))
        `shouldBe` "boolean := yes\nboolean := no"
    describe "Bool" $ do
      it "prints 'True" $
        showRecord (Proxy :: Proxy ('BitRecordMember (Flag := 'True))) `shouldBe` "boolean := yes"
      it "prints 'False" $
        showRecord (Proxy :: Proxy ('BitRecordMember (Flag := 'False))) `shouldBe` "boolean := no"
      it "prints a Bool" $
        showRecord (Proxy :: Proxy ('BitRecordMember Flag)) `shouldBe` "boolean"
    describe "FlagJust" $ do
      it "prints a 'FlagJust 'Just ..'" $
        showRecord (Proxy :: Proxy ('BitRecordMember (FlagJust ('Just "123")))) `shouldBe` "boolean := yes"
      it "prints a 'FlagJust 'Nothing'" $
        showRecord (Proxy :: Proxy ('BitRecordMember (FlagJust 'Nothing))) `shouldBe` "boolean := no"
    describe "FlagNothing" $ do
      it "prints 'FlagNothing 'Just ..'" $
        showRecord (Proxy :: Proxy ('BitRecordMember (FlagNothing ('Just "123")))) `shouldBe` "boolean := no"
      it "prints a 'FlagNothing 'Nothing'" $
        showRecord (Proxy :: Proxy ('BitRecordMember (FlagNothing 'Nothing))) `shouldBe` "boolean := yes"
#ifdef COMPLEXTESTS
-- | Specs for fixed-size record arrays ('RecArray' / @^^@): sizes checked
-- at the type level, plus show/print behaviour repeating the body n times.
arraySpec :: SpecWith ()
arraySpec =
  describe "RecArray" $ do
    describe "level record accessors" $ do
      let checkArrayRec ::
            "BitRecord accessors involving RecArray"
            #######################################
            "The record size works"
            ~~~~~~~~~~~~~~~~~~~~~~~~
            1 `ShouldBe` SizeInBits (From (RecArray ('BitRecordMember Flag) 1))
            -* 91 `ShouldBe` SizeInBits (From (("foo" @: Flag .+. FieldU8) ^^ 10) :+. Flag)
            -* 91 `ShouldBe` SizeInBits (From (RecArray ("foo" @: Flag .+. FieldU8) 10) :+. Flag)
          checkArrayRec = Valid
      runIO $ print checkArrayRec
    describe "showRecord" $
      it "appends its body n times" $
        let expected = "utf-8(40) := <<hello>> [5 Bytes]\nutf-8(40) := <<hello>> [5 Bytes]\nutf-8(40) := <<hello>> [5 Bytes]\nutf-8(40) := <<hello>> [5 Bytes]\nutf-8(40) := <<hello>> [5 Bytes]"
            actual = showARecord (Proxy @ (('BitRecordMember [utf8|hello|] ^^ 5)))
        in actual `shouldBe` expected
    describe "bitBuffer64Printer" $
      it "writes its contents n times to the builder" $
        let actual = bitBuffer64Printer (Proxy :: Proxy (('BitRecordMember (Field 24 := 0x010203) ^^ 4)))
            expected = "<< 01 02 03 01 02 03 01 02 03 01 02 03 >>"
        in actual `shouldBe` expected
-- | Specs for 'Sized' records and sized strings: type-level size checks,
-- plus rendering with 8/16/32/64-bit big-endian length prefixes.
sizedSpec :: SpecWith ()
sizedSpec =
  describe "Sized" $ do
    describe "TypeChecks" $
      let
        checkSized ::
          "Sized"
          #########
          "SizedString"
          ~~~~~~~~~~~~~~~
          88 `ShouldBe` FieldWidth [utf8|Hello World|]
          -* 104 `ShouldBe` SizeInBits (From (RecordField [utf8|Heλλo World|]))
          -/-
          "Sized BitRecord Members"
          ~~~~~~~~~~~~~~~~~~~~~~~~
          8 `ShouldBe` SizeInBits (From (Sized8 'EmptyBitRecord))
          -* 9 `ShouldBe` SizeInBits (From (Sized8 ('BitRecordMember Flag)))
          -* 0 `ShouldBe` SizeInBytes 'EmptyBitRecord
          -* 1 `ShouldBe` SizeInBytes ('BitRecordMember Flag)
          -/-
          "SizedField"
          ~~~~~~~~~~~~
          9 `ShouldBe` SizeInBits (From (SizedField8 Flag))
          -* SizeInBits (From (SizedField8 Flag) :+: (Flag .+: 'BitRecordMember FieldU32)) `ShouldBe` 42
          -* 1 `ShouldBe` SizeInBytes Flag
        -- TODO add more Sized tests, especially for SizedField
        checkSized = Valid
      in runIO $ print checkSized
    describe "showRecord" $ do
      describe "SizedString" $
        it "renders a string containing wide utf-8 characters to a header containing the number of chars and the actual string" $
          showARecord (Proxy :: Proxy (RecordField [utf8|Heλλo World!|])) `shouldBe` "utf-8(112) := <<He\955\955o World!>> [14 Bytes]"
      describe "Sized SizeField16 SizedString" $
        it "renders the number bytes not chars as the size field value" $
          showARecord (Proxy :: Proxy (SizedField16 [utf8|Heλλo World!|])) `shouldBe` "size: U16 := hex: 000e (dec: 14)\nutf-8(112) := <<He\955\955o World!>> [14 Bytes]"
    describe "bitBuffer64Printer" $ do
      describe "no length prefix" $
        it "renders no size prefix and the string as UTF-8 bytes" $
          bitBuffer64Printer (Proxy :: Proxy (RecordField [utf8|ABC|]))
          `shouldBe`
          "<< 41 42 43 >>"
      describe "8-bit length prefix" $
        it "renders a single byte size prefix and the string as UTF-8 bytes" $
          bitBuffer64Printer (Proxy :: Proxy (SizedField8 [utf8|ABC|]))
          `shouldBe`
          "<< 03 41 42 43 >>"
      describe "16-bit length prefix" $
        it "renders a big endian 16 bit size prefix and the string as UTF-8 bytes" $
          bitBuffer64Printer (Proxy :: Proxy (SizedField16 [utf8|ABC|]))
          `shouldBe`
          "<< 00 03 41 42 43 >>"
      describe "32-bit length prefix" $
        it "renders a big endian 32 bit size prefix and the string as UTF-8 bytes" $
          bitBuffer64Printer (Proxy :: Proxy (SizedField32 [utf8|ABC|]))
          `shouldBe`
          "<< 00 00 00 03 41 42 43 >>"
      describe "64-bit length prefix" $
        it "renders a big endian 64 bit size prefix and the string as UTF-8 bytes" $
          bitBuffer64Printer (Proxy :: Proxy (SizedField64 [utf8|ABC|]))
          `shouldBe`
          "<< 00 00 00 00 00 00 00 03 41 42 43 >>"
-- | A record whose fields are padded to byte boundaries (96 bits total,
-- verified by 'checkTestRecAligned').
type TestRecAligned =
  "bar" @: Field 8 .+:
  Field 8 := 0 .+:
  "baz" @: Field 8 .+:
  Field 32 := 0 .+:
  "foo" @: Field 8 .+:
  Field 8 := 0 .+:
  "oof" @: Field 8 .+:
  Field 8 := 0 .+.
  "rab" @: Field 8
-- | Type-level check: 'TestRecAligned' is exactly 96 bits.
checkTestRecAligned
  :: Expect '[ ShouldBe 96 (SizeInBits TestRecAligned) ]
checkTestRecAligned = Valid
-- | A record with a 7-bit field, hence not byte-aligned (71 bits total,
-- verified by 'checkTestRecUnAligned').
type TestRecUnAligned =
  "bar" @: Field 8 .+:
  Field 8 := 0 .+:
  "baz" @: Field 7 .+:
  Field 32 := 0 .+:
  "foo" @: Field 8 .+.
  Field 8 := 0xfe
-- | Type-level check: 'TestRecUnAligned' is exactly 71 bits.
checkTestRecUnAligned
  :: Expect '[ ShouldBe 71 (SizeInBits TestRecUnAligned) ]
checkTestRecUnAligned = Valid
-- | Type-level tests for 'TakeLastN' (suffix of a type-level list).
testTakeLastN ::
  "Taking the last n elements of a list" #######################################
  TakeLastN 0 '[1,2,3] `ShouldBe` ('[] :: [Nat])
  -* TakeLastN 1 '[1,2,3] `ShouldBe` '[3]
  -* TakeLastN 2 '[1,2,3] `ShouldBe` '[2,3]
  -* TakeLastN 5 '[1,2,3] `ShouldBe` '[1,2,3]
testTakeLastN = Valid
-- | Type-level tests for 'Mod' (remainder).
testRem
  :: Expect '[ Mod 0 3 `ShouldBe` 0
             , Mod 1 3 `ShouldBe` 1
             , Mod 2 3 `ShouldBe` 2
             , Mod 3 3 `ShouldBe` 0
             , Mod 4 3 `ShouldBe` 1
             , Mod 5 3 `ShouldBe` 2
             , Mod 6 3 `ShouldBe` 0
             ]
testRem = Valid
-- | Type-level tests for 'ModPow2' (remainder modulo a power of two).
testRemPow2
  ::
  "ModPow2"
  #########
  "Remainder of '1'"
  ~~~~~~~~~~~~~~~~~~
  It "1 `ModPow2` 1 is 1" (Is 1 (ModPow2 1 1))
  -* It "1 `ModPow2` 8 is 1" (Is 1 (ModPow2 1 8))
  -/-
  "Remainder of '3916441'"
  ~~~~~~~~~~~~~~~~~~~~~~~~~~~
  It " `ModPow2` 1 is 1" (Is 1 (ModPow2 3916441 1))
  -* It " `ModPow2` 4 is 9" (Is 9 (ModPow2 3916441 4))
  -* It " `ModPow2` 8 is 153" (Is 153 (ModPow2 3916441 8))
testRemPow2 = Valid
-- | Type-level tests for 'Div' (integer division).
testDiv
  :: Expect '[ Div 0 3 `ShouldBe` 0
             , Div 1 3 `ShouldBe` 0
             , Div 2 3 `ShouldBe` 0
             , Div 3 3 `ShouldBe` 1
             , Div 4 3 `ShouldBe` 1
             , Div 5 3 `ShouldBe` 1
             , Div 6 3 `ShouldBe` 2
             , Div 144 13 `ShouldBe` 11
             -- , Div 512 128 `ShouldBe` 11
             ]
testDiv = Valid
-- | Type-level tests for the bit-manipulation families: TestHighBit,
-- ToBits/FromBits, ShiftBitsR, GetMostSignificantBitIndex and ShiftR.
--
-- NOTE(review): the "returns 2 for 5" case below actually checks the
-- value 4 — likely a copy-paste slip; confirm and change the argument to 5.
testNatBits
  :: "Type Level bit operations"
     ###########################
     "TestHighBit"
     ~~~~~~~~~~~~
     ShouldBeFalse (TestHighBit 127 8)
     -* ShouldBeFalse (TestHighBit 127 7)
     -* ShouldBeTrue (TestHighBit 127 6)
     -* ShouldBeFalse (TestHighBit 32 6)
     -* ShouldBeTrue (TestHighBit 32 5)
     -* ShouldBeFalse (TestHighBit 16 5)
     -* ShouldBeTrue (TestHighBit 16 4)
     -* ShouldBeFalse (TestHighBit 8 4)
     -* ShouldBeTrue (TestHighBit 8 3)
     -* ShouldBeFalse (TestHighBit 4 3)
     -* ShouldBeTrue (TestHighBit 4 2)
     -* ShouldBeFalse (TestHighBit 2 2)
     -* ShouldBeTrue (TestHighBit 2 1)
     -* ShouldBeFalse (TestHighBit 0 1)
     -* ShouldBeTrue (TestHighBit 1 0)
     -/-
     "ToBits"
     ~~~~~~~~~
     It "returns the empty list for a zero bit length"
        (ToBits 1023 0 `ShouldBe` ('[] :: [Bool]) )
     -* It "returns [] for a single unset bit"
        (ShouldBe ('[] :: [Bool]) (ToBits 0 1))
     -* It "returns [True] for a single set bit"
        (ShouldBe '[ 'True] (ToBits 1 1))
     -* It "returns [True, False] when getting two bits from 0x2"
        (ShouldBe '[ 'True, 'False] (ToBits 0x2 2))
     -* It "returns the list of bits in correct order"
        (ShouldBe '[ 'True, 'True, 'True, 'True
                   , 'False, 'False, 'False, 'False]
                  (ToBits 0xf0 8))
     -* It "returns no leading 'False (i.e. it omits leading zero bits)"
        (ShouldBe '[ 'True, 'True, 'True, 'True]
                  (ToBits 0x0000000f 32))
     -/-
     "FromBits"
     ~~~~~~~~~
     It "returns 0 for '[]"
        (ShouldBe 0 (FromBits ('[] :: [Bool])))
     -* It "returns 0 for [False]"
        (ShouldBe 0 (FromBits '[ 'False]))
     -* It "returns 1 for [True]"
        (ShouldBe 1 (FromBits '[ 'True]))
     -* It "returns 2 for [True, False]"
        (ShouldBe 2 (FromBits '[ 'True, 'False]))
     -* It "returns 4 for [True, False, False]"
        (ShouldBe 4 (FromBits '[ 'True, 'False, 'False]))
     -* It "returns 5 for [True, False, True]"
        (ShouldBe 5 (FromBits '[ 'True, 'False, 'True]))
     -/-
     "ShiftBitsR"
     ~~~~~~~~~~~~
     It "returns the input bits for n == 0"
        (ShouldBe '[ 'True, 'False] (ShiftBitsR ['True, 'False] 0))
     -* It "returns '[] when shifting [True] 1 bits"
        (ShouldBe ('[] :: [Bool]) (ShiftBitsR '[ 'True ] 1))
     -* It "returns '[True] when shifting [True, True] 1 bits"
        (ShouldBe '[ 'True] (ShiftBitsR '[ 'True, 'True ] 1))
     -* It "returns (ToBits 12 8) when shifting (ToBits 97 8) 3 bits to the right"
        (ShouldBe (ToBits 12 8) (ShiftBitsR (ToBits 97 8) 3))
     -/-
     "GetMostSignificantBitIndex"
     ~~~~~~~~~~~~~~~
     It "returns 1 for 0"
        (ShouldBe 1 (GetMostSignificantBitIndex 64 0))
     -* It "returns 1 for 1"
        (ShouldBe 1 (GetMostSignificantBitIndex 64 1))
     -* It "returns 1 for 2"
        (ShouldBe 1 (GetMostSignificantBitIndex 64 2))
     -* It "returns 1 for 3"
        (ShouldBe 1 (GetMostSignificantBitIndex 64 3))
     -* It "returns 2 for 4"
        (ShouldBe 2 (GetMostSignificantBitIndex 64 4))
     -* It "returns 2 for 5"
        (ShouldBe 2 (GetMostSignificantBitIndex 64 4))
     -* It "returns 8 for 511"
        (ShouldBe 8 (GetMostSignificantBitIndex 64 511))
     -* It "returns 63 for (2^64 - 1)"
        (ShouldBe 63 (GetMostSignificantBitIndex 64 (2^64 - 1)))
     -/-
     "ShiftR"
     ~~~~~~~~~
     It "returns '0' when shifting '42' 6 bits to the right"
        (ShouldBe 0 (ShiftR 64 42 6))
     -* It "returns 2 when shifting 512 8 bits to the right"
        (ShouldBe 2 (ShiftR 64 512 8))
testNatBits = Valid
#endif
-- | Top-level spec: basics always; type-function soundness and builder
-- round trips only when compiled with COMPLEXTESTS.
spec :: Spec
spec = do
  basicsSpec
#ifdef COMPLEXTESTS
  sizedSpec
  arraySpec
  describe "The Set of Type Functions" $
    it "is sound" $ do
      -- printing the 'Valid' witnesses forces the type-level checks
      print (Valid :: Expect (SizeInBits (Flag .+. Field 7) `Is` 8))
      print testTakeLastN
      print testRem
      print testRemPow2
      print testDiv
      print testNatBits
      print checkTestRecAligned
      print checkTestRecUnAligned
  describe "showARecord" $ do
    it "prints (Field 4 .+. (Field 4 := 0x96)) to \"<..>0110\"" $
      let actual = showRecord (Proxy :: Proxy (Field 4 .+. Field 4 := 0x96))
      in actual `shouldBe` "bits(4)\nbits(4) := 10010110 (hex: 96 dec: 150)"
  describe "StaticLazybytestringbuilder" $ do
    it "writes (and flushes) bits" $
      let rec = Proxy
          rec :: Proxy TestRecUnAligned
          actualB :: B.ByteString
          actualB =
            writeBits
              (toFunction (toFunctionBuilder rec :: FunctionBuilder BitBuilder BitBuilder (B 8 -> B 7 -> B 8 -> BitBuilder))
                1
                3
                7)
          actual = printByteString actualB
      in actual `shouldBe`
         "<< 01 00 06 00 00 00 00 0f fc >>"
  describe "Formatting sub-byte fields" $ do
    it "only the addressed bits are copied to the output" $
      property $ \value ->
        let rec = Proxy
            rec :: Proxy (Field 4 := 0 .+. "here" @: Field 4)
            actualB :: B.ByteString
            actualB = writeBits (toFunction (toFunctionBuilder rec :: FunctionBuilder BitBuilder BitBuilder (B 4 -> BitBuilder)) value)
            actual = printByteString actualB
            expected = printf "<< %.2x >>" (value .&. 0xf)
        in actual `shouldBe` expected
    it "renders (Flag := 0 .+. Field 7 := 130) to << 02 >>" $
      let rec = Proxy
          rec :: Proxy (Flag := 'False .+. Field 7 := 130)
          actual = printByteString b
            where b = writeBits (toFunction (toFunctionBuilder rec :: FunctionBuilder BitBuilder BitBuilder BitBuilder))
      in actual `shouldBe` "<< 02 >>"
  describe "LazyByteStringBuilder" $
    describe "writeBits" $
      it "0x01020304050607 to << 00 01 02 03 04 05 06 07 >>" $
        let expected = "<< 00 01 02 03 04 05 06 07 >>"
            actual =
              printByteString
                (writeBits
                  (toFunction
                    (toFunctionBuilder
                      (bitBuffer64 64 0x01020304050607) :: FunctionBuilder BitBuilder BitBuilder BitBuilder)))
        in actual `shouldBe` expected
#endif
| sheyll/isobmff-builder | spec/BitRecordsSpec.hs | bsd-3-clause | 18,220 | 5 | 57 | 5,350 | 5,205 | 2,623 | 2,582 | -1 | -1 |
{-# LANGUAGE EmptyDataDecls, NoMonomorphismRestriction #-}
{-# LANGUAGE TypeFamilies #-}
-- | Context-free grammar with quantifiers
-- A different ways to add quantification, via
-- Higher-Order abstract syntax (HOAS).
-- This is a "rational reconstruction" of Montague's
-- general approach of `administrative pronouns', which
-- later gave rise to the Quantifier Raising (QR)
--
module Lambda.QHCFG where
import Lambda.Semantics
import Lambda.CFG -- we shall re-use our earlier work
-- | No longer any need in a new syntactic category QNP
-- We leave out CN as an exercise
--
-- > data CN -- Common noun
--
-- We extend our earlier fragment with quantifiers everyone, someone.
-- In contrast to QCFG.hs, we do not add any new syntactic category,
-- so we don't need to add any rules to our CFG.
--
-- | Quantifiers in HOAS style: each takes its scope as a function from an
-- NP denotation to a sentence denotation, mirroring Montague's
-- administrative-pronoun treatment without a separate QNP category.
class (Symantics repr) => Quantifier repr where
  everyone :: (repr NP -> repr S) -> repr S
  someone :: (repr NP -> repr S) -> repr S
-- | Sample sentences (or, CFG derivations):
-- compare with those in QCFG.hs
-- We stress that the inferred type of sen2-sen4
-- is S. So, these are the derivations of
-- complete sentences.
--
sen2 = everyone (\he -> r1 he (r2 like mary))                 -- everyone likes Mary
sen3 = someone (\she -> r1 john (r2 like she))                -- John likes someone
sen4 = everyone (\he -> someone (\she -> r1 he (r2 like she)))  -- everyone likes someone
-- | We extend our EN interpreter (interpreter of
-- derivations as English phrases) to deal
-- with quantifiers: each quantifier simply fills its own
-- scope hole with the corresponding English pronoun-like word.
--
instance Quantifier EN where
  everyone f = f (EN "everyone")
  someone f = f (EN "someone")
-- | We can now see the English sentences that
-- correspond to the derivations sen2-sen4
-- (fixing the interpretation at 'EN').
sen2_en = sen2 :: EN S
sen3_en = sen3 :: EN S
sen4_en = sen4 :: EN S
-- | We also extend the semantics interpreter:
-- the interpreter of a derivation into a
-- formula of STT, or Lambda-calculus.
-- We reconstruct Montague's ``pronoun trick'': the quantifier binds the
-- lambda-abstracted pronoun in its scope.
instance (Lambda lrepr) => Quantifier (Sem lrepr) where
  everyone f = Sem (app forall (lam (\he -> unSem (f (Sem he)))))
  someone f = Sem (app exists (lam (\she -> unSem (f (Sem she)))))
-- | We can see the semantic yield of our derivations
-- (fixing the interpretation at 'Sem').
sen2_sem = sen2 :: Sem (P C) S -- We encode universal via existential
sen3_sem = sen3 :: Sem C S -- now reduced!
sen4_sem = sen4 :: Sem (P C) S
-- | As in QCFG.hs, sen4_sem demonstrates the linear reading.
-- Now however we can get the inverse reading of the phrase
-- simply by changing the order in which the quantifiers are applied.
--
-- We build the following derivation
sen4' = someone (\she -> everyone (\he -> r1 he (r2 like she)))
-- | which corresponds to the same English phrase
sen4'_en = sen4' :: EN S
-- everyone likes someone
-- | The semantics shows the inverse (wide-scope existential) reading
sen4'_sem = sen4' :: Sem (P C) S
| suhailshergill/liboleg | Lambda/QHCFG.hs | bsd-3-clause | 2,743 | 0 | 17 | 610 | 542 | 301 | 241 | 26 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
-- | Template Haskell splices to create constant-sized vectors and RingBuffer
-- instances for them
module Data.RingBuffer.TGen (
mkVecFromTo
,mkVec
)
where
import Prelude hiding (length)
import Data.RingBuffer.Class
import Language.Haskell.TH
import Language.Haskell.TH.Syntax
import Control.Applicative
import Control.Monad
-- | Generate vector types and their RingBuffer instances for every size
-- in @[start .. stop]@, concatenating the resulting declarations.
mkVecFromTo start stop elname binders prefix =
    concat <$> mapM (mkVec elname binders prefix) [start .. stop]
-- | Generate one constant-size vector type named @T\<prefix\>\<sz\>@ plus
-- its 'El' family instance and 'Initializable'/'RingBuffer' instances.
-- Only zero or one type variable is supported; anything else is a
-- compile-time 'error' in the splice.
mkVec elname binders prefix sz = do
    let nm = mkName $ 'T':prefix ++ show sz
    let tname = case binders of
          [] -> ConT nm
          [PlainTV b1] -> AppT (ConT nm) (VarT b1)
          _ -> error "can't handle types with more than 1 type variable, or non-* kinded types"
    d1 <- decTN sz nm elname binders
    d2 <- mkElInst tname elname
    d3 <- mkInitInst sz nm (return tname)
    d4 <- mkRbInst sz nm (return tname)
    return $ concat [d1,d2,d3,d4]
-- | Declare the data type: one constructor with @sz@ strict fields of the
-- element type.
decTN sz nm elname binders =
    let fields = replicate sz (IsStrict, elname)
    in return [DataD [] nm binders [NormalC nm fields] []]
-- | 'El' type-family instance; the TySynInstD shape changed in
-- template-haskell 2.9 (TySynEqn wrapper), hence the CPP.
#if MIN_VERSION_template_haskell(2,9,0)
mkElInst tname elname = return [TySynInstD ''El $ TySynEqn [tname] (elname) ]
#else
mkElInst tname elname = return [TySynInstD ''El [tname] (elname) ]
#endif
mkInitInst vsz nm tname = let nmStr = show nm in [d| instance Initializable $(tname) where {-# INLINE newInit #-}; newInit el sz | sz >= 0 && sz <= vsz = $(appsE $ conE nm:replicate vsz [| el |]) ; newInit el sz = error ("cannot initialize " ++ nmStr ++ " with size: " ++ show sz) |]
mkRbInst vsz nm tname = [d| instance RingBuffer $(tname) where {-# INLINE length #-}; length = const vsz; {-# INLINE (!) #-}; (!) = $(mkLookup vsz nm); {-# INLINE push #-}; push = $(mkPush vsz nm) |]
-- | Build the indexing lambda: pattern-match all @vsz@ fields and case on
-- the index literal, with an out-of-bounds 'error' fallback.
mkLookup vsz nm = do
    nms <- mapM (newName . ('v':) . show) [1 .. vsz]
    ixNm <- newName "ix"
    let lhs1 = conP nm (map varP nms)
        lhs2 = varP ixNm
        matches = map (\ix -> match (litP $ integerL (fromIntegral ix))
                              (normalB $ varE (nms !! ix) )
                              [] )
                      [0..vsz-1]
                  ++ [match (varP (mkName "ix"))
                      (normalB [| error ("TGen: index out of bounds: " ++ show $(varE $ mkName "ix")) |])
                      [] ]
        rhs = caseE (varE ixNm) matches
    lamE [lhs1,lhs2] rhs
-- | Build the push lambda: the new element becomes the first field and
-- the last field is dropped (via 'init'), shifting everything down one.
mkPush vsz nm = do
    nms <- mapM (newName . ('v':) . show) [1 .. vsz]
    elNm <- newName "el"
    let lhs1 = conP nm (map varP nms)
        lhs2 = varP elNm
        rhs = appsE $ conE nm : varE elNm : map varE (init nms)
    lamE [lhs1, lhs2] rhs
| JohnLato/combobuffer | src/Data/RingBuffer/TGen.hs | bsd-3-clause | 2,684 | 2 | 19 | 696 | 827 | 424 | 403 | 53 | 3 |
{-# LANGUAGE FlexibleContexts #-}
module Music.Parts.Part
(
Part,
_solo,
_subpart,
_instrument,
divide,
containsPart,
smallestPart,
smallestSubpart,
largestPart,
largestSubpart,
distinctFrom,
allDistinct,
solo,
tutti,
)
where
import Control.Applicative
import Control.Lens (toListOf, Lens, Lens', (^.))
import Data.Aeson (ToJSON (..), FromJSON(..))
import qualified Data.Aeson
import Data.Default
-- import Data.Monoid
-- import Control.Lens (set)
import Data.Functor.Adjunction (unzipR)
import qualified Data.List
import Data.Maybe
import Data.Semigroup
import Data.Semigroup.Option.Instances
import Data.Traversable (traverse)
import Data.Typeable
import Text.Numeral.Roman (toRoman)
import Music.Parts.Division
import Music.Parts.Solo
import Music.Parts.Instrument
import Music.Parts.Subpart
-- | A part is a subdivided group of instruments of a given type.
data Part = Part
    Solo       -- ^ Solo vs. tutti
    Instrument -- ^ Type of instrument
    Subpart    -- ^ Subdivision within instrument chorus
    -- TODO Layer
    deriving (Eq, Ord)
-- | Render as e.g. @"Solo Violin I.1"@: an optional @"Solo "@ prefix, the
-- instrument, then the subpart (omitted when it shows as the empty string).
instance Show Part where
  show (Part soloness instr subp) = soloPrefix ++ show instr ++ subpartSuffix
    where
      soloPrefix = case soloness of
        Solo -> "Solo "
        _    -> ""
      subpartSuffix = case show subp of
        "" -> ""
        s  -> ' ' : s
-- Bad instance (?)
-- | Lossy mapping to/from 'Int': only the instrument survives the round
-- trip; 'toEnum' always yields a tutti part with the default subpart.
instance Enum Part where
  toEnum n = Part Tutti (toEnum n) def
  fromEnum (Part _ instr _) = fromEnum instr
-- Semantics: Monoid (Option . First) — keep the first non-default part.
instance Monoid Part where
    mempty = def
    mappend x y
      | x == mempty = y
      | otherwise = x
-- | Delegates to the first-non-default 'mappend' above.
instance Semigroup Part where
    (<>) = mappend
-- | The default part: default solo status, instrument and subpart.
instance Default Part where
    def = Part def def def
-- | Encodes a part as an object with @instrument@, @subpart@ and @solo@
-- keys, each serialised via the respective component's 'ToJSON' instance.
instance ToJSON Part where
    toJSON part =
        Data.Aeson.object
            [ ("instrument", toJSON (part ^. _instrument))
            , ("subpart",    toJSON (part ^. _subpart))
            , ("solo",       toJSON (part ^. _solo))
            ]
-- | Decodes a part from an object with @solo@, @instrument@ and @subpart@
-- keys; any non-object value fails with 'empty'.
instance FromJSON Part where
    parseJSON (Data.Aeson.Object v) =
        Part
            <$> v Data.Aeson..: "solo"
            <*> v Data.Aeson..: "instrument"
            <*> v Data.Aeson..: "subpart"
    parseJSON _ = empty
-- |
-- @a \`containsPart\` b@ holds if the set of players represented by a is an improper subset of the
-- set of players represented by b.
--
-- Parts can only contain parts with the same solo\/tutti status and the
-- same instrument; within those, containment is decided by the subpart
-- (division) hierarchy via 'containsSubpart'.
containsPart :: Part -> Part -> Bool
Part solo1 instr1 subp1 `containsPart` Part solo2 instr2 subp2 =
    solo1 == solo2
    && instr1 == instr2
    && subp1 `containsSubpart` subp2
-- | Return whichever of the two parts has the smaller subpart, as decided
-- by 'smallestSubpart'.  When neither subpart properly contains the other,
-- the first part wins (matching 'smallestSubpart''s arbitrary bias).
smallestPart :: Part -> Part -> Part
smallestPart p1@(Part _ _ sp1) p2@(Part _ _ sp2)
    | winner == sp1 = p1
    -- 'smallestSubpart' always returns one of its two arguments, so this
    -- 'otherwise' covers the @winner == sp2@ case; the original guards
    -- were non-exhaustive, making the function needlessly partial.
    | otherwise     = p2
  where
    winner = smallestSubpart sp1 sp2
-- | Of two subparts, return the one lower in the division hierarchy
-- (i.e. the one that is a proper subpart of the other).  When neither
-- properly contains the other, the first argument is returned.
smallestSubpart :: Subpart -> Subpart -> Subpart
smallestSubpart x y
  | x `isProperSubpartOf` y = x
  | y `isProperSubpartOf` x = y
  -- arbitrarily:
  | otherwise = x
-- | Return whichever of the two parts has the larger subpart, as decided
-- by 'largestSubpart'.  When neither subpart properly contains the other,
-- the first part wins (matching 'largestSubpart''s arbitrary bias).
largestPart :: Part -> Part -> Part
largestPart p1@(Part _ _ sp1) p2@(Part _ _ sp2)
    | winner == sp1 = p1
    -- 'largestSubpart' always returns one of its two arguments, so this
    -- 'otherwise' covers the @winner == sp2@ case; the original guards
    -- were non-exhaustive, making the function needlessly partial.
    | otherwise     = p2
  where
    winner = largestSubpart sp1 sp2
-- | Of two subparts, return the one higher in the division hierarchy
-- (i.e. the one that properly contains the other).  When neither
-- properly contains the other, the first argument is returned.
largestSubpart :: Subpart -> Subpart -> Subpart
largestSubpart x y
  | x `isProperSubpartOf` y = y
  | y `isProperSubpartOf` x = x
  -- arbitrarily:
  | otherwise = x
-- | Returns 'True' iff all given parts are pairwise distinct
-- (as per 'distinctFrom').  The empty list is trivially distinct.
allDistinct :: [Part] -> Bool
allDistinct parts = case parts of
    []     -> True
    p : ps -> all (p `distinctFrom`) ps && allDistinct ps
-- | Returns 'True' iff x and y are completely distinct, i.e. neither contains the other.
--
-- >>> violins `distinctFrom` trumpets
-- True
-- >>> violins `distinctFrom` violins
-- False
-- >>> violins `distinctFrom` violins1
-- False
-- >>> violins1 `distinctFrom` violins
-- False
-- >>> violins1 `distinctFrom` violins2
-- True
--
distinctFrom :: Part -> Part -> Bool
distinctFrom (Part s1 i1 sp1) (Part s2 i2 sp2) = s1 /= s2 || i1 /= i2 || noneSubpart
  where
    -- Parts with equal solo status and instrument are distinct only when
    -- neither subpart lies on the other's path in the division hierarchy.
    -- NOTE(review): this relies on 'isSubpartOf' being reflexive so that
    -- equal parts are never distinct (matches the doctests above) —
    -- confirm against the Subpart module.
    noneSubpart = not (sp1 `isSubpartOf` sp2) && not (sp2 `isSubpartOf` sp1)
-- if equal
-- [pa',pb'] = divide 2 pa
-- | Lens onto the solo\/tutti component of a part.
_solo :: Lens' Part Solo
_solo f (Part s i u) = (\s' -> Part s' i u) <$> f s
-- | Lens onto the subpart (subdivision) component of a part.
_subpart :: Lens' Part Subpart
_subpart f (Part s i u) = (\u' -> Part s i u') <$> f u
-- | Lens onto the instrument component of a part.
_instrument :: Lens' Part Instrument
_instrument f (Part s i u) = (\i' -> Part s i' u) <$> f i
-- | Divide a part into @n@ subparts, appending each generated division
-- to the existing subpart path.
divide :: Int -> Part -> [Part]
divide n (Part s i subp) =
    [ Part s i (subp <> Subpart [d]) | d <- divisions n ]
-- | Construct a solo part for the given instrument with the default
-- (undivided) subpart.  Type signature added; the definition previously
-- relied on inference.
solo :: Instrument -> Part
solo instr = Part Solo instr def
-- | Construct a tutti part for the given instrument with the default
-- (undivided) subpart.  Type signature added; the definition previously
-- relied on inference.
tutti :: Instrument -> Part
tutti instr = Part Tutti instr def
| music-suite/music-parts | src/Music/Parts/Part.hs | bsd-3-clause | 4,759 | 0 | 13 | 1,275 | 1,430 | 772 | 658 | 109 | 1 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE PolyKinds #-}
module Data.Vinyl.Aeson (recordFromJSON, recordToJSON, KnownSymbols) where
import Data.Aeson
import Data.Aeson.Types
import Data.Proxy
import Data.Vinyl
import Data.Vinyl.Functor
import Data.Vinyl.TypeLevel
import Data.Text (pack)
import GHC.TypeLits
import GHC.Exts (Constraint)
-- | Constraint that every symbol in the type-level list is a
-- 'KnownSymbol', i.e. its 'String' value is recoverable at runtime
-- via 'symbolVal'.  Defined by structural recursion on the list.
type family KnownSymbols (symbols :: [Symbol]) :: Constraint where
  KnownSymbols '[] = ()
  KnownSymbols (symbol ': symbols) = (KnownSymbol symbol, KnownSymbols symbols)
-- | Reified 'KnownSymbol' evidence for a single symbol: pattern-matching
-- on the constructor brings the @KnownSymbol s@ dictionary into scope.
data KnownSymbolDict (s :: Symbol) where
  KnownSymbolDict :: (KnownSymbol s) => Proxy s -> KnownSymbolDict s
-- | Reified evidence of an arbitrary constraint @c x@: pattern-matching
-- on the constructor brings the @c x@ dictionary into scope.
data ProxyDict (c :: * -> Constraint) x where
  ProxyDict :: (c x) => Proxy x -> ProxyDict c x
-- | A record with a placeholder ('Proxy') at every field, used as the
-- skeleton over which constraint dictionaries are reified.
proxyRecord :: (RecApplicative fields) => Rec (Compose Proxy f) fields
proxyRecord = rpure (Compose Proxy)
-- | Like 'proxyRecord', but for records indexed directly by field-name
-- symbols rather than by an interpretation functor.
fieldProxyRecord :: (RecApplicative fields) => Rec Proxy (fields :: [Symbol])
fieldProxyRecord = rpure Proxy
-- | Distribute the record-wide @KnownSymbols fields@ constraint into a
-- per-field 'KnownSymbolDict', so each field's symbol can be inspected
-- individually.
reifyKnownSymbols :: (KnownSymbols fields) => Rec Proxy (fields :: [Symbol]) -> Rec KnownSymbolDict fields
reifyKnownSymbols rec = case rec of
  RNil -> RNil
  -- The tail is named 'rest': the original binding shadowed the
  -- top-level 'proxyRecord' definition, which was misleading.
  (proxy :& rest) -> KnownSymbolDict proxy :& reifyKnownSymbols rest
-- | Distribute the record-wide @RecAll f fields c@ constraint into a
-- per-field 'ProxyDict', capturing the @c@ dictionary at every field.
reifyProxyDict :: (RecAll f fields c) => Rec (Compose Proxy f) fields -> Rec (Compose (ProxyDict c) f) fields
reifyProxyDict rec = case rec of
  RNil -> RNil
  -- The tail is named 'rest': the original binding shadowed the
  -- top-level 'proxyRecord' definition, which was misleading.
  ((Compose proxy) :& rest) -> Compose (ProxyDict proxy) :& reifyProxyDict rest
-- | Recover the runtime 'String' for a field symbol from its reified
-- 'KnownSymbol' evidence.
knownSymbolDictToString :: KnownSymbolDict s -> String
knownSymbolDictToString (KnownSymbolDict proxy) = symbolVal proxy
-- | A record carrying each field's name as a 'String', derived from the
-- type-level symbols via 'reifyKnownSymbols'.
fieldNameRecord :: (KnownSymbols fields, RecApplicative fields) => Rec (Const String) fields
fieldNameRecord = rmap (Const . knownSymbolDictToString) $ reifyKnownSymbols fieldProxyRecord
-- | A record carrying, at each field, the reified 'FromJSON' dictionary
-- needed to parse that field's value.
fieldParserRecord :: (RecAll f fields FromJSON, RecApplicative fields) => Rec (Compose (ProxyDict FromJSON) f) fields
fieldParserRecord = reifyProxyDict proxyRecord
-- | Run 'parseJSON' at the type pinned down by the proxy argument; the
-- proxy is used only to fix the result type.
parseWithProxy :: (FromJSON a) => Proxy a -> Value -> Parser a
parseWithProxy _ = parseJSON
-- | Combine parallel records into a record of pairs
--
-- Implemented with 'rapply': each field of the first record is wrapped
-- into a 'Lift'ed pairing function, which is then applied pointwise to
-- the second record.
rzip :: Rec f fields -> Rec g fields -> Rec (Lift (,) f g) fields
rzip f g = rapply (rmap (\x -> Lift (\y -> Lift (x, y))) f) g
-- | Parse a record from a JSON object, where the fields give object field names and field types give parsers
--
-- Works by zipping the per-field 'FromJSON' dictionaries
-- ('fieldParserRecord') with the per-field names ('fieldNameRecord'),
-- then traversing the result: each field looks itself up in the object
-- by name and parses the found value at its own type.
recordFromJSON :: (RecAll f fields FromJSON, KnownSymbols fields, RecApplicative fields) => Value -> Parser (Rec f fields)
recordFromJSON value =
  rtraverse
    (\(Lift (Compose proxyDict, Const name)) ->
       -- Pattern-matching on ProxyDict brings the field's FromJSON
       -- dictionary into scope for parseWithProxy.
       case proxyDict of
         (ProxyDict proxy) ->
           withObject
             "Records must be encoded as Objects"
             (\object -> object .: pack name >>= parseWithProxy proxy)
             value)
    $ rzip fieldParserRecord fieldNameRecord
-- | Serialize a record to a JSON object, where the fields give object field names and field types give serializers
--
-- Works by zipping the reified per-field 'ToJSON' dictionaries with the
-- per-field names, rendering each field as a @name .= value@ pair, and
-- collecting the pairs into an 'object'.
recordToJSON :: (RecAll f fields ToJSON, KnownSymbols fields, RecApplicative fields) => Rec f fields -> Value
recordToJSON rec =
  object
    $ recordToList
    $ rmap (\(Lift (Compose dict, Const name)) ->
        -- Pattern-matching on the dictionary brings the field's ToJSON
        -- instance into scope for the '.=' call.
        case dict of
          Dict x -> Const $ pack name .= toJSON x)
    $ rzip (reifyConstraint (Proxy :: Proxy ToJSON) rec) fieldNameRecord
| plow-technologies/template-service | master/src/Data/Vinyl/Aeson.hs | bsd-3-clause | 3,378 | 0 | 17 | 628 | 1,025 | 540 | 485 | -1 | -1 |
-- |Partial binding to CoreFoundation.
-- At the moment only CFString is supported.
{-# LANGUAGE ForeignFunctionInterface, EmptyDataDecls #-}
module System.MacOSX.CoreFoundation
( -- * types
UInt8
, UInt16
, UInt32
, UInt64
, SInt8
, SInt16
, SInt32
, SInt64
, OSErr
, OSStatus
, UniChar
, CFIndex
, ItemCount
, ByteCount
-- , CFString
, CFDataRef
, CFStringRef
, CFAllocatorRef
, Boolean
, Float32
, Float64
-- * CFString
, newCFString
, releaseCFString
, peekCFString
, withCFString
-- * OSStatus
, osStatusString
, osStatusError
) where
import Control.Exception (bracket)
import Control.Monad
import Data.Bits
import Data.Int
import Data.Word
import Foreign
import Foreign.C
import Foreign.Marshal
-- Fixed-width integer aliases matching the CoreFoundation C typedefs.
type UInt8 = Word8
type UInt16 = Word16
type UInt32 = Word32
type UInt64 = Word64
type SInt8 = Int8
type SInt16 = Int16
type SInt32 = Int32
type SInt64 = Int64
-- Carbon/CoreFoundation status and scalar types.
type OSErr = SInt16
type OSStatus = SInt32
type Boolean = Bool
type Float32 = Float
type Float64 = Double
-- NOTE(review): CoreFoundation's UniChar is a 16-bit UTF-16 code unit,
-- while Haskell's 'Char' marshals as a 32-bit value — confirm that this
-- alias is the intended representation for the FFI calls below.
type UniChar = Char
type CFIndex = SInt32
type ItemCount = UInt32
type ByteCount = UInt32
-- Opaque CoreFoundation object types; only pointers to them are used.
data CFData
data CFString
data CFAllocator
type CFDataRef = Ptr CFData
type CFStringRef = Ptr CFString
type CFAllocatorRef = Ptr CFAllocator
-- | The default CoreFoundation allocator: the C API documents passing
-- NULL as equivalent to @kCFAllocatorDefault@.  Type signature added to
-- pin the otherwise fully polymorphic 'nullPtr'.
kCFAllocatorDefault :: CFAllocatorRef
kCFAllocatorDefault = nullPtr
----- error "handling" :) -----
-- | Render an 'OSStatus' code as a human-readable message.
osStatusString :: OSStatus -> String
osStatusString code = "OSStatus = " ++ show code
-- | Abort the current IO action with an error describing the given
-- 'OSStatus' code.
osStatusError :: OSStatus -> IO a
osStatusError = fail . osStatusString
----- Base -----
-- Releases any CoreFoundation object (decrements its retain count).
foreign import ccall unsafe "CFBase.h CFRelease"
  c_CFRelease :: Ptr a -> IO ()
----- CFStrings -----
-- Number of UTF-16 code units in the string.
foreign import ccall unsafe "CFString.h CFStringGetLength"
  c_CFStringGetLength :: CFStringRef -> IO CFIndex
-- Direct pointer to the internal character buffer; may return NULL.
foreign import ccall unsafe "CFString.h CFStringGetCharactersPtr"
  c_CFStringGetCharactersPtr :: CFStringRef -> IO (Ptr UniChar)
-- Per-character access; works even when the buffer pointer is NULL.
foreign import ccall unsafe "CFString.h CFStringGetCharacterAtIndex"
  c_CFStringGetCharacterAtIndex :: CFStringRef -> CFIndex -> IO UniChar
-- Creates a new immutable CFString by copying the given character buffer.
foreign import ccall unsafe "CFString.h CFStringCreateWithCharacters"
  c_CFStringCreateWithCharacters :: CFAllocatorRef -> Ptr UniChar -> CFIndex -> IO CFStringRef
-- | Manually releasing a CFString.
--
-- Thin wrapper over 'c_CFRelease'; the string must not be used after
-- this call.
releaseCFString :: CFStringRef -> IO ()
releaseCFString = c_CFRelease
-- | Peeks a CFString.
--
-- Uses the fast path ('c_CFStringGetCharactersPtr', direct buffer
-- access) when CoreFoundation exposes its internal buffer, and falls
-- back to per-character access when that call returns NULL.
--
-- NOTE(review): reading the buffer with 'peekElemOff' at type 'UniChar'
-- (= 'Char') assumes the element width matches CoreFoundation's 16-bit
-- UniChar — verify the Storable marshalling and surrogate handling.
peekCFString :: CFStringRef -> IO String
peekCFString cfstring = do
  n <- c_CFStringGetLength cfstring
  p <- c_CFStringGetCharactersPtr cfstring
  if p /= nullPtr
    then forM [0..n-1] $ \i -> peekElemOff p (fromIntegral i)
    else forM [0..n-1] $ \i -> c_CFStringGetCharacterAtIndex cfstring i
-- | Creates a new CFString. You have to release it manually
-- (see 'releaseCFString').
--
-- BUG FIX: the characters are now actually copied into the temporary
-- buffer with 'pokeArray' before it is handed to CoreFoundation.
-- Previously the buffer was allocated but never written, so the
-- CFString was built from uninitialised memory.
newCFString :: String -> IO CFStringRef
newCFString string = do
  let n = length string
  allocaArray n $ \p -> do
    pokeArray p string
    c_CFStringCreateWithCharacters kCFAllocatorDefault p (fromIntegral n)
-- | Safe passing of a CFString to the OS (releases it afterwards).
--
-- Uses 'bracket' so the CFString is released even when the action
-- throws; the previous sequential version leaked the string on
-- exceptions.
withCFString :: String -> (CFStringRef -> IO a) -> IO a
withCFString string = bracket (newCFString string) releaseCFString
| chpatrick/hmidi | System/MacOSX/CoreFoundation.hs | bsd-3-clause | 3,155 | 0 | 12 | 631 | 718 | 399 | 319 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module Cauterize.Crucible.PrototypesSpec
( spec
) where
import Test.Hspec
import Cauterize.Crucible.Prototypes
-- | Hspec test tree for the prototype-variant parsers, written
-- table-driven so each case is a plain (input, expected) pair.
spec :: Spec
spec = do
  describe "parsePrototypeVariant" $
    it "is able to parse all prototypes" $
      mapM_
        (\(input, expected) -> parsePrototypeVariant input `shouldBe` Just expected)
        [ ("synonym",     PVSynonym)
        , ("array",       PVArray)
        , ("vector",      PVVector)
        , ("record",      PVRecord)
        , ("combination", PVCombination)
        , ("union",       PVUnion)
        ]
  describe "parsePrototypeVariants" $
    it "is able to parse comma-separated lists of prototypes" $
      mapM_
        (\(input, expected) -> parsePrototypeVariants input `shouldBe` Just expected)
        [ ("array,vector",       [PVArray, PVVector])
        , ("array,union",        [PVArray, PVUnion])
        , ("array,union,vector", [PVArray, PVUnion, PVVector])
        , ( "synonym,array,vector,record,combination,union"
          , [PVSynonym, PVArray, PVVector, PVRecord, PVCombination, PVUnion]
          )
        ]
| cauterize-tools/crucible | test/Cauterize/Crucible/PrototypesSpec.hs | bsd-3-clause | 1,158 | 0 | 12 | 187 | 257 | 132 | 125 | 22 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DefaultSignatures #-}
{-# LANGUAGE EmptyDataDecls #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
-- TODO: Drop this when we remove support for Data.Attoparsec.Number
{-# OPTIONS_GHC -fno-warn-deprecations #-}
module Data.Aeson.Types.ToJSON
(
-- * Core JSON classes
ToJSON(..)
-- * Liftings to unary and binary type constructors
, ToJSON1(..)
, toJSON1
, toEncoding1
, ToJSON2(..)
, toJSON2
, toEncoding2
-- * Generic JSON classes
, GToJSON'(..)
, ToArgs(..)
, genericToJSON
, genericToEncoding
, genericLiftToJSON
, genericLiftToEncoding
-- * Classes and types for map keys
, ToJSONKey(..)
, ToJSONKeyFunction(..)
, toJSONKeyText
, toJSONKeyKey
, contramapToJSONKeyFunction
, GToJSONKey()
, genericToJSONKey
-- * Object key-value pairs
, KeyValue(..)
, KeyValuePair(..)
, FromPairs(..)
-- * Functions needed for documentation
-- * Encoding functions
, listEncoding
, listValue
) where
import Prelude.Compat
import Control.Applicative (Const(..))
import Control.Monad.ST (ST)
import Data.Aeson.Encoding (Encoding, Encoding', Series, dict, emptyArray_)
import Data.Aeson.Encoding.Internal ((>*<))
import Data.Aeson.Internal.Functions (mapKeyVal, mapKeyValO)
import Data.Aeson.Types.Generic (AllNullary, False, IsRecord, One, ProductSize, Tagged2(..), True, Zero, productSize)
import Data.Aeson.Types.Internal
import qualified Data.Aeson.Key as Key
import qualified Data.Aeson.KeyMap as KM
import Data.Attoparsec.Number (Number(..))
import Data.Bits (unsafeShiftR)
import Data.DList (DList)
import Data.Fixed (Fixed, HasResolution, Nano)
import Data.Foldable (toList)
import Data.Functor.Compose (Compose(..))
import Data.Functor.Contravariant (Contravariant (..))
import Data.Functor.Identity (Identity(..))
import Data.Functor.Product (Product(..))
import Data.Functor.Sum (Sum(..))
import Data.Functor.These (These1 (..))
import Data.Int (Int16, Int32, Int64, Int8)
import Data.List (intersperse)
import Data.List.NonEmpty (NonEmpty(..))
import Data.Proxy (Proxy(..))
import Data.Ratio (Ratio, denominator, numerator)
import Data.Scientific (Scientific)
import Data.Tagged (Tagged(..))
import Data.Text (Text, pack)
import Data.These (These (..))
import Data.Time (Day, DiffTime, LocalTime, NominalDiffTime, TimeOfDay, UTCTime, ZonedTime)
import Data.Time.Calendar.Month.Compat (Month)
import Data.Time.Calendar.Quarter.Compat (Quarter, QuarterOfYear (..))
import Data.Time.Calendar.Compat (CalendarDiffDays (..), DayOfWeek (..))
import Data.Time.LocalTime.Compat (CalendarDiffTime (..))
import Data.Time.Clock.System.Compat (SystemTime (..))
import Data.Time.Format.Compat (FormatTime, formatTime, defaultTimeLocale)
import Data.Tuple.Solo (Solo (..), getSolo)
import Data.Vector (Vector)
import Data.Version (Version, showVersion)
import Data.Void (Void, absurd)
import Data.Word (Word16, Word32, Word64, Word8)
import Foreign.Storable (Storable)
import Foreign.C.Types (CTime (..))
import GHC.Generics
import Numeric.Natural (Natural)
import qualified Data.Aeson.Encoding as E
import qualified Data.Aeson.Encoding.Internal as E (InArray, comma, econcat, retagEncoding, key)
import qualified Data.ByteString.Lazy as L
import qualified Data.DList as DList
#if MIN_VERSION_dlist(1,0,0)
import qualified Data.DList.DNonEmpty as DNE
#endif
import qualified Data.Fix as F
import qualified Data.HashMap.Strict as H
import qualified Data.HashSet as HashSet
import qualified Data.IntMap as IntMap
import qualified Data.IntSet as IntSet
import qualified Data.List.NonEmpty as NE
import qualified Data.Map as M
import qualified Data.Monoid as Monoid
import qualified Data.Scientific as Scientific
import qualified Data.Semigroup as Semigroup
import qualified Data.Sequence as Seq
import qualified Data.Set as Set
import qualified Data.Strict as S
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.Text.Lazy as LT
import qualified Data.Text.Short as ST
import qualified Data.Tree as Tree
import qualified Data.UUID.Types as UUID
import qualified Data.Vector as V
import qualified Data.Vector.Generic as VG
import qualified Data.Vector.Mutable as VM
import qualified Data.Vector.Primitive as VP
import qualified Data.Vector.Storable as VS
import qualified Data.Vector.Unboxed as VU
import qualified Data.Aeson.Encoding.Builder as EB
import qualified Data.ByteString.Builder as B
import qualified GHC.Exts as Exts
import qualified Data.Primitive.Array as PM
import qualified Data.Primitive.SmallArray as PM
import qualified Data.Primitive.Types as PM
import qualified Data.Primitive.PrimArray as PM
-- | Encode a pair as JSON given encoders for each component, delegating
-- to the tuple's 'liftToJSON2' with 'listValue'-derived list encoders.
toJSONPair :: (a -> Value) -> (b -> Value) -> (a, b) -> Value
toJSONPair a b = liftToJSON2 a (listValue a) b (listValue b)
-- | Encode a 'RealFloat' value, mapping IEEE special values explicitly:
-- NaN becomes 'Null', and the infinities become the JSON strings
-- @"+inf"@ \/ @"-inf"@; finite values go through
-- 'Scientific.fromFloatDigits'.
--
-- NOTE(review): rendering infinities as strings is asymmetric with NaN
-- (which becomes 'Null') and is not round-trippable as a number —
-- confirm that the corresponding decoder accepts this representation.
realFloatToJSON :: RealFloat a => a -> Value
realFloatToJSON d
    | isNaN d = Null
    | isInfinite d = if d > 0 then "+inf" else "-inf"
    | otherwise = Number $ Scientific.fromFloatDigits d
-------------------------------------------------------------------------------
-- Generics
-------------------------------------------------------------------------------
-- | Class of generic representation types that can be converted to
-- JSON.
--
-- The @enc@ parameter selects the target representation ('Value' or
-- 'Encoding'), and @arity@ ('Zero' or 'One') selects between
-- 'ToJSON'-style and 'ToJSON1'-style derivation.
class GToJSON' enc arity f where
    -- | This method (applied to 'defaultOptions') is used as the
    -- default generic implementation of 'toJSON'
    -- (with @enc ~ 'Value'@ and @arity ~ 'Zero'@)
    -- and 'liftToJSON' (if the @arity@ is 'One').
    --
    -- It also provides a generic implementation of 'toEncoding'
    -- (with @enc ~ 'Encoding'@ and @arity ~ 'Zero'@)
    -- and 'liftToEncoding' (if the @arity@ is 'One').
    gToJSON :: Options -> ToArgs enc arity a -> f a -> enc
-- | A 'ToArgs' value either stores nothing (for 'ToJSON') or it stores the two
-- function arguments that encode occurrences of the type parameter (for
-- 'ToJSON1').
data ToArgs res arity a where
    -- | No extra encoders: used when deriving at arity 'Zero'.
    NoToArgs :: ToArgs res Zero a
    -- | Encoders for one occurrence and for a list of occurrences of the
    -- last type parameter: used when deriving at arity 'One'.
    To1Args :: (a -> res) -> ([a] -> res) -> ToArgs res One a
-- | A configurable generic 'Value' creator.  Applied to
-- 'defaultOptions', this is the default 'toJSON' for any 'Generic'
-- instance.
genericToJSON :: (Generic a, GToJSON' Value Zero (Rep a))
              => Options -> a -> Value
genericToJSON opts x = gToJSON opts NoToArgs (from x)
-- | A configurable generic 'Value' creator for unary type constructors.
-- Applied to 'defaultOptions', this is the default 'liftToJSON' for any
-- 'Generic1' instance.
genericLiftToJSON :: (Generic1 f, GToJSON' Value One (Rep1 f))
                  => Options -> (a -> Value) -> ([a] -> Value)
                  -> f a -> Value
genericLiftToJSON opts tj tjl x = gToJSON opts (To1Args tj tjl) (from1 x)
-- | A configurable generic 'Encoding' producer.  Applied to
-- 'defaultOptions', this is the default 'toEncoding' for any 'Generic'
-- instance.
genericToEncoding :: (Generic a, GToJSON' Encoding Zero (Rep a))
                  => Options -> a -> Encoding
genericToEncoding opts x = gToJSON opts NoToArgs (from x)
-- | A configurable generic 'Encoding' producer for unary type
-- constructors.  Applied to 'defaultOptions', this is the default
-- 'liftToEncoding' for any 'Generic1' instance.
genericLiftToEncoding :: (Generic1 f, GToJSON' Encoding One (Rep1 f))
                      => Options -> (a -> Encoding) -> ([a] -> Encoding)
                      -> f a -> Encoding
genericLiftToEncoding opts te tel x = gToJSON opts (To1Args te tel) (from1 x)
-------------------------------------------------------------------------------
-- Class
-------------------------------------------------------------------------------
-- | A type that can be converted to JSON.
--
-- Instances in general /must/ specify 'toJSON' and /should/ (but don't need
-- to) specify 'toEncoding'.
--
-- An example type and instance:
--
-- @
-- \-- Allow ourselves to write 'Text' literals.
-- {-\# LANGUAGE OverloadedStrings #-}
--
-- data Coord = Coord { x :: Double, y :: Double }
--
-- instance 'ToJSON' Coord where
-- 'toJSON' (Coord x y) = 'object' [\"x\" '.=' x, \"y\" '.=' y]
--
-- 'toEncoding' (Coord x y) = 'pairs' (\"x\" '.=' x '<>' \"y\" '.=' y)
-- @
--
-- Instead of manually writing your 'ToJSON' instance, there are two options
-- to do it automatically:
--
-- * "Data.Aeson.TH" provides Template Haskell functions which will derive an
-- instance at compile time. The generated instance is optimized for your type
-- so it will probably be more efficient than the following option.
--
-- * The compiler can provide a default generic implementation for
-- 'toJSON'.
--
-- To use the second, simply add a @deriving 'Generic'@ clause to your
-- datatype and declare a 'ToJSON' instance. If you require nothing other than
-- 'defaultOptions', it is sufficient to write (and this is the only
-- alternative where the default 'toJSON' implementation is sufficient):
--
-- @
-- {-\# LANGUAGE DeriveGeneric \#-}
--
-- import "GHC.Generics"
--
-- data Coord = Coord { x :: Double, y :: Double } deriving 'Generic'
--
-- instance 'ToJSON' Coord where
-- 'toEncoding' = 'genericToEncoding' 'defaultOptions'
-- @
--
-- If on the other hand you wish to customize the generic decoding, you have
-- to implement both methods:
--
-- @
-- customOptions = 'defaultOptions'
-- { 'fieldLabelModifier' = 'map' 'Data.Char.toUpper'
-- }
--
-- instance 'ToJSON' Coord where
-- 'toJSON' = 'genericToJSON' customOptions
-- 'toEncoding' = 'genericToEncoding' customOptions
-- @
--
-- Previous versions of this library only had the 'toJSON' method. Adding
-- 'toEncoding' had two reasons:
--
-- 1. toEncoding is more efficient for the common case that the output of
-- 'toJSON' is directly serialized to a @ByteString@.
-- Further, expressing either method in terms of the other would be
-- non-optimal.
--
-- 2. The choice of defaults allows a smooth transition for existing users:
-- Existing instances that do not define 'toEncoding' still
-- compile and have the correct semantics. This is ensured by making
-- the default implementation of 'toEncoding' use 'toJSON'. This produces
-- correct results, but since it performs an intermediate conversion to a
-- 'Value', it will be less efficient than directly emitting an 'Encoding'.
-- (this also means that specifying nothing more than
-- @instance ToJSON Coord@ would be sufficient as a generically decoding
-- instance, but there probably exists no good reason to not specify
-- 'toEncoding' in new instances.)
class ToJSON a where
    -- | Convert a Haskell value to a JSON-friendly intermediate type.
    toJSON :: a -> Value
    default toJSON :: (Generic a, GToJSON' Value Zero (Rep a)) => a -> Value
    toJSON = genericToJSON defaultOptions
    -- | Encode a Haskell value as JSON.
    --
    -- The default implementation of this method creates an
    -- intermediate 'Value' using 'toJSON'. This provides
    -- source-level compatibility for people upgrading from older
    -- versions of this library, but obviously offers no performance
    -- advantage.
    --
    -- To benefit from direct encoding, you /must/ provide an
    -- implementation for this method. The easiest way to do so is by
    -- having your types implement 'Generic' using the @DeriveGeneric@
    -- extension, and then have GHC generate a method body as follows.
    --
    -- @
    -- instance 'ToJSON' Coord where
    --     'toEncoding' = 'genericToEncoding' 'defaultOptions'
    -- @
    toEncoding :: a -> Encoding
    toEncoding = E.value . toJSON
    -- | Encode a list of values.  Exists (like 'showList') so that
    -- instances can give lists of a type special treatment.
    toJSONList :: [a] -> Value
    toJSONList = listValue toJSON
    -- | Direct-encoding analogue of 'toJSONList'.
    toEncodingList :: [a] -> Encoding
    toEncodingList = listEncoding toEncoding
-------------------------------------------------------------------------------
-- Object key-value pairs
-------------------------------------------------------------------------------
-- | A key-value pair for encoding a JSON object.
class KeyValue kv where
    (.=) :: ToJSON v => Key -> v -> kv
infixr 8 .=
-- | Produces a 'Series' entry for direct 'Encoding' construction
-- (used with 'pairs').
instance KeyValue Series where
    name .= value = E.pair name (toEncoding value)
    {-# INLINE (.=) #-}
-- | Produces a plain @(Key, Value)@ pair (used with 'object').
instance KeyValue Pair where
    name .= value = (name, toJSON value)
    {-# INLINE (.=) #-}
-- | Constructs a singleton 'KM.KeyMap'. For calling functions that
--   demand an 'Object' for constructing objects. To be used in
--   conjunction with 'mconcat'. Prefer to use 'object' where possible.
instance KeyValue Object where
    name .= value = KM.singleton name (toJSON value)
    {-# INLINE (.=) #-}
-------------------------------------------------------------------------------
-- Classes and types for map keys
-------------------------------------------------------------------------------
-- | Typeclass for types that can be used as the key of a map-like container
-- (like 'Map' or 'HashMap'). For example, since 'Text' has a 'ToJSONKey'
-- instance and 'Char' has a 'ToJSON' instance, we can encode a value of
-- type 'Map' 'Text' 'Char':
--
-- >>> LBC8.putStrLn $ encode $ Map.fromList [("foo" :: Text, 'a')]
-- {"foo":"a"}
--
-- Since 'Int' also has a 'ToJSONKey' instance, we can similarly write:
--
-- >>> LBC8.putStrLn $ encode $ Map.fromList [(5 :: Int, 'a')]
-- {"5":"a"}
--
-- JSON documents only accept strings as object keys. For any type
-- from @base@ that has a natural textual representation, it can be
-- expected that its 'ToJSONKey' instance will choose that representation.
--
-- For data types that lack a natural textual representation, an alternative
-- is provided. The map-like container is represented as a JSON array
-- instead of a JSON object. Each value in the array is an array with
-- exactly two values. The first is the key and the second is the value.
--
-- For example, values of type '[Text]' cannot be encoded to a
-- string, so a 'Map' with keys of type '[Text]' is encoded as follows:
--
-- >>> LBC8.putStrLn $ encode $ Map.fromList [(["foo","bar","baz" :: Text], 'a')]
-- [[["foo","bar","baz"],"a"]]
--
-- The default implementation of 'ToJSONKey' chooses this method of
-- encoding a key, using the 'ToJSON' instance of the type.
--
-- To use your own data type as the key in a map, all that is needed
-- is to write a 'ToJSONKey' (and possibly a 'FromJSONKey') instance
-- for it. If the type cannot be trivially converted to and from 'Text',
-- it is recommended that 'ToJSONKeyValue' is used. Since the default
-- implementations of the typeclass methods can build this from a
-- 'ToJSON' instance, there is nothing that needs to be written:
--
-- > data Foo = Foo { fooAge :: Int, fooName :: Text }
-- > deriving (Eq,Ord,Generic)
-- > instance ToJSON Foo
-- > instance ToJSONKey Foo
--
-- That's it. We can now write:
--
-- >>> let m = Map.fromList [(Foo 4 "bar",'a'),(Foo 6 "arg",'b')]
-- >>> LBC8.putStrLn $ encode m
-- [[{"fooName":"bar","fooAge":4},"a"],[{"fooName":"arg","fooAge":6},"b"]]
--
-- The next case to consider is if we have a type that is a
-- newtype wrapper around 'Text'. The recommended approach is to use
-- generalized newtype deriving:
--
-- > newtype RecordId = RecordId { getRecordId :: Text }
-- > deriving (Eq,Ord,ToJSONKey)
--
-- Then we may write:
--
-- >>> LBC8.putStrLn $ encode $ Map.fromList [(RecordId "abc",'a')]
-- {"abc":"a"}
--
-- Simple sum types are a final case worth considering. Suppose we have:
--
-- > data Color = Red | Green | Blue
-- > deriving (Show,Read,Eq,Ord)
--
-- It is possible to get the 'ToJSONKey' instance for free as we did
-- with 'Foo'. However, in this case, we have a natural way to go to
-- and from 'Text' that does not require any escape sequences. So
-- 'ToJSONKeyText' can be used instead of 'ToJSONKeyValue' to encode maps
-- as objects instead of arrays of pairs. This instance may be
-- implemented using generics as follows:
--
-- @
-- instance 'ToJSONKey' Color where
-- 'toJSONKey' = 'genericToJSONKey' 'defaultJSONKeyOptions'
-- @
--
-- === __Low-level implementations__
--
-- The 'Show' instance can be used to help write 'ToJSONKey':
--
-- > instance ToJSONKey Color where
-- > toJSONKey = ToJSONKeyText f g
-- > where f = Text.pack . show
-- > g = text . Text.pack . show
-- > -- text function is from Data.Aeson.Encoding
--
-- The situation of needing to turning function @a -> Text@ into
-- a 'ToJSONKeyFunction' is common enough that a special combinator
-- is provided for it. The above instance can be rewritten as:
--
-- > instance ToJSONKey Color where
-- > toJSONKey = toJSONKeyText (Text.pack . show)
--
-- The performance of the above instance can be improved by
-- not using 'String' as an intermediate step when converting to
-- 'Text'. One option for improving performance would be to use
-- template haskell machinery from the @text-show@ package. However,
-- even with the approach, the 'Encoding' (a wrapper around a bytestring
-- builder) is generated by encoding the 'Text' to a 'ByteString',
-- an intermediate step that could be avoided. The fastest possible
-- implementation would be:
--
-- > -- Assuming that OverloadedStrings is enabled
-- > instance ToJSONKey Color where
-- > toJSONKey = ToJSONKeyText f g
-- > where f x = case x of {Red -> "Red";Green ->"Green";Blue -> "Blue"}
-- > g x = case x of {Red -> text "Red";Green -> text "Green";Blue -> text "Blue"}
-- > -- text function is from Data.Aeson.Encoding
--
-- This works because GHC can lift the encoded values out of the case
-- statements, which means that they are only evaluated once. This
-- approach should only be used when there is a serious need to
-- maximize performance.
class ToJSONKey a where
    -- | Strategy for rendering the key for a map-like container.
    --
    -- The default uses the type's 'ToJSON' instance
    -- ('ToJSONKeyValue'), which makes maps encode as arrays of
    -- key\/value pairs rather than as objects.
    toJSONKey :: ToJSONKeyFunction a
    default toJSONKey :: ToJSON a => ToJSONKeyFunction a
    toJSONKey = ToJSONKeyValue toJSON toEncoding
    -- | This is similar in spirit to the 'showsList' method of 'Show'.
    -- It makes it possible to give 'String' keys special treatment
    -- without using @OverlappingInstances@. End users should always
    -- be able to use the default implementation of this method.
    toJSONKeyList :: ToJSONKeyFunction [a]
    default toJSONKeyList :: ToJSON a => ToJSONKeyFunction [a]
    toJSONKeyList = ToJSONKeyValue toJSON toEncoding
-- | Strategy for encoding a map key: either as text (so maps become
-- JSON objects) or as an arbitrary value (so maps become arrays of
-- pairs).  Each constructor carries both a 'Value'-level and an
-- 'Encoding'-level renderer.
data ToJSONKeyFunction a
    = ToJSONKeyText !(a -> Key) !(a -> Encoding' Key)
      -- ^ key is encoded to string, produces object
    | ToJSONKeyValue !(a -> Value) !(a -> Encoding)
      -- ^ key is encoded to value, produces array
-- | Helper for creating textual keys.
--
-- @
-- instance 'ToJSONKey' MyKey where
--     'toJSONKey' = 'toJSONKeyText' myKeyToText
--       where
--         myKeyToText = Text.pack . show -- or showt from text-show
-- @
toJSONKeyText :: (a -> Text) -> ToJSONKeyFunction a
toJSONKeyText f = toJSONKeyKey (Key.fromText . f)
-- | Build a textual 'ToJSONKeyFunction' directly from a 'Key'-producing
-- function; the 'Encoding' renderer is derived via 'E.key'.
--
-- @since 2.0.0.0
toJSONKeyKey :: (a -> Key) -> ToJSONKeyFunction a
toJSONKeyKey f = ToJSONKeyText f (E.key . f)
-- | TODO: should this be exported?
--
-- Build a textual 'ToJSONKeyFunction' from an 'Encoding'-level renderer
-- alone.  The 'Key'-level side is recovered by rendering the encoding to
-- bytes and stripping the surrounding quote characters.
toJSONKeyTextEnc :: (a -> Encoding' Key) -> ToJSONKeyFunction a
toJSONKeyTextEnc e = ToJSONKeyText tot e
 where
    -- TODO: dropAround is also used in stringEncoding, which is unfortunate atm
    -- NOTE(review): decodeLatin1 on a rendered JSON string assumes any
    -- non-ASCII content is escaped in the encoding — confirm.
    tot = Key.fromText
        . T.dropAround (== '"')
        . T.decodeLatin1
        . L.toStrict
        . E.encodingToLazyByteString
        . e
-- | 'contramap' re-keys a 'ToJSONKeyFunction' through a projection;
-- see 'contramapToJSONKeyFunction'.
instance Contravariant ToJSONKeyFunction where
    contramap = contramapToJSONKeyFunction
-- | Contravariant map, as 'ToJSONKeyFunction' is a contravariant
-- functor: pre-composes the projection onto both renderers, preserving
-- the text\/value encoding strategy.
contramapToJSONKeyFunction :: (b -> a) -> ToJSONKeyFunction a -> ToJSONKeyFunction b
contramapToJSONKeyFunction h (ToJSONKeyText f g)  = ToJSONKeyText  (f . h) (g . h)
contramapToJSONKeyFunction h (ToJSONKeyValue f g) = ToJSONKeyValue (f . h) (g . h)
-- | 'toJSONKey' for 'Generic' types.
-- Deriving is supported for enumeration types, i.e. the sums of nullary
-- constructors. The names of constructors will be used as keys for JSON
-- objects.
--
-- See also 'genericFromJSONKey'.
--
-- === __Example__
--
-- @
-- data Color = Red | Green | Blue
--   deriving 'Generic'
--
-- instance 'ToJSONKey' Color where
--   'toJSONKey' = 'genericToJSONKey' 'defaultJSONKeyOptions'
-- @
genericToJSONKey :: (Generic a, GToJSONKey (Rep a))
           => JSONKeyOptions -> ToJSONKeyFunction a
genericToJSONKey opts = toJSONKeyKey (Key.fromString . keyModifier opts . getConName . from)
-- | Constraint for generically derivable key types: the generic
-- representation must expose its constructor name via 'GetConName'.
class GetConName f => GToJSONKey f
instance GetConName f => GToJSONKey f
-------------------------------------------------------------------------------
-- Lifings of FromJSON and ToJSON to unary and binary type constructors
-------------------------------------------------------------------------------
-- | Lifting of the 'ToJSON' class to unary type constructors.
--
-- Instead of manually writing your 'ToJSON1' instance, there are two options
-- to do it automatically:
--
-- * "Data.Aeson.TH" provides Template Haskell functions which will derive an
-- instance at compile time. The generated instance is optimized for your type
-- so it will probably be more efficient than the following option.
--
-- * The compiler can provide a default generic implementation for
-- 'toJSON1'.
--
-- To use the second, simply add a @deriving 'Generic1'@ clause to your
-- datatype and declare a 'ToJSON1' instance for your datatype without giving
-- definitions for 'liftToJSON' or 'liftToEncoding'.
--
-- For example:
--
-- @
-- {-\# LANGUAGE DeriveGeneric \#-}
--
-- import "GHC.Generics"
--
-- data Pair a b = Pair { pairFst :: a, pairSnd :: b } deriving 'Generic1'
--
-- instance 'ToJSON' a => 'ToJSON1' (Pair a)
-- @
--
-- If the default implementation doesn't give exactly the results you want,
-- you can customize the generic encoding with only a tiny amount of
-- effort, using 'genericLiftToJSON' and 'genericLiftToEncoding' with
-- your preferred 'Options':
--
-- @
-- customOptions = 'defaultOptions'
-- { 'fieldLabelModifier' = 'map' 'Data.Char.toUpper'
-- }
--
-- instance 'ToJSON' a => 'ToJSON1' (Pair a) where
-- 'liftToJSON' = 'genericLiftToJSON' customOptions
-- 'liftToEncoding' = 'genericLiftToEncoding' customOptions
-- @
--
-- See also 'ToJSON'.
class ToJSON1 f where
    -- | Serialize an @f a@, given a serializer for @a@ and one for @[a]@.
    -- The list serializer exists so instances can special-case lists
    -- (e.g. @[Char]@ encoding as a JSON string; see the 'Char' instance).
    liftToJSON :: (a -> Value) -> ([a] -> Value) -> f a -> Value

    default liftToJSON :: (Generic1 f, GToJSON' Value One (Rep1 f))
                       => (a -> Value) -> ([a] -> Value) -> f a -> Value
    liftToJSON = genericLiftToJSON defaultOptions

    -- | Serialize a list of @f a@; by default, an array of 'liftToJSON' results.
    liftToJSONList :: (a -> Value) -> ([a] -> Value) -> [f a] -> Value
    liftToJSONList f g = listValue (liftToJSON f g)

    -- | Like 'liftToJSON' but produces a direct 'Encoding'.
    liftToEncoding :: (a -> Encoding) -> ([a] -> Encoding) -> f a -> Encoding

    default liftToEncoding :: (Generic1 f, GToJSON' Encoding One (Rep1 f))
                           => (a -> Encoding) -> ([a] -> Encoding)
                           -> f a -> Encoding
    liftToEncoding = genericLiftToEncoding defaultOptions

    -- | Like 'liftToJSONList' but produces a direct 'Encoding'.
    liftToEncodingList :: (a -> Encoding) -> ([a] -> Encoding) -> [f a] -> Encoding
    liftToEncodingList f g = listEncoding (liftToEncoding f g)
-- | Lift the standard 'toJSON' function through the type constructor.
toJSON1 :: (ToJSON1 f, ToJSON a) => f a -> Value
toJSON1 fa = liftToJSON toJSON toJSONList fa
{-# INLINE toJSON1 #-}

-- | Lift the standard 'toEncoding' function through the type constructor.
toEncoding1 :: (ToJSON1 f, ToJSON a) => f a -> Encoding
toEncoding1 fa = liftToEncoding toEncoding toEncodingList fa
{-# INLINE toEncoding1 #-}
-- | Lifting of the 'ToJSON' class to binary type constructors.
--
-- Instead of manually writing your 'ToJSON2' instance, "Data.Aeson.TH"
-- provides Template Haskell functions which will derive an instance at compile time.
--
-- The compiler cannot provide a default generic implementation for 'liftToJSON2',
-- unlike 'toJSON' and 'liftToJSON'.
class ToJSON2 f where
    -- | Serialize an @f a b@, given serializers for @a@, @[a]@, @b@ and @[b]@.
    liftToJSON2 :: (a -> Value) -> ([a] -> Value) -> (b -> Value) -> ([b] -> Value) -> f a b -> Value

    -- | Serialize a list of @f a b@; by default, an array of 'liftToJSON2' results.
    liftToJSONList2 :: (a -> Value) -> ([a] -> Value) -> (b -> Value) -> ([b] -> Value) -> [f a b] -> Value
    liftToJSONList2 fa ga fb gb = listValue (liftToJSON2 fa ga fb gb)

    -- | Like 'liftToJSON2' but produces a direct 'Encoding'.
    liftToEncoding2 :: (a -> Encoding) -> ([a] -> Encoding) -> (b -> Encoding) -> ([b] -> Encoding) -> f a b -> Encoding

    -- | Like 'liftToJSONList2' but produces a direct 'Encoding'.
    liftToEncodingList2 :: (a -> Encoding) -> ([a] -> Encoding) -> (b -> Encoding) -> ([b] -> Encoding) -> [f a b] -> Encoding
    liftToEncodingList2 fa ga fb gb = listEncoding (liftToEncoding2 fa ga fb gb)
-- | Lift the standard 'toJSON' function through the binary type constructor.
toJSON2 :: (ToJSON2 f, ToJSON a, ToJSON b) => f a b -> Value
toJSON2 fab = liftToJSON2 toJSON toJSONList toJSON toJSONList fab
{-# INLINE toJSON2 #-}

-- | Lift the standard 'toEncoding' function through the binary type constructor.
toEncoding2 :: (ToJSON2 f, ToJSON a, ToJSON b) => f a b -> Encoding
toEncoding2 fab = liftToEncoding2 toEncoding toEncodingList toEncoding toEncodingList fab
{-# INLINE toEncoding2 #-}
-------------------------------------------------------------------------------
-- Encoding functions
-------------------------------------------------------------------------------
-- | Helper function to use with 'liftToEncoding'.
-- Useful when writing own 'ToJSON1' instances.
--
-- @
-- newtype F a = F [a]
--
-- -- This instance encodes 'String' as an array of chars
-- instance 'ToJSON1' F where
-- 'liftToJSON' tj _ (F xs) = 'liftToJSON' tj ('listValue' tj) xs
-- 'liftToEncoding' te _ (F xs) = 'liftToEncoding' te ('listEncoding' te) xs
--
-- instance 'Data.Aeson.FromJSON.FromJSON1' F where
-- 'Data.Aeson.FromJSON.liftParseJSON' p _ v = F \<$\> 'Data.Aeson.FromJSON.liftParseJSON' p ('Data.Aeson.FromJSON.listParser' p) v
-- @
-- | Encode each element with the given function and wrap the results in a
-- JSON array 'Encoding'.  Useful when writing 'ToJSON1' instances by hand.
listEncoding :: (a -> Encoding) -> [a] -> Encoding
listEncoding f xs = E.list f xs
{-# INLINE listEncoding #-}

-- | 'Value' counterpart of 'listEncoding': map the function over the list
-- and collect the results into an 'Array'.
listValue :: (a -> Value) -> [a] -> Value
listValue f xs = Array (V.fromList (map f xs))
{-# INLINE listValue #-}
-------------------------------------------------------------------------------
-- [] instances
-------------------------------------------------------------------------------
-- These list instances are needed for key-class default definitions.
instance ToJSON1 [] where
    -- Lists delegate entirely to the supplied list serializer, so that
    -- per-element special-casing (e.g. String for [Char]) applies.
    liftToJSON _ to' = to'
    liftToEncoding _ to' = to'

instance (ToJSON a) => ToJSON [a] where
    {-# SPECIALIZE instance ToJSON String #-}
    {-# SPECIALIZE instance ToJSON [String] #-}
    {-# SPECIALIZE instance ToJSON [Array] #-}
    {-# SPECIALIZE instance ToJSON [Object] #-}
    toJSON = toJSON1
    toEncoding = toEncoding1
-------------------------------------------------------------------------------
-- Generic toJSON / toEncoding
-------------------------------------------------------------------------------
-- Generic encoder instances shared between the 'Value' and 'Encoding' paths.
instance {-# OVERLAPPABLE #-} (GToJSON' enc arity a) => GToJSON' enc arity (M1 i c a) where
    -- Meta-information, which is not handled elsewhere, is ignored:
    gToJSON opts targs = gToJSON opts targs . unM1
    {-# INLINE gToJSON #-}

instance GToJSON' enc One Par1 where
    -- Direct occurrences of the last type parameter are encoded with the
    -- function passed in as an argument:
    gToJSON _opts (To1Args tj _) = tj . unPar1
    {-# INLINE gToJSON #-}

instance ( ConsToJSON enc arity a
         , AllNullary (C1 c a) allNullary
         , SumToJSON enc arity (C1 c a) allNullary
         ) => GToJSON' enc arity (D1 d (C1 c a)) where
    -- The option 'tagSingleConstructors' determines whether to wrap
    -- a single-constructor type like a sum (tagged) or bare:
    gToJSON opts targs
        | tagSingleConstructors opts = (unTagged :: Tagged allNullary enc -> enc)
                                     . sumToJSON opts targs
                                     . unM1
        | otherwise = consToJSON opts targs . unM1 . unM1
    {-# INLINE gToJSON #-}

instance (ConsToJSON enc arity a) => GToJSON' enc arity (C1 c a) where
    -- Constructors need to be encoded differently depending on whether they're
    -- a record or not. This distinction is made by 'consToJSON':
    gToJSON opts targs = consToJSON opts targs . unM1
    {-# INLINE gToJSON #-}

instance ( AllNullary (a :+: b) allNullary
         , SumToJSON enc arity (a :+: b) allNullary
         ) => GToJSON' enc arity (a :+: b)
  where
    -- If all constructors of a sum datatype are nullary and the
    -- 'allNullaryToStringTag' option is set they are encoded to
    -- strings. This distinction is made by 'sumToJSON':
    gToJSON opts targs = (unTagged :: Tagged allNullary enc -> enc)
                       . sumToJSON opts targs
    {-# INLINE gToJSON #-}
--------------------------------------------------------------------------------
-- Generic toJSON
-- Note: Refactoring 'ToJSON a' to 'ToJSON enc a' (and 'ToJSON1' similarly) is
-- possible but makes error messages a bit harder to understand for missing
-- instances.
-- Generic 'toJSON' ('Value'-producing) instances.
instance GToJSON' Value arity V1 where
    -- Empty values do not exist, which makes the job of formatting them
    -- rather easy:
    gToJSON _ _ x = x `seq` error "case: V1"
    {-# INLINE gToJSON #-}

instance ToJSON a => GToJSON' Value arity (K1 i a) where
    -- Constant values are encoded using their ToJSON instance:
    gToJSON _opts _ = toJSON . unK1
    {-# INLINE gToJSON #-}

instance ToJSON1 f => GToJSON' Value One (Rec1 f) where
    -- Recursive occurrences of the last type parameter are encoded using their
    -- ToJSON1 instance:
    gToJSON _opts (To1Args tj tjl) = liftToJSON tj tjl . unRec1
    {-# INLINE gToJSON #-}

instance GToJSON' Value arity U1 where
    -- Empty constructors are encoded to an empty array:
    gToJSON _opts _ _ = emptyArray
    {-# INLINE gToJSON #-}

instance ( WriteProduct arity a, WriteProduct arity b
         , ProductSize a, ProductSize b
         ) => GToJSON' Value arity (a :*: b)
  where
    -- Products are encoded to an array. Here we allocate a mutable vector of
    -- the same size as the product and write the product's elements to it using
    -- 'writeProduct':
    gToJSON opts targs p =
        Array $ V.create $ do
          mv <- VM.unsafeNew lenProduct
          writeProduct opts targs mv 0 lenProduct p
          return mv
        where
          -- Total field count, computed at the type level by 'ProductSize'.
          lenProduct = (unTagged2 :: Tagged2 (a :*: b) Int -> Int)
                       productSize
    {-# INLINE gToJSON #-}

instance ( ToJSON1 f
         , GToJSON' Value One g
         ) => GToJSON' Value One (f :.: g)
  where
    -- If an occurrence of the last type parameter is nested inside two
    -- composed types, it is encoded by using the outermost type's ToJSON1
    -- instance to generically encode the innermost type:
    gToJSON opts targs =
        let gtj = gToJSON opts targs in
        liftToJSON gtj (listValue gtj) . unComp1
    {-# INLINE gToJSON #-}
--------------------------------------------------------------------------------
-- Generic toEncoding
-- Generic 'toEncoding' ('Encoding'-producing) instances.
instance ToJSON a => GToJSON' Encoding arity (K1 i a) where
    -- Constant values are encoded using their ToJSON instance:
    gToJSON _opts _ = toEncoding . unK1
    {-# INLINE gToJSON #-}

instance ToJSON1 f => GToJSON' Encoding One (Rec1 f) where
    -- Recursive occurrences of the last type parameter are encoded using
    -- their ToJSON1 instance:
    gToJSON _opts (To1Args te tel) = liftToEncoding te tel . unRec1
    {-# INLINE gToJSON #-}

instance GToJSON' Encoding arity U1 where
    -- Empty constructors are encoded to an empty array:
    gToJSON _opts _ _ = E.emptyArray_
    {-# INLINE gToJSON #-}

instance ( EncodeProduct arity a
         , EncodeProduct arity b
         ) => GToJSON' Encoding arity (a :*: b)
  where
    -- Products are encoded to an array, built incrementally from the
    -- in-array encodings produced by 'encodeProduct':
    gToJSON opts targs p = E.list E.retagEncoding [encodeProduct opts targs p]
    {-# INLINE gToJSON #-}

instance ( ToJSON1 f
         , GToJSON' Encoding One g
         ) => GToJSON' Encoding One (f :.: g)
  where
    -- If an occurrence of the last type parameter is nested inside two
    -- composed types, it is encoded by using the outermost type's ToJSON1
    -- instance to generically encode the innermost type:
    gToJSON opts targs =
        let gte = gToJSON opts targs in
        liftToEncoding gte (listEncoding gte) . unComp1
    {-# INLINE gToJSON #-}
--------------------------------------------------------------------------------
-- | Encode a sum type; the @allNullary@ type-level 'Bool' selects between
-- the all-nullary-to-string encoding and the general sum encodings.
class SumToJSON enc arity f allNullary where
    sumToJSON :: Options -> ToArgs enc arity a
              -> f a -> Tagged allNullary enc

instance ( GetConName f
         , FromString enc
         , TaggedObject enc arity f
         , SumToJSON' ObjectWithSingleField enc arity f
         , SumToJSON' TwoElemArray enc arity f
         , SumToJSON' UntaggedValue enc arity f
         ) => SumToJSON enc arity f True
  where
    -- All constructors are nullary: encode as the (modified) constructor
    -- name when 'allNullaryToStringTag' is on, otherwise fall back.
    sumToJSON opts targs
        | allNullaryToStringTag opts = Tagged . fromString
                                     . constructorTagModifier opts . getConName
        | otherwise = Tagged . nonAllNullarySumToJSON opts targs
    {-# INLINE sumToJSON #-}

instance ( TaggedObject enc arity f
         , SumToJSON' ObjectWithSingleField enc arity f
         , SumToJSON' TwoElemArray enc arity f
         , SumToJSON' UntaggedValue enc arity f
         ) => SumToJSON enc arity f False
  where
    sumToJSON opts targs = Tagged . nonAllNullarySumToJSON opts targs
    {-# INLINE sumToJSON #-}

-- | Dispatch on the 'sumEncoding' option for sums that are not encoded
-- as plain strings.
nonAllNullarySumToJSON :: ( TaggedObject enc arity f
                          , SumToJSON' ObjectWithSingleField enc arity f
                          , SumToJSON' TwoElemArray enc arity f
                          , SumToJSON' UntaggedValue enc arity f
                          ) => Options -> ToArgs enc arity a
                            -> f a -> enc
nonAllNullarySumToJSON opts targs =
    case sumEncoding opts of
      TaggedObject{..} ->
        taggedObject opts targs (Key.fromString tagFieldName) (Key.fromString contentsFieldName)
      ObjectWithSingleField ->
        (unTagged :: Tagged ObjectWithSingleField enc -> enc)
          . sumToJSON' opts targs
      TwoElemArray ->
        (unTagged :: Tagged TwoElemArray enc -> enc)
          . sumToJSON' opts targs
      UntaggedValue ->
        (unTagged :: Tagged UntaggedValue enc -> enc)
          . sumToJSON' opts targs
{-# INLINE nonAllNullarySumToJSON #-}
--------------------------------------------------------------------------------
-- | Inject a 'String' into either serialization target ('Value' or
-- 'Encoding'), so tag-producing code can be written once for both.
class FromString enc where
    fromString :: String -> enc

instance FromString Encoding where
    fromString s = toEncoding s

instance FromString Value where
    fromString s = String (pack s)
--------------------------------------------------------------------------------
-- | TaggedObject sum encoding: an object holding a tag pair plus the
-- constructor's contents.
class TaggedObject enc arity f where
    taggedObject :: Options -> ToArgs enc arity a
                 -> Key -> Key
                 -> f a -> enc

instance ( TaggedObject enc arity a
         , TaggedObject enc arity b
         ) => TaggedObject enc arity (a :+: b)
  where
    -- Sums: recurse into whichever branch the value inhabits.
    taggedObject opts targs tagFieldName contentsFieldName (L1 x) =
        taggedObject opts targs tagFieldName contentsFieldName x
    taggedObject opts targs tagFieldName contentsFieldName (R1 x) =
        taggedObject opts targs tagFieldName contentsFieldName x
    {-# INLINE taggedObject #-}

instance ( IsRecord a isRecord
         , TaggedObject' enc pairs arity a isRecord
         , FromPairs enc pairs
         , FromString enc
         , KeyValuePair enc pairs
         , Constructor c
         ) => TaggedObject enc arity (C1 c a)
  where
    -- The tag pair comes first, then the constents produced by
    -- 'taggedObject'' (record fields inline, or one contents pair).
    taggedObject opts targs tagFieldName contentsFieldName =
        fromPairs . mappend tag . contents
      where
        tag = tagFieldName `pair`
          (fromString (constructorTagModifier opts (conName (undefined :: t c a p)))
            :: enc)
        contents =
          (unTagged :: Tagged isRecord pairs -> pairs) .
            taggedObject' opts targs contentsFieldName . unM1
    {-# INLINE taggedObject #-}

-- | Encode just the contents of one constructor; @isRecord@ selects the
-- record (fields inline) vs non-record (contents field) layout.
class TaggedObject' enc pairs arity f isRecord where
    taggedObject' :: Options -> ToArgs enc arity a
                  -> Key -> f a -> Tagged isRecord pairs

instance ( GToJSON' enc arity f
         , KeyValuePair enc pairs
         ) => TaggedObject' enc pairs arity f False
  where
    -- Non-record contents go under the contents field.
    taggedObject' opts targs contentsFieldName =
        Tagged . (contentsFieldName `pair`) . gToJSON opts targs
    {-# INLINE taggedObject' #-}

instance {-# OVERLAPPING #-} Monoid pairs => TaggedObject' enc pairs arity U1 False where
    -- A nullary constructor contributes no contents pair at all.
    taggedObject' _ _ _ _ = Tagged mempty
    {-# INLINE taggedObject' #-}

instance ( RecordToPairs enc pairs arity f
         ) => TaggedObject' enc pairs arity f True
  where
    -- Record fields are emitted next to the tag pair; the contents-field
    -- name is unused.
    taggedObject' opts targs _ = Tagged . recordToPairs opts targs
    {-# INLINE taggedObject' #-}
--------------------------------------------------------------------------------
-- | Get the name of the constructor of a sum datatype.
class GetConName f where
    getConName :: f a -> String

instance (GetConName a, GetConName b) => GetConName (a :+: b) where
    getConName (L1 x) = getConName x
    getConName (R1 x) = getConName x
    {-# INLINE getConName #-}

instance (Constructor c) => GetConName (C1 c a) where
    getConName = conName
    {-# INLINE getConName #-}

-- Needed for 'genericToJSONKey': unwrap the datatype metadata node.
instance GetConName a => GetConName (D1 d a) where
    getConName (M1 x) = getConName x
    {-# INLINE getConName #-}
--------------------------------------------------------------------------------
-- Reflection of SumEncoding variants: phantom tags used (via 'Tagged')
-- to select which 'SumToJSON'' strategy instance applies.
data ObjectWithSingleField
data TwoElemArray
data UntaggedValue
--------------------------------------------------------------------------------
-- | Encode one sum alternative under the strategy named by phantom @s@.
class SumToJSON' s enc arity f where
    sumToJSON' :: Options -> ToArgs enc arity a
               -> f a -> Tagged s enc

instance ( SumToJSON' s enc arity a
         , SumToJSON' s enc arity b
         ) => SumToJSON' s enc arity (a :+: b)
  where
    -- Sums: recurse into whichever branch the value inhabits.
    sumToJSON' opts targs (L1 x) = sumToJSON' opts targs x
    sumToJSON' opts targs (R1 x) = sumToJSON' opts targs x
    {-# INLINE sumToJSON' #-}
--------------------------------------------------------------------------------
-- TwoElemArray strategy: @[\"ConName\", <contents>]@.
instance ( GToJSON' Value arity a
         , ConsToJSON Value arity a
         , Constructor c
         ) => SumToJSON' TwoElemArray Value arity (C1 c a) where
    sumToJSON' opts targs x = Tagged $ Array $ V.create $ do
      mv <- VM.unsafeNew 2
      -- Slot 0: the (modified) constructor tag; slot 1: the contents.
      VM.unsafeWrite mv 0 $ String $ pack $ constructorTagModifier opts
                                          $ conName (undefined :: t c a p)
      VM.unsafeWrite mv 1 $ gToJSON opts targs x
      return mv
    {-# INLINE sumToJSON' #-}
--------------------------------------------------------------------------------
instance ( GToJSON' Encoding arity a
         , ConsToJSON Encoding arity a
         , Constructor c
         ) => SumToJSON' TwoElemArray Encoding arity (C1 c a)
  where
    sumToJSON' opts targs x = Tagged $ E.list id
      [ toEncoding (constructorTagModifier opts (conName (undefined :: t c a p)))
      , gToJSON opts targs x
      ]
    {-# INLINE sumToJSON' #-}
--------------------------------------------------------------------------------
-- | Encode a single constructor's contents; dispatches (via 'IsRecord')
-- on whether the constructor has named fields.
class ConsToJSON enc arity f where
    consToJSON :: Options -> ToArgs enc arity a
               -> f a -> enc

class ConsToJSON' enc arity f isRecord where
    consToJSON' :: Options -> ToArgs enc arity a
                -> f a -> Tagged isRecord enc

instance ( IsRecord f isRecord
         , ConsToJSON' enc arity f isRecord
         ) => ConsToJSON enc arity f
  where
    consToJSON opts targs =
        (unTagged :: Tagged isRecord enc -> enc)
      . consToJSON' opts targs
    {-# INLINE consToJSON #-}

instance {-# OVERLAPPING #-}
         ( RecordToPairs enc pairs arity (S1 s f)
         , FromPairs enc pairs
         , GToJSON' enc arity f
         ) => ConsToJSON' enc arity (S1 s f) True
  where
    -- A record with exactly one field may be unwrapped to the bare field
    -- value when 'unwrapUnaryRecords' is set.
    consToJSON' opts targs
        | unwrapUnaryRecords opts = Tagged . gToJSON opts targs
        | otherwise = Tagged . fromPairs . recordToPairs opts targs
    {-# INLINE consToJSON' #-}

instance ( RecordToPairs enc pairs arity f
         , FromPairs enc pairs
         ) => ConsToJSON' enc arity f True
  where
    -- General records: emit all fields as key/value pairs.
    consToJSON' opts targs = Tagged . fromPairs . recordToPairs opts targs
    {-# INLINE consToJSON' #-}

instance GToJSON' enc arity f => ConsToJSON' enc arity f False where
    -- Non-records encode their contents positionally.
    consToJSON' opts targs = Tagged . gToJSON opts targs
    {-# INLINE consToJSON' #-}
--------------------------------------------------------------------------------
class RecordToPairs enc pairs arity f where
    -- | Serialize the fields of a record constructor as a sequence of
    -- key\/value pairs.  @Maybe@-typed fields may be dropped entirely
    -- when 'omitNothingFields' is set (see the incoherent instance below).
    recordToPairs :: Options -> ToArgs enc arity a
                  -> f a -> pairs

instance ( Monoid pairs
         , RecordToPairs enc pairs arity a
         , RecordToPairs enc pairs arity b
         ) => RecordToPairs enc pairs arity (a :*: b)
  where
    -- Products: concatenate the pairs of both halves, left to right.
    recordToPairs opts (targs :: ToArgs enc arity p) (a :*: b) =
        pairsOf a `mappend` pairsOf b
      where
        pairsOf :: (RecordToPairs enc pairs arity f) => f p -> pairs
        pairsOf = recordToPairs opts targs
    {-# INLINE recordToPairs #-}

instance ( Selector s
         , GToJSON' enc arity a
         , KeyValuePair enc pairs
         ) => RecordToPairs enc pairs arity (S1 s a)
  where
    recordToPairs = fieldToPair
    {-# INLINE recordToPairs #-}

-- INCOHERENT: preferred for @Maybe@ fields so a 'Nothing' can be omitted
-- entirely when 'omitNothingFields' is on.
instance {-# INCOHERENT #-}
    ( Selector s
    , GToJSON' enc arity (K1 i (Maybe a))
    , KeyValuePair enc pairs
    , Monoid pairs
    ) => RecordToPairs enc pairs arity (S1 s (K1 i (Maybe a)))
  where
    recordToPairs opts _ (M1 k1) | omitNothingFields opts
                                 , K1 Nothing <- k1 = mempty
    recordToPairs opts targs m1 = fieldToPair opts targs m1
    {-# INLINE recordToPairs #-}

#if !MIN_VERSION_base(4,16,0)
-- Semigroup.Option is a newtype over Maybe; reuse the Maybe handling.
instance {-# INCOHERENT #-}
    ( Selector s
    , GToJSON' enc arity (K1 i (Maybe a))
    , KeyValuePair enc pairs
    , Monoid pairs
    ) => RecordToPairs enc pairs arity (S1 s (K1 i (Semigroup.Option a)))
  where
    recordToPairs opts targs = recordToPairs opts targs . unwrap
      where
        unwrap :: S1 s (K1 i (Semigroup.Option a)) p -> S1 s (K1 i (Maybe a)) p
        unwrap (M1 (K1 (Semigroup.Option a))) = M1 (K1 a)
    {-# INLINE recordToPairs #-}
#endif

-- | Turn one record selector into a single key\/value pair, applying
-- 'fieldLabelModifier' to the selector name.
fieldToPair :: (Selector s
               , GToJSON' enc arity a
               , KeyValuePair enc pairs)
            => Options -> ToArgs enc arity p
            -> S1 s a p -> pairs
fieldToPair opts targs m1 =
    let key   = Key.fromString $ fieldLabelModifier opts (selName m1)
        value = gToJSON opts targs (unM1 m1)
    in key `pair` value
{-# INLINE fieldToPair #-}
--------------------------------------------------------------------------------
-- | Write the components of a product into a slice of a pre-allocated
-- mutable vector (used by the generic ':*:' 'Value' instance).
class WriteProduct arity f where
    writeProduct :: Options
                 -> ToArgs Value arity a
                 -> VM.MVector s Value
                 -> Int -- ^ index
                 -> Int -- ^ length
                 -> f a
                 -> ST s ()

instance ( WriteProduct arity a
         , WriteProduct arity b
         ) => WriteProduct arity (a :*: b) where
    -- Split the slice in two: the left factor gets the floor half
    -- (shift right by one = div 2), the right factor the remainder.
    writeProduct opts targs mv ix len (a :*: b) = do
      writeProduct opts targs mv ix lenL a
      writeProduct opts targs mv ixR lenR b
        where
          lenL = len `unsafeShiftR` 1
          lenR = len - lenL
          ixR = ix + lenL
    {-# INLINE writeProduct #-}

instance {-# OVERLAPPABLE #-} (GToJSON' Value arity a) => WriteProduct arity a where
    -- Leaf: encode the single component and store it at the given index.
    writeProduct opts targs mv ix _ =
      VM.unsafeWrite mv ix . gToJSON opts targs
    {-# INLINE writeProduct #-}
--------------------------------------------------------------------------------
-- | Encode the components of a product directly as in-array 'Encoding'
-- items (used by the generic ':*:' 'Encoding' instance).
class EncodeProduct arity f where
    encodeProduct :: Options -> ToArgs Encoding arity a
                  -> f a -> Encoding' E.InArray

instance ( EncodeProduct arity a
         , EncodeProduct arity b
         ) => EncodeProduct arity (a :*: b) where
    -- With 'omitNothingFields' set, empty encodings are filtered out so
    -- no stray commas are produced for omitted components.
    encodeProduct opts targs (a :*: b) | omitNothingFields opts =
        E.econcat $ intersperse E.comma $
        filter (not . E.nullEncoding)
        [encodeProduct opts targs a, encodeProduct opts targs b]
    encodeProduct opts targs (a :*: b) =
        encodeProduct opts targs a >*<
        encodeProduct opts targs b
    {-# INLINE encodeProduct #-}

instance {-# OVERLAPPABLE #-} (GToJSON' Encoding arity a) => EncodeProduct arity a where
    -- Leaf: encode the single component and retag it as an array item.
    encodeProduct opts targs a = E.retagEncoding $ gToJSON opts targs a
    {-# INLINE encodeProduct #-}
--------------------------------------------------------------------------------
-- ObjectWithSingleField strategy: @{ \"ConName\": <contents> }@.
instance ( GToJSON' enc arity a
         , ConsToJSON enc arity a
         , FromPairs enc pairs
         , KeyValuePair enc pairs
         , Constructor c
         ) => SumToJSON' ObjectWithSingleField enc arity (C1 c a)
  where
    sumToJSON' opts targs =
        Tagged . fromPairs . (typ `pair`) . gToJSON opts targs
      where
        typ = Key.fromString $ constructorTagModifier opts $
                conName (undefined :: t c a p)
    {-# INLINE sumToJSON' #-}
--------------------------------------------------------------------------------
-- UntaggedValue strategy: just the contents, no tag.  An OVERLAPPING
-- instance turns nullary constructors into their (modified) tag string.
instance {-# OVERLAPPABLE #-}
    ( ConsToJSON enc arity a
    ) => SumToJSON' UntaggedValue enc arity (C1 c a)
  where
    sumToJSON' opts targs = Tagged . gToJSON opts targs
    {-# INLINE sumToJSON' #-}

instance {-# OVERLAPPING #-}
    ( Constructor c
    , FromString enc
    ) => SumToJSON' UntaggedValue enc arity (C1 c U1)
  where
    sumToJSON' opts _ _ = Tagged . fromString $
        constructorTagModifier opts $ conName (undefined :: t c U1 p)
    {-# INLINE sumToJSON' #-}
-------------------------------------------------------------------------------
-- Instances
-------------------------------------------------------------------------------
-------------------------------------------------------------------------------
-- base
-------------------------------------------------------------------------------
-- Const encodes as its wrapped value; the phantom parameter is ignored.
instance ToJSON2 Const where
    liftToJSON2 f _ _ _ = f . getConst
    liftToEncoding2 f _ _ _ = f . getConst

instance ToJSON a => ToJSON1 (Const a) where
    liftToJSON _ _ = toJSON . getConst
    liftToEncoding _ _ = toEncoding . getConst

instance ToJSON a => ToJSON (Const a b) where
    toJSON = toJSON . getConst
    toEncoding = toEncoding . getConst

instance (ToJSON a, ToJSONKey a) => ToJSONKey (Const a b) where
    toJSONKey = contramap getConst toJSONKey

-- 'Just' encodes transparently as its payload; 'Nothing' as JSON null.
instance ToJSON1 Maybe where
    liftToJSON to _ = maybe Null to
    liftToEncoding to _ = maybe E.null_ to

instance (ToJSON a) => ToJSON (Maybe a) where
    toJSON m = toJSON1 m
    toEncoding m = toEncoding1 m
-- Either is encoded as a single-key object: {"Left": ...} or {"Right": ...}.
instance ToJSON2 Either where
    liftToJSON2 ta _ tb _ e = case e of
        Left  x -> Object $ KM.singleton "Left" (ta x)
        Right y -> Object $ KM.singleton "Right" (tb y)
    liftToEncoding2 ta _ tb _ e = case e of
        Left  x -> E.pairs $ E.pair "Left" (ta x)
        Right y -> E.pairs $ E.pair "Right" (tb y)

instance (ToJSON a) => ToJSON1 (Either a) where
    liftToJSON = liftToJSON2 toJSON toJSONList
    liftToEncoding = liftToEncoding2 toEncoding toEncodingList

instance (ToJSON a, ToJSON b) => ToJSON (Either a b) where
    toJSON e = toJSON2 e
    toEncoding e = toEncoding2 e
-- Void has no inhabitants, so both methods are vacuously total.
instance ToJSON Void where
    toJSON v = absurd v
    toEncoding v = absurd v

instance ToJSON Bool where
    toJSON b = Bool b
    toEncoding b = E.bool b

instance ToJSONKey Bool where
    -- As an object key, a Bool becomes the text "true" or "false".
    toJSONKey = toJSONKeyText $ \b -> case b of
        True  -> "true"
        False -> "false"
-- Ordering serializes as one of the strings "LT", "EQ", "GT".
instance ToJSON Ordering where
    toJSON o = toJSON (orderingToText o)
    toEncoding o = toEncoding (orderingToText o)

orderingToText :: Ordering -> T.Text
orderingToText LT = "LT"
orderingToText EQ = "EQ"
orderingToText GT = "GT"

instance ToJSON () where
    -- The argument is deliberately not forced; unit encodes as @[]@.
    toJSON _ = emptyArray
    toEncoding _ = emptyArray_

instance ToJSON Char where
    -- Single characters and strings both become JSON strings; the
    -- list methods give [Char] its String representation.
    toJSON c = String (T.singleton c)
    toJSONList s = String (T.pack s)
    toEncoding c = E.string [c]
    toEncodingList s = E.string s
-- Floating-point values go through 'realFloatToJSON' so that NaN and
-- infinities are handled consistently with the rest of the library.
instance ToJSON Double where
    toJSON d = realFloatToJSON d
    toEncoding d = E.double d

instance ToJSONKey Double where
    toJSONKey = toJSONKeyTextEnc E.doubleText

instance ToJSON Number where
    toJSON n = case n of
        D d -> toJSON d
        I i -> toJSON i
    toEncoding n = case n of
        D d -> toEncoding d
        I i -> toEncoding i

instance ToJSON Float where
    toJSON f = realFloatToJSON f
    toEncoding f = E.float f

instance ToJSONKey Float where
    toJSONKey = toJSONKeyTextEnc E.floatText

-- A ratio becomes an object with explicit numerator/denominator fields.
instance (ToJSON a, Integral a) => ToJSON (Ratio a) where
    toJSON r = object
        [ "numerator" .= numerator r
        , "denominator" .= denominator r
        ]
    toEncoding r =
        let num = numerator r
            den = denominator r
        in E.pairs ("numerator" .= num <> "denominator" .= den)

instance HasResolution a => ToJSON (Fixed a) where
    toJSON x = Number (realToFrac x)
    toEncoding x = E.scientific (realToFrac x)

instance HasResolution a => ToJSONKey (Fixed a) where
    toJSONKey = toJSONKeyTextEnc (E.scientificText . realToFrac)
-- All integral types map to JSON numbers; as object keys they render
-- as decimal text via the corresponding @E.*Text@ encoders.
instance ToJSON Int where
    toJSON i = Number (fromIntegral i)
    toEncoding i = E.int i

instance ToJSONKey Int where
    toJSONKey = toJSONKeyTextEnc E.intText

instance ToJSON Integer where
    toJSON n = Number (fromInteger n)
    toEncoding n = E.integer n

instance ToJSONKey Integer where
    toJSONKey = toJSONKeyTextEnc E.integerText

instance ToJSON Natural where
    toJSON n = toJSON (toInteger n)
    toEncoding n = toEncoding (toInteger n)

instance ToJSONKey Natural where
    toJSONKey = toJSONKeyTextEnc (E.integerText . toInteger)

instance ToJSON Int8 where
    toJSON i = Number (fromIntegral i)
    toEncoding i = E.int8 i

instance ToJSONKey Int8 where
    toJSONKey = toJSONKeyTextEnc E.int8Text

instance ToJSON Int16 where
    toJSON i = Number (fromIntegral i)
    toEncoding i = E.int16 i

instance ToJSONKey Int16 where
    toJSONKey = toJSONKeyTextEnc E.int16Text

instance ToJSON Int32 where
    toJSON i = Number (fromIntegral i)
    toEncoding i = E.int32 i

instance ToJSONKey Int32 where
    toJSONKey = toJSONKeyTextEnc E.int32Text

instance ToJSON Int64 where
    toJSON i = Number (fromIntegral i)
    toEncoding i = E.int64 i

instance ToJSONKey Int64 where
    toJSONKey = toJSONKeyTextEnc E.int64Text

instance ToJSON Word where
    toJSON w = Number (fromIntegral w)
    toEncoding w = E.word w

instance ToJSONKey Word where
    toJSONKey = toJSONKeyTextEnc E.wordText

instance ToJSON Word8 where
    toJSON w = Number (fromIntegral w)
    toEncoding w = E.word8 w

instance ToJSONKey Word8 where
    toJSONKey = toJSONKeyTextEnc E.word8Text

instance ToJSON Word16 where
    toJSON w = Number (fromIntegral w)
    toEncoding w = E.word16 w

instance ToJSONKey Word16 where
    toJSONKey = toJSONKeyTextEnc E.word16Text

instance ToJSON Word32 where
    toJSON w = Number (fromIntegral w)
    toEncoding w = E.word32 w

instance ToJSONKey Word32 where
    toJSONKey = toJSONKeyTextEnc E.word32Text

instance ToJSON Word64 where
    toJSON w = Number (fromIntegral w)
    toEncoding w = E.word64 w

instance ToJSONKey Word64 where
    toJSONKey = toJSONKeyTextEnc E.word64Text

-- CTime is a newtype over a platform integer; unwrap and reuse it.
instance ToJSON CTime where
    toJSON (CTime t) = toJSON t
    toEncoding (CTime t) = toEncoding t
-- Text types encode directly as JSON strings.
instance ToJSON Text where
    toJSON t = String t
    toEncoding t = E.text t

instance ToJSONKey Text where
    toJSONKey = toJSONKeyText id

instance ToJSON LT.Text where
    toJSON t = String (LT.toStrict t)
    toEncoding t = E.lazyText t

instance ToJSONKey LT.Text where
    toJSONKey = toJSONKeyText LT.toStrict

-- | @since 2.0.2.0
instance ToJSON ST.ShortText where
    toJSON st = String (ST.toText st)
    toEncoding st = E.shortText st

-- | @since 2.0.2.0
instance ToJSONKey ST.ShortText where
    toJSONKey = ToJSONKeyText Key.fromShortText E.shortText

-- Versions serialize via their 'showVersion' rendering.
instance ToJSON Version where
    toJSON v = toJSON (showVersion v)
    toEncoding v = toEncoding (showVersion v)

instance ToJSONKey Version where
    toJSONKey = toJSONKeyKey (Key.fromString . showVersion)
-------------------------------------------------------------------------------
-- semigroups NonEmpty
-------------------------------------------------------------------------------
-- A NonEmpty encodes exactly like its list form.
instance ToJSON1 NonEmpty where
    liftToJSON to _ ne = listValue to (NE.toList ne)
    liftToEncoding to _ ne = listEncoding to (NE.toList ne)

instance (ToJSON a) => ToJSON (NonEmpty a) where
    toJSON x = toJSON1 x
    toEncoding x = toEncoding1 x

-------------------------------------------------------------------------------
-- scientific
-------------------------------------------------------------------------------

-- Scientific is the native numeric representation of 'Value'.
instance ToJSON Scientific where
    toJSON n = Number n
    toEncoding n = E.scientific n

instance ToJSONKey Scientific where
    toJSONKey = toJSONKeyTextEnc E.scientificText
-------------------------------------------------------------------------------
-- DList
-------------------------------------------------------------------------------
-- Difference lists encode via their list form.
instance ToJSON1 DList.DList where
    liftToJSON t _ = listValue t . toList
    liftToEncoding t _ = listEncoding t . toList

instance (ToJSON a) => ToJSON (DList.DList a) where
    toJSON = toJSON1
    toEncoding = toEncoding1

#if MIN_VERSION_dlist(1,0,0)
-- | @since 1.5.3.0
instance ToJSON1 DNE.DNonEmpty where
    liftToJSON t _ = listValue t . DNE.toList
    liftToEncoding t _ = listEncoding t . DNE.toList

-- | @since 1.5.3.0
instance (ToJSON a) => ToJSON (DNE.DNonEmpty a) where
    toJSON = toJSON1
    toEncoding = toEncoding1
#endif
-------------------------------------------------------------------------------
-- OneTuple
-------------------------------------------------------------------------------
-- | @since 2.0.2.0
-- | @since 2.0.2.0
--
-- A 'Solo' is transparent: it encodes exactly as its payload, and a list
-- of 'Solo's encodes as the list of payloads (hence the list overrides).
instance ToJSON1 Solo where
    liftToJSON t _ (Solo a) = t a
    liftToJSONList _ tl xs = tl (map getSolo xs)
    liftToEncoding t _ (Solo a) = t a
    liftToEncodingList _ tl xs = tl (map getSolo xs)

-- | @since 2.0.2.0
instance (ToJSON a) => ToJSON (Solo a) where
    toJSON = toJSON1
    toJSONList = liftToJSONList toJSON toJSONList
    toEncoding = toEncoding1
    toEncodingList = liftToEncodingList toEncoding toEncodingList

-- | @since 2.0.2.0
instance (ToJSONKey a) => ToJSONKey (Solo a) where
    toJSONKey = contramapToJSONKeyFunction getSolo toJSONKey
    toJSONKeyList = contramapToJSONKeyFunction (map getSolo) toJSONKeyList
-------------------------------------------------------------------------------
-- transformers - Functors
-------------------------------------------------------------------------------
-- | 'Identity' is transparent, mirroring the 'Solo' instances: it encodes
-- exactly as the wrapped value, including for lists of 'Identity'.
instance ToJSON1 Identity where
    liftToJSON t _ (Identity a) = t a
    liftToJSONList _ tl xs = tl (map runIdentity xs)
    liftToEncoding t _ (Identity a) = t a
    liftToEncodingList _ tl xs = tl (map runIdentity xs)

instance (ToJSON a) => ToJSON (Identity a) where
    toJSON = toJSON1
    toJSONList = liftToJSONList toJSON toJSONList
    toEncoding = toEncoding1
    toEncodingList = liftToEncodingList toEncoding toEncodingList

instance (ToJSONKey a) => ToJSONKey (Identity a) where
    toJSONKey = contramapToJSONKeyFunction runIdentity toJSONKey
    toJSONKeyList = contramapToJSONKeyFunction (map runIdentity) toJSONKeyList
-- | @Compose f g a@ encodes as the nested structure itself: the outer
-- functor's encoder is built by lifting through the inner functor.
instance (ToJSON1 f, ToJSON1 g) => ToJSON1 (Compose f g) where
    liftToJSON tv tvl (Compose x) = liftToJSON g gl x
      where
        -- encoders for @g a@, assembled from the element encoders
        g = liftToJSON tv tvl
        gl = liftToJSONList tv tvl

    liftToJSONList te tel xs = liftToJSONList g gl (map getCompose xs)
      where
        g = liftToJSON te tel
        gl = liftToJSONList te tel

    liftToEncoding te tel (Compose x) = liftToEncoding g gl x
      where
        g = liftToEncoding te tel
        gl = liftToEncodingList te tel

    liftToEncodingList te tel xs = liftToEncodingList g gl (map getCompose xs)
      where
        g = liftToEncoding te tel
        gl = liftToEncodingList te tel

instance (ToJSON1 f, ToJSON1 g, ToJSON a) => ToJSON (Compose f g a) where
    toJSON = toJSON1
    toJSONList = liftToJSONList toJSON toJSONList
    toEncoding = toEncoding1
    toEncodingList = liftToEncodingList toEncoding toEncodingList
-- | @Pair x y@ encodes as the two-element tuple @(x, y)@, i.e. a JSON
-- array of the two lifted components.
instance (ToJSON1 f, ToJSON1 g) => ToJSON1 (Product f g) where
    liftToJSON tv tvl (Pair x y) = liftToJSON2 tx txl ty tyl (x, y)
      where
        -- both components share the same element encoders
        tx = liftToJSON tv tvl
        txl = liftToJSONList tv tvl
        ty = liftToJSON tv tvl
        tyl = liftToJSONList tv tvl

    liftToEncoding te tel (Pair x y) = liftToEncoding2 tx txl ty tyl (x, y)
      where
        tx = liftToEncoding te tel
        txl = liftToEncodingList te tel
        ty = liftToEncoding te tel
        tyl = liftToEncodingList te tel

instance (ToJSON1 f, ToJSON1 g, ToJSON a) => ToJSON (Product f g a) where
    toJSON = toJSON1
    toEncoding = toEncoding1
-- | 'Sum' encodes as a single-key object tagged @\"InL\"@ or @\"InR\"@.
instance (ToJSON1 f, ToJSON1 g) => ToJSON1 (Sum f g) where
    liftToJSON tv tvl (InL x) = Object $ KM.singleton "InL" (liftToJSON tv tvl x)
    liftToJSON tv tvl (InR y) = Object $ KM.singleton "InR" (liftToJSON tv tvl y)
    liftToEncoding te tel (InL x) = E.pairs $ E.pair "InL" $ liftToEncoding te tel x
    liftToEncoding te tel (InR y) = E.pairs $ E.pair "InR" $ liftToEncoding te tel y

instance (ToJSON1 f, ToJSON1 g, ToJSON a) => ToJSON (Sum f g a) where
    toJSON = toJSON1
    toEncoding = toEncoding1
-------------------------------------------------------------------------------
-- containers
-------------------------------------------------------------------------------
-- | Sequences and sets are all serialised as JSON arrays of their
-- elements (in the container's native order).
instance ToJSON1 Seq.Seq where
    liftToJSON t _ = listValue t . toList
    liftToEncoding t _ = listEncoding t . toList

instance (ToJSON a) => ToJSON (Seq.Seq a) where
    toJSON = toJSON1
    toEncoding = toEncoding1

instance ToJSON1 Set.Set where
    liftToJSON t _ = listValue t . Set.toList
    liftToEncoding t _ = listEncoding t . Set.toList

instance (ToJSON a) => ToJSON (Set.Set a) where
    toJSON = toJSON1
    toEncoding = toEncoding1

instance ToJSON IntSet.IntSet where
    toJSON = toJSON . IntSet.toList
    toEncoding = toEncoding . IntSet.toList
-- | An 'IntMap.IntMap' is encoded as a list of @(key, value)@ pairs.
instance ToJSON1 IntMap.IntMap where
    liftToJSON t tol = liftToJSON to' tol' . IntMap.toList
      where
        -- pair encoders: Int key via the default instance, value lifted
        to' = liftToJSON2 toJSON toJSONList t tol
        tol' = liftToJSONList2 toJSON toJSONList t tol

    liftToEncoding t tol = liftToEncoding to' tol' . IntMap.toList
      where
        to' = liftToEncoding2 toEncoding toEncodingList t tol
        tol' = liftToEncodingList2 toEncoding toEncodingList t tol

instance ToJSON a => ToJSON (IntMap.IntMap a) where
    toJSON = toJSON1
    toEncoding = toEncoding1

-- | A 'M.Map' becomes a JSON object when its key type renders as text
-- ('ToJSONKeyText'), otherwise an array of @[key, value]@ pairs.
instance ToJSONKey k => ToJSON1 (M.Map k) where
    liftToJSON g _ = case toJSONKey of
        ToJSONKeyText f _ -> Object . KM.fromMap . mapKeyValO f g
        ToJSONKeyValue f _ -> Array . V.fromList . map (toJSONPair f g) . M.toList

    liftToEncoding g _ = case toJSONKey of
        ToJSONKeyText _ f -> dict f g M.foldrWithKey
        ToJSONKeyValue _ f -> listEncoding (pairEncoding f) . M.toList
          where
            pairEncoding f (a, b) = E.list id [f a, g b]

instance (ToJSON v, ToJSONKey k) => ToJSON (M.Map k v) where
    toJSON = toJSON1
    toEncoding = toEncoding1
-- | A rose tree @Node root branches@ is encoded recursively as the pair
-- @(root, branches)@; @go@ ties the recursive knot for the subtrees.
instance ToJSON1 Tree.Tree where
    liftToJSON t tol = go
      where
        go (Tree.Node root branches) =
            liftToJSON2 t tol to' tol' (root, branches)
        -- encoders for the forest of subtrees, built from @go@ itself
        to' = liftToJSON go (listValue go)
        tol' = liftToJSONList go (listValue go)

    liftToEncoding t tol = go
      where
        go (Tree.Node root branches) =
            liftToEncoding2 t tol to' tol' (root, branches)
        to' = liftToEncoding go (listEncoding go)
        tol' = liftToEncodingList go (listEncoding go)

instance (ToJSON v) => ToJSON (Tree.Tree v) where
    toJSON = toJSON1
    toEncoding = toEncoding1
-------------------------------------------------------------------------------
-- uuid
-------------------------------------------------------------------------------
-- | UUIDs are encoded as their canonical textual form.  The 'Encoding'
-- path writes the ASCII bytes directly, wrapped in quotes, bypassing the
-- generic string escaper (UUID text contains no characters needing escape).
instance ToJSON UUID.UUID where
    toJSON = toJSON . UUID.toText
    toEncoding = E.unsafeToEncoding . EB.quote . B.byteString . UUID.toASCIIBytes

instance ToJSONKey UUID.UUID where
    toJSONKey = ToJSONKeyText (Key.fromText . UUID.toText) $
        E.unsafeToEncoding . EB.quote . B.byteString . UUID.toASCIIBytes
-------------------------------------------------------------------------------
-- vector
-------------------------------------------------------------------------------
-- | Boxed vectors map straight onto the 'Array' constructor.
instance ToJSON1 Vector where
    liftToJSON t _ = Array . V.map t
    liftToEncoding t _ = listEncoding t . V.toList

instance (ToJSON a) => ToJSON (Vector a) where
    {-# SPECIALIZE instance ToJSON Array #-}
    toJSON = toJSON1
    toEncoding = toEncoding1

-- | Shared encoder for any generic vector flavour.
encodeVector :: (ToJSON a, VG.Vector v a) => v a -> Encoding
encodeVector = listEncoding toEncoding . VG.toList
{-# INLINE encodeVector #-}

-- | Shared 'Value' builder for any generic vector flavour; goes through
-- a boxed vector via 'V.convert'.
vectorToJSON :: (VG.Vector v a, ToJSON a) => v a -> Value
vectorToJSON = Array . V.map toJSON . V.convert
{-# INLINE vectorToJSON #-}

instance (Storable a, ToJSON a) => ToJSON (VS.Vector a) where
    toJSON = vectorToJSON
    toEncoding = encodeVector

instance (VP.Prim a, ToJSON a) => ToJSON (VP.Vector a) where
    toJSON = vectorToJSON
    toEncoding = encodeVector

instance (VG.Vector VU.Vector a, ToJSON a) => ToJSON (VU.Vector a) where
    toJSON = vectorToJSON
    toEncoding = encodeVector
-------------------------------------------------------------------------------
-- unordered-containers
-------------------------------------------------------------------------------
-- | Hash sets are serialised as JSON arrays (iteration order is the
-- hash map's internal order, not sorted).
instance ToJSON1 HashSet.HashSet where
    liftToJSON t _ = listValue t . HashSet.toList
    liftToEncoding t _ = listEncoding t . HashSet.toList

instance (ToJSON a) => ToJSON (HashSet.HashSet a) where
    toJSON = toJSON1
    toEncoding = toEncoding1

-- | Like the 'M.Map' instance: text-renderable keys give a JSON object,
-- otherwise an array of @[key, value]@ pairs.
instance ToJSONKey k => ToJSON1 (H.HashMap k) where
    liftToJSON g _ = case toJSONKey of
        ToJSONKeyText f _ -> Object . KM.fromHashMap . mapKeyVal f g
        ToJSONKeyValue f _
            -> Array . V.fromList . map (toJSONPair f g) . H.toList

    -- liftToEncoding :: forall a. (a -> Encoding) -> ([a] -> Encoding) -> H.HashMap k a -> Encoding
    liftToEncoding g _ = case toJSONKey of
        ToJSONKeyText _ f -> dict f g H.foldrWithKey
        ToJSONKeyValue _ f -> listEncoding (pairEncoding f) . H.toList
          where
            pairEncoding f (a, b) = E.list id [f a, g b]

instance (ToJSON v, ToJSONKey k) => ToJSON (H.HashMap k v) where
    toJSON = toJSON1
    toEncoding = toEncoding1
-------------------------------------------------------------------------------
-- Data.Aeson.KeyMap
-------------------------------------------------------------------------------
-- | A 'KM.KeyMap' is already keyed by JSON object keys, so it encodes as
-- an object with each value mapped through the element encoder.
instance ToJSON1 KM.KeyMap where
    liftToJSON g _ = Object . fmap g
    liftToEncoding g _ = dict E.key g KM.foldrWithKey

instance (ToJSON v) => ToJSON (KM.KeyMap v) where
    {-# SPECIALIZE instance ToJSON Object #-}
    toJSON = toJSON1
    toEncoding = toEncoding1
-------------------------------------------------------------------------------
-- aeson
-------------------------------------------------------------------------------
instance ToJSON Key where
    toJSON = toJSON . Key.toText
    toEncoding = E.key

instance ToJSONKey Key where
    toJSONKey = ToJSONKeyText id E.key

-- | A 'Value' encodes as itself.
instance ToJSON Value where
    toJSON a = a
    toEncoding = E.value

-- | Rendered in Microsoft's legacy @\/Date(...)\/@ string format.
instance ToJSON DotNetTime where
    toJSON = toJSON . dotNetTime
    toEncoding = toEncoding . dotNetTime

-- | Build the @\/Date(<seconds><millis>)\/@ string for a 'DotNetTime'.
dotNetTime :: DotNetTime -> String
dotNetTime (DotNetTime t) = secs ++ formatMillis t ++ ")/"
  where
    secs  = formatTime defaultTimeLocale "/Date(%s" t

-- | First three digits of the sub-second (@%q@, picoseconds) field,
-- i.e. the milliseconds.
formatMillis :: (FormatTime t) => t -> String
formatMillis = take 3 . formatTime defaultTimeLocale "%q"
-------------------------------------------------------------------------------
-- primitive
-------------------------------------------------------------------------------
-- | Primitive arrays round-trip through ordinary lists.
instance ToJSON a => ToJSON (PM.Array a) where
    -- note: we could do better than this if vector exposed the data
    -- constructor in Data.Vector.
    toJSON = toJSON . Exts.toList
    toEncoding = toEncoding . Exts.toList

instance ToJSON a => ToJSON (PM.SmallArray a) where
    toJSON = toJSON . Exts.toList
    toEncoding = toEncoding . Exts.toList

instance (PM.Prim a,ToJSON a) => ToJSON (PM.PrimArray a) where
    toJSON = toJSON . Exts.toList
    toEncoding = toEncoding . Exts.toList
-------------------------------------------------------------------------------
-- time
-------------------------------------------------------------------------------
-- All of the calendar/clock types below are encoded as JSON strings via
-- the textual encoders in "Data.Aeson.Encoding"; 'stringEncoding' (below)
-- reuses the same encoders to build the 'Value' representation, keeping
-- 'toJSON' and 'toEncoding' consistent.
instance ToJSON Day where
    toJSON = stringEncoding . E.day
    toEncoding = E.day

instance ToJSONKey Day where
    toJSONKey = toJSONKeyTextEnc E.day

instance ToJSON Month where
    toJSON = stringEncoding . E.month
    toEncoding = E.month

instance ToJSONKey Month where
    toJSONKey = toJSONKeyTextEnc E.month

instance ToJSON Quarter where
    toJSON = stringEncoding . E.quarter
    toEncoding = E.quarter

instance ToJSONKey Quarter where
    toJSONKey = toJSONKeyTextEnc E.quarter

instance ToJSON TimeOfDay where
    toJSON = stringEncoding . E.timeOfDay
    toEncoding = E.timeOfDay

instance ToJSONKey TimeOfDay where
    toJSONKey = toJSONKeyTextEnc E.timeOfDay

instance ToJSON LocalTime where
    toJSON = stringEncoding . E.localTime
    toEncoding = E.localTime

instance ToJSONKey LocalTime where
    toJSONKey = toJSONKeyTextEnc E.localTime

instance ToJSON ZonedTime where
    toJSON = stringEncoding . E.zonedTime
    toEncoding = E.zonedTime

instance ToJSONKey ZonedTime where
    toJSONKey = toJSONKeyTextEnc E.zonedTime

instance ToJSON UTCTime where
    toJSON = stringEncoding . E.utcTime
    toEncoding = E.utcTime

instance ToJSONKey UTCTime where
    toJSONKey = toJSONKeyTextEnc E.utcTime
-- | Encode something to a JSON string 'Value': run the encoding to bytes,
-- strip the surrounding quotes, and decode.  'T.decodeLatin1' is used on
-- the assumption that the encoders fed to this function emit only
-- ASCII\/Latin-1 bytes (as the date\/time encoders above do).
stringEncoding :: Encoding' Text -> Value
stringEncoding = String
    . T.dropAround (== '"')
    . T.decodeLatin1
    . L.toStrict
    . E.encodingToLazyByteString
{-# INLINE stringEncoding #-}
-- | Encoded as a JSON number of seconds (may lose nothing: both types
-- convert to 'Scientific' via 'realToFrac').
instance ToJSON NominalDiffTime where
    toJSON = Number . realToFrac
    toEncoding = E.scientific . realToFrac

instance ToJSON DiffTime where
    toJSON = Number . realToFrac
    toEncoding = E.scientific . realToFrac

-- | Encoded as number: seconds plus the nanosecond fraction, at
-- nanosecond ('Nano') resolution.
instance ToJSON SystemTime where
    toJSON (MkSystemTime secs nsecs) =
        toJSON (fromIntegral secs + fromIntegral nsecs / 1000000000 :: Nano)
    toEncoding (MkSystemTime secs nsecs) =
        toEncoding (fromIntegral secs + fromIntegral nsecs / 1000000000 :: Nano)

-- | Encoded as an object with @\"months\"@ and @\"time\"@ fields.
instance ToJSON CalendarDiffTime where
    toJSON (CalendarDiffTime m nt) = object
        [ "months" .= m
        , "time" .= nt
        ]
    toEncoding (CalendarDiffTime m nt) = E.pairs
        ("months" .= m <> "time" .= nt)

-- | Encoded as an object with @\"months\"@ and @\"days\"@ fields.
instance ToJSON CalendarDiffDays where
    toJSON (CalendarDiffDays m d) = object
        [ "months" .= m
        , "days" .= d
        ]
    toEncoding (CalendarDiffDays m d) = E.pairs
        ("months" .= m <> "days" .= d)
-- | Encoded as a lowercase day-name string, e.g. @\"monday\"@.
instance ToJSON DayOfWeek where
    toJSON Monday    = "monday"
    toJSON Tuesday   = "tuesday"
    toJSON Wednesday = "wednesday"
    toJSON Thursday  = "thursday"
    toJSON Friday    = "friday"
    toJSON Saturday  = "saturday"
    toJSON Sunday    = "sunday"
    toEncoding = toEncodingDayOfWeek
-- | Direct encoding of a 'DayOfWeek' as a pre-quoted lowercase day name.
-- The literals are known-safe JSON, hence 'E.unsafeToEncoding'.
toEncodingDayOfWeek :: DayOfWeek -> E.Encoding' a
toEncodingDayOfWeek dow = case dow of
    Monday    -> E.unsafeToEncoding "\"monday\""
    Tuesday   -> E.unsafeToEncoding "\"tuesday\""
    Wednesday -> E.unsafeToEncoding "\"wednesday\""
    Thursday  -> E.unsafeToEncoding "\"thursday\""
    Friday    -> E.unsafeToEncoding "\"friday\""
    Saturday  -> E.unsafeToEncoding "\"saturday\""
    Sunday    -> E.unsafeToEncoding "\"sunday\""
-- | Map keys use the same lowercase day-name strings as the 'ToJSON' instance.
instance ToJSONKey DayOfWeek where
    toJSONKey = toJSONKeyTextEnc toEncodingDayOfWeek
-- | Encoded as @\"q1\"@ .. @\"q4\"@.
instance ToJSON QuarterOfYear where
    toJSON Q1 = "q1"
    toJSON Q2 = "q2"
    toJSON Q3 = "q3"
    toJSON Q4 = "q4"
-- | Direct encoding of a 'QuarterOfYear' as a pre-quoted @\"q1\"@..@\"q4\"@
-- literal; known-safe JSON, hence 'E.unsafeToEncoding'.
toEncodingQuarterOfYear :: QuarterOfYear -> E.Encoding' a
toEncodingQuarterOfYear q = case q of
    Q1 -> E.unsafeToEncoding "\"q1\""
    Q2 -> E.unsafeToEncoding "\"q2\""
    Q3 -> E.unsafeToEncoding "\"q3\""
    Q4 -> E.unsafeToEncoding "\"q4\""
-- | Map keys use the same @\"q1\"@..@\"q4\"@ strings as the 'ToJSON' instance.
instance ToJSONKey QuarterOfYear where
    toJSONKey = toJSONKeyTextEnc toEncodingQuarterOfYear
-------------------------------------------------------------------------------
-- base Monoid/Semigroup
-------------------------------------------------------------------------------
-- The monoid/semigroup newtype wrappers are all transparent: each encodes
-- exactly as its wrapped value.  'Monoid.First'/'Monoid.Last' wrap a
-- 'Maybe' and therefore lift through the 'Maybe' instance instead.
instance ToJSON1 Monoid.Dual where
    liftToJSON t _ = t . Monoid.getDual
    liftToEncoding t _ = t . Monoid.getDual

instance ToJSON a => ToJSON (Monoid.Dual a) where
    toJSON = toJSON1
    toEncoding = toEncoding1

instance ToJSON1 Monoid.First where
    liftToJSON t to' = liftToJSON t to' . Monoid.getFirst
    liftToEncoding t to' = liftToEncoding t to' . Monoid.getFirst

instance ToJSON a => ToJSON (Monoid.First a) where
    toJSON = toJSON1
    toEncoding = toEncoding1

instance ToJSON1 Monoid.Last where
    liftToJSON t to' = liftToJSON t to' . Monoid.getLast
    liftToEncoding t to' = liftToEncoding t to' . Monoid.getLast

instance ToJSON a => ToJSON (Monoid.Last a) where
    toJSON = toJSON1
    toEncoding = toEncoding1

instance ToJSON1 Semigroup.Min where
    liftToJSON t _ (Semigroup.Min x) = t x
    liftToEncoding t _ (Semigroup.Min x) = t x

instance ToJSON a => ToJSON (Semigroup.Min a) where
    toJSON = toJSON1
    toEncoding = toEncoding1

instance ToJSON1 Semigroup.Max where
    liftToJSON t _ (Semigroup.Max x) = t x
    liftToEncoding t _ (Semigroup.Max x) = t x

instance ToJSON a => ToJSON (Semigroup.Max a) where
    toJSON = toJSON1
    toEncoding = toEncoding1

instance ToJSON1 Semigroup.First where
    liftToJSON t _ (Semigroup.First x) = t x
    liftToEncoding t _ (Semigroup.First x) = t x

instance ToJSON a => ToJSON (Semigroup.First a) where
    toJSON = toJSON1
    toEncoding = toEncoding1

instance ToJSON1 Semigroup.Last where
    liftToJSON t _ (Semigroup.Last x) = t x
    liftToEncoding t _ (Semigroup.Last x) = t x

instance ToJSON a => ToJSON (Semigroup.Last a) where
    toJSON = toJSON1
    toEncoding = toEncoding1

instance ToJSON1 Semigroup.WrappedMonoid where
    liftToJSON t _ (Semigroup.WrapMonoid x) = t x
    liftToEncoding t _ (Semigroup.WrapMonoid x) = t x

instance ToJSON a => ToJSON (Semigroup.WrappedMonoid a) where
    toJSON = toJSON1
    toEncoding = toEncoding1

#if !MIN_VERSION_base(4,16,0)
-- 'Semigroup.Option' was removed from base in 4.16; like Monoid.First it
-- lifts through the wrapped 'Maybe'.
instance ToJSON1 Semigroup.Option where
    liftToJSON t to' = liftToJSON t to' . Semigroup.getOption
    liftToEncoding t to' = liftToEncoding t to' . Semigroup.getOption

instance ToJSON a => ToJSON (Semigroup.Option a) where
    toJSON = toJSON1
    toEncoding = toEncoding1
#endif
-------------------------------------------------------------------------------
-- data-fix
-------------------------------------------------------------------------------
-- | @since 1.5.3.0
--
-- The fixed point unrolls recursively: @go@ is used as the element
-- encoder for the next layer.
instance ToJSON1 f => ToJSON (F.Fix f) where
    toJSON = go where go (F.Fix f) = liftToJSON go toJSONList f
    toEncoding = go where go (F.Fix f) = liftToEncoding go toEncodingList f

-- | @since 1.5.3.0
instance (ToJSON1 f, Functor f) => ToJSON (F.Mu f) where
    toJSON = F.foldMu (liftToJSON id (listValue id))
    toEncoding = F.foldMu (liftToEncoding id (listEncoding id))

-- | @since 1.5.3.0
instance (ToJSON1 f, Functor f) => ToJSON (F.Nu f) where
    toJSON = F.foldNu (liftToJSON id (listValue id))
    toEncoding = F.foldNu (liftToEncoding id (listEncoding id))
-------------------------------------------------------------------------------
-- strict
-------------------------------------------------------------------------------
-- All of the strict-package instances delegate to the corresponding lazy
-- type via 'S.toLazy', so strict and lazy values encode identically.

-- | @since 1.5.3.0
instance (ToJSON a, ToJSON b) => ToJSON (S.These a b) where
    toJSON = toJSON . S.toLazy
    toEncoding = toEncoding . S.toLazy

-- | @since 1.5.3.0
instance ToJSON2 S.These where
    liftToJSON2 toa toas tob tobs = liftToJSON2 toa toas tob tobs . S.toLazy
    liftToEncoding2 toa toas tob tobs = liftToEncoding2 toa toas tob tobs . S.toLazy

-- | @since 1.5.3.0
instance ToJSON a => ToJSON1 (S.These a) where
    liftToJSON toa tos = liftToJSON toa tos . S.toLazy
    liftToEncoding toa tos = liftToEncoding toa tos . S.toLazy

-- | @since 1.5.3.0
instance (ToJSON a, ToJSON b) => ToJSON (S.Pair a b) where
    toJSON = toJSON . S.toLazy
    toEncoding = toEncoding . S.toLazy

-- | @since 1.5.3.0
instance ToJSON2 S.Pair where
    liftToJSON2 toa toas tob tobs = liftToJSON2 toa toas tob tobs . S.toLazy
    liftToEncoding2 toa toas tob tobs = liftToEncoding2 toa toas tob tobs . S.toLazy

-- | @since 1.5.3.0
instance ToJSON a => ToJSON1 (S.Pair a) where
    liftToJSON toa tos = liftToJSON toa tos . S.toLazy
    liftToEncoding toa tos = liftToEncoding toa tos . S.toLazy

-- | @since 1.5.3.0
instance (ToJSON a, ToJSON b) => ToJSON (S.Either a b) where
    toJSON = toJSON . S.toLazy
    toEncoding = toEncoding . S.toLazy

-- | @since 1.5.3.0
instance ToJSON2 S.Either where
    liftToJSON2 toa toas tob tobs = liftToJSON2 toa toas tob tobs . S.toLazy
    liftToEncoding2 toa toas tob tobs = liftToEncoding2 toa toas tob tobs . S.toLazy

-- | @since 1.5.3.0
instance ToJSON a => ToJSON1 (S.Either a) where
    liftToJSON toa tos = liftToJSON toa tos . S.toLazy
    liftToEncoding toa tos = liftToEncoding toa tos . S.toLazy

-- | @since 1.5.3.0
instance ToJSON a => ToJSON (S.Maybe a) where
    toJSON = toJSON . S.toLazy
    toEncoding = toEncoding . S.toLazy

-- | @since 1.5.3.0
instance ToJSON1 S.Maybe where
    liftToJSON toa tos = liftToJSON toa tos . S.toLazy
    liftToEncoding toa tos = liftToEncoding toa tos . S.toLazy
-------------------------------------------------------------------------------
-- tagged
-------------------------------------------------------------------------------
-- | A 'Proxy' carries no data and encodes as JSON @null@.
instance ToJSON1 Proxy where
    liftToJSON _ _ _ = Null
    liftToEncoding _ _ _ = E.null_

instance ToJSON (Proxy a) where
    toJSON _ = Null
    toEncoding _ = E.null_

-- | 'Tagged' is transparent: the phantom tag is ignored and the payload
-- encodes as itself.
instance ToJSON2 Tagged where
    liftToJSON2 _ _ t _ (Tagged x) = t x
    liftToEncoding2 _ _ t _ (Tagged x) = t x

instance ToJSON1 (Tagged a) where
    liftToJSON t _ (Tagged x) = t x
    liftToEncoding t _ (Tagged x) = t x

instance ToJSON b => ToJSON (Tagged a b) where
    toJSON = toJSON1
    toEncoding = toEncoding1

instance ToJSONKey b => ToJSONKey (Tagged a b) where
    toJSONKey = contramapToJSONKeyFunction unTagged toJSONKey
    toJSONKeyList = contramapToJSONKeyFunction (fmap unTagged) toJSONKeyList
-------------------------------------------------------------------------------
-- these
-------------------------------------------------------------------------------
-- | @since 1.5.1.0
--
-- Encoded as an object with a @\"This\"@ field, a @\"That\"@ field, or both.
instance (ToJSON a, ToJSON b) => ToJSON (These a b) where
    toJSON (This a)    = object [ "This" .= a ]
    toJSON (That b)    = object [ "That" .= b ]
    toJSON (These a b) = object [ "This" .= a, "That" .= b ]
    toEncoding (This a)    = E.pairs $ "This" .= a
    toEncoding (That b)    = E.pairs $ "That" .= b
    toEncoding (These a b) = E.pairs $ "This" .= a <> "That" .= b

-- | @since 1.5.1.0
instance ToJSON2 These where
    liftToJSON2 toa _ _tob _ (This a)    = object [ "This" .= toa a ]
    liftToJSON2 _toa _ tob _ (That b)    = object [ "That" .= tob b ]
    liftToJSON2 toa _ tob _ (These a b)  = object [ "This" .= toa a, "That" .= tob b ]
    liftToEncoding2 toa _ _tob _ (This a)    = E.pairs $ E.pair "This" (toa a)
    liftToEncoding2 _toa _ tob _ (That b)    = E.pairs $ E.pair "That" (tob b)
    liftToEncoding2 toa _ tob _ (These a b)  = E.pairs $ E.pair "This" (toa a) <> E.pair "That" (tob b)

-- | @since 1.5.1.0
instance ToJSON a => ToJSON1 (These a) where
    liftToJSON _tob _ (This a)    = object [ "This" .= a ]
    liftToJSON tob _ (That b)     = object [ "That" .= tob b ]
    liftToJSON tob _ (These a b)  = object [ "This" .= a, "That" .= tob b ]
    liftToEncoding _tob _ (This a)    = E.pairs $ "This" .= a
    liftToEncoding tob _ (That b)     = E.pairs $ E.pair "That" (tob b)
    liftToEncoding tob _ (These a b)  = E.pairs $ "This" .= a <> E.pair "That" (tob b)

-- | @since 1.5.1.0
--
-- Same object shape as 'These', with both sides lifted through their
-- 'ToJSON1' instances.
instance (ToJSON1 f, ToJSON1 g) => ToJSON1 (These1 f g) where
    liftToJSON tx tl (This1 a)    = object [ "This" .= liftToJSON tx tl a ]
    liftToJSON tx tl (That1 b)    = object [ "That" .= liftToJSON tx tl b ]
    liftToJSON tx tl (These1 a b) = object [ "This" .= liftToJSON tx tl a, "That" .= liftToJSON tx tl b ]
    liftToEncoding tx tl (This1 a) = E.pairs $ E.pair "This" (liftToEncoding tx tl a)
    liftToEncoding tx tl (That1 b) = E.pairs $ E.pair "That" (liftToEncoding tx tl b)
    liftToEncoding tx tl (These1 a b) = E.pairs $
        pair "This" (liftToEncoding tx tl a) `mappend`
        pair "That" (liftToEncoding tx tl b)

-- | @since 1.5.1.0
instance (ToJSON1 f, ToJSON1 g, ToJSON a) => ToJSON (These1 f g a) where
    toJSON = toJSON1
    toEncoding = toEncoding1
-------------------------------------------------------------------------------
-- Instances for converting to map keys
-------------------------------------------------------------------------------
-- Small tuples fall back to the default (value-encoded) key method.
instance (ToJSON a, ToJSON b) => ToJSONKey (a,b)
instance (ToJSON a, ToJSON b, ToJSON c) => ToJSONKey (a,b,c)
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d) => ToJSONKey (a,b,c,d)

-- | A 'Char' key is a one-character text; a 'String' key packs directly.
instance ToJSONKey Char where
    toJSONKey = toJSONKeyText T.singleton
    toJSONKeyList = toJSONKeyText T.pack

instance (ToJSONKey a, ToJSON a) => ToJSONKey [a] where
    toJSONKey = toJSONKeyList
-------------------------------------------------------------------------------
-- Tuple instances
-------------------------------------------------------------------------------
instance ToJSON2 (,) where
liftToJSON2 toA _ toB _ (a, b) = Array $ V.create $ do
mv <- VM.unsafeNew 2
VM.unsafeWrite mv 0 (toA a)
VM.unsafeWrite mv 1 (toB b)
return mv
liftToEncoding2 toA _ toB _ (a, b) = E.list id [toA a, toB b]
instance (ToJSON a) => ToJSON1 ((,) a) where
liftToJSON = liftToJSON2 toJSON toJSONList
liftToEncoding = liftToEncoding2 toEncoding toEncodingList
instance (ToJSON a, ToJSON b) => ToJSON (a, b) where
toJSON = toJSON2
toEncoding = toEncoding2
instance (ToJSON a) => ToJSON2 ((,,) a) where
liftToJSON2 toB _ toC _ (a, b, c) = Array $ V.create $ do
mv <- VM.unsafeNew 3
VM.unsafeWrite mv 0 (toJSON a)
VM.unsafeWrite mv 1 (toB b)
VM.unsafeWrite mv 2 (toC c)
return mv
liftToEncoding2 toB _ toC _ (a, b, c) = E.list id
[ toEncoding a
, toB b
, toC c
]
instance (ToJSON a, ToJSON b) => ToJSON1 ((,,) a b) where
liftToJSON = liftToJSON2 toJSON toJSONList
liftToEncoding = liftToEncoding2 toEncoding toEncodingList
instance (ToJSON a, ToJSON b, ToJSON c) => ToJSON (a, b, c) where
toJSON = toJSON2
toEncoding = toEncoding2
instance (ToJSON a, ToJSON b) => ToJSON2 ((,,,) a b) where
liftToJSON2 toC _ toD _ (a, b, c, d) = Array $ V.create $ do
mv <- VM.unsafeNew 4
VM.unsafeWrite mv 0 (toJSON a)
VM.unsafeWrite mv 1 (toJSON b)
VM.unsafeWrite mv 2 (toC c)
VM.unsafeWrite mv 3 (toD d)
return mv
liftToEncoding2 toC _ toD _ (a, b, c, d) = E.list id
[ toEncoding a
, toEncoding b
, toC c
, toD d
]
instance (ToJSON a, ToJSON b, ToJSON c) => ToJSON1 ((,,,) a b c) where
liftToJSON = liftToJSON2 toJSON toJSONList
liftToEncoding = liftToEncoding2 toEncoding toEncodingList
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d) => ToJSON (a, b, c, d) where
toJSON = toJSON2
toEncoding = toEncoding2
instance (ToJSON a, ToJSON b, ToJSON c) => ToJSON2 ((,,,,) a b c) where
liftToJSON2 toD _ toE _ (a, b, c, d, e) = Array $ V.create $ do
mv <- VM.unsafeNew 5
VM.unsafeWrite mv 0 (toJSON a)
VM.unsafeWrite mv 1 (toJSON b)
VM.unsafeWrite mv 2 (toJSON c)
VM.unsafeWrite mv 3 (toD d)
VM.unsafeWrite mv 4 (toE e)
return mv
liftToEncoding2 toD _ toE _ (a, b, c, d, e) = E.list id
[ toEncoding a
, toEncoding b
, toEncoding c
, toD d
, toE e
]
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d) => ToJSON1 ((,,,,) a b c d) where
liftToJSON = liftToJSON2 toJSON toJSONList
liftToEncoding = liftToEncoding2 toEncoding toEncodingList
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e) => ToJSON (a, b, c, d, e) where
toJSON = toJSON2
toEncoding = toEncoding2
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d) => ToJSON2 ((,,,,,) a b c d) where
liftToJSON2 toE _ toF _ (a, b, c, d, e, f) = Array $ V.create $ do
mv <- VM.unsafeNew 6
VM.unsafeWrite mv 0 (toJSON a)
VM.unsafeWrite mv 1 (toJSON b)
VM.unsafeWrite mv 2 (toJSON c)
VM.unsafeWrite mv 3 (toJSON d)
VM.unsafeWrite mv 4 (toE e)
VM.unsafeWrite mv 5 (toF f)
return mv
liftToEncoding2 toE _ toF _ (a, b, c, d, e, f) = E.list id
[ toEncoding a
, toEncoding b
, toEncoding c
, toEncoding d
, toE e
, toF f
]
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e) => ToJSON1 ((,,,,,) a b c d e) where
liftToJSON = liftToJSON2 toJSON toJSONList
liftToEncoding = liftToEncoding2 toEncoding toEncodingList
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f) => ToJSON (a, b, c, d, e, f) where
toJSON = toJSON2
toEncoding = toEncoding2
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e) => ToJSON2 ((,,,,,,) a b c d e) where
liftToJSON2 toF _ toG _ (a, b, c, d, e, f, g) = Array $ V.create $ do
mv <- VM.unsafeNew 7
VM.unsafeWrite mv 0 (toJSON a)
VM.unsafeWrite mv 1 (toJSON b)
VM.unsafeWrite mv 2 (toJSON c)
VM.unsafeWrite mv 3 (toJSON d)
VM.unsafeWrite mv 4 (toJSON e)
VM.unsafeWrite mv 5 (toF f)
VM.unsafeWrite mv 6 (toG g)
return mv
liftToEncoding2 toF _ toG _ (a, b, c, d, e, f, g) = E.list id
[ toEncoding a
, toEncoding b
, toEncoding c
, toEncoding d
, toEncoding e
, toF f
, toG g
]
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f) => ToJSON1 ((,,,,,,) a b c d e f) where
liftToJSON = liftToJSON2 toJSON toJSONList
liftToEncoding = liftToEncoding2 toEncoding toEncodingList
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g) => ToJSON (a, b, c, d, e, f, g) where
toJSON = toJSON2
toEncoding = toEncoding2
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f) => ToJSON2 ((,,,,,,,) a b c d e f) where
liftToJSON2 toG _ toH _ (a, b, c, d, e, f, g, h) = Array $ V.create $ do
mv <- VM.unsafeNew 8
VM.unsafeWrite mv 0 (toJSON a)
VM.unsafeWrite mv 1 (toJSON b)
VM.unsafeWrite mv 2 (toJSON c)
VM.unsafeWrite mv 3 (toJSON d)
VM.unsafeWrite mv 4 (toJSON e)
VM.unsafeWrite mv 5 (toJSON f)
VM.unsafeWrite mv 6 (toG g)
VM.unsafeWrite mv 7 (toH h)
return mv
liftToEncoding2 toG _ toH _ (a, b, c, d, e, f, g, h) = E.list id
[ toEncoding a
, toEncoding b
, toEncoding c
, toEncoding d
, toEncoding e
, toEncoding f
, toG g
, toH h
]
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g) => ToJSON1 ((,,,,,,,) a b c d e f g) where
liftToJSON = liftToJSON2 toJSON toJSONList
liftToEncoding = liftToEncoding2 toEncoding toEncodingList
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h) => ToJSON (a, b, c, d, e, f, g, h) where
toJSON = toJSON2
toEncoding = toEncoding2
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g) => ToJSON2 ((,,,,,,,,) a b c d e f g) where
liftToJSON2 toH _ toI _ (a, b, c, d, e, f, g, h, i) = Array $ V.create $ do
mv <- VM.unsafeNew 9
VM.unsafeWrite mv 0 (toJSON a)
VM.unsafeWrite mv 1 (toJSON b)
VM.unsafeWrite mv 2 (toJSON c)
VM.unsafeWrite mv 3 (toJSON d)
VM.unsafeWrite mv 4 (toJSON e)
VM.unsafeWrite mv 5 (toJSON f)
VM.unsafeWrite mv 6 (toJSON g)
VM.unsafeWrite mv 7 (toH h)
VM.unsafeWrite mv 8 (toI i)
return mv
liftToEncoding2 toH _ toI _ (a, b, c, d, e, f, g, h, i) = E.list id
[ toEncoding a
, toEncoding b
, toEncoding c
, toEncoding d
, toEncoding e
, toEncoding f
, toEncoding g
, toH h
, toI i
]
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h) => ToJSON1 ((,,,,,,,,) a b c d e f g h) where
liftToJSON = liftToJSON2 toJSON toJSONList
liftToEncoding = liftToEncoding2 toEncoding toEncodingList
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i) => ToJSON (a, b, c, d, e, f, g, h, i) where
toJSON = toJSON2
toEncoding = toEncoding2
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h) => ToJSON2 ((,,,,,,,,,) a b c d e f g h) where
liftToJSON2 toI _ toJ _ (a, b, c, d, e, f, g, h, i, j) = Array $ V.create $ do
mv <- VM.unsafeNew 10
VM.unsafeWrite mv 0 (toJSON a)
VM.unsafeWrite mv 1 (toJSON b)
VM.unsafeWrite mv 2 (toJSON c)
VM.unsafeWrite mv 3 (toJSON d)
VM.unsafeWrite mv 4 (toJSON e)
VM.unsafeWrite mv 5 (toJSON f)
VM.unsafeWrite mv 6 (toJSON g)
VM.unsafeWrite mv 7 (toJSON h)
VM.unsafeWrite mv 8 (toI i)
VM.unsafeWrite mv 9 (toJ j)
return mv
liftToEncoding2 toI _ toJ _ (a, b, c, d, e, f, g, h, i, j) = E.list id
[ toEncoding a
, toEncoding b
, toEncoding c
, toEncoding d
, toEncoding e
, toEncoding f
, toEncoding g
, toEncoding h
, toI i
, toJ j
]
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i) => ToJSON1 ((,,,,,,,,,) a b c d e f g h i) where
liftToJSON = liftToJSON2 toJSON toJSONList
liftToEncoding = liftToEncoding2 toEncoding toEncodingList
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i, ToJSON j) => ToJSON (a, b, c, d, e, f, g, h, i, j) where
toJSON = toJSON2
toEncoding = toEncoding2
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i) => ToJSON2 ((,,,,,,,,,,) a b c d e f g h i) where
liftToJSON2 toJ _ toK _ (a, b, c, d, e, f, g, h, i, j, k) = Array $ V.create $ do
mv <- VM.unsafeNew 11
VM.unsafeWrite mv 0 (toJSON a)
VM.unsafeWrite mv 1 (toJSON b)
VM.unsafeWrite mv 2 (toJSON c)
VM.unsafeWrite mv 3 (toJSON d)
VM.unsafeWrite mv 4 (toJSON e)
VM.unsafeWrite mv 5 (toJSON f)
VM.unsafeWrite mv 6 (toJSON g)
VM.unsafeWrite mv 7 (toJSON h)
VM.unsafeWrite mv 8 (toJSON i)
VM.unsafeWrite mv 9 (toJ j)
VM.unsafeWrite mv 10 (toK k)
return mv
liftToEncoding2 toJ _ toK _ (a, b, c, d, e, f, g, h, i, j, k) = E.list id
[ toEncoding a
, toEncoding b
, toEncoding c
, toEncoding d
, toEncoding e
, toEncoding f
, toEncoding g
, toEncoding h
, toEncoding i
, toJ j
, toK k
]
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i, ToJSON j) => ToJSON1 ((,,,,,,,,,,) a b c d e f g h i j) where
liftToJSON = liftToJSON2 toJSON toJSONList
liftToEncoding = liftToEncoding2 toEncoding toEncodingList
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i, ToJSON j, ToJSON k) => ToJSON (a, b, c, d, e, f, g, h, i, j, k) where
toJSON = toJSON2
toEncoding = toEncoding2
-- | Serialize a 12-tuple as a JSON array.  The first ten slots use their
-- 'ToJSON' instances; the last two use the supplied lifted serializers.
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i, ToJSON j) => ToJSON2 ((,,,,,,,,,,,) a b c d e f g h i j) where
    liftToJSON2 toK _ toL _ (a, b, c, d, e, f, g, h, i, j, k, l) =
        -- Build the 12-element vector directly from the serialized slots.
        Array $ V.fromListN 12 $
            [ toJSON a, toJSON b, toJSON c, toJSON d, toJSON e
            , toJSON f, toJSON g, toJSON h, toJSON i, toJSON j
            ] ++ [toK k, toL l]
    liftToEncoding2 toK _ toL _ (a, b, c, d, e, f, g, h, i, j, k, l) =
        E.list id $
            [ toEncoding a, toEncoding b, toEncoding c, toEncoding d
            , toEncoding e, toEncoding f, toEncoding g, toEncoding h
            , toEncoding i, toEncoding j
            ] ++ [toK k, toL l]
-- | 'ToJSON1' for 12-tuples, obtained by fixing the first lifted slot of
-- the 'ToJSON2' instance with the concrete serializers.
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i, ToJSON j, ToJSON k) => ToJSON1 ((,,,,,,,,,,,) a b c d e f g h i j k) where
    liftToJSON = liftToJSON2 toJSON toJSONList
    liftToEncoding = liftToEncoding2 toEncoding toEncodingList
-- | Plain 'ToJSON' for 12-tuples, delegating to the lifted instances.
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i, ToJSON j, ToJSON k, ToJSON l) => ToJSON (a, b, c, d, e, f, g, h, i, j, k, l) where
    toJSON = toJSON2
    toEncoding = toEncoding2
-- | Serialize a 13-tuple as a JSON array.  The first eleven slots use their
-- 'ToJSON' instances; the last two use the supplied lifted serializers.
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i, ToJSON j, ToJSON k) => ToJSON2 ((,,,,,,,,,,,,) a b c d e f g h i j k) where
    liftToJSON2 toL _ toM _ (a, b, c, d, e, f, g, h, i, j, k, l, m) =
        -- Build the 13-element vector directly from the serialized slots.
        Array $ V.fromListN 13 $
            [ toJSON a, toJSON b, toJSON c, toJSON d, toJSON e
            , toJSON f, toJSON g, toJSON h, toJSON i, toJSON j
            , toJSON k
            ] ++ [toL l, toM m]
    liftToEncoding2 toL _ toM _ (a, b, c, d, e, f, g, h, i, j, k, l, m) =
        E.list id $
            [ toEncoding a, toEncoding b, toEncoding c, toEncoding d
            , toEncoding e, toEncoding f, toEncoding g, toEncoding h
            , toEncoding i, toEncoding j, toEncoding k
            ] ++ [toL l, toM m]
-- | 'ToJSON1' for 13-tuples, obtained by fixing the first lifted slot of
-- the 'ToJSON2' instance with the concrete serializers.
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i, ToJSON j, ToJSON k, ToJSON l) => ToJSON1 ((,,,,,,,,,,,,) a b c d e f g h i j k l) where
    liftToJSON = liftToJSON2 toJSON toJSONList
    liftToEncoding = liftToEncoding2 toEncoding toEncodingList
-- | Plain 'ToJSON' for 13-tuples, delegating to the lifted instances.
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i, ToJSON j, ToJSON k, ToJSON l, ToJSON m) => ToJSON (a, b, c, d, e, f, g, h, i, j, k, l, m) where
    toJSON = toJSON2
    toEncoding = toEncoding2
-- | Serialize a 14-tuple as a JSON array.  The first twelve slots use their
-- 'ToJSON' instances; the last two use the supplied lifted serializers.
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i, ToJSON j, ToJSON k, ToJSON l) => ToJSON2 ((,,,,,,,,,,,,,) a b c d e f g h i j k l) where
    liftToJSON2 toM _ toN _ (a, b, c, d, e, f, g, h, i, j, k, l, m, n) =
        -- Build the 14-element vector directly from the serialized slots.
        Array $ V.fromListN 14 $
            [ toJSON a, toJSON b, toJSON c, toJSON d, toJSON e
            , toJSON f, toJSON g, toJSON h, toJSON i, toJSON j
            , toJSON k, toJSON l
            ] ++ [toM m, toN n]
    liftToEncoding2 toM _ toN _ (a, b, c, d, e, f, g, h, i, j, k, l, m, n) =
        E.list id $
            [ toEncoding a, toEncoding b, toEncoding c, toEncoding d
            , toEncoding e, toEncoding f, toEncoding g, toEncoding h
            , toEncoding i, toEncoding j, toEncoding k, toEncoding l
            ] ++ [toM m, toN n]
-- | 'ToJSON1' for 14-tuples, obtained by fixing the first lifted slot of
-- the 'ToJSON2' instance with the concrete serializers.
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i, ToJSON j, ToJSON k, ToJSON l, ToJSON m) => ToJSON1 ((,,,,,,,,,,,,,) a b c d e f g h i j k l m) where
    liftToJSON = liftToJSON2 toJSON toJSONList
    liftToEncoding = liftToEncoding2 toEncoding toEncodingList
-- | Plain 'ToJSON' for 14-tuples, delegating to the lifted instances.
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i, ToJSON j, ToJSON k, ToJSON l, ToJSON m, ToJSON n) => ToJSON (a, b, c, d, e, f, g, h, i, j, k, l, m, n) where
    toJSON = toJSON2
    toEncoding = toEncoding2
-- | Serialize a 15-tuple as a JSON array.  The first thirteen slots use
-- their 'ToJSON' instances; the last two use the supplied lifted serializers.
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i, ToJSON j, ToJSON k, ToJSON l, ToJSON m) => ToJSON2 ((,,,,,,,,,,,,,,) a b c d e f g h i j k l m) where
    liftToJSON2 toN _ toO _ (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o) =
        -- Build the 15-element vector directly from the serialized slots.
        Array $ V.fromListN 15 $
            [ toJSON a, toJSON b, toJSON c, toJSON d, toJSON e
            , toJSON f, toJSON g, toJSON h, toJSON i, toJSON j
            , toJSON k, toJSON l, toJSON m
            ] ++ [toN n, toO o]
    liftToEncoding2 toN _ toO _ (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o) =
        E.list id $
            [ toEncoding a, toEncoding b, toEncoding c, toEncoding d
            , toEncoding e, toEncoding f, toEncoding g, toEncoding h
            , toEncoding i, toEncoding j, toEncoding k, toEncoding l
            , toEncoding m
            ] ++ [toN n, toO o]
-- | 'ToJSON1' for 15-tuples, obtained by fixing the first lifted slot of
-- the 'ToJSON2' instance with the concrete serializers.
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i, ToJSON j, ToJSON k, ToJSON l, ToJSON m, ToJSON n) => ToJSON1 ((,,,,,,,,,,,,,,) a b c d e f g h i j k l m n) where
    liftToJSON = liftToJSON2 toJSON toJSONList
    liftToEncoding = liftToEncoding2 toEncoding toEncodingList
-- | Plain 'ToJSON' for 15-tuples, delegating to the lifted instances.
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i, ToJSON j, ToJSON k, ToJSON l, ToJSON m, ToJSON n, ToJSON o) => ToJSON (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o) where
    toJSON = toJSON2
    toEncoding = toEncoding2
--------------------------------------------------------------------------------
-- | Wrap a list of pairs as an object.  The functional dependency lets the
-- pair representation be inferred from the target encoding type.
class Monoid pairs => FromPairs enc pairs | enc -> pairs where
  fromPairs :: pairs -> enc
-- | For 'Encoding', the pairs are a 'Series' and wrapping is direct.
instance (a ~ Value) => FromPairs (Encoding' a) Series where
  fromPairs = E.pairs
  {-# INLINE fromPairs #-}
-- | For 'Value', the pairs are a difference list, realised via 'object'.
instance FromPairs Value (DList Pair) where
  fromPairs = object . toList
  {-# INLINE fromPairs #-}
-- | Like 'KeyValue' but the value is already converted to JSON
-- ('Value' or 'Encoding'), and the result actually represents lists of pairs
-- so it can be readily concatenated.
class Monoid kv => KeyValuePair v kv where
  pair :: Key -> v -> kv
-- | Pairs destined for a 'Value' are accumulated as a difference list,
-- giving cheap concatenation.
instance (v ~ Value) => KeyValuePair v (DList Pair) where
  pair k v = DList.singleton (k .= v)
  {-# INLINE pair #-}
-- | Pairs destined for an 'Encoding' are accumulated as a 'Series'.
instance (e ~ Encoding) => KeyValuePair e Series where
  pair = E.pair
  {-# INLINE pair #-}
| dmjio/aeson | src/Data/Aeson/Types/ToJSON.hs | bsd-3-clause | 97,210 | 0 | 16 | 22,851 | 26,477 | 13,999 | 12,478 | -1 | -1 |
{-# LANGUAGE
ConstraintKinds
, FlexibleInstances
, FlexibleContexts
, GeneralizedNewtypeDeriving
, MultiParamTypeClasses
, ScopedTypeVariables
, TemplateHaskell
, TypeFamilies
, TypeOperators
, TypeSynonymInstances
, UndecidableInstances
#-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.UI.Toy.Button
-- Copyright : (c) 2012 Michael Sloan
-- License : BSD-style (see the LICENSE file)
-- Maintainer : mgsloan@gmail.com
--
-- Simple button UI element.
--
--------------------------------------------------------------------------------
module Graphics.UI.Toy.Button
( ButtonState(..), Button(..)
-- * Lenses
, buttonState, buttonHit, buttonDiagram
-- * Mutation
, clearButtonHit
-- * Construction
, mkButton
) where
import Control.Lens
import Data.AffineSpace.Point (Point(P))
import Diagrams.Prelude hiding (view)
import Diagrams.Lens
import Graphics.UI.Toy
import Graphics.UI.Toy.Diagrams
-- | Interaction state of a 'Button', as driven by mouse events.
data ButtonState
  = NormalState -- ^ Mouse is neither hovering over nor pressing the button.
  | HoverState  -- ^ Mouse is hovering over the button.
  | PressState  -- ^ The mouse button is held down inside the button.
  deriving (Eq, Ord, Enum, Bounded, Read, Show)
-- | A button stores the state necessary to know if the button is currently
-- being hovered over / pressed ('_buttonState'), and if it was hit
-- ('_buttonHit').  The semantics of '_buttonHit' are up to the user, as it's
-- up to the user to call 'clearButtonHit' or otherwise set its value to
-- @False@.
--
-- In order to draw the button, and figure out when mouse-clicks are inside
-- it, the function '_buttonDiagram' is used.  It draws the button based on
-- the current '_buttonState'.
data Button b v = Button
  { _buttonState :: ButtonState -- ^ Whether the mouse is hovering / pressing.
  , _buttonHit :: Bool -- ^ Whether the button was hit.
  , _buttonDiagram :: Button b v -> Diagram b v
  -- ^ Draw button based on the state.
  }
-- The vector space of a button is that of its diagram.
type instance V (Button b v) = v
-- Generates the lenses 'buttonState', 'buttonHit', and 'buttonDiagram'.
$(makeLenses ''Button)
-- | Build a button in its initial state (not hovered, not pressed, not hit)
-- from the function used to draw it.
mkButton :: (Button b v -> Diagram b v) -> Button b v
mkButton draw = Button
  { _buttonState = NormalState
  , _buttonHit = False
  , _buttonDiagram = draw
  }
-- | Reset the '_buttonHit' flag to @False@, leaving all other fields alone.
clearButtonHit :: Button b v -> Button b v
clearButtonHit b = b { _buttonHit = False }
-- | Mouse handling: a small state machine over 'ButtonState'.  A press
-- inside the button enters 'PressState'; releasing inside while pressed
-- returns to 'HoverState' and records a hit; moving outside resets.
instance ( Wrapped' v (MousePos ib)
         , HasLinearMap v, InnerSpace v, OrderedField (Scalar v) )
      => Interactive ib (Button b v) where
  mouse m i b = return $ transition b
   where
    -- Whether the current mouse position lies inside the button's diagram.
    ci = clickInside b . P . view unwrapped' $ mousePos i
    -- Keyed on (inside?, mouse event, current state).  Event @Just (d, 0)@
    -- is left-button with @d@ = pressed-vs-released.
    transition = case (ci, m, b ^. buttonState) of
      (True, Just (True, 0), _) -> buttonState .~ PressState
      (True, Just (False, 0), PressState) -> (buttonState .~ HoverState)
                                           . (buttonHit .~ True)
      -- Still held: stay pressed while the pointer remains inside.
      (True, _, PressState) -> id
      (True, _, _) -> buttonState .~ HoverState
      (False, _, _) -> buttonState .~ NormalState
-- | A button renders itself by applying its own drawing function to itself.
instance Diagrammable b v Any (Button b v) where
  diagram b = _buttonDiagram b b
-- | The envelope of a button is the envelope of its rendered diagram.
instance (InnerSpace v, HasLinearMap v, OrderedField (Scalar v))
      => Enveloped (Button b v) where
  getEnvelope b = getEnvelope (diagram b :: Diagram b v)
-- | Click-testing delegates to the rendered diagram.
-- (Fixed: the last line had dataset-metadata residue fused onto it,
-- which is not valid Haskell.)
instance (InnerSpace v, HasLinearMap v, OrderedField (Scalar v))
      => Clickable (Button b v) where
  clickInside b = clickInside (diagram b :: Diagram b v)
-- vim:sw=2:ts=2:expandtab:autoindent
module Main where
-- example of generating three test cases of 6 integer variables
-- l1, u1, i, l2, u2, j sush that l1 < i <= u1 and l2 < j <= u2.
import Math.SMT.Yices.Parser
import Math.SMT.Yices.Syntax
import Math.SMT.Yices.Pipe
import Data.List
import Control.Monad
-- import Random
-- | Absolute path to the yices executable.
-- NOTE(review): machine-specific; consider locating yices via $PATH or an
-- environment variable instead of hard-coding a user directory.
yicesPath = "/Users/robertwhite/Projects/yices-1.0.40/bin/yices" -- your yices path
--main_test =
-- do yp@(Just hin, Just hout, Nothing, p) <- createYicesPipe yicesPath []
-- runCmdsY yp (defs ++ ctrs)
-- -- gr <- getStdGen
-- -- let (rn,gr') = next gr
-- Sat ss <- checkY yp
-- print (head ss)
-- runCmdsY yp [ASSERT_P (NOT $ ss!!0) Nothing]
-- Sat ss <- checkY yp
-- print ss
-- runCmdsY yp [ASSERT_P (NOT $ ss!!0) Nothing]
-- Sat ss <- checkY yp
-- print ss
-- exitY yp
-- --return ss
--defs = map (\x -> DEFINE (x,int) Nothing) ["l1","u1","i","l2","u2","j"]
--ctrs = map ASSERT [ l1:<u1, l1:<=i, i:<=u1, l2:<u2, l2:<=j, j:<=u2 ]
-- -- ++ map (\e -> ASSERT_P e Nothing) [ i:<j, j:<i ]
-- where
-- l1 = VarE "l1"
-- u1 = VarE "u1"
-- i = VarE "i"
-- l2 = VarE "l2"
-- u2 = VarE "u2"
-- j = VarE "j"
-- Shorthands for the yices base types and the boolean literals.
int = VarT "int"
nat = VarT "nat"
bool = VarT "bool"
real = VarT "real"
true = LitB True
false = LitB False
--test =
-- do yp@(Just hin, Just hout, Nothing, p) <- createYicesPipe yicesPath []
-- runCmdsY yp (defs' ++ ctrs' )
-- -- gr <- getStdGen
-- -- let (rn,gr') = next gr
-- Sat ss <- checkY yp
-- runCmdsY yp (mymax ++ [MAXSAT])
-- Sat ss <- checkY yp
-- return (obtain_bool_value vlist' ss)
--defs' = map (\x -> DEFINE (x,bool) Nothing) ["l1","l2","l3","l4","l5","l6"]
--vlist' = map (\x -> VarE x) ["l1","l2","l3"]
--l1 = VarE "l1"
--l2 = VarE "l2"
--l3 = VarE "l3"
--ctrs' = map ASSERT [ OR vlist']
-- ++ map (\e -> ASSERT_P e Nothing) [l1:=false]
-- ++ [ASSERT (FORALL [("l1",bool), ("l2", bool)] (OR[AND[l1, l2], l3]))]
----ASSERT_P ExpY (Maybe Integer) :
--mymax = [ASSERT_P (l2:=true) (Just 8)]
-- | Look up the boolean assigned to variable @x@ in a yices model, given as
-- a list of equations of the form @VarE name := LitB b@.  Returns 'Nothing'
-- when the variable does not occur.
-- Fixed: the original used 'head'/'tail' plus irrefutable @let@ patterns,
-- so any model entry not of the shape @VarE _ := LitB _@ (or a non-'VarE'
-- query) crashed at runtime; such entries are now skipped.
obtain_bvalue :: ExpY -> [ExpY] -> Maybe Bool
obtain_bvalue (VarE xname) assigns = go assigns
  where
    go [] = Nothing
    go ((VarE vname := LitB b) : rest)
      | xname == vname = Just b
      | otherwise = go rest
    go (_ : rest) = go rest
obtain_bvalue _ _ = Nothing
-- | Look up the boolean value of each variable in the given model.
obtain_bool_value vars valuelist =
  [ obtain_bvalue var valuelist | var <- vars ]
-- Preference tables: each entry maps a person to their ranked partner list
-- (best first).  mt = men's preferences, wt = women's preferences.
mt = [(1,[2,1,3]), (2, [3,2,1]), (3,[1,3,2])]
wt = [(1,[1,2,3]),(2,[3,2,1]), (3,[1,3,2])]
-- | Generate @count@ names by appending the indices 1..count to a prefix,
-- e.g. @get_name "m" 3 == ["m1","m2","m3"]@.
get_name prefix count = [ prefix ++ show k | k <- [1 .. count] ]
-- Problem size: three men and three women.
n = 3
-- string of integer basically :)
-- Variable-name stems "m1".."m3" and "w1".."w3".
m = get_name "m" n
w = get_name "w" n
-- The person identifiers (the keys of the preference tables).
men = map fst mt
women = map fst wt
-- | Define one boolean yices variable per (man, woman) pair, named e.g.
-- @m1w2@ (truth meaning "this pair is engaged").
-- Cleaned up: @concat (map ...)@ replaced by a comprehension, and the
-- no-op @++ ""@ in the name construction removed.
defs :: [CmdY]
defs = [ DEFINE (mi ++ wj, bool) Nothing | mi <- m, wj <- w ]
-- | The pairing variables grouped per man: row i lists man i's variable
-- for every woman, in woman order.
var_list_m :: [[ExpY]]
var_list_m = [ [ VarE (mi ++ wj) | wj <- w ] | mi <- m ]
-- | The same variables grouped per woman: column j of 'var_list_m'.
-- This is exactly the matrix transpose; the original recomputed each
-- column with explicit @!!@ indexing over @[1..n]@.
var_list_w :: [[ExpY]]
var_list_w = transpose var_list_m
-- | All n*n variables flattened (woman-major order, matching the indexing
-- used when reading the model back).
var_all_list = concat var_list_w
-- | Mutual-exclusion assertions: @x@ may not be true together with any
-- element of the given list.
differ x others = [ ASSERT (NOT (AND [x, y])) | y <- others ]
-- | Pairwise mutual-exclusion constraints over a whole list: no two
-- distinct variables in it may be true at the same time.
ctr_unique (v : vs) = differ v vs ++ ctr_unique vs
ctr_unique _ = []
-- Each person is matched with at most one partner (pairwise exclusion per
-- row and per column) ...  (idiom: @concat (map f)@ -> 'concatMap')
unique_engate = concatMap ctr_unique var_list_m ++ concatMap ctr_unique var_list_w
-- ... and with at least one partner: some variable in every row/column holds.
must_engate = map (ASSERT . OR) (var_list_m ++ var_list_w)
-- the maxsat part
--mymax = [ASSERT_P (l2:=true) (Just 8)]
-- for men
-- | Encode ranked preferences as weighted (soft) assertions for MAXSAT:
-- for each person, their p-th choice variable is asserted with weight
-- n, n-1, ..., 1, so higher-ranked partners are preferred.
encode_max :: [[ExpY]] -> [(Integer, [Integer])] -> [[CmdY]]
encode_max varlist preflist =
  -- f: soft-assert that person `who` is paired with partner `p`, weight `w`.
  let f vl who (p, w) = ASSERT_P ((vl!!(who - 1)) !! (p-1)) (Just w) in
  -- Descending weights n..1: the first preference weighs the most.
  let wei = reverse (map fromIntegral [1..n]) in
  let encode vl (who, plist) = (map (f vl (fromIntegral who)) (zip (map fromIntegral plist) wei)) in
  map (encode varlist) preflist
-- Soft preference constraints for the men only (women's side left disabled).
mymax = concat ((encode_max var_list_m mt))
-- ++ (encode_max var_list_w wt))
-- | Run yices: assert the hard matching constraints, check satisfiability,
-- then add (a sample of) the soft preference constraints under MAXSAT and
-- read back the boolean assignment of every pairing variable.
test1 =
  do yp@(Just hin, Just hout, Nothing, p) <- createYicesPipe yicesPath []
     runCmdsY yp ((defs) ++ unique_engate ++ must_engate)
     Sat ss <- checkY yp
     --return ss
     -- Only the first two soft constraints are sent before MAXSAT here.
     runCmdsY yp (take 2 mymax ++ [MAXSAT])
     print "------------------------"
     Sat ss' <- checkMAX yp
     --print ss'
     return (obtain_bool_value var_all_list ss')
--eng = [ (w, m) | w <- [1,2,3], m <-[1..5], m > 2]
--eng = [(w, m) | w <- [1..3], m <- [1..4]]
--eng = [ (w, m) | w <- women, m <- men , ((test1!!(w*n + m)) == (Just True))]
-- | Boolean implication: @p ==> q@ is false only when @p@ holds and @q@
-- does not.
infix 1 ==>
(==>) :: Bool -> Bool -> Bool
p ==> q = not p || q
-- | 'all' with its arguments flipped: @forall xs p@ checks @p@ on every
-- element of @xs@.
forall xs p = all p xs
-- | Extract the engaged (woman, man) pairs from a model-reading action:
-- pair (w, m) is engaged iff its variable came back @Just True@.
-- Fixed: the parameter @lst@ was previously ignored and the body
-- hard-coded the global 'test1', making the argument dead.
get_eng lst = do
  l <- lst
  return [ (w, m) | w <- women, m <- men
                  , l !! ((fromIntegral w - 1) * n + (fromIntegral m - 1)) == Just True ]
-- | The engaged pairs of the MAXSAT run.
eng = get_eng test1
-- | A matching is stable iff it has no blocking pair: whenever a woman
-- prefers another man over her partner, that man must prefer his own
-- partner over her — and symmetrically for the men.
isStable (wpref, mpref) engaged = let
  -- wf w a b: woman w ranks a strictly before b (and similarly mf for men).
  wf = plist2pfct wpref
  mf = plist2pfct mpref
  in
  forall engaged (\ (w,m) -> forall engaged
    (\ (w',m') -> (wf w m' m ==> mf m' w' w)
                  &&
                  (mf m w' w ==> wf w' m' m)))
-- | @plist2pfct table x y y'@: does @x@ rank @y@ strictly before @y'@ in
-- its preference list?  Returns 'False' when @x@ has no entry in the table.
-- Fixed: the original bound @Just prefs@ with an irrefutable let pattern
-- and crashed on a missing key; a missing key now simply means "does not
-- prefer".
plist2pfct table x y y' =
  case lookup x table of
    Nothing -> False
    Just prefs -> y `elem` takeWhile (/= y') prefs
-- | Check stability of the engagement produced by an action against the
-- global preference tables.
result engage = fmap (isStable (wt, mt)) engage
| airobert/yices_haskell | Main.hs | bsd-3-clause | 5,351 | 0 | 19 | 1,357 | 1,633 | 887 | 746 | 84 | 2 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.Configure
-- Copyright : Isaac Jones 2003-2005
-- License : BSD3
--
-- Maintainer : cabal-devel@haskell.org
-- Portability : portable
--
-- This deals with the /configure/ phase. It provides the 'configure' action
-- which is given the package description and configure flags. It then tries
-- to: configure the compiler; resolves any conditionals in the package
-- description; resolve the package dependencies; check if all the extensions
-- used by this package are supported by the compiler; check that all the build
-- tools are available (including version checks if appropriate); checks for
-- any required @pkg-config@ packages (updating the 'BuildInfo' with the
-- results)
--
-- Then based on all this it saves the info in the 'LocalBuildInfo' and writes
-- it out to the @dist\/setup-config@ file. It also displays various details to
-- the user, the amount of information displayed depending on the verbosity
-- level.
module Distribution.Simple.Configure (configure,
writePersistBuildConfig,
getConfigStateFile,
getPersistBuildConfig,
checkPersistBuildConfigOutdated,
tryGetPersistBuildConfig,
maybeGetPersistBuildConfig,
findDistPref, findDistPrefOrDefault,
computeComponentId,
localBuildInfoFile,
getInstalledPackages,
getInstalledPackagesMonitorFiles,
getPackageDBContents,
configCompiler, configCompilerAux,
configCompilerEx, configCompilerAuxEx,
ccLdOptionsBuildInfo,
checkForeignDeps,
interpretPackageDbFlags,
ConfigStateFileError(..),
tryGetConfigStateFile,
platformDefines,
)
where
import Distribution.Compiler
( CompilerId(..) )
import Distribution.Utils.NubList
import Distribution.Simple.Compiler
( CompilerFlavor(..), Compiler(..), compilerFlavor, compilerVersion
, compilerInfo, ProfDetailLevel(..), knownProfDetailLevels
, showCompilerId, unsupportedLanguages, unsupportedExtensions
, PackageDB(..), PackageDBStack, reexportedModulesSupported
, packageKeySupported, renamingPackageFlagsSupported
, unifiedIPIDRequired )
import Distribution.Simple.PreProcess ( platformDefines )
import Distribution.Package
( PackageName(PackageName), PackageIdentifier(..), PackageId
, packageName, packageVersion, Package(..)
, Dependency(Dependency), simplifyDependency
, ComponentId(..), thisPackageVersion, ComponentId(..) )
import qualified Distribution.InstalledPackageInfo as Installed
import Distribution.InstalledPackageInfo (InstalledPackageInfo, emptyInstalledPackageInfo)
import qualified Distribution.Simple.PackageIndex as PackageIndex
import Distribution.Simple.PackageIndex (InstalledPackageIndex)
import Distribution.PackageDescription as PD
( PackageDescription(..), specVersion, GenericPackageDescription(..)
, Library(..), hasLibs, Executable(..), BuildInfo(..), allExtensions
, HookedBuildInfo, updatePackageDescription, allBuildInfo
, Flag(flagName), FlagName(..), TestSuite(..), Benchmark(..)
, ModuleReexport(..) , defaultRenaming, FlagAssignment )
import Distribution.ModuleName
( ModuleName )
import Distribution.PackageDescription.Configuration
( finalizePackageDescription, mapTreeData )
import Distribution.PackageDescription.Check
( PackageCheck(..), checkPackage, checkPackageFiles )
import Distribution.Simple.Program
( Program(..), ProgramLocation(..), ConfiguredProgram(..)
, ProgramConfiguration, defaultProgramConfiguration
, ProgramSearchPathEntry(..), getProgramSearchPath, setProgramSearchPath
, configureAllKnownPrograms, knownPrograms, lookupKnownProgram
, userSpecifyArgss, userSpecifyPaths
, lookupProgram, requireProgram, requireProgramVersion
, pkgConfigProgram, gccProgram, rawSystemProgramStdoutConf )
import Distribution.Simple.Setup as Setup
( ConfigFlags(..), CopyDest(..), Flag(..), defaultDistPref
, fromFlag, fromFlagOrDefault, flagToMaybe, toFlag )
import Distribution.Simple.InstallDirs
( InstallDirs(..), defaultInstallDirs, combineInstallDirs )
import Distribution.Simple.LocalBuildInfo
( LocalBuildInfo(..), Component(..), ComponentLocalBuildInfo(..)
, absoluteInstallDirs, prefixRelativeInstallDirs
, ComponentName(..), showComponentName, pkgEnabledComponents
, componentBuildInfo, componentName, checkComponentsCyclic
, lookupComponent )
import Distribution.Simple.BuildPaths
( autogenModulesDir )
import Distribution.Simple.Utils
( die, warn, info, setupMessage
, createDirectoryIfMissingVerbose, moreRecentFile
, intercalate, cabalVersion
, writeFileAtomic
, withTempFile )
import Distribution.System
( OS(..), buildOS, Platform (..), buildPlatform )
import Distribution.Version
( Version(..), anyVersion, orLaterVersion, withinRange, isAnyVersion )
import Distribution.Verbosity
( Verbosity, lessVerbose )
import Distribution.Simple.InstallDirs
( fromPathTemplate, substPathTemplate, toPathTemplate, packageTemplateEnv )
import qualified Distribution.Simple.GHC as GHC
import qualified Distribution.Simple.GHCJS as GHCJS
import qualified Distribution.Simple.JHC as JHC
import qualified Distribution.Simple.LHC as LHC
import qualified Distribution.Simple.UHC as UHC
import qualified Distribution.Simple.HaskellSuite as HaskellSuite
-- Prefer the more generic Data.Traversable.mapM to Prelude.mapM
import Prelude hiding ( mapM )
import Control.Exception
( Exception, evaluate, throw, throwIO, try )
import Control.Exception ( ErrorCall )
import Control.Monad
( liftM, when, unless, foldM, filterM )
import Distribution.Compat.Binary ( decodeOrFailIO, encode )
import GHC.Fingerprint ( Fingerprint(..), fingerprintString )
import Data.ByteString.Lazy (ByteString)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy.Char8 as BLC8
import Data.List
( (\\), nub, partition, isPrefixOf, inits, stripPrefix )
import Data.Maybe
( isNothing, catMaybes, fromMaybe, isJust )
import Data.Either
( partitionEithers )
import qualified Data.Set as Set
import Data.Monoid as Mon ( Monoid(..) )
import qualified Data.Map as Map
import Data.Map (Map)
import Data.Traversable
( mapM )
import Data.Typeable
import Data.Char ( chr, isAlphaNum )
import Numeric ( showIntAtBase )
import Data.Bits ( shift )
import System.Directory
( doesFileExist, createDirectoryIfMissing, getTemporaryDirectory )
import System.FilePath
( (</>), isAbsolute )
import qualified System.Info
( compilerName, compilerVersion )
import System.IO
( hPutStrLn, hClose )
import Distribution.Text
( Text(disp), display, simpleParse )
import Text.PrettyPrint
( render, (<>), ($+$), char, text, comma
, quotes, punctuate, nest, sep, hsep )
import Distribution.Compat.Environment ( lookupEnv )
import Distribution.Compat.Exception ( catchExit, catchIO )
-- | The errors that can be thrown when reading the @setup-config@ file.
data ConfigStateFileError
    = ConfigStateFileNoHeader -- ^ No header found.
    | ConfigStateFileBadHeader -- ^ Incorrect header.
    | ConfigStateFileNoParse -- ^ Cannot parse file contents.
    | ConfigStateFileMissing -- ^ No file!
    -- | Mismatched version: carries the Cabal and compiler ids the file was
    -- written with, plus the result of attempting to read it anyway.
    | ConfigStateFileBadVersion PackageIdentifier PackageIdentifier (Either ConfigStateFileError LocalBuildInfo)
  deriving (Typeable)
-- | Human-readable rendering of each failure, phrased as advice to re-run
-- the @configure@ command.
instance Show ConfigStateFileError where
    show ConfigStateFileNoHeader =
        "Saved package config file header is missing. "
        ++ "Try re-running the 'configure' command."
    show ConfigStateFileBadHeader =
        "Saved package config file header is corrupt. "
        ++ "Try re-running the 'configure' command."
    show ConfigStateFileNoParse =
        "Saved package config file body is corrupt. "
        ++ "Try re-running the 'configure' command."
    show ConfigStateFileMissing = "Run the 'configure' command first."
    show (ConfigStateFileBadVersion oldCabal oldCompiler _) =
        "You need to re-run the 'configure' command. "
        ++ "The version of Cabal being used has changed (was "
        ++ display oldCabal ++ ", now "
        ++ display currentCabalId ++ ")."
        ++ badCompiler
      where
        -- Only mention the compiler when it actually differs.
        badCompiler
          | oldCompiler == currentCompilerId = ""
          | otherwise =
              " Additionally the compiler is different (was "
              ++ display oldCompiler ++ ", now "
              ++ display currentCompilerId
              ++ ") which is probably the cause of the problem."

instance Exception ConfigStateFileError
-- | Read the 'localBuildInfoFile'.  Throw an exception if the file is
-- missing, if the file cannot be read, or if the file was created by an older
-- version of Cabal.
getConfigStateFile :: FilePath -- ^ The file path of the @setup-config@ file.
                   -> IO LocalBuildInfo
getConfigStateFile filename = do
    exists <- doesFileExist filename
    unless exists $ throwIO ConfigStateFileMissing
    -- Read the config file into a strict ByteString to avoid problems with
    -- lazy I/O, then convert to lazy because the binary package needs that.
    contents <- BS.readFile filename
    -- The first line is the human-readable header; the rest is binary.
    let (header, body) = BLC8.span (/='\n') (BLC8.fromChunks [contents])
    -- 'parseHeader' is pure but throws imprecise exceptions; evaluate+try
    -- converts those into a Left here.
    headerParseResult <- try $ evaluate $ parseHeader header
    let (cabalId, compId) =
            case headerParseResult of
              Left (_ :: ErrorCall) -> throw ConfigStateFileBadHeader
              Right x -> x
    let getStoredValue = do
          result <- decodeOrFailIO (BLC8.tail body)
          case result of
            Left _ -> throw ConfigStateFileNoParse
            Right x -> return x
        -- When written by a different Cabal version, still attempt the
        -- decode, but wrap the outcome in 'ConfigStateFileBadVersion' so
        -- callers can report the version mismatch as the root cause.
        deferErrorIfBadVersion act
          | cabalId /= currentCabalId = do
              eResult <- try act
              throw $ ConfigStateFileBadVersion cabalId compId eResult
          | otherwise = act
    deferErrorIfBadVersion getStoredValue
-- | Read the 'localBuildInfoFile', returning either the failure or the
-- local build info instead of throwing.
tryGetConfigStateFile :: FilePath -- ^ The file path of the @setup-config@ file.
                      -> IO (Either ConfigStateFileError LocalBuildInfo)
tryGetConfigStateFile filename = try (getConfigStateFile filename)
-- | Read the persisted build configuration, returning either the failure
-- or the local build info instead of throwing.
tryGetPersistBuildConfig :: FilePath -- ^ The @dist@ directory path.
                         -> IO (Either ConfigStateFileError LocalBuildInfo)
tryGetPersistBuildConfig distPref = try (getPersistBuildConfig distPref)
-- | Read the 'localBuildInfoFile' found under the given @dist@ directory.
-- Throws a 'ConfigStateFileError' if the file is missing, unreadable, or
-- was written by a different Cabal version.
getPersistBuildConfig :: FilePath -- ^ The @dist@ directory path.
                      -> IO LocalBuildInfo
getPersistBuildConfig distPref = getConfigStateFile (localBuildInfoFile distPref)
-- | Like 'getPersistBuildConfig', but any failure becomes 'Nothing'.
maybeGetPersistBuildConfig :: FilePath -- ^ The @dist@ directory path.
                           -> IO (Maybe LocalBuildInfo)
maybeGetPersistBuildConfig distPref = do
    eLBI <- tryGetPersistBuildConfig distPref
    return $ case eLBI of
      Left _ -> Nothing
      Right lbi -> Just lbi
-- | After running configure, persist the 'LocalBuildInfo' to the
-- 'localBuildInfoFile', creating the @dist@ directory if necessary.
-- The file consists of a one-line header followed by the binary encoding.
writePersistBuildConfig :: FilePath -- ^ The @dist@ directory path.
                        -> LocalBuildInfo -- ^ The 'LocalBuildInfo' to write.
                        -> IO ()
writePersistBuildConfig distPref lbi = do
    let pkgId = packageId (localPkgDescr lbi)
    createDirectoryIfMissing False distPref
    writeFileAtomic (localBuildInfoFile distPref) $
        BLC8.unlines [showHeader pkgId, encode lbi]
-- | Identifier of the current Cabal package (the library doing the writing).
currentCabalId :: PackageIdentifier
currentCabalId = PackageIdentifier (PackageName "Cabal") cabalVersion
-- | Identifier of the compiler this Cabal library was itself built with.
currentCompilerId :: PackageIdentifier
currentCompilerId = PackageIdentifier (PackageName System.Info.compilerName)
                                      System.Info.compilerVersion
-- | Parse the @setup-config@ file header, returning the package identifiers
-- for Cabal and the compiler.  Throws (via imprecise exceptions) on a
-- malformed header; 'getConfigStateFile' forces this with 'evaluate'/'try'.
parseHeader :: ByteString -- ^ The file contents.
            -> (PackageIdentifier, PackageIdentifier)
parseHeader header = case BLC8.words header of
  ["Saved", "package", "config", "for", pkgId, "written", "by", cabalId, "using", compId] ->
      fromMaybe (throw ConfigStateFileBadHeader) $ do
          -- The package id is parsed only to validate it; its value is unused.
          _ <- simpleParse (BLC8.unpack pkgId) :: Maybe PackageIdentifier
          cabalId' <- simpleParse (BLC8.unpack cabalId)
          compId' <- simpleParse (BLC8.unpack compId)
          return (cabalId', compId')
  _ -> throw ConfigStateFileNoHeader
-- | Generate the @setup-config@ file header (the exact word sequence that
-- 'parseHeader' expects back).
showHeader :: PackageIdentifier -- ^ The processed package.
           -> ByteString
showHeader pkgId = BLC8.unwords
    [ "Saved", "package", "config", "for"
    , BLC8.pack $ display pkgId
    , "written", "by"
    , BLC8.pack $ display currentCabalId
    , "using"
    , BLC8.pack $ display currentCompilerId
    ]
-- | Is the saved configuration out of date with respect to the package's
-- @.cabal@ file (i.e. is the @.cabal@ file the more recent of the two)?
checkPersistBuildConfigOutdated :: FilePath -> FilePath -> IO Bool
checkPersistBuildConfigOutdated distPref pkg_descr_file =
    moreRecentFile pkg_descr_file (localBuildInfoFile distPref)
-- | Get the path of @dist\/setup-config@.
localBuildInfoFile :: FilePath -- ^ The @dist@ directory path.
                   -> FilePath
localBuildInfoFile distPref = distPref </> "setup-config"
-- -----------------------------------------------------------------------------
-- * Configuration
-- -----------------------------------------------------------------------------
-- | Return the \"dist/\" prefix.  Preference order, highest first: the
-- override flag, the \"CABAL_BUILDDIR\" environment variable, then the
-- supplied default.
findDistPref :: FilePath -- ^ default \"dist\" prefix
             -> Setup.Flag FilePath -- ^ override \"dist\" prefix
             -> IO FilePath
findDistPref defDistPref overrideDistPref = do
    mEnv <- lookupEnv "CABAL_BUILDDIR"
    -- An unset or empty environment variable contributes nothing.
    let envDistPref = case mEnv of
          Just path | not (null path) -> toFlag path
          _ -> NoFlag
    return $ fromFlagOrDefault defDistPref (mappend envDistPref overrideDistPref)
-- | Resolve a @*DistPref@ flag that may not be set: like 'findDistPref'
-- with 'defaultDistPref' as the fallback.  (The @*DistPref@ flags are
-- always set to a definite value before invoking 'UserHooks'.)
findDistPrefOrDefault :: Setup.Flag FilePath -- ^ override \"dist\" prefix
                      -> IO FilePath
findDistPrefOrDefault overrideDistPref = findDistPref defaultDistPref overrideDistPref
-- |Perform the \"@.\/setup configure@\" action.
-- Returns the @.setup-config@ file.
configure :: (GenericPackageDescription, HookedBuildInfo)
-> ConfigFlags -> IO LocalBuildInfo
configure (pkg_descr0, pbi) cfg = do
setupMessage verbosity "Configuring" (packageId pkg_descr0)
checkDeprecatedFlags verbosity cfg
checkExactConfiguration pkg_descr0 cfg
-- Where to build the package
let buildDir :: FilePath -- e.g. dist/build
-- fromFlag OK due to Distribution.Simple calling
-- findDistPrefOrDefault to fill it in
buildDir = fromFlag (configDistPref cfg) </> "build"
createDirectoryIfMissingVerbose (lessVerbose verbosity) True buildDir
-- What package database(s) to use
let packageDbs
= interpretPackageDbFlags
(fromFlag (configUserInstall cfg))
(configPackageDBs cfg)
-- comp: the compiler we're building with
-- compPlatform: the platform we're building for
-- programsConfig: location and args of all programs we're
-- building with
(comp, compPlatform, programsConfig)
<- configCompilerEx
(flagToMaybe (configHcFlavor cfg))
(flagToMaybe (configHcPath cfg))
(flagToMaybe (configHcPkg cfg))
(mkProgramsConfig cfg (configPrograms cfg))
(lessVerbose verbosity)
-- The InstalledPackageIndex of all installed packages
installedPackageSet <- getInstalledPackages (lessVerbose verbosity) comp
packageDbs programsConfig
-- The InstalledPackageIndex of all (possible) internal packages
let internalPackageSet = getInternalPackages pkg_descr0
-- allConstraints: The set of all 'Dependency's we have. Used ONLY
-- to 'configureFinalizedPackage'.
-- requiredDepsMap: A map from 'PackageName' to the specifically
-- required 'InstalledPackageInfo', due to --dependency
--
-- NB: These constraints are to be applied to ALL components of
-- a package. Thus, it's not an error if allConstraints contains
-- more constraints than is necessary for a component (another
-- component might need it.)
--
-- NB: The fact that we bundle all the constraints together means
-- that is not possible to configure a test-suite to use one
-- version of a dependency, and the executable to use another.
(allConstraints, requiredDepsMap) <- either die return $
combinedConstraints (configConstraints cfg)
(configDependencies cfg)
installedPackageSet
-- The resolved package description, that does not contain any
-- conditionals, because we have have an assignment for every
-- flag, either picking them ourselves using a simple naive
-- algorithm, or having them be passed to us by
-- 'configConfigurationsFlags')
--
-- NB: Why doesn't finalizing a package also tell us what the
-- dependencies are (e.g. when we run the naive algorithm,
-- we are checking if dependencies are satisfiable)? The
-- primary reason is that we may NOT have done any solving:
-- if the flags are all chosen for us, this step is a simple
-- matter of flattening according to that assignment. It's
-- cleaner to then configure the dependencies afterwards.
pkg_descr
<- configureFinalizedPackage verbosity cfg
allConstraints
(dependencySatisfiable
(fromFlagOrDefault False (configExactConfiguration cfg))
installedPackageSet
internalPackageSet
requiredDepsMap)
comp
compPlatform
pkg_descr0
checkCompilerProblems comp pkg_descr
checkPackageProblems verbosity pkg_descr0
(updatePackageDescription pbi pkg_descr)
-- Handle hole instantiation
-- TODO: Totally unclear if this belongs here
(holeDeps, hole_insts)
<- configureInstantiateWith pkg_descr cfg installedPackageSet
-- The list of 'InstalledPackageInfo' recording the selected
-- dependencies...
-- internalPkgDeps: ...on internal packages (these are fake!)
-- externalPkgDeps: ...on external packages
--
-- Invariant: For any package name, there is at most one package
-- in externalPackageDeps which has that name.
--
-- NB: The dependency selection is global over ALL components
-- in the package (similar to how allConstraints and
-- requiredDepsMap are global over all components). In particular,
-- if *any* component (post-flag resolution) has an unsatisfiable
-- dependency, we will fail. This can sometimes be undesirable
-- for users, see #1786 (benchmark conflicts with executable),
(internalPkgDeps, externalPkgDeps)
<- configureDependencies
verbosity
internalPackageSet
installedPackageSet
requiredDepsMap
pkg_descr
let installDeps = Map.elems -- deduplicate
. Map.fromList
. map (\v -> (Installed.installedComponentId v, v))
$ externalPkgDeps ++ holeDeps
packageDependsIndex <-
case PackageIndex.dependencyClosure installedPackageSet
(map Installed.installedComponentId installDeps) of
Left packageDependsIndex -> return packageDependsIndex
Right broken ->
die $ "The following installed packages are broken because other"
++ " packages they depend on are missing. These broken "
++ "packages must be rebuilt before they can be used.\n"
++ unlines [ "package "
++ display (packageId pkg)
++ " is broken due to missing package "
++ intercalate ", " (map display deps)
| (pkg, deps) <- broken ]
let pseudoTopPkg = emptyInstalledPackageInfo {
Installed.installedComponentId =
ComponentId (display (packageId pkg_descr)),
Installed.sourcePackageId = packageId pkg_descr,
Installed.depends =
map Installed.installedComponentId installDeps
}
case PackageIndex.dependencyInconsistencies
. PackageIndex.insert pseudoTopPkg
$ packageDependsIndex of
[] -> return ()
inconsistencies ->
warn verbosity $
"This package indirectly depends on multiple versions of the same "
++ "package. This is highly likely to cause a compile failure.\n"
++ unlines [ "package " ++ display pkg ++ " requires "
++ display (PackageIdentifier name ver)
| (name, uses) <- inconsistencies
, (pkg, ver) <- uses ]
-- installation directories
defaultDirs <- defaultInstallDirs (compilerFlavor comp) (fromFlag (configUserInstall cfg)) (hasLibs pkg_descr)
let installDirs = combineInstallDirs fromFlagOrDefault
defaultDirs (configInstallDirs cfg)
-- check languages and extensions
let langlist = nub $ catMaybes $ map defaultLanguage
(allBuildInfo pkg_descr)
let langs = unsupportedLanguages comp langlist
when (not (null langs)) $
die $ "The package " ++ display (packageId pkg_descr0)
++ " requires the following languages which are not "
++ "supported by " ++ display (compilerId comp) ++ ": "
++ intercalate ", " (map display langs)
let extlist = nub $ concatMap allExtensions (allBuildInfo pkg_descr)
let exts = unsupportedExtensions comp extlist
when (not (null exts)) $
die $ "The package " ++ display (packageId pkg_descr0)
++ " requires the following language extensions which are not "
++ "supported by " ++ display (compilerId comp) ++ ": "
++ intercalate ", " (map display exts)
-- configured known/required programs & external build tools
-- exclude build-tool deps on "internal" exes in the same package
let requiredBuildTools =
[ buildTool
| let exeNames = map exeName (executables pkg_descr)
, bi <- allBuildInfo pkg_descr
, buildTool@(Dependency (PackageName toolName) reqVer)
<- buildTools bi
, let isInternal =
toolName `elem` exeNames
-- we assume all internal build-tools are
-- versioned with the package:
&& packageVersion pkg_descr `withinRange` reqVer
, not isInternal ]
programsConfig' <-
configureAllKnownPrograms (lessVerbose verbosity) programsConfig
>>= configureRequiredPrograms verbosity requiredBuildTools
(pkg_descr', programsConfig'') <-
configurePkgconfigPackages verbosity pkg_descr programsConfig'
-- internal component graph
buildComponents <-
case mkComponentsGraph pkg_descr internalPkgDeps of
Left componentCycle -> reportComponentCycle componentCycle
Right components ->
mkComponentsLocalBuildInfo cfg comp packageDependsIndex pkg_descr
internalPkgDeps externalPkgDeps holeDeps
(Map.fromList hole_insts)
components (configConfigurationsFlags cfg)
split_objs <-
if not (fromFlag $ configSplitObjs cfg)
then return False
else case compilerFlavor comp of
GHC | compilerVersion comp >= Version [6,5] []
-> return True
GHCJS
-> return True
_ -> do warn verbosity
("this compiler does not support " ++
"--enable-split-objs; ignoring")
return False
let ghciLibByDefault =
case compilerId comp of
CompilerId GHC _ ->
-- If ghc is non-dynamic, then ghci needs object files,
-- so we build one by default.
--
-- Technically, archive files should be sufficient for ghci,
-- but because of GHC bug #8942, it has never been safe to
-- rely on them. By the time that bug was fixed, ghci had
-- been changed to read shared libraries instead of archive
-- files (see next code block).
not (GHC.isDynamic comp)
CompilerId GHCJS _ ->
not (GHCJS.isDynamic comp)
_ -> False
let sharedLibsByDefault
| fromFlag (configDynExe cfg) =
-- build a shared library if dynamically-linked
-- executables are requested
True
| otherwise = case compilerId comp of
CompilerId GHC _ ->
-- if ghc is dynamic, then ghci needs a shared
-- library, so we build one by default.
GHC.isDynamic comp
CompilerId GHCJS _ ->
GHCJS.isDynamic comp
_ -> False
withSharedLib_ =
-- build shared libraries if required by GHC or by the
-- executable linking mode, but allow the user to force
-- building only static library archives with
-- --disable-shared.
fromFlagOrDefault sharedLibsByDefault $ configSharedLib cfg
withDynExe_ = fromFlag $ configDynExe cfg
when (withDynExe_ && not withSharedLib_) $ warn verbosity $
"Executables will use dynamic linking, but a shared library "
++ "is not being built. Linking will fail if any executables "
++ "depend on the library."
-- The --profiling flag sets the default for both libs and exes,
-- but can be overidden by --library-profiling, or the old deprecated
-- --executable-profiling flag.
let profEnabledLibOnly = configProfLib cfg
profEnabledBoth = fromFlagOrDefault False (configProf cfg)
profEnabledLib = fromFlagOrDefault profEnabledBoth profEnabledLibOnly
profEnabledExe = fromFlagOrDefault profEnabledBoth (configProfExe cfg)
-- The --profiling-detail and --library-profiling-detail flags behave
-- similarly
profDetailLibOnly <- checkProfDetail (configProfLibDetail cfg)
profDetailBoth <- liftM (fromFlagOrDefault ProfDetailDefault)
(checkProfDetail (configProfDetail cfg))
let profDetailLib = fromFlagOrDefault profDetailBoth profDetailLibOnly
profDetailExe = profDetailBoth
when (profEnabledExe && not profEnabledLib) $
warn verbosity $
"Executables will be built with profiling, but library "
++ "profiling is disabled. Linking will fail if any executables "
++ "depend on the library."
let configCoverage_ =
mappend (configCoverage cfg) (configLibCoverage cfg)
cfg' = cfg { configCoverage = configCoverage_ }
reloc <-
if not (fromFlag $ configRelocatable cfg)
then return False
else return True
let lbi = LocalBuildInfo {
configFlags = cfg',
extraConfigArgs = [], -- Currently configure does not
-- take extra args, but if it
-- did they would go here.
installDirTemplates = installDirs,
compiler = comp,
hostPlatform = compPlatform,
buildDir = buildDir,
componentsConfigs = buildComponents,
installedPkgs = packageDependsIndex,
pkgDescrFile = Nothing,
localPkgDescr = pkg_descr',
instantiatedWith = hole_insts,
withPrograms = programsConfig'',
withVanillaLib = fromFlag $ configVanillaLib cfg,
withProfLib = profEnabledLib,
withSharedLib = withSharedLib_,
withDynExe = withDynExe_,
withProfExe = profEnabledExe,
withProfLibDetail = profDetailLib,
withProfExeDetail = profDetailExe,
withOptimization = fromFlag $ configOptimization cfg,
withDebugInfo = fromFlag $ configDebugInfo cfg,
withGHCiLib = fromFlagOrDefault ghciLibByDefault $
configGHCiLib cfg,
splitObjs = split_objs,
stripExes = fromFlag $ configStripExes cfg,
stripLibs = fromFlag $ configStripLibs cfg,
withPackageDB = packageDbs,
progPrefix = fromFlag $ configProgPrefix cfg,
progSuffix = fromFlag $ configProgSuffix cfg,
relocatable = reloc
}
when reloc (checkRelocatable verbosity pkg_descr lbi)
let dirs = absoluteInstallDirs pkg_descr lbi NoCopyDest
relative = prefixRelativeInstallDirs (packageId pkg_descr) lbi
unless (isAbsolute (prefix dirs)) $ die $
"expected an absolute directory name for --prefix: " ++ prefix dirs
info verbosity $ "Using " ++ display currentCabalId
++ " compiled by " ++ display currentCompilerId
info verbosity $ "Using compiler: " ++ showCompilerId comp
info verbosity $ "Using install prefix: " ++ prefix dirs
let dirinfo name dir isPrefixRelative =
info verbosity $ name ++ " installed in: " ++ dir ++ relNote
where relNote = case buildOS of
Windows | not (hasLibs pkg_descr)
&& isNothing isPrefixRelative
-> " (fixed location)"
_ -> ""
dirinfo "Binaries" (bindir dirs) (bindir relative)
dirinfo "Libraries" (libdir dirs) (libdir relative)
dirinfo "Private binaries" (libexecdir dirs) (libexecdir relative)
dirinfo "Data files" (datadir dirs) (datadir relative)
dirinfo "Documentation" (docdir dirs) (docdir relative)
dirinfo "Configuration files" (sysconfdir dirs) (sysconfdir relative)
sequence_ [ reportProgram verbosity prog configuredProg
| (prog, configuredProg) <- knownPrograms programsConfig'' ]
return lbi
where
verbosity = fromFlag (configVerbosity cfg)
checkProfDetail (Flag (ProfDetailOther other)) = do
warn verbosity $
"Unknown profiling detail level '" ++ other
++ "', using default.\n"
++ "The profiling detail levels are: " ++ intercalate ", "
[ name | (name, _, _) <- knownProfDetailLevels ]
return (Flag ProfDetailDefault)
checkProfDetail other = return other
-- | Build the program database used for configuration: starting from the
-- given initial database, extend the search path with any extra
-- directories from the flags, then layer on the user's explicit program
-- paths and per-program argument lists.
mkProgramsConfig :: ConfigFlags -> ProgramConfiguration -> ProgramConfiguration
mkProgramsConfig cfg initialProgramsConfig = withArgs
  where
    -- Each step refines the previous one; the final result carries the
    -- user-specified arguments on top of paths on top of the search path.
    withSearchPath = setProgramSearchPath searchpath initialProgramsConfig
    withPaths      = userSpecifyPaths (configProgramPaths cfg) withSearchPath
    withArgs       = userSpecifyArgss (configProgramArgs cfg) withPaths
    -- The configured extra path entries are appended after the initial
    -- database's own search path.
    searchpath = getProgramSearchPath initialProgramsConfig
              ++ map ProgramSearchPathDir
                     (fromNubList $ configProgramPathExtra cfg)
-- -----------------------------------------------------------------------------
-- Helper functions for configure
-- | Check if the user used any deprecated flags.
checkDeprecatedFlags :: Verbosity -> ConfigFlags -> IO ()
checkDeprecatedFlags verbosity cfg = do
    deprecated (configProfExe cfg)     "executable-profiling" "profiling"
    deprecated (configLibCoverage cfg) "library-coverage"     "coverage"
  where
    -- Warn when a deprecated tri-state flag was explicitly set on the
    -- command line, pointing at its replacement with matching polarity
    -- (--enable-* vs --disable-*).  'NoFlag' means the user did not
    -- mention the flag at all, so no warning is due.
    deprecated flag old new =
      unless (flag == NoFlag) $ do
        let enable | fromFlag flag = "enable"
                   | otherwise     = "disable"
        warn verbosity
          ("The flag --" ++ enable ++ "-" ++ old ++ " is deprecated. "
           ++ "Please use --" ++ enable ++ "-" ++ new ++ " instead.")
-- | Sanity check: if '--exact-configuration' was given, ensure that the
-- complete flag assignment was specified on the command line.
checkExactConfiguration :: GenericPackageDescription -> ConfigFlags -> IO ()
checkExactConfiguration pkg_descr0 cfg = do
  when (fromFlagOrDefault False (configExactConfiguration cfg)) $ do
    let cmdlineFlags = map fst (configConfigurationsFlags cfg)
        allFlags     = map flagName . genPackageFlags $ pkg_descr0
        -- Flags declared in the .cabal file but not assigned on the
        -- command line; with an exact configuration this is an error.
        diffFlags    = allFlags \\ cmdlineFlags
    when (not . null $ diffFlags) $
      -- BUGFIX: the message previously referred to a non-existent
      -- '--exact-conf' flag; the real flag is '--exact-configuration'.
      die $ "'--exact-configuration' was given, "
         ++ "but the following flags were not specified: "
         ++ intercalate ", " (map show diffFlags)
-- | Create a PackageIndex that makes *any libraries that might be*
-- defined internally to this package look like installed packages, in
-- case an executable should refer to any of them as dependencies.
--
-- It must be *any libraries that might be* defined rather than the
-- actual definitions, because these depend on conditionals in the .cabal
-- file, and we haven't resolved them yet. finalizePackageDescription
-- does the resolution of conditionals, and it takes internalPackageSet
-- as part of its input.
--
-- Currently a package can define no more than one library (which has
-- the same name as the package) but we could extend this later.
-- If we later allowed private internal libraries, then here we would
-- need to pre-scan the conditional data to make a list of all private
-- libraries that could possibly be defined by the .cabal file.
getInternalPackages :: GenericPackageDescription
                    -> InstalledPackageIndex
getInternalPackages pkg_descr0 = PackageIndex.fromList [internalPackage]
  where
    pid :: PackageIdentifier -- e.g. foo-0.1
    pid = packageId pkg_descr0
    -- A synthetic installed-package entry standing in for the library
    -- this package may define.
    --TODO: should use a per-compiler method to map the source
    -- package ID into an installed package id we can use
    -- for the internal package set.  The open-coded use of
    -- ComponentId . display here is a hack.
    internalPackage =
      emptyInstalledPackageInfo
        { Installed.installedComponentId = ComponentId (display pid)
        , Installed.sourcePackageId      = pid
        }
-- | Returns true if a dependency is satisfiable. This is to be passed
-- to finalizePackageDescription.
dependencySatisfiable
    :: Bool
    -> InstalledPackageIndex -- ^ installed set
    -> InstalledPackageIndex -- ^ internal set
    -> Map PackageName InstalledPackageInfo -- ^ required dependencies
    -> (Dependency -> Bool)
dependencySatisfiable
    exact_config installedPackageSet internalPackageSet requiredDepsMap
    d@(Dependency depName _)
  | exact_config =
      -- When we're given '--exact-configuration', we assume that all
      -- dependencies and flags are exactly specified on the command
      -- line.  Thus we only consult the 'requiredDepsMap'.  Note that
      -- we're not doing the version range check, so if there's some
      -- dependency that wasn't specified on the command line,
      -- 'finalizePackageDescription' will fail.
      --
      -- (However, note that internal deps don't have to be
      -- specified!)
      (depName `Map.member` requiredDepsMap) || isInternalDep
  | otherwise =
      -- Normal operation: just look the dependency up in the union of
      -- the internal and installed package indexes.
      satisfiableIn (PackageIndex.merge internalPackageSet installedPackageSet)
  where
    -- True when at least one package in the given index satisfies 'd'.
    satisfiableIn index = not (null (PackageIndex.lookupDependency index d))
    isInternalDep = satisfiableIn internalPackageSet
-- | Finalize a generic package description. The workhorse is
-- 'finalizePackageDescription' but there's a bit of other nattering
-- about necessary.
--
-- TODO: what exactly is the business with @flaggedTests@ and
-- @flaggedBenchmarks@?
configureFinalizedPackage
    :: Verbosity
    -> ConfigFlags
    -> [Dependency]
    -> (Dependency -> Bool) -- ^ tests if a dependency is satisfiable.
                            -- Might say it's satisfiable even when not.
    -> Compiler
    -> Platform
    -> GenericPackageDescription
    -> IO PackageDescription
configureFinalizedPackage verbosity cfg
  allConstraints satisfies comp compPlatform pkg_descr0 = do
    -- Before finalizing, force the enabled/disabled state of every test
    -- suite and benchmark from the corresponding configure flags, so the
    -- flag resolution below sees the user's choice.
    let enableTest t = t { testEnabled = fromFlag (configTests cfg) }
        flaggedTests = map (\(n, t) -> (n, mapTreeData enableTest t))
                           (condTestSuites pkg_descr0)
        enableBenchmark bm = bm { benchmarkEnabled =
                                     fromFlag (configBenchmarks cfg) }
        flaggedBenchmarks = map (\(n, bm) ->
                                    (n, mapTreeData enableBenchmark bm))
                                (condBenchmarks pkg_descr0)
        pkg_descr0'' = pkg_descr0 { condTestSuites = flaggedTests
                                  , condBenchmarks = flaggedBenchmarks }
    -- Resolve all conditionals, producing a flat 'PackageDescription'
    -- plus the flag assignment that was chosen.  'Left' carries the
    -- dependencies that could not be satisfied under any assignment.
    (pkg_descr0', flags) <-
            case finalizePackageDescription
                   (configConfigurationsFlags cfg)
                   satisfies
                   compPlatform
                   (compilerInfo comp)
                   allConstraints
                   pkg_descr0''
            of Right r -> return r
               Left missing ->
                   die $ "At least the following dependencies are missing:\n"
                     ++ (render . nest 4 . sep . punctuate comma
                                . map (disp . simplifyDependency)
                                $ missing)
    -- add extra include/lib dirs as specified in cfg
    -- we do it here so that those get checked too
    let pkg_descr = addExtraIncludeLibDirs pkg_descr0'
    when (not (null flags)) $
      info verbosity $ "Flags chosen: "
                    ++ intercalate ", " [ name ++ "=" ++ display value
                                        | (FlagName name, value) <- flags ]
    return pkg_descr
  where
    -- Append the --extra-lib-dirs / --extra-include-dirs given at
    -- configure time to the build info of the library and every
    -- executable ('mappend' on BuildInfo concatenates the dir lists).
    addExtraIncludeLibDirs pkg_descr =
        let extraBi = mempty { extraLibDirs = configExtraLibDirs cfg
                             , PD.includeDirs = configExtraIncludeDirs cfg}
            modifyLib l = l{ libBuildInfo = libBuildInfo l
                                            `mappend` extraBi }
            modifyExecutable e = e{ buildInfo = buildInfo e
                                                `mappend` extraBi}
        in pkg_descr{ library = modifyLib `fmap` library pkg_descr
                    , executables = modifyExecutable `map`
                                      executables pkg_descr}
-- | Check for use of Cabal features which require compiler support
checkCompilerProblems :: Compiler -> PackageDescription -> IO ()
checkCompilerProblems comp pkg_descr = do
    -- NOTE(review): 'and [ True | .. ]' is vacuously True no matter what
    -- the build-infos contain, so this 'unless' can never trigger and the
    -- thinning/renaming check is dead code.  Presumably the comprehension
    -- was meant to test each renaming entry rather than the constant
    -- True -- confirm against the intended check before relying on it.
    unless (renamingPackageFlagsSupported comp ||
                and [ True
                    | bi <- allBuildInfo pkg_descr
                    , _ <- Map.elems (targetBuildRenaming bi)]) $
        die $ "Your compiler does not support thinning and renaming on "
           ++ "package flags. To use this feature you probably must use "
           ++ "GHC 7.9 or later."
    -- Module re-exports require compiler support; reject the package if
    -- its library declares any and the compiler lacks the feature.
    when (maybe False (not.null.PD.reexportedModules) (PD.library pkg_descr)
          && not (reexportedModulesSupported comp)) $ do
        die $ "Your compiler does not support module re-exports. To use "
           ++ "this feature you probably must use GHC 7.9 or later."
-- | Select dependencies for the package.
configureDependencies
    :: Verbosity
    -> InstalledPackageIndex -- ^ internal packages
    -> InstalledPackageIndex -- ^ installed packages
    -> Map PackageName InstalledPackageInfo -- ^ required deps
    -> PackageDescription
    -> IO ([PackageId], [InstalledPackageInfo])
configureDependencies verbosity
    internalPackageSet installedPackageSet requiredDepsMap pkg_descr = do
    -- Resolve every build-depends entry, splitting failures from
    -- successes.
    let (failedDeps, allPkgDeps) =
          partitionEithers
            [ selectDependency internalPackageSet installedPackageSet
                               requiredDepsMap dep
            | dep <- buildDepends pkg_descr ]
        -- Partition the resolved deps by provenance: deps on this
        -- package's own library vs. deps on installed packages.
        internalPkgDeps = [ pkgid
                          | InternalDependency _ pkgid <- allPkgDeps ]
        externalPkgDeps = [ pkg
                          | ExternalDependency _ pkg <- allPkgDeps ]
    -- Depending on an internal library is only allowed with the new
    -- per-target dependency behaviour (cabal-version >= 1.8).
    unless (null internalPkgDeps
            || newPackageDepsBehaviour pkg_descr) $
        die $ "The field 'build-depends: "
           ++ intercalate ", " (map (display . packageName) internalPkgDeps)
           ++ "' refers to a library which is defined within the same "
           ++ "package. To use this feature the package must specify at "
           ++ "least 'cabal-version: >= 1.8'."
    -- Dies if any dependency failed to resolve.
    reportFailedDependencies failedDeps
    reportSelectedDependencies verbosity allPkgDeps
    return (internalPkgDeps, externalPkgDeps)
-- -----------------------------------------------------------------------------
-- Configuring package dependencies
-- | Log (at info level) whether a known program was found, and if so at
-- which version and location.
reportProgram :: Verbosity -> Program -> Maybe ConfiguredProgram -> IO ()
reportProgram verbosity prog mConfigured =
    info verbosity $ case mConfigured of
      Nothing -> "No " ++ programName prog ++ " found"
      Just configuredProg ->
        "Using " ++ programName prog
                 ++ versionNote configuredProg
                 ++ locationNote configuredProg
  where
    versionNote p = case programVersion p of
      Nothing -> ""
      Just v  -> " version " ++ display v
    locationNote p = case programLocation p of
      FoundOnSystem path -> " found on system at: " ++ path
      UserSpecified path -> " given by user at: " ++ path
-- | Base URL used in dependency-failure messages to point users at a
-- package's Hackage page.
hackageUrl :: String
hackageUrl = "http://hackage.haskell.org/package/"
-- | A successfully resolved build-depends entry: either an installed
-- package, or a library defined inside this very package.
data ResolvedDependency = ExternalDependency Dependency InstalledPackageInfo
                        | InternalDependency Dependency PackageId -- should be a
                                                                  -- lib name

-- | Why a build-depends entry could not be resolved.
data FailedDependency = DependencyNotExists PackageName
                      | DependencyNoVersion Dependency
-- | Test for a package dependency and record the version we have installed.
selectDependency :: InstalledPackageIndex  -- ^ Internally defined packages
                 -> InstalledPackageIndex  -- ^ Installed packages
                 -> Map PackageName InstalledPackageInfo
                    -- ^ Packages for which we have been given specific deps to use
                 -> Dependency
                 -> Either FailedDependency ResolvedDependency
selectDependency internalIndex installedIndex requiredDepsMap
                 dep@(Dependency pkgname vr) =
  -- If the dependency specification matches anything in the internal package
  -- index, then we prefer that match to anything in the second.
  -- For example:
  --
  -- Name: MyLibrary
  -- Version: 0.1
  -- Library
  --   ..
  -- Executable my-exec
  --   build-depends: MyLibrary
  --
  -- We want "build-depends: MyLibrary" always to match the internal library
  -- even if there is a newer installed library "MyLibrary-0.2".
  -- However, "build-depends: MyLibrary >= 0.2" should match the installed one.
  case internalMatch of
    Just pkgid -> Right (InternalDependency dep pkgid)
    Nothing ->
      case Map.lookup pkgname requiredDepsMap of
        -- If we know the exact pkg to use, then use it.
        Just pkginstance -> Right (ExternalDependency dep pkginstance)
        -- Otherwise we just pick an arbitrary instance of the latest
        -- version ('lookupDependency' returns versions in ascending
        -- order, so 'last' is the newest; the instance list of a
        -- matching version is never empty).
        Nothing ->
          case PackageIndex.lookupDependency installedIndex dep of
            []   -> Left (DependencyNotExists pkgname)
            pkgs -> let (_ver, pkginstances) = last pkgs
                    in Right (ExternalDependency dep (head pkginstances))
  where
    -- The internal library only counts when its version is in range.
    internalMatch =
      case PackageIndex.lookupPackageName internalIndex pkgname of
        [(_, [pkg])] | packageVersion pkg `withinRange` vr
          -> Just (packageId pkg)
        _ -> Nothing
-- | Log (at info level) one line per resolved dependency, naming the
-- package id that was chosen for it.
reportSelectedDependencies :: Verbosity
                           -> [ResolvedDependency] -> IO ()
reportSelectedDependencies verbosity deps =
    info verbosity (unlines (map describe deps))
  where
    describe resolved =
        "Dependency " ++ display (simplifyDependency dep)
                      ++ ": using " ++ display pkgid
      where
        (dep, pkgid) = case resolved of
          ExternalDependency dep' pkg'   -> (dep', packageId pkg')
          InternalDependency dep' pkgid' -> (dep', pkgid')
-- | Die with one message per unresolved dependency; a no-op when every
-- dependency resolved.
reportFailedDependencies :: [FailedDependency] -> IO ()
reportFailedDependencies failed
  | null failed = return ()
  | otherwise   = die (intercalate "\n\n" (map describe failed))
  where
    describe (DependencyNotExists pkgname) =
         "there is no version of " ++ display pkgname ++ " installed.\n"
      ++ "Perhaps you need to download and install it from\n"
      ++ hackageUrl ++ display pkgname ++ "?"
    describe (DependencyNoVersion dep) =
      "cannot satisfy dependency " ++ display (simplifyDependency dep) ++ "\n"
-- | List all installed packages in the given package databases.
getInstalledPackages :: Verbosity -> Compiler
                     -> PackageDBStack -- ^ The stack of package databases.
                     -> ProgramConfiguration
                     -> IO InstalledPackageIndex
getInstalledPackages verbosity comp packageDBs progconf = do
  -- An empty stack means the user cleared the db list without providing
  -- a replacement; there is nothing sensible to query.
  when (null packageDBs) $
    die $ "No package databases have been specified. If you use "
       ++ "--package-db=clear, you must follow it with --package-db= "
       ++ "with 'global', 'user' or a specific file."
  info verbosity "Reading installed packages..."
  -- Dispatch to the per-compiler implementation; unsupported compilers
  -- are an error rather than an empty index.
  case compilerFlavor comp of
    GHC   -> GHC.getInstalledPackages verbosity comp packageDBs progconf
    GHCJS -> GHCJS.getInstalledPackages verbosity packageDBs progconf
    JHC   -> JHC.getInstalledPackages verbosity packageDBs progconf
    LHC   -> LHC.getInstalledPackages verbosity packageDBs progconf
    UHC   -> UHC.getInstalledPackages verbosity comp packageDBs progconf
    HaskellSuite {} ->
      HaskellSuite.getInstalledPackages verbosity packageDBs progconf
    flv -> die $ "don't know how to find the installed packages for "
              ++ display flv
-- | Like 'getInstalledPackages', but for a single package DB.
getPackageDBContents :: Verbosity -> Compiler
                     -> PackageDB -> ProgramConfiguration
                     -> IO InstalledPackageIndex
getPackageDBContents verbosity comp packageDB progconf = do
  info verbosity "Reading installed packages..."
  case compilerFlavor comp of
    -- GHC and GHCJS can read one specific db directly.
    GHC -> GHC.getPackageDBContents verbosity packageDB progconf
    GHCJS -> GHCJS.getPackageDBContents verbosity packageDB progconf
    -- For other compilers, try to fall back on 'getInstalledPackages',
    -- querying a one-element db stack.
    _   -> getInstalledPackages verbosity comp [packageDB] progconf
-- | A set of files (or directories) that can be monitored to detect when
-- there might have been a change in the installed packages.
--
getInstalledPackagesMonitorFiles :: Verbosity -> Compiler
                                 -> PackageDBStack
                                 -> ProgramConfiguration -> Platform
                                 -> IO [FilePath]
getInstalledPackagesMonitorFiles verbosity comp packageDBs progconf platform =
    case compilerFlavor comp of
      GHC -> GHC.getInstalledPackagesMonitorFiles
               verbosity platform progconf packageDBs
      -- Only GHC is supported; for other compilers warn and return no
      -- files, so callers simply get no change detection.
      other -> do
        warn verbosity $ "don't know how to find change monitoring files for "
                      ++ "the installed package databases for " ++ display other
        return []
-- | The user interface specifies the package dbs to use with a combination of
-- @--global@, @--user@ and @--package-db=global|user|clear|$file@.
-- This function combines the global/user flag and interprets the package-db
-- flag into a single package db stack.
--
interpretPackageDbFlags :: Bool -> [Maybe PackageDB] -> PackageDBStack
interpretPackageDbFlags userInstall specificDBs =
    foldl apply initialStack specificDBs
  where
    -- The starting stack depends only on the global/user choice.
    initialStack | userInstall = [GlobalPackageDB, UserPackageDB]
                 | otherwise   = [GlobalPackageDB]
    -- 'Nothing' encodes "clear": discard everything accumulated so far;
    -- 'Just db' appends one db to the end of the stack.
    apply _    Nothing   = []
    apply dbs (Just db)  = dbs ++ [db]
-- | The spec version from which per-target dependencies are honoured.
newPackageDepsBehaviourMinVersion :: Version
newPackageDepsBehaviourMinVersion = Version [1,7,1] []

-- In older cabal versions, there was only one set of package dependencies for
-- the whole package. In this version, we can have separate dependencies per
-- target, but we only enable this behaviour if the minimum cabal version
-- specified is >= a certain minimum. Otherwise, for compatibility we use the
-- old behaviour.
newPackageDepsBehaviour :: PackageDescription -> Bool
newPackageDepsBehaviour pkg =
   specVersion pkg >= newPackageDepsBehaviourMinVersion
-- We are given both --constraint="foo < 2.0" style constraints and also
-- specific packages to pick via --dependency="foo=foo-2.0-177d5cdf20962d0581".
--
-- When finalising the package we have to take into account the specific
-- installed deps we've been given, and the finalise function expects
-- constraints, so we have to translate these deps into version constraints.
--
-- But after finalising we then have to make sure we pick the right specific
-- deps in the end. So we still need to remember which installed packages to
-- pick.
combinedConstraints :: [Dependency] ->
                       [(PackageName, ComponentId)] ->
                       InstalledPackageIndex ->
                       Either String ([Dependency],
                                      Map PackageName InstalledPackageInfo)
combinedConstraints constraints dependencies installedPackages = do
    -- This runs in the 'Either String' monad, so each 'when .. Left'
    -- short-circuits with a rendered error message.
    when (not (null badComponentIds)) $
      Left $ render $ text "The following package dependencies were requested"
         $+$ nest 4 (dispDependencies badComponentIds)
         $+$ text "however the given installed package instance does not exist."

    when (not (null badNames)) $
      Left $ render $ text "The following package dependencies were requested"
         $+$ nest 4 (dispDependencies badNames)
         $+$ text "however the installed package's name does not match the name given."

    --TODO: we don't check that all dependencies are used!

    return (allConstraints, idConstraintMap)

  where
    -- The given constraints, plus an exact-version constraint for each
    -- --dependency whose installed package instance was found.
    allConstraints :: [Dependency]
    allConstraints = constraints
                  ++ [ thisPackageVersion (packageId pkg)
                     | (_, _, Just pkg) <- dependenciesPkgInfo ]

    -- Which exact installed package to use for each named dependency.
    idConstraintMap :: Map PackageName InstalledPackageInfo
    idConstraintMap = Map.fromList
                        [ (packageName pkg, pkg)
                        | (_, _, Just pkg) <- dependenciesPkgInfo ]

    -- The dependencies along with the installed package info, if it exists
    dependenciesPkgInfo :: [(PackageName, ComponentId,
                             Maybe InstalledPackageInfo)]
    dependenciesPkgInfo =
      [ (pkgname, ipkgid, mpkg)
      | (pkgname, ipkgid) <- dependencies
      , let mpkg = PackageIndex.lookupComponentId
                     installedPackages ipkgid
      ]

    -- If we looked up a package specified by an installed package id
    -- (i.e. someone has written a hash) and didn't find it then it's
    -- an error.
    badComponentIds =
      [ (pkgname, ipkgid)
      | (pkgname, ipkgid, Nothing) <- dependenciesPkgInfo ]

    -- If someone has written e.g.
    -- --dependency="foo=MyOtherLib-1.0-07...5bf30" then they have
    -- probably made a mistake.
    badNames =
      [ (requestedPkgName, ipkgid)
      | (requestedPkgName, ipkgid, Just pkg) <- dependenciesPkgInfo
      , let foundPkgName = packageName pkg
      , requestedPkgName /= foundPkgName ]

    -- Render a list of --dependency="name=id" requests for error output.
    dispDependencies deps =
      hsep [    text "--dependency="
             <> quotes (disp pkgname <> char '=' <> disp ipkgid)
           | (pkgname, ipkgid) <- deps ]
-- -----------------------------------------------------------------------------
-- Configuring hole instantiation
-- | Check the hole instantiation given by --instantiate-with against the
-- library's signatures, and resolve the implementing packages.  Returns
-- the selected implementation packages (may contain duplicates) and the
-- per-module instantiation mapping.
configureInstantiateWith :: PackageDescription
                         -> ConfigFlags
                         -> InstalledPackageIndex -- ^ installed packages
                         -> IO ([InstalledPackageInfo],
                                [(ModuleName, (InstalledPackageInfo, ModuleName))])
configureInstantiateWith pkg_descr cfg installedPackageSet = do
    -- Holes: First, check and make sure the provided instantiation covers
    -- all the holes we know about.  Indefinite package installation is
    -- not handled at all at this point.
    -- NB: We union together /all/ of the requirements when calculating
    -- the package key.
    -- NB: For now, we assume that dependencies don't contribute signatures.
    -- This will be handled by cabal-install; as far as ./Setup is
    -- concerned, the most important thing is to be provided correctly
    -- built dependencies.
    let signatures =
          maybe [] (\lib -> requiredSignatures lib ++ exposedSignatures lib)
                   (PD.library pkg_descr)
        signatureSet = Set.fromList signatures
        instantiateMap = Map.fromList (configInstantiateWith cfg)
        -- Signatures with no entry in the user-provided instantiation.
        missing_impls = filter (not . flip Map.member instantiateMap) signatures
        -- Keep only the instantiation entries that target a known
        -- signature; extra entries are silently dropped here.
        hole_insts0 = filter (\(k,_) -> Set.member k signatureSet) (configInstantiateWith cfg)

    when (not (null missing_impls)) $
      die $ "Missing signature implementations for these modules: "
        ++ intercalate ", " (map display missing_impls)

    -- Holes: Next, we need to make sure we have packages to actually
    -- provide the implementations we're talking about.  This is on top
    -- of the normal dependency resolution process.
    -- TODO: internal dependencies (e.g. the test package depending on the
    -- main library) is not currently supported
    let selectHoleDependency (k,(i,m)) =
          case PackageIndex.lookupComponentId installedPackageSet i of
            Just pkginst -> Right (k,(pkginst, m))
            Nothing -> Left i
        (failed_hmap, hole_insts) = partitionEithers (map selectHoleDependency hole_insts0)
        holeDeps = map (fst.snd) hole_insts -- could have dups

    -- Holes: Finally, any dependencies selected this way have to be
    -- included in the allPkgs index, as well as the buildComponents.
    -- But don't report these as potential inconsistencies!

    when (not (null failed_hmap)) $
      die $ "Could not resolve these package IDs (from signature implementations): "
        ++ intercalate ", " (map display failed_hmap)

    return (holeDeps, hole_insts)
-- -----------------------------------------------------------------------------
-- Configuring program dependencies
-- | Configure each build-tool dependency in turn, threading the program
-- database through 'configureRequiredProgram'.
configureRequiredPrograms :: Verbosity -> [Dependency] -> ProgramConfiguration
                             -> IO ProgramConfiguration
configureRequiredPrograms verbosity deps conf =
  foldM (configureRequiredProgram verbosity) conf deps
-- | Locate one required build tool in the program database, enforcing
-- its version range when a constraint was given.  Dies when the tool is
-- not a known program or cannot be satisfied.
configureRequiredProgram :: Verbosity -> ProgramConfiguration -> Dependency
                            -> IO ProgramConfiguration
configureRequiredProgram verbosity conf
  (Dependency (PackageName progName) verRange) =
  case lookupKnownProgram progName conf of
    Nothing -> die ("Unknown build tool " ++ progName)
    Just prog
      -- requireProgramVersion always requires the program have a version
      -- but if the user says "build-depends: foo" ie no version constraint
      -- then we should not fail if we cannot discover the program version.
      | verRange == anyVersion -> do
          (_, conf') <- requireProgram verbosity prog conf
          return conf'
      | otherwise -> do
          (_, _, conf') <- requireProgramVersion verbosity prog verRange conf
          return conf'
-- -----------------------------------------------------------------------------
-- Configuring pkg-config package dependencies
-- | Check all pkgconfig-depends entries with the pkg-config tool and, on
-- success, fold the pkg-config-derived build info (cflags, libs, ...)
-- into the package description's components.
configurePkgconfigPackages :: Verbosity -> PackageDescription
                           -> ProgramConfiguration
                           -> IO (PackageDescription, ProgramConfiguration)
configurePkgconfigPackages verbosity pkg_descr conf
  | null allpkgs = return (pkg_descr, conf)
  | otherwise    = do
    -- pkg-config >= 0.9.0 itself is required before we can query it.
    (_, _, conf') <- requireProgramVersion
                       (lessVerbose verbosity) pkgConfigProgram
                       (orLaterVersion $ Version [0,9,0] []) conf
    -- Fail early if any pkg-config dependency is missing or out of range.
    mapM_ requirePkg allpkgs
    -- Augment every component's build info with the pkg-config output.
    lib' <- mapM addPkgConfigBILib (library pkg_descr)
    exes' <- mapM addPkgConfigBIExe (executables pkg_descr)
    tests' <- mapM addPkgConfigBITest (testSuites pkg_descr)
    benches' <- mapM addPkgConfigBIBench (benchmarks pkg_descr)
    let pkg_descr' = pkg_descr { library = lib', executables = exes',
                                 testSuites = tests', benchmarks = benches' }
    return (pkg_descr', conf')

  where
    -- Every pkgconfig-depends entry from every buildable component.
    allpkgs = concatMap pkgconfigDepends (allBuildInfo pkg_descr)
    -- Run pkg-config with the given arguments and capture stdout.
    pkgconfig = rawSystemProgramStdoutConf (lessVerbose verbosity)
                  pkgConfigProgram conf

    -- Check one pkg-config dependency: query its version and verify it
    -- lies within the requested range.  Any failure to run pkg-config is
    -- reported as the package not being found.
    requirePkg dep@(Dependency (PackageName pkg) range) = do
      version <- pkgconfig ["--modversion", pkg]
                 `catchIO`   (\_ -> die notFound)
                 `catchExit` (\_ -> die notFound)
      case simpleParse version of
        Nothing -> die "parsing output of pkg-config --modversion failed"
        Just v | not (withinRange v range) -> die (badVersion v)
               | otherwise                 -> info verbosity (depSatisfied v)
      where
        notFound     = "The pkg-config package '" ++ pkg ++ "'"
                    ++ versionRequirement
                    ++ " is required but it could not be found."
        badVersion v = "The pkg-config package '" ++ pkg ++ "'"
                    ++ versionRequirement
                    ++ " is required but the version installed on the"
                    ++ " system is version " ++ display v
        depSatisfied v = "Dependency " ++ display dep
                      ++ ": using version " ++ display v

        versionRequirement
          | isAnyVersion range = ""
          | otherwise          = " version " ++ display range

    -- Adds pkgconfig dependencies to the build info for a component
    addPkgConfigBI compBI setCompBI comp = do
      bi <- pkgconfigBuildInfo (pkgconfigDepends (compBI comp))
      return $ setCompBI comp (compBI comp `mappend` bi)

    -- Adds pkgconfig dependencies to the build info for a library
    addPkgConfigBILib = addPkgConfigBI libBuildInfo $
                          \lib bi -> lib { libBuildInfo = bi }

    -- Adds pkgconfig dependencies to the build info for an executable
    addPkgConfigBIExe = addPkgConfigBI buildInfo $
                          \exe bi -> exe { buildInfo = bi }

    -- Adds pkgconfig dependencies to the build info for a test suite
    addPkgConfigBITest = addPkgConfigBI testBuildInfo $
                          \test bi -> test { testBuildInfo = bi }

    -- Adds pkgconfig dependencies to the build info for a benchmark
    addPkgConfigBIBench = addPkgConfigBI benchmarkBuildInfo $
                          \bench bi -> bench { benchmarkBuildInfo = bi }
pkgconfigBuildInfo :: [Dependency] -> IO BuildInfo
pkgconfigBuildInfo [] = return Mon.mempty
pkgconfigBuildInfo pkgdeps = do
let pkgs = nub [ display pkg | Dependency pkg _ <- pkgdeps ]
ccflags <- pkgconfig ("--cflags" : pkgs)
ldflags <- pkgconfig ("--libs" : pkgs)
return (ccLdOptionsBuildInfo (words ccflags) (words ldflags))
-- | Makes a 'BuildInfo' from C compiler and linker flags.
--
-- This can be used with the output from configuration programs like pkg-config
-- and similar package-specific programs like mysql-config, freealut-config etc.
-- For example:
--
-- > ccflags <- rawSystemProgramStdoutConf verbosity prog conf ["--cflags"]
-- > ldflags <- rawSystemProgramStdoutConf verbosity prog conf ["--libs"]
-- > return (ccldOptionsBuildInfo (words ccflags) (words ldflags))
--
ccLdOptionsBuildInfo :: [String] -> [String] -> BuildInfo
ccLdOptionsBuildInfo cflags ldflags = mempty
  { PD.includeDirs  = map stripFlag includeFlags
  , PD.extraLibs    = map stripFlag libFlags
  , PD.extraLibDirs = map stripFlag libDirFlags
  , PD.ccOptions    = otherCcFlags
  , PD.ldOptions    = otherLdFlags
  }
  where
    -- Pull the "-Idir", "-lfoo" and "-Ldir" flags out; whatever is left
    -- over is passed through verbatim as cc/ld options.
    (includeFlags, otherCcFlags) = partition ("-I" `isPrefixOf`) cflags
    (libFlags, ldRest)           = partition ("-l" `isPrefixOf`) ldflags
    (libDirFlags, otherLdFlags)  = partition ("-L" `isPrefixOf`) ldRest
    -- Drop the two-character flag prefix, keeping just the payload.
    stripFlag = drop 2
-- -----------------------------------------------------------------------------
-- Determining the compiler details
-- | Variant of 'configCompilerEx' that extracts all of its settings
-- from the 'ConfigFlags'.
configCompilerAuxEx :: ConfigFlags
                    -> IO (Compiler, Platform, ProgramConfiguration)
configCompilerAuxEx cfg =
  configCompilerEx flavor hcPath hcPkgPath programsConfig verbosity
  where
    flavor         = flagToMaybe (configHcFlavor cfg)
    hcPath         = flagToMaybe (configHcPath cfg)
    hcPkgPath      = flagToMaybe (configHcPkg cfg)
    verbosity      = fromFlag (configVerbosity cfg)
    programsConfig = mkProgramsConfig cfg defaultProgramConfiguration
-- | Locate and probe the requested Haskell compiler, returning the
-- compiler description, the target platform (defaulting to the build
-- platform when the backend cannot determine one) and the updated
-- program database.
configCompilerEx :: Maybe CompilerFlavor -> Maybe FilePath -> Maybe FilePath
                 -> ProgramConfiguration -> Verbosity
                 -> IO (Compiler, Platform, ProgramConfiguration)
configCompilerEx Nothing _ _ _ _ = die "Unknown compiler"
configCompilerEx (Just hcFlavor) hcPath hcPkg conf verbosity = do
  (comp, mPlatform, progdb) <- configureFor hcFlavor
  return (comp, fromMaybe buildPlatform mPlatform, progdb)
  where
    -- One configuration action per supported compiler flavour.
    configureFor GHC              = GHC.configure verbosity hcPath hcPkg conf
    configureFor GHCJS            = GHCJS.configure verbosity hcPath hcPkg conf
    configureFor JHC              = JHC.configure verbosity hcPath hcPkg conf
    configureFor LHC              = do
      -- LHC reuses a GHC installation, so configure GHC first (at its
      -- default location) and hand the resulting program db to LHC.
      (_, _, ghcConf) <- GHC.configure verbosity Nothing hcPkg conf
      LHC.configure verbosity hcPath Nothing ghcConf
    configureFor UHC              = UHC.configure verbosity hcPath hcPkg conf
    configureFor (HaskellSuite {}) = HaskellSuite.configure verbosity hcPath hcPkg conf
    configureFor _                = die "Unknown compiler"
-- Ideally we would like to not have separate configCompiler* and
-- configCompiler*Ex sets of functions, but there are many custom setup scripts
-- in the wild that are using them, so the versions with old types are kept for
-- backwards compatibility. Platform was added to the return triple in 1.18.
{-# DEPRECATED configCompiler
"'configCompiler' is deprecated. Use 'configCompilerEx' instead." #-}
configCompiler :: Maybe CompilerFlavor -> Maybe FilePath -> Maybe FilePath
               -> ProgramConfiguration -> Verbosity
               -> IO (Compiler, ProgramConfiguration)
-- Old-style entry point kept for custom Setup.hs scripts in the wild;
-- it simply drops the 'Platform' from the result of 'configCompilerEx'.
configCompiler mFlavor hcPath hcPkg conf verbosity = do
  (comp, _platform, progdb) <- configCompilerEx mFlavor hcPath hcPkg conf verbosity
  return (comp, progdb)
{-# DEPRECATED configCompilerAux
"configCompilerAux is deprecated. Use 'configCompilerAuxEx' instead." #-}
configCompilerAux :: ConfigFlags
                  -> IO (Compiler, ProgramConfiguration)
-- Old-style entry point kept for custom Setup.hs scripts in the wild;
-- it simply drops the 'Platform' from the result of 'configCompilerAuxEx'.
configCompilerAux cfg = do
  (comp, _platform, progdb) <- configCompilerAuxEx cfg
  return (comp, progdb)
-- -----------------------------------------------------------------------------
-- Making the internal component graph
-- | Build the internal dependency graph between the enabled components
-- of this package: each component is paired with the names of the other
-- components in the same package that it depends on.  Returns 'Left'
-- with the offending cycle if components depend on each other cyclically.
mkComponentsGraph :: PackageDescription
-> [PackageId]
-> Either [ComponentName]
[(Component, [ComponentName])]
mkComponentsGraph pkg_descr internalPkgDeps =
let graph = [ (c, componentName c, componentDeps c)
| c <- pkgEnabledComponents pkg_descr ]
in case checkComponentsCyclic graph of
Just ccycle -> Left [ cname | (_,cname,_) <- ccycle ]
Nothing -> Right [ (c, cdeps) | (c, _, cdeps) <- graph ]
where
-- The dependencies for the given component
componentDeps component =
-- A build-tool dependency whose name matches one of this package's
-- own executables is an internal dependency on that executable.
[ CExeName toolname | Dependency (PackageName toolname) _
<- buildTools bi
, toolname `elem` map exeName
(executables pkg_descr) ]
-- A build-depends entry naming the package itself is a dependency
-- on this package's own library.
++ [ CLibName | Dependency pkgname _ <- targetBuildDepends bi
, pkgname `elem` map packageName internalPkgDeps ]
where
bi = componentBuildInfo component
-- | Report a dependency cycle between the package's components and abort.
--
-- The cycle is rendered by repeating its first element at the end, e.g.
-- @'exe:a' depends on 'lib' depends on 'exe:a'@.
reportComponentCycle :: [ComponentName] -> IO a
reportComponentCycle cnames =
  die $ "Components in the package depend on each other in a cyclic way:\n "
  ++ intercalate " depends on "
  [ "'" ++ showComponentName cname ++ "'"
  -- 'take 1' (rather than the partial 'head') closes the cycle and
  -- stays total even if an empty list is ever passed in.
  | cname <- cnames ++ take 1 cnames ]
-- | This method computes a default, "good enough" 'ComponentId'
-- for a package. The intent is that cabal-install (or the user) will
-- specify a more detailed IPID via the @--ipid@ flag if necessary.
-- The result has the shape @pkgid-hash[-component-suffix]@.
computeComponentId :: PackageDescription
-> ComponentName
-- TODO: careful here!
-> [ComponentId] -- IPIDs of the component dependencies
-> FlagAssignment
-> IO ComponentId
computeComponentId pkg_descr cname dep_ipids flagAssignment = do
-- show is found to be faster than intercalate and then replacement of
-- special character used in intercalating. We cannot simply hash by
-- doubly concating list, as it just flatten out the nested list, so
-- different sources can produce same hash
let hash = hashToBase62 $
-- For safety, include the package + version here
-- for GHC 7.10, where just the hash is used as
-- the package key
(display (package pkg_descr))
++ (show $ dep_ipids)
++ show flagAssignment
return . ComponentId $
display (package pkg_descr)
++ "-" ++ hash
++ (case cname of
CLibName -> ""
-- TODO: these could result in non-parseable IPIDs
-- since the component name format is very flexible
CExeName s -> "-" ++ s ++ ".exe"
CTestName s -> "-" ++ s ++ ".test"
CBenchName s -> "-" ++ s ++ ".bench")
where
-- Map a digit value in [0, 61] to its base-62 character:
-- 0-9, then A-Z, then a-z. Values outside that range (which
-- 'showIntAtBase' never produces for base 62) map to '@'.
representBase62 x
| x < 10 = chr (48 + x)
| x < 36 = chr (65 + x - 10)
| x < 62 = chr (97 + x - 36)
| otherwise = '@'
-- Pack the two 64-bit words of a 'Fingerprint' into one Integer.
fpToInteger (Fingerprint a b) =
toInteger a * (shift (1 :: Integer) 64) + toInteger b
-- Fingerprint a string and render the fingerprint in base 62.
hashToBase62 s = showIntAtBase 62 representBase62
(fpToInteger $ fingerprintString s) ""
-- | Compute the 'ComponentLocalBuildInfo' for every component in the
-- package, paired with the component's name and its internal component
-- dependencies.  Also determines the library's 'ComponentId' (either
-- taken from @--ipid@ or computed) and a compatibility package key for
-- compilers that cannot use the full IPID.
mkComponentsLocalBuildInfo :: ConfigFlags
-> Compiler
-> InstalledPackageIndex
-> PackageDescription
-> [PackageId] -- internal package deps
-> [InstalledPackageInfo] -- external package deps
-> [InstalledPackageInfo] -- hole package deps
-> Map ModuleName (InstalledPackageInfo, ModuleName)
-> [(Component, [ComponentName])]
-> FlagAssignment
-> IO [(ComponentName, ComponentLocalBuildInfo,
[ComponentName])]
mkComponentsLocalBuildInfo cfg comp installedPackages pkg_descr
internalPkgDeps externalPkgDeps holePkgDeps hole_insts
graph flagAssignment = do
-- Pre-compute library hash so we can setup internal deps
lib_hash@(ComponentId str) <-
-- TODO configIPID should have name changed
case configIPID cfg of
Flag lib_hash0 ->
-- Hack to reuse install dirs machinery
-- NB: no real IPID available at this point
let env = packageTemplateEnv (package pkg_descr) (ComponentId "")
str = fromPathTemplate (substPathTemplate env (toPathTemplate lib_hash0))
in return (ComponentId str)
_ ->
computeComponentId pkg_descr CLibName (getDeps CLibName) flagAssignment
let extractCandidateCompatKey s
= case simpleParse s :: Maybe PackageId of
-- Something like 'foo-0.1', use it verbatim.
-- (NB: hash tags look like tags, so they are parsed,
-- so the extra equality check tests if a tag was dropped.)
Just pid | display pid == s -> s
-- Something like 'foo-0.1-XXX', take the stuff at the end.
-- TODO this won't work with component stuff
_ -> reverse (takeWhile isAlphaNum (reverse s))
cand_compat_key = ComponentId (extractCandidateCompatKey str)
old_style_key = ComponentId (display (package pkg_descr))
best_key = ComponentId str
-- Pick the key format the compiler can cope with: the full IPID for
-- modern compilers, the trailing hash for compilers with package-key
-- support only, and the plain package id for the oldest ones.
compat_key =
if packageKeySupported comp
then if unifiedIPIDRequired comp
then best_key
else cand_compat_key
else old_style_key
sequence
[ do clbi <- componentLocalBuildInfo lib_hash compat_key c
return (componentName c, clbi, cdeps)
| (c, cdeps) <- graph ]
where
-- IPIDs of the external packages that the named component depends on.
getDeps cname =
let externalPkgs = maybe [] (\lib -> selectSubset (componentBuildInfo lib)
externalPkgDeps)
(lookupComponent pkg_descr cname)
in map Installed.installedComponentId externalPkgs
-- The allPkgDeps contains all the package deps for the whole package
-- but we need to select the subset for this specific component.
-- we just take the subset for the package names this component
-- needs. Note, this only works because we cannot yet depend on two
-- versions of the same package.
componentLocalBuildInfo lib_hash compat_key component =
case component of
CLib lib -> do
let exports = map (\n -> Installed.ExposedModule n Nothing Nothing)
(PD.exposedModules lib)
-- Exposed signatures additionally record which module (if any)
-- instantiates the hole, taken from hole_insts.
esigs = map (\n -> Installed.ExposedModule n Nothing
(fmap (\(pkg,m) -> Installed.OriginalModule
(Installed.installedComponentId pkg) m)
(Map.lookup n hole_insts)))
(PD.exposedSignatures lib)
let mb_reexports = resolveModuleReexports installedPackages
(packageId pkg_descr)
lib_hash
externalPkgDeps lib
reexports <- case mb_reexports of
Left problems -> reportModuleReexportProblems problems
Right r -> return r
return LibComponentLocalBuildInfo {
componentPackageDeps = cpds,
componentId = lib_hash,
componentCompatPackageKey = compat_key,
componentPackageRenaming = cprns,
componentExposedModules = exports ++ reexports ++ esigs
}
CExe _ ->
return ExeComponentLocalBuildInfo {
componentPackageDeps = cpds,
componentPackageRenaming = cprns
}
CTest _ ->
return TestComponentLocalBuildInfo {
componentPackageDeps = cpds,
componentPackageRenaming = cprns
}
CBench _ ->
return BenchComponentLocalBuildInfo {
componentPackageDeps = cpds,
componentPackageRenaming = cprns
}
where
bi = componentBuildInfo component
-- Collapse duplicate (component id, package id) pairs.
dedup = Map.toList . Map.fromList
cpds = if newPackageDepsBehaviour pkg_descr
then dedup $
[ (Installed.installedComponentId pkg, packageId pkg)
| pkg <- selectSubset bi externalPkgDeps ]
++ [ (lib_hash, pkgid)
| pkgid <- selectSubset bi internalPkgDeps ]
else [ (Installed.installedComponentId pkg, packageId pkg)
| pkg <- externalPkgDeps ]
cprns = if newPackageDepsBehaviour pkg_descr
then Map.unionWith mappend
-- We need hole dependencies passed to GHC, so add them here
-- (but note that they're fully thinned out. If they
-- appeared legitimately the monoid instance will
-- fill them out.)
(Map.fromList [(packageName pkg, mempty) | pkg <- holePkgDeps])
(targetBuildRenaming bi)
-- Hack: if we have old package-deps behavior, it's impossible
-- for non-default renamings to be used, because the Cabal
-- version is too early. This is good, because while all the
-- deps were bundled up in buildDepends, we didn't do this for
-- renamings, so it's not even clear how to get the merged
-- version. So just assume that all of them are the default..
else Map.fromList (map (\(_,pid) -> (packageName pid, defaultRenaming)) cpds)
-- Keep only the packages that this component's build-depends
-- actually names.
selectSubset :: Package pkg => BuildInfo -> [pkg] -> [pkg]
selectSubset bi pkgs =
[ pkg | pkg <- pkgs, packageName pkg `elem` names bi ]
names bi = [ name | Dependency name _ <- targetBuildDepends bi ]
-- | Given the author-specified re-export declarations from the .cabal file,
-- resolve them to the form that we need for the package database.
--
-- An invariant of the package database is that we always link the re-export
-- directly to its original defining location (rather than indirectly via a
-- chain of re-exporting packages).
--
resolveModuleReexports :: InstalledPackageIndex -- ^ all installed packages
-> PackageId -- ^ the package being configured
-> ComponentId -- ^ id of the library being configured
-> [InstalledPackageInfo] -- ^ direct external deps
-> Library
-> Either [(ModuleReexport, String)] -- errors
[Installed.ExposedModule] -- ok
resolveModuleReexports installedPackages srcpkgid key externalPkgDeps lib =
-- Resolve each declared re-export independently; report all failures
-- at once rather than stopping at the first one.
case partitionEithers (map resolveModuleReexport (PD.reexportedModules lib)) of
([], ok) -> Right ok
(errs, _) -> Left errs
where
-- A mapping from visible module names to their original defining
-- module name. We also record the package name of the package which
-- *immediately* provided the module (not the original) to handle if the
-- user explicitly says which build-depends they want to reexport from.
visibleModules :: Map ModuleName [(PackageName, Installed.ExposedModule)]
visibleModules =
Map.fromListWith (++) $
[ (Installed.exposedName exposedModule, [(exportingPackageName,
exposedModule)])
-- The package index here contains all the indirect deps of the
-- package we're configuring, but we want just the direct deps
| let directDeps = Set.fromList (map Installed.installedComponentId externalPkgDeps)
, pkg <- PackageIndex.allPackages installedPackages
, Installed.installedComponentId pkg `Set.member` directDeps
, let exportingPackageName = packageName pkg
, exposedModule <- visibleModuleDetails pkg
]
-- This package's own (exposed and other) modules are also visible
-- sources for a re-export; they point back at this very library.
++ [ (visibleModuleName, [(exportingPackageName, exposedModule)])
| visibleModuleName <- PD.exposedModules lib
++ otherModules (libBuildInfo lib)
, let exportingPackageName = packageName srcpkgid
definingModuleName = visibleModuleName
definingPackageId = key
originalModule = Installed.OriginalModule definingPackageId
definingModuleName
exposedModule = Installed.ExposedModule visibleModuleName
(Just originalModule)
Nothing
]
-- All the modules exported from this package and their defining name and
-- package (either defined here in this package or re-exported from some
-- other package). Return an ExposedModule because we want to hold onto
-- signature information.
visibleModuleDetails :: InstalledPackageInfo -> [Installed.ExposedModule]
visibleModuleDetails pkg = do
exposedModule <- Installed.exposedModules pkg
case Installed.exposedReexport exposedModule of
-- The first case is the modules actually defined in this package.
-- In this case the reexport will point to this package.
Nothing -> return exposedModule { Installed.exposedReexport =
Just (Installed.OriginalModule (Installed.installedComponentId pkg)
(Installed.exposedName exposedModule)) }
-- On the other hand, a visible module might actually be itself
-- a re-export! In this case, the re-export info for the package
-- doing the re-export will point us to the original defining
-- module name and package, so we can reuse the entry.
Just _ -> return exposedModule
-- Resolve a single re-export declaration against the visibleModules
-- map, producing either the resolved ExposedModule or an error message.
resolveModuleReexport reexport@ModuleReexport {
moduleReexportOriginalPackage = moriginalPackageName,
moduleReexportOriginalName = originalName,
moduleReexportName = newName
} =
let filterForSpecificPackage =
case moriginalPackageName of
Nothing -> id
Just originalPackageName ->
filter (\(pkgname, _) -> pkgname == originalPackageName)
matches = filterForSpecificPackage
(Map.findWithDefault [] originalName visibleModules)
in
case (matches, moriginalPackageName) of
-- Several providers are fine as long as they all resolve to the
-- same original module; otherwise the re-export is ambiguous.
((_, exposedModule):rest, _)
-- TODO: Refine this check for signatures
| all (\(_, exposedModule') -> Installed.exposedReexport exposedModule
== Installed.exposedReexport exposedModule') rest
-> Right exposedModule { Installed.exposedName = newName }
([], Just originalPackageName)
-> Left $ (,) reexport
$ "The package " ++ display originalPackageName
++ " does not export a module " ++ display originalName
([], Nothing)
-> Left $ (,) reexport
$ "The module " ++ display originalName
++ " is not exported by any suitable package (this package "
++ "itself nor any of its 'build-depends' dependencies)."
(ms, _)
-> Left $ (,) reexport
$ "The module " ++ display originalName ++ " is exported "
++ "by more than one package ("
++ intercalate ", " [ display pkgname | (pkgname,_) <- ms ]
++ ") and so the re-export is ambiguous. The ambiguity can "
++ "be resolved by qualifying by the package name. The "
++ "syntax is 'packagename:moduleName [as newname]'."
-- Note: if in future Cabal allows directly depending on multiple
-- instances of the same package (e.g. backpack) then an additional
-- ambiguity case is possible here: (_, Just originalPackageName)
-- with the module being ambiguous despite being qualified by a
-- package name. Presumably by that time we'll have a mechanism to
-- qualify the instance we're referring to.
-- | Print one line per failed module re-export and abort.
reportModuleReexportProblems :: [(ModuleReexport, String)] -> IO a
reportModuleReexportProblems problems =
  die (unlines messages)
  where
    messages =
      [ "Problem with the module re-export '" ++ display reexport ++ "': " ++ msg
      | (reexport, msg) <- problems ]
-- -----------------------------------------------------------------------------
-- Testing C lib and header dependencies
-- Try to build a test C program which includes every header and links every
-- lib. If that fails, try to narrow it down by preprocessing (only) and linking
-- with individual headers and libs. If none is the obvious culprit then give a
-- generic error message.
-- TODO: produce a log file from the compiler errors, if any.
-- | Verify that the C headers and libraries declared by the package can
-- actually be used: compile and link a tiny C program that includes
-- every header and links every library. On failure, bisect to identify
-- the offending header or library and explain the problem to the user.
checkForeignDeps :: PackageDescription -> LocalBuildInfo -> Verbosity -> IO ()
checkForeignDeps pkg lbi verbosity = do
ifBuildsWith allHeaders (commonCcArgs ++ makeLdArgs allLibs) -- I'm feeling
-- lucky
(return ())
(do missingLibs <- findMissingLibs
missingHdr <- findOffendingHdr
explainErrors missingHdr missingLibs)
where
allHeaders = collectField PD.includes
allLibs = collectField PD.extraLibs
-- Try to build a test program with the given headers and flags; run
-- the first action on success and the second on failure.
ifBuildsWith headers args success failure = do
ok <- builds (makeProgram headers) args
if ok then success else failure
-- Find the first header that fails, by building with successively
-- longer prefixes of the header list. 'Left' means the header fails
-- even to preprocess; 'Right' means it preprocesses but won't compile.
findOffendingHdr =
ifBuildsWith allHeaders ccArgs
(return Nothing)
(go . tail . inits $ allHeaders)
where
go [] = return Nothing -- cannot happen
go (hdrs:hdrsInits) =
-- Try just preprocessing first
ifBuildsWith hdrs cppArgs
-- If that works, try compiling too
(ifBuildsWith hdrs ccArgs
(go hdrsInits)
(return . Just . Right . last $ hdrs))
(return . Just . Left . last $ hdrs)
cppArgs = "-E":commonCppArgs -- preprocess only
ccArgs = "-c":commonCcArgs -- don't try to link
-- Libraries that cannot be linked even in isolation.
findMissingLibs = ifBuildsWith [] (makeLdArgs allLibs)
(return [])
(filterM (fmap not . libExists) allLibs)
libExists lib = builds (makeProgram []) (makeLdArgs [lib])
-- Preprocessor flags: include dirs and cpp/cc options from this
-- package and from all of its installed dependencies.
commonCppArgs = platformDefines lbi
++ [ "-I" ++ autogenModulesDir lbi ]
++ [ "-I" ++ dir | dir <- collectField PD.includeDirs ]
++ ["-I."]
++ collectField PD.cppOptions
++ collectField PD.ccOptions
++ [ "-I" ++ dir
| dep <- deps
, dir <- Installed.includeDirs dep ]
++ [ opt
| dep <- deps
, opt <- Installed.ccOptions dep ]
commonCcArgs = commonCppArgs
++ collectField PD.ccOptions
++ [ opt
| dep <- deps
, opt <- Installed.ccOptions dep ]
commonLdArgs = [ "-L" ++ dir | dir <- collectField PD.extraLibDirs ]
++ collectField PD.ldOptions
++ [ "-L" ++ dir
| dep <- deps
, dir <- Installed.libraryDirs dep ]
--TODO: do we also need dependent packages' ld options?
makeLdArgs libs = [ "-l"++lib | lib <- libs ] ++ commonLdArgs
-- A minimal C program that includes the given headers.
makeProgram hdrs = unlines $
[ "#include \"" ++ hdr ++ "\"" | hdr <- hdrs ] ++
["int main(int argc, char** argv) { return 0; }"]
collectField f = concatMap f allBi
allBi = allBuildInfo pkg
deps = PackageIndex.topologicalOrder (installedPkgs lbi)
-- Compile (and link) the given program text with gcc in a temp dir;
-- any IO failure or non-zero exit counts as "does not build".
builds program args = do
tempDir <- getTemporaryDirectory
withTempFile tempDir ".c" $ \cName cHnd ->
withTempFile tempDir "" $ \oNname oHnd -> do
hPutStrLn cHnd program
hClose cHnd
hClose oHnd
_ <- rawSystemProgramStdoutConf verbosity
gccProgram (withPrograms lbi) (cName:"-o":oNname:args)
return True
`catchIO` (\_ -> return False)
`catchExit` (\_ -> return False)
explainErrors Nothing [] = return () -- should be impossible!
explainErrors _ _
-- No gcc configured at all: blame the missing compiler, not the deps.
| isNothing . lookupProgram gccProgram . withPrograms $ lbi
= die $ unlines $
[ "No working gcc",
"This package depends on foreign library but we cannot "
++ "find a working C compiler. If you have it in a "
++ "non-standard location you can use the --with-gcc "
++ "flag to specify it." ]
explainErrors hdr libs = die $ unlines $
[ if plural
then "Missing dependencies on foreign libraries:"
else "Missing dependency on a foreign library:"
| missing ]
++ case hdr of
Just (Left h) -> ["* Missing (or bad) header file: " ++ h ]
_ -> []
++ case libs of
[] -> []
[lib] -> ["* Missing C library: " ++ lib]
_ -> ["* Missing C libraries: " ++ intercalate ", " libs]
++ [if plural then messagePlural else messageSingular | missing]
++ case hdr of
Just (Left _) -> [ headerCppMessage ]
Just (Right h) -> [ (if missing then "* " else "")
++ "Bad header file: " ++ h
, headerCcMessage ]
_ -> []
where
plural = length libs >= 2
-- Is there something missing? (as opposed to broken)
missing = not (null libs)
|| case hdr of Just (Left _) -> True; _ -> False
messageSingular =
"This problem can usually be solved by installing the system "
++ "package that provides this library (you may need the "
++ "\"-dev\" version). If the library is already installed "
++ "but in a non-standard location then you can use the flags "
++ "--extra-include-dirs= and --extra-lib-dirs= to specify "
++ "where it is."
messagePlural =
"This problem can usually be solved by installing the system "
++ "packages that provide these libraries (you may need the "
++ "\"-dev\" versions). If the libraries are already installed "
++ "but in a non-standard location then you can use the flags "
++ "--extra-include-dirs= and --extra-lib-dirs= to specify "
++ "where they are."
headerCppMessage =
"If the header file does exist, it may contain errors that "
++ "are caught by the C compiler at the preprocessing stage. "
++ "In this case you can re-run configure with the verbosity "
++ "flag -v3 to see the error messages."
headerCcMessage =
"The header file contains a compile error. "
++ "You can re-run configure with the verbosity flag "
++ "-v3 to see the error messages from the C compiler."
-- | Output package check warnings and errors. Exit if any errors.
checkPackageProblems :: Verbosity
                     -> GenericPackageDescription
                     -> PackageDescription
                     -> IO ()
checkPackageProblems verbosity gpkg pkg = do
  ioChecks <- checkPackageFiles pkg "."
  let allChecks = checkPackage gpkg (Just pkg) ++ ioChecks
      errors    = [ e | PackageBuildImpossible e <- allChecks ]
      warnings  = [ w | PackageBuildWarning w <- allChecks ]
  -- Warnings are only worth printing when we are not about to abort
  -- with the errors anyway.
  if null errors
    then mapM_ (warn verbosity) warnings
    else die (intercalate "\n\n" errors)
-- | Perform checks if a relocatable build is allowed
checkRelocatable :: Verbosity
-> PackageDescription
-> LocalBuildInfo
-> IO ()
checkRelocatable verbosity pkg lbi
= sequence_ [ checkOS
, checkCompiler
, packagePrefixRelative
, depsPrefixRelative
]
where
-- Check if the OS support relocatable builds.
--
-- If you add new OS' to this list, and your OS supports dynamic libraries
-- and RPATH, make sure you add your OS to RPATH-support list of:
-- Distribution.Simple.GHC.getRPaths
checkOS
= unless (os `elem` [ OSX, Linux ])
$ die $ "Operating system: " ++ display os ++
", does not support relocatable builds"
where
(Platform _ os) = hostPlatform lbi
-- Check if the Compiler support relocatable builds
checkCompiler
= unless (compilerFlavor comp `elem` [ GHC ])
$ die $ "Compiler: " ++ show comp ++
", does not support relocatable builds"
where
comp = compiler lbi
-- Check if all the install dirs are relative to same prefix
packagePrefixRelative
= unless (relativeInstallDirs installDirs)
$ die $ "Installation directories are not prefix_relative:\n" ++
show installDirs
where
installDirs = absoluteInstallDirs pkg lbi NoCopyDest
p = prefix installDirs
-- Every install dir must lie underneath the prefix 'p', checked by
-- seeing whether stripping the prefix succeeds for each of them.
relativeInstallDirs (InstallDirs {..}) =
all isJust
(fmap (stripPrefix p)
[ bindir, libdir, dynlibdir, libexecdir, includedir, datadir
, docdir, mandir, htmldir, haddockdir, sysconfdir] )
-- Check if the library dirs of the dependencies that are in the package
-- database to which the package is installed are relative to the
-- prefix of the package
depsPrefixRelative = do
pkgr <- GHC.pkgRoot verbosity lbi (last (withPackageDB lbi))
mapM_ (doCheck pkgr) ipkgs
where
-- Only dependencies installed under the same package db root as
-- this package must live under our prefix; others are ignored.
doCheck pkgr ipkg
| maybe False (== pkgr) (Installed.pkgRoot ipkg)
= mapM_ (\l -> when (isNothing $ stripPrefix p l) (die (msg l)))
(Installed.libraryDirs ipkg)
| otherwise
= return ()
installDirs = absoluteInstallDirs pkg lbi NoCopyDest
p = prefix installDirs
ipkgs = PackageIndex.allPackages (installedPkgs lbi)
msg l = "Library directory of a dependency: " ++ show l ++
"\nis not relative to the installation prefix:\n" ++
show p
| randen/cabal | Cabal/Distribution/Simple/Configure.hs | bsd-3-clause | 93,156 | 1 | 26 | 28,171 | 15,652 | 8,252 | 7,400 | 1,364 | 14 |
module Main where
import Test.Balance
import Test.Correctness
import Test.IntervalSet
import Test.Tasty
import Test.Tasty.SmallCheck
import Test.Validity
-- | Run the full interval-set test suite with a SmallCheck depth of 7.
main :: IO ()
main = defaultMain (localOption (SmallCheckDepth 7) allTests)
  where
    allTests = testGroup "Tests"
      [ correctnessTests
      , validityTests
      , balanceTests
      ]
    -- Operations compute the right sets.
    correctnessTests = testGroup "Correctness"
      [ testProperty "Interval Union" intervalUnion
      , testProperty "Interval Complement" intervalComplement
      , testProperty "Interval Intersection" intervalIntersection
      ]
    -- Operations keep the structural invariants intact.
    validityTests = testGroup "Validity"
      [ testProperty "Valid" valid
      , testProperty "Interval Union" validIntervalUnion
      , testProperty "Interval Complement" validIntervalComplement
      , testProperty "Interval Intersection" validIntervalIntersection
      ]
    -- Operations keep the trees balanced.
    balanceTests = testGroup "Balance"
      [ testProperty "Balanced" balanced
      , testProperty "Interval Union" balancedIntervalUnion
      , testProperty "Interval Complement" balancedIntervalComplement
      , testProperty "Interval Intersection" balancedIntervalIntersection
      ]
| ian-mi/interval-set | tests/Test.hs | bsd-3-clause | 1,157 | 0 | 10 | 343 | 184 | 96 | 88 | 23 | 1 |
{-# LANGUAGE DeriveFunctor #-}
module Subscription
(
SubscriptionTree (..),
broadcast,
broadcast',
empty,
subscribe,
unsubscribe,
showTree,
)
where
import Control.Monad (void)
import Control.Monad.Writer (Writer, tell, execWriter)
import Data.Aeson (Value)
import Data.Foldable (for_, traverse_)
import Data.HashMap.Strict (HashMap)
import Data.Hashable (Hashable)
import Data.Maybe (fromMaybe)
import Data.Monoid ((<>))
import Data.Text (Text)
import qualified Control.Concurrent.Async as Async
import qualified Data.HashMap.Strict as HashMap
import qualified Data.Text as Text
import qualified Store
-- Keeps subscriptions in a tree data structure, so we can efficiently determine
-- which clients need to be notified for a given update.
data SubscriptionTree id conn =
-- The first map holds the subscribers at this node itself, keyed by
-- subscriber id; the second holds the child nodes, keyed by the next
-- path segment.
SubscriptionTree (HashMap id conn) (HashMap Text (SubscriptionTree id conn))
deriving (Eq, Functor, Show)
-- | A subscription tree with no subscribers at any path.
empty :: SubscriptionTree id conn
empty = SubscriptionTree HashMap.empty HashMap.empty
-- | True when this node has no subscribers of its own and no children.
isEmpty :: SubscriptionTree id conn -> Bool
isEmpty (SubscriptionTree subsHere children) =
  HashMap.null subsHere && HashMap.null children
-- | Register a subscriber (identified by @subid@, with connection value
-- @subval@) at the given path, creating intermediate nodes as needed.
subscribe
  :: (Eq id, Hashable id)
  => [Text]
  -> id
  -> conn
  -> SubscriptionTree id conn
  -> SubscriptionTree id conn
subscribe [] subid subval (SubscriptionTree here inner) =
  SubscriptionTree (HashMap.insert subid subval here) inner
subscribe (key : rest) subid subval (SubscriptionTree here inner) =
  SubscriptionTree here (HashMap.alter insertBelow key inner)
  where
    -- Descend into the child for this path segment, creating an empty
    -- subtree first if the segment was not present yet.
    insertBelow = Just . subscribe rest subid subval . fromMaybe empty
-- | Remove a subscriber from the given path, pruning any subtree that
-- becomes completely empty as a result.
unsubscribe
  :: (Eq id, Hashable id)
  => [Text]
  -> id
  -> SubscriptionTree id conn
  -> SubscriptionTree id conn
unsubscribe [] subid (SubscriptionTree here inner) =
  SubscriptionTree (HashMap.delete subid here) inner
unsubscribe (key : rest) subid (SubscriptionTree here inner) =
  SubscriptionTree here (HashMap.update removeBelow key inner)
  where
    -- Unsubscribe in the child; drop the child entirely if that left it
    -- empty, to keep the tree free of dead branches.
    removeBelow subtree =
      let pruned = unsubscribe rest subid subtree
      in if isEmpty pruned then Nothing else Just pruned
-- Invoke the callback for all subscribers to the path. The subscribers get
-- passed the subvalue at the path that they are subscribed to.
broadcast :: (conn -> Value -> IO ()) -> [Text] -> Value -> SubscriptionTree id conn -> IO ()
broadcast notify path value tree =
  -- All notifications are independent of each other, so deliver them
  -- concurrently.
  Async.mapConcurrently_ deliver (broadcast' path value tree)
  where
    deliver (conn, subvalue) = notify conn subvalue
-- Like broadcast, but return a list of notifications rather than invoking an
-- effect on each of them.
broadcast' :: [Text] -> Value -> SubscriptionTree id conn -> [(conn, Value)]
broadcast' = \path value tree -> execWriter $ loop path value tree
where
-- Walk down the tree along the path, accumulating (connection, value)
-- pairs in the Writer as we go.
loop :: [Text] -> Value -> SubscriptionTree id conn -> Writer [(conn, Value)] ()
loop path value (SubscriptionTree here inner) = do
case path of
[] -> do
-- When the path is empty, all subscribers that are "here" or at a deeper
-- level should receive a notification.
traverse_ (\v -> tell [(v, value)]) here
let broadcastInner key = loop [] (Store.lookupOrNull [key] value)
void $ HashMap.traverseWithKey broadcastInner inner
key : pathTail -> do
-- Subscribers above the updated path are notified too; they get
-- the value at their own node, which is the current 'value'.
traverse_ (\v -> tell [(v, value)]) here
for_ (HashMap.lookup key inner) $ \subs ->
loop pathTail (Store.lookupOrNull [key] value) subs
-- Show subscriptions, for debugging purposes.
showTree :: Show id => SubscriptionTree id conn -> String
showTree tree = "/\n" <> render "" tree
  where
    -- Render one node: first the subscribers sitting at this node, then
    -- every child subtree, each preceded by its full path.
    render prefix (SubscriptionTree here inner) = subscriberLines <> childLines
      where
        subscriberLines :: String
        subscriberLines =
          concatMap (\cid -> " * " <> (show cid) <> "\n") (HashMap.keys here)
        renderChild childPrefix sub = childPrefix <> "\n" <> render childPrefix sub
        childLines :: String
        childLines = concat $
          HashMap.mapWithKey (\key -> renderChild (prefix <> "/" <> Text.unpack key)) inner
| channable/icepeak | server/src/Subscription.hs | bsd-3-clause | 4,246 | 0 | 21 | 925 | 1,198 | 632 | 566 | 89 | 3 |
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
-----------------------------------------------------------------------------
-- |
-- Module : Geometry.ThreeD.Transform
-- Copyright : (c) 2013-2017 diagrams team (see LICENSE)
-- License : BSD-style (see LICENSE)
-- Maintainer : diagrams-discuss@googlegroups.com
--
-- Transformations specific to three dimensions, with a few generic
-- transformations (uniform scaling, translation) also re-exported for
-- convenience.
--
-----------------------------------------------------------------------------
module Geometry.ThreeD.Transform
( T3
-- * Rotation
, aboutX, aboutY, aboutZ
, rotationAbout, rotateAbout
, pointAt, pointAt'
-- ** Euler angles
, Euler (..), yaw, pitch, roll
-- ** Rotational class
, Rotational (..)
, rotateWith
-- * Scaling
, scalingX, scalingY, scalingZ
, scaleX, scaleY, scaleZ
, scaling, scale
-- * Translation
, translationX, translateX
, translationY, translateY
, translationZ, translateZ
, translation, translate
-- * Reflection
, reflectionX, reflectX
, reflectionY, reflectY
, reflectionZ, reflectZ
, reflectionAcross, reflectAcross
) where
import Geometry.Angle
import Geometry.Direction
import Geometry.Points
import Geometry.Space
import Geometry.ThreeD.Types
import Geometry.Transform
import Geometry.TwoD.Transform
import qualified Data.Semigroup as Sem
import Control.Lens hiding (transform)
import Data.Functor.Rep
import Linear (cross, dot, signorm)
import Linear.Matrix hiding (translation)
import Linear.Quaternion
import Linear.Vector
-- | Create a transformation which rotates by the given angle about
-- a line parallel to the Z axis passing through the local origin.
-- A positive angle brings positive x-values towards the positive-y axis.
--
-- The angle can be expressed using any type which is an
-- instance of 'Angle'. For example, @aboutZ (1\/4 \@\@
-- 'turn')@, @aboutZ (tau\/4 \@\@ 'rad')@, and @aboutZ (90 \@\@
-- 'deg')@ all represent the same transformation, namely, a
-- counterclockwise rotation by a right angle. For more general rotations,
-- see 'rotationAbout'.
--
-- Note that writing @aboutZ (1\/4)@, with no type annotation, will
-- yield an error since GHC cannot figure out which sort of angle
-- you want to use.
aboutZ :: Floating n => Angle n -> T3 n
aboutZ ang =
  let c = cosA ang
      s = sinA ang
  in  fromOrthogonal $
        V3 (V3 c    s 0)
           (V3 (-s) c 0)
           (V3 0    0 1)

-- | Like 'aboutZ', but rotates about the X axis, bringing positive
-- y-values towards the positive z-axis.
aboutX :: Floating n => Angle n -> T3 n
aboutX ang =
  let c = cosA ang
      s = sinA ang
  in  fromOrthogonal $
        V3 (V3 1 0    0)
           (V3 0 c    s)
           (V3 0 (-s) c)

-- | Like 'aboutZ', but rotates about the Y axis, bringing positive
-- x-values towards the negative z-axis.
aboutY :: Floating n => Angle n -> T3 n
aboutY ang =
  let c = cosA ang
      s = sinA ang
  in  fromOrthogonal $
        V3 (V3 c 0 (-s))
           (V3 0 1 0)
           (V3 s 0 c)
-- | @rotationAbout p d a@ is a rotation about a line parallel to @d@
-- passing through @p@.
--
-- Implemented by conjugating the origin-centred 'axisRotation' with a
-- translation that moves @p@ to the origin.
rotationAbout
  :: Floating n
  => P3 n            -- ^ origin of rotation
  -> Direction V3 n  -- ^ direction of rotation axis
  -> Angle n         -- ^ angle of rotation
  -> T3 n
rotationAbout (P p) d a = conjugate (translation p) (axisRotation d a)
-- | Rotation by an angle about an axis direction through the origin.
-- The matrix below is the standard axis–angle rotation matrix
-- (Rodrigues' formula) for unit axis @(x,y,z)@, with @t = 1 - cos a@.
axisRotation :: Floating n => Direction V3 n -> Angle n -> T3 n
axisRotation d a = fromOrthogonal $
  V3 (V3 (t*x*x + c)   (t*x*y - z*s) (t*x*z + y*s))
     (V3 (t*x*y + z*s) (t*y*y + c)   (t*y*z - x*s))
     (V3 (t*x*z - y*s) (t*y*z + x*s) (t*z*z + c))
  where
    c = cosA a
    s = sinA a
    t = 1 - c
    -- 'fromDirection' yields the (unit) axis components.
    V3 x y z = fromDirection d
-- | @rotateAbout p d a@ rotates about a line parallel to @d@ passing
-- through @p@.
rotateAbout
  :: (InSpace V3 n t, Floating n, Transformable t)
  => P3 n            -- ^ origin of rotation
  -> Direction V3 n  -- ^ direction of rotation axis
  -> Angle n         -- ^ angle of rotation
  -> t -> t
rotateAbout p d theta = transform (rotationAbout p d theta)
-- | @pointAt about initial final@ produces a rotation which brings
-- the direction @initial@ to point in the direction @final@ by first
-- panning around @about@, then tilting about the axis perpendicular
-- to @about@ and @final@. In particular, if this can be accomplished
-- without tilting, it will be, otherwise if only tilting is
-- necessary, no panning will occur. The tilt will always be between
-- ± 1/4 turn.
pointAt :: Floating n
        => Direction V3 n -> Direction V3 n -> Direction V3 n
        -> Transformation V3 n
pointAt a i f = pointAt' (fromDirection a) (fromDirection i) (fromDirection f)
-- | pointAt' has the same behavior as 'pointAt', but takes vectors
-- instead of directions.
pointAt' :: Floating n => V3 n -> V3 n -> V3 n -> Transformation V3 n
pointAt' about initial final = pointAtUnit (signorm about) (signorm initial) (signorm final)
-- | pointAtUnit has the same behavior as @pointAt@, but takes unit vectors.
--
-- The result is tilt-after-pan: @tilt <> pan@ (note that the tilt
-- angle is measured against @initial@ already transformed by @pan@).
pointAtUnit :: Floating n => V3 n -> V3 n -> V3 n -> Transformation V3 n
pointAtUnit about initial final = tilt Sem.<> pan where
  -- rotating u by (signedAngle rel u v) about rel gives a vector in the direction of v
  signedAngle rel u v = signum (cross u v `dot` rel) *^ angleBetween u v
  -- projections of initial/final onto the plane perpendicular to @about@,
  -- i.e. the plane in which panning happens.
  inPanPlaneF = final ^-^ project about final
  inPanPlaneI = initial ^-^ project about initial
  panAngle = signedAngle about inPanPlaneI inPanPlaneF
  pan = rotationAbout origin (direction about) panAngle
  tiltAngle = signedAngle tiltAxis (transform pan initial) final
  tiltAxis = cross final about
  tilt = rotationAbout origin (direction tiltAxis) tiltAngle
-- Scaling -------------------------------------------------
-- | Construct a transformation which scales by the given factor in
-- the z direction. The inverse is built alongside by dividing the
-- z-z matrix entry instead of setting it.
scalingZ :: (HasBasis v, R3 v, Fractional n) => n -> Transformation v n
scalingZ c =
  fromLinear
    (eye & _z . _z .~ c)
    (eye & _z . _z //~ c)
{-# INLINE scalingZ #-}
-- | Scale an object by the given factor in the z direction. To scale
-- uniformly, use 'scale'.
scaleZ :: (InSpace v n t, HasBasis v, R3 v, Fractional n, Transformable t) => n -> t -> t
scaleZ = transform . scalingZ
{-# INLINE scaleZ #-}
-- Translation ----------------------------------------
-- | Construct a transformation which translates by the given distance
-- in the z direction.
translationZ :: (HasBasis v, R3 v, Num n) => n -> Transformation v n
translationZ z = translation (zero & _z .~ z)
{-# INLINE translationZ #-}
-- | Translate an object by the given distance in the z
-- direction.
translateZ :: (InSpace v n t, HasBasis v, R3 v, Transformable t) => n -> t -> t
translateZ = transform . translationZ
{-# INLINE translateZ #-}
-- Reflection ----------------------------------------------
-- | Construct a transformation which flips an object across the line \(z=0\),
-- i.e. sends the point \((x,y,z)\) to \((x,y,-z)\).
reflectionZ :: (HasBasis v, R3 v, Num n) => Transformation v n
reflectionZ = fromInvoluted $ eye & _z . _z .~ (-1)
{-# INLINE reflectionZ #-}
-- | Flip an object across the line \(z=0\), i.e. send the point \((x,y,z)\) to
-- \((x,y,-z)\).
reflectZ :: (InSpace v n t, HasBasis v, R3 v, Transformable t) => t -> t
reflectZ = transform reflectionZ
{-# INLINE reflectZ #-}
-- | @reflectionAcross p v@ is a reflection across the plane through
-- the point @p@ and normal to vector @v@. This also works as a 2D
-- transform where @v@ is the normal to the line passing through point
-- @p@.
reflectionAcross :: (HasLinearMap v, Fractional n)
  => Point v n -> v n -> Transformation v n
reflectionAcross p v =
  conjugate (translation (origin .-. p)) reflect
  where
    reflect = fromLinear (f v) (f (negated v))
    -- Each basis row w of 'eye' is mapped to w - 2·proj_u(w), i.e. the
    -- matrix I - 2·u·uᵀ/(u·u): reflection across the plane normal to u.
    f u = eye & fmapRep (\w -> w ^-^ 2 *^ project u w)
-- | @reflectAcross p v@ reflects an object across the plane though
-- the point @p@ and the vector @v@. This also works as a 2D transform
-- where @v@ is the normal to the line passing through point @p@.
reflectAcross :: (InSpace v n t, HasLinearMap v, Transformable t, Fractional n)
  => Point v n -> v n -> t -> t
reflectAcross p v = transform (reflectionAcross p v)
-- | Things representing 3D rotations.
--
-- 'quaternion' and 'euler' are defined in terms of each other, so an
-- instance must supply at least one of them (see the MINIMAL pragma)
-- or both defaults would loop.
class Rotational t where
  {-# MINIMAL quaternion | euler #-}
  -- | Lens onto the rotational transform as a quaternion.
  quaternion :: RealFloat n => Lens' (t n) (Quaternion n)
  quaternion = euler . iso e2q q2e
  {-# INLINE quaternion #-}
  -- | Lens onto the rotational transform as an Euler angle.
  euler :: RealFloat n => Lens' (t n) (Euler n)
  euler = quaternion . iso q2e e2q
  {-# INLINE euler #-}
  -- | Lens onto the axis angle representation of a rotation.
  axisAngle :: RealFloat n => Lens' (t n) (AxisAngle n)
  axisAngle = quaternion . iso q2aa aa2q
  -- | The rotational component as a 3x3 matrix.
  rotationMatrix :: RealFloat n => t n -> M33 n
  rotationMatrix = fromQuaternion . view quaternion
  {-# INLINE rotationMatrix #-}
  -- | The rotational component as a 'Transformation'.
  rotationTransform :: RealFloat n => t n -> T3 n
  rotationTransform = fromOrthogonal . rotationMatrix
  {-# INLINE rotationTransform #-}
-- | Unit 'Quaternion's only.
instance Rotational Quaternion where
  quaternion = iso id id
  {-# INLINE quaternion #-}
-- | Rotate something in 3D space.
rotateWith :: (InSpace V3 n a, Rotational t, Transformable a, RealFloat n) => t n -> a -> a
rotateWith = transform . rotationTransform
{-# INLINE rotateWith #-}
------------------------------------------------------------------------
-- Euler Angles
------------------------------------------------------------------------
-- | Describe a rotational transform as a 'yaw', 'pitch' and 'roll'.
-- Currently uses Tait–Bryan YXZ convention. That is, 'yaw' rotates
-- around the y-axis, then 'pitch' rotates around the x-axis, then
-- roll rotates around the z-axis (is this the right order?).
data Euler n = Euler !(Angle n) !(Angle n) !(Angle n)
  deriving (Show, Read, Functor)
-- | Convert a unit quaternion to Euler angles (used by the default
-- 'euler' lens of 'Rotational').
q2e :: RealFloat n => Quaternion n -> Euler n
q2e (Quaternion qw (V3 qx qy qz)) = Euler y p r
  where
    t0 = 2*(qw*qy + qz*qx)
    t1 = 1 - 2*(qx*qx + qy*qy)
    t2 = 2*(qw*qx - qz*qy)
    t3 = 2*(qw*qz + qx*qy)
    t4 = 1 - 2*(qz*qz + qx*qx)
    -- account for floating point errors: clamp asin's argument to [-1,1]
    t2' | t2 > 1 = 1
        | t2 < -1 = -1
        | otherwise = t2
    --
    y = atan2A t0 t1
    p = asinA t2'
    r = atan2A t3 t4
{-# INLINE q2e #-}
-- | Convert Euler angles back to a unit quaternion (inverse of 'q2e',
-- up to angle wrapping); built from the half-angle sines/cosines.
e2q :: Floating n => Euler n -> Quaternion n
e2q (Euler y p r) = Quaternion qw (V3 qx qy qz)
  where
    qw = cr*cp*cy + sr*sp*sy
    qz = sr*cp*cy - cr*sp*sy
    qx = cr*sp*cy + sr*cp*sy
    qy = cr*cp*sy - sr*sp*cy
    --
    cy = cosA (0.5*^y)
    sy = sinA (0.5*^y)
    cp = cosA (0.5*^p)
    sp = sinA (0.5*^p)
    cr = cosA (0.5*^r)
    sr = sinA (0.5*^r)
{-# INLINE e2q #-}
instance Rotational Euler where
euler = iso id id -- stupid redundant constraint checker
{-# INLINE euler #-}
-- is it worth making a unit quaternion type wrapper?

-- | Lens onto the yaw component of any 'Rotational' value.
yaw :: (Rotational t, RealFloat n) => Lens' (t n) (Angle n)
yaw = euler . go
  where go f (Euler y p r) = fmap (\y' -> Euler y' p r) (f y)
{-# INLINE yaw #-}

-- | Lens onto the pitch component of any 'Rotational' value.
pitch :: (Rotational t, RealFloat n) => Lens' (t n) (Angle n)
pitch = euler . go
  where go f (Euler y p r) = fmap (\p' -> Euler y p' r) (f p)
{-# INLINE pitch #-}

-- | Lens onto the roll component of any 'Rotational' value.
roll :: (Rotational t, RealFloat n) => Lens' (t n) (Angle n)
roll = euler . go
  where go f (Euler y p r) = fmap (\r' -> Euler y p r') (f r)
{-# INLINE roll #-}
------------------------------------------------------------------------
-- Axis angle
------------------------------------------------------------------------
-- | An axis angle, represented by a unit vector v and an angle around
-- that vector.
data AxisAngle n = AxisAngle !(V3 n) !(Angle n)
  deriving Show
-- | Convert a unit quaternion to its axis–angle representation.
--
-- NOTE(review): for the identity rotation (q0 = ±1) @t@ is 0 and
-- @v ^/ t@ divides by zero — confirm callers never hit this case.
q2aa :: RealFloat n => Quaternion n -> AxisAngle n
q2aa (Quaternion q0 v) = AxisAngle (v ^/ t) (2 *^ atan2A t q0)
  where t = sqrt (1 - q0*q0)
{-# INLINE q2aa #-}
-- | Convert an axis–angle back to a unit quaternion via the half-angle.
aa2q :: Floating n => AxisAngle n -> Quaternion n
aa2q (AxisAngle axis theta) = Quaternion (cosA half) (sinA half *^ axis)
  where half = theta ^/ 2
{-# INLINE aa2q #-}
instance Rotational AxisAngle where
  quaternion = iso aa2q q2aa
  {-# INLINE quaternion #-}
  axisAngle = iso id id
  {-# INLINE axisAngle #-}
------------------------------------------------------------------------
-- Rotation matrix
------------------------------------------------------------------------
-- Is this worth doing?
-- | A matrix representing a rotation.
-- newtype RotationMatrix n = RM (M33 n)
-- q2rm :: RealFloat n => RotationMatrix n -> Quaternion n
-- rm2q :: RealFloat n => Quaternion n -> RotationMatrix n
-- instance Rotational RotationMatrix where
-- quaternion = iso rm2q q2rm
| cchalmers/geometry | src/Geometry/ThreeD/Transform.hs | bsd-3-clause | 13,184 | 0 | 13 | 2,974 | 3,331 | 1,776 | 1,555 | -1 | -1 |
-- | Module providing restoring from backup phrase functionality
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Cardano.Mnemonic
(
-- * Types
Mnemonic
, Entropy
, EntropySize
, MnemonicWords
-- * Errors
, MnemonicError(..)
, MnemonicException(..)
-- ** Re-exports from 'cardano-crypto'
, EntropyError(..)
, DictionaryError(..)
, MnemonicWordsError(..)
-- * Creating @Mnemonic@ (resp. @Entropy@)
, mkEntropy
, mkMnemonic
, genEntropy
-- * Converting from and to @Mnemonic@ (resp. @Entropy@)
, mnemonicToEntropy
, mnemonicToSeed
, mnemonicToAesKey
, entropyToMnemonic
, entropyToByteString
) where
import Universum
import Basement.Sized.List (unListN)
import Control.Arrow (left)
import Control.Lens ((?~))
import Crypto.Encoding.BIP39
import Crypto.Hash (Blake2b_256, Digest, hash)
import Data.Aeson (FromJSON (..), ToJSON (..))
import Data.Aeson.Types (Parser)
import Data.ByteArray (constEq, convert)
import Data.ByteString (ByteString)
import Data.Default (Default (def))
import Data.Swagger (NamedSchema (..), ToSchema (..), maxItems,
minItems)
import Formatting (bprint, build, formatToString, (%))
import Test.QuickCheck (Arbitrary (..))
import Test.QuickCheck.Gen (vectorOf)
import Pos.Binary (serialize')
import Pos.Crypto (AesKey (..))
import Pos.Infra.Util.LogSafe (SecureLog)
import qualified Basement.Compat.Base as Basement
import qualified Basement.String as Basement
import qualified Basement.UArray as Basement
import qualified Crypto.Encoding.BIP39.English as Dictionary
import qualified Crypto.Random.Entropy as Crypto
import qualified Data.ByteString.Char8 as B8
import qualified Formatting.Buildable
--
-- TYPES
--
-- | A backup phrase in the form of a non-empty list of mnemonic words.
-- Constructor isn't exposed; build values with 'mkMnemonic' or
-- 'entropyToMnemonic'.
data Mnemonic (mw :: Nat) = Mnemonic
    { mnemonicToEntropy  :: Entropy (EntropySize mw)
    , mnemonicToSentence :: MnemonicSentence mw
    } deriving (Eq, Show)
--
-- ERRORS
--
-- | Thrown (as an exception) when locally-generated entropy is rejected;
-- see 'genEntropy'.
data MnemonicException csz = UnexpectedEntropyError (EntropyError csz)
    deriving (Show, Typeable)
-- | All the ways 'mkMnemonic' can reject a word list.
data MnemonicError csz
    = ErrMnemonicWords MnemonicWordsError
    | ErrEntropy (EntropyError csz)
    | ErrDictionary DictionaryError
    | ErrForbidden
    deriving (Show)
--
-- CONSTRUCTORS
--

-- | Smart-constructor for the Entropy
mkEntropy
    :: forall n csz. (ValidEntropySize n, ValidChecksumSize n csz)
    => ByteString
    -> Either (EntropyError csz) (Entropy n)
mkEntropy bytes = toEntropy bytes

-- | Generate Entropy of a given size using a random seed.
--
-- Example:
--     do
--       ent <- genEntropy :: IO (Entropy 12)
genEntropy
    :: forall n csz. (ValidEntropySize n, ValidChecksumSize n csz)
    => IO (Entropy n)
genEntropy = do
    -- The size parameter is in bits; 'Crypto.getEntropy' wants bytes.
    raw <- Crypto.getEntropy (sizeInBits `div` 8)
    either (throwM . UnexpectedEntropyError) return (mkEntropy raw)
  where
    sizeInBits = fromIntegral (natVal (Proxy @n))
-- | Smart-constructor for the Mnemonic.
--
-- Validates in four stages: word count, dictionary membership,
-- entropy/checksum consistency, and finally a blacklist check against
-- the example mnemonic ('isForbiddenMnemonic').
mkMnemonic
    :: forall mw n csz.
     ( ConsistentEntropy n mw csz
     , EntropySize mw ~ n
     )
    => [Text]
    -> Either (MnemonicError csz) (Mnemonic mw)
mkMnemonic wordsm = do
    phrase <- left ErrMnemonicWords
        $ mnemonicPhrase @mw (toUtf8String <$> wordsm)
    sentence <- left ErrDictionary
        $ mnemonicPhraseToMnemonicSentence Dictionary.english phrase
    entropy <- left ErrEntropy
        $ wordsToEntropy sentence
    -- Reject the well-known example mnemonic shipped in the docs.
    when (isForbiddenMnemonic sentence) $ Left ErrForbidden
    pure Mnemonic
        { mnemonicToEntropy  = entropy
        , mnemonicToSentence = sentence
        }
--
-- CONVERSIONS
--

-- | Convert a mnemonic to a seed that can be used to initiate a HD wallet.
-- Note that our current implementation deviates from BIP-39 as:
--
-- - We do not use the password to produce the seed
-- - We rely on a fast blake2b hashing function rather than a slow PKBDF2
--
-- Somehow, we also convert mnemonic to raw bytes using a Blake2b_256 but with
-- a slightly different approach when converting them to aesKey when redeeming
-- paper wallets... In this case, we do not serialize the inputs and outputs.
--
-- For now, we have two use case for that serialization function. When creating
-- an HD wallet seed, in which case, the function we use is `serialize'` from
-- the Pos.Binary module. And, when creating an AESKey seed in which case we
-- simply pass the `identity` function.
mnemonicToSeed :: Mnemonic mw -> ByteString
mnemonicToSeed m =
    serialize' (blake2b (serialize' (entropyToByteString (mnemonicToEntropy m))))

-- | Convert a mnemonic to a seed AesKey. Almost identical to 'mnemonicToSeed'
-- minus the extra serialization.
mnemonicToAesKey :: Mnemonic mw -> AesKey
mnemonicToAesKey m =
    AesKey (blake2b (entropyToByteString (mnemonicToEntropy m)))
-- | Convert an Entropy to a corresponding Mnemonic Sentence.
entropyToMnemonic
    :: forall mw n csz.
     ( ValidMnemonicSentence mw
     , ValidEntropySize n
     , ValidChecksumSize n csz
     , n ~ EntropySize mw
     , mw ~ MnemonicWords n
     )
    => Entropy n
    -> Mnemonic mw
entropyToMnemonic ent =
    Mnemonic { mnemonicToEntropy = ent, mnemonicToSentence = entropyToWords ent }

-- | Convert 'Entropy' to a raw 'ByteString'.
entropyToByteString
    :: Entropy n
    -> ByteString
entropyToByteString ent = entropyRaw ent
--
-- INTERNALS
--

-- Constant-time comparison of any sentence with the 12-word example mnemonic
isForbiddenMnemonic :: (ValidMnemonicSentence mw) => MnemonicSentence mw -> Bool
isForbiddenMnemonic sentence = any (constEq raw) forbidden
  where
    raw = sentenceToRawString sentence
    forbidden = map sentenceToRawString
        [ mnemonicToSentence (def @(Mnemonic 12))
        ]

-- Render a sentence as raw UTF-8 bytes, for constant-time comparison.
sentenceToRawString :: (ValidMnemonicSentence mw) => MnemonicSentence mw -> Basement.UArray Word8
sentenceToRawString sentence =
    Basement.toBytes Basement.UTF8 (mnemonicSentenceToString Dictionary.english sentence)

-- | Simple Blake2b 256-bit of a ByteString
blake2b :: ByteString -> ByteString
blake2b bytes =
    convert (hash bytes :: Digest Blake2b_256)

toUtf8String :: Text -> Basement.String
toUtf8String txt = Basement.fromString (toString txt)

fromUtf8String :: Basement.String -> Text
fromUtf8String str = toText (Basement.toList str)
-- | The initial seed has to be vector or length multiple of 4 bytes and shorter
-- than 64 bytes. Note that this is good for testing or examples, but probably
-- not for generating truly random Mnemonic words.
--
-- See 'Crypto.Random.Entropy (getEntropy)'
instance
    ( ValidEntropySize n
    , ValidChecksumSize n csz
    ) => Arbitrary (Entropy n) where
    arbitrary =
        let
            size = fromIntegral $ natVal (Proxy @n)
            -- size is in bits; generate size/8 arbitrary bytes.
            entropy = mkEntropy @n . B8.pack <$> vectorOf (size `quot` 8) arbitrary
        in
            -- Generated bytes always have the right length, so a failure
            -- here is a programmer error worth crashing on.
            either (error . show . UnexpectedEntropyError) identity <$> entropy
-- Same remark from 'Arbitrary Entropy' applies here.
instance
    ( n ~ EntropySize mw
    , mw ~ MnemonicWords n
    , ValidChecksumSize n csz
    , ValidEntropySize n
    , ValidMnemonicSentence mw
    , Arbitrary (Entropy n)
    ) => Arbitrary (Mnemonic mw) where
    arbitrary =
        entropyToMnemonic <$> arbitrary @(Entropy n)
instance (KnownNat csz) => Exception (MnemonicException csz)
-- FIXME: Suggestion, we could -- when certain flags are turned on -- display
-- a fingerprint of the Mnemonic, like a PKBDF2 over n iterations. This could be
-- useful for debug to know whether two users are using the same mnemonic words
-- and relatively benign EVEN THOUGH, it will permit to somewhat tight requests
-- to a specific identity (since mnemonic words are 'unique', they are supposed
-- to uniquely identify users, hence the privacy issue). For debbugging only and
-- with the user consent, that's something we could do.
--
-- Mnemonics are deliberately never rendered in logs or error output.
instance Buildable (Mnemonic mw) where
    build _ =
        "<mnemonic>"
instance Buildable (SecureLog (Mnemonic mw)) where
    build _ =
        "<mnemonic>"
instance Buildable (MnemonicError csz) where
    build = \case
        ErrMnemonicWords (ErrWrongNumberOfWords a e) ->
            bprint ("MnemonicError: Invalid number of mnemonic words: got "%build%" words, expected "%build%" words") a e
        ErrDictionary (ErrInvalidDictionaryWord w) ->
            bprint ("MnemonicError: Invalid dictionary word: "%build%"") (fromUtf8String w)
        ErrEntropy (ErrInvalidEntropyLength a e) ->
            bprint ("MnemonicError: Invalid entropy length: got "%build%" bits, expected "%build%" bits") a e
        ErrEntropy (ErrInvalidEntropyChecksum a e) ->
            bprint ("MnemonicError: Invalid entropy checksum: got "%build%", expected "%build) (show' a) (show' e)
        ErrForbidden ->
            bprint "Forbidden Mnemonic: an example Mnemonic has been submitted. \
                \Please generate a fresh and private Mnemonic from a trusted source"
      where
        -- Checksum has no Buildable instance; go through Show.
        show' :: Checksum csz -> String
        show' = show
-- | To use everytime we need to show an example of a Mnemonic. This particular
-- mnemonic is rejected to prevent users from using it on a real wallet
-- (see 'isForbiddenMnemonic' / 'ErrForbidden').
instance Default (Mnemonic 12) where
    def =
        let
            wordsm =
                [ "squirrel"
                , "material"
                , "silly"
                , "twice"
                , "direct"
                , "slush"
                , "pistol"
                , "razor"
                , "become"
                , "junk"
                , "kingdom"
                , "flee"
                ]
            -- These partial 'error' eliminations are safe only because the
            -- hard-coded word list above is known to be valid.
            phrase = either (error . show) id
                (mnemonicPhrase @12 (toUtf8String <$> wordsm))
            sentence = either (error . show) id
                (mnemonicPhraseToMnemonicSentence Dictionary.english phrase)
            entropy = either (error . show) id
                (wordsToEntropy @(EntropySize 12) sentence)
        in Mnemonic
            { mnemonicToSentence = sentence
            , mnemonicToEntropy  = entropy
            }
-- Decode a mnemonic from a JSON list of words, running the full
-- 'mkMnemonic' validation.
instance
    ( n ~ EntropySize mw
    , mw ~ MnemonicWords n
    , ValidChecksumSize n csz
    , ValidEntropySize n
    , ValidMnemonicSentence mw
    , Arbitrary (Entropy n)
    ) => FromJSON (Mnemonic mw) where
    parseJSON =
        parseJSON >=> (eitherToParser . mkMnemonic)
-- Encode a mnemonic as the JSON list of its dictionary words.
instance ToJSON (Mnemonic mw) where
    toJSON =
        toJSON
        . map (fromUtf8String . dictionaryIndexToWord Dictionary.english)
        . unListN
        . mnemonicSentenceToListN
        . mnemonicToSentence
-- Swagger schema: a list of Text with exactly @mw@ items.
instance (KnownNat mw) => ToSchema (Mnemonic mw) where
    declareNamedSchema _ = do
        let mw = natVal (Proxy :: Proxy mw)
        NamedSchema _ schema <- declareNamedSchema (Proxy @[Text])
        return $ NamedSchema (Just "Mnemonic") schema
            & minItems ?~ fromIntegral mw
            & maxItems ?~ fromIntegral mw
--
-- Miscellaneous
--

-- | Convert a given Either to an Aeson Parser, rendering the error
-- with its 'Buildable' instance.
eitherToParser :: Buildable a => Either a b -> Parser b
eitherToParser = \case
    Left err -> fail (formatToString build err)
    Right ok -> pure ok
| input-output-hk/pos-haskell-prototype | mnemonic/src/Cardano/Mnemonic.hs | mit | 11,708 | 0 | 16 | 3,100 | 2,255 | 1,251 | 1,004 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.StorageGateway.CreateSnapshot
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | This operation initiates a snapshot of a volume.
--
-- AWS Storage Gateway provides the ability to back up point-in-time snapshots
-- of your data to Amazon Simple Storage (S3) for durable off-site recovery, as
-- well as import the data to an Amazon Elastic Block Store (EBS) volume in
-- Amazon Elastic Compute Cloud (EC2). You can take snapshots of your gateway
-- volume on a scheduled or ad-hoc basis. This API enables you to take ad-hoc
-- snapshot. For more information, see <http://docs.aws.amazon.com/storagegateway/latest/userguide/WorkingWithSnapshots.html Working With Snapshots in the AWS StorageGateway Console>.
--
-- In the CreateSnapshot request you identify the volume by providing its
-- Amazon Resource Name (ARN). You must also provide description for the
-- snapshot. When AWS Storage Gateway takes the snapshot of specified volume,
-- the snapshot and description appears in the AWS Storage Gateway Console. In
-- response, AWS Storage Gateway returns you a snapshot ID. You can use this
-- snapshot ID to check the snapshot progress or later use it when you want to
-- create a volume from a snapshot.
--
-- To list or delete a snapshot, you must use the Amazon EC2 API. For more
-- information, .
--
-- <http://docs.aws.amazon.com/storagegateway/latest/APIReference/API_CreateSnapshot.html>
module Network.AWS.StorageGateway.CreateSnapshot
(
-- * Request
CreateSnapshot
-- ** Request constructor
, createSnapshot
-- ** Request lenses
, csSnapshotDescription
, csVolumeARN
-- * Response
, CreateSnapshotResponse
-- ** Response constructor
, createSnapshotResponse
-- ** Response lenses
, csrSnapshotId
, csrVolumeARN
) where
import Network.AWS.Data (Object)
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.StorageGateway.Types
import qualified GHC.Exts
-- Request payload for the CreateSnapshot operation; build with
-- 'createSnapshot' and adjust via the @cs*@ lenses.
data CreateSnapshot = CreateSnapshot
    { _csSnapshotDescription :: Text
    , _csVolumeARN           :: Text
    } deriving (Eq, Ord, Read, Show)
-- | 'CreateSnapshot' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'csSnapshotDescription' @::@ 'Text'
--
-- * 'csVolumeARN' @::@ 'Text'
--
createSnapshot :: Text -- ^ 'csVolumeARN'
               -> Text -- ^ 'csSnapshotDescription'
               -> CreateSnapshot
createSnapshot p1 p2 = CreateSnapshot
    { _csVolumeARN           = p1
    , _csSnapshotDescription = p2
    }
-- | Textual description of the snapshot that appears in the Amazon EC2 console,
-- Elastic Block Store snapshots panel in the Description field, and in the AWS
-- Storage Gateway snapshot Details pane, Description field
csSnapshotDescription :: Lens' CreateSnapshot Text
csSnapshotDescription =
    lens _csSnapshotDescription (\s a -> s { _csSnapshotDescription = a })
-- | The Amazon Resource Name (ARN) of the volume. Use the 'ListVolumes' operation
-- to return a list of gateway volumes.
csVolumeARN :: Lens' CreateSnapshot Text
csVolumeARN = lens _csVolumeARN (\s a -> s { _csVolumeARN = a })
-- Response payload for CreateSnapshot; both fields are optional in the
-- wire format, hence 'Maybe'.
data CreateSnapshotResponse = CreateSnapshotResponse
    { _csrSnapshotId :: Maybe Text
    , _csrVolumeARN  :: Maybe Text
    } deriving (Eq, Ord, Read, Show)
-- | 'CreateSnapshotResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'csrSnapshotId' @::@ 'Maybe' 'Text'
--
-- * 'csrVolumeARN' @::@ 'Maybe' 'Text'
--
createSnapshotResponse :: CreateSnapshotResponse
createSnapshotResponse = CreateSnapshotResponse
    { _csrVolumeARN  = Nothing
    , _csrSnapshotId = Nothing
    }
-- | The snapshot ID that is used to refer to the snapshot in future operations
-- such as describing snapshots (Amazon Elastic Compute Cloud API 'DescribeSnapshots') or creating a volume from a snapshot ('CreateStorediSCSIVolume').
csrSnapshotId :: Lens' CreateSnapshotResponse (Maybe Text)
csrSnapshotId = lens _csrSnapshotId (\s a -> s { _csrSnapshotId = a })
-- | The Amazon Resource Name (ARN) of the volume of which the snapshot was taken.
csrVolumeARN :: Lens' CreateSnapshotResponse (Maybe Text)
csrVolumeARN = lens _csrVolumeARN (\s a -> s { _csrVolumeARN = a })
-- The request carries everything in its JSON body; path and query are fixed.
instance ToPath CreateSnapshot where
    toPath = const "/"
instance ToQuery CreateSnapshot where
    toQuery = const mempty
instance ToHeaders CreateSnapshot
instance ToJSON CreateSnapshot where
    toJSON CreateSnapshot{..} = object
        [ "VolumeARN"           .= _csVolumeARN
        , "SnapshotDescription" .= _csSnapshotDescription
        ]
instance AWSRequest CreateSnapshot where
    type Sv CreateSnapshot = StorageGateway
    type Rs CreateSnapshot = CreateSnapshotResponse
    request  = post "CreateSnapshot"
    response = jsonResponse
-- Both response fields are optional (.:?), matching the Maybe fields.
instance FromJSON CreateSnapshotResponse where
    parseJSON = withObject "CreateSnapshotResponse" $ \o -> CreateSnapshotResponse
        <$> o .:? "SnapshotId"
        <*> o .:? "VolumeARN"
| kim/amazonka | amazonka-storagegateway/gen/Network/AWS/StorageGateway/CreateSnapshot.hs | mpl-2.0 | 5,938 | 0 | 11 | 1,209 | 614 | 377 | 237 | 70 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.StorageGateway.CreateCachediSCSIVolume
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | This operation creates a cached volume on a specified cached gateway. This
-- operation is supported only for the gateway-cached volume architecture.
--
-- Cache storage must be allocated to the gateway before you can create a
-- cached volume. Use the 'AddCache' operation to add cache storage to a gateway.
-- In the request, you must specify the gateway, size of the volume in bytes,
-- the iSCSI target name, an IP address on which to expose the target, and a
-- unique client token. In response, AWS Storage Gateway creates the volume and
-- returns information about it such as the volume Amazon Resource Name (ARN),
-- its size, and the iSCSI target ARN that initiators can use to connect to the
-- volume target.
--
-- <http://docs.aws.amazon.com/storagegateway/latest/APIReference/API_CreateCachediSCSIVolume.html>
module Network.AWS.StorageGateway.CreateCachediSCSIVolume
(
-- * Request
CreateCachediSCSIVolume
-- ** Request constructor
, createCachediSCSIVolume
-- ** Request lenses
, ccscsivClientToken
, ccscsivGatewayARN
, ccscsivNetworkInterfaceId
, ccscsivSnapshotId
, ccscsivTargetName
, ccscsivVolumeSizeInBytes
-- * Response
, CreateCachediSCSIVolumeResponse
-- ** Response constructor
, createCachediSCSIVolumeResponse
-- ** Response lenses
, ccscsivrTargetARN
, ccscsivrVolumeARN
) where
import Network.AWS.Data (Object)
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.StorageGateway.Types
import qualified GHC.Exts
-- Request payload for the CreateCachediSCSIVolume operation; build with
-- 'createCachediSCSIVolume' and adjust via the @ccscsiv*@ lenses.
-- Only '_ccscsivSnapshotId' is optional.
data CreateCachediSCSIVolume = CreateCachediSCSIVolume
    { _ccscsivClientToken        :: Text
    , _ccscsivGatewayARN         :: Text
    , _ccscsivNetworkInterfaceId :: Text
    , _ccscsivSnapshotId         :: Maybe Text
    , _ccscsivTargetName         :: Text
    , _ccscsivVolumeSizeInBytes  :: Integer
    } deriving (Eq, Ord, Read, Show)
-- | 'CreateCachediSCSIVolume' constructor. 'ccscsivSnapshotId' starts
-- out unset; every other field is a positional argument.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'ccscsivClientToken' @::@ 'Text'
--
-- * 'ccscsivGatewayARN' @::@ 'Text'
--
-- * 'ccscsivNetworkInterfaceId' @::@ 'Text'
--
-- * 'ccscsivSnapshotId' @::@ 'Maybe' 'Text'
--
-- * 'ccscsivTargetName' @::@ 'Text'
--
-- * 'ccscsivVolumeSizeInBytes' @::@ 'Integer'
--
createCachediSCSIVolume :: Text    -- ^ 'ccscsivGatewayARN'
                        -> Integer -- ^ 'ccscsivVolumeSizeInBytes'
                        -> Text    -- ^ 'ccscsivTargetName'
                        -> Text    -- ^ 'ccscsivNetworkInterfaceId'
                        -> Text    -- ^ 'ccscsivClientToken'
                        -> CreateCachediSCSIVolume
createCachediSCSIVolume gatewayArn volSizeBytes targetName nicId clientToken =
    CreateCachediSCSIVolume
        { _ccscsivGatewayARN         = gatewayArn
        , _ccscsivVolumeSizeInBytes  = volSizeBytes
        , _ccscsivTargetName         = targetName
        , _ccscsivNetworkInterfaceId = nicId
        , _ccscsivClientToken        = clientToken
        , _ccscsivSnapshotId         = Nothing
        }
-- | Lens onto the request's client token field.
ccscsivClientToken :: Lens' CreateCachediSCSIVolume Text
ccscsivClientToken =
    lens _ccscsivClientToken (\s a -> s { _ccscsivClientToken = a })
-- | Lens onto the gateway's Amazon Resource Name (ARN).
ccscsivGatewayARN :: Lens' CreateCachediSCSIVolume Text
ccscsivGatewayARN =
    lens _ccscsivGatewayARN (\s a -> s { _ccscsivGatewayARN = a })
-- | Lens onto the network interface id field.
ccscsivNetworkInterfaceId :: Lens' CreateCachediSCSIVolume Text
ccscsivNetworkInterfaceId =
    lens _ccscsivNetworkInterfaceId
        (\s a -> s { _ccscsivNetworkInterfaceId = a })
-- | Lens onto the optional snapshot id field.
ccscsivSnapshotId :: Lens' CreateCachediSCSIVolume (Maybe Text)
ccscsivSnapshotId =
    lens _ccscsivSnapshotId (\s a -> s { _ccscsivSnapshotId = a })
-- | Lens onto the iSCSI target name field.
ccscsivTargetName :: Lens' CreateCachediSCSIVolume Text
ccscsivTargetName =
    lens _ccscsivTargetName (\s a -> s { _ccscsivTargetName = a })
-- | Lens onto the volume size (in bytes) field.
ccscsivVolumeSizeInBytes :: Lens' CreateCachediSCSIVolume Integer
ccscsivVolumeSizeInBytes =
    lens _ccscsivVolumeSizeInBytes
        (\s a -> s { _ccscsivVolumeSizeInBytes = a })
-- Response payload for CreateCachediSCSIVolume; both fields are optional
-- in the wire format, hence 'Maybe'.
data CreateCachediSCSIVolumeResponse = CreateCachediSCSIVolumeResponse
    { _ccscsivrTargetARN :: Maybe Text
    , _ccscsivrVolumeARN :: Maybe Text
    } deriving (Eq, Ord, Read, Show)
-- | 'CreateCachediSCSIVolumeResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'ccscsivrTargetARN' @::@ 'Maybe' 'Text'
--
-- * 'ccscsivrVolumeARN' @::@ 'Maybe' 'Text'
--
createCachediSCSIVolumeResponse :: CreateCachediSCSIVolumeResponse
createCachediSCSIVolumeResponse = CreateCachediSCSIVolumeResponse
    { _ccscsivrVolumeARN = Nothing
    , _ccscsivrTargetARN = Nothing
    }
-- | Lens onto the returned iSCSI target ARN.
ccscsivrTargetARN :: Lens' CreateCachediSCSIVolumeResponse (Maybe Text)
ccscsivrTargetARN =
    lens _ccscsivrTargetARN (\s a -> s { _ccscsivrTargetARN = a })
-- | Lens onto the returned volume ARN.
ccscsivrVolumeARN :: Lens' CreateCachediSCSIVolumeResponse (Maybe Text)
ccscsivrVolumeARN =
    lens _ccscsivrVolumeARN (\s a -> s { _ccscsivrVolumeARN = a })
-- The request carries everything in its JSON body; path and query are fixed.
instance ToPath CreateCachediSCSIVolume where
    toPath = const "/"
instance ToQuery CreateCachediSCSIVolume where
    toQuery = const mempty
instance ToHeaders CreateCachediSCSIVolume
instance ToJSON CreateCachediSCSIVolume where
    toJSON CreateCachediSCSIVolume{..} = object
        [ "GatewayARN"         .= _ccscsivGatewayARN
        , "VolumeSizeInBytes"  .= _ccscsivVolumeSizeInBytes
        , "SnapshotId"         .= _ccscsivSnapshotId
        , "TargetName"         .= _ccscsivTargetName
        , "NetworkInterfaceId" .= _ccscsivNetworkInterfaceId
        , "ClientToken"        .= _ccscsivClientToken
        ]
instance AWSRequest CreateCachediSCSIVolume where
    type Sv CreateCachediSCSIVolume = StorageGateway
    type Rs CreateCachediSCSIVolume = CreateCachediSCSIVolumeResponse
    request  = post "CreateCachediSCSIVolume"
    response = jsonResponse
-- Both response fields are optional (.:?), matching the Maybe fields.
instance FromJSON CreateCachediSCSIVolumeResponse where
    parseJSON = withObject "CreateCachediSCSIVolumeResponse" $ \o -> CreateCachediSCSIVolumeResponse
        <$> o .:? "TargetARN"
        <*> o .:? "VolumeARN"
| romanb/amazonka | amazonka-storagegateway/gen/Network/AWS/StorageGateway/CreateCachediSCSIVolume.hs | mpl-2.0 | 6,971 | 0 | 11 | 1,482 | 876 | 525 | 351 | 106 | 1 |
{-# LANGUAGE TemplateHaskell #-}
module T10279 where
import Language.Haskell.TH
import Language.Haskell.TH.Syntax
-- NB: rts-1.0 is used here because it doesn't change.
-- You do need to pick the right version number, otherwise the
-- error message doesn't recognize it as a source package ID,
-- (This is OK, since it will look obviously wrong when they
-- try to find the package in their package database.)
blah = $(conE (Name (mkOccName "Foo") (NameG VarName (mkPkgName "rts-1.0") (mkModName "A"))))
| mpickering/ghc-exactprint | tests/examples/ghc8/T10279.hs | bsd-3-clause | 506 | 0 | 13 | 80 | 71 | 42 | 29 | 5 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.ModuleTest
-- Copyright : Isaac Jones 2003-2004
--
-- Maintainer : Isaac Jones <ijones@syntaxpolice.org>
-- Stability : alpha
-- Portability : GHC
--
-- Explanation: Test this module and sub modules.
{- All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of Isaac Jones nor the names of other
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -}
module Main where
#ifdef DEBUG
-- Import everything, since we want to test the compilation of them:
import qualified Distribution.Version as D.V (hunitTests)
-- import qualified Distribution.InstalledPackageInfo(hunitTests)
import qualified Distribution.License as D.L
import qualified Distribution.Compiler as D.C (hunitTests)
import qualified Distribution.Make ()
import qualified Distribution.Package as D.P ()
import qualified Distribution.PackageDescription as D.PD (hunitTests)
import qualified Distribution.Setup as D.Setup (hunitTests)
import Distribution.Compiler (CompilerFlavor(..), Compiler(..))
import Distribution.Version (Version(..))
import qualified Distribution.Simple as D.S (simpleHunitTests)
import qualified Distribution.Simple.Install as D.S.I (hunitTests)
import qualified Distribution.Simple.Build as D.S.B (hunitTests)
import qualified Distribution.Simple.SrcDist as D.S.S (hunitTests)
import qualified Distribution.Simple.Utils as D.S.U (hunitTests)
import Distribution.Compat.FilePath(joinFileName)
import qualified Distribution.Simple.Configure as D.S.C (hunitTests, localBuildInfoFile)
import qualified Distribution.Simple.Register as D.S.R (hunitTests, installedPkgConfigFile)
import qualified Distribution.Simple.GHCPackageConfig
as GHC (localPackageConfig, maybeCreateLocalPackageConfig)
import Distribution.Simple.Configure (configCompiler)
-- base
import Data.List (intersperse)
import Control.Monad(when, unless)
import Directory(setCurrentDirectory, doesFileExist,
doesDirectoryExist, getCurrentDirectory,
getPermissions, Permissions(..))
import Distribution.Compat.Directory (removeDirectoryRecursive)
import System.Cmd(system)
import System.Exit(ExitCode(..))
import System.Environment (getArgs)
import HUnit(runTestTT, Test(..), Counts(..), assertBool,
assertEqual, Assertion, showCounts)
-- ------------------------------------------------------------
-- * Helpers
-- ------------------------------------------------------------
-- | Pointwise sum of two HUnit result counters.
combineCounts :: Counts -> Counts -> Counts
combineCounts (Counts w1 x1 y1 z1) (Counts w2 x2 y2 z2) =
    Counts (w1 + w2) (x1 + x2) (y1 + y2) (z1 + z2)
-- | Wrap a section title in "-= ... =-" decoration for console output.
label :: String -> String
label title = concat ["-= ", title, " =-"]
-- | Run a test, printing a decorated heading first when the test
-- carries a label; anything else runs unadorned.
runTestTT' :: Test -> IO Counts
runTestTT' (TestLabel name inner) = do
    putStrLn (label name)
    runTestTT inner
runTestTT' other = runTestTT other
-- | Check that the install target directory exists and that the
-- modules A and B/A were installed with at least one of the given
-- file suffixes each.
checkTargetDir :: FilePath
               -> [String] -- ^suffixes
               -> IO ()
checkTargetDir targetDir suffixes
    = do doesDirectoryExist targetDir >>=
           assertBool "target dir exists"
         let mods = ["A", "B/A"]
         -- for each module: does any suffix variant of it exist?
         allFilesE <- mapM anyExists [[(targetDir ++ t ++ y)
                                       | y <- suffixes]
                                      | t <- mods]
         sequence [assertBool ("target file missing: " ++ targetDir ++ f) e
                   | (e, f) <- zip allFilesE mods]
         return ()
    where anyExists :: [FilePath] -> IO Bool
          anyExists l = do l' <- mapM doesFileExist l
                           return $ any (== True) l'
-- |Run this shell command and assert that it exits successfully.
assertCmd :: String -- ^Command
          -> String -- ^Comment
          -> Assertion
assertCmd command comment = do
    code <- system command
    assertEqual (command ++ ":" ++ comment) ExitSuccess code
-- |Like 'assertCmd', but takes the command and its arguments
-- separately, appending output to out.build.
assertCmd' :: String -- ^Command
           -> String -- ^args
           -> String -- ^Comment
           -> Assertion
assertCmd' command args comment = do
    code <- system fullCommand
    assertEqual (command ++ ":" ++ comment) ExitSuccess code
  where fullCommand = command ++ " " ++ args ++ ">>out.build"
-- |Run this shell command and assert that it exits with a failure
-- code.
assertCmdFail :: String -- ^Command
              -> String -- ^Comment
              -> Assertion
assertCmdFail command comment =
    system command >>= \code ->
        assertBool (command ++ ":" ++ comment) (code /= ExitSuccess)
-- ------------------------------------------------------------
-- * Integration Tests
-- ------------------------------------------------------------
-- | The integration-test suite.  Each 'Test' drives the setup program
-- (./setup for GHC, runhugs for Hugs) inside one of the example
-- packages under tests/, exercising configure, build, copy, register,
-- haddock and clean, and asserting on the files produced on disk.
tests :: FilePath       -- ^Currdir
      -> CompilerFlavor -- ^build setup with compiler
      -> CompilerFlavor -- ^configure with which compiler
      -> Version        -- ^version of the compiler to use
      -> [Test]
tests currDir comp compConf compVersion = [
         -- executableWithC
         TestLabel ("package exeWithC: " ++ compIdent) $ TestCase $
         do let targetDir = ",tmp"
            setCurrentDirectory $ (testdir `joinFileName` "exeWithC")
            testPrelude
            assertConfigure targetDir
            assertClean
            assertConfigure targetDir
            assertBuild
            assertCopy
            assertCmd ",tmp/bin/tt" "exeWithC failed"
         -- A
         ,TestLabel ("package A: " ++ compIdent) $ TestCase $
         do let targetDir = ",tmp"
            setCurrentDirectory $ (testdir `joinFileName` "A")
            testPrelude
            assertConfigure targetDir
            assertHaddock
            assertBuild
            when (comp == GHC) -- are these tests silly?
                 (do doesDirectoryExist "dist/build" >>=
                       assertBool "dist/build doesn't exist"
                     doesFileExist "dist/build/testA/testA" >>=
                       assertBool "build did not create the executable: testA"
                     doesFileExist "dist/build/testB/testB" >>=
                       assertBool "build did not create the executable: testB"
                     doesFileExist "dist/build/testA/testA-tmp/c_src/hello.o" >>=
                       assertBool "build did not build c source for testA"
                     doesFileExist "dist/build/hello.o" >>=
                       assertBool "build did not build c source for A library"
                 )
            assertCopy
            libForA targetDir
            doesFileExist ",tmp/bin/testA" >>=
              assertBool "testA not produced"
            doesFileExist ",tmp/bin/testB" >>=
              assertBool "testB not produced"
            assertCmd' compCmd "sdist" "setup sdist returned error code"
            doesFileExist "dist/test-1.0.tar.gz" >>=
              assertBool "sdist did not put the expected file in place"
            doesFileExist "dist/src" >>=
              assertEqual "dist/src exists" False
            assertCmd' compCmd "register --user" "pkg A, register failed"
            assertCmd' compCmd "unregister --user" "pkg A, unregister failed"
            -- tricky, script-based register
            registerAndExecute comp "pkg A: register with script failed"
            unregisterAndExecute comp "pkg A: unregister with script failed"
            -- non-trick non-script based register
            assertCmd' compCmd "register --user" "regular register returned error"
            assertCmd' compCmd "unregister --user" "regular unregister returned error"
         ,TestLabel ("package A copy-prefix: " ++ compIdent) $ TestCase $ -- (uses above config)
         do let targetDir = ",tmp2"
            assertCmd' compCmd ("copy --copy-prefix=" ++ targetDir) "copy --copy-prefix failed"
            doesFileExist ",tmp2/bin/testA" >>=
              assertBool "testA not produced"
            doesFileExist ",tmp2/bin/testB" >>=
              assertBool "testB not produced"
            libForA ",tmp2"
         ,TestLabel ("package A and install w/ no prefix: " ++ compIdent) $ TestCase $
         -- NOTE(review): targetDir below is bound but unused.
         do let targetDir = ",tmp/lib/test-1.0/ghc-6.4" -- FIX: Compiler-version
            removeDirectoryRecursive ",tmp"
            when (comp == GHC) -- FIX: hugs can't do --user yet
                 (do system $ "ghc-pkg unregister --user test-1.0"
                     assertCmd' compCmd "install --user" "install --user failed"
                     libForA ",tmp"
                     assertCmd' compCmd "unregister --user" "unregister failed")
         -- HUnit
         ,TestLabel ("testing the HUnit package" ++ compIdent) $ TestCase $
         do setCurrentDirectory $ (testdir `joinFileName` "HUnit-1.0")
            GHC.maybeCreateLocalPackageConfig
            system "make clean"
            system "make"
            assertCmd' compCmd "configure" "configure failed"
            system "setup unregister --user"
            system $ "touch " ++ D.S.C.localBuildInfoFile
            system $ "touch " ++ D.S.R.installedPkgConfigFile
            doesFileExist D.S.C.localBuildInfoFile >>=
              assertBool ("touch " ++ D.S.C.localBuildInfoFile ++ " failed")
            -- Test clean:
            assertBuild
            doesDirectoryExist "dist/build" >>=
              assertBool "HUnit build did not create build directory"
            assertCmd' compCmd "clean" "hunit clean"
            doesDirectoryExist "dist/build" >>=
              assertEqual "HUnit clean did not get rid of build directory" False
            doesFileExist D.S.C.localBuildInfoFile >>=
              assertEqual ("clean " ++ D.S.C.localBuildInfoFile ++ " failed") False
            doesFileExist D.S.R.installedPkgConfigFile >>=
              assertEqual ("clean " ++ D.S.R.installedPkgConfigFile ++ " failed") False
            assertConfigure ",tmp"
            assertHaddock
            doesDirectoryExist "dist/doc" >>= assertEqual "create of dist/doc" True
            assertBuild
            when (comp == GHC) -- tests building w/ an installed -package
                 (do pkgConf <- GHC.localPackageConfig
                     assertCmd' compCmd "install --user" "hunit install"
                     assertCmd ("ghc -package-conf " ++ pkgConf
                                ++ " -package HUnitTest HUnitTester.hs -o ./hunitTest")
                               "compile w/ hunit"
                     assertCmd "./hunitTest" "hunit test"
                     assertCmd' compCmd "unregister --user" "unregister failed")
            assertClean
            doesDirectoryExist "dist/doc" >>= assertEqual "clean dist/doc" False
            assertCmd "make clean" "make clean failed"
         -- twoMains
         ,TestLabel ("package twoMains: building " ++ compIdent) $ TestCase $
         do setCurrentDirectory $ (testdir `joinFileName` "twoMains")
            testPrelude
            assertConfigure ",tmp"
            assertCmd' compCmd "haddock" "setup haddock returned error code."
            assertBuild
            assertCopy
            doesFileExist ",tmp/bin/testA" >>=
              assertBool "install did not create the executable: testA"
            doesFileExist ",tmp/bin/testB" >>=
              assertBool "install did not create the executable: testB"
            assertCmd "./,tmp/bin/testA isA" "A is not A"
            assertCmd "./,tmp/bin/testB isB" "B is not B"
            -- no register, since there's no library
         -- buildinfo
         ,TestLabel ("buildinfo with multiple executables " ++ compIdent) $ TestCase $
         do setCurrentDirectory $ (testdir `joinFileName` "buildInfo")
            testPrelude
            assertConfigure ",tmp"
            assertCmd' compCmd "haddock" "setup haddock returned error code."
            assertBuild
            assertCopy
            doesFileExist ",tmp/bin/exe1" >>=
              assertBool "install did not create the executable: exe1"
            doesFileExist ",tmp/bin/exe2" >>=
              assertBool "install did not create the executable: exe2"
            -- no register, since there's no library
         -- mutually recursive modules
         ,TestLabel ("package recursive: building " ++ compIdent) $ TestCase $
         when (comp == GHC) (do
            setCurrentDirectory $ (testdir `joinFileName` "recursive")
            testPrelude
            assertConfigure ",tmp"
            assertBuild
            assertCopy
            doesFileExist "dist/build/A.hi-boot" >>=
              assertBool "build did not move A.hi-boot file into place lib"
            doesFileExist (",tmp/lib/recursive-1.0/ghc-" ++ compVerStr
                           ++ "/libHSrecursive-1.0.a") >>=
              assertBool "recursive build didn't create library"
            doesFileExist "dist/build/testExe/testExe-tmp/A.hi" >>=
              assertBool "build did not move A.hi-boot file into place exe"
            doesFileExist "dist/build/testExe/testExe" >>=
              assertBool "recursive build didn't create binary")
         -- linking in ffi stubs
         ,TestLabel ("package ffi: " ++ compIdent) $ TestCase $
         do setCurrentDirectory (testdir `joinFileName` "ffi-package")
            testPrelude
            -- NOTE(review): "/tmp" here, while every other test uses
            -- ",tmp" -- confirm whether this is intentional.
            assertConfigure "/tmp"
            assertBuild
            -- install it so we can test building with it.
            assertCmd' compCmd "install --user" "ffi-package install"
            assertClean
            doesFileExist "src/TestFFI_stub.c" >>=
              assertEqual "FFI-generated stub not cleaned." False
            -- now build something that depends on it
            setCurrentDirectory (".." `joinFileName` "ffi-bin")
            testPrelude
            assertConfigure ",tmp"
            assertBuild
            assertCopy
         -- depOnLib
         ,TestLabel ("package depOnLib: (executable depending on its lib)" ++ compIdent) $ TestCase $
         do setCurrentDirectory $ (testdir `joinFileName` "depOnLib")
            testPrelude
            assertConfigure ",tmp"
            assertHaddock
            assertBuild
            assertCopy
            registerAndExecute comp "pkg depOnLib: register with script failed"
            unregisterAndExecute comp "pkg DepOnLib: unregister with script failed"
            when (comp == GHC) (do
               doesFileExist "dist/build/mainForA/mainForA" >>=
                 assertBool "build did not create the executable: mainForA"
               doesFileExist ("dist/build/" `joinFileName` "libHStest-1.0.a")
                 >>= assertBool "library doesn't exist"
               doesFileExist (",tmp/bin/mainForA")
                 >>= assertBool "installed bin doesn't exist"
               doesFileExist (",tmp/lib/test-1.0/ghc-" ++ compVerStr ++ "/libHStest-1.0.a")
                 >>= assertBool "installed lib doesn't exist")
         -- wash2hs
         ,TestLabel ("testing the wash2hs package" ++ compIdent) $ TestCase $
         do setCurrentDirectory $ (testdir `joinFileName` "wash2hs")
            testPrelude
            assertCmdFail (compCmd ++ " configure --someUnknownFlag")
                          "wash2hs configure with unknown flag"
            assertConfigure ",tmp"
            assertHaddock
            assertBuild
            assertCopy
            -- no library to register
            doesFileExist ",tmp/bin/wash2hs"
              >>= assertBool "wash2hs didn't put executable into place."
            perms <- getPermissions ",tmp/bin/wash2hs"
            assertBool "wash2hs isn't +x" (executable perms)
            assertClean
            -- no unregister, because it has no libs!
         -- withHooks
         ,TestLabel ("package withHooks: " ++ compIdent) $ TestCase $
         do setCurrentDirectory $ (testdir `joinFileName` "withHooks")
            testPrelude
            assertCmd' compCmd ("configure --prefix=,tmp --woohoo " ++ compFlag)
                       "configure returned error code"
            assertCmdFail (compCmd ++ " test --asdf") "test was supposed to fail"
            assertCmd' compCmd ("test --pass") "test should not have failed"
            assertHaddock
            assertBuild
            assertCmd' compCmd "copy --copy-prefix=,tmp" "copy w/ prefix"
            doesFileExist ",tmp/withHooks" >>= -- this file is added w/ the hook.
              assertBool "hooked copy, redirecting prefix didn't work."
            assertCmd' compCmd "register --user" "regular register returned error"
            assertCmd' compCmd "unregister --user" "regular unregister returned error"
            when (comp == GHC) -- FIX: come up with good test for Hugs
                 (do doesFileExist "dist/build/C.o" >>=
                       assertBool "C.testSuffix did not get compiled to C.o."
                     doesFileExist "dist/build/D.o" >>=
                       assertBool "D.gc did not get compiled to D.o this is an overriding test"
                     doesFileExist (",tmp/lib/withHooks-1.0/ghc-" ++ compVerStr
                                    ++ "/" `joinFileName` "libHSwithHooks-1.0.a")
                       >>= assertBool "library doesn't exist")
            doesFileExist ",tmp/bin/withHooks" >>=
              assertBool "copy did not create the executable: withHooks"
            assertClean
            doesFileExist "C.hs" >>=
              assertEqual "C.hs (a generated file) not cleaned." False
         -- HSQL
{-       ,TestLabel ("package HSQL (make-based): " ++ show compIdent) $
         TestCase $ unless (compFlag == "--hugs") $ -- FIX: won't compile w/ hugs
         do setCurrentDirectory $ (testdir `joinFileName` "HSQL")
            system "make distclean"
            system "rm -rf /tmp/lib/HSQL"
            when (comp == GHC)
                 (system "ghc -cpp --make -i../.. Setup.lhs -o setup 2>out.build" >> return())
            assertConfigure "/tmp"
            doesFileExist "config.mk" >>=
              assertBool "config.mk not generated after configure"
            assertBuild
            assertCopy
            when (comp == GHC) -- FIX: do something for hugs
                 (doesFileExist "/tmp/lib/HSQL/GHC/libHSsql.a" >>=
                    assertBool "libHSsql.a doesn't exist. copy failed.")-}
         ]
    where testdir = currDir `joinFileName` "tests"
          compStr = show comp
          -- compiler version rendered as e.g. "6.4.1"
          compVerStr = concat . intersperse "." . map show . versionBranch $ compVersion
          compCmd = command comp
          compFlag = case compConf of
                       GHC  -> "--ghc"
                       Hugs -> "--hugs"
          compIdent = compStr ++ "/" ++ compFlag
          testPrelude = system "make clean >> out.build" >> system "make >> out.build"
          assertConfigure pref
              = assertCmd' compCmd ("configure --user --prefix=" ++ pref ++ " " ++ compFlag)
                           "configure returned error code"
          assertBuild   = assertCmd' compCmd "build" "build returned error code"
          assertCopy    = assertCmd' compCmd "copy" "copy returned error code"
          assertClean   = assertCmd' compCmd "clean" "clean returned error code"
          assertHaddock = assertCmd' compCmd "haddock" "setup haddock returned error code."
          command GHC  = "./setup"
          command Hugs = "runhugs -98 Setup.lhs"
          libForA pref -- checks to see if the lib exists, for tests/A
              = let ghcTargetDir = pref ++ "/lib/test-1.0/ghc-" ++ compVerStr ++ "/" in
                case compConf of
                  Hugs -> checkTargetDir (pref ++ "/lib/hugs/packages/test/") [".hs", ".lhs"]
                  GHC  -> do checkTargetDir ghcTargetDir [".hi"]
                             doesFileExist (ghcTargetDir `joinFileName` "libHStest-1.0.a")
                               >>= assertBool "library doesn't exist"
          dumpScriptFlag = "--gen-script"
          -- NOTE(review): the 'comp' parameters below shadow the
          -- outer 'comp'; callers above always pass the outer value.
          registerAndExecute comp comment = do
            assertCmd' compCmd ("register --user " ++ dumpScriptFlag) comment
            if comp == GHC
               then assertCmd' "./register.sh" "" "reg script failed"
               else do ex <- doesFileExist "register.sh"
                       assertBool "hugs should not produce register.sh" (not ex)
          unregisterAndExecute comp comment = do
            assertCmd' compCmd ("unregister --user " ++ dumpScriptFlag) comment
            if comp == GHC
               then assertCmd' "./unregister.sh" "" "reg script failed"
               else do ex <- doesFileExist "unregister.sh"
                       assertBool "hugs should not produce unregister.sh" (not ex)
-- | Run the unit (HUnit) tests first, then the integration tests once
-- per compiler in 'compilers' -- optionally restricted to a single
-- test selected by number on the command line -- and print a combined
-- summary.
main :: IO ()
main = do putStrLn "compile successful"
          putStrLn "-= Setup Tests =-"
          setupCount <- runTestTT' $ TestList $
             (TestLabel "Utils Tests" $ TestList D.S.U.hunitTests):
             (TestLabel "Setup Tests" $ TestList D.Setup.hunitTests):
             (TestLabel "config Tests" $ TestList D.S.C.hunitTests):
             (D.S.R.hunitTests ++ D.V.hunitTests ++
              D.S.S.hunitTests ++ D.S.B.hunitTests ++
              D.S.I.hunitTests ++ D.S.simpleHunitTests ++
              D.PD.hunitTests ++ D.C.hunitTests)
          dir <- getCurrentDirectory
          -- count' <- runTestTT' $ TestList (tests dir Hugs GHC)
          args <- getArgs
          -- With an argument, run only the test at that index;
          -- 'head' is safe here because the 'null args' guard fires
          -- first.
          let testList :: CompilerFlavor -> Version -> [Test]
              testList compiler version
                  | null args = tests dir compiler compiler version
                  | otherwise =
                      case reads (head args) of
                        [(n,_)] -> [ tests dir compiler compiler version !! n ]
                        _ -> error "usage: moduleTest [test_num]"
              compilers = [GHC] --, Hugs]
          globalTests <-
              flip mapM compilers $ \compilerFlavour -> do
                compiler <- configCompiler (Just compilerFlavour) Nothing Nothing 0
                let version = compilerVersion compiler
                runTestTT' $ TestList (testList compilerFlavour version)
          putStrLn "-------------"
          putStrLn "Test Summary:"
          putStrLn $ showCounts $
            foldl1 combineCounts (setupCount:globalTests)
          return ()
#endif
-- Local Variables:
-- compile-command: "ghc -i../:/usr/local/src/HUnit-1.0 -Wall --make ModuleTest.hs -o moduleTest"
-- End:
| alekar/hugs | packages/Cabal/tests/ModuleTest.hs | bsd-3-clause | 23,560 | 43 | 18 | 7,137 | 3,259 | 1,738 | 1,521 | 1 | 0 |
-- {-# OPTIONS_GHC -fno-warn-redundant-constraints #-}
module ShouldSucceed where
-- | A user-defined equality class, independent of Prelude's 'Eq'.
class Eq2 a where
  doubleeq :: a -> a -> Bool

-- | An ordering class layered on top of 'Eq2'.
class Eq2 a => Ord2 a where
  lt :: a -> a -> Bool

-- Trivial instances: every comparison succeeds.
instance Eq2 Int where
  doubleeq _ _ = True

instance Ord2 Int where
  lt _ _ = True

instance (Eq2 a, Ord2 a) => Eq2 [a] where
  doubleeq _ _ = True

-- Exercises instance resolution for Eq2 [a]; the second argument is
-- ignored.
f x _ = doubleeq x [1]
| rahulmutt/ghcvm | tests/suite/typecheck/compile/tc058.hs | bsd-3-clause | 357 | 0 | 8 | 83 | 148 | 77 | 71 | 12 | 1 |
module Parse.Module (header, headerAndImports, getModuleName) where
import Control.Applicative ((<$>), (<*>))
import Data.List (intercalate)
import Text.Parsec hiding (newline, spaces)
import Parse.Helpers
import qualified AST.Module as Module
import qualified AST.Variable as Var
-- | Extract the dotted module name from Elm source, if its header
-- parses.
getModuleName :: String -> Maybe String
getModuleName source =
    either (const Nothing) Just (iParse nameParser source)
  where
    nameParser = do
      optional freshLine
      (names, _) <- header
      return (intercalate "." names)
-- | Parse a module header (defaulting to an open @Main@ module when
-- the header is absent) followed by the import declarations.
headerAndImports :: IParser Module.HeaderAndImports
headerAndImports =
  do  optional freshLine
      (names, exports) <-
          option (["Main"], Var.openListing) (header `followedBy` freshLine)
      imports' <- imports
      return $ Module.HeaderAndImports names exports imports'
-- | Parse @module Foo.Bar (listing) where@, returning the dotted-name
-- segments and the export listing (an open listing when omitted).
header :: IParser ([String], Var.Listing Var.Value)
header =
  do  try (reserved "module")
      whitespace
      names <- dotSep1 capVar <?> "name of module"
      whitespace
      exports <- option Var.openListing (listing value)
      whitespace <?> "reserved word 'where'"
      reserved "where"
      return (names, exports)
-- | Parse zero or more import declarations, each followed by a fresh
-- line.
imports :: IParser [(Module.Name, Module.ImportMethod)]
imports =
    many (import' `followedBy` freshLine)
-- | Parse one @import@ declaration; without an explicit method the
-- module is aliased to its own dotted name.
import' :: IParser (Module.Name, Module.ImportMethod)
import' =
  do  try (reserved "import")
      whitespace
      names <- dotSep1 capVar
      (,) names <$> option (Module.As (intercalate "." names)) method
  where
    method :: IParser Module.ImportMethod
    method = as' <|> importing'

    -- "import Foo as F"
    as' :: IParser Module.ImportMethod
    as' = do
      try (whitespace >> reserved "as")
      whitespace
      Module.As <$> capVar <?> "alias for module"

    -- "import Foo (a, b, c)"
    importing' :: IParser Module.ImportMethod
    importing' = Module.Open <$> listing value
-- | Parse a parenthesized listing: either "(..)" (an open listing) or
-- a comma-separated list of items.
listing :: IParser a -> IParser (Var.Listing a)
listing item = do
    try (whitespace >> char '(')
    whitespace
    contents <-
        choice
          [ const Var.openListing <$> string ".."
          , Var.Listing <$> commaSep1 item <*> return False
          ] <?> "listing of values (x,y,z)"
    whitespace
    _ <- char ')'
    return contents
-- | Parse one exported value: a lowercase name (or parenthesized
-- symbolic operator), a type alias name, or a union type with an
-- optional constructor listing.
value :: IParser Var.Value
value =
    val <|> tipe
  where
    val =
        Var.Value <$> (lowVar <|> parens symOp)

    tipe =
      do  name <- capVar
          maybeCtors <- optionMaybe (listing capVar)
          case maybeCtors of
            Nothing -> return (Var.Alias name)
            Just ctors -> return (Var.Union name ctors)
| JoeyEremondi/utrecht-apa-p1 | src/Parse/Module.hs | bsd-3-clause | 2,551 | 0 | 15 | 657 | 798 | 398 | 400 | 74 | 2 |
module Parser where
import Data.Attoparsec.Text
-- | Parse a single character delimited by pipes (e.g. "|x|"),
-- returning the inner character.
weirdParser :: Parser Char
weirdParser = char '|' *> anyChar <* char '|'
module Main where
import Control.Monad
import Control.Monad.IO.Class
import System.Environment
import System.IO
import Transient.Base
import Transient.Indeterminism
import Transient.Logged
import Transient.Move
import Transient.Stream.Resource
import Control.Applicative
import System.Info
import Control.Concurrent
-- Entry point: pick local/remote node order from the command line
-- (any argument keeps the declared order, none reverses it), start
-- listening on the local node and offer the interactive options.
main = do
  let nodes = [createNode "localhost" 2020, createNode "192.168.99.100" 2020]
  args <- getArgs
  -- NOTE(review): 'remote' is never used below and 'stream' is given
  -- the local node -- confirm whether 'stream remote' was intended.
  let [localnode, remote] = if length args > 0 then nodes
                            else reverse nodes
  runCloud' $ do
    onAll $ addNodes nodes
    listen localnode <|> return ()
    hello <|> helloworld <|> stream localnode
-- Every node prints a greeting with its OS and reports
-- (os, arch, host) back to the caller, which prints the collected
-- results.
hello = do
  local $ option "hello" "each computer say hello"
  r <- clustered $ do
    node <- getMyNode
    onAll . liftIO . print $ "hello " ++ os
    return ("hello from", os, arch, nodeHost node)
  lliftIO $ print r
-- Each node contributes one word chosen by its OS and the results are
-- composed across the cluster.
-- NOTE(review): the option key "helloword" looks like a typo for
-- "helloworld", but users type this key -- confirm before changing.
helloworld = do
  local $ option "helloword" "both computers compose \"hello world\""
  r <- mclustered $ return $ if os == "linux" then "hello " else "world"
  lliftIO $ print r
-- Stream the first ten Fibonacci numbers from the given node back to
-- the caller, printing on both ends, with a one-second delay per
-- element.
stream remoteHost = do
  local $ option "stream" "stream from the Linux node to windows"
  let fibs = 0 : 1 : zipWith (+) fibs (tail fibs) :: [Int] -- fibonacci numbers
  r <- runAt remoteHost $ local $ do
    r <- threads 1 $ choose $ take 10 fibs
    liftIO $ putStr os >> print r
    liftIO $ threadDelay 1000000
    return r
  lliftIO $ print r
| agocorona/transient | tests/Test3.hs | mit | 1,690 | 0 | 14 | 597 | 466 | 227 | 239 | 42 | 2 |
{- |
Module : $Header$
Description : Parser of the Knowledge Interchange Format
Copyright : (c) Karl Luc, DFKI Bremen 2010, Eugen Kuksa and Uni Bremen 2011, Soeren Schulze 2012
License : GPLv2 or higher, see LICENSE.txt
Maintainer : s.schulze@uni-bremen.de
Stability : provisional
Portability : portable
-}
module CommonLogic.Parse_KIF where
import qualified Common.AnnoState as AnnoState
import qualified Common.AS_Annotation as Annotation
import CommonLogic.AS_CommonLogic as AS
import Common.Id as Id
import Common.Keywords
import Common.Lexer (notFollowedWith)
import Common.Parsec (reserved)
import Common.Token
import Common.GlobalAnnotations (PrefixMap)
import CommonLogic.Lexer_KIF
import Text.ParserCombinators.Parsec as Parsec
import Control.Monad (liftM)
-- | N-ary boolean connectives: keyword, sentence constructor, and a
-- human-readable description used in parse-error messages.
boolop_nary :: [(String, [SENTENCE] -> BOOL_SENT, String)]
boolop_nary = [(andS, Junction Conjunction, "conjunction"),
               (orS, Junction Disjunction, "disjunction")]
-- | Binary boolean connectives: keyword, sentence constructor, and a
-- human-readable description used in parse-error messages.
boolop_binary :: [(String, SENTENCE -> SENTENCE -> BOOL_SENT, String)]
boolop_binary = [(equivS, BinOp Biconditional, "equivalence"),
                 (implS, BinOp Implication, "implication")]
-- | Quantifier keywords: keyword, quantifier constructor, and a
-- human-readable description used in parse-error messages.
-- (Fixes the misspelled description "existiantial quantifier".)
boolop_quant :: [(String, QUANT, String)]
boolop_quant = [(forallS, Universal, "universal quantifier"),
                (existsS, Existential, "existential quantifier")]
-- | Turn a keyword table into a parser that accepts any of the
-- keywords, yielding the matched token, its associated constructor,
-- and the keyword string itself.
parse_keys :: [(String, op_t, String)] -> CharParser st (Token, op_t, String)
parse_keys table = choice
    [ fmap (\tok -> (tok, con, ident)) (key ident <?> desc)
    | (ident, con, desc) <- table ]
-- | Parse a logical sentence: a negation, an n-ary junction, a binary
-- connective, or a quantified sentence.  Each result's source range
-- spans all of its constituents.
logsent :: CharParser st SENTENCE
logsent =
      do ch <- key notS <?> "negation"
         s <- sentence <?> "sentence after \"" ++ notS ++ "\""
         return $ Bool_sent (Negation s)
                $ Range $ joinRanges [rangeSpan ch, rangeSpan s]
  <|> do (ch, con, ident) <- parse_keys boolop_nary
         s <- many sentence <?> "sentences after \"" ++ ident ++ "\""
         return $ Bool_sent (con s)
                $ Range $ joinRanges [rangeSpan ch, rangeSpan s]
  <|> do (ch, con, ident) <- parse_keys boolop_binary
         s1 <- sentence <?> "first sentence after \"" ++ ident ++ "\""
         s2 <- sentence <?> "second sentence after \"" ++ ident ++ "\""
         return $ Bool_sent (con s1 s2)
                $ Range $ joinRanges [rangeSpan ch, rangeSpan s1, rangeSpan s2]
  <|> do (ch, q, ident) <- parse_keys boolop_quant
         -- quantified names and row variables (sequence markers)
         vars <- parens (many1 (liftM Name (pToken variable)
                                <|> liftM SeqMark (pToken rowvar)))
                 <?> "quantified variables"
         s <- sentence <?> "sentence after \"" ++ ident ++ "\""
         return $ Quant_sent q vars s
                $ Range $ joinRanges [rangeSpan ch, rangeSpan vars, rangeSpan s]
-- | Parse a bare word or variable as a zero-argument atom.
plainAtom :: CharParser st ATOM
plainAtom =
    fmap mkAtom (pToken (word <|> variable) <?> "word")
  where
    mkAtom tok = Atom (Name_term tok) []
-- | Promote an atom parser to a sentence parser, attaching the atom's
-- own source range to the sentence.
atomsent :: CharParser st ATOM -> CharParser st SENTENCE
atomsent = liftM (\ a -> Atom_sent a $ Range $ rangeSpan a)
-- | Parse a bare word as a zero-argument atomic sentence.
plainsent :: CharParser st SENTENCE
plainsent = atomsent plainAtom
-- | Parse a parenthesized logical, relational or equational sentence.
parensent :: CharParser st SENTENCE
parensent = parens $ logsent <|> relsent <|> eqsent
-- | Parse a function term: a (possibly parenthesized) head word that
-- is not one of the reserved operator keywords, followed by argument
-- terms or row variables.  With no arguments, the bare name term is
-- returned instead of a function application.
funterm :: CharParser st TERM
funterm = parens funterm
  <|> do relword <- pToken (reserved
                      [equalS, neqS, andS, orS, equivS, implS, forallS, existsS, notS]
                      (word <|> variable)) <?> "funword"
         let nt = Name_term relword
         t <- many (liftM Seq_marks (pToken rowvar)
                    <|> liftM Term_seq term) <?> "arguments"
         return $ if null t
                  then nt
                  else Funct_term nt t
                       (Range $ joinRanges [rangeSpan relword, rangeSpan t])
-- | Parse a relation application as an atomic sentence.
relsent :: CharParser st SENTENCE
relsent = do
  ft <- funterm
  let a = case ft of
            p@(Name_term _) -> Atom p []
            Funct_term p args _ -> Atom p args
            -- 'funterm' only produces the two shapes above, so this
            -- branch is unreachable.
            _ -> error "unknown TERM in relsent"
  atomsent $ return a
-- | KIF's inequality keyword.
neqS :: String
neqS = "/="
-- | Equality operators: '=' yields the equation unchanged ('const'
-- ignores the range), while '/=' wraps the equation in a negation.
eq_ops :: [(String, SENTENCE -> Id.Range -> SENTENCE, String)]
eq_ops = [(equalS, const, "equation"),
          (neqS, \ e rn -> Bool_sent (Negation e) rn, "inequality")]
-- | Parse an equation or inequality between two terms; the range
-- covers the operator and both terms.
eqsent :: CharParser st SENTENCE
eqsent = do
  (ch, con, ident) <- parse_keys eq_ops
  t1 <- term <?> "term after \"" ++ ident ++ "\""
  t2 <- term <?> "second term after \"" ++ ident ++ "\""
  let rn = Range $ joinRanges [rangeSpan ch, rangeSpan t1, rangeSpan t2]
  return $ con (Atom_sent (Equation t1 t2) rn) rn
-- | Parse a term: a variable, word, quoted string or number, or a
-- parenthesized function term / embedded sentence ("that" term).
term :: CharParser st TERM
term = liftM Name_term (pToken variable)
   <|> liftM Name_term (pToken word)
   <|> liftM Name_term (pToken quotedString)
   <|> liftM Name_term (pToken number)
   <|> parens (funterm <|> do
         s <- logsent <|> eqsent
         return $ That_term s $ Range $ rangeSpan s)
-- | Parse any sentence: parenthesized or a plain atom.
sentence :: CharParser st SENTENCE
sentence = parensent <|> plainsent
-- | Parse a sentence, but only if it does not begin with one of the
-- keywords that terminate the KIF text.
topLevelSentence :: CharParser st SENTENCE
topLevelSentence = notFollowedWith (return ())
                     (choice (map key terminatingKeywords))
                   >> sentence
-- | Parse a whole KIF text into a basic spec: skip leading
-- whitespace, then collect as many top-level sentences as possible.
-- The prefix map argument is ignored.
basicSpec :: PrefixMap -> AnnoState.AParser st BASIC_SPEC
basicSpec _ = do
  many white
  sentences <- many topLevelSentence
  let phrases = map Sentence sentences
  let text = Text phrases $ Range $ joinRanges $ map rangeSpan phrases
  let text_meta = Text_meta text Nothing Nothing []
  let basic_items = Axiom_items [Annotation.emptyAnno text_meta]
  return $ Basic_spec [Annotation.emptyAnno basic_items]
| keithodulaigh/Hets | CommonLogic/Parse_KIF.hs | gpl-2.0 | 5,482 | 0 | 17 | 1,413 | 1,733 | 893 | 840 | 112 | 3 |
import Graphics.UI.Gtk
-- | Demo of GTK label widgets: packs several framed haiku labels into
-- two columns and wires up a button with a mnemonic label.
main:: IO ()
main = do
  initGUI
  -- top-level window holding a vertical box
  window <- windowNew
  set window [windowTitle := "Labels", containerBorderWidth := 10 ]
  mainbox <- vBoxNew False 10
  containerAdd window mainbox
  -- two side-by-side columns of equal width
  hbox <- hBoxNew True 5
  boxPackStart mainbox hbox PackNatural 0
  vbox1 <- vBoxNew False 10
  vbox2 <- vBoxNew False 0
  boxPackStart hbox vbox1 PackNatural 0
  boxPackStart hbox vbox2 PackNatural 0
  -- left column: author + left-aligned haiku
  (label1,frame1) <- myLabelWithFrameNew
  boxPackStart vbox1 frame1 PackNatural 0
  labelSetText label1 "Penny Harter"
  (label2,frame2) <- myLabelWithFrameNew
  boxPackStart vbox1 frame2 PackNatural 0
  labelSetText label2 "broken bowl\nthe pieces\nstill rocking"
  -- align the multi-line text to the top-left of the label area
  miscSetAlignment label2 0.0 0.0
  hsep1 <- hSeparatorNew
  boxPackStart vbox1 hsep1 PackNatural 10
  (label3,frame3) <- myLabelWithFrameNew
  boxPackStart vbox1 frame3 PackNatural 0
  labelSetText label3 "Gary Snyder"
  (label4,frame4) <- myLabelWithFrameNew
  boxPackStart vbox1 frame4 PackNatural 0
  labelSetText label4 "After weeks of watching the roof leak\nI fixed it tonight\nby moving a single board"
  labelSetJustify label4 JustifyCenter
  -- right column: packed from both ends (boxPackEnd fills bottom-up)
  (label5,frame5) <- myLabelWithFrameNew
  boxPackStart vbox2 frame5 PackNatural 0
  labelSetText label5 "Kobayashi Issa"
  (label7,frame7) <- myLabelWithFrameNew
  boxPackEnd vbox2 frame7 PackNatural 0
  labelSetText label7 "only one guy and\nonly one fly trying to\nmake the guest room do"
  labelSetJustify label7 JustifyRight
  (label6,frame6) <- myLabelWithFrameNew
  boxPackEnd vbox2 frame6 PackNatural 10
  labelSetText label6 "One Guy"
  frameSetLabel frame6 "Title:"
  -- underline characters 0-2 and 4-6 of "One Guy" (pattern: 3 on, 1 off, 3 on)
  labelSetPattern label6 [3,1,3]
  -- button with a mnemonic label (Alt+C activates it)
  button <- buttonNew
  boxPackEnd mainbox button PackNatural 20
  buttonlabel <- labelNewWithMnemonic "Haiku _Clicked"
  containerAdd button buttonlabel
  widgetShowAll window
  onClicked button (putStrLn "button clicked...")
  onDestroy window mainQuit
  mainGUI
-- | Build an initially-empty label wrapped in a frame drawn with an
-- outward shadow, and return both widgets.
myLabelWithFrameNew :: IO (Label,Frame)
myLabelWithFrameNew = do
  lbl <- labelNew Nothing
  frm <- frameNew
  containerAdd frm lbl
  frameSetShadowType frm ShadowOut
  return (lbl, frm)
-- Haikus quoted from X.J. Kennedy, Dana Gioia, Introduction to Poetry, Longman, 1997
| mimi1vx/gtk2hs | docs/tutorial/Tutorial_Port/Example_Code/GtkChap4-3.hs | gpl-3.0 | 2,384 | 0 | 9 | 557 | 580 | 258 | 322 | 57 | 1 |
{-
*********************************************************************************
* *
* John Hughes's and Simon Peyton Jones's Pretty Printer Combinators *
* *
* based on "The Design of a Pretty-printing Library" *
* in Advanced Functional Programming, *
* Johan Jeuring and Erik Meijer (eds), LNCS 925 *
* http://www.cs.chalmers.se/~rjmh/Papers/pretty.ps *
* *
* Heavily modified by Simon Peyton Jones, Dec 96 *
* *
*********************************************************************************
Version 3.0 28 May 1997
* Cured massive performance bug. If you write
foldl <> empty (map (text.show) [1..10000])
you get quadratic behaviour with V2.0. Why? For just the same reason as you get
quadratic behaviour with left-associated (++) chains.
This is really bad news. One thing a pretty-printer abstraction should
certainly guarantee is insensitivity to associativity. It matters: suddenly
GHC's compilation times went up by a factor of 100 when I switched to the
new pretty printer.
I fixed it with a bit of a hack (because I wanted to get GHC back on the
road). I added two new constructors to the Doc type, Above and Beside:
<> = Beside
$$ = Above
Then, where I need to get to a "TextBeside" or "NilAbove" form I "force"
the Doc to squeeze out these suspended calls to Beside and Above; but in so
doing I re-associate. It's quite simple, but I'm not satisfied that I've done
the best possible job. I'll send you the code if you are interested.
* Added new exports:
punctuate, hang
int, integer, float, double, rational,
lparen, rparen, lbrack, rbrack, lbrace, rbrace,
* fullRender's type signature has changed. Rather than producing a string it
now takes an extra couple of arguments that tells it how to glue fragments
of output together:
fullRender :: Mode
-> Int -- Line length
-> Float -- Ribbons per line
-> (TextDetails -> a -> a) -- What to do with text
-> a -- What to do at the end
-> Doc
-> a -- Result
The "fragments" are encapsulated in the TextDetails data type:
data TextDetails = Chr Char
| Str String
| PStr FastString
The Chr and Str constructors are obvious enough. The PStr constructor has a packed
string (FastString) inside it. It's generated by using the new "ptext" export.
An advantage of this new setup is that you can get the renderer to do output
directly (by passing in a function of type (TextDetails -> IO () -> IO ()),
rather than producing a string that you then print.
Version 2.0 24 April 1997
* Made empty into a left unit for <> as well as a right unit;
it is also now true that
nest k empty = empty
which wasn't true before.
* Fixed an obscure bug in sep that occasionally gave very weird behaviour
* Added $+$
* Corrected and tidied up the laws and invariants
======================================================================
Relative to John's original paper, there are the following new features:
1. There's an empty document, "empty". It's a left and right unit for
both <> and $$, and anywhere in the argument list for
sep, hcat, hsep, vcat, fcat etc.
It is Really Useful in practice.
2. There is a paragraph-fill combinator, fsep, that's much like sep,
only it keeps fitting things on one line until it can't fit any more.
3. Some random useful extra combinators are provided.
<+> puts its arguments beside each other with a space between them,
unless either argument is empty in which case it returns the other
hcat is a list version of <>
hsep is a list version of <+>
vcat is a list version of $$
sep (separate) is either like hsep or like vcat, depending on what fits
cat behaves like sep, but it uses <> for horizontal composition
fcat behaves like fsep, but it uses <> for horizontal composition
These new ones do the obvious things:
char, semi, comma, colon, space,
parens, brackets, braces,
quotes, quote, doubleQuotes
4. The "above" combinator, $$, now overlaps its two arguments if the
last line of the top argument stops before the first line of the second begins.
For example: text "hi" $$ nest 5 "there"
lays out as
hi there
rather than
hi
there
There are two places this is really useful
a) When making labelled blocks, like this:
Left -> code for left
Right -> code for right
LongLongLongLabel ->
code for longlonglonglabel
The block is on the same line as the label if the label is
short, but on the next line otherwise.
b) When laying out lists like this:
[ first
, second
, third
]
which some people like. But if the list fits on one line
you want [first, second, third]. You can't do this with
John's original combinators, but it's quite easy with the
new $$.
The combinator $+$ gives the original "never-overlap" behaviour.
5. Several different renderers are provided:
* a standard one
* one that uses cut-marks to avoid deeply-nested documents
simply piling up in the right-hand margin
* one that ignores indentation (fewer chars output; good for machines)
* one that ignores indentation and newlines (ditto, only more so)
6. Numerous implementation tidy-ups
Use of unboxed data types to speed up the implementation
-}
{-# LANGUAGE BangPatterns, CPP, MagicHash #-}
module ETA.Utils.Pretty (
Doc, -- Abstract
Mode(..), TextDetails(..),
empty, isEmpty, nest,
char, text, ftext, ptext, ztext, zeroWidthText,
int, integer, float, double, rational,
parens, brackets, braces, quotes, quote, doubleQuotes,
semi, comma, colon, space, equals,
lparen, rparen, lbrack, rbrack, lbrace, rbrace, cparen,
(<>), (<+>), hcat, hsep,
($$), ($+$), vcat,
sep, cat,
fsep, fcat,
hang, punctuate,
fullRender, printDoc, printDoc_, showDoc,
bufLeftRender -- performance hack
) where
import ETA.Utils.BufWrite
import ETA.Utils.FastString
import ETA.Utils.FastTypes
import ETA.Utils.Panic
import Numeric (fromRat)
import System.IO
--for a RULES
import GHC.Base ( unpackCString# )
import GHC.Exts ( Int# )
import GHC.Ptr ( Ptr(..) )
-- Don't import ETA.Utils.Util( assertPanic ) because it makes a loop in the module structure
infixl 6 <>
infixl 6 <+>
infixl 5 $$, $+$
-- Disable ASSERT checks; they are expensive!
#define LOCAL_ASSERT(x)
{-
*********************************************************
* *
\subsection{The interface}
* *
*********************************************************
The primitive @Doc@ values
-}
-- Type signatures of the exported interface; definitions follow below.
empty :: Doc
isEmpty :: Doc -> Bool
-- | Some text, but without any width. Use for non-printing text
-- such as HTML or LaTeX tags
zeroWidthText :: String -> Doc
text :: String -> Doc
char :: Char -> Doc
semi, comma, colon, space, equals :: Doc
lparen, rparen, lbrack, rbrack, lbrace, rbrace :: Doc
parens, brackets, braces :: Doc -> Doc
quotes, quote, doubleQuotes :: Doc -> Doc
int :: Int -> Doc
integer :: Integer -> Doc
float :: Float -> Doc
double :: Double -> Doc
rational :: Rational -> Doc
-- Combining @Doc@ values
(<>) :: Doc -> Doc -> Doc -- Beside
hcat :: [Doc] -> Doc -- List version of <>
(<+>) :: Doc -> Doc -> Doc -- Beside, separated by space
hsep :: [Doc] -> Doc -- List version of <+>
($$) :: Doc -> Doc -> Doc -- Above; if there is no
 -- overlap it "dovetails" the two
vcat :: [Doc] -> Doc -- List version of $$
cat :: [Doc] -> Doc -- Either hcat or vcat
sep :: [Doc] -> Doc -- Either hsep or vcat
fcat :: [Doc] -> Doc -- ``Paragraph fill'' version of cat
fsep :: [Doc] -> Doc -- ``Paragraph fill'' version of sep
nest :: Int -> Doc -> Doc -- Nested
-- GHC-specific ones.
hang :: Doc -> Int -> Doc -> Doc
punctuate :: Doc -> [Doc] -> [Doc] -- punctuate p [d1, ... dn] = [d1 <> p, d2 <> p, ... dn-1 <> p, dn]
-- Displaying @Doc@ values.
-- Renders in PageMode with a line length of 100.
instance Show Doc where
  showsPrec _ doc cont = showDocPlus PageMode 100 doc cont
fullRender :: Mode
  -> Int -- Line length
  -> Float -- Ribbons per line
  -> (TextDetails -> a -> a) -- What to do with text
  -> a -- What to do at the end
  -> Doc
  -> a -- Result
-- | Rendering modes; see 'fullRender' and 'display'.
data Mode = PageMode -- Normal
  | ZigZagMode -- With zig-zag cuts
  | LeftMode -- No indentation, infinitely long lines
  | OneLineMode -- All on one line
{-
*********************************************************
* *
\subsection{The @Doc@ calculus}
* *
*********************************************************
The @Doc@ combinators satisfy the following laws:
\begin{verbatim}
Laws for $$
~~~~~~~~~~~
<a1> (x $$ y) $$ z = x $$ (y $$ z)
<a2> empty $$ x = x
<a3> x $$ empty = x
...ditto $+$...
Laws for <>
~~~~~~~~~~~
<b1> (x <> y) <> z = x <> (y <> z)
<b2> empty <> x = empty
<b3> x <> empty = x
...ditto <+>...
Laws for text
~~~~~~~~~~~~~
<t1> text s <> text t = text (s++t)
<t2> text "" <> x = x, if x non-empty
Laws for nest
~~~~~~~~~~~~~
<n1> nest 0 x = x
<n2> nest k (nest k' x) = nest (k+k') x
<n3> nest k (x <> y) = nest k z <> nest k y
<n4> nest k (x $$ y) = nest k x $$ nest k y
<n5> nest k empty = empty
<n6> x <> nest k y = x <> y, if x non-empty
- Note the side condition on <n6>! It is this that
makes it OK for empty to be a left unit for <>.
Miscellaneous
~~~~~~~~~~~~~
<m1> (text s <> x) $$ y = text s <> ((text "" <> x)) $$
nest (-length s) y)
<m2> (x $$ y) <> z = x $$ (y <> z)
if y non-empty
Laws for list versions
~~~~~~~~~~~~~~~~~~~~~~
<l1> sep (ps++[empty]++qs) = sep (ps ++ qs)
...ditto hsep, hcat, vcat, fill...
<l2> nest k (sep ps) = sep (map (nest k) ps)
...ditto hsep, hcat, vcat, fill...
Laws for oneLiner
~~~~~~~~~~~~~~~~~
<o1> oneLiner (nest k p) = nest k (oneLiner p)
<o2> oneLiner (x <> y) = oneLiner x <> oneLiner y
\end{verbatim}
You might think that the following version of <m1> would
be neater:
\begin{verbatim}
<3 NO> (text s <> x) $$ y = text s <> ((empty <> x)) $$
nest (-length s) y)
\end{verbatim}
But it doesn't work, for if x=empty, we would have
\begin{verbatim}
text s $$ y = text s <> (empty $$ nest (-length s) y)
= text s <> nest (-length s) y
\end{verbatim}
*********************************************************
* *
\subsection{Simple derived definitions}
* *
*********************************************************
-}
-- Single-character documents.
semi = char ';'
colon = char ':'
comma = char ','
space = char ' '
equals = char '='
lparen = char '('
rparen = char ')'
lbrack = char '['
rbrack = char ']'
lbrace = char '{'
rbrace = char '}'
-- Render numbers via 'show'.
int n = text (show n)
integer n = text (show n)
float n = text (show n)
double n = text (show n)
rational n = text (show (fromRat n :: Double))
--rational n = text (show (fromRationalX n)) -- _showRational 30 n)
-- Wrapping combinators.
quotes p = char '`' <> p <> char '\''
quote p = char '\'' <> p
doubleQuotes p = char '"' <> p <> char '"'
parens p = char '(' <> p <> char ')'
brackets p = char '[' <> p <> char ']'
braces p = char '{' <> p <> char '}'
-- | Conditionally parenthesise: @cparen True = parens@, otherwise 'id'.
cparen :: Bool -> Doc -> Doc
cparen True = parens
cparen False = id
-- List versions of the binary combinators.
hcat = foldr (<>) empty
hsep = foldr (<+>) empty
vcat = foldr ($$) empty
-- | @hang d1 n d2@: @d2@ beside or below @d1@, nested @n@ deeper.
hang d1 n d2 = sep [d1, nest n d2]
-- | Append @p@ to every element but the last.
punctuate _ [] = []
punctuate p (d:ds) = go d ds
  where
    go d [] = [d]
    go d (e:es) = (d <> p) : go e es
{-
*********************************************************
* *
\subsection{The @Doc@ data type}
* *
*********************************************************
A @Doc@ represents a {\em set} of layouts. A @Doc@ with
no occurrences of @Union@ or @NoDoc@ represents just one layout.
-}
-- | A @Doc@ represents a set of possible layouts.  The invariants on
-- these constructors are listed in the comment block below.
data Doc
  = Empty -- empty
  | NilAbove Doc -- text "" $$ x
  | TextBeside !TextDetails FastInt Doc -- text s <> x
  | Nest FastInt Doc -- nest k x
  | Union Doc Doc -- ul `union` ur
  | NoDoc -- The empty set of documents
  | Beside Doc Bool Doc -- True <=> space between
  | Above Doc Bool Doc -- True <=> never overlap
type RDoc = Doc -- RDoc is a "reduced Doc", guaranteed not to have a top-level Above or Beside
-- | Squeeze out suspended Beside/Above at the top, re-associating as we
-- go (this is the fix for the quadratic-behaviour bug described in the
-- header comment).
reduceDoc :: Doc -> RDoc
reduceDoc (Beside p g q) = beside p g (reduceDoc q)
reduceDoc (Above p g q) = above p g (reduceDoc q)
reduceDoc p = p
-- | Fragments of output text; the renderer is parameterised over how
-- these are glued together.
data TextDetails = Chr {-#UNPACK#-}!Char
  | Str String
  | PStr FastString -- a hashed string
  | ZStr FastZString -- a z-encoded string
  | LStr {-#UNPACK#-}!LitString FastInt -- a '\0'-terminated
  -- array of bytes
space_text :: TextDetails
space_text = Chr ' '
nl_text :: TextDetails
nl_text = Chr '\n'
{-
Here are the invariants:
\begin{itemize}
\item
The argument of @NilAbove@ is never @Empty@. Therefore
a @NilAbove@ occupies at least two lines.
\item
The argument of @TextBeside@ is never @Nest@.
\item
The layouts of the two arguments of @Union@ both flatten to the same string.
\item
The arguments of @Union@ are either @TextBeside@, or @NilAbove@.
\item
The right argument of a union cannot be equivalent to the empty set (@NoDoc@).
If the left argument of a union is equivalent to the empty set (@NoDoc@),
then the @NoDoc@ appears in the first line.
\item
An empty document is always represented by @Empty@.
It can't be hidden inside a @Nest@, or a @Union@ of two @Empty@s.
\item
The first line of every layout in the left argument of @Union@
is longer than the first line of any layout in the right argument.
(1) ensures that the left argument has a first line. In view of (3),
this invariant means that the right argument must have at least two
lines.
\end{itemize}
-}
-- Arg of a NilAbove is always an RDoc
nilAbove_ :: Doc -> Doc
nilAbove_ p = LOCAL_ASSERT( _ok p ) NilAbove p
where
_ok Empty = False
_ok _ = True
-- Arg of a TextBeside is always an RDoc
textBeside_ :: TextDetails -> FastInt -> Doc -> Doc
textBeside_ s sl p = TextBeside s sl (LOCAL_ASSERT( _ok p ) p)
where
_ok (Nest _ _) = False
_ok _ = True
-- Arg of Nest is always an RDoc
nest_ :: FastInt -> Doc -> Doc
nest_ k p = Nest k (LOCAL_ASSERT( _ok p ) p)
where
_ok Empty = False
_ok _ = True
-- Args of union are always RDocs
union_ :: Doc -> Doc -> Doc
union_ p q = Union (LOCAL_ASSERT( _ok p ) p) (LOCAL_ASSERT( _ok q ) q)
where
_ok (TextBeside _ _ _) = True
_ok (NilAbove _) = True
_ok (Union _ _) = True
_ok _ = False
{-
Notice the difference between
* NoDoc (no documents)
* Empty (one empty document; no height and no width)
* text "" (a document containing the empty string;
one line high, but has no width)
*********************************************************
* *
\subsection{@empty@, @text@, @nest@, @union@}
* *
*********************************************************
-}
empty = Empty
isEmpty Empty = True
isEmpty _ = False
char c = textBeside_ (Chr c) (_ILIT(1)) Empty
-- The case-of forces the length before building the TextBeside.
text s = case iUnbox (length s) of {sl -> textBeside_ (Str s) sl Empty}
{-# NOINLINE [0] text #-} -- Give the RULE a chance to fire
 -- It must wait till after phase 1 when
 -- the unpackCString first is manifested
ftext :: FastString -> Doc
ftext s = case iUnbox (lengthFS s) of {sl -> textBeside_ (PStr s) sl Empty}
ptext :: LitString -> Doc
ptext s = case iUnbox (lengthLS s) of {sl -> textBeside_ (LStr s sl) sl Empty}
ztext :: FastZString -> Doc
ztext s = case iUnbox (lengthFZS s) of {sl -> textBeside_ (ZStr s) sl Empty}
-- Recorded width of 0, so it never affects layout decisions.
zeroWidthText s = textBeside_ (Str s) (_ILIT(0)) Empty
-- RULE that turns (text "abc") into (ptext (A# "abc"#)) to avoid the
-- intermediate packing/unpacking of the string.
{-# RULES
"text/str" forall a. text (unpackCString# a) = ptext (Ptr a)
 #-}
nest k p = mkNest (iUnbox k) (reduceDoc p) -- Externally callable version
-- mkNest checks for Nest's invariant that it doesn't have an Empty inside it
mkNest :: Int# -> Doc -> Doc
mkNest k (Nest k1 p) = mkNest (k +# k1) p
mkNest _ NoDoc = NoDoc
mkNest _ Empty = Empty
mkNest k p | k ==# _ILIT(0) = p -- Worth a try!
mkNest k p = nest_ k p
-- mkUnion checks for an empty document
mkUnion :: Doc -> Doc -> Doc
mkUnion Empty _ = Empty
mkUnion p q = p `union_` q
{-
*********************************************************
* *
\subsection{Vertical composition @$$@}
* *
*********************************************************
-}
-- Vertical composition is suspended as an Above node; 'reduceDoc'
-- forces it via 'above' when a reduced form is needed.
p $$ q = Above p False q
($+$) :: Doc -> Doc -> Doc
-- Like $$, but never overlaps the two arguments.
p $+$ q = Above p True q
above :: Doc -> Bool -> RDoc -> RDoc
-- Re-associates nested Above nodes to avoid quadratic behaviour.
above (Above p g1 q1) g2 q2 = above p g1 (above q1 g2 q2)
above p@(Beside _ _ _) g q = aboveNest (reduceDoc p) g (_ILIT(0)) (reduceDoc q)
above p g q = aboveNest p g (_ILIT(0)) (reduceDoc q)
aboveNest :: RDoc -> Bool -> FastInt -> RDoc -> RDoc
-- Specification: aboveNest p g k q = p $g$ (nest k q)
aboveNest NoDoc _ _ _ = NoDoc
aboveNest (p1 `Union` p2) g k q = aboveNest p1 g k q `union_`
  aboveNest p2 g k q
aboveNest Empty _ k q = mkNest k q
aboveNest (Nest k1 p) g k q = nest_ k1 (aboveNest p g (k -# k1) q)
 -- p can't be Empty, so no need for mkNest
aboveNest (NilAbove p) g k q = nilAbove_ (aboveNest p g k q)
aboveNest (TextBeside s sl p) g k q = textBeside_ s sl rest
  where
    !k1 = k -# sl
    rest = case p of
      Empty -> nilAboveNest g k1 q
      _ -> aboveNest p g k1 q
aboveNest _ _ _ _ = panic "aboveNest: Unhandled case"
nilAboveNest :: Bool -> FastInt -> RDoc -> RDoc
-- Specification: text s <> nilAboveNest g k q
-- = text s <> (text "" $g$ nest k q)
nilAboveNest _ _ Empty = Empty -- Here's why the "text s <>" is in the spec!
nilAboveNest g k (Nest k1 q) = nilAboveNest g (k +# k1) q
nilAboveNest g k q | (not g) && (k ># _ILIT(0)) -- No newline if no overlap
  = textBeside_ (Str (spaces k)) k q
  | otherwise -- Put them really above
  = nilAbove_ (mkNest k q)
{-
*********************************************************
* *
\subsection{Horizontal composition @<>@}
* *
*********************************************************
-}
-- Horizontal composition is suspended as a Beside node; 'reduceDoc'
-- forces it via 'beside' when a reduced form is needed.
p <> q = Beside p False q
p <+> q = Beside p True q
beside :: Doc -> Bool -> RDoc -> RDoc
-- Specification: beside g p q = p <g> q
beside NoDoc _ _ = NoDoc
beside (p1 `Union` p2) g q = (beside p1 g q) `union_` (beside p2 g q)
beside Empty _ q = q
beside (Nest k p) g q = nest_ k $! beside p g q -- p non-empty
beside p@(Beside p1 g1 q1) g2 q2
  {- (A `op1` B) `op2` C == A `op1` (B `op2` C) iff op1 == op2
     [ && (op1 == <> || op1 == <+>) ] -}
  | g1 == g2 = beside p1 g1 $! beside q1 g2 q2
  | otherwise = beside (reduceDoc p) g2 q2
beside p@(Above _ _ _) g q = let d = reduceDoc p in d `seq` beside d g q
beside (NilAbove p) g q = nilAbove_ $! beside p g q
beside (TextBeside s sl p) g q = textBeside_ s sl $! rest
  where
    rest = case p of
      Empty -> nilBeside g q
      _ -> beside p g q
nilBeside :: Bool -> RDoc -> RDoc
-- Specification: text "" <> nilBeside g p
-- = text "" <g> p
nilBeside _ Empty = Empty -- Hence the text "" in the spec
nilBeside g (Nest _ p) = nilBeside g p
nilBeside g p | g = textBeside_ space_text (_ILIT(1)) p
  | otherwise = p
{-
*********************************************************
* *
\subsection{Separate, @sep@, Hughes version}
* *
*********************************************************
-}
-- Specification: sep ps = oneLiner (hsep ps)
-- `union`
-- vcat ps
sep = sepX True -- Separate with spaces
cat = sepX False -- Don't
sepX :: Bool -> [Doc] -> Doc
sepX _ [] = empty
sepX x (p:ps) = sep1 x (reduceDoc p) (_ILIT(0)) ps
-- Specification: sep1 g k ys = sep (x : map (nest k) ys)
-- = oneLiner (x <g> nest k (hsep ys))
-- `union` x $$ nest k (vcat ys)
sep1 :: Bool -> RDoc -> FastInt -> [Doc] -> RDoc
sep1 _ NoDoc _ _ = NoDoc
sep1 g (p `Union` q) k ys = sep1 g p k ys
  `union_`
  (aboveNest q False k (reduceDoc (vcat ys)))
sep1 g Empty k ys = mkNest k (sepX g ys)
sep1 g (Nest n p) k ys = nest_ n (sep1 g p (k -# n) ys)
sep1 _ (NilAbove p) k ys = nilAbove_ (aboveNest p False k (reduceDoc (vcat ys)))
sep1 g (TextBeside s sl p) k ys = textBeside_ s sl (sepNB g p (k -# sl) ys)
sep1 _ _ _ _ = panic "sep1: Unhandled case"
-- Specification: sepNB p k ys = sep1 (text "" <> p) k ys
-- Called when we have already found some text in the first item
-- We have to eat up nests
sepNB :: Bool -> Doc -> FastInt -> [Doc] -> Doc
sepNB g (Nest _ p) k ys = sepNB g p k ys
sepNB g Empty k ys = oneLiner (nilBeside g (reduceDoc rest))
  `mkUnion`
  nilAboveNest False k (reduceDoc (vcat ys))
  where
    -- one-line alternative: glue with or without spaces
    rest | g = hsep ys
         | otherwise = hcat ys
sepNB g p k ys = sep1 g p k ys
{-
*********************************************************
* *
\subsection{@fill@}
* *
*********************************************************
-}
-- Paragraph fill: like sep, but keeps fitting items on a line until no
-- more fit, then starts a new line.
fsep = fill True
fcat = fill False
-- Specification:
-- fill [] = empty
-- fill [p] = p
-- fill (p1:p2:ps) = oneLiner p1 <#> nest (length p1)
-- (fill (oneLiner p2 : ps))
-- `union`
-- p1 $$ fill ps
fill :: Bool -> [Doc] -> Doc
fill _ [] = empty
fill g (p:ps) = fill1 g (reduceDoc p) (_ILIT(0)) ps
fill1 :: Bool -> RDoc -> FastInt -> [Doc] -> Doc
fill1 _ NoDoc _ _ = NoDoc
fill1 g (p `Union` q) k ys = fill1 g p k ys
  `union_`
  (aboveNest q False k (fill g ys))
fill1 g Empty k ys = mkNest k (fill g ys)
fill1 g (Nest n p) k ys = nest_ n (fill1 g p (k -# n) ys)
fill1 g (NilAbove p) k ys = nilAbove_ (aboveNest p False k (fill g ys))
fill1 g (TextBeside s sl p) k ys = textBeside_ s sl (fillNB g p (k -# sl) ys)
fill1 _ _ _ _ = panic "fill1: Unhandled case"
fillNB :: Bool -> Doc -> Int# -> [Doc] -> Doc
fillNB g (Nest _ p) k ys = fillNB g p k ys
fillNB _ Empty _ [] = Empty
fillNB g Empty k (y:ys) = nilBeside g (fill1 g (oneLiner (reduceDoc y)) k1 ys)
  `mkUnion`
  nilAboveNest False k (fill g (y:ys))
  where
    -- account for the separating space when filling with spaces
    !k1 | g = k -# _ILIT(1)
        | otherwise = k
fillNB g p k ys = fill1 g p k ys
{-
*********************************************************
* *
\subsection{Selecting the best layout}
* *
*********************************************************
-}
-- | Pick the best layout from a Doc containing unions, producing a
-- union-free RDoc.  'nicest' prefers the left (wider-first-line)
-- alternative whenever its first line fits.
best :: Int -- Line length
  -> Int -- Ribbon length
  -> RDoc
  -> RDoc -- No unions in here!
best w_ r_ p
  = get (iUnbox w_) p
  where
    !r = iUnbox r_
    get :: FastInt -- (Remaining) width of line
        -> Doc -> Doc
    get _ Empty = Empty
    get _ NoDoc = NoDoc
    get w (NilAbove p) = nilAbove_ (get w p)
    get w (TextBeside s sl p) = textBeside_ s sl (get1 w sl p)
    get w (Nest k p) = nest_ k (get (w -# k) p)
    get w (p `Union` q) = nicest w r (get w p) (get w q)
    get _ _ = panic "best/get: Unhandled case"
    get1 :: FastInt -- (Remaining) width of line
         -> FastInt -- Amount of first line already eaten up
         -> Doc -- This is an argument to TextBeside => eat Nests
         -> Doc -- No unions in here!
    get1 _ _ Empty = Empty
    get1 _ _ NoDoc = NoDoc
    get1 w sl (NilAbove p) = nilAbove_ (get (w -# sl) p)
    get1 w sl (TextBeside t tl p) = textBeside_ t tl (get1 w (sl +# tl) p)
    get1 w sl (Nest _ p) = get1 w sl p
    get1 w sl (p `Union` q) = nicest1 w r sl (get1 w sl p)
                                             (get1 w sl q)
    get1 _ _ _ = panic "best/get1: Unhandled case"
nicest :: FastInt -> FastInt -> Doc -> Doc -> Doc
nicest w r p q = nicest1 w r (_ILIT(0)) p q
-- Available space is the smaller of line width and ribbon, minus what
-- the current line has already consumed.
nicest1 :: FastInt -> FastInt -> Int# -> Doc -> Doc -> Doc
nicest1 w r sl p q | fits ((w `minFastInt` r) -# sl) p = p
                   | otherwise = q
fits :: FastInt -- Space available
  -> Doc
  -> Bool -- True if *first line* of Doc fits in space available
fits n _ | n <# _ILIT(0) = False
fits _ NoDoc = False
fits _ Empty = True
fits _ (NilAbove _) = True
fits n (TextBeside _ sl p) = fits (n -# sl) p
fits _ _ = panic "fits: Unhandled case"
{-
@first@ and @nonEmptySet@ are similar to @nicest@ and @fits@, only simpler.
@first@ returns its first argument if it is non-empty, otherwise its second.
-}
-- | Return the first argument if it is a non-empty set of layouts,
-- otherwise the second.
first :: Doc -> Doc -> Doc
first p q | nonEmptySet p = p
          | otherwise = q
nonEmptySet :: Doc -> Bool
nonEmptySet NoDoc = False
nonEmptySet (_ `Union` _) = True
nonEmptySet Empty = True
nonEmptySet (NilAbove _) = True -- NoDoc always in first line
nonEmptySet (TextBeside _ _ p) = nonEmptySet p
nonEmptySet (Nest _ p) = nonEmptySet p
nonEmptySet _ = panic "nonEmptySet: Unhandled case"
-- @oneLiner@ returns the one-line members of the given set of @Doc@s.
oneLiner :: Doc -> Doc
oneLiner NoDoc = NoDoc
oneLiner Empty = Empty
oneLiner (NilAbove _) = NoDoc
oneLiner (TextBeside s sl p) = textBeside_ s sl (oneLiner p)
oneLiner (Nest k p) = nest_ k (oneLiner p)
oneLiner (p `Union` _) = oneLiner p
oneLiner _ = panic "oneLiner: Unhandled case"
{-
*********************************************************
* *
\subsection{Displaying the best layout}
* *
*********************************************************
-}
-- | Render to a String continuation with a ribbon ratio of 1.5.
showDocPlus :: Mode -> Int -> Doc -> String -> String
showDocPlus mode cols doc rest = fullRender mode cols 1.5 string_txt rest doc
showDoc :: Mode -> Int -> Doc -> String
showDoc mode cols doc = showDocPlus mode cols doc ""
-- | Glue a text fragment onto the front of a String.
string_txt :: TextDetails -> String -> String
string_txt (Chr c) s = c:s
string_txt (Str s1) s2 = s1 ++ s2
string_txt (PStr s1) s2 = unpackFS s1 ++ s2
string_txt (ZStr s1) s2 = zString s1 ++ s2
string_txt (LStr s1 _) s2 = unpackLitString s1 ++ s2
-- OneLineMode: no layout search needed; newlines become spaces.
fullRender OneLineMode _ _ txt end doc
  = lay (reduceDoc doc)
  where
    lay NoDoc = cant_fail
    lay (Union _ q) = lay q -- Second arg can't be NoDoc
    lay (Nest _ p) = lay p
    lay Empty = end
    lay (NilAbove p) = space_text `txt` lay p -- NoDoc always on
    -- first line
    lay (TextBeside s _ p) = s `txt` lay p
    lay _ = panic "fullRender/OneLineMode/lay: Unhandled case"
-- LeftMode: no layout search; indentation dropped, real newlines kept.
fullRender LeftMode _ _ txt end doc
  = lay (reduceDoc doc)
  where
    lay NoDoc = cant_fail
    lay (Union p q) = lay (first p q)
    lay (Nest _ p) = lay p
    lay Empty = end
    lay (NilAbove p) = nl_text `txt` lay p -- NoDoc always on first line
    lay (TextBeside s _ p) = s `txt` lay p
    lay _ = panic "fullRender/LeftMode/lay: Unhandled case"
-- PageMode / ZigZagMode: run 'best' to choose a layout, then display it.
fullRender mode line_length ribbons_per_line txt end doc
  = display mode line_length ribbon_length txt end best_doc
  where
    best_doc = best hacked_line_length ribbon_length (reduceDoc doc)
    hacked_line_length, ribbon_length :: Int
    ribbon_length = round (fromIntegral line_length / ribbons_per_line)
    -- in zig-zag mode line length never limits the choice of layout
    hacked_line_length = case mode of
      ZigZagMode -> maxBound
      _ -> line_length
-- | Emit a union-free Doc, tracking the current indentation @k@.
-- In ZigZagMode, when indentation drifts past gap_width the output is
-- shifted back by half the gap and marked with rows of '/' or '\\'.
display :: Mode -> Int -> Int -> (TextDetails -> t -> t) -> t -> Doc -> t
display mode page_width ribbon_width txt end doc
  = case (iUnbox page_width) -# (iUnbox ribbon_width) of { gap_width ->
    case gap_width `quotFastInt` _ILIT(2) of { shift ->
    let
      lay k (Nest k1 p) = lay (k +# k1) p
      lay _ Empty = end
      lay k (NilAbove p) = nl_text `txt` lay k p
      lay k (TextBeside s sl p)
        = case mode of
            ZigZagMode | k >=# gap_width
                       -> nl_text `txt` (
                          Str (multi_ch shift '/') `txt` (
                          nl_text `txt` (
                          lay1 (k -# shift) s sl p)))
                       | k <# _ILIT(0)
                       -> nl_text `txt` (
                          Str (multi_ch shift '\\') `txt` (
                          nl_text `txt` (
                          lay1 (k +# shift) s sl p )))
            _ -> lay1 k s sl p
      lay _ _ = panic "display/lay: Unhandled case"
      -- lay1: emit the indentation, then the text, then the rest of the line
      lay1 k s sl p = indent k (s `txt` lay2 (k +# sl) p)
      -- lay2: we are mid-line, so Nest is ignored and only text accumulates
      lay2 k (NilAbove p) = nl_text `txt` lay k p
      lay2 k (TextBeside s sl p) = s `txt` (lay2 (k +# sl) p)
      lay2 k (Nest _ p) = lay2 k p
      lay2 _ Empty = end
      lay2 _ _ = panic "display/lay2: Unhandled case"
      -- optimise long indentations using LitString chunks of 8 spaces
      indent n r | n >=# _ILIT(8) = LStr (sLit "        ") (_ILIT(8)) `txt`
                                    indent (n -# _ILIT(8)) r
                 | otherwise = Str (spaces n) `txt` r
    in
    lay (_ILIT(0)) doc
    }}
-- | Raised only when a NoDoc survives to rendering, which the
-- invariants rule out.
cant_fail :: a
cant_fail = error "easy_display: NoDoc"
-- | A string of @n@ copies of @ch@ (empty for non-positive @n@).
multi_ch :: Int# -> Char -> String
multi_ch n ch | n <=# _ILIT(0) = ""
              | otherwise = ch : multi_ch (n -# _ILIT(1)) ch
-- | A string of @n@ spaces (empty for non-positive @n@).
spaces :: Int# -> String
spaces n | n <=# _ILIT(0) = ""
         | otherwise = ' ' : spaces (n -# _ILIT(1))
printDoc :: Mode -> Int -> Handle -> Doc -> IO ()
-- printDoc adds a newline to the end
printDoc mode cols hdl doc = printDoc_ mode cols hdl (doc $$ text "")
printDoc_ :: Mode -> Int -> Handle -> Doc -> IO ()
-- printDoc_ does not add a newline at the end, so that
-- successive calls can output stuff on the same line
-- Rather like putStr vs putStrLn
printDoc_ LeftMode _ hdl doc
  -- LeftMode is performance critical; use the buffered fast path
  = do { printLeftRender hdl doc; hFlush hdl }
printDoc_ mode pprCols hdl doc
  = do { fullRender mode pprCols 1.5 put done doc ;
         hFlush hdl }
  where
    put (Chr c) next = hPutChar hdl c >> next
    put (Str s) next = hPutStr hdl s >> next
    put (PStr s) next = hPutStr hdl (unpackFS s) >> next
    -- NB. not hPutFS, we want this to go through
    -- the I/O library's encoding layer. (#3398)
    put (ZStr s) next = hPutFZS hdl s >> next
    put (LStr s l) next = hPutLitString hdl s l >> next
    done = return () -- hPutChar hdl '\n'
-- some versions of hPutBuf will barf if the length is zero
hPutLitString :: Handle -> Ptr a -> Int# -> IO ()
hPutLitString handle a l = if l ==# _ILIT(0)
                           then return ()
                           else hPutBuf handle a (iBox l)
-- Printing output in LeftMode is performance critical: it's used when
-- dumping C and assembly output, so we allow ourselves a few dirty
-- hacks:
--
-- (1) we specialise fullRender for LeftMode with IO output.
--
-- (2) we add a layer of buffering on top of Handles. Handles
-- don't perform well with lots of hPutChars, which is mostly
-- what we're doing here, because Handles have to be thread-safe
-- and async exception-safe. We only have a single thread and don't
-- care about exceptions, so we add a layer of fast buffering
-- over the Handle interface.
--
-- (3) a few hacks in layLeft below to convince GHC to generate the right
-- code.
-- | Fast LeftMode rendering straight to a Handle via a BufHandle.
printLeftRender :: Handle -> Doc -> IO ()
printLeftRender hdl doc = do
  b <- newBufHandle hdl
  bufLeftRender b doc
  bFlush b
bufLeftRender :: BufHandle -> Doc -> IO ()
bufLeftRender b doc = layLeft b (reduceDoc doc)
-- HACK ALERT! the "return () >>" below convinces GHC to eta-expand
-- this function with the IO state lambda. Otherwise we end up with
-- closures in all the case branches.
layLeft :: BufHandle -> Doc -> IO ()
layLeft b _ | b `seq` False = undefined -- make it strict in b
layLeft _ NoDoc = cant_fail
layLeft b (Union p q) = return () >> layLeft b (first p q)
layLeft b (Nest _ p) = return () >> layLeft b p
layLeft b Empty = bPutChar b '\n'
layLeft b (NilAbove p) = bPutChar b '\n' >> layLeft b p
layLeft b (TextBeside s _ p) = put b s >> layLeft b p
  where
    -- dispatch each fragment to the matching buffered writer
    put b _ | b `seq` False = undefined
    put b (Chr c) = bPutChar b c
    put b (Str s) = bPutStr b s
    put b (PStr s) = bPutFS b s
    put b (ZStr s) = bPutFZS b s
    put b (LStr s l) = bPutLitString b s l
layLeft _ _ = panic "layLeft: Unhandled case"
| alexander-at-github/eta | compiler/ETA/Utils/Pretty.hs | bsd-3-clause | 37,510 | 0 | 27 | 14,083 | 7,640 | 3,970 | 3,670 | 442 | 14 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
Utility functions on @Core@ syntax
-}
{-# LANGUAGE CPP #-}
module CoreSubst (
-- * Main data types
Subst(..), -- Implementation exported for supercompiler's Renaming.hs only
TvSubstEnv, IdSubstEnv, InScopeSet,
-- ** Substituting into expressions and related types
deShadowBinds, substSpec, substRulesForImportedIds,
substTy, substCo, substExpr, substExprSC, substBind, substBindSC,
substUnfolding, substUnfoldingSC,
lookupIdSubst, lookupTCvSubst, substIdOcc,
substTickish, substDVarSet,
-- ** Operations on substitutions
emptySubst, mkEmptySubst, mkSubst, mkOpenSubst, substInScope, isEmptySubst,
extendIdSubst, extendIdSubstList, extendTCvSubst, extendTvSubstList,
extendSubst, extendSubstList, extendSubstWithVar, zapSubstEnv,
addInScopeSet, extendInScope, extendInScopeList, extendInScopeIds,
isInScope, setInScope,
delBndr, delBndrs,
-- ** Substituting and cloning binders
substBndr, substBndrs, substRecBndrs,
cloneBndr, cloneBndrs, cloneIdBndr, cloneIdBndrs, cloneRecIdBndrs,
-- ** Simple expression optimiser
simpleOptPgm, simpleOptExpr, simpleOptExprWith,
exprIsConApp_maybe, exprIsLiteral_maybe, exprIsLambda_maybe,
) where
#include "HsVersions.h"
import CoreSyn
import CoreFVs
import CoreSeq
import CoreUtils
import Literal ( Literal(MachStr) )
import qualified Data.ByteString as BS
import OccurAnal( occurAnalyseExpr, occurAnalysePgm )
import qualified Type
import qualified Coercion
-- We are defining local versions
import Type hiding ( substTy, extendTvSubst, extendCvSubst, extendTvSubstList
, isInScope, substTyVarBndr, cloneTyVarBndr )
import Coercion hiding ( substCo, substCoVarBndr )
import TyCon ( tyConArity )
import DataCon
import PrelNames
import OptCoercion ( optCoercion )
import PprCore ( pprCoreBindings, pprRules )
import Module ( Module )
import VarSet
import VarEnv
import Id
import Name ( Name )
import Var
import IdInfo
import UniqSupply
import Maybes
import ErrUtils
import DynFlags
import BasicTypes ( isAlwaysActive )
import Util
import Pair
import Outputable
import PprCore () -- Instances
import FastString
import Data.List
import TysWiredIn
{-
************************************************************************
* *
\subsection{Substitutions}
* *
************************************************************************
-}
-- | A substitution environment, containing 'Id', 'TyVar', and 'CoVar'
-- substitutions.
--
-- Some invariants apply to how you use the substitution:
--
-- 1. #in_scope_invariant# The in-scope set contains at least those 'Id's and 'TyVar's that will be in scope /after/
-- applying the substitution to a term. Precisely, the in-scope set must be a superset of the free vars of the
-- substitution range that might possibly clash with locally-bound variables in the thing being substituted in.
--
-- 2. #apply_once# You may apply the substitution only /once/
--
-- There are various ways of setting up the in-scope set such that the first of these invariants hold:
--
-- * Arrange that the in-scope set really is all the things in scope
--
-- * Arrange that it's the free vars of the range of the substitution
--
-- * Make it empty, if you know that all the free vars of the substitution are fresh, and hence can't possibly clash
data Subst
  = Subst InScopeSet  -- Variables in scope (both Ids and TyVars) /after/
                      -- applying the substitution
          IdSubstEnv  -- Substitution from NcIds to CoreExprs
                      -- (NcId = non-coercion Id; coercion-typed Ids live
                      -- in the CvSubstEnv instead)
          TvSubstEnv  -- Substitution from TyVars to Types
          CvSubstEnv  -- Substitution from CoVars to Coercions

        -- INVARIANT 1: See #in_scope_invariant#
        -- This is what lets us deal with name capture properly
        -- It's a hard invariant to check...
        --
        -- INVARIANT 2: The substitution is apply-once; see Note [Apply once] with
        --              Types.TvSubstEnv
        --
        -- INVARIANT 3: See Note [Extending the Subst]
{-
Note [Extending the Subst]
~~~~~~~~~~~~~~~~~~~~~~~~~~
For a core Subst, which binds Ids as well, we make a different choice for Ids
than we do for TyVars.
For TyVars, see Note [Extending the TCvSubst] with Type.TvSubstEnv
For Ids, we have a different invariant
The IdSubstEnv is extended *only* when the Unique on an Id changes
Otherwise, we just extend the InScopeSet
In consequence:
* If all subst envs are empty, substExpr would be a
no-op, so substExprSC ("short cut") does nothing.
However, substExpr still goes ahead and substitutes. Reason: we may
want to replace existing Ids with new ones from the in-scope set, to
avoid space leaks.
* In substIdBndr, we extend the IdSubstEnv only when the unique changes
* If the CvSubstEnv, TvSubstEnv and IdSubstEnv are all empty,
substExpr does nothing (Note that the above rule for substIdBndr
maintains this property. If the incoming envts are both empty, then
substituting the type and IdInfo can't change anything.)
* In lookupIdSubst, we *must* look up the Id in the in-scope set, because
it may contain non-trivial changes. Example:
(/\a. \x:a. ...x...) Int
We extend the TvSubstEnv with [a |-> Int]; but x's unique does not change
so we only extend the in-scope set. Then we must look up in the in-scope
set when we find the occurrence of x.
* The requirement to look up the Id in the in-scope set means that we
must NOT take no-op short cut when the IdSubst is empty.
We must still look up every Id in the in-scope set.
* (However, we don't need to do so for expressions found in the IdSubst
itself, whose range is assumed to be correct wrt the in-scope set.)
Why do we make a different choice for the IdSubstEnv than the
TvSubstEnv and CvSubstEnv?
* For Ids, we change the IdInfo all the time (e.g. deleting the
unfolding), and adding it back later, so using the TyVar convention
would entail extending the substitution almost all the time
* The simplifier wants to look up in the in-scope set anyway, in case it
can see a better unfolding from an enclosing case expression
* For TyVars, only coercion variables can possibly change, and they are
easy to spot
-}
-- | An environment for substituting for 'Id's
type IdSubstEnv = IdEnv CoreExpr -- Domain is NcIds, i.e. not coercions
                                 -- (coercion variables are handled by the
                                 -- CvSubstEnv component of 'Subst')
----------------------------
-- | True when none of the three substitution environments has any
-- entries; the in-scope set is irrelevant to emptiness.
isEmptySubst :: Subst -> Bool
isEmptySubst (Subst _ ids tvs cvs) =
    and [isEmptyVarEnv ids, isEmptyVarEnv tvs, isEmptyVarEnv cvs]

-- | The empty substitution over an empty in-scope set.
emptySubst :: Subst
emptySubst = mkEmptySubst emptyInScopeSet

-- | The empty substitution over a caller-supplied in-scope set.
mkEmptySubst :: InScopeSet -> Subst
mkEmptySubst in_scope = Subst in_scope emptyVarEnv emptyVarEnv emptyVarEnv

-- | Assemble a substitution from its four components.  NB the argument
-- order: the type and coercion envs come before the id env.
mkSubst :: InScopeSet -> TvSubstEnv -> CvSubstEnv -> IdSubstEnv -> Subst
mkSubst in_scope tvs cvs ids = Subst in_scope ids tvs cvs

-- | Find the in-scope set: see "CoreSubst#in_scope_invariant"
substInScope :: Subst -> InScopeSet
substInScope (Subst in_scope _ _ _) = in_scope

-- | Remove all substitutions for 'Id's and 'Var's that might have been built up
-- while preserving the in-scope set
zapSubstEnv :: Subst -> Subst
zapSubstEnv (Subst in_scope _ _ _) = mkEmptySubst in_scope
-- | Add a substitution for an 'Id' to the 'Subst': you must ensure that the in-scope set is
-- such that the "CoreSubst#in_scope_invariant" is true after extending the substitution like this
extendIdSubst :: Subst -> Id -> CoreExpr -> Subst
-- ToDo: add an ASSERT that fvs(subst-result) is already in the in-scope set
extendIdSubst (Subst in_scope ids tvs cvs) v r
  -- The assertion guards against a coercion variable sneaking into the
  -- Id env; CoVar substitutions belong in the CvSubstEnv.
  = ASSERT2( isNonCoVarId v, ppr v $$ ppr r )
    Subst in_scope (extendVarEnv ids v r) tvs cvs

-- | Adds multiple 'Id' substitutions to the 'Subst': see also 'extendIdSubst'
extendIdSubstList :: Subst -> [(Id, CoreExpr)] -> Subst
extendIdSubstList (Subst in_scope ids tvs cvs) prs
  = ASSERT( all (isNonCoVarId . fst) prs )
    Subst in_scope (extendVarEnvList ids prs) tvs cvs
-- | Add a substitution for a 'TyVar' to the 'Subst'
-- The 'TyVar' *must* be a real TyVar, and not a CoVar
-- You must ensure that the in-scope set is such that
-- the "CoreSubst#in_scope_invariant" is true after extending
-- the substitution like this.
extendTvSubst :: Subst -> TyVar -> Type -> Subst
extendTvSubst (Subst in_scope ids tvs cvs) tv ty
  = ASSERT( isTyVar tv )   -- keep CoVars out of the TvSubstEnv
    Subst in_scope ids (extendVarEnv tvs tv ty) cvs
-- | Adds multiple 'TyVar' substitutions to the 'Subst': see also 'extendTvSubst'
extendTvSubstList :: Subst -> [(TyVar,Type)] -> Subst
extendTvSubstList subst vrs = foldl' (\s (v, r) -> extendTvSubst s v r) subst vrs
-- | Add a substitution from a 'CoVar' to a 'Coercion' to the 'Subst': you must ensure that the in-scope set is
-- such that the "CoreSubst#in_scope_invariant" is true after extending the substitution like this
extendCvSubst :: Subst -> CoVar -> Coercion -> Subst
extendCvSubst (Subst in_scope ids tvs cvs) v r
  = ASSERT( isCoVar v )   -- keep non-coercion vars out of the CvSubstEnv
    Subst in_scope ids tvs (extendVarEnv cvs v r)
-- | Add a substitution appropriate to the thing being substituted
-- (whether an expression, type, or coercion). See also
-- 'extendIdSubst', 'extendTvSubst', 'extendCvSubst'
-- The shape of the argument decides which environment is extended; the
-- assertions check that the binder has the matching flavour.
extendSubst :: Subst -> Var -> CoreArg -> Subst
extendSubst subst var arg
  = case arg of
      Type ty     -> ASSERT( isTyVar var ) extendTvSubst subst var ty
      Coercion co -> ASSERT( isCoVar var ) extendCvSubst subst var co
      _           -> ASSERT( isId    var ) extendIdSubst subst var arg
-- | Map one variable to another of the same flavour, wrapping the
-- target as a 'Type', 'Coercion' or 'Var' expression as appropriate.
extendSubstWithVar :: Subst -> Var -> Var -> Subst
extendSubstWithVar subst v1 v2
  | isTyVar v1 = ASSERT( isTyVar v2 ) extendTvSubst subst v1 (mkTyVarTy v2)
  | isCoVar v1 = ASSERT( isCoVar v2 ) extendCvSubst subst v1 (mkCoVarCo v2)
  | otherwise  = ASSERT( isId    v2 ) extendIdSubst subst v1 (Var v2)
-- | Add a substitution as appropriate to each of the terms being
-- substituted (whether expressions, types, or coercions). See also
-- 'extendSubst'.
extendSubstList :: Subst -> [(Var,CoreArg)] -> Subst
extendSubstList = foldl' (\subst (var, rhs) -> extendSubst subst var rhs)
-- | Find the substitution for an 'Id' in the 'Subst'
-- Non-local (global) Ids are never substituted.  Otherwise try the
-- IdSubstEnv, then the in-scope set — the latter is vital because a
-- binder whose unique did not change is recorded only in the in-scope
-- set (see Note [Extending the Subst]).  The 'SDoc' is debug context
-- for the WARN emitted when the Id is found in neither place.
lookupIdSubst :: SDoc -> Subst -> Id -> CoreExpr
lookupIdSubst doc (Subst in_scope ids _ _) v
  | not (isLocalId v) = Var v
  | Just e <- lookupVarEnv ids v = e
  | Just v' <- lookupInScope in_scope v = Var v'
        -- Vital! See Note [Extending the Subst]
  | otherwise = WARN( True, text "CoreSubst.lookupIdSubst" <+> doc <+> ppr v
                            $$ ppr in_scope)
                Var v
-- | Find the substitution for a 'TyVar' (or, failing that, a 'CoVar')
-- in the 'Subst'; an unmapped variable is returned as itself.
lookupTCvSubst :: Subst -> TyVar -> Type
lookupTCvSubst (Subst _ _ tvs cvs) v
  | isTyVar v = case lookupVarEnv tvs v of
      Just ty -> ty
      Nothing -> Type.mkTyVarTy v
  | otherwise = mkCoercionTy $ case lookupVarEnv cvs v of
      Just co -> co
      Nothing -> mkCoVarCo v
-- | Drop any substitution for the given variable, from whichever of the
-- three environments matches its flavour.
delBndr :: Subst -> Var -> Subst
delBndr (Subst in_scope ids tvs cvs) v = Subst in_scope ids' tvs' cvs'
  where
    (ids', tvs', cvs')
      | isCoVar v = (ids, tvs, delVarEnv cvs v)
      | isTyVar v = (ids, delVarEnv tvs v, cvs)
      | otherwise = (delVarEnv ids v, tvs, cvs)

-- | Drop any substitutions for the given variables; simplest to delete
-- them from every environment regardless of flavour.
delBndrs :: Subst -> [Var] -> Subst
delBndrs (Subst in_scope ids tvs cvs) vs =
    Subst in_scope (del ids) (del tvs) (del cvs)
  where
    del env = delVarEnvList env vs
-- | Simultaneously substitute for a bunch of variables
-- No left-right shadowing
-- ie the substitution for (\x \y. e) a1 a2
-- so neither x nor y scope over a1 a2
-- Each pair is routed to the environment matching its payload: Ids to
-- the IdSubstEnv, 'Type' payloads to the TvSubstEnv, 'Coercion'
-- payloads to the CvSubstEnv.
mkOpenSubst :: InScopeSet -> [(Var,CoreArg)] -> Subst
mkOpenSubst in_scope pairs = Subst in_scope
                                   (mkVarEnv [(id,e) | (id, e) <- pairs, isId id])
                                   (mkVarEnv [(tv,ty) | (tv, Type ty) <- pairs])
                                   (mkVarEnv [(v,co) | (v, Coercion co) <- pairs])
------------------------------
-- | Is the variable a member of the substitution's in-scope set?
isInScope :: Var -> Subst -> Bool
isInScope v (Subst in_scope _ _ _) = elemInScopeSet v in_scope
-- | Add the 'Var's to the in-scope set, but do not remove
-- any existing substitutions for them.
addInScopeSet :: Subst -> VarSet -> Subst
addInScopeSet (Subst in_scope ids tvs cvs) vs =
    Subst (extendInScopeSetSet in_scope vs) ids tvs cvs

-- | Add the 'Var' to the in-scope set and, as a side effect,
-- remove any existing substitutions for it.
extendInScope :: Subst -> Var -> Subst
extendInScope (Subst in_scope ids tvs cvs) v =
    Subst (extendInScopeSet in_scope v)
          (delVarEnv ids v) (delVarEnv tvs v) (delVarEnv cvs v)

-- | Add the 'Var's to the in-scope set: see also 'extendInScope'
extendInScopeList :: Subst -> [Var] -> Subst
extendInScopeList (Subst in_scope ids tvs cvs) vs =
    Subst (extendInScopeSetList in_scope vs)
          (delVarEnvList ids vs) (delVarEnvList tvs vs) (delVarEnvList cvs vs)

-- | Optimized version of 'extendInScopeList' that can be used if you are certain
-- all the things being added are 'Id's and hence none are 'TyVar's or 'CoVar's
extendInScopeIds :: Subst -> [Id] -> Subst
extendInScopeIds (Subst in_scope ids tvs cvs) vs =
    Subst (extendInScopeSetList in_scope vs) (delVarEnvList ids vs) tvs cvs

-- | Replace the in-scope set wholesale, leaving the three substitution
-- environments untouched.
setInScope :: Subst -> InScopeSet -> Subst
setInScope (Subst _ ids tvs cvs) in_scope = Subst in_scope ids tvs cvs
-- Pretty printing, for debugging only
instance Outputable Subst where
  ppr (Subst in_scope ids tvs cvs)
        =  text "<InScope =" <+> in_scope_doc
        $$ text " IdSubst   =" <+> ppr ids
        $$ text " TvSubst   =" <+> ppr tvs
        $$ text " CvSubst   =" <+> ppr cvs
         <> char '>'
    where
      -- Render the in-scope vars as a braced, wrapped list.
      in_scope_doc = pprVarSet (getInScopeVars in_scope) (braces . fsep . map ppr)
{-
************************************************************************
* *
Substituting expressions
* *
************************************************************************
-}
-- | Apply a substitution to an entire 'CoreExpr'. Remember, you may only
-- apply the substitution /once/: see "CoreSubst#apply_once"
--
-- Do *not* attempt to short-cut in the case of an empty substitution!
-- See Note [Extending the Subst]
-- Short-cut variant: a no-op for a genuinely empty substitution.
-- See Note [Extending the Subst] for why plain 'substExpr' must NOT
-- take this short cut.
substExprSC :: SDoc -> Subst -> CoreExpr -> CoreExpr
substExprSC doc subst orig_expr
  | isEmptySubst subst = orig_expr
  | otherwise          = -- pprTrace "enter subst-expr" (doc $$ ppr orig_expr) $
                         subst_expr doc subst orig_expr

-- Always substitutes, even when the substitution looks empty; the
-- in-scope set may still supply replacement Ids.
substExpr :: SDoc -> Subst -> CoreExpr -> CoreExpr
substExpr doc subst orig_expr = subst_expr doc subst orig_expr
-- The workhorse: structural recursion over the expression, threading an
-- extended substitution through each binder.  The 'SDoc' is debug
-- context passed down to 'lookupIdSubst' for its WARN message.
subst_expr :: SDoc -> Subst -> CoreExpr -> CoreExpr
subst_expr doc subst expr
  = go expr
  where
    go (Var v) = lookupIdSubst (doc $$ text "subst_expr") subst v
    go (Type ty) = Type (substTy subst ty)
    go (Coercion co) = Coercion (substCo subst co)
    go (Lit lit) = Lit lit
    go (App fun arg) = App (go fun) (go arg)
    go (Tick tickish e) = mkTick (substTickish subst tickish) (go e)
    go (Cast e co) = Cast (go e) (substCo subst co)
       -- Do not optimise even identity coercions
       -- Reason: substitution applies to the LHS of RULES, and
       --         if you "optimise" an identity coercion, you may
       --         lose a binder. We optimise the LHS of rules at
       --         construction time

    -- For each binding form: substitute the binder first, then the body
    -- under the extended substitution returned by substBndr(s).
    go (Lam bndr body) = Lam bndr' (subst_expr doc subst' body)
      where
        (subst', bndr') = substBndr subst bndr

    go (Let bind body) = Let bind' (subst_expr doc subst' body)
      where
        (subst', bind') = substBind subst bind

    go (Case scrut bndr ty alts) = Case (go scrut) bndr' (substTy subst ty) (map (go_alt subst') alts)
      where
        (subst', bndr') = substBndr subst bndr

    go_alt subst (con, bndrs, rhs) = (con, bndrs', subst_expr doc subst' rhs)
      where
        (subst', bndrs') = substBndrs subst bndrs
-- | Apply a substitution to an entire 'CoreBind', additionally returning an updated 'Subst'
-- that should be used by subsequent substitutions.
substBind, substBindSC :: Subst -> CoreBind -> (Subst, CoreBind)

substBindSC subst bind    -- Short-cut if the substitution is empty
  | not (isEmptySubst subst)
  = substBind subst bind
  | otherwise
  = case bind of
       NonRec bndr rhs -> (subst', NonRec bndr' rhs)
          where
            (subst', bndr') = substBndr subst bndr
       Rec pairs -> (subst', Rec (bndrs' `zip` rhss'))
          where
            (bndrs, rhss) = unzip pairs
            (subst', bndrs') = substRecBndrs subst bndrs
            rhss' | isEmptySubst subst'
                  = rhss   -- binders unchanged: old RHSs can be reused
                  | otherwise
                  = map (subst_expr (text "substBindSC") subst') rhss

substBind subst (NonRec bndr rhs)
  -- NB: the RHS is substituted with the *incoming* subst — a NonRec
  -- binder does not scope over its own RHS.
  = (subst', NonRec bndr' (subst_expr (text "substBind") subst rhs))
  where
    (subst', bndr') = substBndr subst bndr

substBind subst (Rec pairs)
   -- Recursive: all RHSs see the extended substitution subst'.
   = (subst', Rec (bndrs' `zip` rhss'))
   where
       (bndrs, rhss) = unzip pairs
       (subst', bndrs') = substRecBndrs subst bndrs
       rhss' = map (subst_expr (text "substBind") subst') rhss
-- | De-shadowing the program is sometimes a useful pre-pass. It can be done simply
-- by running over the bindings with an empty substitution, because substitution
-- returns a result that has no-shadowing guaranteed.
--
-- (Actually, within a single /type/ there might still be shadowing, because
-- 'substTy' is a no-op for the empty substitution, but that's probably OK.)
--
-- [Aug 09] This function is not used in GHC at the moment, but seems so
-- short and simple that I'm going to leave it here
deShadowBinds :: CoreProgram -> CoreProgram
deShadowBinds = snd . mapAccumL substBind emptySubst
{-
************************************************************************
* *
Substituting binders
* *
************************************************************************
Remember that substBndr and friends are used when doing expression
substitution only. Their only business is substitution, so they
preserve all IdInfo (suitably substituted). For example, we *want* to
preserve occ info in rules.
-}
-- | Substitutes a 'Var' for another one according to the 'Subst' given, returning
-- the result and an updated 'Subst' that should be used by subsequent substitutions.
-- 'IdInfo' is preserved by this process, although it is substituted into appropriately.
-- | Substitutes a 'Var' for another one according to the 'Subst' given, returning
-- the result and an updated 'Subst' that should be used by subsequent substitutions.
-- 'IdInfo' is preserved by this process, although it is substituted into appropriately.
substBndr :: Subst -> Var -> (Subst, Var)
substBndr subst bndr
  | isTyVar bndr = substTyVarBndr subst bndr
  | isCoVar bndr = substCoVarBndr subst bndr
  | otherwise    = substIdBndr (text "var-bndr") subst subst bndr
        -- Non-recursive binder: the IdInfo substitution (first arg) is
        -- the same as the binder substitution (second arg).
-- | Applies 'substBndr' to a number of 'Var's, accumulating a new 'Subst' left-to-right
substBndrs :: Subst -> [Var] -> (Subst, [Var])
substBndrs = mapAccumL substBndr
-- | Substitute in a mutually recursive group of 'Id's
-- Knot-tied: new_subst is fed back (lazily) into every 'substIdBndr'
-- call so each binder's IdInfo is substituted with the final
-- substitution for the whole group.
substRecBndrs :: Subst -> [Id] -> (Subst, [Id])
substRecBndrs subst bndrs
  = (new_subst, new_bndrs)
  where         -- Here's the reason we need to pass rec_subst to subst_id
    (new_subst, new_bndrs) = mapAccumL (substIdBndr (text "rec-bndr") new_subst) subst bndrs
-- | Substitute over a single 'Id' binder: clone it if it would shadow,
-- substitute its type and IdInfo, and extend the substitution per
-- Note [Extending the Subst].
substIdBndr :: SDoc
            -> Subst            -- ^ Substitution to use for the IdInfo
            -> Subst -> Id      -- ^ Substitution and Id to transform
            -> (Subst, Id)      -- ^ Transformed pair
                                -- NB: unfolding may be zapped
substIdBndr _doc rec_subst subst@(Subst in_scope env tvs cvs) old_id
  = -- pprTrace "substIdBndr" (doc $$ ppr old_id $$ ppr in_scope) $
    (Subst (in_scope `extendInScopeSet` new_id) new_env tvs cvs, new_id)
  where
    id1 = uniqAway in_scope old_id    -- id1 is cloned if necessary
    id2 | no_type_change = id1
        | otherwise      = setIdType id1 (substTy subst old_ty)

    old_ty = idType old_id
    -- Cheap test to skip the type substitution entirely.
    no_type_change = (isEmptyVarEnv tvs && isEmptyVarEnv cvs) ||
                     isEmptyVarSet (tyCoVarsOfType old_ty)

        -- new_id has the right IdInfo
        -- The lazy-set is because we're in a loop here, with
        -- rec_subst, when dealing with a mutually-recursive group
    new_id = maybeModifyIdInfo mb_new_info id2
    mb_new_info = substIdInfo rec_subst id2 (idInfo id2)
        -- NB: unfolding info may be zapped

        -- Extend the substitution if the unique has changed
        -- See the notes with substTyVarBndr for the delVarEnv
    new_env | no_change = delVarEnv env old_id
            | otherwise = extendVarEnv env old_id (Var new_id)

    no_change = id1 == old_id
        -- See Note [Extending the Subst]
        -- it's /not/ necessary to check mb_new_info and no_type_change
{-
Now a variant that unconditionally allocates a new unique.
It also unconditionally zaps the OccInfo.
-}
-- | Very similar to 'substBndr', but it always allocates a new 'Unique' for
-- each variable in its output. It substitutes the IdInfo though.
cloneIdBndr :: Subst -> UniqSupply -> Id -> (Subst, Id)
cloneIdBndr subst us old_id
  -- Non-recursive: the IdInfo substitution (first arg to clone_id) is
  -- just the incoming subst.
  = clone_id subst subst (old_id, uniqFromSupply us)

-- | Applies 'cloneIdBndr' to a number of 'Id's, accumulating a final
-- substitution from left to right
cloneIdBndrs :: Subst -> UniqSupply -> [Id] -> (Subst, [Id])
cloneIdBndrs subst us ids
  = mapAccumL (clone_id subst) subst (ids `zip` uniqsFromSupply us)
cloneBndrs :: Subst -> UniqSupply -> [Var] -> (Subst, [Var])
-- Works for all kinds of variables (typically case binders)
-- not just Ids
cloneBndrs subst us vs
  = mapAccumL (\subst (v, u) -> cloneBndr subst u v) subst (vs `zip` uniqsFromSupply us)

-- | Clone one binder of any flavour, forcing the given fresh 'Unique'.
cloneBndr :: Subst -> Unique -> Var -> (Subst, Var)
cloneBndr subst uniq v
      | isTyVar v = cloneTyVarBndr subst v uniq
      | otherwise = clone_id subst subst (v,uniq) -- Works for coercion variables too
-- | Clone a mutually recursive group of 'Id's
cloneRecIdBndrs :: Subst -> UniqSupply -> [Id] -> (Subst, [Id])
cloneRecIdBndrs subst us ids
  = (subst', ids')
  where
    -- Knot-tied like 'substRecBndrs': subst' is passed lazily to
    -- clone_id while the same mapAccumL is still constructing it.
    (subst', ids') = mapAccumL (clone_id subst') subst
                               (ids `zip` uniqsFromSupply us)
-- Just like substIdBndr, except that it always makes a new unique
-- It is given the unique to use
clone_id    :: Subst                    -- Substitution for the IdInfo
            -> Subst -> (Id, Unique)    -- Substitution and Id to transform
            -> (Subst, Id)              -- Transformed pair
clone_id rec_subst subst@(Subst in_scope idvs tvs cvs) (old_id, uniq)
  = (Subst (in_scope `extendInScopeSet` new_id) new_idvs tvs new_cvs, new_id)
  where
    id1 = setVarUnique old_id uniq
    id2 = substIdType subst id1
    new_id = maybeModifyIdInfo (substIdInfo rec_subst id2 (idInfo old_id)) id2
    -- A cloned CoVar is recorded in the coercion env (as a coercion);
    -- every other Id goes in the Id env (as a Var expression).
    (new_idvs, new_cvs) | isCoVar old_id = (idvs, extendVarEnv cvs old_id (mkCoVarCo new_id))
                        | otherwise      = (extendVarEnv idvs old_id (Var new_id), cvs)
{-
************************************************************************
* *
Types and Coercions
* *
************************************************************************
For types and coercions we just call the corresponding functions in
Type and Coercion, but we have to repackage the substitution, from a
Subst to a TCvSubst.
-}
-- Defers to Type.substTyVarBndr, repackaging Subst <-> TCvSubst around
-- the call; the Id env is carried through unchanged.
substTyVarBndr :: Subst -> TyVar -> (Subst, TyVar)
substTyVarBndr (Subst in_scope id_env tv_env cv_env) tv
  = case Type.substTyVarBndr (TCvSubst in_scope tv_env cv_env) tv of
      (TCvSubst in_scope' tv_env' cv_env', tv')
         -> (Subst in_scope' id_env tv_env' cv_env', tv')

-- As 'substTyVarBndr', but forces the given fresh 'Unique' onto the binder.
cloneTyVarBndr :: Subst -> TyVar -> Unique -> (Subst, TyVar)
cloneTyVarBndr (Subst in_scope id_env tv_env cv_env) tv uniq
  = case Type.cloneTyVarBndr (TCvSubst in_scope tv_env cv_env) tv uniq of
      (TCvSubst in_scope' tv_env' cv_env', tv')
         -> (Subst in_scope' id_env tv_env' cv_env', tv')
-- | Substitute over a coercion-variable binder, deferring to
-- 'Coercion.substCoVarBndr' and repackaging Subst <-> TCvSubst around
-- the call.  The signature previously said 'TyVar', which was
-- misleading: 'CoVar' is what is meant.  Both are synonyms of 'Var',
-- so callers are unaffected.
substCoVarBndr :: Subst -> CoVar -> (Subst, CoVar)
substCoVarBndr (Subst in_scope id_env tv_env cv_env) cv
  = case Coercion.substCoVarBndr (TCvSubst in_scope tv_env cv_env) cv of
      (TCvSubst in_scope' tv_env' cv_env', cv')
         -> (Subst in_scope' id_env tv_env' cv_env', cv')
-- | See 'Type.substTy'
-- NOTE(review): uses the *unchecked* variant of the type substitution —
-- presumably to skip in-scope-set validation on this hot path; confirm
-- before changing to the checked 'Type.substTy'.
substTy :: Subst -> Type -> Type
substTy subst ty = Type.substTyUnchecked (getTCvSubst subst) ty

-- | Project the type/coercion part of a 'Subst', dropping the Id env.
getTCvSubst :: Subst -> TCvSubst
getTCvSubst (Subst in_scope _ tenv cenv) = TCvSubst in_scope tenv cenv

-- | See 'Coercion.substCo'
substCo :: Subst -> Coercion -> Coercion
substCo subst co = Coercion.substCo (getTCvSubst subst) co
{-
************************************************************************
* *
\section{IdInfo substitution}
* *
************************************************************************
-}
-- | Substitute through an 'Id''s type; a no-op when the tyvar/covar
-- envs are empty or the type is closed.
substIdType :: Subst -> Id -> Id
substIdType subst@(Subst _ _ tv_env cv_env) id
  | (isEmptyVarEnv tv_env && isEmptyVarEnv cv_env) || isEmptyVarSet (tyCoVarsOfType old_ty) = id
  | otherwise = setIdType id (substTy subst old_ty)
                -- The tyCoVarsOfType is cheaper than it looks
                -- because we cache the free tyvars of the type
                -- in a Note in the id's type itself
  where
    old_ty = idType id
------------------
-- | Substitute into some 'IdInfo' with regard to the supplied new 'Id'.
-- Returns Nothing when the rules are empty and the unfolding is closed
-- (the common case), so callers can keep the old IdInfo untouched.
substIdInfo :: Subst -> Id -> IdInfo -> Maybe IdInfo
substIdInfo subst new_id info
  | nothing_to_do = Nothing
  | otherwise     = Just (info `setRuleInfo` substSpec subst new_id old_rules
                               `setUnfoldingInfo` substUnfolding subst old_unf)
  where
    old_rules     = ruleInfo info
    old_unf       = unfoldingInfo info
    nothing_to_do = isEmptyRuleInfo old_rules && isClosedUnfolding old_unf
------------------
-- | Substitutes for the 'Id's within an unfolding
substUnfolding, substUnfoldingSC :: Subst -> Unfolding -> Unfolding
-- Seq'ing on the returned Unfolding is enough to cause
-- all the substitutions to happen completely

substUnfoldingSC subst unf       -- Short-cut version
  | isEmptySubst subst = unf
  | otherwise          = substUnfolding subst unf

-- DFun unfoldings: substitute binders, then each argument under the
-- extended substitution.
substUnfolding subst df@(DFunUnfolding { df_bndrs = bndrs, df_args = args })
  = df { df_bndrs = bndrs', df_args = args' }
  where
    (subst',bndrs') = substBndrs subst bndrs
    args'           = map (substExpr (text "subst-unf:dfun") subst') args

substUnfolding subst unf@(CoreUnfolding { uf_tmpl = tmpl, uf_src = src })
        -- Retain an InlineRule!
  | not (isStableSource src)  -- Zap an unstable unfolding, to save substitution work
  = NoUnfolding
  | otherwise                 -- But keep a stable one!
  -- The seqExpr forces the substituted template so the substitution
  -- happens completely (see the comment on the signature above).
  = seqExpr new_tmpl `seq`
    unf { uf_tmpl = new_tmpl }
  where
    new_tmpl = substExpr (text "subst-unf") subst tmpl

substUnfolding _ unf = unf      -- NoUnfolding, OtherCon
------------------
-- | Substitute an *occurrence* of an 'Id'; panics if the substitution
-- maps it to anything other than a plain variable.
substIdOcc :: Subst -> Id -> Id
-- These Ids should not be substituted to non-Ids
substIdOcc subst v = case lookupIdSubst (text "substIdOcc") subst v of
                        Var v' -> v'
                        other  -> pprPanic "substIdOcc" (vcat [ppr v <+> ppr other, ppr subst])
------------------
-- | Substitutes for the 'Id's within the 'WorkerInfo' given the new function 'Id'
substSpec :: Subst -> Id -> RuleInfo -> RuleInfo
substSpec subst new_id (RuleInfo rules rhs_fvs)
  = seqRuleInfo new_spec `seq` new_spec
        -- the seq forces the substitution to happen completely
  where
    subst_ru_fn = const (idName new_id)   -- local rules take the new Id's name
    new_spec = RuleInfo (map (substRule subst subst_ru_fn) rules)
                        (substDVarSet subst rhs_fvs)
------------------
-- | Substitute into rules attached to *imported* Ids.  Their ru_fn is
-- never changed (see 'substRule'), so the rename function should never
-- be called — hence the panic.
substRulesForImportedIds :: Subst -> [CoreRule] -> [CoreRule]
substRulesForImportedIds subst rules
  = map (substRule subst not_needed) rules
  where
    not_needed name = pprPanic "substRulesForImportedIds" (ppr name)
------------------
-- | Substitute into a single 'CoreRule'.
-- The subst_ru_fn argument is applied to substitute the ru_fn field
-- of the rule:
--   - Rules for *imported* Ids never change ru_fn
--   - Rules for *local* Ids are in the IdInfo for that Id,
--     and the ru_fn field is simply replaced by the new name
--     of the Id
substRule :: Subst -> (Name -> Name) -> CoreRule -> CoreRule
substRule _ _ rule@(BuiltinRule {}) = rule
substRule subst subst_ru_fn rule@(Rule { ru_bndrs = bndrs, ru_args = args
                                       , ru_fn = fn_name, ru_rhs = rhs
                                       , ru_local = is_local })
  = rule { ru_bndrs = bndrs'
         , ru_fn    = if is_local
                        then subst_ru_fn fn_name
                        else fn_name
         , ru_args  = map (substExpr doc subst') args
           -- Use the same descriptive debug doc for the RHS as for the
           -- args (previously an uninformative (text "foo")), so that
           -- lookupIdSubst warnings identify the offending rule.
         , ru_rhs   = substExpr doc subst' rhs }
           -- Do NOT optimise the RHS (previously we did simplOptExpr here)
           -- See Note [Substitute lazily]
  where
    doc = text "subst-rule" <+> ppr fn_name
    (subst', bndrs') = substBndrs subst bndrs
------------------
-- | Substitute into a list of vectorisation declarations.
substVects :: Subst -> [CoreVect] -> [CoreVect]
substVects subst = map (substVect subst)

------------------
-- | Substitute into one vectorisation declaration; only 'Vect' carries
-- an expression (which is also simple-optimised), the rest pass through.
substVect :: Subst -> CoreVect -> CoreVect
substVect subst (Vect v rhs) = Vect v (simpleOptExprWith subst rhs)
substVect _subst vd@(NoVect _) = vd
substVect _subst vd@(VectType _ _ _) = vd
substVect _subst vd@(VectClass _) = vd
substVect _subst vd@(VectInst _) = vd
------------------
-- | Substitute through a deterministic variable set, recomputing the
-- free variables of whatever each element maps to.
substDVarSet :: Subst -> DVarSet -> DVarSet
substDVarSet subst fvs
  = mkDVarSet $ fst $ foldr (subst_fv subst) ([], emptyVarSet) $ dVarSetElems fvs
  where
    -- The accumulator pair is (ordered list, membership set); only the
    -- list is kept.  Ids go through the Id substitution, everything
    -- else through the type/coercion substitution.
    subst_fv subst fv acc
       | isId fv   = expr_fvs (lookupIdSubst (text "substDVarSet") subst fv) isLocalVar emptyVarSet $! acc
       | otherwise = tyCoFVsOfType (lookupTCvSubst subst fv) (const True) emptyVarSet $! acc
------------------
-- | Substitute within a 'Tickish'; only 'Breakpoint' mentions 'Id's.
-- See Note [substTickish] for why the substituted expression is
-- guaranteed trivial enough to unwrap back to an Id.
substTickish :: Subst -> Tickish Id -> Tickish Id
substTickish subst (Breakpoint n ids)
   = Breakpoint n (map do_one ids)
 where
    do_one = getIdFromTrivialExpr . lookupIdSubst (text "subst_tickish") subst
substTickish _subst other = other
{- Note [Substitute lazily]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
The functions that substitute over IdInfo must be pretty lazy, because
they are knot-tied by substRecBndrs.
One case in point was Trac #10627 in which a rule for a function 'f'
referred to 'f' (at a different type) on the RHS. But instead of just
substituting in the rhs of the rule, we were calling simpleOptExpr, which
looked at the idInfo for 'f'; result <<loop>>.
In any case we don't need to optimise the RHS of rules, or unfoldings,
because the simplifier will do that.
Note [substTickish]
~~~~~~~~~~~~~~~~~~~~~~
A Breakpoint contains a list of Ids. What happens if we ever want to
substitute an expression for one of these Ids?
First, we ensure that we only ever substitute trivial expressions for
these Ids, by marking them as NoOccInfo in the occurrence analyser.
Then, when substituting for the Id, we unwrap any type applications
and abstractions to get back to an Id, with getIdFromTrivialExpr.
Second, we have to ensure that we never try to substitute a literal
for an Id in a breakpoint. We ensure this by never storing an Id with
an unlifted type in a Breakpoint - see Coverage.mkTickish.
Breakpoints can't handle free variables with unlifted types anyway.
-}
{-
Note [Worker inlining]
~~~~~~~~~~~~~~~~~~~~~~
A worker can get substituted away entirely.
- it might be trivial
- it might simply be very small
We do not treat an InlWrapper as an 'occurrence' in the occurrence
analyser, so it's possible that the worker is not even in scope any more.
In all these cases we simply drop the special case, returning to
InlVanilla. The WARN is just so I can see if it happens a lot.
************************************************************************
* *
The Very Simple Optimiser
* *
************************************************************************
Note [Getting the map/coerce RULE to work]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We wish to allow the "map/coerce" RULE to fire:
{-# RULES "map/coerce" map coerce = coerce #-}
The naive core produced for this is
forall a b (dict :: Coercible * a b).
map @a @b (coerce @a @b @dict) = coerce @[a] @[b] @dict'
where dict' :: Coercible [a] [b]
dict' = ...
This matches literal uses of `map coerce` in code, but that's not what we
want. We want it to match, say, `map MkAge` (where newtype Age = MkAge Int)
too. Some of this is addressed by compulsorily unfolding coerce on the LHS,
yielding
forall a b (dict :: Coercible * a b).
map @a @b (\(x :: a) -> case dict of
MkCoercible (co :: a ~R# b) -> x |> co) = ...
Getting better. But this isn't exactly what gets produced. This is because
Coercible essentially has ~R# as a superclass, and superclasses get eagerly
extracted during solving. So we get this:
forall a b (dict :: Coercible * a b).
case Coercible_SCSel @* @a @b dict of
_ [Dead] -> map @a @b (\(x :: a) -> case dict of
MkCoercible (co :: a ~R# b) -> x |> co) = ...
Unfortunately, this still abstracts over a Coercible dictionary. We really
want it to abstract over the ~R# evidence. So, we have Desugar.unfold_coerce,
which transforms the above to (see also Note [Desugaring coerce as cast] in
Desugar)
forall a b (co :: a ~R# b).
let dict = MkCoercible @* @a @b co in
case Coercible_SCSel @* @a @b dict of
_ [Dead] -> map @a @b (\(x :: a) -> case dict of
MkCoercible (co :: a ~R# b) -> x |> co) = let dict = ... in ...
Now, we need simpleOptExpr to fix this up. It does so by taking three
separate actions:
1. Inline certain non-recursive bindings. The choice whether to inline
is made in maybe_substitute. Note the rather specific check for
MkCoercible in there.
2. Stripping case expressions like the Coercible_SCSel one.
See the `Case` case of simple_opt_expr's `go` function.
3. Look for case expressions that unpack something that was
just packed and inline them. This is also done in simple_opt_expr's
`go` function.
This is all a fair amount of special-purpose hackery, but it's for
a good cause. And it won't hurt other RULES and such that it comes across.
-}
simpleOptExpr :: CoreExpr -> CoreExpr
-- Do simple optimisation on an expression
-- The optimisation is very straightforward: just
-- inline non-recursive bindings that are used only once,
-- or where the RHS is trivial
--
-- We also inline bindings that bind a Eq# box: see
-- See Note [Getting the map/coerce RULE to work].
--
-- The result is NOT guaranteed occurrence-analysed, because
-- in  (let x = y in ....) we substitute for x; so y's occ-info
-- may change radically
simpleOptExpr expr
  = -- pprTrace "simpleOptExpr" (ppr init_subst $$ ppr expr)
    simpleOptExprWith init_subst expr
  where
    init_subst = mkEmptySubst (mkInScopeSet (exprFreeVars expr))
        -- It's potentially important to make a proper in-scope set
        -- Consider  let x = ..y.. in \y. ...x...
        -- Then we should remember to clone y before substituting
        -- for x.  It's very unlikely to occur, because we probably
        -- won't *be* substituting for x if it occurs inside a
        -- lambda.
        --
        -- It's a bit painful to call exprFreeVars, because it makes
        -- three passes instead of two (occ-anal, and go)

-- | As 'simpleOptExpr', but with a caller-supplied starting
-- substitution; the expression is occurrence-analysed first.
simpleOptExprWith :: Subst -> InExpr -> OutExpr
simpleOptExprWith subst expr = simple_opt_expr subst (occurAnalyseExpr expr)
----------------------
-- | Run the simple optimiser over a whole program, returning the
-- optimised bindings together with the rules and vectorisation
-- declarations substituted through the final substitution.
simpleOptPgm :: DynFlags -> Module
             -> CoreProgram -> [CoreRule] -> [CoreVect]
             -> IO (CoreProgram, [CoreRule], [CoreVect])
simpleOptPgm dflags this_mod binds rules vects
  = do { dumpIfSet_dyn dflags Opt_D_dump_occur_anal "Occurrence analysis"
                       (pprCoreBindings occ_anald_binds $$ pprRules rules );
       ; return (reverse binds', substRulesForImportedIds subst' rules, substVects subst' vects) }
  where
    occ_anald_binds  = occurAnalysePgm this_mod (\_ -> False) {- No rules active -}
                                       rules vects emptyVarEnv binds
    -- do_one conses kept bindings onto binds', so the accumulated list
    -- comes out reversed — hence the 'reverse' above.
    -- NOTE(review): lazy foldl here; foldl' would avoid a thunk chain
    -- over large programs — left unchanged to preserve behaviour.
    (subst', binds') = foldl do_one (emptySubst, []) occ_anald_binds
    do_one (subst, binds') bind
      = case simple_opt_bind subst bind of
          (subst', Nothing)    -> (subst', binds')
          (subst', Just bind') -> (subst', bind':binds')
----------------------
-- Naming convention: In* things are pre-substitution, Out* things are
-- post-substitution.  The synonyms only document intent; they are all
-- interchangeable with the underlying types.
type InVar   = Var
type OutVar  = Var
type InId    = Id
type OutId   = Id
type InExpr  = CoreExpr
type OutExpr = CoreExpr
-- In these functions the substitution maps InVar -> OutExpr
----------------------
simple_opt_expr :: Subst -> InExpr -> OutExpr
-- The workhorse: applies the substitution while performing simple
-- simplifications (beta reduction via simple_app, known-constructor
-- case elimination, eta reduction of lambdas, coercion optimisation).
simple_opt_expr subst expr
  = go expr
  where
    in_scope_env = (substInScope subst, simpleUnfoldingFun)

    go (Var v)          = lookupIdSubst (text "simpleOptExpr") subst v
    go (App e1 e2)      = simple_app subst e1 [go e2]
    go (Type ty)        = Type (substTy subst ty)
    go (Coercion co)    = Coercion (optCoercion (getTCvSubst subst) co)
    go (Lit lit)        = Lit lit
    go (Tick tickish e) = mkTick (substTickish subst tickish) (go e)
    go (Cast e co)      | isReflCo co' = go e       -- drop casts by reflexivity
                        | otherwise    = Cast (go e) co'
      where
        co' = optCoercion (getTCvSubst subst) co

    go (Let bind body) = case simple_opt_bind subst bind of
                           (subst', Nothing)   -> simple_opt_expr subst' body
                           (subst', Just bind) -> Let bind (simple_opt_expr subst' body)

    go lam@(Lam {}) = go_lam [] subst lam
    go (Case e b ty as)
        -- See Note [Getting the map/coerce RULE to work]
      | isDeadBinder b
      , Just (con, _tys, es) <- exprIsConApp_maybe in_scope_env e'
      , Just (altcon, bs, rhs) <- findAlt (DataAlt con) as
        -- Scrutinee is a known constructor application: select the
        -- matching alternative and bind the constructor arguments.
      = case altcon of
          DEFAULT -> go rhs
          _       -> mkLets (catMaybes mb_binds) $ simple_opt_expr subst' rhs
            where (subst', mb_binds) = mapAccumL simple_opt_out_bind subst
                                                 (zipEqual "simpleOptExpr" bs es)

        -- Note [Getting the map/coerce RULE to work]
      | isDeadBinder b
      , [(DEFAULT, _, rhs)] <- as
      , isCoercionType (varType b)
      , (Var fun, _args) <- collectArgs e
      , fun `hasKey` coercibleSCSelIdKey
        -- without this last check, we get #11230
      = go rhs

      | otherwise
      = Case e' b' (substTy subst ty)
             (map (go_alt subst') as)
      where
        e' = go e
        (subst', b') = subst_opt_bndr subst b

    ----------------------
    go_alt subst (con, bndrs, rhs)
      = (con, bndrs', simple_opt_expr subst' rhs)
      where
        (subst', bndrs') = subst_opt_bndrs subst bndrs

    ----------------------
    -- go_lam tries eta reduction
    -- It collects the binders in (reversed) bs' while walking under the
    -- lambdas, then attempts to eta-reduce the whole group at once.
    go_lam bs' subst (Lam b e)
      = go_lam (b':bs') subst' e
      where
        (subst', b') = subst_opt_bndr subst b
    go_lam bs' subst e
      | Just etad_e <- tryEtaReduce bs e' = etad_e
      | otherwise                         = mkLams bs e'
      where
        bs = reverse bs'
        e' = simple_opt_expr subst e
----------------------
-- simple_app collects arguments for beta reduction
-- simple_app collects arguments for beta reduction
simple_app :: Subst -> InExpr -> [OutExpr] -> CoreExpr
-- Walks down the application spine accumulating already-optimised
-- arguments, beta-reducing against lambdas where possible.
simple_app subst (App e1 e2) as
  = simple_app subst e1 (simple_opt_expr subst e2 : as)
simple_app subst (Lam b e) (a:as)
  = case maybe_substitute subst b a of
      Just ext_subst -> simple_app ext_subst e as       -- substitute the arg
      Nothing        -> Let (NonRec b2 a) (simple_app subst' e as)
                        -- otherwise keep a let-binding for it
  where
    (subst', b') = subst_opt_bndr subst b
    b2 = add_info subst' b b'
simple_app subst (Var v) as
  | isCompulsoryUnfolding (idUnfolding v)
  , isAlwaysActive (idInlineActivation v)
    -- See Note [Unfold compulsory unfoldings in LHSs]
  = simple_app subst (unfoldingTemplate (idUnfolding v)) as
simple_app subst (Tick t e) as
  -- Okay to do "(Tick t e) x ==> Tick t (e x)"?
  | t `tickishScopesLike` SoftScope
  = mkTick t $ simple_app subst e as
simple_app subst e as
  = foldl App (simple_opt_expr subst e) as
----------------------
simple_opt_bind,simple_opt_bind' :: Subst -> CoreBind -> (Subst, Maybe CoreBind)
-- Optimise a binding; returns Nothing when the binding was inlined
-- into the substitution and no residual let is needed.
simple_opt_bind s b  -- Can add trace stuff here
  = simple_opt_bind' s b

simple_opt_bind' subst (Rec prs)
  = (subst'', res_bind)
  where
    res_bind            = Just (Rec (reverse rev_prs'))   -- pairs accumulated in reverse
    (subst', bndrs')    = subst_opt_bndrs subst (map fst prs)
    (subst'', rev_prs') = foldl do_pr (subst', []) (prs `zip` bndrs')
    do_pr (subst, prs) ((b,r), b')
      = case maybe_substitute subst b r2 of
          Just subst' -> (subst', prs)          -- RHS inlined via the substitution
          Nothing     -> (subst, (b2,r2):prs)
      where
        b2 = add_info subst b b'
        r2 = simple_opt_expr subst r

simple_opt_bind' subst (NonRec b r)
  = simple_opt_out_bind subst (b, simple_opt_expr subst r)
----------------------
simple_opt_out_bind :: Subst -> (InVar, OutExpr) -> (Subst, Maybe CoreBind)
-- Decide what to do with a binder whose RHS has already been
-- optimised: inline it into the substitution if 'maybe_substitute'
-- allows, otherwise emit a NonRec binding for it.
simple_opt_out_bind subst (b, r')
  | Just ext_subst <- maybe_substitute subst b r'
  = (ext_subst, Nothing)
  | otherwise
  = (subst', Just (NonRec b2 r'))
  where
    (subst', b') = subst_opt_bndr subst b
    b2 = add_info subst' b b'
----------------------
maybe_substitute :: Subst -> InVar -> OutExpr -> Maybe Subst
-- (maybe_substitute subst in_var out_rhs)
--   either extends subst with (in_var -> out_rhs)
--   or     returns Nothing
maybe_substitute subst b r
  | Type ty <- r        -- let a::* = TYPE ty in <body>
  = ASSERT( isTyVar b )
    Just (extendTvSubst subst b ty)

  | Coercion co <- r
  = ASSERT( isCoVar b )
    Just (extendCvSubst subst b co)

  | isId b              -- let x = e in <body>
  , not (isCoVar b)     -- See Note [Do not inline CoVars unconditionally]
                        -- in SimplUtils
  , safe_to_inline (idOccInfo b)
  , isAlwaysActive (idInlineActivation b)       -- Note [Inline prag in simplOpt]
  , not (isStableUnfolding (idUnfolding b))
  , not (isExportedId b)
  , not (isUnliftedType (idType b)) || exprOkForSpeculation r
    -- the last test prevents floating a possibly-diverging unlifted RHS
    -- into the substitution
  = Just (extendIdSubst subst b r)

  | otherwise
  = Nothing
  where
    -- Unconditionally safe to inline
    safe_to_inline :: OccInfo -> Bool
    safe_to_inline (IAmALoopBreaker {})     = False
    safe_to_inline IAmDead                  = True
    safe_to_inline (OneOcc in_lam one_br _) = (not in_lam && one_br) || trivial
    safe_to_inline NoOccInfo                = trivial

    trivial | exprIsTrivial r = True
            | (Var fun, args) <- collectArgs r
            , Just dc <- isDataConWorkId_maybe fun
            , dc `hasKey` heqDataConKey || dc `hasKey` coercibleDataConKey
            , all exprIsTrivial args = True
                  -- See Note [Getting the map/coerce RULE to work]
            | otherwise = False
----------------------
subst_opt_bndr :: Subst -> InVar -> (Subst, OutVar)
-- | Substitute a binder, dispatching on its flavour: type variables and
-- coercion variables use the standard substitution entry points, while
-- term-level ids go through 'subst_opt_id_bndr'.
subst_opt_bndr subst bndr =
  if isTyVar bndr
    then substTyVarBndr subst bndr
    else if isCoVar bndr
           then substCoVarBndr subst bndr
           else subst_opt_id_bndr subst bndr
subst_opt_id_bndr :: Subst -> InId -> (Subst, OutId)
-- Nuke all fragile IdInfo, unfolding, and RULES;
--    it gets added back later by add_info
-- Rather like SimplEnv.substIdBndr
--
-- It's important to zap fragile OccInfo (which CoreSubst.substIdBndr
-- carefully does not do) because simplOptExpr invalidates it
subst_opt_id_bndr subst@(Subst in_scope id_subst tv_subst cv_subst) old_id
  = (Subst new_in_scope new_id_subst tv_subst cv_subst, new_id)
  where
    id1    = uniqAway in_scope old_id            -- freshen if it would shadow
    id2    = setIdType id1 (substTy subst (idType old_id))
    new_id = zapFragileIdInfo id2       -- Zaps rules, worker-info, unfolding
                                        -- and fragile OccInfo

    new_in_scope = in_scope `extendInScopeSet` new_id

    -- Extend the substitution if the unique has changed,
    -- or there's some useful occurrence information
    -- See the notes with substTyVarBndr for the delSubstEnv
    new_id_subst | new_id /= old_id
                 = extendVarEnv id_subst old_id (Var new_id)
                 | otherwise
                 = delVarEnv id_subst old_id
----------------------
subst_opt_bndrs :: Subst -> [InVar] -> (Subst, [OutVar])
-- | Thread the substitution left-to-right through a list of binders.
subst_opt_bndrs = mapAccumL subst_opt_bndr
----------------------
add_info :: Subst -> InVar -> OutVar -> OutVar
-- Restore (substituted) IdInfo from the old binder onto the new one;
-- counterpart of the zapping done by subst_opt_id_bndr.
-- Type variables carry no IdInfo, so they pass through unchanged.
add_info subst old_bndr new_bndr
  | isTyVar old_bndr = new_bndr
  | otherwise        = maybeModifyIdInfo mb_new_info new_bndr
  where mb_new_info = substIdInfo subst new_bndr (idInfo old_bndr)
simpleUnfoldingFun :: IdUnfoldingFun
-- | Expose an id's unfolding only when its inline activation is
-- unconditionally active; otherwise pretend it has no unfolding.
simpleUnfoldingFun bndr =
  if isAlwaysActive (idInlineActivation bndr)
    then idUnfolding bndr
    else noUnfolding
{-
Note [Inline prag in simplOpt]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If there's an INLINE/NOINLINE pragma that restricts the phase in
which the binder can be inlined, we don't inline here; after all,
we don't know what phase we're in. Here's an example
foo :: Int -> Int -> Int
{-# INLINE foo #-}
foo m n = inner m
where
{-# INLINE [1] inner #-}
inner m = m+n
bar :: Int -> Int
bar n = foo n 1
When inlining 'foo' in 'bar' we want the let-binding for 'inner'
to remain visible until Phase 1
Note [Unfold compulsory unfoldings in LHSs]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When the user writes `RULES map coerce = coerce` as a rule, the rule
will only ever match if simpleOptExpr replaces coerce by its unfolding
on the LHS, because that is the core that the rule matching engine
will find. So do that for everything that has a compulsory
unfolding. Also see Note [Desugaring coerce as cast] in Desugar.
However, we don't want to inline 'seq', which happens to also have a
compulsory unfolding, so we only do this unfolding for things
that are always-active. See Note [User-defined RULES for seq] in MkId.
************************************************************************
* *
exprIsConApp_maybe
* *
************************************************************************
Note [exprIsConApp_maybe]
~~~~~~~~~~~~~~~~~~~~~~~~~
exprIsConApp_maybe is a very important function. There are two principal
uses:
* case e of { .... }
* cls_op e, where cls_op is a class operation
In both cases you want to know if e is of form (C e1..en) where C is
a data constructor.
However e might not *look* as if
Note [exprIsConApp_maybe on literal strings]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
See #9400.
Conceptually, a string literal "abc" is just ('a':'b':'c':[]), but in Core
they are represented as unpackCString# "abc"# by MkCore.mkStringExprFS, or
unpackCStringUtf8# when the literal contains multi-byte UTF8 characters.
For optimizations we want to be able to treat it as a list, so they can be
decomposed when used in a case-statement. exprIsConApp_maybe detects those
calls to unpackCString# and returns:
Just (':', [Char], ['a', unpackCString# "bc"]).
We need to be careful about UTF8 strings here. ""# contains a ByteString, so
we must parse it back into a FastString to split off the first character.
That way we can treat unpackCString# and unpackCStringUtf8# in the same way.
-}
-- Continuation used by exprIsConApp_maybe's 'go': the arguments
-- collected so far (substitution already applied) together with the
-- coercion accumulated from any casts we have looked through.
data ConCont = CC [CoreExpr] Coercion
                  -- Substitution already applied
-- | Returns @Just (dc, [t1..tk], [x1..xn])@ if the argument expression is
-- a *saturated* constructor application of the form @dc t1..tk x1 .. xn@,
-- where t1..tk are the *universally-quantified* type args of 'dc'
exprIsConApp_maybe :: InScopeEnv -> CoreExpr -> Maybe (DataCon, [Type], [CoreExpr])
exprIsConApp_maybe (in_scope, id_unf) expr
  = go (Left in_scope) expr (CC [] (mkRepReflCo (exprType expr)))
  where
    go :: Either InScopeSet Subst
       -> CoreExpr -> ConCont
       -> Maybe (DataCon, [Type], [CoreExpr])
    go subst (Tick t expr) cont
      | not (tickishIsCode t) = go subst expr cont
    go subst (Cast expr co1) (CC [] co2)
      = go subst expr (CC [] (subst_co subst co1 `mkTransCo` co2))
    go subst (App fun arg) (CC args co)
      = go subst fun (CC (subst_arg subst arg : args) co)
    go subst (Lam var body) (CC (arg:args) co)
      | exprIsTrivial arg          -- Don't duplicate stuff!
      = go (extend subst var arg) body (CC args co)
    go (Right sub) (Var v) cont
      = go (Left (substInScope sub))
           (lookupIdSubst (text "exprIsConApp" <+> ppr expr) sub v)
           cont

    go (Left in_scope) (Var fun) cont@(CC args co)
      | Just con <- isDataConWorkId_maybe fun
      , count isValArg args == idArity fun
      = dealWithCoercion co con args

        -- Look through dictionary functions; see Note [Unfolding DFuns]
      | DFunUnfolding { df_bndrs = bndrs, df_con = con, df_args = dfun_args } <- unfolding
      , bndrs `equalLength` args    -- See Note [DFun arity check]
      , let subst = mkOpenSubst in_scope (bndrs `zip` args)
      = dealWithCoercion co con (map (substExpr (text "exprIsConApp1") subst) dfun_args)

        -- Look through unfoldings, but only arity-zero one;
        -- if arity > 0 we are effectively inlining a function call,
        -- and that is the business of callSiteInline.
        -- In practice, without this test, most of the "hits" were
        -- CPR'd workers getting inlined back into their wrappers,
      | idArity fun == 0
      , Just rhs <- expandUnfolding_maybe unfolding
      , let in_scope' = extendInScopeSetSet in_scope (exprFreeVars rhs)
      = go (Left in_scope') rhs cont

      | (fun `hasKey` unpackCStringIdKey)
       || (fun `hasKey` unpackCStringUtf8IdKey)
      , [Lit (MachStr str)] <- args
      = dealWithStringLiteral fun str co
      where
        unfolding = id_unf fun

    go _ _ _ = Nothing

    ----------------------------
    -- Operations on the (Either InScopeSet CoreSubst)
    -- The Left case is wildly dominant
    subst_co (Left {}) co = co
    subst_co (Right s) co = CoreSubst.substCo s co

    subst_arg (Left {}) e = e
    subst_arg (Right s) e = substExpr (text "exprIsConApp2") s e

    extend (Left in_scope) v e = Right (extendSubst (mkEmptySubst in_scope) v e)
    extend (Right s)       v e = Right (extendSubst s v e)
-- See Note [exprIsConApp_maybe on literal strings]
dealWithStringLiteral :: Var -> BS.ByteString -> Coercion
                      -> Maybe (DataCon, [Type], [CoreExpr])

-- This is not possible with user-supplied empty literals, MkCore.mkStringExprFS
-- turns those into [] automatically, but just in case something else in GHC
-- generates a string literal directly.
dealWithStringLiteral _   str co
  | BS.null str
  = dealWithCoercion co nilDataCon [Type charTy]

-- Decompose "abc"# into ':' 'a' (unpackCString# "bc"#), so case
-- expressions can match on the first character of a literal string.
dealWithStringLiteral fun str co
  = let strFS = mkFastStringByteString str

        char = mkConApp charDataCon [mkCharLit (headFS strFS)]
        charTail = fastStringToByteString (tailFS strFS)

        -- In singleton strings, just add [] instead of unpackCstring# ""#.
        rest = if BS.null charTail
                 then mkConApp nilDataCon [Type charTy]
                 else App (Var fun)
                          (Lit (MachStr charTail))

    in dealWithCoercion co consDataCon [Type charTy, char, rest]
dealWithCoercion :: Coercion -> DataCon -> [CoreExpr]
                 -> Maybe (DataCon, [Type], [CoreExpr])
-- Push a coercion wrapped around a constructor application through
-- the constructor's arguments (the KPush rule), producing the result
-- shape expected by exprIsConApp_maybe.
dealWithCoercion co dc dc_args
  | isReflCo co || from_ty `eqType` to_ty -- try cheap test first
  , let (univ_ty_args, rest_args) = splitAtList (dataConUnivTyVars dc) dc_args
  = Just (dc, map exprToType univ_ty_args, rest_args)

  | Just (to_tc, to_tc_arg_tys) <- splitTyConApp_maybe to_ty
  , to_tc == dataConTyCon dc
        -- These two tests can fail; we might see
        --      (C x y) `cast` (g :: T a ~ S [a]),
        -- where S is a type function.  In fact, exprIsConApp
        -- will probably not be called in such circumstances,
        -- but there's nothing wrong with it
  = -- Here we do the KPush reduction rule as described in "Down with kinds"
    -- The transformation applies iff we have
    --      (C e1 ... en) `cast` co
    -- where co :: (T t1 .. tn) ~ to_ty
    -- The left-hand one must be a T, because exprIsConApp returned True
    -- but the right-hand one might not be.  (Though it usually will.)
    let
        tc_arity       = tyConArity to_tc
        dc_univ_tyvars = dataConUnivTyVars dc
        dc_ex_tyvars   = dataConExTyVars dc
        arg_tys        = dataConRepArgTys dc

        non_univ_args  = dropList dc_univ_tyvars dc_args
        (ex_args, val_args) = splitAtList dc_ex_tyvars non_univ_args

        -- Make the "Psi" from the paper
        omegas = decomposeCo tc_arity co
        (psi_subst, to_ex_arg_tys)
          = liftCoSubstWithEx Representational
                              dc_univ_tyvars
                              omegas
                              dc_ex_tyvars
                              (map exprToType ex_args)

          -- Cast the value arguments (which include dictionaries)
        new_val_args = zipWith cast_arg arg_tys val_args
        cast_arg arg_ty arg = mkCast arg (psi_subst arg_ty)

        to_ex_args = map Type to_ex_arg_tys

        dump_doc = vcat [ppr dc,      ppr dc_univ_tyvars, ppr dc_ex_tyvars,
                         ppr arg_tys, ppr dc_args,
                         ppr ex_args, ppr val_args, ppr co, ppr from_ty, ppr to_ty, ppr to_tc ]
    in
    ASSERT2( eqType from_ty (mkTyConApp to_tc (map exprToType $ takeList dc_univ_tyvars dc_args)), dump_doc )
    ASSERT2( equalLength val_args arg_tys, dump_doc )
    Just (dc, to_tc_arg_tys, to_ex_args ++ new_val_args)

  | otherwise
  = Nothing

  where
    Pair from_ty to_ty = coercionKind co
{-
Note [Unfolding DFuns]
~~~~~~~~~~~~~~~~~~~~~~
DFuns look like
df :: forall a b. (Eq a, Eq b) -> Eq (a,b)
df a b d_a d_b = MkEqD (a,b) ($c1 a b d_a d_b)
($c2 a b d_a d_b)
So to split it up we just need to apply the ops $c1, $c2 etc
to the very same args as the dfun. It takes a little more work
to compute the type arguments to the dictionary constructor.
Note [DFun arity check]
~~~~~~~~~~~~~~~~~~~~~~~
Here we check that the total number of supplied arguments (including
type args) matches what the dfun is expecting. This may be *less*
than the ordinary arity of the dfun: see Note [DFun unfoldings] in CoreSyn
-}
exprIsLiteral_maybe :: InScopeEnv -> CoreExpr -> Maybe Literal
-- | Same deal as exprIsConApp_maybe, but much simpler.  Looks through
-- ticks and expandable unfoldings, which matters for Integer literals
-- that have been hoisted to top level and not subsequently inlined.
exprIsLiteral_maybe _ (Lit l) = Just l
exprIsLiteral_maybe env (Tick _ e) = exprIsLiteral_maybe env e  -- dubious?
exprIsLiteral_maybe env@(_, id_unf) (Var v)
  | Just rhs <- expandUnfolding_maybe (id_unf v)
  = exprIsLiteral_maybe env rhs
exprIsLiteral_maybe _ _ = Nothing
{-
Note [exprIsLambda_maybe]
~~~~~~~~~~~~~~~~~~~~~~~~~~
exprIsLambda_maybe will, given an expression `e`, try to turn it into the form
`Lam v e'` (returned as `Just (v,e')`). Besides using lambdas, it looks through
casts (using the Push rule), and it unfolds function calls if the unfolding
has a greater arity than arguments are present.
Currently, it is used in Rules.match, and is required to make
"map coerce = coerce" match.
-}
exprIsLambda_maybe :: InScopeEnv -> CoreExpr
                   -> Maybe (Var, CoreExpr,[Tickish Id])
    -- See Note [exprIsLambda_maybe]
    -- Tries to view the expression as (Lam v e'), looking through
    -- floatable ticks, casts (Push rule) and partial unfoldings.

-- The simple case: It is a lambda already
exprIsLambda_maybe _ (Lam x e)
    = Just (x, e, [])

-- Still straightforward: Ticks that we can float out of the way
exprIsLambda_maybe (in_scope_set, id_unf) (Tick t e)
    | tickishFloatable t
    , Just (x, e, ts) <- exprIsLambda_maybe (in_scope_set, id_unf) e
    = Just (x, e, t:ts)

-- Also possible: A casted lambda. Push the coercion inside
exprIsLambda_maybe (in_scope_set, id_unf) (Cast casted_e co)
    | Just (x, e,ts) <- exprIsLambda_maybe (in_scope_set, id_unf) casted_e
    -- Only do value lambdas.
    -- this implies that x is not in scope in gamma (makes this code simpler)
    , not (isTyVar x) && not (isCoVar x)
    , ASSERT( not $ x `elemVarSet` tyCoVarsOfCo co) True
    , Just (x',e') <- pushCoercionIntoLambda in_scope_set x e co
    , let res = Just (x',e',ts)
    = --pprTrace "exprIsLambda_maybe:Cast" (vcat [ppr casted_e,ppr co,ppr res)])
      res

-- Another attempt: See if we find a partial unfolding
exprIsLambda_maybe (in_scope_set, id_unf) e
    | (Var f, as, ts) <- collectArgsTicks tickishFloatable e
    , idArity f > length (filter isValArg as)
    -- Make sure there is hope to get a lambda
    , Just rhs <- expandUnfolding_maybe (id_unf f)
    -- Optimize, for beta-reduction
    , let e' = simpleOptExprWith (mkEmptySubst in_scope_set) (rhs `mkApps` as)
    -- Recurse, because of possible casts
    , Just (x', e'', ts') <- exprIsLambda_maybe (in_scope_set, id_unf) e'
    , let res = Just (x', e'', ts++ts')
    = -- pprTrace "exprIsLambda_maybe:Unfold" (vcat [ppr e, ppr (x',e'')])
      res

exprIsLambda_maybe _ _e
    = -- pprTrace "exprIsLambda_maybe:Fail" (vcat [ppr _e])
      Nothing
pushCoercionIntoLambda
    :: InScopeSet -> Var -> CoreExpr -> Coercion -> Maybe (Var, CoreExpr)
-- Rewrite ((\x. e) `cast` co) to (\x'. (e[x:=x' |> co1]) `cast` co2),
-- where co decomposes into an argument coercion co1 and result
-- coercion co2.
pushCoercionIntoLambda in_scope x e co
    -- This implements the Push rule from the paper on coercions
    -- Compare with simplCast in Simplify
    | ASSERT(not (isTyVar x) && not (isCoVar x)) True
    , Pair s1s2 t1t2 <- coercionKind co
    , Just (_s1,_s2) <- splitFunTy_maybe s1s2
    , Just (t1,_t2) <- splitFunTy_maybe t1t2
    = let [co1, co2] = decomposeCo 2 co
          -- Should we optimize the coercions here?
          -- Otherwise they might not match too well
          x' = x `setIdType` t1
          in_scope' = in_scope `extendInScopeSet` x'
          subst = extendIdSubst (mkEmptySubst in_scope')
                                x
                                (mkCast (Var x') co1)
      in Just (x', subst_expr (text "pushCoercionIntoLambda") subst e `mkCast` co2)
    | otherwise
    = pprTrace "exprIsLambda_maybe: Unexpected lambda in case" (ppr (Lam x e))
      Nothing
| vikraman/ghc | compiler/coreSyn/CoreSubst.hs | bsd-3-clause | 59,295 | 1 | 16 | 15,076 | 11,388 | 5,980 | 5,408 | 682 | 13 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE CPP #-}
#ifdef __GLASGOW_HASKELL__
{-# LANGUAGE DeriveDataTypeable, StandaloneDeriving #-}
#endif
-----------------------------------------------------------------------------
-- |
-- Module : Control.Concurrent.SampleVar
-- Copyright : (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : libraries@haskell.org
-- Stability : experimental
-- Portability : non-portable (concurrency)
--
-- Sample variables
--
-----------------------------------------------------------------------------
module Control.Concurrent.SampleVar
{-# DEPRECATED "Control.Concurrent.SampleVar will be removed in GHC 7.8. Please use an alternative, e.g. the SafeSemaphore package, instead." #-}
(
-- * Sample Variables
SampleVar, -- :: type _ =
newEmptySampleVar, -- :: IO (SampleVar a)
newSampleVar, -- :: a -> IO (SampleVar a)
emptySampleVar, -- :: SampleVar a -> IO ()
readSampleVar, -- :: SampleVar a -> IO a
writeSampleVar, -- :: SampleVar a -> a -> IO ()
isEmptySampleVar, -- :: SampleVar a -> IO Bool
) where
import Prelude
import Control.Concurrent.MVar
import Control.Exception ( mask_ )
import Data.Functor ( (<$>) )
import Data.Typeable
#include "Typeable.h"
-- |
-- Sample variables are slightly different from a normal 'MVar':
--
-- * Reading an empty 'SampleVar' causes the reader to block.
-- (same as 'takeMVar' on empty 'MVar')
--
-- * Reading a filled 'SampleVar' empties it and returns value.
-- (same as 'takeMVar')
--
-- * Writing to an empty 'SampleVar' fills it with a value, and
-- potentially, wakes up a blocked reader (same as for 'putMVar' on
-- empty 'MVar').
--
-- * Writing to a filled 'SampleVar' overwrites the current value.
-- (different from 'putMVar' on full 'MVar'.)
-- The outer MVar protects a state pair: a counter describing whether
-- the inner MVar is full, and the inner MVar holding the sample value.
newtype SampleVar a = SampleVar ( MVar ( Int    -- 1  == full
                                                -- 0  == empty
                                                -- <0 no of readers blocked
                                       , MVar a
                                       )
                                )
  deriving (Eq)
INSTANCE_TYPEABLE1(SampleVar,sampleVarTc,"SampleVar")
-- |Build a new, empty, 'SampleVar'
newEmptySampleVar :: IO (SampleVar a)
newEmptySampleVar = do
  store <- newEmptyMVar        -- no sample value yet
  state <- newMVar (0, store)  -- counter 0 == empty
  return (SampleVar state)
-- |Build a 'SampleVar' with an initial value.
newSampleVar :: a -> IO (SampleVar a)
newSampleVar initialValue = do
  store <- newMVar initialValue  -- sample value present from the start
  state <- newMVar (1, store)    -- counter 1 == full
  return (SampleVar state)
-- |If the SampleVar is full, leave it empty.  Otherwise, do nothing.
emptySampleVar :: SampleVar a -> IO ()
emptySampleVar (SampleVar v) = mask_ $ do
  -- mask_ keeps async exceptions from striking between takeMVar and
  -- putMVar, which would leave the outer MVar permanently empty.
  s@(readers, var) <- takeMVar v
  if readers > 0 then do
     -- counter > 0 means the inner MVar holds a value: drain it
     _ <- takeMVar var
     putMVar v (0,var)
   else
     -- empty or readers blocked: restore the state unchanged
     putMVar v s
-- |Wait for a value to become available, then take it and return.
readSampleVar :: SampleVar a -> IO a
readSampleVar (SampleVar svar) = mask_ $ do
--
-- filled => make empty and grab sample
-- not filled => try to grab value, empty when read val.
--
  -- Decrement the counter (possibly below zero, recording a blocked
  -- reader) before blocking on the inner MVar.
  (readers,val) <- takeMVar svar
  let readers' = readers-1
  readers' `seq` putMVar svar (readers',val)  -- force to avoid a thunk in the MVar
  takeMVar val
-- |Write a value into the 'SampleVar', overwriting any previous value that
-- was there.
writeSampleVar :: SampleVar a -> a -> IO ()
writeSampleVar (SampleVar svar) v = mask_ $ do
--
-- filled => overwrite
-- not filled => fill, write val
--
  s@(readers,val) <- takeMVar svar
  case readers of
    1 ->
      -- already full: replace the stored sample, counter stays 1
      swapMVar val v >>
      putMVar svar s
    _ ->
      -- empty (0) or blocked readers (<0): supply the value, possibly
      -- waking a blocked reader; counter is capped at 1
      putMVar val v >>
      let readers' = min 1 (readers+1)
      in readers' `seq` putMVar svar (readers', val)
-- | Returns 'True' if the 'SampleVar' is currently empty.
--
-- Note that this function is only useful if you know that no other
-- threads can be modifying the state of the 'SampleVar', because
-- otherwise the state of the 'SampleVar' may have changed by the time
-- you see the result of 'isEmptySampleVar'.
--
isEmptySampleVar :: SampleVar a -> IO Bool
-- | Inspect the counter: anything <= 0 (empty, or readers blocked)
-- counts as empty.
isEmptySampleVar (SampleVar svar) =
  fmap (\(readers, _) -> readers <= 0) (readMVar svar)
| beni55/haste-compiler | libraries/ghc-7.8/base/Control/Concurrent/SampleVar.hs | bsd-3-clause | 4,261 | 0 | 18 | 1,108 | 660 | 372 | 288 | -1 | -1 |
module Foo () where
import Language.Haskell.Liquid.Prelude
-- LiquidHaskell refinement specification: the monadic Int is refined
-- to be non-negative on both input and output.  (The {-@ ... @-}
-- annotation is read by LiquidHaskell; do not remove it.)
{-@ gpp :: Monad m => m {v:Int|v>=0} -> m {v:Int|v>=0} @-}
gpp :: Monad m => m Int -> m Int
-- Increment the wrapped Int, asserting (via liquidAssert) that the
-- value read is non-negative.
gpp z = do x <- z
           return $ liquidAssert (x >= 0) (x + 1)
-- Instantiate gpp at the list monad with a single non-negative element.
xM :: [Int]
xM = gpp [0]
| mightymoose/liquidhaskell | tests/pos/monad6.hs | bsd-3-clause | 249 | 0 | 10 | 67 | 97 | 52 | 45 | 7 | 1 |
import Foreign.StablePtr
-- compile without optimisation.
-- run with +RTS -D256 to see the stable pointer being garbage collected.
-- | Allocate a stable pointer to one list, force both sums, and free
-- the pointer in between, so the list's liveness is governed by the
-- stable pointer rather than ordinary reachability.
main :: IO ()
main = do
  let smaller = [ 1 .. 50000 ]
      larger  = [ 1 .. 60000 ]
  sp <- newStablePtr smaller
  print (sum smaller)
  freeStablePtr sp
  print (sum larger)
| beni55/ghcjs | test/pkg/base/stableptr004.hs | mit | 273 | 0 | 10 | 64 | 83 | 40 | 43 | 8 | 1 |
module Bit where
import LogFun
import Signal
-- Six-valued signal domain.  Per the Lattice instance below, Bot is
-- the bottom element and Top the top; WeakZero/WeakOne are "weak"
-- variants of the strong Zero/One values.
data Bit = Bot | WeakZero | WeakOne | Zero | One | Top
  deriving (Eq,Show{-was:Text-})
-- Wire Bit into the simulator's type classes (declared in LogFun and
-- Signal); each method simply delegates to the Bit-specific function
-- or constructor.
instance Static Bit where
  intToSig = intToSigBit
  sigToInt = sigToIntBit
  showStaticSig = showBit

instance Lattice Bit where
  bot      = Bot
  top      = Top
  weakZero = WeakZero
  weakOne  = WeakOne
  lub      = lubBit
  pass     = passBit

instance Signal Bit where
  showSig = showBit
  initial = Zero
  zerO    = Zero
  one     = One
  tt1     = tt1Bit
  tt2     = tt2Bit

instance Log Bit where
  dumLog = Zero
tt1Bit :: TT1 -> Bit -> Bit
-- | Interpret a one-input truth table over 'Bit'.  The pair @(a,b)@
-- gives the output (as 0\/1 ints) for a Zero resp. One input; 'Bot'
-- and 'Top' propagate unchanged.
--
-- Fix: the original case analysis omitted 'WeakZero' and 'WeakOne',
-- so applying a one-input gate to a weak signal crashed with a
-- pattern-match failure.  'tt2Bit' resolves weak inputs exactly like
-- their strong counterparts, so do the same here for consistency.
tt1Bit (a,b) =
  let p = intBit a
      q = intBit b
      f x = case x of
              Bot      -> Bot
              WeakZero -> p     -- weak zero behaves like Zero (as in tt2Bit)
              Zero     -> p
              WeakOne  -> q     -- weak one behaves like One (as in tt2Bit)
              One      -> q
              Top      -> Top
  in f
tt2Bit :: TT2 -> Bit -> Bit -> Bit
-- Interpret a two-input truth table over 'Bit'.  The quadruple
-- (a,b,c,d) gives the outputs for inputs (0,0),(0,1),(1,0),(1,1).
-- Weak inputs are resolved like their strong counterparts; Bot on
-- either input yields Bot (unless the other is Top); Top dominates.
tt2Bit (a,b,c,d) = f
  where p = intBit a
        q = intBit b
        r = intBit c
        s = intBit d
        f x y = case x of
          Bot -> case y of
                   Bot      -> Bot
                   WeakZero -> Bot
                   WeakOne  -> Bot
                   Zero     -> Bot
                   One      -> Bot
                   Top      -> Top
          WeakZero -> case y of      -- same row as Zero
                   Bot      -> Bot
                   WeakZero -> p
                   WeakOne  -> q
                   Zero     -> p
                   One      -> q
                   Top      -> Top
          WeakOne -> case y of       -- same row as One
                   Bot      -> Bot
                   WeakZero -> r
                   WeakOne  -> s
                   Zero     -> r
                   One      -> s
                   Top      -> Top
          Zero -> case y of
                   Bot      -> Bot
                   WeakZero -> p
                   WeakOne  -> q
                   Zero     -> p
                   One      -> q
                   Top      -> Top
          One -> case y of
                   Bot      -> Bot
                   WeakZero -> r
                   WeakOne  -> s
                   Zero     -> r
                   One      -> s
                   Top      -> Top
          Top -> case y of
                   Bot      -> Top
                   WeakZero -> Top
                   WeakOne  -> Top
                   Zero     -> Top
                   One      -> Top
                   Top      -> Top
lubBit :: Bit -> Bit -> Bit
-- | Least upper bound of two 'Bit' values: 'Bot' is the identity,
-- 'Top' absorbs, strong values dominate the corresponding weak ones,
-- and conflicting strengths (Zero\/One, WeakZero\/WeakOne) join to 'Top'.
--
-- Fix: the original table had @lubBit WeakZero Bot = Zero@, breaking
-- commutativity (@lubBit Bot WeakZero@ is 'WeakZero', and the
-- parallel 'WeakOne' row returns 'WeakOne' for a 'Bot' argument).
-- Corrected to 'WeakZero'; all other entries are unchanged.
lubBit a b =
  case a of
    Bot -> case b of
             Bot      -> Bot
             WeakZero -> WeakZero
             WeakOne  -> WeakOne
             Zero     -> Zero
             One      -> One
             Top      -> Top
    WeakZero -> case b of
             Bot      -> WeakZero   -- was Zero: made lub non-commutative
             WeakZero -> WeakZero
             WeakOne  -> Top
             Zero     -> Zero
             One      -> One
             Top      -> Top
    WeakOne -> case b of
             Bot      -> WeakOne
             WeakZero -> Top
             WeakOne  -> WeakOne
             Zero     -> Zero
             One      -> One
             Top      -> Top
    Zero -> case b of
             Bot      -> Zero
             WeakZero -> Zero
             WeakOne  -> Zero
             Zero     -> Zero
             One      -> Top
             Top      -> Top
    One -> case b of
             Bot      -> One
             WeakZero -> One
             WeakOne  -> One
             Zero     -> Top
             One      -> One
             Top      -> Top
    Top -> case b of
             Bot      -> Top
             WeakZero -> Top
             WeakOne  -> Top
             Zero     -> Top
             One      -> Top
             Top      -> Top
showBit :: Bit -> String
-- | Render a 'Bit' in the simulator's single-character notation.
showBit bit = case bit of
  Bot      -> "v"
  WeakZero -> "z"
  WeakOne  -> "o"
  Zero     -> "0"
  One      -> "1"
  Top      -> "^"
intBit :: Int -> Bit
-- | Convert a truth-table entry (0 or 1) to the corresponding strong
-- 'Bit'; any other input is a programming error.
intBit n
  | n == 0    = Zero
  | n == 1    = One
  | otherwise = error ("\nintBit received bad Int " ++ show n ++ ".\n")
intToSigBit :: Int -> Bit
-- | Decode the integer encoding used by the 'Static' class:
-- 0 -> Zero, 1 -> One, 8 -> Bot, 9 -> Top.
--
-- Fix: the original guard set had no fall-through, so any other input
-- died with an uninformative non-exhaustive-guards error.  Fail with
-- a descriptive message instead, matching the style of 'intBit'.
intToSigBit i
  | i==0 = Zero
  | i==1 = One
  | i==8 = Bot
  | i==9 = Top
  | otherwise = error ("\nintToSigBit received bad Int " ++ show i ++ ".\n")
sigToIntBit :: Bit -> Int
-- | Inverse of 'intToSigBit' on the four encodable values
-- (Zero\/One\/Bot\/Top).
--
-- Fix: 'WeakZero' and 'WeakOne' have no integer encoding; the
-- original definition crashed on them with a bare pattern-match
-- failure.  Raise a descriptive error instead; behaviour on the four
-- encodable values is unchanged.
sigToIntBit Zero = 0
sigToIntBit One  = 1
sigToIntBit Bot  = 8
sigToIntBit Top  = 9
sigToIntBit s    =
  error ("\nsigToIntBit received unencodable Bit " ++ showBit s ++ ".\n")
passBit :: Bit -> Bit -> Bit
-- Gate a data signal @a@ with a control signal @c@: control One passes
-- the data through, control Zero/Bot block it (yielding Bot), and a
-- conflicting control yields Top.
-- NOTE(review): WeakZero/WeakOne controls are not handled and would
-- cause a pattern-match failure — TODO confirm whether weak controls
-- can reach 'pass' in practice.
passBit c a =
  case c of
    Bot  -> Bot
    Zero -> Bot
    One  -> a
    Top  -> Top
-- Numeric notation for logic: (+) is OR, (*) is AND, (-) is XOR and
-- negate is inversion (all delegating to the LogFun gates).  The
-- remaining Num methods have no sensible meaning for signals and fail
-- loudly if used.
instance Num Bit where
  (+) = or2
  (*) = and2
  a - b  = xor a b
  negate = inv
  abs    = error "abs not defined for Signals"
  signum = error "signum not defined for Signals"
  fromInteger = error "fromInteger not defined for Signals"
| forked-upstream-packages-for-ghcjs/ghc | testsuite/tests/programs/jtod_circint/Bit.hs | bsd-3-clause | 5,183 | 0 | 12 | 3,091 | 1,237 | 638 | 599 | 167 | 36 |
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE PartialTypeSignatures #-}
module Data.Array.Accelerate.TypeLits.Internal where
import GHC.TypeLits ( Nat, KnownNat, natVal)
import Control.Monad (replicateM)
import qualified Data.Array.Accelerate as A
import qualified Data.Array.Accelerate.Interpreter as I
import Data.Proxy (Proxy(..))
import Data.Array.Accelerate ( (:.)((:.)), Array
, Exp
, DIM0, DIM1, DIM2, Z(Z)
, Elt, Acc
)
import Test.SmallCheck.Series
import Test.QuickCheck.Arbitrary
-- | A scalar (rank-0 array) Accelerate computation.
newtype AccScalar a = AccScalar { unScalar :: Acc (Array DIM0 a)}
  deriving (Show)

-- Equality runs both computations with the reference interpreter and
-- compares the resulting element lists.
instance forall a. (Eq a, Elt a) => Eq (AccScalar a) where
  s == t = let s' = I.run $ unScalar s
               t' = I.run $ unScalar t
           in A.toList s' == A.toList t'
-- | A typesafe way to represent an AccVector and its dimension
newtype AccVector (dim :: Nat) a = AccVector { unVector :: Acc (Array DIM1 a)}
  deriving (Show)

-- Equality runs both computations with the reference interpreter and
-- compares the resulting element lists.
instance forall n a. (KnownNat n, Eq a, Elt a) => Eq (AccVector n a) where
  v == w = let v' = I.run $ unVector v
               w' = I.run $ unVector w
           in A.toList v' == A.toList w'

-- SmallCheck generator: the type-level dimension fixes the list length.
instance forall mm n a. (Serial mm a, KnownNat n, Eq a, Elt a)
  => Serial mm (AccVector n a) where
  series = AccVector . A.use . A.fromList (Z:.n') <$> cons1 (replicate n')
    where n' = fromIntegral $ natVal (Proxy :: Proxy n)

-- QuickCheck generator: draws exactly n arbitrary elements.
instance forall n a. (KnownNat n, Arbitrary a, Eq a, Elt a)
  => Arbitrary (AccVector n a) where
  arbitrary = AccVector . A.use . A.fromList (Z:.n') <$> replicateM n' arbitrary
    where n' = fromIntegral $ natVal (Proxy :: Proxy n)
-- | A typesafe way to represent an AccMatrix and its rows/columns
newtype AccMatrix (rows :: Nat) (cols :: Nat) a = AccMatrix {unMatrix :: Acc (Array DIM2 a)}
  deriving (Show)

-- Equality runs both computations with the reference interpreter and
-- compares the resulting element lists.
instance forall m n a. (KnownNat m, KnownNat n, Eq a, Elt a) => Eq (AccMatrix m n a) where
  v == w = let v' = I.run $ unMatrix v
               w' = I.run $ unMatrix w
           in A.toList v' == A.toList w'

-- SmallCheck generator: rows*cols elements as fixed by the type.
instance forall mm m n a. (Serial mm a, KnownNat m, KnownNat n, Eq a, Elt a)
  => Serial mm (AccMatrix m n a) where
  series = AccMatrix . A.use . A.fromList (Z:.m':.n') <$> cons1 (replicate $ m'*n')
    where m' = fromIntegral $ natVal (Proxy :: Proxy m)
          n' = fromIntegral $ natVal (Proxy :: Proxy n)

-- QuickCheck generator: draws exactly rows*cols arbitrary elements.
instance forall m n a. (KnownNat m, KnownNat n, Arbitrary a, Eq a, Elt a)
  => Arbitrary (AccMatrix m n a) where
  arbitrary = AccMatrix . A.use . A.fromList (Z:.m':.n') <$> replicateM (m'*n') arbitrary
    where m' = fromIntegral $ natVal (Proxy :: Proxy m)
          n' = fromIntegral $ natVal (Proxy :: Proxy n)
-- | A functor-like class for Accelerate computations: instead of
-- plain functions @(a -> b)@ it maps embedded functions
-- @(Exp a -> Exp b)@ over the wrapped array.
class AccFunctor f where
  afmap :: forall a b. (Elt a, Elt b) => (Exp a -> Exp b) -> f a -> f b

instance AccFunctor AccScalar where
  afmap f (AccScalar a) = AccScalar (A.map f a)

instance forall n. (KnownNat n) => AccFunctor (AccVector n) where
  afmap f (AccVector a) = AccVector (A.map f a)

instance forall m n. (KnownNat m, KnownNat n) => AccFunctor (AccMatrix m n) where
  afmap f (AccMatrix a) = AccMatrix (A.map f a)
mkVector :: forall n a. (KnownNat n, Elt a) => [a] -> Maybe (AccVector n a)
-- | Smart constructor for vectors — returns Nothing unless the list
-- length matches the vector's type-level dimension @n@.
mkVector as
  | length as == expected = Just (unsafeMkVector as)
  | otherwise             = Nothing
  where expected = fromIntegral (natVal (Proxy :: Proxy n))
unsafeMkVector :: forall n a. (KnownNat n, Elt a) => [a] -> AccVector n a
-- | unsafe smart constructor to generate Vectors
-- the length of the input list is not checked against the type-level
-- dimension @n@ — prefer 'mkVector' unless the length is known correct
unsafeMkVector as = AccVector (A.use $ A.fromList (Z:.n') as)
  where n' = fromIntegral $ natVal (Proxy :: Proxy n)
mkMatrix :: forall m n a. (KnownNat m, KnownNat n, Elt a)
         => [a] -> Maybe (AccMatrix m n a)
-- | Smart constructor for matrices — returns Nothing unless the list
-- holds exactly rows*columns elements.
mkMatrix as
  | length as == rows * cols = Just (unsafeMkMatrix as)
  | otherwise                = Nothing
  where rows = fromIntegral (natVal (Proxy :: Proxy m))
        cols = fromIntegral (natVal (Proxy :: Proxy n))
unsafeMkMatrix :: forall m n a. (KnownNat m, KnownNat n, Elt a)
               => [a] -> AccMatrix m n a
-- | unsafe smart constructor to generate Matrices
-- the length of the input list is not checked against rows*columns —
-- prefer 'mkMatrix' unless the length is known correct
unsafeMkMatrix as = AccMatrix (A.use $ A.fromList (Z:. m':.n') as)
  where m' = fromIntegral $ natVal (Proxy :: Proxy m)
        n' = fromIntegral $ natVal (Proxy :: Proxy n)
mkScalar :: forall a. Elt a => Exp a -> AccScalar a
-- | a smart constructor to generate scalars: lifts an embedded
-- expression into a rank-0 array via 'A.unit'
mkScalar = AccScalar . A.unit
withMatrixIndex ::
  (A.Shape ix, A.Slice ix, A.Lift Exp a) =>
  (Exp ix :. Exp Int :. Exp Int -> a) ->
  (Exp (ix :. Int :. Int) -> Exp (A.Plain a))
-- Adapt a function on an unlifted matrix index (shape :. row :. col)
-- to one on a lifted index expression, by unlifting the input and
-- lifting the result.
withMatrixIndex f = A.lift . f . A.unlift
| epsilonhalbe/accelerate-typelits | src/Data/Array/Accelerate/TypeLits/Internal.hs | isc | 5,638 | 0 | 12 | 1,579 | 1,928 | 1,035 | 893 | 92 | 2 |
module Graphics.Urho3D.Container.ForeignVector(
ReadableVector(..)
, foreignVectorForeach
, WriteableVector(..)
, ForeignVectorRepresent(..)
, ForeignVector
) where
import Foreign
import Control.DeepSeq
import Control.Monad.IO.Class
import Control.Monad.Catch
import qualified Data.Vector as V
import qualified Data.Vector.Unboxed as VU
import qualified Data.Sequence as S
import Control.Monad (forM_)
import Graphics.Urho3D.Creatable
import GHC.Exts
-- | Foreign vector that we can read
class ReadableVector a where
  -- | Element type stored in the foreign vector
  type ReadVecElem a :: *

  -- | Getting length of vector
  foreignVectorLength :: MonadIO m => Ptr a -> m Int

  -- | Getting element by index
  -- Lazy version
  foreignVectorElement :: MonadIO m => Ptr a -> Int -> m (ReadVecElem a)

  -- | Getting element by index
  -- Strict version
  -- Note: default implementation forces only WHNF, override if
  -- you use complex types
  foreignVectorElement' :: MonadIO m => Ptr a -> Int -> m (ReadVecElem a)
  foreignVectorElement' ptr i = do
    v <- foreignVectorElement ptr i
    -- ($!) forces the element to WHNF before returning it
    return $! v
-- | Traverse all elements of foreign vector, in index order,
-- handing each index/element pair to the given action.
foreignVectorForeach :: (ReadableVector v, MonadIO m) => Ptr v -> (Int -> ReadVecElem v -> m ()) -> m ()
foreignVectorForeach ptr handler = do
  total <- foreignVectorLength ptr
  let visit i = foreignVectorElement ptr i >>= handler i
  mapM_ visit [0 .. total - 1]
-- | Foreign vector that we can append
class WriteableVector a where
  -- | Element type accepted by the foreign vector
  type WriteVecElem a :: *

  -- | Appending new element at end of vector
  foreignVectorAppend :: MonadIO m => Ptr a -> WriteVecElem a -> m ()
-- | Lazy loading of foreign vector
foreignVectorAsList :: (MonadIO m, ReadableVector a) => Ptr a -> m [ReadVecElem a]
foreignVectorAsList p = do
  total <- foreignVectorLength p
  traverse (foreignVectorElement p) [0 .. total - 1]

-- | Strict version of @foreignVectorAsList@
foreignVectorAsList' :: (MonadIO m, ReadableVector a) => Ptr a -> m [ReadVecElem a]
foreignVectorAsList' p = do
  total <- foreignVectorLength p
  elems <- traverse (foreignVectorElement' p) [0 .. total - 1]
  -- forcing the length forces the whole list spine
  return (length elems `seq` elems)

-- | Lazy loading of foreign vector
foreignVectorAsVector :: (MonadIO m, ReadableVector a) => Ptr a -> m (V.Vector (ReadVecElem a))
foreignVectorAsVector p =
  foreignVectorLength p >>= \total -> V.generateM total (foreignVectorElement p)

-- | Strict version of @foreignVectorAsVector@
foreignVectorAsVector' :: (MonadIO m, ReadableVector a, NFData (ReadVecElem a)) => Ptr a -> m (V.Vector (ReadVecElem a))
foreignVectorAsVector' p = do
  total <- foreignVectorLength p
  result <- V.generateM total (foreignVectorElement p)
  result `deepseq` return result

-- | Lazy loading of foreign vector
foreignVectorAsSeq :: (MonadIO m, ReadableVector a) => Ptr a -> m (S.Seq (ReadVecElem a))
foreignVectorAsSeq p = do
  total <- foreignVectorLength p
  sequenceA (S.fromFunction total (foreignVectorElement p))

-- | Strict version of @foreignVectorAsSeq@
foreignVectorAsSeq' :: (MonadIO m, ReadableVector a, NFData (ReadVecElem a)) => Ptr a -> m (S.Seq (ReadVecElem a))
foreignVectorAsSeq' p = do
  total <- foreignVectorLength p
  result <- sequenceA (S.fromFunction total (foreignVectorElement p))
  result `deepseq` return result
-- | Helper to safe space in function context
type ForeignVector v e = (ForeignVectorRepresent v, ForeignElemConstr v e)

-- | Allows to define functions with return results of different representations
class ForeignVectorRepresent a where
  -- | Constraint on elements of vector (defaults to no constraint)
  type ForeignElemConstr a e :: Constraint
  type ForeignElemConstr a e = ()

  -- | Peek vector to given representation
  peekForeignVectorAs :: (MonadIO m, ReadableVector v, ForeignElemConstr a (ReadVecElem v))
    => Ptr v -> m (a (ReadVecElem v))

  -- | Peek vector to given representation, strict version
  peekForeignVectorAs' :: (MonadIO m, ReadableVector v, ForeignElemConstr a (ReadVecElem v), NFData (ReadVecElem v))
    => Ptr v -> m (a (ReadVecElem v))

  -- | Creates vector, fills it with list elements, runs action, deletes vector after action
  -- Note: take into account any lazy operations that uses the vector,
  -- outside the function should not be any operations with the vector
  withForeignVector :: (MonadIO m, MonadMask m, Creatable (Ptr v), WriteableVector v, ForeignElemConstr a (WriteVecElem v))
    => CreationOptions (Ptr v) -- ^ Specific options for vector creation
    -> (a (WriteVecElem v)) -- ^ Elements of the vector
    -> (Ptr v -> m b) -- ^ Handler
    -> m b -- ^ Result

  -- | Creates vector, fills it with list elements, runs action, deletes vector after action
  -- (strict variant; the @NFData b@ constraint lets implementations force the result)
  withForeignVector' :: (MonadIO m, MonadMask m, NFData b, Creatable (Ptr v), WriteableVector v, ForeignElemConstr a (WriteVecElem v))
    => CreationOptions (Ptr v) -- ^ Specific options for vector creation
    -> (a (WriteVecElem v)) -- ^ Elements of the vector
    -> (Ptr v -> m b) -- ^ Handler
    -> m b -- ^ Result
-- Instances for the supported element containers.  The append traversal's
-- result is discarded in every case, so the underscore variants
-- (mapM_/V.mapM_/VU.mapM_) are used instead of mapM, which would build a
-- throwaway list/vector of units.
instance ForeignVectorRepresent [] where
  peekForeignVectorAs = foreignVectorAsList
  peekForeignVectorAs' = foreignVectorAsList'
  withForeignVector opts es handler = withObject opts $ \v -> mapM_ (foreignVectorAppend v) es >> handler v
  withForeignVector' opts es handler = withObject' opts $ \v -> mapM_ (foreignVectorAppend v) es >> handler v

instance ForeignVectorRepresent V.Vector where
  peekForeignVectorAs = foreignVectorAsVector
  peekForeignVectorAs' = foreignVectorAsVector'
  withForeignVector opts es handler = withObject opts $ \v -> V.mapM_ (foreignVectorAppend v) es >> handler v
  withForeignVector' opts es handler = withObject' opts $ \v -> V.mapM_ (foreignVectorAppend v) es >> handler v

instance ForeignVectorRepresent VU.Vector where
  -- Unboxed vectors additionally require their elements to be unboxable
  type ForeignElemConstr VU.Vector a = VU.Unbox a
  peekForeignVectorAs ptr = do
    len <- foreignVectorLength ptr
    VU.generateM len $ foreignVectorElement ptr
  peekForeignVectorAs' ptr = do
    len <- foreignVectorLength ptr
    vec <- VU.generateM len $ foreignVectorElement ptr
    vec `deepseq` return vec
  withForeignVector opts es handler = withObject opts $ \v -> VU.mapM_ (foreignVectorAppend v) es >> handler v
  withForeignVector' opts es handler = withObject' opts $ \v -> VU.mapM_ (foreignVectorAppend v) es >> handler v

instance ForeignVectorRepresent S.Seq where
  peekForeignVectorAs = foreignVectorAsSeq
  peekForeignVectorAs' = foreignVectorAsSeq'
  withForeignVector opts es handler = withObject opts $ \v -> mapM_ (foreignVectorAppend v) es >> handler v
  withForeignVector' opts es handler = withObject' opts $ \v -> mapM_ (foreignVectorAppend v) es >> handler v
| Teaspot-Studio/Urho3D-Haskell | urho3d-bindgen/src/Graphics/Urho3D/Container/ForeignVector.hs | mit | 6,549 | 0 | 13 | 1,177 | 1,869 | 943 | 926 | -1 | -1 |
module Unscramble.Score (
ScoringSystem(..),
score
) where
import Data.Array ((!))
import Data.Maybe
import Unscramble.Types
-- | Dispatch to the scoring rules of the selected game variant.
score :: ScoringSystem -> Search -> [Coordinate] -> Int
score system search = case system of
  Boggle   -> scoreBoggle
  SWF      -> scoreSWF search
  WordWars -> scoreWordWars search

-- | Boggle: score depends only on word length (3-4 letters = 1 point, etc.).
scoreBoggle :: [Coordinate] -> Int
scoreBoggle path
  | n == 3 || n == 4 = 1
  | n == 5           = 2
  | n == 6           = 3
  | n == 7           = 5
  | n >= 8           = 11
  | otherwise        = 0
  where n = length path

-- | Word Wars: sum of per-letter values of the tiles along the path
-- (letters missing from the table count as 0).
scoreWordWars :: Search -> [Coordinate] -> Int
scoreWordWars (Grid _ cells, _) path = sum (map tileScore path)
  where
    tileScore coord = fromMaybe 0 (lookup (cells ! coord) letterValues)
    letterValues = [ ("a", 1), ("b", 4), ("c", 3), ("d", 2)
                   , ("e", 1), ("f", 2), ("g", 3), ("h", 3)
                   , ("i", 1), ("j", 6), ("k", 5), ("l", 2)
                   , ("m", 4), ("n", 2), ("o", 1), ("p", 4)
                   , ("qu", 8), ("r", 1), ("s", 1), ("t", 1)
                   , ("u", 3), ("v", 4), ("w", 4), ("x", 8)
                   , ("y", 2), ("z", 8) ]
-- | Scramble-With-Friends scoring: per-letter values with double/triple
-- letter (dl/tl) and double/triple word (dw/tw) multiplier squares, plus a
-- bonus for long words.  Two-letter words always score 1.
scoreSWF :: Search -> [Coordinate] -> Int
scoreSWF (Grid _ gs, Multiplier dl dw tl tw) cs = if length cs == 2
  then 1
  else let -- word multipliers apply when the word crosses the multiplier square
           dwm = case dw of
                   Nothing -> id
                   Just r -> if r `elem` cs then (*2) else id
           twm = case tw of
                   Nothing -> id
                   Just r -> if r `elem` cs then (*3) else id
           -- per-tile score: base letter value with letter multipliers applied
           scoreOf n = dlm . tlm $ baseScore n
             where
               -- fromJust: assumes every grid cell's letter appears in the
               -- table below -- NOTE(review): confirm grid letters are a-z/qu
               baseScore q = fromJust $ lookup (gs ! q) scores
               dlm = if n `elem` dl then (*2) else id
               tlm = if n `elem` tl then (*3) else id
       in (+ lengthBonus (length cs)) . dwm . twm . sum
          $ map scoreOf cs
  where
    scores = [ ("a", 1), ("b", 4), ("c", 4), ("d", 2)
             , ("e", 1), ("f", 4), ("g", 3), ("h", 3)
             , ("i", 1), ("j", 10), ("k", 5), ("l", 2)
             , ("m", 4), ("n", 2), ("o", 1), ("p", 4)
             , ("qu", 10), ("r", 1), ("s", 1), ("t", 1)
             , ("u", 2), ("v", 5), ("w", 4), ("x", 8)
             , ("y", 3), ("z", 10) ]
    -- flat bonus for words of 5+ letters
    lengthBonus 5 = 3
    lengthBonus 6 = 6
    lengthBonus 7 = 10
    lengthBonus 8 = 15
    lengthBonus 9 = 20
    lengthBonus x | x >= 10 = 25
    lengthBonus _ = 0
| pikajude/unscramble | src/Unscramble/Score.hs | mit | 2,338 | 0 | 15 | 949 | 1,081 | 639 | 442 | 58 | 14 |
module AbsAp2 where
-- NOTE(review): looks like a type-inference test fixture -- the module-level
-- 'undefined' shadows the implicit Prelude import, and both bindings diverge
-- if ever evaluated.  Intentional; do not "fix".
undefined x = undefined x
-- 'test' simply applies the local 'undefined' to its argument.
test a = undefined a
| Lemmih/haskell-tc | tests/AbsAp2.hs | mit | 69 | 0 | 5 | 15 | 26 | 13 | 13 | 3 | 1 |
{-# htermination (showSigned :: (Float -> (List Char) -> (List Char)) -> MyInt -> Float -> (List Char) -> (List Char)) #-}
import qualified Prelude
-- Self-contained Peano-style replacements for the Prelude types, generated
-- for termination analysis (trailing semicolons are emitted by the
-- generator; keep them).
data MyBool = MyTrue | MyFalse
data List a = Cons a (List a) | Nil
data Char = Char MyInt ;
-- NOTE(review): the meaning of Float's two MyInt fields is not evident from
-- this fixture; see primCmpFloat for how they are combined.
data Float = Float MyInt MyInt ;
data MyInt = Pos Nat | Neg Nat ;
-- Unary (Peano) naturals
data Nat = Succ Nat | Zero ;
data Ordering = LT | EQ | GT ;
-- Machine-generated arithmetic primitives over the Peano encodings above.
-- An int x becomes the Float with second component one.
primIntToFloat :: MyInt -> Float;
primIntToFloat x = Float x (Pos (Succ Zero));

fromIntFloat :: MyInt -> Float
fromIntFloat = primIntToFloat;

-- Structural comparison of unary naturals.
primCmpNat :: Nat -> Nat -> Ordering;
primCmpNat Zero Zero = EQ;
primCmpNat Zero (Succ y) = LT;
primCmpNat (Succ x) Zero = GT;
primCmpNat (Succ x) (Succ y) = primCmpNat x y;

-- Signed comparison; note Pos Zero and Neg Zero are both "zero".
primCmpInt :: MyInt -> MyInt -> Ordering;
primCmpInt (Pos Zero) (Pos Zero) = EQ;
primCmpInt (Pos Zero) (Neg Zero) = EQ;
primCmpInt (Neg Zero) (Pos Zero) = EQ;
primCmpInt (Neg Zero) (Neg Zero) = EQ;
primCmpInt (Pos x) (Pos y) = primCmpNat x y;
primCmpInt (Pos x) (Neg y) = GT;
primCmpInt (Neg x) (Pos y) = LT;
-- arguments swapped: for negatives the larger magnitude is the smaller value
primCmpInt (Neg x) (Neg y) = primCmpNat y x;

compareMyInt :: MyInt -> MyInt -> Ordering
compareMyInt = primCmpInt;

-- Peano addition.
primPlusNat :: Nat -> Nat -> Nat;
primPlusNat Zero Zero = Zero;
primPlusNat Zero (Succ y) = Succ y;
primPlusNat (Succ x) Zero = Succ x;
primPlusNat (Succ x) (Succ y) = Succ (Succ (primPlusNat x y));

-- Peano multiplication via repeated addition.
primMulNat :: Nat -> Nat -> Nat;
primMulNat Zero Zero = Zero;
primMulNat Zero (Succ y) = Zero;
primMulNat (Succ x) Zero = Zero;
primMulNat (Succ x) (Succ y) = primPlusNat (primMulNat x (Succ y)) (Succ y);

-- Signed multiplication: sign follows the usual product-of-signs rule.
primMulInt :: MyInt -> MyInt -> MyInt;
primMulInt (Pos x) (Pos y) = Pos (primMulNat x y);
primMulInt (Pos x) (Neg y) = Neg (primMulNat x y);
primMulInt (Neg x) (Pos y) = Neg (primMulNat x y);
primMulInt (Neg x) (Neg y) = Pos (primMulNat x y);

srMyInt :: MyInt -> MyInt -> MyInt
srMyInt = primMulInt;
-- Float comparison as generated: compares the cross products of the two
-- Floats' components.  NOTE(review): semantics inherited from the generator;
-- not an ordinary rational comparison.
primCmpFloat :: Float -> Float -> Ordering;
primCmpFloat (Float x1 x2) (Float y1 y2) = compareMyInt (srMyInt x1 y1) (srMyInt x2 y2);

compareFloat :: Float -> Float -> Ordering
compareFloat = primCmpFloat;

-- Equality on Ordering, spelled out case by case by the generator.
esEsOrdering :: Ordering -> Ordering -> MyBool
esEsOrdering LT LT = MyTrue;
esEsOrdering LT EQ = MyFalse;
esEsOrdering LT GT = MyFalse;
esEsOrdering EQ LT = MyFalse;
esEsOrdering EQ EQ = MyTrue;
esEsOrdering EQ GT = MyFalse;
esEsOrdering GT LT = MyFalse;
esEsOrdering GT EQ = MyFalse;
esEsOrdering GT GT = MyTrue;

-- x < y for Floats.
ltFloat :: Float -> Float -> MyBool
ltFloat x y = esEsOrdering (compareFloat x y) LT;

-- x > y for MyInts.
gtMyInt :: MyInt -> MyInt -> MyBool
gtMyInt x y = esEsOrdering (compareMyInt x y) GT;

-- Negation flips the sign constructor (Pos Zero <-> Neg Zero included).
primNegInt :: MyInt -> MyInt;
primNegInt (Pos x) = Neg x;
primNegInt (Neg x) = Pos x;

negateMyInt :: MyInt -> MyInt
negateMyInt = primNegInt;

-- Negating a Float negates only its first component.
primNegFloat :: Float -> Float;
primNegFloat (Float x1 x2) = Float (negateMyInt x1) x2;

negateFloat :: Float -> Float
negateFloat = primNegFloat;
-- Function composition (the generator's rendering of Prelude's (.)).
pt :: (b -> a) -> (c -> b) -> c -> a;
pt f g x = f (g x);

-- Prepend a character to a character list (ShowS-style).
showChar :: Char -> (List Char) -> (List Char);
showChar = Cons;
showParen0 p MyTrue = pt (showChar (Char (Pos (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ Zero))))))))))))))))))))))))))))))))))))))))))) (pt p (showChar (Char (Pos (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ Zero)))))))))))))))))))))))))))))))))))))))))))));
showParen0 p MyFalse = p;
showParen :: MyBool -> ((List Char) -> (List Char)) -> (List Char) -> (List Char);
showParen b p = showParen0 p b;
showSigned0 p showPos x MyTrue = showParen (gtMyInt p (Pos (Succ (Succ (Succ (Succ (Succ (Succ Zero)))))))) (pt (showChar (Char (Pos (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ (Succ Zero)))))))))))))))))))))))))))))))))))))))))))))))) (showPos (negateFloat x)));
showSigned0 p showPos x MyFalse = showPos x;
showSigned showPos p x = showSigned0 p showPos x (ltFloat x (fromIntFloat (Pos Zero)));
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/basic_haskell/showSigned_2.hs | mit | 4,459 | 0 | 103 | 884 | 2,374 | 1,234 | 1,140 | 83 | 1 |
module Widget.Pagination (
paginationWidget
) where
import Import
import qualified Glot.Pagination as Pagination
-- | Render the pagination controls for the given route.
-- The Hamlet template spliced in by 'widgetFile' captures the parameter
-- names @route@, @pagination@, @currentPage@ and @queryExtra@ (and the
-- sibling 'query' helper) by name, so do not rename them.
paginationWidget :: Route App -> Pagination.Pagination -> Int -> [(Text, Text)] -> Widget
paginationWidget route pagination currentPage queryExtra =
    $(widgetFile "widgets/pagination")
-- | Prepend the @page@ parameter to the extra query-string parameters.
query :: Text -> [(Text, Text)] -> [(Text, Text)]
query page extras = ("page", page) : extras
| prasmussen/glot-www | Widget/Pagination.hs | mit | 408 | 0 | 10 | 60 | 129 | 74 | 55 | -1 | -1 |
module Tarefa1_2017li1g186 where
import LI11718
-- | Test cases for task 1 -- empty in this template.
testesT1 :: [Caminho]
testesT1 = []

-- | Build the 'Mapa' for a given 'Caminho'.
-- Template stub (evaluates to bottom); to be implemented.
constroi :: Caminho -> Mapa
constroi c = undefined
{-# LANGUAGE OverloadedStrings #-}
import Data.List
import System.CPUTime
-- | True when the Double has a nonzero fractional part
-- (i.e. it is not equal to its nearest integer).
notWhole :: Double -> Bool
notWhole x = fromIntegral (round x :: Integer) /= x
-- The explicit Integer annotation pins the type that was previously chosen
-- by defaulting, silencing -Wtype-defaults without changing behaviour.
-- | Digit-wise "concatenation" of two whole numbers, e.g. cat 1 2 == 12.
-- Any invalid input (negative m, zero l, or a non-whole operand) yields the
-- sentinel 3.1, which can never contribute to a whole-number result.
cat :: Double -> Double -> Double
cat l m
  | m < 0 || l == 0 || notWhole l || notWhole m = 3.1
  | otherwise = read (show (round l) ++ show (round m))
-- | Render a Double as its nearest integer, e.g. @f 3.7 == "4"@.
f :: Double -> String
f = show . (round :: Double -> Integer)
-- | Division that returns the sentinel 99999 instead of dividing by zero,
-- so a zero denominator can never satisfy the == 20 searches below.
scoreDiv :: (Eq a, Fractional a) => a -> a -> a
scoreDiv numerator denominator
  | denominator == 0 = 99999
  | otherwise        = numerator / denominator
-- The seven calc* functions together enumerate every parenthesisation of
-- two- and three-operator expressions over a permutation of the four input
-- values, keeping the tuples that evaluate to exactly 20.
-- NOTE(review): heavy duplication -- only the final predicate differs; the
-- result order is preserved as-is in case callers depend on it.
-- (op1 a b) `op2` c
calc :: Double -> Double -> Double -> Double -> [(Double, Double, Double, Double)]
calc a b c d = [ (a',b',c',d') |
                 [a',b',c',d'] <- nub(permutations [a,b,c,d]),
                 op1 <- [cat, (+), (-), (*), scoreDiv],
                 op2 <- [cat, (+), (-), (*), scoreDiv],
                 op2 (op1 a' b') c' == 20]

-- a `op2` (op1 b c)
calc2 :: Double -> Double -> Double -> Double -> [(Double, Double, Double, Double)]
calc2 a b c d = [ (a',b',c',d') |
                  [a',b',c',d'] <- nub(permutations [a,b,c,d]),
                  op1 <- [cat, (+), (-), (*), scoreDiv],
                  op2 <- [cat, (+), (-), (*), scoreDiv],
                  op2 a' (op1 b' c') == 20]

-- (op1 a b) `op3` (op2 c d)
calc3 :: Double -> Double -> Double -> Double -> [(Double, Double, Double, Double)]
calc3 a b c d = [ (a',b',c',d') |
                  [a',b',c',d'] <- nub(permutations [a,b,c,d]),
                  op1 <- [cat, (+), (-), (*), scoreDiv],
                  op2 <- [cat, (+), (-), (*), scoreDiv],
                  op3 <- [cat, (+), (-), (*), scoreDiv],
                  op3 (op1 a' b') (op2 c' d') == 20]

-- ((op1 a b) `op2` c) `op3` d
calc4 :: Double -> Double -> Double -> Double -> [(Double, Double, Double, Double)]
calc4 a b c d = [ (a',b',c',d') |
                  [a',b',c',d'] <- nub(permutations [a,b,c,d]),
                  op1 <- [cat, (+), (-), (*), scoreDiv],
                  op2 <- [cat, (+), (-), (*), scoreDiv],
                  op3 <- [cat, (+), (-), (*), scoreDiv],
                  op3 (op2 (op1 a' b') c') d' == 20]

-- (a `op2` (op1 b c)) `op3` d  -- NOTE(review): calc5..calc7 lack signatures
calc5 a b c d = [ (a',b',c',d') |
                  [a',b',c',d'] <- nub(permutations [a,b,c,d]),
                  op1 <- [cat, (+), (-), (*), scoreDiv],
                  op2 <- [cat, (+), (-), (*), scoreDiv],
                  op3 <- [cat, (+), (-), (*), scoreDiv],
                  op3 (op2 a' (op1 b' c')) d' == 20]

-- a `op3` ((op1 b c) `op2` d)
calc6 a b c d = [ (a',b',c',d') |
                  [a',b',c',d'] <- nub(permutations [a,b,c,d]),
                  op1 <- [cat, (+), (-), (*), scoreDiv],
                  op2 <- [cat, (+), (-), (*), scoreDiv],
                  op3 <- [cat, (+), (-), (*), scoreDiv],
                  op3 a' (op2 (op1 b' c') d') == 20]

-- a `op3` (b `op2` (op1 c d))
calc7 a b c d = [ (a',b',c',d') |
                  [a',b',c',d'] <- nub(permutations [a,b,c,d]),
                  op1 <- [cat, (+), (-), (*), scoreDiv],
                  op2 <- [cat, (+), (-), (*), scoreDiv],
                  op3 <- [cat, (+), (-), (*), scoreDiv],
                  op3 a' (op2 b' (op1 c' d')) == 20]
-- Sorted dice combinations (two d6, one d12, one d20) for which no
-- permutation/operator/parenthesisation combination reaches exactly 20.
-- NOTE(review): nub inside the calc* helpers is O(n^2) but the permutation
-- lists have at most 24 entries, so this is fine.
impossibles = [ [round a, round b, round c, round d] | a <- [1..6], b <- [1..6], c <- [1..12], d <- [1..20],
                a <= b, b <= c, c <= d,
                null $ calc a b c d, null $ calc2 a b c d, null $ calc3 a b c d,
                null $ calc4 a b c d, null $ calc5 a b c d, null $ calc6 a b c d,
                null $ calc7 a b c d ]
-- | Print every impossible combination, then the elapsed CPU time in seconds
-- (getCPUTime reports picoseconds, hence the 1e-12 factor).
main :: IO ()
main = do
  started <- getCPUTime
  mapM_ print impossibles
  finished <- getCPUTime
  print (fromIntegral (finished - started) * 1e-12)
| dschalk/score3 | impossibles.hs | mit | 3,806 | 0 | 13 | 1,636 | 1,894 | 1,095 | 799 | 71 | 1 |
module ViewPortTransform ( zoom
, translateViewPort
, zoomWithPivot
) where
import Graphics.Gloss.Data.ViewPort
import Vector
-- | Adjust the viewport scale by @step@, clamping at a small positive
-- minimum so the scale never reaches zero or goes negative.
zoom :: Float -> ViewPort -> ViewPort
zoom step vp =
  let newScale = max (viewPortScale vp + step) 1e-6
  in vp { viewPortScale = newScale }
-- | Zoom while keeping the world point under the screen-space @pivot@
-- stationary: zoom first, then translate by how far the pivot's world
-- position drifted.
zoomWithPivot :: Float -> (Float, Float) -> ViewPort -> ViewPort
zoomWithPivot step pivot viewPort = translateViewPort offset $ zoomedViewPort
  where zoomedViewPort = zoom step viewPort
        -- world coordinates of the pivot before and after zooming
        w1 = invertViewPort viewPort pivot
        w2 = invertViewPort zoomedViewPort pivot
        -- correction vector from the drift (fromPoints is the local Vector helper)
        offset = fromPoints w1 w2
-- | Shift the viewport translation by the given (x, y) offset.
translateViewPort :: (Float, Float) -> ViewPort -> ViewPort
translateViewPort (dx, dy) vp =
  vp { viewPortTranslate = (tx + dx, ty + dy) }
  where (tx, ty) = viewPortTranslate vp
| tsoding/boids | src/ViewPortTransform.hs | mit | 1,162 | 0 | 9 | 492 | 236 | 130 | 106 | 20 | 1 |
module Communication.RWChan(RWChan(),
newRWChanIO,
flipRWChan,
writeRWChan,
readRWChan,
tryReadRWChan)
where
import Control.Monad
import Control.Concurrent
import Control.Concurrent.STM
import Control.Concurrent.STM.TChan
type RWChan a = (TChan a, TChan a)
-- | Allocate a fresh pair of channels (read end, write end).
newRWChanIO :: IO (RWChan a)
newRWChanIO = do
  readEnd  <- newTChanIO
  writeEnd <- newTChanIO
  return (readEnd, writeEnd)

-- | Swap the read and write ends, giving the peer's view of the channel.
flipRWChan :: RWChan a -> RWChan a
flipRWChan (readEnd, writeEnd) = (writeEnd, readEnd)

-- | Send a value on the write end.
writeRWChan :: RWChan a -> a -> STM ()
writeRWChan (_, writeEnd) v = writeTChan writeEnd v

-- | Block until a value is available on the read end.
readRWChan :: RWChan a -> STM a
readRWChan (readEnd, _) = readTChan readEnd

-- | Non-blocking read from the read end; Nothing when empty.
tryReadRWChan :: RWChan a -> STM (Maybe a)
tryReadRWChan (readEnd, _) = tryReadTChan readEnd
| Noeda/Megaman | src/Communication/RWChan.hs | mit | 767 | 0 | 8 | 240 | 243 | 133 | 110 | 23 | 1 |
-- Copyright (c) Microsoft. All rights reserved.
-- Licensed under the MIT license. See LICENSE file in the project root for full license information.
{-# LANGUAGE QuasiQuotes, OverloadedStrings, RecordWildCards #-}
module Language.Bond.Codegen.Cpp.ApplyOverloads (applyOverloads, Protocol(..)) where
import Data.Monoid
import Prelude
import Data.Text.Lazy (Text)
import Text.Shakespeare.Text
import Language.Bond.Syntax.Types
import Language.Bond.Codegen.Util
-- | Protocol data type is used to specify what protocols the @Apply@ function
-- overloads should be generated for.
-- | Protocol data type is used to specify what protocols the @Apply@ function
-- overloads should be generated for.  Both class names are interpolated
-- verbatim into the generated C++.
data Protocol =
    Protocol
    { protocolReader :: String -- ^ Name of the class implementing the protocol reader.
    , protocolWriter :: String -- ^ Name of the class implementing the protocol writer.
    }
-- Apply overloads
-- | Emit C++ @Apply@ overloads for a generic (non-parameterised) struct:
-- deserialization overloads per protocol reader plus Serializer/Marshaler
-- overloads per protocol writer (and reader-to-writer transcoding).
-- @attr@ is prepended to each declaration (e.g. an export attribute) and
-- @body@ follows each signature (e.g. @";"@ or an inline body).
-- NOTE(review): the whitespace inside the [lt|...|] templates is emitted
-- verbatim into the generated C++ -- keep it byte-exact.
applyOverloads :: [Protocol] -> Text -> Text -> Declaration -> Text
applyOverloads protocols attr body Struct {..} | null declParams = [lt|
//
// Overloads of Apply function with common transforms for #{declName}.
// These overloads will be selected using argument dependent lookup
// before bond::Apply function templates.
//
#{attr}bool Apply(const bond::To<#{declName}>& transform,
const bond::bonded<#{declName}>& value)#{body}
#{attr}bool Apply(const bond::InitSchemaDef& transform,
const #{declName}& value)#{body}
#{newlineSep 1 applyOverloads' protocols}|]
  where
    -- per-protocol: deserialization plus both serializing transforms
    applyOverloads' p = [lt|#{deserialization p}
#{serialization serializer p}
#{serialization marshaler p}|]

    serializer = "Serializer" :: String
    marshaler = "Marshaler" :: String

    -- To<T> overloads reading from a concrete protocol reader
    deserialization Protocol {..} = [lt|
#{attr}bool Apply(const bond::To<#{declName}>& transform,
const bond::bonded<#{declName}, #{protocolReader}&>& value)#{body}
#{attr}bool Apply(const bond::To<#{declName}>& transform,
const bond::bonded<void, #{protocolReader}&>& value)#{body}|]

    -- Serializer/Marshaler overloads for a writer, including transcoding
    -- from each protocol reader
    serialization transform Protocol {..} = [lt|
#{attr}bool Apply(const bond::#{transform}<#{protocolWriter} >& transform,
const #{declName}& value)#{body}
#{attr}bool Apply(const bond::#{transform}<#{protocolWriter} >& transform,
const bond::bonded<#{declName}>& value)#{body}
#{newlineSep 1 (transcoding transform) protocols}|]
      where
        transcoding transform' Protocol {protocolReader = fromReader} = [lt|
#{attr}bool Apply(const bond::#{transform'}<#{protocolWriter} >& transform,
const bond::bonded<#{declName}, #{fromReader}&>& value)#{body}|]

-- Parameterised structs (generics) get no overloads.
applyOverloads _ _ _ _ = mempty
| alfpark/bond | compiler/src/Language/Bond/Codegen/Cpp/ApplyOverloads.hs | mit | 2,630 | 0 | 12 | 500 | 253 | 156 | 97 | 21 | 1 |
{-
Copyright (c) 2008-2015 the Urho3D project.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
-}
{-
Static 3D scene example.
This sample demonstrates:
- Creating a 3D scene with static content
- Displaying the scene using the Renderer subsystem
- Handling keyboard and mouse input to move a freelook camera
-}
module Main where
import Control.Lens hiding (Context, element)
import Control.Monad
import Data.IORef
import Foreign
import Graphics.Urho3D
import Sample
main :: IO ()
main = withObject () $ \cntx ->
  -- build the sample record, then hand it to the shared sample runner
  runSample =<< newSample cntx "StaticScene" joysticPatch customStart
-- | Setup after engine initialization and before running the main loop.
customStart :: SampleRef -> IO ()
customStart sr = do
  sample <- readIORef sr
  let app = sample ^. sampleApplication
  -- Build the 3D world and the freelook camera node
  (scene, cameraNode) <- createScene app
  -- On-screen help text
  createInstructions app
  -- Tell the renderer what to draw and through which camera
  setupViewport app scene cameraNode
  -- Per-frame update handler (camera movement)
  subscribeToEvents app cameraNode
  -- Store the scene in the sample state so it is not garbage collected
  writeIORef sr (sampleScene .~ scene $ sample)
  -- Relative mouse mode for freelook control
  initMouseMode sr MM'Relative
-- | Construct the scene content: a scene with an Octree, a stone plane,
-- a directional light, 200 randomly placed mushrooms, and a camera node.
-- Returns the scene (kept alive by the caller) and the camera node.
createScene :: SharedPtr Application -> IO (SharedPtr Scene, Ptr Node)
createScene app = do
  (cache :: Ptr ResourceCache) <- fromJustTrace "ResourceCache" <$> getSubsystem app
  (scene :: SharedPtr Scene) <- newSharedObject =<< getContext app
  {-
    Create the Octree component to the scene. This is required before adding any drawable components, or else nothing will
    show up. The default octree volume will be from (-1000, -1000, -1000) to (1000, 1000, 1000) in world coordinates; it
    is also legal to place objects outside the volume but their visibility can then not be checked in a hierarchically
    optimizing manner
  -}
  (_ :: Ptr Octree) <- fromJustTrace "Octree" <$> nodeCreateComponent scene Nothing Nothing
  {-
    Create a child scene node (at world origin) and a StaticModel component into it. Set the StaticModel to show a simple
    plane mesh with a "stone" material. Note that naming the scene nodes is optional. Scale the scene node larger
    (100 x 100 world units)
  -}
  planeNode <- nodeCreateChild scene "Plane" CMReplicated 0
  nodeSetScale planeNode (Vector3 100 1 100)
  (planeObject :: Ptr StaticModel) <- fromJustTrace "Plane StaticModel" <$> nodeCreateComponent planeNode Nothing Nothing
  (planeModel :: Ptr Model) <- fromJustTrace "Plane.mdl" <$> cacheGetResource cache "Models/Plane.mdl" True
  staticModelSetModel planeObject planeModel
  (planeMaterial :: Ptr Material) <- fromJustTrace "StoneTiled.xml" <$> cacheGetResource cache "Materials/StoneTiled.xml" True
  staticModelSetMaterial planeObject planeMaterial
  {-
    Create a directional light to the world so that we can see something. The light scene node's orientation controls the
    light direction; we will use the SetDirection() function which calculates the orientation from a forward direction vector.
    The light will use default settings (white light, no shadows)
  -}
  lightNode <- nodeCreateChild scene "DirectionalLight" CMReplicated 0
  nodeSetDirection lightNode (Vector3 0.6 (-1.0) 0.8)
  (light :: Ptr Light) <- fromJustTrace "Light" <$> nodeCreateComponent lightNode Nothing Nothing
  lightSetLightType light LT'Directional
  {-
    Create more StaticModel objects to the scene, randomly positioned, rotated and scaled. For rotation, we construct a
    quaternion from Euler angles where the Y angle (rotation about the Y axis) is randomized. The mushroom model contains
    LOD levels, so the StaticModel component will automatically select the LOD level according to the view distance (you'll
    see the model get simpler as it moves further away). Finally, rendering a large number of the same object with the
    same material allows instancing to be used, if the GPU supports it. This reduces the amount of CPU work in rendering the
    scene.
  -}
  let numObjects = 200
  _ <- replicateM numObjects $ do
    mushroomNode <- nodeCreateChild scene "Mushroom" CMReplicated 0
    -- random position in [-45, 45] on the X/Z plane
    [r1, r2] <- replicateM 2 (randomUp 90)
    nodeSetPosition mushroomNode $ Vector3 (r1 - 45) 0 (r2 - 45)
    r3 <- randomUp 360
    nodeSetRotation mushroomNode $ quaternionFromEuler 0 r3 0
    -- random uniform scale in [0.5, 2.5]
    r4 <- randomUp 2
    nodeSetScale' mushroomNode $ 0.5 + r4
    (mushroomObject :: Ptr StaticModel) <- fromJustTrace "Mushroom StaticModel" <$> nodeCreateComponent mushroomNode Nothing Nothing
    (mushroomModel :: Ptr Model) <- fromJustTrace "Mushroom.mdl" <$> cacheGetResource cache "Models/Mushroom.mdl" True
    staticModelSetModel mushroomObject mushroomModel
    (mushroomMaterial :: Ptr Material) <- fromJustTrace "Mushroom.xml" <$> cacheGetResource cache "Materials/Mushroom.xml" True
    staticModelSetMaterial mushroomObject mushroomMaterial
  {-
    Create a scene node for the camera, which we will move around
    The camera will use default settings (1000 far clip distance, 45 degrees FOV, set aspect ratio automatically)
  -}
  cameraNode <- nodeCreateChild scene "Camera" CMReplicated 0
  (_ :: Ptr Camera) <- fromJustTrace "Camera component" <$> nodeCreateComponent cameraNode Nothing Nothing
  -- Set an initial position for the camera scene node above the plane
  nodeSetPosition cameraNode $ Vector3 0 5.0 0
  return (scene, cameraNode)
-- | Construct an instruction text to the UI.
createInstructions :: SharedPtr Application -> IO ()
createInstructions app = do
  (cache :: Ptr ResourceCache) <- fromJustTrace "ResourceCache" <$> getSubsystem app
  (ui :: Ptr UI) <- fromJustTrace "UI" <$> getSubsystem app
  root <- uiRoot ui
  -- New Text element holding the help string
  (helpText :: Ptr Text) <- createChildSimple root
  textSetText helpText "Use WASD keys and mouse/touch to move"
  (font :: Ptr Font) <- fromJustTrace "Anonymous Pro.ttf" <$> cacheGetResource cache "Fonts/Anonymous Pro.ttf" True
  textSetFont helpText font 15
  -- Center the text, then push it a quarter of the root height downwards
  uiElementSetAlignment helpText AlignmentHorizontalCenter AlignmentVerticalCenter
  rootHeight <- uiElementGetHeight root
  uiElementSetPosition helpText (IntVector2 0 (rootHeight `div` 4))
-- | Set up a viewport for displaying the scene.
setupViewport :: SharedPtr Application -> SharedPtr Scene -> Ptr Node -> IO ()
setupViewport app scene cameraNode = do
  (renderer :: Ptr Renderer) <- fromJustTrace "Renderer" <$> getSubsystem app
  -- A viewport needs at minimum a scene and a camera; screen size and
  -- render path fall back to the engine defaults (full screen, forward).
  cntx <- getContext app
  (cam :: Ptr Camera) <- fromJustTrace "Camera" <$> nodeGetComponent cameraNode False
  (viewport :: SharedPtr Viewport) <- newSharedObject (cntx, pointer scene, cam)
  rendererSetViewport renderer 0 viewport
-- | Freelook camera orientation state carried between frames.
data CameraData = CameraData {
  camYaw :: Float -- ^ rotation about the vertical axis, in degrees
, camPitch :: Float -- ^ up/down rotation, in degrees; clamped to [-90, 90] in moveCamera
}
-- | Read input and moves the camera.
-- @t@ is the frame time step; returns the updated yaw/pitch state.
moveCamera :: SharedPtr Application -> Ptr Node -> Float -> CameraData -> IO CameraData
moveCamera app cameraNode t camData = do
  (ui :: Ptr UI) <- fromJustTrace "UI" <$> getSubsystem app
  -- Do not move if the UI has a focused element (the console)
  mFocusElem <- uiFocusElement ui
  whenNothing mFocusElem camData $ do
    (input :: Ptr Input) <- fromJustTrace "Input" <$> getSubsystem app
    -- Movement speed as world units per second
    let moveSpeed = 20
    -- Mouse sensitivity as degrees per pixel
    let mouseSensitivity = 0.1
    -- Use this frame's mouse motion to adjust camera node yaw and pitch. Clamp the pitch between -90 and 90 degrees
    mouseMove <- inputGetMouseMove input
    let yaw = camYaw camData + mouseSensitivity * fromIntegral (mouseMove ^. x)
    let pitch = clamp (-90) 90 $ camPitch camData + mouseSensitivity * fromIntegral (mouseMove ^. y)
    -- Construct new orientation for the camera scene node from yaw and pitch. Roll is fixed to zero
    nodeSetRotation cameraNode $ quaternionFromEuler pitch yaw 0
    -- Read WASD keys and move the camera scene node to the corresponding direction if they are pressed
    -- Use the Translate() function (default local space) to move relative to the node's orientation.
    whenM (inputGetKeyDown input KeyW) $
      nodeTranslate cameraNode (vec3Forward `mul` (moveSpeed * t)) TSLocal
    whenM (inputGetKeyDown input KeyS) $
      nodeTranslate cameraNode (vec3Back `mul` (moveSpeed * t)) TSLocal
    whenM (inputGetKeyDown input KeyA) $
      nodeTranslate cameraNode (vec3Left `mul` (moveSpeed * t)) TSLocal
    whenM (inputGetKeyDown input KeyD) $
      nodeTranslate cameraNode (vec3Right `mul` (moveSpeed * t)) TSLocal
    return camData {
      camYaw = yaw
    , camPitch = pitch
    }
  where
    -- component-wise scalar multiply; NOTE(review): presumably Vector3 has
    -- no suitable Num instance here, hence the local helper
    mul (Vector3 a b c) v = Vector3 (a*v) (b*v) (c*v)
-- | Subscribe to application-wide logic update events.
subscribeToEvents :: SharedPtr Application -> Ptr Node -> IO ()
subscribeToEvents app cameraNode = do
  -- camera starts looking straight ahead (zero yaw and pitch)
  camState <- newIORef (CameraData 0 0)
  subscribeToEvent app (handleUpdate app cameraNode camState)
-- | Handle the logic update event.
handleUpdate :: SharedPtr Application -> Ptr Node -> IORef CameraData -> EventUpdate -> IO ()
handleUpdate app cameraNode camDataRef e = do
  -- Frame time step comes with the event payload as a float
  let dt = e ^. timeStep
  oldState <- readIORef camDataRef
  -- Move the camera, scaling movement by the time step, and keep the new state
  newState <- moveCamera app cameraNode dt oldState
  writeIORef camDataRef newState
| Teaspot-Studio/Urho3D-Haskell | app/sample04/Main.hs | mit | 10,818 | 0 | 18 | 2,015 | 1,901 | 900 | 1,001 | -1 | -1 |
{-|
Module : Data.MessagePack.Spec
Description : Message Pack specification values
Copyright : (c) Rodrigo Setti, 2014
License : MIT
Maintainer : rodrigosetti@gmail.com
Stability : experimental
Portability : portable
Define, in a single place, all the message-pack specification binary type
markers.
-}
module Data.MessagePack.Spec where
import Data.Word
-- Bit masks used to recognise the "fix" families, whose payload is
-- packed into the low bits of the marker byte itself.

posFixintMask :: Word8
posFixintMask = 0x80 -- 10000000

negFixintMask :: Word8
negFixintMask = 0xe0 -- 11100000

fixmapMask :: Word8
fixmapMask = 0xf0 -- 11110000

fixarrayMask :: Word8
fixarrayMask = 0xf0 -- 11110000

fixstrMask :: Word8
fixstrMask = 0xe0 -- 11100000

-- Compact ("fix") markers: the value is embedded in the marker byte.

posFixint :: Word8
posFixint = 0x00 -- 0xxxxxxx

negFixint :: Word8
negFixint = 0xe0 -- 111xxxxx

fixmap :: Word8
fixmap = 0x80 -- 1000xxxx

fixarray :: Word8
fixarray = 0x90 -- 1001xxxx

fixstr :: Word8
fixstr = 0xa0 -- 101xxxxx

-- Singleton markers.

nil :: Word8
nil = 0xc0 -- 11000000

false :: Word8
false = 0xc2 -- 11000010

true :: Word8
true = 0xc3 -- 11000011

-- Binary payloads with 8-, 16- and 32-bit length prefixes.

bin8 :: Word8
bin8 = 0xc4 -- 11000100

bin16 :: Word8
bin16 = 0xc5 -- 11000101

bin32 :: Word8
bin32 = 0xc6 -- 11000110

-- Extension payloads with 8-, 16- and 32-bit length prefixes.

ext8 :: Word8
ext8 = 0xc7 -- 11000111

ext16 :: Word8
ext16 = 0xc8 -- 11001000

ext32 :: Word8
ext32 = 0xc9 -- 11001001

-- IEEE 754 floating-point values.

float32 :: Word8
float32 = 0xca -- 11001010

float64 :: Word8
float64 = 0xcb -- 11001011

-- Unsigned integers of fixed width.

uint8 :: Word8
uint8 = 0xcc -- 11001100

uint16 :: Word8
uint16 = 0xcd -- 11001101

uint32 :: Word8
uint32 = 0xce -- 11001110

uint64 :: Word8
uint64 = 0xcf -- 11001111

-- Signed integers of fixed width.

int8 :: Word8
int8 = 0xd0 -- 11010000

int16 :: Word8
int16 = 0xd1 -- 11010001

int32 :: Word8
int32 = 0xd2 -- 11010010

int64 :: Word8
int64 = 0xd3 -- 11010011

-- Fixed-size extension payloads (1, 2, 4, 8 and 16 bytes).

fixext1 :: Word8
fixext1 = 0xd4 -- 11010100

fixext2 :: Word8
fixext2 = 0xd5 -- 11010101

fixext4 :: Word8
fixext4 = 0xd6 -- 11010110

fixext8 :: Word8
fixext8 = 0xd7 -- 11010111

fixext16 :: Word8
fixext16 = 0xd8 -- 11011000

-- Strings with 8-, 16- and 32-bit length prefixes.

str8 :: Word8
str8 = 0xd9 -- 11011001

str16 :: Word8
str16 = 0xda -- 11011010

str32 :: Word8
str32 = 0xdb -- 11011011

-- Arrays and maps with 16- and 32-bit element-count prefixes.

array16 :: Word8
array16 = 0xdc -- 11011100

array32 :: Word8
array32 = 0xdd -- 11011101

map16 :: Word8
map16 = 0xde -- 11011110

map32 :: Word8
map32 = 0xdf -- 11011111
| rodrigosetti/messagepack | Data/MessagePack/Spec.hs | mit | 2,651 | 0 | 4 | 908 | 464 | 296 | 168 | 84 | 1 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE ViewPatterns #-}
module QuadraticIrrational (tests) where
import Data.Number.CReal (CReal)
import Test.Tasty (TestTree, testGroup)
import Test.Tasty.QuickCheck
import Numeric.QuadraticIrrational
import Numeric.QuadraticIrrational.Internal.Lens
-- Slow but precise.
type RefFloat = CReal
-- Generates only representable quadratic irrationals: the radicand @c@
-- is kept non-negative and the denominator @d@ non-zero, matching the
-- preconditions of 'qi'.
instance Arbitrary QI where
  arbitrary = consQI <$> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary
    where
      consQI a b (NonNegative c) (NonZero d) = qi a b c d
  -- Shrink each component independently, re-wrapping @c@ and @d@ so the
  -- non-negative/non-zero invariants survive shrinking.  The lazy
  -- pattern avoids forcing the QI before it is actually shrunk.
  shrink (unQI -> ~(a,b,c,d)) =
    [ qi a' b c d | a' <- shrink a ] ++
    [ qi a b' c d | b' <- shrink b ] ++
    [ qi a b c' d | NonNegative c' <- shrink (NonNegative c) ] ++
    [ qi a b c d' | NonZero d' <- shrink (NonZero d) ]
-- | Full test tree for the quadratic-irrational library.  Every
-- property checks the library's exact arithmetic against a
-- high-precision floating-point reference ('RefFloat').
tests :: TestTree
tests =
  testGroup "QuadraticIrrational"
  -- Round-tripping through each constructor/eliminator pair must
  -- preserve the approximate numeric value.
  [ testGroup "Construction/destruction/conversion"
    [ testProperty "qi/runQI" $ \a b (NonNegative c) (NonZero d) ->
        runQI (qi a b c d) $ \a' b' c' d' ->
          approxEq' (approxQI a b c d) (approxQI a' b' c' d')
    , testProperty "qi/runQI'" $ \a b (NonNegative c) (NonZero d) ->
        runQI' (qi a b c d) $ \a' b' c' ->
          approxEq' (approxQI a b c d) (approxQI' a' b' c')
    , testProperty "qi'/runQI" $ \a b (NonNegative c) ->
        runQI (qi' a b c) $ \a' b' c' d' ->
          approxEq' (approxQI' a b c) (approxQI a' b' c' d')
    , testProperty "qi'/runQI'" $ \a b (NonNegative c) ->
        runQI' (qi' a b c) $ \a' b' c' ->
          approxEq' (approxQI' a b c) (approxQI' a' b' c')
    ]
  -- Each lens-based update must agree with rebuilding the value through
  -- the constructor with the same transformation applied.
  , testGroup "Lenses"
    [ testProperty "_qi" $ \n a' b' (NonNegative c') (NonZero d') ->
        let n'  = over _qi (\(a,b,c,d) -> (a+a',b-b',c*c',d*d')) n
            n'' = runQI n $ \a b c d -> qi (a+a') (b-b') (c*c') (d*d')
        in approxEq (qiToFloat n') (qiToFloat n'')
    , testProperty "_qi'" $ \n a' b' (NonNegative c') ->
        let n'  = over _qi' (\(a,b,c) -> (a+a',b-b',c*c')) n
            n'' = runQI' n $ \a b c -> qi' (a+a') (b-b') (c*c')
        in approxEq (qiToFloat n') (qiToFloat n'')
    , testProperty "_qiABD" $ \n a' b' (NonZero d') ->
        let n'  = over _qiABD (\(a,b,d) -> (a+a',b-b',d*d')) n
            n'' = runQI n $ \a b c d -> qi (a+a') (b-b') c (d*d')
        in approxEq (qiToFloat n') (qiToFloat n'')
    , testProperty "_qiA" $ \n a' ->
        let n'  = over _qiA (+ a') n
            n'' = runQI n $ \a b c d -> qi (a+a') b c d
        in approxEq (qiToFloat n') (qiToFloat n'')
    , testProperty "_qiB" $ \n b' ->
        let n'  = over _qiB (+ b') n
            n'' = runQI n $ \a b c d -> qi a (b+b') c d
        in approxEq (qiToFloat n') (qiToFloat n'')
    , testProperty "_qiC" $ \n (NonNegative c') ->
        let n'  = over _qiC (* c') n
            n'' = runQI n $ \a b c d -> qi a b (c*c') d
        in approxEq (qiToFloat n') (qiToFloat n'')
    , testProperty "_qiD" $ \n (NonZero d') ->
        let n'  = over _qiD (* d') n
            n'' = runQI n $ \a b c d -> qi a b c (d*d')
        in approxEq (qiToFloat n') (qiToFloat n'')
    ]
  -- Arithmetic on QI values must track ordinary floating-point
  -- arithmetic on their approximations.
  , testGroup "Numerical operations"
    [ testProperty "qiToFloat" $ \a b (NonNegative c) (NonZero d) ->
        approxEq' (qiToFloat (qi a b c d)) (approxQI a b c d)
    , testProperty "compare equals" $ \a ->
        conjoin [ a === a ]
          `const` (a :: QI)
    , testProperty "qiAddI" $ \n x ->
        approxEq' (qiToFloat (qiAddI n x)) (qiToFloat n + fromInteger x)
    , testProperty "qiSubI" $ \n x ->
        approxEq' (qiToFloat (qiSubI n x)) (qiToFloat n - fromInteger x)
    , testProperty "qiMulI" $ \n x ->
        approxEq' (qiToFloat (qiMulI n x)) (qiToFloat n * fromInteger x)
    , testProperty "qiDivI" $ \n x ->
        x /= 0 ==>
          approxEq' (qiToFloat (qiDivI n x)) (qiToFloat n / fromInteger x)
    , testProperty "qiAddR" $ \n x ->
        approxEq' (qiToFloat (qiAddR n x)) (qiToFloat n + fromRational x)
    , testProperty "qiSubR" $ \n x ->
        approxEq' (qiToFloat (qiSubR n x)) (qiToFloat n - fromRational x)
    , testProperty "qiMulR" $ \n x ->
        approxEq' (qiToFloat (qiMulR n x)) (qiToFloat n * fromRational x)
    , testProperty "qiDivR" $ \n x ->
        x /= 0 ==>
          approxEq' (qiToFloat (qiDivR n x)) (qiToFloat n / fromRational x)
    , testProperty "qiNegate" $ \n ->
        approxEq' (qiToFloat (qiNegate n)) (negate (qiToFloat n))
    -- qiRecip is partial (fails on zero), so zero-ish inputs are
    -- filtered out before forcing the Just.
    , testProperty "qiRecip" $ \n ->
        not (approxEq (qiToFloat n) 0)
          ==> let ~(Just nr) = qiRecip n
              in approxEq' (qiToFloat nr) (recip (qiToFloat n))
    -- The binary operations only succeed on operands with a shared
    -- radicand, hence withCompatiblePair.
    , testProperty "qiAdd" . withCompatiblePair $ \n n' ->
        let ~(Just r) = qiAdd n n'
        in approxEq' (qiToFloat r) (qiToFloat n + qiToFloat n')
    , testProperty "qiSub" . withCompatiblePair $ \n n' ->
        let ~(Just r) = qiSub n n'
        in approxEq' (qiToFloat r) (qiToFloat n - qiToFloat n')
    , testProperty "qiMul" . withCompatiblePair $ \n n' ->
        let ~(Just r) = qiMul n n'
        in approxEq' (qiToFloat r) (qiToFloat n * qiToFloat n')
    , testProperty "qiDiv" . withCompatiblePair $ \n n' ->
        let ~(Just r) = qiDiv n n'
        in not (approxEq (qiToFloat n') 0)
             ==> approxEq' (qiToFloat r) (qiToFloat n / qiToFloat n')
    , testProperty "qiPow" $ \n (NonNegative p) ->
        -- Limit the power for speed.
        (p <= 10) ==>
          approxEq' (qiToFloat (qiPow n p))
                    -- CReal seems to diverge in 0 ** 1, use (^).
                    (qiToFloat n ^ p)
    , testProperty "qiFloor" $ \n ->
        qiFloor n === floor (qiToFloat n :: RefFloat)
    ]
  -- Conversion to and from (periodic) continued fractions.
  , testGroup "Continued fractions"
    [ testProperty "qiToContinuedFraction/continuedFractionToQI" $ \n ->
        let cf@(_, CycList _ xs) = qiToContinuedFraction n
            -- Limit the length of the periodic part for speed.
        in (length xs <= 100) ==>
           (qiToFloat n :: Double) === qiToFloat (continuedFractionToQI cf)
    , testProperty "continuedFractionApproximate" $ \n ->
        let cf = qiToContinuedFraction n
            n' = continuedFractionApproximate 20 cf
        in approxEq' (qiToFloat n) (fromRational n')
    ]
  ]
-- | Run a binary property over two 'QI' values that are forced to share
-- the same radicand, so the partial binary operations ('qiAdd',
-- 'qiSub', 'qiMul', 'qiDiv') are applicable to the pair.  Both
-- operands are reported as counterexamples on failure.
withCompatiblePair :: Testable p
                   => (QI -> QI -> p) -> QI -> QI -> Property
withCompatiblePair f n0_ n1_ =
    counterexample ("n0 = " ++ show n0) $
    counterexample ("n1 = " ++ show n1) $
    f n0 n1
  where
    -- n0 is rebuilt unchanged; n1 is rebuilt with n0's radicand.
    n0 = runQI n0_ qi
    n1 = runQI n0_ $ \_ _ c _ -> runQI n1_ $ \a b _ d -> qi a b c d
-- | High-precision reference value of @(a + b * sqrt c) / d@.
approxQI :: Integer -> Integer -> Integer -> Integer -> RefFloat
approxQI a b c d = numerator' / fromInteger d
  where
    numerator' = fromInteger a + fromInteger b * sqrt (fromInteger c)
-- | High-precision reference value of @a + b * sqrt c@ for the
-- rational-coefficient form.
approxQI' :: Rational -> Rational -> Integer -> RefFloat
approxQI' a b c = fromRational a + fromRational b * root
  where
    root = sqrt (fromInteger c)
-- | Relative approximate equality with a fixed tolerance, scaled by the
-- magnitude of the larger operand (never below 1).
approxEq :: RefFloat -> RefFloat -> Bool
approxEq a b = abs (b - a) < tolerance * scale
  where
    tolerance = 1e-6
    scale     = maximum [ 1, abs a, abs b ]
-- | 'approxEq' lifted to a QuickCheck 'Property' with a descriptive
-- counterexample message including the absolute difference.
approxEq' :: RefFloat -> RefFloat -> Property
approxEq' a b = counterexample message (approxEq a b)
  where
    message = show a ++ " is not approximately " ++ show b ++ " (diff = "
              ++ show (abs (b - a)) ++ ")"
| ion1/quadratic-irrational | tests/QuadraticIrrational.hs | mit | 7,381 | 0 | 19 | 2,393 | 3,132 | 1,576 | 1,556 | 142 | 1 |
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.FocusEvent
(js_getRelatedTarget, getRelatedTarget, FocusEvent,
castToFocusEvent, gTypeFocusEvent)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSVal(..), JSString)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSVal(..), FromJSVal(..))
import GHCJS.Marshal.Pure (PToJSVal(..), PFromJSVal(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.JSFFI.Generated.Enums
-- Raw JavaScript FFI binding: reads the @relatedTarget@ property of the
-- event object.  The result is 'Nullable' because the property can be
-- @null@ on the JS side.
foreign import javascript unsafe "$1[\"relatedTarget\"]"
        js_getRelatedTarget :: FocusEvent -> IO (Nullable EventTarget)
-- | Read the secondary target of a focus event, if any.
--
-- <https://developer.mozilla.org/en-US/docs/Web/API/FocusEvent.relatedTarget Mozilla FocusEvent.relatedTarget documentation>
getRelatedTarget :: (MonadIO m) => FocusEvent -> m (Maybe EventTarget)
getRelatedTarget self = liftIO $ nullableToMaybe <$> js_getRelatedTarget self
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.